Diffstat (limited to 'compiler')
514 files changed, 13525 insertions, 11671 deletions
diff --git a/compiler/rustc_abi/Cargo.toml b/compiler/rustc_abi/Cargo.toml index 5f9afc46a1a..3de6d186b8a 100644 --- a/compiler/rustc_abi/Cargo.toml +++ b/compiler/rustc_abi/Cargo.toml @@ -5,16 +5,17 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true rand = { version = "0.9.0", default-features = false, optional = true } rand_xoshiro = { version = "0.7.0", optional = true } rustc_data_structures = { path = "../rustc_data_structures", optional = true } +rustc_error_messages = { path = "../rustc_error_messages", optional = true } rustc_hashes = { path = "../rustc_hashes" } rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } rustc_span = { path = "../rustc_span", optional = true } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [features] @@ -24,6 +25,7 @@ default = ["nightly", "randomize"] # without depending on rustc_data_structures, rustc_macros and rustc_serialize nightly = [ "dep:rustc_data_structures", + "dep:rustc_error_messages", "dep:rustc_macros", "dep:rustc_serialize", "dep:rustc_span", diff --git a/compiler/rustc_abi/src/extern_abi.rs b/compiler/rustc_abi/src/extern_abi.rs index 29a3678abf3..41d744e1946 100644 --- a/compiler/rustc_abi/src/extern_abi.rs +++ b/compiler/rustc_abi/src/extern_abi.rs @@ -223,6 +223,9 @@ impl StableOrd for ExternAbi { const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = (); } +#[cfg(feature = "nightly")] +rustc_error_messages::into_diag_arg_using_display!(ExternAbi); + impl ExternAbi { /// An ABI "like Rust" /// diff --git a/compiler/rustc_ast/Cargo.toml b/compiler/rustc_ast/Cargo.toml index 155e14a3796..0b8ab7c391c 100644 --- a/compiler/rustc_ast/Cargo.toml +++ b/compiler/rustc_ast/Cargo.toml @@ -5,9 +5,9 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" -memchr = "2.7.4" -rustc-literal-escaper = "0.0.5" +bitflags.workspace = true +memchr.workspace = true +rustc-literal-escaper.workspace = true rustc_ast_ir = { path = "../rustc_ast_ir" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_index = { path = "../rustc_index" } @@ -15,6 +15,6 @@ rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" +thin-vec.workspace = true tracing = "0.1" # tidy-alphabetical-end diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 87c9c797ea5..de3e0e0c87f 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -3137,7 +3137,7 @@ impl FnRetTy { #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, Walkable)] pub enum Inline { Yes, - No, + No { had_parse_error: Result<(), ErrorGuaranteed> }, } /// Module item kind. @@ -3147,7 +3147,7 @@ pub enum ModKind { /// or with definition outlined to a separate file `mod foo;` and already loaded from it. /// The inner span is from the first token past `{` to the last token until `}`, /// or from the first to the last token in the loaded file. - Loaded(ThinVec<Box<Item>>, Inline, ModSpans, Result<(), ErrorGuaranteed>), + Loaded(ThinVec<Box<Item>>, Inline, ModSpans), /// Module with definition outlined to a separate file `mod foo;` but not yet loaded from it. 
Unloaded, } diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index fc816f2cb79..ea98bebd305 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -7,6 +7,7 @@ pub use NtPatKind::*; pub use TokenKind::*; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::edition::Edition; +use rustc_span::symbol::IdentPrintMode; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym}; #[allow(clippy::useless_attribute)] // FIXME: following use of `hidden_glob_reexports` incorrectly triggers `useless_attribute` lint. #[allow(hidden_glob_reexports)] @@ -344,15 +345,24 @@ pub enum IdentIsRaw { Yes, } -impl From<bool> for IdentIsRaw { - fn from(b: bool) -> Self { - if b { Self::Yes } else { Self::No } +impl IdentIsRaw { + pub fn to_print_mode_ident(self) -> IdentPrintMode { + match self { + IdentIsRaw::No => IdentPrintMode::Normal, + IdentIsRaw::Yes => IdentPrintMode::RawIdent, + } + } + pub fn to_print_mode_lifetime(self) -> IdentPrintMode { + match self { + IdentIsRaw::No => IdentPrintMode::Normal, + IdentIsRaw::Yes => IdentPrintMode::RawLifetime, + } } } -impl From<IdentIsRaw> for bool { - fn from(is_raw: IdentIsRaw) -> bool { - matches!(is_raw, IdentIsRaw::Yes) +impl From<bool> for IdentIsRaw { + fn from(b: bool) -> Self { + if b { Self::Yes } else { Self::No } } } diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index e55399adfb8..f4f35a4d2ee 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -907,6 +907,12 @@ impl TokenTreeCursor { pub fn bump(&mut self) { self.index += 1; } + + // For skipping ahead in rare circumstances. + #[inline] + pub fn bump_to_end(&mut self) { + self.index = self.stream.len(); + } } /// A `TokenStream` cursor that produces `Token`s. 
It's a bit odd that diff --git a/compiler/rustc_ast_lowering/Cargo.toml b/compiler/rustc_ast_lowering/Cargo.toml index 6ac258155fe..317742a3bb8 100644 --- a/compiler/rustc_ast_lowering/Cargo.toml +++ b/compiler/rustc_ast_lowering/Cargo.toml @@ -24,6 +24,6 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_ast_lowering/src/block.rs b/compiler/rustc_ast_lowering/src/block.rs index 2cc07694afb..f1e810a8b9e 100644 --- a/compiler/rustc_ast_lowering/src/block.rs +++ b/compiler/rustc_ast_lowering/src/block.rs @@ -1,5 +1,6 @@ use rustc_ast::{Block, BlockCheckMode, Local, LocalKind, Stmt, StmtKind}; use rustc_hir as hir; +use rustc_hir::Target; use rustc_span::sym; use smallvec::SmallVec; @@ -109,7 +110,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; let span = self.lower_span(l.span); let source = hir::LocalSource::Normal; - self.lower_attrs(hir_id, &l.attrs, l.span); + self.lower_attrs(hir_id, &l.attrs, l.span, Target::Statement); self.arena.alloc(hir::LetStmt { hir_id, super_, ty, pat, init, els, span, source }) } diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 2ddbf083a09..cbd17d66b75 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -7,7 +7,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir as hir; use rustc_hir::attrs::AttributeKind; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::{HirId, find_attr}; +use rustc_hir::{HirId, Target, find_attr}; use rustc_middle::span_bug; use rustc_middle::ty::TyCtxt; use rustc_session::errors::report_lit_error; @@ -74,7 +74,7 @@ impl<'hir> LoweringContext<'_, 'hir> { if !e.attrs.is_empty() { let old_attrs = self.attrs.get(&ex.hir_id.local_id).copied().unwrap_or(&[]); let new_attrs = self - .lower_attrs_vec(&e.attrs, e.span, ex.hir_id) + .lower_attrs_vec(&e.attrs, e.span, ex.hir_id, Target::from_expr(e)) .into_iter() .chain(old_attrs.iter().cloned()); let new_attrs = &*self.arena.alloc_from_iter(new_attrs); @@ -97,7 +97,7 @@ impl<'hir> LoweringContext<'_, 'hir> { } let expr_hir_id = self.lower_node_id(e.id); - let attrs = self.lower_attrs(expr_hir_id, &e.attrs, e.span); + let attrs = self.lower_attrs(expr_hir_id, &e.attrs, e.span, Target::from_expr(e)); let kind = match &e.kind { ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), @@ -639,7 +639,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let guard = arm.guard.as_ref().map(|cond| self.lower_expr(cond)); let hir_id = self.next_id(); let span = self.lower_span(arm.span); - self.lower_attrs(hir_id, &arm.attrs, arm.span); + self.lower_attrs(hir_id, &arm.attrs, arm.span, Target::Arm); let is_never_pattern = pat.is_never_pattern(); // We need to lower the body even if it's unneeded for never pattern in match, // ensure that we can get HirId for DefId if need (issue #137708). 
@@ -820,6 +820,7 @@ impl<'hir> LoweringContext<'_, 'hir> { span: unstable_span, }], span, + Target::Fn, ); } } @@ -1654,7 +1655,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> { let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs, f.span); + self.lower_attrs(hir_id, &f.attrs, f.span, Target::ExprField); hir::ExprField { hir_id, ident: self.lower_ident(f.ident), @@ -1910,7 +1911,7 @@ impl<'hir> LoweringContext<'_, 'hir> { // // Also, add the attributes to the outer returned expr node. let expr = self.expr_drop_temps_mut(for_span, match_expr); - self.lower_attrs(expr.hir_id, &e.attrs, e.span); + self.lower_attrs(expr.hir_id, &e.attrs, e.span, Target::from_expr(e)); expr } @@ -1967,7 +1968,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let val_ident = Ident::with_dummy_span(sym::val); let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident); let val_expr = self.expr_ident(span, val_ident, val_pat_nid); - self.lower_attrs(val_expr.hir_id, &attrs, span); + self.lower_attrs(val_expr.hir_id, &attrs, span, Target::Expression); let continue_pat = self.pat_cf_continue(unstable_span, val_pat); self.arm(continue_pat, val_expr) }; @@ -1998,7 +1999,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let ret_expr = self.checked_return(Some(from_residual_expr)); self.arena.alloc(self.expr(try_span, ret_expr)) }; - self.lower_attrs(ret_expr.hir_id, &attrs, span); + self.lower_attrs(ret_expr.hir_id, &attrs, span, Target::Expression); let break_pat = self.pat_cf_break(try_span, residual_local); self.arm(break_pat, ret_expr) diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 235573c96e4..bb559bd8921 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -5,7 +5,7 @@ use rustc_errors::{E0570, ErrorGuaranteed, struct_span_code_err}; use rustc_hir::attrs::AttributeKind; use rustc_hir::def::{DefKind, PerNS, Res}; use rustc_hir::def_id::{CRATE_DEF_ID, LocalDefId}; -use rustc_hir::{self as hir, HirId, LifetimeSource, PredicateOrigin, find_attr}; +use rustc_hir::{self as hir, HirId, LifetimeSource, PredicateOrigin, Target, find_attr}; use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::span_bug; use rustc_middle::ty::{ResolverAstLowering, TyCtxt}; @@ -80,7 +80,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> { self.with_lctx(CRATE_NODE_ID, |lctx| { let module = lctx.lower_mod(&c.items, &c.spans); // FIXME(jdonszelman): is dummy span ever a problem here? 
- lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs, DUMMY_SP); + lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs, DUMMY_SP, Target::Crate); hir::OwnerNode::Crate(module) }) } @@ -136,7 +136,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_item(&mut self, i: &Item) -> &'hir hir::Item<'hir> { let vis_span = self.lower_span(i.vis.span); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span, Target::from_ast_item(i)); let kind = self.lower_item_kind(i.span, i.id, hir_id, attrs, vis_span, &i.kind); let item = hir::Item { owner_id: hir_id.expect_owner(), @@ -251,7 +251,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ItemKind::Mod(_, ident, mod_kind) => { let ident = self.lower_ident(*ident); match mod_kind { - ModKind::Loaded(items, _, spans, _) => { + ModKind::Loaded(items, _, spans) => { hir::ItemKind::Mod(ident, self.lower_mod(items, spans)) } ModKind::Unloaded => panic!("`mod` items should have been loaded by now"), @@ -436,14 +436,14 @@ impl<'hir> LoweringContext<'_, 'hir> { let body = Box::new(self.lower_delim_args(body)); let def_id = self.local_def_id(id); let def_kind = self.tcx.def_kind(def_id); - let DefKind::Macro(macro_kind) = def_kind else { + let DefKind::Macro(macro_kinds) = def_kind else { unreachable!( "expected DefKind::Macro for macro item, found {}", def_kind.descr(def_id.to_def_id()) ); }; let macro_def = self.arena.alloc(ast::MacroDef { body, macro_rules: *macro_rules }); - hir::ItemKind::Macro(ident, macro_def, macro_kind) + hir::ItemKind::Macro(ident, macro_def, macro_kinds) } ItemKind::Delegation(box delegation) => { let delegation_results = self.lower_delegation(delegation, id, false); @@ -621,7 +621,8 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> { let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); let owner_id = hir_id.expect_owner(); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); + let attrs = + self.lower_attrs(hir_id, &i.attrs, i.span, Target::from_foreign_item_kind(&i.kind)); let (ident, kind) = match &i.kind { ForeignItemKind::Fn(box Fn { sig, ident, generics, define_opaque, .. 
}) => { let fdec = &sig.decl; @@ -690,7 +691,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_variant(&mut self, item_kind: &ItemKind, v: &Variant) -> hir::Variant<'hir> { let hir_id = self.lower_node_id(v.id); - self.lower_attrs(hir_id, &v.attrs, v.span); + self.lower_attrs(hir_id, &v.attrs, v.span, Target::Variant); hir::Variant { hir_id, def_id: self.local_def_id(v.id), @@ -773,7 +774,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::FieldDef<'hir> { let ty = self.lower_ty(&f.ty, ImplTraitContext::Disallowed(ImplTraitPosition::FieldTy)); let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs, f.span); + self.lower_attrs(hir_id, &f.attrs, f.span, Target::Field); hir::FieldDef { span: self.lower_span(f.span), hir_id, @@ -792,7 +793,12 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_trait_item(&mut self, i: &AssocItem) -> &'hir hir::TraitItem<'hir> { let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); + let attrs = self.lower_attrs( + hir_id, + &i.attrs, + i.span, + Target::from_assoc_item_kind(&i.kind, AssocCtxt::Trait), + ); let trait_item_def_id = hir_id.expect_owner(); let (ident, generics, kind, has_default) = match &i.kind { @@ -1001,7 +1007,12 @@ impl<'hir> LoweringContext<'_, 'hir> { let has_value = true; let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); + let attrs = self.lower_attrs( + hir_id, + &i.attrs, + i.span, + Target::from_assoc_item_kind(&i.kind, AssocCtxt::Impl { of_trait: is_in_trait_impl }), + ); let (ident, (generics, kind)) = match &i.kind { AssocItemKind::Const(box ConstItem { @@ -1171,7 +1182,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_param(&mut self, param: &Param) -> hir::Param<'hir> { let hir_id = self.lower_node_id(param.id); - self.lower_attrs(hir_id, &param.attrs, param.span); + self.lower_attrs(hir_id, &param.attrs, param.span, Target::Param); hir::Param { hir_id, pat: self.lower_pat(&param.pat), @@ -1585,7 +1596,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let safety = self.lower_safety(h.safety, default_safety); // Treat safe `#[target_feature]` functions as unsafe, but also remember that we did so. - let safety = if find_attr!(attrs, AttributeKind::TargetFeature { .. }) + let safety = if find_attr!(attrs, AttributeKind::TargetFeature { was_forced: false, ..
}) && safety.is_safe() && !self.tcx.sess.target.is_like_wasm { @@ -1851,7 +1862,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::WherePredicate<'hir> { let hir_id = self.lower_node_id(pred.id); let span = self.lower_span(pred.span); - self.lower_attrs(hir_id, &pred.attrs, span); + self.lower_attrs(hir_id, &pred.attrs, span, Target::WherePredicate); let kind = self.arena.alloc(match &pred.kind { WherePredicateKind::BoundPredicate(WhereBoundPredicate { bound_generic_params, diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 465d9dc82bc..70595391b85 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -54,7 +54,7 @@ use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE, LocalDefId}; use rustc_hir::lints::DelayedLint; use rustc_hir::{ self as hir, AngleBrackets, ConstArg, GenericArg, HirId, ItemLocalMap, LifetimeSource, - LifetimeSyntax, ParamName, TraitCandidate, + LifetimeSyntax, ParamName, Target, TraitCandidate, }; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_macros::extension; @@ -943,11 +943,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { id: HirId, attrs: &[Attribute], target_span: Span, + target: Target, ) -> &'hir [hir::Attribute] { if attrs.is_empty() { &[] } else { - let lowered_attrs = self.lower_attrs_vec(attrs, self.lower_span(target_span), id); + let lowered_attrs = + self.lower_attrs_vec(attrs, self.lower_span(target_span), id, target); assert_eq!(id.owner, self.current_hir_id_owner); let ret = self.arena.alloc_from_iter(lowered_attrs); @@ -972,12 +974,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { attrs: &[Attribute], target_span: Span, target_hir_id: HirId, + target: Target, ) -> Vec<hir::Attribute> { let l = self.span_lowerer(); self.attribute_parser.parse_attribute_list( attrs, target_span, target_hir_id, + target, OmitDoc::Lower, |s| l.lower(s), |l| { @@ -1942,7 +1946,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let (name, kind) = self.lower_generic_param_kind(param, source); let hir_id = self.lower_node_id(param.id); - self.lower_attrs(hir_id, &param.attrs, param.span()); + self.lower_attrs(hir_id, &param.attrs, param.span(), Target::Param); hir::GenericParam { hir_id, def_id: self.local_def_id(param.id), diff --git a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs index b6533060a76..b8f86247875 100644 --- a/compiler/rustc_ast_lowering/src/pat.rs +++ b/compiler/rustc_ast_lowering/src/pat.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use rustc_ast::*; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::{self as hir, LangItem}; +use rustc_hir::{self as hir, LangItem, Target}; use rustc_middle::span_bug; use rustc_span::source_map::{Spanned, respan}; use rustc_span::{DesugaringKind, Ident, Span}; @@ -93,7 +93,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let fs = self.arena.alloc_from_iter(fields.iter().map(|f| { let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs, f.span); + self.lower_attrs(hir_id, &f.attrs, f.span, Target::PatField); hir::PatField { hir_id, diff --git a/compiler/rustc_ast_passes/Cargo.toml b/compiler/rustc_ast_passes/Cargo.toml index 1940628b44a..6d2bcbed22b 100644 --- a/compiler/rustc_ast_passes/Cargo.toml +++ b/compiler/rustc_ast_passes/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" } @@ -15,9 +15,8 @@ rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_macros = { path = "../rustc_macros" } -rustc_parse = { path = "../rustc_parse" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } -thin-vec = "0.2.12" +thin-vec.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl index 340a1a239c5..a95f1443968 100644 --- a/compiler/rustc_ast_passes/messages.ftl +++ b/compiler/rustc_ast_passes/messages.ftl @@ -17,9 +17,13 @@ ast_passes_abi_must_not_have_parameters_or_return_type= ast_passes_abi_must_not_have_return_type= invalid signature for `extern {$abi}` function - .note = functions with the "custom" ABI cannot have a return type + .note = functions with the {$abi} ABI cannot have a return type .help = remove the return type +ast_passes_abi_x86_interrupt = + invalid signature for `extern "x86-interrupt"` function + .note = functions with the "x86-interrupt" ABI must have either 1 or 2 parameters (but found {$param_count}) + ast_passes_assoc_const_without_body = associated constant in `impl` without body .suggestion = provide a definition for the constant @@ -32,6 +36,13 @@ ast_passes_assoc_type_without_body = associated type in `impl` without body .suggestion = provide a definition for the type +ast_passes_async_fn_in_const_trait_or_trait_impl = + async functions are not allowed in `const` {$in_impl -> + [true] trait impls + *[false] traits + } + .label = associated functions of `const` cannot be declared `async` + ast_passes_at_least_one_trait = at least one trait must be specified ast_passes_auto_generic = auto traits cannot have generic parameters @@ -102,6 +113,10 @@ ast_passes_extern_without_abi = `extern` declarations without an explicit ABI ar .suggestion = specify an ABI .help = prior to Rust 2024, a default ABI was inferred +ast_passes_extern_without_abi_sugg = `extern` declarations without an explicit ABI are deprecated + .label = ABI should be specified here + .suggestion = explicitly specify the {$default_abi} ABI + ast_passes_feature_on_non_nightly = `#![feature]` may not be used on the {$channel} release channel .suggestion = remove the attribute .stable_since = the feature `{$name}` has been stable since `{$since}` and no longer requires an attribute to enable diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index dc2eb17589c..6133cc3548b 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -25,16 +25,16 @@ use rustc_abi::{CanonAbi, ExternAbi, InterruptKind}; use rustc_ast::visit::{AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor, walk_list}; use rustc_ast::*; use rustc_ast_pretty::pprust::{self, State}; +use rustc_attr_parsing::validate_attr; use rustc_data_structures::fx::FxIndexMap; -use rustc_errors::DiagCtxtHandle; +use rustc_errors::{DiagCtxtHandle, LintBuffer}; use rustc_feature::Features; -use rustc_parse::validate_attr; use rustc_session::Session; +use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{ DEPRECATED_WHERE_CLAUSE_LOCATION, MISSING_ABI, MISSING_UNSAFE_ON_EXTERN, PATTERNS_IN_FNS_WITHOUT_BODY, }; -use rustc_session::lint::{BuiltinLintDiag, LintBuffer}; use rustc_span::{Ident, Span, kw, sym}; use
rustc_target::spec::{AbiMap, AbiMapping}; use thin_vec::thin_vec; @@ -293,6 +293,21 @@ impl<'a> AstValidator<'a> { }); } + fn check_async_fn_in_const_trait_or_impl(&self, sig: &FnSig, parent: &TraitOrTraitImpl) { + let Some(const_keyword) = parent.constness() else { return }; + + let Some(CoroutineKind::Async { span: async_keyword, .. }) = sig.header.coroutine_kind + else { + return; + }; + + self.dcx().emit_err(errors::AsyncFnInConstTraitOrTraitImpl { + async_keyword, + in_impl: matches!(parent, TraitOrTraitImpl::TraitImpl { .. }), + const_keyword, + }); + } + fn check_fn_decl(&self, fn_decl: &FnDecl, self_semantic: SelfSemantic) { self.check_decl_num_args(fn_decl); self.check_decl_cvariadic_pos(fn_decl); @@ -390,7 +405,24 @@ impl<'a> AstValidator<'a> { if let InterruptKind::X86 = interrupt_kind { // "x86-interrupt" is special because it does have arguments. // FIXME(workingjubilee): properly lint on acceptable input types. - if let FnRetTy::Ty(ref ret_ty) = sig.decl.output { + let inputs = &sig.decl.inputs; + let param_count = inputs.len(); + if !matches!(param_count, 1 | 2) { + let mut spans: Vec<Span> = + inputs.iter().map(|arg| arg.span).collect(); + if spans.is_empty() { + spans = vec![sig.span]; + } + self.dcx().emit_err(errors::AbiX86Interrupt { spans, param_count }); + } + + if let FnRetTy::Ty(ref ret_ty) = sig.decl.output + && match &ret_ty.kind { + TyKind::Never => false, + TyKind::Tup(tup) if tup.is_empty() => false, + _ => true, + } + { self.dcx().emit_err(errors::AbiMustNotHaveReturnType { span: ret_ty.span, abi, @@ -449,7 +481,13 @@ impl<'a> AstValidator<'a> { fn reject_params_or_return(&self, abi: ExternAbi, ident: &Ident, sig: &FnSig) { let mut spans: Vec<_> = sig.decl.inputs.iter().map(|p| p.span).collect(); - if let FnRetTy::Ty(ref ret_ty) = sig.decl.output { + if let FnRetTy::Ty(ref ret_ty) = sig.decl.output + && match &ret_ty.kind { + TyKind::Never => false, + TyKind::Tup(tup) if tup.is_empty() => false, + _ => true, + } + { spans.push(ret_ty.span); } @@ -838,7 +876,7 @@ impl<'a> AstValidator<'a> { MISSING_ABI, id, span, - BuiltinLintDiag::MissingAbi(span, ExternAbi::FALLBACK), + errors::MissingAbiSugg { span, default_abi: ExternAbi::FALLBACK }, ) } } @@ -1142,7 +1180,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.dcx().emit_err(errors::UnsafeItem { span, kind: "module" }); } // Ensure that `path` attributes on modules are recorded as used (cf. issue #35584). - if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _, _)) + if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _)) && !attr::contains_name(&item.attrs, sym::path) { self.check_mod_file_item_asciionly(*ident); @@ -1566,6 +1604,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.visibility_not_permitted(&item.vis, errors::VisibilityNotPermittedNote::TraitImpl); if let AssocItemKind::Fn(box Fn { sig, .. 
}) = &item.kind { self.check_trait_fn_not_const(sig.header.constness, parent); + self.check_async_fn_in_const_trait_or_impl(sig, parent); } } @@ -1741,7 +1780,7 @@ fn deny_equality_constraints( .map(|segment| segment.ident.name) .zip(poly.trait_ref.path.segments.iter().map(|segment| segment.ident.name)) .all(|(a, b)| a == b) - && let Some(potential_assoc) = full_path.segments.iter().last() + && let Some(potential_assoc) = full_path.segments.last() { suggest(poly, potential_assoc, predicate); } diff --git a/compiler/rustc_ast_passes/src/errors.rs b/compiler/rustc_ast_passes/src/errors.rs index 1cb2493afe8..ae8f056cb4e 100644 --- a/compiler/rustc_ast_passes/src/errors.rs +++ b/compiler/rustc_ast_passes/src/errors.rs @@ -4,7 +4,7 @@ use rustc_abi::ExternAbi; use rustc_ast::ParamKindOrd; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, EmissionGuarantee, Subdiagnostic}; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_span::{Ident, Span, Symbol}; use crate::fluent_generated as fluent; @@ -63,6 +63,16 @@ pub(crate) struct TraitFnConst { } #[derive(Diagnostic)] +#[diag(ast_passes_async_fn_in_const_trait_or_trait_impl)] +pub(crate) struct AsyncFnInConstTraitOrTraitImpl { + #[primary_span] + pub async_keyword: Span, + pub in_impl: bool, + #[label] + pub const_keyword: Span, +} + +#[derive(Diagnostic)] #[diag(ast_passes_forbidden_bound)] pub(crate) struct ForbiddenBound { #[primary_span] @@ -805,6 +815,14 @@ pub(crate) struct MissingAbi { pub span: Span, } +#[derive(LintDiagnostic)] +#[diag(ast_passes_extern_without_abi_sugg)] +pub(crate) struct MissingAbiSugg { + #[suggestion(code = "extern {default_abi}", applicability = "machine-applicable")] + pub span: Span, + pub default_abi: ExternAbi, +} + #[derive(Diagnostic)] #[diag(ast_passes_abi_custom_safe_foreign_function)] pub(crate) struct AbiCustomSafeForeignFunction { @@ -881,3 +899,12 @@ pub(crate) struct AbiMustNotHaveReturnType { pub span: Span, pub abi: ExternAbi, } + +#[derive(Diagnostic)] +#[diag(ast_passes_abi_x86_interrupt)] +#[note] +pub(crate) struct AbiX86Interrupt { + #[primary_span] + pub spans: Vec<Span>, + pub param_count: usize, +} diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index c9344a76a7b..9ab5b0b3547 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -188,6 +188,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { notable_trait => doc_notable_trait } "meant for internal use only" { + attribute => rustdoc_internals keyword => rustdoc_internals fake_variadic => rustdoc_internals search_unbox => rustdoc_internals @@ -524,6 +525,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { gate_all!(where_clause_attrs, "attributes in `where` clause are unstable"); gate_all!(super_let, "`super let` is experimental"); gate_all!(frontmatter, "frontmatters are experimental"); + gate_all!(coroutines, "coroutine syntax is experimental"); if !visitor.features.never_patterns() { if let Some(spans) = spans.get(&sym::never_patterns) { diff --git a/compiler/rustc_ast_pretty/Cargo.toml b/compiler/rustc_ast_pretty/Cargo.toml index b704040be96..0c7e55305b4 100644 --- a/compiler/rustc_ast_pretty/Cargo.toml +++ b/compiler/rustc_ast_pretty/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_ast = { path = "../rustc_ast" } 
rustc_lexer = { path = "../rustc_lexer" } rustc_span = { path = "../rustc_span" } @@ -13,5 +13,5 @@ rustc_span = { path = "../rustc_span" } [dev-dependencies] # tidy-alphabetical-start -thin-vec = "0.2.12" +thin-vec.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 85f76036df7..a056ce3e29d 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -10,7 +10,7 @@ use std::borrow::Cow; use std::sync::Arc; use rustc_ast::attr::AttrIdGenerator; -use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree}; use rustc_ast::util::classify; use rustc_ast::util::comments::{Comment, CommentStyle}; @@ -441,7 +441,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool); fn print_ident(&mut self, ident: Ident) { - self.word(IdentPrinter::for_ast_ident(ident, ident.is_raw_guess()).to_string()); + self.word(IdentPrinter::for_ast_ident(ident, ident.guess_print_mode()).to_string()); self.ann_post(ident) } @@ -1015,17 +1015,16 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere /* Name components */ token::Ident(name, is_raw) => { - IdentPrinter::new(name, is_raw.into(), convert_dollar_crate).to_string().into() + IdentPrinter::new(name, is_raw.to_print_mode_ident(), convert_dollar_crate) + .to_string() + .into() } token::NtIdent(ident, is_raw) => { - IdentPrinter::for_ast_ident(ident, is_raw.into()).to_string().into() + IdentPrinter::for_ast_ident(ident, is_raw.to_print_mode_ident()).to_string().into() } - token::Lifetime(name, IdentIsRaw::No) - | token::NtLifetime(Ident { name, .. }, IdentIsRaw::No) => name.to_string().into(), - token::Lifetime(name, IdentIsRaw::Yes) - | token::NtLifetime(Ident { name, .. }, IdentIsRaw::Yes) => { - format!("'r#{}", &name.as_str()[1..]).into() + token::Lifetime(name, is_raw) | token::NtLifetime(Ident { name, .. 
}, is_raw) => { + IdentPrinter::new(name, is_raw.to_print_mode_lifetime(), None).to_string().into() } /* Other */ diff --git a/compiler/rustc_attr_parsing/Cargo.toml b/compiler/rustc_attr_parsing/Cargo.toml index cec9d62e656..8bfde43fd33 100644 --- a/compiler/rustc_attr_parsing/Cargo.toml +++ b/compiler/rustc_attr_parsing/Cargo.toml @@ -14,7 +14,9 @@ rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hir = { path = "../rustc_hir" } rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } +rustc_parse = { path = "../rustc_parse" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -thin-vec = "0.2.12" +rustc_target = { path = "../rustc_target" } +thin-vec.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_attr_parsing/messages.ftl b/compiler/rustc_attr_parsing/messages.ftl index de22ea322c7..b8a748563d5 100644 --- a/compiler/rustc_attr_parsing/messages.ftl +++ b/compiler/rustc_attr_parsing/messages.ftl @@ -10,6 +10,14 @@ attr_parsing_empty_attribute = unused attribute .suggestion = remove this attribute +attr_parsing_invalid_target = `#[{$name}]` attribute cannot be used on {$target} + .help = `#[{$name}]` can {$only}be applied to {$applied} + .suggestion = remove the attribute +attr_parsing_invalid_target_lint = `#[{$name}]` attribute cannot be used on {$target} + .warn = {-attr_parsing_previously_accepted} + .help = `#[{$name}]` can {$only}be applied to {$applied} + .suggestion = remove the attribute + attr_parsing_empty_confusables = expected at least one confusable name attr_parsing_expected_one_cfg_pattern = @@ -78,6 +86,12 @@ attr_parsing_invalid_repr_hint_no_value = attr_parsing_invalid_since = 'since' must be a Rust version number, such as "1.31.0" +attr_parsing_invalid_style = {$is_used_as_inner -> + [false] crate-level attribute should be an inner attribute: add an exclamation mark: `#![{$name}]` + *[other] the `#![{$name}]` attribute can only be used at the crate root + } + .note = This attribute does not have an `!`, which means it is applied to this {$target} + attr_parsing_link_ordinal_out_of_range = ordinal value in `link_ordinal` is too large: `{$ordinal}` .note = the value may not exceed `u16::MAX` @@ -162,3 +176,74 @@ attr_parsing_unused_multiple = -attr_parsing_previously_accepted = this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + +attr_parsing_meta_bad_delim = wrong meta list delimiters +attr_parsing_meta_bad_delim_suggestion = the delimiters should be `(` and `)` + +attr_parsing_unsafe_attr_outside_unsafe = unsafe attribute used without unsafe + .label = usage of unsafe attribute +attr_parsing_unsafe_attr_outside_unsafe_suggestion = wrap the attribute in `unsafe(...)` + +attr_parsing_invalid_attr_unsafe = `{$name}` is not an unsafe attribute + .label = this is not an unsafe attribute + .suggestion = remove the `unsafe(...)` + .note = extraneous unsafe is not allowed in attributes + +attr_parsing_invalid_meta_item = expected a literal (`1u8`, `1.0f32`, `"string"`, etc.) here, found {$descr} + .remove_neg_sugg = negative numbers are not literals, try removing the `-` sign + .quote_ident_sugg = surround the identifier with quotation marks to make it into a string literal + +attr_parsing_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes + .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.) 
+ +attr_parsing_as_needed_compatibility = + linking modifier `as-needed` is only compatible with `dylib` and `framework` linking kinds + +attr_parsing_bundle_needs_static = + linking modifier `bundle` is only compatible with `static` linking kind + +attr_parsing_empty_link_name = + link name must not be empty + .label = empty link name + +attr_parsing_import_name_type_raw = + import name type can only be used with link kind `raw-dylib` + +attr_parsing_import_name_type_x86 = + import name type is only supported on x86 + +attr_parsing_incompatible_wasm_link = + `wasm_import_module` is incompatible with other arguments in `#[link]` attributes + +attr_parsing_invalid_link_modifier = + invalid linking modifier syntax, expected '+' or '-' prefix before one of: bundle, verbatim, whole-archive, as-needed + +attr_parsing_link_arg_unstable = + link kind `link-arg` is unstable + +attr_parsing_link_cfg_unstable = + link cfg is unstable + +attr_parsing_link_framework_apple = + link kind `framework` is only supported on Apple targets + +attr_parsing_link_requires_name = + `#[link]` attribute requires a `name = "string"` argument + .label = missing `name` argument + +attr_parsing_multiple_modifiers = + multiple `{$modifier}` modifiers in a single `modifiers` argument + +attr_parsing_multiple_renamings = + multiple renamings were specified for library `{$lib_name}` +attr_parsing_raw_dylib_no_nul = + link name must not contain NUL characters if link kind is `raw-dylib` + +attr_parsing_raw_dylib_elf_unstable = + link kind `raw-dylib` is unstable on ELF platforms + +attr_parsing_raw_dylib_only_windows = + link kind `raw-dylib` is only supported on Windows targets + +attr_parsing_whole_archive_needs_static = + linking modifier `whole-archive` is only compatible with `static` linking kind diff --git a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs index b3393e93de8..088fa73d742 100644 --- a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs +++ b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs @@ -1,12 +1,6 @@ use std::iter; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; - -use super::{CombineAttributeParser, ConvertFn}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; use crate::session_diagnostics; pub(crate) struct AllowInternalUnstableParser; @@ -15,6 +9,12 @@ impl<S: Stage> CombineAttributeParser<S> for AllowInternalUnstableParser { type Item = (Symbol, Span); const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::AllowInternalUnstable(items, span); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::MacroDef), + Allow(Target::Fn), + Warn(Target::Field), + Warn(Target::Arm), + ]); const TEMPLATE: AttributeTemplate = template!(Word, List: &["feat1, feat2, ..."]); fn extend<'c>( @@ -32,6 +32,11 @@ impl<S: Stage> CombineAttributeParser<S> for UnstableFeatureBoundParser { const PATH: &'static [rustc_span::Symbol] = &[sym::unstable_feature_bound]; type Item = (Symbol, Span); const CONVERT: ConvertFn<Self::Item> = |items, _| AttributeKind::UnstableFeatureBound(items); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Impl { of_trait: true }), + Allow(Target::Trait), + ]); const TEMPLATE: AttributeTemplate = template!(Word, List: &["feat1, feat2, ..."]); fn extend<'c>( @@ -53,6 
+58,13 @@ impl<S: Stage> CombineAttributeParser<S> for AllowConstFnUnstableParser { type Item = Symbol; const CONVERT: ConvertFn<Self::Item> = |items, first_span| AttributeKind::AllowConstFnUnstable(items, first_span); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + ]); const TEMPLATE: AttributeTemplate = template!(Word, List: &["feat1, feat2, ..."]); fn extend<'c>( diff --git a/compiler/rustc_attr_parsing/src/attributes/body.rs b/compiler/rustc_attr_parsing/src/attributes/body.rs index ab9330216f6..a1492d76194 100644 --- a/compiler/rustc_attr_parsing/src/attributes/body.rs +++ b/compiler/rustc_attr_parsing/src/attributes/body.rs @@ -1,15 +1,12 @@ //! Attributes that can be found in function body. -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; - -use super::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::Stage; +use super::prelude::*; pub(crate) struct CoroutineParser; impl<S: Stage> NoArgsAttributeParser<S> for CoroutineParser { const PATH: &[Symbol] = &[sym::coroutine]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Closure)]); const CREATE: fn(rustc_span::Span) -> AttributeKind = |span| AttributeKind::Coroutine(span); } diff --git a/compiler/rustc_attr_parsing/src/attributes/cfg.rs b/compiler/rustc_attr_parsing/src/attributes/cfg.rs index 695ee666476..70855611079 100644 --- a/compiler/rustc_attr_parsing/src/attributes/cfg.rs +++ b/compiler/rustc_attr_parsing/src/attributes/cfg.rs @@ -36,7 +36,7 @@ pub fn parse_cfg_attr<'c, S: Stage>( parse_cfg_entry(cx, single) } -fn parse_cfg_entry<S: Stage>( +pub(crate) fn parse_cfg_entry<S: Stage>( cx: &mut AcceptContext<'_, '_, S>, item: &MetaItemOrLitParser<'_>, ) -> Option<CfgEntry> { diff --git a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs index a9f77195d1b..ffdacff7152 100644 --- a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs @@ -1,14 +1,7 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, CoverageAttrKind, OptimizeAttr, UsedBy}; +use rustc_hir::attrs::{CoverageAttrKind, OptimizeAttr, SanitizerSet, UsedBy}; use rustc_session::parse::feature_err; -use rustc_span::{Span, Symbol, sym}; - -use super::{ - AcceptMapping, AttributeOrder, AttributeParser, CombineAttributeParser, ConvertFn, - NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, -}; -use crate::context::{AcceptContext, FinalizeContext, Stage}; -use crate::parser::ArgParser; + +use super::prelude::*; use crate::session_diagnostics::{NakedFunctionIncompatibleAttribute, NullOnExport}; pub(crate) struct OptimizeParser; @@ -17,6 +10,13 @@ impl<S: Stage> SingleAttributeParser<S> for OptimizeParser { const PATH: &[Symbol] = &[sym::optimize]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Closure), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + 
Allow(Target::Method(MethodKind::Inherent)), + ]); const TEMPLATE: AttributeTemplate = template!(List: &["size", "speed", "none"]); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { @@ -35,7 +35,7 @@ impl<S: Stage> SingleAttributeParser<S> for OptimizeParser { Some(sym::speed) => OptimizeAttr::Speed, Some(sym::none) => OptimizeAttr::DoNotOptimize, _ => { - cx.expected_specific_argument(single.span(), vec!["size", "speed", "none"]); + cx.expected_specific_argument(single.span(), &[sym::size, sym::speed, sym::none]); OptimizeAttr::Default } }; @@ -49,6 +49,15 @@ pub(crate) struct ColdParser; impl<S: Stage> NoArgsAttributeParser<S> for ColdParser { const PATH: &[Symbol] = &[sym::cold]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::ForeignFn), + Allow(Target::Closure), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::Cold; } @@ -58,11 +67,22 @@ impl<S: Stage> SingleAttributeParser<S> for CoverageParser { const PATH: &[Symbol] = &[sym::coverage]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Closure), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Impl { of_trait: true }), + Allow(Target::Impl { of_trait: false }), + Allow(Target::Mod), + Allow(Target::Crate), + ]); const TEMPLATE: AttributeTemplate = template!(OneOf: &[sym::off, sym::on]); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { let Some(args) = args.list() else { - cx.expected_specific_argument_and_list(cx.attr_span, vec!["on", "off"]); + cx.expected_specific_argument_and_list(cx.attr_span, &[sym::on, sym::off]); return None; }; @@ -71,7 +91,8 @@ impl<S: Stage> SingleAttributeParser<S> for CoverageParser { return None; }; - let fail_incorrect_argument = |span| cx.expected_specific_argument(span, vec!["on", "off"]); + let fail_incorrect_argument = + |span| cx.expected_specific_argument(span, &[sym::on, sym::off]); let Some(arg) = arg.meta_item() else { fail_incorrect_argument(args.span); @@ -97,6 +118,17 @@ impl<S: Stage> SingleAttributeParser<S> for ExportNameParser { const PATH: &[rustc_span::Symbol] = &[sym::export_name]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Static), + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Warn(Target::Field), + Warn(Target::Arm), + Warn(Target::MacroDef), + Warn(Target::MacroCall), + ]); const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name"); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { @@ -138,6 +170,13 @@ impl<S: Stage> AttributeParser<S> for NakedParser { this.span = 
Some(cx.attr_span); } })]; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Warn(Target::MacroCall), + ]); fn finalize(self, cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { // FIXME(jdonszelmann): upgrade this list to *parsed* attributes @@ -230,6 +269,19 @@ pub(crate) struct TrackCallerParser; impl<S: Stage> NoArgsAttributeParser<S> for TrackCallerParser { const PATH: &[Symbol] = &[sym::track_caller]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::ForeignFn), + Allow(Target::Closure), + Warn(Target::MacroDef), + Warn(Target::Arm), + Warn(Target::Field), + Warn(Target::MacroCall), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::TrackCaller; } @@ -237,6 +289,12 @@ pub(crate) struct NoMangleParser; impl<S: Stage> NoArgsAttributeParser<S> for NoMangleParser { const PATH: &[Symbol] = &[sym::no_mangle]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Fn), + Allow(Target::Static), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::TraitImpl)), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::NoMangle; } @@ -289,7 +347,7 @@ impl<S: Stage> AttributeParser<S> for UsedParser { UsedBy::Linker } _ => { - cx.expected_specific_argument(l.span(), vec!["compiler", "linker"]); + cx.expected_specific_argument(l.span(), &[sym::compiler, sym::linker]); return; } } @@ -310,6 +368,8 @@ impl<S: Stage> AttributeParser<S> for UsedParser { } }, )]; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Static), Warn(Target::MacroCall)]); fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { // Ratcheting behaviour, if both `linker` and `compiler` are specified, use `linker` @@ -321,56 +381,207 @@ impl<S: Stage> AttributeParser<S> for UsedParser { } } +fn parse_tf_attribute<'c, S: Stage>( + cx: &'c mut AcceptContext<'_, '_, S>, + args: &'c ArgParser<'_>, +) -> impl IntoIterator<Item = (Symbol, Span)> + 'c { + let mut features = Vec::new(); + let ArgParser::List(list) = args else { + cx.expected_list(cx.attr_span); + return features; + }; + if list.is_empty() { + cx.warn_empty_attribute(cx.attr_span); + return features; + } + for item in list.mixed() { + let Some(name_value) = item.meta_item() else { + cx.expected_name_value(item.span(), Some(sym::enable)); + return features; + }; + + // Validate name + let Some(name) = name_value.path().word_sym() else { + cx.expected_name_value(name_value.path().span(), Some(sym::enable)); + return features; + }; + if name != sym::enable { + cx.expected_name_value(name_value.path().span(), Some(sym::enable)); + return features; + } + + // Use value + let Some(name_value) = name_value.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::enable)); + return features; + }; + let Some(value_str) = name_value.value_as_str() else { + cx.expected_string_literal(name_value.value_span, Some(name_value.value_as_lit())); + 
return features; + }; + for feature in value_str.as_str().split(",") { + features.push((Symbol::intern(feature), item.span())); + } + } + features +} + pub(crate) struct TargetFeatureParser; impl<S: Stage> CombineAttributeParser<S> for TargetFeatureParser { type Item = (Symbol, Span); const PATH: &[Symbol] = &[sym::target_feature]; - const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::TargetFeature(items, span); + const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::TargetFeature { + features: items, + attr_span: span, + was_forced: false, + }; + const TEMPLATE: AttributeTemplate = template!(List: &["enable = \"feat1, feat2\""]); + + fn extend<'c>( + cx: &'c mut AcceptContext<'_, '_, S>, + args: &'c ArgParser<'_>, + ) -> impl IntoIterator<Item = Self::Item> + 'c { + parse_tf_attribute(cx, args) + } + + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Warn(Target::Statement), + Warn(Target::Field), + Warn(Target::Arm), + Warn(Target::MacroDef), + Warn(Target::MacroCall), + ]); +} + +pub(crate) struct ForceTargetFeatureParser; + +impl<S: Stage> CombineAttributeParser<S> for ForceTargetFeatureParser { + type Item = (Symbol, Span); + const PATH: &[Symbol] = &[sym::force_target_feature]; + const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::TargetFeature { + features: items, + attr_span: span, + was_forced: true, + }; const TEMPLATE: AttributeTemplate = template!(List: &["enable = \"feat1, feat2\""]); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + ]); fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, args: &'c ArgParser<'_>, ) -> impl IntoIterator<Item = Self::Item> + 'c { - let mut features = Vec::new(); - let ArgParser::List(list) = args else { + parse_tf_attribute(cx, args) + } +} + +pub(crate) struct SanitizeParser; + +impl<S: Stage> SingleAttributeParser<S> for SanitizeParser { + const PATH: &[Symbol] = &[sym::sanitize]; + + // FIXME: still checked in check_attrs.rs + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); + + const TEMPLATE: AttributeTemplate = template!(List: &[ + r#"address = "on|off""#, + r#"kernel_address = "on|off""#, + r#"cfi = "on|off""#, + r#"hwaddress = "on|off""#, + r#"kcfi = "on|off""#, + r#"memory = "on|off""#, + r#"memtag = "on|off""#, + r#"shadow_call_stack = "on|off""#, + r#"thread = "on|off""# + ]); + + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let Some(list) = args.list() else { cx.expected_list(cx.attr_span); - return features; + return None; }; - if list.is_empty() { - cx.warn_empty_attribute(cx.attr_span); - return features; - } + + let mut on_set = SanitizerSet::empty(); + let mut off_set = SanitizerSet::empty(); + for item in list.mixed() { - let Some(name_value) = item.meta_item() else { - cx.expected_name_value(item.span(), Some(sym::enable)); - return features; + let Some(item) = item.meta_item() else { + cx.expected_name_value(item.span(), None); + continue; }; - // Validate name - let Some(name) = 
name_value.path().word_sym() else { - cx.expected_name_value(name_value.path().span(), Some(sym::enable)); - return features; + let path = item.path().word_sym(); + let Some(value) = item.args().name_value() else { + cx.expected_name_value(item.span(), path); + continue; }; - if name != sym::enable { - cx.expected_name_value(name_value.path().span(), Some(sym::enable)); - return features; - } - // Use value - let Some(name_value) = name_value.args().name_value() else { - cx.expected_name_value(item.span(), Some(sym::enable)); - return features; - }; - let Some(value_str) = name_value.value_as_str() else { - cx.expected_string_literal(name_value.value_span, Some(name_value.value_as_lit())); - return features; + let mut apply = |s: SanitizerSet| { + let is_on = match value.value_as_str() { + Some(sym::on) => true, + Some(sym::off) => false, + Some(_) => { + cx.expected_specific_argument_strings( + value.value_span, + &[sym::on, sym::off], + ); + return; + } + None => { + cx.expected_string_literal(value.value_span, Some(value.value_as_lit())); + return; + } + }; + + if is_on { + on_set |= s; + } else { + off_set |= s; + } }; - for feature in value_str.as_str().split(",") { - features.push((Symbol::intern(feature), item.span())); + + match path { + Some(sym::address) | Some(sym::kernel_address) => { + apply(SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS) + } + Some(sym::cfi) => apply(SanitizerSet::CFI), + Some(sym::kcfi) => apply(SanitizerSet::KCFI), + Some(sym::memory) => apply(SanitizerSet::MEMORY), + Some(sym::memtag) => apply(SanitizerSet::MEMTAG), + Some(sym::shadow_call_stack) => apply(SanitizerSet::SHADOWCALLSTACK), + Some(sym::thread) => apply(SanitizerSet::THREAD), + Some(sym::hwaddress) => apply(SanitizerSet::HWADDRESS), + _ => { + cx.expected_specific_argument_strings( + item.path().span(), + &[ + sym::address, + sym::cfi, + sym::kcfi, + sym::memory, + sym::memtag, + sym::shadow_call_stack, + sym::thread, + sym::hwaddress, + ], + ); + continue; + } } } - features + + Some(AttributeKind::Sanitize { on_set, off_set, span: cx.attr_span }) } } diff --git a/compiler/rustc_attr_parsing/src/attributes/confusables.rs b/compiler/rustc_attr_parsing/src/attributes/confusables.rs index edd22172ca2..97e78dfb136 100644 --- a/compiler/rustc_attr_parsing/src/attributes/confusables.rs +++ b/compiler/rustc_attr_parsing/src/attributes/confusables.rs @@ -1,11 +1,5 @@ -use rustc_feature::template; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; -use thin_vec::ThinVec; - -use super::{AcceptMapping, AttributeParser}; -use crate::context::{FinalizeContext, Stage}; -use crate::session_diagnostics; +use super::prelude::*; +use crate::session_diagnostics::EmptyConfusables; #[derive(Default)] pub(crate) struct ConfusablesParser { @@ -24,7 +18,7 @@ impl<S: Stage> AttributeParser<S> for ConfusablesParser { }; if list.is_empty() { - cx.emit_err(session_diagnostics::EmptyConfusables { span: cx.attr_span }); + cx.emit_err(EmptyConfusables { span: cx.attr_span }); } for param in list.mixed() { @@ -41,6 +35,8 @@ impl<S: Stage> AttributeParser<S> for ConfusablesParser { this.first_span.get_or_insert(cx.attr_span); }, )]; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Method(MethodKind::Inherent))]); fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { if self.confusables.is_empty() { diff --git a/compiler/rustc_attr_parsing/src/attributes/crate_level.rs b/compiler/rustc_attr_parsing/src/attributes/crate_level.rs new file 
mode 100644 index 00000000000..2fed09b85e8 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/crate_level.rs @@ -0,0 +1,36 @@ +use rustc_feature::AttributeType; + +use super::prelude::*; + +pub(crate) struct CrateNameParser; + +impl<S: Stage> SingleAttributeParser<S> for CrateNameParser { + const PATH: &[Symbol] = &[sym::crate_name]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name"); + const TYPE: AttributeType = AttributeType::CrateLevel; + + // FIXME: crate name is allowed on all targets and ignored, + // even though it should only be valid on crates of course + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let ArgParser::NameValue(n) = args else { + cx.expected_name_value(cx.attr_span, None); + return None; + }; + + let Some(name) = n.value_as_str() else { + cx.expected_string_literal(n.value_span, Some(n.value_as_lit())); + return None; + }; + + Some(AttributeKind::CrateName { + name, + name_span: n.value_span, + attr_span: cx.attr_span, + style: cx.attr_style, + }) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs index e57ea8bbb5c..31c698228ef 100644 --- a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs +++ b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs @@ -1,12 +1,10 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, DeprecatedSince, Deprecation}; -use rustc_span::{Span, Symbol, sym}; +use rustc_hir::attrs::{DeprecatedSince, Deprecation}; +use super::prelude::*; use super::util::parse_version; -use super::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; -use crate::session_diagnostics; +use crate::session_diagnostics::{ + DeprecatedItemSuggestion, InvalidSince, MissingNote, MissingSince, +}; pub(crate) struct DeprecationParser; @@ -38,6 +36,32 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { const PATH: &[Symbol] = &[sym::deprecated]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Fn), + Allow(Target::Mod), + Allow(Target::Struct), + Allow(Target::Enum), + Allow(Target::Union), + Allow(Target::Const), + Allow(Target::Static), + Allow(Target::MacroDef), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::TyAlias), + Allow(Target::Use), + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + Allow(Target::ForeignTy), + Allow(Target::Field), + Allow(Target::Trait), + Allow(Target::AssocTy), + Allow(Target::AssocConst), + Allow(Target::Variant), + Allow(Target::Impl { of_trait: false }), //FIXME This does not make sense + Allow(Target::Crate), + Error(Target::WherePredicate), + ]); const TEMPLATE: AttributeTemplate = template!( Word, List: &[r#"since = "version""#, r#"note = "reason""#, r#"since = "version", note = "reason""#], @@ -75,7 +99,7 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { } Some(name @ 
sym::suggestion) => { if !features.deprecated_suggestion() { - cx.emit_err(session_diagnostics::DeprecatedItemSuggestion { + cx.emit_err(DeprecatedItemSuggestion { span: param.span(), is_nightly: cx.sess().is_nightly_build(), details: (), @@ -117,18 +141,18 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { } else if let Some(version) = parse_version(since) { DeprecatedSince::RustcVersion(version) } else { - cx.emit_err(session_diagnostics::InvalidSince { span: cx.attr_span }); + cx.emit_err(InvalidSince { span: cx.attr_span }); DeprecatedSince::Err } } else if is_rustc { - cx.emit_err(session_diagnostics::MissingSince { span: cx.attr_span }); + cx.emit_err(MissingSince { span: cx.attr_span }); DeprecatedSince::Err } else { DeprecatedSince::Unspecified }; if is_rustc && note.is_none() { - cx.emit_err(session_diagnostics::MissingNote { span: cx.attr_span }); + cx.emit_err(MissingNote { span: cx.attr_span }); return None; } diff --git a/compiler/rustc_attr_parsing/src/attributes/dummy.rs b/compiler/rustc_attr_parsing/src/attributes/dummy.rs index bbcd9ab530c..7293cee842c 100644 --- a/compiler/rustc_attr_parsing/src/attributes/dummy.rs +++ b/compiler/rustc_attr_parsing/src/attributes/dummy.rs @@ -5,12 +5,14 @@ use rustc_span::{Symbol, sym}; use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; use crate::context::{AcceptContext, Stage}; use crate::parser::ArgParser; +use crate::target_checking::{ALL_TARGETS, AllowedTargets}; pub(crate) struct DummyParser; impl<S: Stage> SingleAttributeParser<S> for DummyParser { const PATH: &[Symbol] = &[sym::rustc_dummy]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Ignore; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); const TEMPLATE: AttributeTemplate = template!(Word); // Anything, really fn convert(_: &mut AcceptContext<'_, '_, S>, _: &ArgParser<'_>) -> Option<AttributeKind> { diff --git a/compiler/rustc_attr_parsing/src/attributes/inline.rs b/compiler/rustc_attr_parsing/src/attributes/inline.rs index e9a45f20bff..a73430c9d00 100644 --- a/compiler/rustc_attr_parsing/src/attributes/inline.rs +++ b/compiler/rustc_attr_parsing/src/attributes/inline.rs @@ -2,15 +2,9 @@ // note: need to model better how duplicate attr errors work when not using // SingleAttributeParser which is what we have two of here. 
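Illustrative aside, not part of the diff: the forms `DeprecationParser` accepts are the usual stable ones, for example:

    #[deprecated]
    pub fn old() {}

    #[deprecated(since = "1.61.0", note = "use `new_len` instead")]
    pub fn old_len() -> usize { 0 }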
-use rustc_feature::{AttributeTemplate, template}; use rustc_hir::attrs::{AttributeKind, InlineAttr}; -use rustc_hir::lints::AttributeLintKind; -use rustc_span::{Symbol, sym}; -use super::{AcceptContext, AttributeOrder, OnDuplicate}; -use crate::attributes::SingleAttributeParser; -use crate::context::Stage; -use crate::parser::ArgParser; +use super::prelude::*; pub(crate) struct InlineParser; @@ -18,6 +12,21 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser { const PATH: &'static [Symbol] = &[sym::inline]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Closure), + Allow(Target::Delegation { mac: false }), + Warn(Target::Method(MethodKind::Trait { body: false })), + Warn(Target::ForeignFn), + Warn(Target::Field), + Warn(Target::MacroDef), + Warn(Target::Arm), + Warn(Target::AssocConst), + Warn(Target::MacroCall), + ]); const TEMPLATE: AttributeTemplate = template!( Word, List: &["always", "never"], @@ -41,14 +50,14 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser { Some(AttributeKind::Inline(InlineAttr::Never, cx.attr_span)) } _ => { - cx.expected_specific_argument(l.span(), vec!["always", "never"]); + cx.expected_specific_argument(l.span(), &[sym::always, sym::never]); return None; } } } ArgParser::NameValue(_) => { - let suggestions = - <Self as SingleAttributeParser<S>>::TEMPLATE.suggestions(false, "inline"); + let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE + .suggestions(cx.attr_style, "inline"); let span = cx.attr_span; cx.emit_lint(AttributeLintKind::IllFormedAttributeInput { suggestions }, span); return None; @@ -63,6 +72,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcForceInlineParser { const PATH: &'static [Symbol] = &[sym::rustc_force_inline]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Fn)]); const TEMPLATE: AttributeTemplate = template!(Word, List: &["reason"], NameValueStr: "reason"); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { diff --git a/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs index d406c30b83e..d4942e56f42 100644 --- a/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs @@ -1,14 +1,21 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; +use rustc_feature::Features; use rustc_hir::attrs::AttributeKind::{LinkName, LinkOrdinal, LinkSection}; -use rustc_span::{Span, Symbol, sym}; +use rustc_hir::attrs::*; +use rustc_session::Session; +use rustc_session::parse::feature_err; +use rustc_span::kw; +use rustc_target::spec::BinaryFormat; -use crate::attributes::{ - AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, +use super::prelude::*; +use super::util::parse_single_integer; +use crate::attributes::cfg::parse_cfg_entry; +use crate::fluent_generated; +use crate::session_diagnostics::{ + AsNeededCompatibility, BundleNeedsStatic, EmptyLinkName, 
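Illustrative aside, not part of the diff: `InlineParser` accepts the Word and List forms below; the NameValue form (`#[inline = "..."]`) falls into the ill-formed-attribute-input lint branch shown above rather than being accepted.

    #[inline]
    fn small() {}

    #[inline(always)]
    fn force_inline() {}

    #[inline(never)]
    fn keep_outlined() {}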
ImportNameTypeRaw, ImportNameTypeX86, + IncompatibleWasmLink, InvalidLinkModifier, LinkFrameworkApple, LinkOrdinalOutOfRange, + LinkRequiresName, MultipleModifiers, NullOnLinkSection, RawDylibNoNul, RawDylibOnlyWindows, + WholeArchiveNeedsStatic, }; -use crate::context::{AcceptContext, Stage, parse_single_integer}; -use crate::parser::ArgParser; -use crate::session_diagnostics::{LinkOrdinalOutOfRange, NullOnLinkSection}; pub(crate) struct LinkNameParser; @@ -16,6 +23,10 @@ impl<S: Stage> SingleAttributeParser<S> for LinkNameParser { const PATH: &[Symbol] = &[sym::link_name]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + ]); const TEMPLATE: AttributeTemplate = template!( NameValueStr: "name", "https://doc.rust-lang.org/reference/items/external-blocks.html#the-link_name-attribute" @@ -35,12 +46,417 @@ impl<S: Stage> SingleAttributeParser<S> for LinkNameParser { } } +pub(crate) struct LinkParser; + +impl<S: Stage> CombineAttributeParser<S> for LinkParser { + type Item = LinkEntry; + const PATH: &[Symbol] = &[sym::link]; + const CONVERT: ConvertFn<Self::Item> = AttributeKind::Link; + const TEMPLATE: AttributeTemplate = template!(List: &[ + r#"name = "...""#, + r#"name = "...", kind = "dylib|static|...""#, + r#"name = "...", wasm_import_module = "...""#, + r#"name = "...", import_name_type = "decorated|noprefix|undecorated""#, + r#"name = "...", kind = "dylib|static|...", wasm_import_module = "...", import_name_type = "decorated|noprefix|undecorated""#, + ], "https://doc.rust-lang.org/reference/items/external-blocks.html#the-link-attribute"); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); //FIXME Still checked fully in `check_attr.rs` + + fn extend<'c>( + cx: &'c mut AcceptContext<'_, '_, S>, + args: &'c ArgParser<'_>, + ) -> impl IntoIterator<Item = Self::Item> + 'c { + let mut result = None; + let Some(items) = args.list() else { + cx.expected_list(cx.attr_span); + return result; + }; + + let sess = cx.sess(); + let features = cx.features(); + + let mut name = None; + let mut kind = None; + let mut modifiers = None; + let mut cfg = None; + let mut wasm_import_module = None; + let mut import_name_type = None; + for item in items.mixed() { + let Some(item) = item.meta_item() else { + cx.unexpected_literal(item.span()); + continue; + }; + + let cont = match item.path().word().map(|ident| ident.name) { + Some(sym::name) => Self::parse_link_name(item, &mut name, cx), + Some(sym::kind) => Self::parse_link_kind(item, &mut kind, cx, sess, features), + Some(sym::modifiers) => Self::parse_link_modifiers(item, &mut modifiers, cx), + Some(sym::cfg) => Self::parse_link_cfg(item, &mut cfg, cx, sess, features), + Some(sym::wasm_import_module) => { + Self::parse_link_wasm_import_module(item, &mut wasm_import_module, cx) + } + Some(sym::import_name_type) => { + Self::parse_link_import_name_type(item, &mut import_name_type, cx) + } + _ => { + cx.expected_specific_argument_strings( + item.span(), + &[ + sym::name, + sym::kind, + sym::modifiers, + sym::cfg, + sym::wasm_import_module, + sym::import_name_type, + ], + ); + true + } + }; + if !cont { + return result; + } + } + + // Do this outside the above loop so we don't depend on modifiers coming after kinds + let mut verbatim = None; + if let Some((modifiers, span)) = modifiers { + for modifier 
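Illustrative aside, not part of the diff: the simplest form `LinkParser` handles is a `name`/`kind` pair on an extern block; the library name here is only an example.

    #[link(name = "z", kind = "static")]
    extern "C" {
        fn zlibVersion() -> *const core::ffi::c_char;
    }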
in modifiers.as_str().split(',') { + let (modifier, value): (Symbol, bool) = match modifier.strip_prefix(&['+', '-']) { + Some(m) => (Symbol::intern(m), modifier.starts_with('+')), + None => { + cx.emit_err(InvalidLinkModifier { span }); + continue; + } + }; + + macro report_unstable_modifier($feature: ident) { + if !features.$feature() { + // FIXME: make this translatable + #[expect(rustc::untranslatable_diagnostic)] + feature_err( + sess, + sym::$feature, + span, + format!("linking modifier `{modifier}` is unstable"), + ) + .emit(); + } + } + let assign_modifier = |dst: &mut Option<bool>| { + if dst.is_some() { + cx.emit_err(MultipleModifiers { span, modifier }); + } else { + *dst = Some(value); + } + }; + match (modifier, &mut kind) { + (sym::bundle, Some(NativeLibKind::Static { bundle, .. })) => { + assign_modifier(bundle) + } + (sym::bundle, _) => { + cx.emit_err(BundleNeedsStatic { span }); + } + + (sym::verbatim, _) => assign_modifier(&mut verbatim), + + ( + sym::whole_dash_archive, + Some(NativeLibKind::Static { whole_archive, .. }), + ) => assign_modifier(whole_archive), + (sym::whole_dash_archive, _) => { + cx.emit_err(WholeArchiveNeedsStatic { span }); + } + + (sym::as_dash_needed, Some(NativeLibKind::Dylib { as_needed })) + | (sym::as_dash_needed, Some(NativeLibKind::Framework { as_needed })) => { + report_unstable_modifier!(native_link_modifiers_as_needed); + assign_modifier(as_needed) + } + (sym::as_dash_needed, _) => { + cx.emit_err(AsNeededCompatibility { span }); + } + + _ => { + cx.expected_specific_argument_strings( + span, + &[ + sym::bundle, + sym::verbatim, + sym::whole_dash_archive, + sym::as_dash_needed, + ], + ); + } + } + } + } + + if let Some((_, span)) = wasm_import_module { + if name.is_some() || kind.is_some() || modifiers.is_some() || cfg.is_some() { + cx.emit_err(IncompatibleWasmLink { span }); + } + } + + if wasm_import_module.is_some() { + (name, kind) = (wasm_import_module, Some(NativeLibKind::WasmImportModule)); + } + let Some((name, name_span)) = name else { + cx.emit_err(LinkRequiresName { span: cx.attr_span }); + return result; + }; + + // Do this outside of the loop so that `import_name_type` can be specified before `kind`. 
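Illustrative aside, not part of the diff: per the loop above, `modifiers` is a comma-separated list of `+`/`-`-prefixed names, where `bundle` and `whole-archive` only combine with `kind = "static"` and `as-needed` (dylib/framework only) is gated behind `native_link_modifiers_as_needed`. A sketch of the stable part:

    #[link(name = "helpers", kind = "static", modifiers = "+whole-archive,-bundle")]
    extern "C" {}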
+ if let Some((_, span)) = import_name_type { + if kind != Some(NativeLibKind::RawDylib) { + cx.emit_err(ImportNameTypeRaw { span }); + } + } + + if let Some(NativeLibKind::RawDylib) = kind + && name.as_str().contains('\0') + { + cx.emit_err(RawDylibNoNul { span: name_span }); + } + + result = Some(LinkEntry { + span: cx.attr_span, + kind: kind.unwrap_or(NativeLibKind::Unspecified), + name, + cfg, + verbatim, + import_name_type, + }); + result + } +} + +impl LinkParser { + fn parse_link_name<S: Stage>( + item: &MetaItemParser<'_>, + name: &mut Option<(Symbol, Span)>, + cx: &mut AcceptContext<'_, '_, S>, + ) -> bool { + if name.is_some() { + cx.duplicate_key(item.span(), sym::name); + return true; + } + let Some(nv) = item.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::name)); + return false; + }; + let Some(link_name) = nv.value_as_str() else { + cx.expected_name_value(item.span(), Some(sym::name)); + return false; + }; + + if link_name.is_empty() { + cx.emit_err(EmptyLinkName { span: nv.value_span }); + } + *name = Some((link_name, nv.value_span)); + true + } + + fn parse_link_kind<S: Stage>( + item: &MetaItemParser<'_>, + kind: &mut Option<NativeLibKind>, + cx: &mut AcceptContext<'_, '_, S>, + sess: &Session, + features: &Features, + ) -> bool { + if kind.is_some() { + cx.duplicate_key(item.span(), sym::kind); + return true; + } + let Some(nv) = item.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::kind)); + return true; + }; + let Some(link_kind) = nv.value_as_str() else { + cx.expected_name_value(item.span(), Some(sym::kind)); + return true; + }; + + let link_kind = match link_kind { + kw::Static => NativeLibKind::Static { bundle: None, whole_archive: None }, + sym::dylib => NativeLibKind::Dylib { as_needed: None }, + sym::framework => { + if !sess.target.is_like_darwin { + cx.emit_err(LinkFrameworkApple { span: nv.value_span }); + } + NativeLibKind::Framework { as_needed: None } + } + sym::raw_dash_dylib => { + if sess.target.is_like_windows { + // raw-dylib is stable and working on Windows + } else if sess.target.binary_format == BinaryFormat::Elf && features.raw_dylib_elf() + { + // raw-dylib is unstable on ELF, but the user opted in + } else if sess.target.binary_format == BinaryFormat::Elf && sess.is_nightly_build() + { + feature_err( + sess, + sym::raw_dylib_elf, + nv.value_span, + fluent_generated::attr_parsing_raw_dylib_elf_unstable, + ) + .emit(); + } else { + cx.emit_err(RawDylibOnlyWindows { span: nv.value_span }); + } + + NativeLibKind::RawDylib + } + sym::link_dash_arg => { + if !features.link_arg_attribute() { + feature_err( + sess, + sym::link_arg_attribute, + nv.value_span, + fluent_generated::attr_parsing_link_arg_unstable, + ) + .emit(); + } + NativeLibKind::LinkArg + } + _kind => { + cx.expected_specific_argument_strings( + nv.value_span, + &[ + kw::Static, + sym::dylib, + sym::framework, + sym::raw_dash_dylib, + sym::link_dash_arg, + ], + ); + return true; + } + }; + *kind = Some(link_kind); + true + } + + fn parse_link_modifiers<S: Stage>( + item: &MetaItemParser<'_>, + modifiers: &mut Option<(Symbol, Span)>, + cx: &mut AcceptContext<'_, '_, S>, + ) -> bool { + if modifiers.is_some() { + cx.duplicate_key(item.span(), sym::modifiers); + return true; + } + let Some(nv) = item.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::modifiers)); + return true; + }; + let Some(link_modifiers) = nv.value_as_str() else { + cx.expected_name_value(item.span(), Some(sym::modifiers)); + return true; + }; 
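Illustrative aside, not part of the diff: `import_name_type` is only accepted together with `kind = "raw-dylib"` and, per the check in `parse_link_import_name_type`, only on x86 targets. A sketch of the documented Windows usage (the Win32 function is real, the example itself is not from this changeset):

    #[link(name = "kernel32", kind = "raw-dylib", import_name_type = "undecorated")]
    extern "system" {
        fn GetCurrentProcessId() -> u32;
    }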
+ *modifiers = Some((link_modifiers, nv.value_span)); + true + } + + fn parse_link_cfg<S: Stage>( + item: &MetaItemParser<'_>, + cfg: &mut Option<CfgEntry>, + cx: &mut AcceptContext<'_, '_, S>, + sess: &Session, + features: &Features, + ) -> bool { + if cfg.is_some() { + cx.duplicate_key(item.span(), sym::cfg); + return true; + } + let Some(link_cfg) = item.args().list() else { + cx.expected_list(item.span()); + return true; + }; + let Some(link_cfg) = link_cfg.single() else { + cx.expected_single_argument(item.span()); + return true; + }; + if !features.link_cfg() { + feature_err( + sess, + sym::link_cfg, + item.span(), + fluent_generated::attr_parsing_link_cfg_unstable, + ) + .emit(); + } + *cfg = parse_cfg_entry(cx, link_cfg); + true + } + + fn parse_link_wasm_import_module<S: Stage>( + item: &MetaItemParser<'_>, + wasm_import_module: &mut Option<(Symbol, Span)>, + cx: &mut AcceptContext<'_, '_, S>, + ) -> bool { + if wasm_import_module.is_some() { + cx.duplicate_key(item.span(), sym::wasm_import_module); + return true; + } + let Some(nv) = item.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::wasm_import_module)); + return true; + }; + let Some(link_wasm_import_module) = nv.value_as_str() else { + cx.expected_name_value(item.span(), Some(sym::wasm_import_module)); + return true; + }; + *wasm_import_module = Some((link_wasm_import_module, item.span())); + true + } + + fn parse_link_import_name_type<S: Stage>( + item: &MetaItemParser<'_>, + import_name_type: &mut Option<(PeImportNameType, Span)>, + cx: &mut AcceptContext<'_, '_, S>, + ) -> bool { + if import_name_type.is_some() { + cx.duplicate_key(item.span(), sym::import_name_type); + return true; + } + let Some(nv) = item.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::import_name_type)); + return true; + }; + let Some(link_import_name_type) = nv.value_as_str() else { + cx.expected_name_value(item.span(), Some(sym::import_name_type)); + return true; + }; + if cx.sess().target.arch != "x86" { + cx.emit_err(ImportNameTypeX86 { span: item.span() }); + return true; + } + + let link_import_name_type = match link_import_name_type { + sym::decorated => PeImportNameType::Decorated, + sym::noprefix => PeImportNameType::NoPrefix, + sym::undecorated => PeImportNameType::Undecorated, + _ => { + cx.expected_specific_argument_strings( + item.span(), + &[sym::decorated, sym::noprefix, sym::undecorated], + ); + return true; + } + }; + *import_name_type = Some((link_import_name_type, item.span())); + true + } +} + pub(crate) struct LinkSectionParser; impl<S: Stage> SingleAttributeParser<S> for LinkSectionParser { const PATH: &[Symbol] = &[sym::link_section]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowListWarnRest(&[Allow(Target::Static), Allow(Target::Fn)]); const TEMPLATE: AttributeTemplate = template!( NameValueStr: "name", "https://doc.rust-lang.org/reference/abi.html#the-link_section-attribute" @@ -70,6 +486,7 @@ pub(crate) struct ExportStableParser; impl<S: Stage> NoArgsAttributeParser<S> for ExportStableParser { const PATH: &[Symbol] = &[sym::export_stable]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); //FIXME Still checked fully in `check_attr.rs` const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::ExportStable; } @@ -77,6 +494,7 @@ 
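Illustrative aside, not part of the diff: per `parse_link_wasm_import_module` and the incompatibility check above, `wasm_import_module` stands alone (no `name`, `kind`, `modifiers`, or `cfg`). Module and function names below are made up.

    #[link(wasm_import_module = "host")]
    extern "C" {
        fn host_log(ptr: *const u8, len: usize);
    }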
pub(crate) struct FfiConstParser; impl<S: Stage> NoArgsAttributeParser<S> for FfiConstParser { const PATH: &[Symbol] = &[sym::ffi_const]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::ForeignFn)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::FfiConst; } @@ -84,6 +502,7 @@ pub(crate) struct FfiPureParser; impl<S: Stage> NoArgsAttributeParser<S> for FfiPureParser { const PATH: &[Symbol] = &[sym::ffi_pure]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::ForeignFn)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::FfiPure; } @@ -91,6 +510,12 @@ pub(crate) struct StdInternalSymbolParser; impl<S: Stage> NoArgsAttributeParser<S> for StdInternalSymbolParser { const PATH: &[Symbol] = &[sym::rustc_std_internal_symbol]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::ForeignFn), + Allow(Target::Static), + Allow(Target::ForeignStatic), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::StdInternalSymbol; } @@ -100,6 +525,11 @@ impl<S: Stage> SingleAttributeParser<S> for LinkOrdinalParser { const PATH: &[Symbol] = &[sym::link_ordinal]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + Warn(Target::MacroCall), + ]); const TEMPLATE: AttributeTemplate = template!( List: &["ordinal"], "https://doc.rust-lang.org/reference/items/external-blocks.html#the-link_ordinal-attribute" @@ -129,3 +559,87 @@ impl<S: Stage> SingleAttributeParser<S> for LinkOrdinalParser { Some(LinkOrdinal { ordinal, span: cx.attr_span }) } } + +pub(crate) struct LinkageParser; + +impl<S: Stage> SingleAttributeParser<S> for LinkageParser { + const PATH: &[Symbol] = &[sym::linkage]; + + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Static), + Allow(Target::ForeignStatic), + Allow(Target::ForeignFn), + ]); + + const TEMPLATE: AttributeTemplate = template!(NameValueStr: [ + "available_externally", + "common", + "extern_weak", + "external", + "internal", + "linkonce", + "linkonce_odr", + "weak", + "weak_odr", + ]); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let Some(name_value) = args.name_value() else { + cx.expected_name_value(cx.attr_span, Some(sym::linkage)); + return None; + }; + + let Some(value) = name_value.value_as_str() else { + cx.expected_string_literal(name_value.value_span, Some(name_value.value_as_lit())); + return None; + }; + + // Use the names from src/llvm/docs/LangRef.rst here. Most types are only + // applicable to variable declarations and may not really make sense for + // Rust code in the first place but allow them anyway and trust that the + // user knows what they're doing. 
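Illustrative aside, not part of the diff: both `FfiConstParser` and `FfiPureParser` now only allow foreign functions. The attributes are unstable, so a sketch of their use would need the corresponding feature gates (names assumed from the attribute names):

    #![feature(ffi_const, ffi_pure)]

    extern "C" {
        #[ffi_const]
        fn isalnum(c: i32) -> i32;

        #[ffi_pure]
        fn strlen(s: *const core::ffi::c_char) -> usize;
    }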
Who knows, unanticipated use cases may pop + // up in the future. + // + // ghost, dllimport, dllexport and linkonce_odr_autohide are not supported + // and don't have to be, LLVM treats them as no-ops. + let linkage = match value { + sym::available_externally => Linkage::AvailableExternally, + sym::common => Linkage::Common, + sym::extern_weak => Linkage::ExternalWeak, + sym::external => Linkage::External, + sym::internal => Linkage::Internal, + sym::linkonce => Linkage::LinkOnceAny, + sym::linkonce_odr => Linkage::LinkOnceODR, + sym::weak => Linkage::WeakAny, + sym::weak_odr => Linkage::WeakODR, + + _ => { + cx.expected_specific_argument( + name_value.value_span, + &[ + sym::available_externally, + sym::common, + sym::extern_weak, + sym::external, + sym::internal, + sym::linkonce, + sym::linkonce_odr, + sym::weak, + sym::weak_odr, + ], + ); + return None; + } + }; + + Some(AttributeKind::Linkage(linkage, cx.attr_span)) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs b/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs index 9530fec07d6..63b0809d0d8 100644 --- a/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs +++ b/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs @@ -1,13 +1,16 @@ -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; - -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::Stage; +use super::prelude::*; pub(crate) struct AsPtrParser; impl<S: Stage> NoArgsAttributeParser<S> for AsPtrParser { const PATH: &[Symbol] = &[sym::rustc_as_ptr]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::AsPtr; } @@ -15,6 +18,11 @@ pub(crate) struct PubTransparentParser; impl<S: Stage> NoArgsAttributeParser<S> for PubTransparentParser { const PATH: &[Symbol] = &[sym::rustc_pub_transparent]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Struct), + Allow(Target::Enum), + Allow(Target::Union), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::PubTransparent; } @@ -22,6 +30,11 @@ pub(crate) struct PassByValueParser; impl<S: Stage> NoArgsAttributeParser<S> for PassByValueParser { const PATH: &[Symbol] = &[sym::rustc_pass_by_value]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Struct), + Allow(Target::Enum), + Allow(Target::TyAlias), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::PassByValue; } @@ -29,5 +42,10 @@ pub(crate) struct AutomaticallyDerivedParser; impl<S: Stage> NoArgsAttributeParser<S> for AutomaticallyDerivedParser { const PATH: &[Symbol] = &[sym::automatically_derived]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Impl { of_trait: true }), + Error(Target::Crate), + Error(Target::WherePredicate), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::AutomaticallyDerived; } diff --git a/compiler/rustc_attr_parsing/src/attributes/loop_match.rs 
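Illustrative aside, not part of the diff: the match below maps the string value directly onto LLVM linkage kinds. The attribute is unstable, so usage looks roughly like this sketch:

    #![feature(linkage)]

    #[linkage = "weak"]
    static FALLBACK_TABLE: [u8; 16] = [0; 16];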
b/compiler/rustc_attr_parsing/src/attributes/loop_match.rs index 868c113a6d1..528090b8673 100644 --- a/compiler/rustc_attr_parsing/src/attributes/loop_match.rs +++ b/compiler/rustc_attr_parsing/src/attributes/loop_match.rs @@ -1,13 +1,10 @@ -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; - -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::Stage; +use super::prelude::*; pub(crate) struct LoopMatchParser; impl<S: Stage> NoArgsAttributeParser<S> for LoopMatchParser { const PATH: &[Symbol] = &[sym::loop_match]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Expression)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::LoopMatch; } @@ -15,5 +12,6 @@ pub(crate) struct ConstContinueParser; impl<S: Stage> NoArgsAttributeParser<S> for ConstContinueParser { const PATH: &[Symbol] = &[sym::const_continue]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Expression)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::ConstContinue; } diff --git a/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs index a1166bf9ac5..180130c7be4 100644 --- a/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs @@ -1,18 +1,14 @@ use rustc_errors::DiagArgValue; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, MacroUseArgs}; -use rustc_span::{Span, Symbol, sym}; -use thin_vec::ThinVec; +use rustc_hir::attrs::MacroUseArgs; -use crate::attributes::{AcceptMapping, AttributeParser, NoArgsAttributeParser, OnDuplicate}; -use crate::context::{AcceptContext, FinalizeContext, Stage}; -use crate::parser::ArgParser; -use crate::session_diagnostics; +use super::prelude::*; +use crate::session_diagnostics::IllFormedAttributeInputLint; pub(crate) struct MacroEscapeParser; impl<S: Stage> NoArgsAttributeParser<S> for MacroEscapeParser { const PATH: &[Symbol] = &[sym::macro_escape]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = MACRO_USE_ALLOWED_TARGETS; const CREATE: fn(Span) -> AttributeKind = AttributeKind::MacroEscape; } @@ -35,6 +31,12 @@ const MACRO_USE_TEMPLATE: AttributeTemplate = template!( Word, List: &["name1, name2, ..."], "https://doc.rust-lang.org/reference/macros-by-example.html#the-macro_use-attribute" ); +const MACRO_USE_ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Mod), + Allow(Target::ExternCrate), + Allow(Target::Crate), + Error(Target::WherePredicate), +]); impl<S: Stage> AttributeParser<S> for MacroUseParser { const ATTRIBUTES: AcceptMapping<Self, S> = &[( @@ -99,8 +101,8 @@ impl<S: Stage> AttributeParser<S> for MacroUseParser { } } ArgParser::NameValue(_) => { - let suggestions = MACRO_USE_TEMPLATE.suggestions(false, sym::macro_use); - cx.emit_err(session_diagnostics::IllFormedAttributeInputLint { + let suggestions = MACRO_USE_TEMPLATE.suggestions(cx.attr_style, sym::macro_use); + cx.emit_err(IllFormedAttributeInputLint { num_suggestions: suggestions.len(), suggestions: DiagArgValue::StrListSepByAnd( suggestions.into_iter().map(|s| format!("`{s}`").into()).collect(), @@ -111,6 +113,7 @@ impl<S: Stage> AttributeParser<S> for MacroUseParser { } }, )]; + const ALLOWED_TARGETS: AllowedTargets = 
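Illustrative aside, not part of the diff: `MacroUseParser` accepts both the bare Word form and a list of macro names; crate and macro names below are made up.

    #[macro_use]
    extern crate my_macros;          // import every exported macro_rules! macro

    #[macro_use(fast_vec, fast_map)]
    extern crate my_collections;     // or only the listed ones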
MACRO_USE_ALLOWED_TARGETS; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { Some(AttributeKind::MacroUse { span: self.first_span?, arguments: self.state }) @@ -122,5 +125,11 @@ pub(crate) struct AllowInternalUnsafeParser; impl<S: Stage> NoArgsAttributeParser<S> for AllowInternalUnsafeParser { const PATH: &[Symbol] = &[sym::allow_internal_unsafe]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Ignore; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::MacroDef), + Warn(Target::Field), + Warn(Target::Arm), + ]); const CREATE: fn(Span) -> AttributeKind = |span| AttributeKind::AllowInternalUnsafe(span); } diff --git a/compiler/rustc_attr_parsing/src/attributes/mod.rs b/compiler/rustc_attr_parsing/src/attributes/mod.rs index f7946ade6d2..043bc925eac 100644 --- a/compiler/rustc_attr_parsing/src/attributes/mod.rs +++ b/compiler/rustc_attr_parsing/src/attributes/mod.rs @@ -7,16 +7,20 @@ //! Specifically, you might not care about managing the state of your [`AttributeParser`] //! state machine yourself. In this case you can choose to implement: //! -//! - [`SingleAttributeParser`]: makes it easy to implement an attribute which should error if it +//! - [`SingleAttributeParser`](crate::attributes::SingleAttributeParser): makes it easy to implement an attribute which should error if it //! appears more than once in a list of attributes -//! - [`CombineAttributeParser`]: makes it easy to implement an attribute which should combine the +//! - [`CombineAttributeParser`](crate::attributes::CombineAttributeParser): makes it easy to implement an attribute which should combine the //! contents of attributes, if an attribute appear multiple times in a list //! +//! By default, attributes are allowed anywhere. When adding an attribute that should only be used +//! at the crate root, consider setting the `TYPE` in the parser trait to +//! [`AttributeType::CrateLevel`](rustc_feature::AttributeType::CrateLevel). +//! //! Attributes should be added to `crate::context::ATTRIBUTE_PARSERS` to be parsed. use std::marker::PhantomData; -use rustc_feature::{AttributeTemplate, template}; +use rustc_feature::{AttributeTemplate, AttributeType, template}; use rustc_hir::attrs::AttributeKind; use rustc_span::{Span, Symbol}; use thin_vec::ThinVec; @@ -24,6 +28,10 @@ use thin_vec::ThinVec; use crate::context::{AcceptContext, FinalizeContext, Stage}; use crate::parser::ArgParser; use crate::session_diagnostics::UnusedMultiple; +use crate::target_checking::AllowedTargets; + +/// All the parsers require roughly the same imports, so this prelude has most of the often-needed ones. +mod prelude; pub(crate) mod allow_unstable; pub(crate) mod body; @@ -31,6 +39,7 @@ pub(crate) mod cfg; pub(crate) mod cfg_old; pub(crate) mod codegen_attrs; pub(crate) mod confusables; +pub(crate) mod crate_level; pub(crate) mod deprecation; pub(crate) mod dummy; pub(crate) mod inline; @@ -43,6 +52,7 @@ pub(crate) mod no_implicit_prelude; pub(crate) mod non_exhaustive; pub(crate) mod path; pub(crate) mod proc_macro_attrs; +pub(crate) mod prototype; pub(crate) mod repr; pub(crate) mod rustc_internal; pub(crate) mod semantics; @@ -80,6 +90,10 @@ pub(crate) trait AttributeParser<S: Stage>: Default + 'static { /// If an attribute has this symbol, the `accept` function will be called on it. 
const ATTRIBUTES: AcceptMapping<Self, S>; + const ALLOWED_TARGETS: AllowedTargets; + + const TYPE: AttributeType = AttributeType::Normal; + /// The parser has gotten a chance to accept the attributes on an item, /// here it can produce an attribute. /// @@ -116,9 +130,13 @@ pub(crate) trait SingleAttributeParser<S: Stage>: 'static { /// and this specified whether to, for example, warn or error on the other one. const ON_DUPLICATE: OnDuplicate<S>; + const ALLOWED_TARGETS: AllowedTargets; + /// The template this attribute parser should implement. Used for diagnostics. const TEMPLATE: AttributeTemplate; + const TYPE: AttributeType = AttributeType::Normal; + /// Converts a single syntactical attribute to a single semantic attribute, or [`AttributeKind`] fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind>; } @@ -163,6 +181,9 @@ impl<T: SingleAttributeParser<S>, S: Stage> AttributeParser<S> for Single<T, S> } }, )]; + const ALLOWED_TARGETS: AllowedTargets = T::ALLOWED_TARGETS; + + const TYPE: AttributeType = T::TYPE; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { Some(self.1?.0) @@ -247,6 +268,8 @@ pub(crate) enum AttributeOrder { pub(crate) trait NoArgsAttributeParser<S: Stage>: 'static { const PATH: &[Symbol]; const ON_DUPLICATE: OnDuplicate<S>; + const ALLOWED_TARGETS: AllowedTargets; + const TYPE: AttributeType = AttributeType::Normal; /// Create the [`AttributeKind`] given attribute's [`Span`]. const CREATE: fn(Span) -> AttributeKind; @@ -264,7 +287,9 @@ impl<T: NoArgsAttributeParser<S>, S: Stage> SingleAttributeParser<S> for Without const PATH: &[Symbol] = T::PATH; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = T::ON_DUPLICATE; + const ALLOWED_TARGETS: AllowedTargets = T::ALLOWED_TARGETS; const TEMPLATE: AttributeTemplate = template!(Word); + const TYPE: AttributeType = T::TYPE; fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { if let Err(span) = args.no_args() { @@ -293,9 +318,13 @@ pub(crate) trait CombineAttributeParser<S: Stage>: 'static { /// where `x` is a vec of these individual reprs. const CONVERT: ConvertFn<Self::Item>; + const ALLOWED_TARGETS: AllowedTargets; + /// The template this attribute parser should implement. Used for diagnostics. 
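Illustrative aside, not part of the diff: a minimal sketch of how a parser opts into the new `ALLOWED_TARGETS` machinery, following the trait shape shown above; the attribute name and `AttributeKind` variant are made up.

    pub(crate) struct FrobnicateParser;

    impl<S: Stage> NoArgsAttributeParser<S> for FrobnicateParser {
        const PATH: &[Symbol] = &[sym::frobnicate]; // hypothetical symbol
        const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
        const ALLOWED_TARGETS: AllowedTargets =
            AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Closure)]);
        const CREATE: fn(Span) -> AttributeKind = AttributeKind::Frobnicate; // hypothetical variant
    }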
const TEMPLATE: AttributeTemplate; + const TYPE: AttributeType = AttributeType::Normal; + /// Converts a single syntactical attribute to a number of elements of the semantic attribute, or [`AttributeKind`] fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, @@ -324,15 +353,14 @@ impl<T: CombineAttributeParser<S>, S: Stage> Default for Combine<T, S> { } impl<T: CombineAttributeParser<S>, S: Stage> AttributeParser<S> for Combine<T, S> { - const ATTRIBUTES: AcceptMapping<Self, S> = &[( - T::PATH, - <T as CombineAttributeParser<S>>::TEMPLATE, - |group: &mut Combine<T, S>, cx, args| { + const ATTRIBUTES: AcceptMapping<Self, S> = + &[(T::PATH, T::TEMPLATE, |group: &mut Combine<T, S>, cx, args| { // Keep track of the span of the first attribute, for diagnostics group.first_span.get_or_insert(cx.attr_span); group.items.extend(T::extend(cx, args)) - }, - )]; + })]; + const ALLOWED_TARGETS: AllowedTargets = T::ALLOWED_TARGETS; + const TYPE: AttributeType = T::TYPE; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { if let Some(first_span) = self.first_span { diff --git a/compiler/rustc_attr_parsing/src/attributes/must_use.rs b/compiler/rustc_attr_parsing/src/attributes/must_use.rs index c88bb5a69e5..e6a5141d783 100644 --- a/compiler/rustc_attr_parsing/src/attributes/must_use.rs +++ b/compiler/rustc_attr_parsing/src/attributes/must_use.rs @@ -1,12 +1,7 @@ use rustc_errors::DiagArgValue; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; -use crate::session_diagnostics; +use super::prelude::*; +use crate::session_diagnostics::IllFormedAttributeInputLint; pub(crate) struct MustUseParser; @@ -14,6 +9,21 @@ impl<S: Stage> SingleAttributeParser<S> for MustUseParser { const PATH: &[Symbol] = &[sym::must_use]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Fn), + Allow(Target::Enum), + Allow(Target::Struct), + Allow(Target::Union), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::ForeignFn), + // `impl Trait` in return position can trip + // `unused_must_use` if `Trait` is marked as + // `#[must_use]` + Allow(Target::Trait), + Error(Target::WherePredicate), + ]); const TEMPLATE: AttributeTemplate = template!( Word, NameValueStr: "reason", "https://doc.rust-lang.org/reference/attributes/diagnostics.html#the-must_use-attribute" @@ -35,9 +45,9 @@ impl<S: Stage> SingleAttributeParser<S> for MustUseParser { Some(value_str) } ArgParser::List(_) => { - let suggestions = - <Self as SingleAttributeParser<S>>::TEMPLATE.suggestions(false, "must_use"); - cx.emit_err(session_diagnostics::IllFormedAttributeInputLint { + let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE + .suggestions(cx.attr_style, "must_use"); + cx.emit_err(IllFormedAttributeInputLint { num_suggestions: suggestions.len(), suggestions: DiagArgValue::StrListSepByAnd( suggestions.into_iter().map(|s| format!("`{s}`").into()).collect(), diff --git a/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs b/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs 
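Illustrative aside, not part of the diff: `MustUseParser` accepts the Word and NameValueStr forms; the List form is redirected to the ill-formed-input diagnostic shown below.

    #[must_use = "this `Guard` releases the lock when dropped"]
    struct Guard;

    #[must_use]
    fn checked_add(a: u32, b: u32) -> Option<u32> {
        a.checked_add(b)
    }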
index 40f8d00685e..40073ea0f46 100644 --- a/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs +++ b/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs @@ -1,13 +1,11 @@ -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, sym}; - -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::Stage; +use super::prelude::*; pub(crate) struct NoImplicitPreludeParser; impl<S: Stage> NoArgsAttributeParser<S> for NoImplicitPreludeParser { const PATH: &[rustc_span::Symbol] = &[sym::no_implicit_prelude]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowListWarnRest(&[Allow(Target::Mod), Allow(Target::Crate)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::NoImplicitPrelude; } diff --git a/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs b/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs index 361ac8e959d..fc41c073fd2 100644 --- a/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs +++ b/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs @@ -1,13 +1,25 @@ +use rustc_hir::Target; use rustc_hir::attrs::AttributeKind; use rustc_span::{Span, Symbol, sym}; use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; use crate::context::Stage; +use crate::target_checking::AllowedTargets; +use crate::target_checking::Policy::{Allow, Warn}; pub(crate) struct NonExhaustiveParser; impl<S: Stage> NoArgsAttributeParser<S> for NonExhaustiveParser { const PATH: &[Symbol] = &[sym::non_exhaustive]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Enum), + Allow(Target::Struct), + Allow(Target::Variant), + Warn(Target::Field), + Warn(Target::Arm), + Warn(Target::MacroDef), + Warn(Target::MacroCall), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::NonExhaustive; } diff --git a/compiler/rustc_attr_parsing/src/attributes/path.rs b/compiler/rustc_attr_parsing/src/attributes/path.rs index c1c3de8cbfc..e4cb806bb42 100644 --- a/compiler/rustc_attr_parsing/src/attributes/path.rs +++ b/compiler/rustc_attr_parsing/src/attributes/path.rs @@ -1,10 +1,4 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; - -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; pub(crate) struct PathParser; @@ -12,6 +6,8 @@ impl<S: Stage> SingleAttributeParser<S> for PathParser { const PATH: &[Symbol] = &[sym::path]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowListWarnRest(&[Allow(Target::Mod), Error(Target::Crate)]); const TEMPLATE: AttributeTemplate = template!( NameValueStr: "file", "https://doc.rust-lang.org/reference/items/modules.html#the-path-attribute" diff --git a/compiler/rustc_attr_parsing/src/attributes/prelude.rs b/compiler/rustc_attr_parsing/src/attributes/prelude.rs new file mode 100644 index 00000000000..2bcdee55c75 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/prelude.rs @@ -0,0 +1,20 @@ +// parsing +// templates +pub(super) use rustc_feature::{AttributeTemplate, template}; +// data structures +pub(super) use rustc_hir::attrs::AttributeKind; +pub(super) use 
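Illustrative aside, not part of the diff: typical surface forms for the two parsers above; the file path in the `#[path]` example is made up.

    #[non_exhaustive]
    pub enum Error {
        NotFound,
        PermissionDenied,
    }

    #[path = "generated/bindings.rs"] // illustrative path
    mod bindings;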
rustc_hir::lints::AttributeLintKind; +pub(super) use rustc_hir::{MethodKind, Target}; +pub(super) use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym}; +pub(super) use thin_vec::ThinVec; + +pub(super) use crate::attributes::{ + AcceptMapping, AttributeOrder, AttributeParser, CombineAttributeParser, ConvertFn, + NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, +}; +// contexts +pub(super) use crate::context::{AcceptContext, FinalizeContext, Stage}; +pub(super) use crate::parser::*; +// target checking +pub(super) use crate::target_checking::Policy::{Allow, Error, Warn}; +pub(super) use crate::target_checking::{ALL_TARGETS, AllowedTargets}; diff --git a/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs index b267980914c..b9929d6f1f8 100644 --- a/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs @@ -1,18 +1,13 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; -use thin_vec::ThinVec; +use super::prelude::*; -use crate::attributes::{ - AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, -}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; +const PROC_MACRO_ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate), Warn(Target::MacroCall)]); pub(crate) struct ProcMacroParser; impl<S: Stage> NoArgsAttributeParser<S> for ProcMacroParser { const PATH: &[Symbol] = &[sym::proc_macro]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS; const CREATE: fn(Span) -> AttributeKind = AttributeKind::ProcMacro; } @@ -20,6 +15,7 @@ pub(crate) struct ProcMacroAttributeParser; impl<S: Stage> NoArgsAttributeParser<S> for ProcMacroAttributeParser { const PATH: &[Symbol] = &[sym::proc_macro_attribute]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS; const CREATE: fn(Span) -> AttributeKind = AttributeKind::ProcMacroAttribute; } @@ -28,6 +24,7 @@ impl<S: Stage> SingleAttributeParser<S> for ProcMacroDeriveParser { const PATH: &[Symbol] = &[sym::proc_macro_derive]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS; const TEMPLATE: AttributeTemplate = template!( List: &["TraitName", "TraitName, attributes(name1, name2, ...)"], "https://doc.rust-lang.org/reference/procedural-macros.html#derive-macros" @@ -48,6 +45,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcBuiltinMacroParser { const PATH: &[Symbol] = &[sym::rustc_builtin_macro]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::MacroDef)]); const TEMPLATE: AttributeTemplate = template!(List: &["TraitName", "TraitName, attributes(name1, name2, ...)"]); @@ -102,7 +100,7 @@ fn parse_derive_like<S: Stage>( return None; }; if !attr_list.path().word_is(sym::attributes) { - cx.expected_specific_argument(attrs.span(), vec!["attributes"]); + cx.expected_specific_argument(attrs.span(), &[sym::attributes]); return None; } let Some(attr_list) = attr_list.args().list() else { diff 
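Illustrative aside, not part of the diff: the derive-like form parsed by `ProcMacroDeriveParser` (and, via `parse_derive_like`, `RustcBuiltinMacroParser`); derive and helper-attribute names are made up, and the function must live in a proc-macro crate.

    use proc_macro::TokenStream;

    #[proc_macro_derive(Builder, attributes(builder))]
    pub fn derive_builder(input: TokenStream) -> TokenStream {
        input // a real derive would generate an impl here
    }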
--git a/compiler/rustc_attr_parsing/src/attributes/prototype.rs b/compiler/rustc_attr_parsing/src/attributes/prototype.rs new file mode 100644 index 00000000000..80fe82bf542 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/prototype.rs @@ -0,0 +1,141 @@ +//! Attributes that are only used on function prototypes. + +use rustc_feature::{AttributeTemplate, template}; +use rustc_hir::Target; +use rustc_hir::attrs::{AttributeKind, MirDialect, MirPhase}; +use rustc_span::{Span, Symbol, sym}; + +use super::{AttributeOrder, OnDuplicate}; +use crate::attributes::SingleAttributeParser; +use crate::context::{AcceptContext, Stage}; +use crate::parser::ArgParser; +use crate::target_checking::AllowedTargets; +use crate::target_checking::Policy::Allow; + +pub(crate) struct CustomMirParser; + +impl<S: Stage> SingleAttributeParser<S> for CustomMirParser { + const PATH: &[rustc_span::Symbol] = &[sym::custom_mir]; + + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Fn)]); + + const TEMPLATE: AttributeTemplate = template!(List: &[r#"dialect = "...", phase = "...""#]); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); + return None; + }; + + let mut dialect = None; + let mut phase = None; + let mut failed = false; + + for item in list.mixed() { + let Some(meta_item) = item.meta_item() else { + cx.expected_name_value(item.span(), None); + failed = true; + break; + }; + + if let Some(arg) = meta_item.word_is(sym::dialect) { + extract_value(cx, sym::dialect, arg, meta_item.span(), &mut dialect, &mut failed); + } else if let Some(arg) = meta_item.word_is(sym::phase) { + extract_value(cx, sym::phase, arg, meta_item.span(), &mut phase, &mut failed); + } else if let Some(word) = meta_item.path().word() { + let word = word.to_string(); + cx.unknown_key(meta_item.span(), word, &["dialect", "phase"]); + failed = true; + } else { + cx.expected_name_value(meta_item.span(), None); + failed = true; + }; + } + + let dialect = parse_dialect(cx, dialect, &mut failed); + let phase = parse_phase(cx, phase, &mut failed); + + if failed { + return None; + } + + Some(AttributeKind::CustomMir(dialect, phase, cx.attr_span)) + } +} + +fn extract_value<S: Stage>( + cx: &mut AcceptContext<'_, '_, S>, + key: Symbol, + arg: &ArgParser<'_>, + span: Span, + out_val: &mut Option<(Symbol, Span)>, + failed: &mut bool, +) { + if out_val.is_some() { + cx.duplicate_key(span, key); + *failed = true; + return; + } + + let Some(val) = arg.name_value() else { + cx.expected_single_argument(arg.span().unwrap_or(span)); + *failed = true; + return; + }; + + let Some(value_sym) = val.value_as_str() else { + cx.expected_string_literal(val.value_span, Some(val.value_as_lit())); + *failed = true; + return; + }; + + *out_val = Some((value_sym, val.value_span)); +} + +fn parse_dialect<S: Stage>( + cx: &mut AcceptContext<'_, '_, S>, + dialect: Option<(Symbol, Span)>, + failed: &mut bool, +) -> Option<(MirDialect, Span)> { + let (dialect, span) = dialect?; + + let dialect = match dialect { + sym::analysis => MirDialect::Analysis, + sym::built => MirDialect::Built, + sym::runtime => MirDialect::Runtime, + + _ => { + cx.expected_specific_argument(span, &[sym::analysis, sym::built, sym::runtime]); + *failed = true; + return None; + } + }; + + Some((dialect, span)) +} 
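Illustrative aside, not part of the diff: based on the keys parsed above (`dialect`, `phase`, both string-valued), usage of the attribute looks roughly like the sketch below; the `mir!` body comes from `core::intrinsics::mir` and is only a sketch, not something this changeset defines.

    #![feature(custom_mir, core_intrinsics)]
    use core::intrinsics::mir::*;

    #[custom_mir(dialect = "built")]
    fn identity(x: i32) -> i32 {
        mir! {
            {
                RET = x;
                Return()
            }
        }
    }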
+ +fn parse_phase<S: Stage>( + cx: &mut AcceptContext<'_, '_, S>, + phase: Option<(Symbol, Span)>, + failed: &mut bool, +) -> Option<(MirPhase, Span)> { + let (phase, span) = phase?; + + let phase = match phase { + sym::initial => MirPhase::Initial, + sym::post_cleanup => MirPhase::PostCleanup, + sym::optimized => MirPhase::Optimized, + + _ => { + cx.expected_specific_argument(span, &[sym::initial, sym::post_cleanup, sym::optimized]); + *failed = true; + return None; + } + }; + + Some((phase, span)) +} diff --git a/compiler/rustc_attr_parsing/src/attributes/repr.rs b/compiler/rustc_attr_parsing/src/attributes/repr.rs index 996d2af5f37..23aabd15597 100644 --- a/compiler/rustc_attr_parsing/src/attributes/repr.rs +++ b/compiler/rustc_attr_parsing/src/attributes/repr.rs @@ -1,14 +1,9 @@ use rustc_abi::Align; use rustc_ast::{IntTy, LitIntType, LitKind, UintTy}; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, IntType, ReprAttr}; -use rustc_span::{DUMMY_SP, Span, Symbol, sym}; +use rustc_hir::attrs::{IntType, ReprAttr}; -use super::{AcceptMapping, AttributeParser, CombineAttributeParser, ConvertFn, FinalizeContext}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::{ArgParser, MetaItemListParser, MetaItemParser}; -use crate::session_diagnostics; -use crate::session_diagnostics::IncorrectReprFormatGenericCause; +use super::prelude::*; +use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause}; /// Parse #[repr(...)] forms. /// @@ -60,6 +55,10 @@ impl<S: Stage> CombineAttributeParser<S> for ReprParser { reprs } + + //FIXME Still checked fully in `check_attr.rs` + //This one is slightly more complicated because the allowed targets depend on the arguments + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); } macro_rules! 
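Illustrative aside, not part of the diff: `ReprParser` combines the usual stable `#[repr(...)]` forms, for example:

    #[repr(C)]
    struct Header {
        tag: u8,
        len: u32,
    }

    #[repr(u8)]
    enum Opcode {
        Nop = 0,
        Halt = 1,
    }

    #[repr(align(64))]
    struct CacheLine([u8; 64]);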
int_pat { @@ -318,6 +317,14 @@ impl AlignParser { impl<S: Stage> AttributeParser<S> for AlignParser { const ATTRIBUTES: AcceptMapping<Self, S> = &[(Self::PATH, Self::TEMPLATE, Self::parse)]; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::ForeignFn), + ]); fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { let (align, span) = self.0?; diff --git a/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs b/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs index 1a668b4416f..a995549fc7c 100644 --- a/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs +++ b/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs @@ -1,10 +1,5 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; - -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{AcceptContext, Stage, parse_single_integer}; -use crate::parser::ArgParser; +use super::prelude::*; +use super::util::parse_single_integer; pub(crate) struct RustcLayoutScalarValidRangeStart; @@ -12,6 +7,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcLayoutScalarValidRangeStart { const PATH: &'static [Symbol] = &[sym::rustc_layout_scalar_valid_range_start]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Struct)]); const TEMPLATE: AttributeTemplate = template!(List: &["start"]); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { @@ -26,6 +22,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcLayoutScalarValidRangeEnd { const PATH: &'static [Symbol] = &[sym::rustc_layout_scalar_valid_range_end]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Struct)]); const TEMPLATE: AttributeTemplate = template!(List: &["end"]); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { @@ -40,6 +37,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcObjectLifetimeDefaultParser { const PATH: &[rustc_span::Symbol] = &[sym::rustc_object_lifetime_default]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Struct)]); const TEMPLATE: AttributeTemplate = template!(Word); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { diff --git a/compiler/rustc_attr_parsing/src/attributes/semantics.rs b/compiler/rustc_attr_parsing/src/attributes/semantics.rs index 70a8a002099..d7f62483297 100644 --- a/compiler/rustc_attr_parsing/src/attributes/semantics.rs +++ b/compiler/rustc_attr_parsing/src/attributes/semantics.rs @@ -1,12 +1,9 @@ -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; - -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::Stage; +use super::prelude::*; pub(crate) struct 
MayDangleParser; impl<S: Stage> NoArgsAttributeParser<S> for MayDangleParser { const PATH: &[Symbol] = &[sym::may_dangle]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); //FIXME Still checked fully in `check_attr.rs` const CREATE: fn(span: Span) -> AttributeKind = AttributeKind::MayDangle; } diff --git a/compiler/rustc_attr_parsing/src/attributes/stability.rs b/compiler/rustc_attr_parsing/src/attributes/stability.rs index c6707f5048b..b94e23477ff 100644 --- a/compiler/rustc_attr_parsing/src/attributes/stability.rs +++ b/compiler/rustc_attr_parsing/src/attributes/stability.rs @@ -1,19 +1,13 @@ use std::num::NonZero; use rustc_errors::ErrorGuaranteed; -use rustc_feature::template; -use rustc_hir::attrs::AttributeKind; use rustc_hir::{ - DefaultBodyStability, PartialConstStability, Stability, StabilityLevel, StableSince, - UnstableReason, VERSION_PLACEHOLDER, + DefaultBodyStability, MethodKind, PartialConstStability, Stability, StabilityLevel, + StableSince, Target, UnstableReason, VERSION_PLACEHOLDER, }; -use rustc_span::{Ident, Span, Symbol, sym}; +use super::prelude::*; use super::util::parse_version; -use super::{AcceptMapping, AttributeParser, OnDuplicate}; -use crate::attributes::NoArgsAttributeParser; -use crate::context::{AcceptContext, FinalizeContext, Stage}; -use crate::parser::{ArgParser, MetaItemParser}; use crate::session_diagnostics::{self, UnsupportedLiteralReason}; macro_rules! reject_outside_std { @@ -26,6 +20,36 @@ macro_rules! reject_outside_std { }; } +const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Struct), + Allow(Target::Enum), + Allow(Target::Union), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::Impl { of_trait: false }), + Allow(Target::Impl { of_trait: true }), + Allow(Target::MacroDef), + Allow(Target::Crate), + Allow(Target::Mod), + Allow(Target::Use), // FIXME I don't think this does anything? 
+ Allow(Target::Const), + Allow(Target::AssocConst), + Allow(Target::AssocTy), + Allow(Target::Trait), + Allow(Target::TraitAlias), + Allow(Target::TyAlias), + Allow(Target::Variant), + Allow(Target::Field), + Allow(Target::Param), + Allow(Target::Static), + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + Allow(Target::ExternCrate), +]); + #[derive(Default)] pub(crate) struct StabilityParser { allowed_through_unstable_modules: Option<Symbol>, @@ -87,6 +111,7 @@ impl<S: Stage> AttributeParser<S> for StabilityParser { }, ), ]; + const ALLOWED_TARGETS: AllowedTargets = ALLOWED_TARGETS; fn finalize(mut self, cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { if let Some(atum) = self.allowed_through_unstable_modules { @@ -142,6 +167,7 @@ impl<S: Stage> AttributeParser<S> for BodyStabilityParser { } }, )]; + const ALLOWED_TARGETS: AllowedTargets = ALLOWED_TARGETS; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { let (stability, span) = self.stability?; @@ -154,6 +180,10 @@ pub(crate) struct ConstStabilityIndirectParser; impl<S: Stage> NoArgsAttributeParser<S> for ConstStabilityIndirectParser { const PATH: &[Symbol] = &[sym::rustc_const_stable_indirect]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Ignore; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + ]); const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::ConstStabilityIndirect; } @@ -213,6 +243,7 @@ impl<S: Stage> AttributeParser<S> for ConstStabilityParser { this.promotable = true; }), ]; + const ALLOWED_TARGETS: AllowedTargets = ALLOWED_TARGETS; fn finalize(mut self, cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { if self.promotable { diff --git a/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs index 3267855fb0d..510ff1ded49 100644 --- a/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs @@ -1,11 +1,4 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_hir::lints::AttributeLintKind; -use rustc_span::{Symbol, sym}; - -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; pub(crate) struct IgnoreParser; @@ -13,6 +6,8 @@ impl<S: Stage> SingleAttributeParser<S> for IgnoreParser { const PATH: &[Symbol] = &[sym::ignore]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowListWarnRest(&[Allow(Target::Fn), Error(Target::WherePredicate)]); const TEMPLATE: AttributeTemplate = template!( Word, NameValueStr: "reason", "https://doc.rust-lang.org/reference/attributes/testing.html#the-ignore-attribute" @@ -26,7 +21,7 @@ impl<S: Stage> SingleAttributeParser<S> for IgnoreParser { ArgParser::NameValue(name_value) => { let Some(str_value) = name_value.value_as_str() else { let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE - .suggestions(false, "ignore"); + .suggestions(cx.attr_style, "ignore"); let span = cx.attr_span; cx.emit_lint( AttributeLintKind::IllFormedAttributeInput { suggestions }, @@ -37,8 +32,8 @@ impl<S: Stage> SingleAttributeParser<S> for IgnoreParser { Some(str_value) } ArgParser::List(_) => { - let suggestions = - <Self 
as SingleAttributeParser<S>>::TEMPLATE.suggestions(false, "ignore"); + let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE + .suggestions(cx.attr_style, "ignore"); let span = cx.attr_span; cx.emit_lint(AttributeLintKind::IllFormedAttributeInput { suggestions }, span); return None; @@ -54,6 +49,8 @@ impl<S: Stage> SingleAttributeParser<S> for ShouldPanicParser { const PATH: &[Symbol] = &[sym::should_panic]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowListWarnRest(&[Allow(Target::Fn), Error(Target::WherePredicate)]); const TEMPLATE: AttributeTemplate = template!( Word, List: &[r#"expected = "reason""#], NameValueStr: "reason", "https://doc.rust-lang.org/reference/attributes/testing.html#the-should_panic-attribute" @@ -84,7 +81,7 @@ impl<S: Stage> SingleAttributeParser<S> for ShouldPanicParser { return None; }; if !single.path().word_is(sym::expected) { - cx.expected_specific_argument_strings(list.span, vec!["expected"]); + cx.expected_specific_argument_strings(list.span, &[sym::expected]); return None; } let Some(nv) = single.args().name_value() else { diff --git a/compiler/rustc_attr_parsing/src/attributes/traits.rs b/compiler/rustc_attr_parsing/src/attributes/traits.rs index 8514d799aa4..fbd9a480fbb 100644 --- a/compiler/rustc_attr_parsing/src/attributes/traits.rs +++ b/compiler/rustc_attr_parsing/src/attributes/traits.rs @@ -1,20 +1,22 @@ -use core::mem; +use std::mem; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; +use rustc_feature::AttributeType; +use super::prelude::*; use crate::attributes::{ AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, }; use crate::context::{AcceptContext, Stage}; use crate::parser::ArgParser; +use crate::target_checking::Policy::{Allow, Warn}; +use crate::target_checking::{ALL_TARGETS, AllowedTargets}; pub(crate) struct SkipDuringMethodDispatchParser; impl<S: Stage> SingleAttributeParser<S> for SkipDuringMethodDispatchParser { const PATH: &[Symbol] = &[sym::rustc_skip_during_method_dispatch]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepInnermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const TEMPLATE: AttributeTemplate = template!(List: &["array, boxed_slice"]); @@ -42,7 +44,7 @@ impl<S: Stage> SingleAttributeParser<S> for SkipDuringMethodDispatchParser { Some(key @ sym::array) => (key, &mut array), Some(key @ sym::boxed_slice) => (key, &mut boxed_slice), _ => { - cx.expected_specific_argument(path.span(), vec!["array", "boxed_slice"]); + cx.expected_specific_argument(path.span(), &[sym::array, sym::boxed_slice]); continue; } }; @@ -58,6 +60,7 @@ pub(crate) struct ParenSugarParser; impl<S: Stage> NoArgsAttributeParser<S> for ParenSugarParser { const PATH: &[Symbol] = &[sym::rustc_paren_sugar]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::ParenSugar; } @@ -65,6 +68,7 @@ pub(crate) struct TypeConstParser; impl<S: Stage> NoArgsAttributeParser<S> for TypeConstParser { const PATH: &[Symbol] = &[sym::type_const]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets 
= AllowedTargets::AllowList(&[Allow(Target::AssocConst)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::TypeConst; } @@ -74,6 +78,12 @@ pub(crate) struct MarkerParser; impl<S: Stage> NoArgsAttributeParser<S> for MarkerParser { const PATH: &[Symbol] = &[sym::marker]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Trait), + Warn(Target::Field), + Warn(Target::Arm), + Warn(Target::MacroDef), + ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::Marker; } @@ -81,6 +91,7 @@ pub(crate) struct DenyExplicitImplParser; impl<S: Stage> NoArgsAttributeParser<S> for DenyExplicitImplParser { const PATH: &[Symbol] = &[sym::rustc_deny_explicit_impl]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::DenyExplicitImpl; } @@ -88,6 +99,7 @@ pub(crate) struct DoNotImplementViaObjectParser; impl<S: Stage> NoArgsAttributeParser<S> for DoNotImplementViaObjectParser { const PATH: &[Symbol] = &[sym::rustc_do_not_implement_via_object]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::DoNotImplementViaObject; } @@ -98,6 +110,7 @@ pub(crate) struct ConstTraitParser; impl<S: Stage> NoArgsAttributeParser<S> for ConstTraitParser { const PATH: &[Symbol] = &[sym::const_trait]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::ConstTrait; } @@ -107,6 +120,7 @@ pub(crate) struct SpecializationTraitParser; impl<S: Stage> NoArgsAttributeParser<S> for SpecializationTraitParser { const PATH: &[Symbol] = &[sym::rustc_specialization_trait]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::SpecializationTrait; } @@ -114,6 +128,7 @@ pub(crate) struct UnsafeSpecializationMarkerParser; impl<S: Stage> NoArgsAttributeParser<S> for UnsafeSpecializationMarkerParser { const PATH: &[Symbol] = &[sym::rustc_unsafe_specialization_marker]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::UnsafeSpecializationMarker; } @@ -123,6 +138,7 @@ pub(crate) struct CoinductiveParser; impl<S: Stage> NoArgsAttributeParser<S> for CoinductiveParser { const PATH: &[Symbol] = &[sym::rustc_coinductive]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::Coinductive; } @@ -130,6 +146,8 @@ pub(crate) struct AllowIncoherentImplParser; impl<S: Stage> NoArgsAttributeParser<S> for AllowIncoherentImplParser { const PATH: &[Symbol] = &[sym::rustc_allow_incoherent_impl]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Method(MethodKind::Inherent))]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::AllowIncoherentImpl; } @@ 
-137,6 +155,8 @@ pub(crate) struct CoherenceIsCoreParser; impl<S: Stage> NoArgsAttributeParser<S> for CoherenceIsCoreParser { const PATH: &[Symbol] = &[sym::rustc_coherence_is_core]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]); + const TYPE: AttributeType = AttributeType::CrateLevel; const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::CoherenceIsCore; } @@ -144,6 +164,8 @@ pub(crate) struct FundamentalParser; impl<S: Stage> NoArgsAttributeParser<S> for FundamentalParser { const PATH: &[Symbol] = &[sym::fundamental]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Struct), Allow(Target::Trait)]); const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::Fundamental; } @@ -151,5 +173,6 @@ pub(crate) struct PointeeParser; impl<S: Stage> NoArgsAttributeParser<S> for PointeeParser { const PATH: &[Symbol] = &[sym::pointee]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); //FIXME Still checked fully in `check_attr.rs` const CREATE: fn(Span) -> AttributeKind = AttributeKind::Pointee; } diff --git a/compiler/rustc_attr_parsing/src/attributes/transparency.rs b/compiler/rustc_attr_parsing/src/attributes/transparency.rs index d4d68eb8b27..ea1f5549c4e 100644 --- a/compiler/rustc_attr_parsing/src/attributes/transparency.rs +++ b/compiler/rustc_attr_parsing/src/attributes/transparency.rs @@ -1,11 +1,6 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; use rustc_span::hygiene::Transparency; -use rustc_span::{Symbol, sym}; -use super::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{AcceptContext, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; pub(crate) struct TransparencyParser; @@ -18,6 +13,7 @@ impl<S: Stage> SingleAttributeParser<S> for TransparencyParser { const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Custom(|cx, used, unused| { cx.dcx().span_err(vec![used, unused], "multiple macro transparency attributes"); }); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::MacroDef)]); const TEMPLATE: AttributeTemplate = template!(NameValueStr: ["transparent", "semitransparent", "opaque"]); @@ -33,7 +29,7 @@ impl<S: Stage> SingleAttributeParser<S> for TransparencyParser { Some(_) => { cx.expected_specific_argument_strings( nv.value_span, - vec!["transparent", "semitransparent", "opaque"], + &[sym::transparent, sym::semitransparent, sym::opaque], ); None } diff --git a/compiler/rustc_attr_parsing/src/attributes/util.rs b/compiler/rustc_attr_parsing/src/attributes/util.rs index 10134915b27..77e8c32e59d 100644 --- a/compiler/rustc_attr_parsing/src/attributes/util.rs +++ b/compiler/rustc_attr_parsing/src/attributes/util.rs @@ -1,8 +1,12 @@ -use rustc_ast::attr::{AttributeExt, first_attr_value_str_by_name}; +use rustc_ast::LitKind; +use rustc_ast::attr::AttributeExt; use rustc_feature::is_builtin_attr_name; use rustc_hir::RustcVersion; use rustc_span::{Symbol, sym}; +use crate::context::{AcceptContext, Stage}; +use crate::parser::ArgParser; + /// Parse a rustc version number written inside string literal in an attribute, /// like appears in `since = "1.0.0"`. Suffixes like "-dev" and "-nightly" are /// not accepted in this position, unlike when parsing CFG_RELEASE. 
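A minimal sketch (not part of this commit) of the target-checking pattern these hunks introduce, assuming the crate-internal items they already use (`AllowedTargets`, the `Allow`/`Warn` policies, `Target`, and the `NoArgsAttributeParser` trait); `ExampleParser` is a placeholder, and `AttributeKind::Marker` merely stands in for a real attribute kind:

use super::prelude::*;

// Placeholder parser showing how an attribute opts into target checking.
pub(crate) struct ExampleParser;

impl<S: Stage> NoArgsAttributeParser<S> for ExampleParser {
    const PATH: &[Symbol] = &[sym::dummy];
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
    // `AllowList` rejects every target not listed; a `Warn(..)` entry downgrades
    // that target to a lint. `AllowListWarnRest` (used in these hunks for
    // `#[ignore]` and `#[should_panic]`) instead only warns on unlisted targets.
    const ALLOWED_TARGETS: AllowedTargets =
        AllowedTargets::AllowList(&[Allow(Target::Trait), Warn(Target::MacroDef)]);
    const CREATE: fn(Span) -> AttributeKind = AttributeKind::Marker;
}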
@@ -23,10 +27,6 @@ pub fn is_builtin_attr(attr: &impl AttributeExt) -> bool { attr.is_doc_comment() || attr.ident().is_some_and(|ident| is_builtin_attr_name(ident.name)) } -pub fn find_crate_name(attrs: &[impl AttributeExt]) -> Option<Symbol> { - first_attr_value_str_by_name(attrs, sym::crate_name) -} - pub fn is_doc_alias_attrs_contain_symbol<'tcx, T: AttributeExt + 'tcx>( attrs: impl Iterator<Item = &'tcx T>, symbol: Symbol, @@ -56,3 +56,32 @@ pub fn is_doc_alias_attrs_contain_symbol<'tcx, T: AttributeExt + 'tcx>( } false } + +/// Parse a single integer. +/// +/// Used by attributes that take a single integer as argument, such as +/// `#[link_ordinal]` and `#[rustc_layout_scalar_valid_range_start]`. +/// `cx` is the context given to the attribute. +/// `args` is the parser for the attribute arguments. +pub(crate) fn parse_single_integer<S: Stage>( + cx: &mut AcceptContext<'_, '_, S>, + args: &ArgParser<'_>, +) -> Option<u128> { + let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); + return None; + }; + let Some(single) = list.single() else { + cx.expected_single_argument(list.span); + return None; + }; + let Some(lit) = single.lit() else { + cx.expected_integer_literal(single.span()); + return None; + }; + let LitKind::Int(num, _ty) = lit.kind else { + cx.expected_integer_literal(single.span()); + return None; + }; + Some(num.0) +} diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index 1420753a44e..7f5b810f244 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -4,30 +4,33 @@ use std::ops::{Deref, DerefMut}; use std::sync::LazyLock; use private::Sealed; -use rustc_ast::{self as ast, LitKind, MetaItemLit, NodeId}; -use rustc_errors::{DiagCtxtHandle, Diagnostic}; -use rustc_feature::{AttributeTemplate, Features}; +use rustc_ast::{AttrStyle, CRATE_NODE_ID, MetaItemLit, NodeId}; +use rustc_errors::{Diag, Diagnostic, Level}; +use rustc_feature::{AttributeTemplate, AttributeType}; use rustc_hir::attrs::AttributeKind; use rustc_hir::lints::{AttributeLint, AttributeLintKind}; -use rustc_hir::{AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId, HirId}; +use rustc_hir::{AttrPath, CRATE_HIR_ID, HirId}; use rustc_session::Session; -use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; +use rustc_span::{ErrorGuaranteed, Span, Symbol}; +use crate::AttributeParser; use crate::attributes::allow_unstable::{ AllowConstFnUnstableParser, AllowInternalUnstableParser, UnstableFeatureBoundParser, }; use crate::attributes::body::CoroutineParser; use crate::attributes::codegen_attrs::{ - ColdParser, CoverageParser, ExportNameParser, NakedParser, NoMangleParser, OptimizeParser, - TargetFeatureParser, TrackCallerParser, UsedParser, + ColdParser, CoverageParser, ExportNameParser, ForceTargetFeatureParser, NakedParser, + NoMangleParser, OptimizeParser, SanitizeParser, TargetFeatureParser, TrackCallerParser, + UsedParser, }; use crate::attributes::confusables::ConfusablesParser; +use crate::attributes::crate_level::CrateNameParser; use crate::attributes::deprecation::DeprecationParser; use crate::attributes::dummy::DummyParser; use crate::attributes::inline::{InlineParser, RustcForceInlineParser}; use crate::attributes::link_attrs::{ ExportStableParser, FfiConstParser, FfiPureParser, LinkNameParser, LinkOrdinalParser, - LinkSectionParser, StdInternalSymbolParser, + LinkParser, LinkSectionParser, LinkageParser, StdInternalSymbolParser, }; use crate::attributes::lint_helpers::{ 
AsPtrParser, AutomaticallyDerivedParser, PassByValueParser, PubTransparentParser, @@ -43,6 +46,7 @@ use crate::attributes::path::PathParser as PathAttributeParser; use crate::attributes::proc_macro_attrs::{ ProcMacroAttributeParser, ProcMacroDeriveParser, ProcMacroParser, RustcBuiltinMacroParser, }; +use crate::attributes::prototype::CustomMirParser; use crate::attributes::repr::{AlignParser, ReprParser}; use crate::attributes::rustc_internal::{ RustcLayoutScalarValidRangeEnd, RustcLayoutScalarValidRangeStart, @@ -61,19 +65,22 @@ use crate::attributes::traits::{ }; use crate::attributes::transparency::TransparencyParser; use crate::attributes::{AttributeParser as _, Combine, Single, WithoutArgs}; -use crate::parser::{ArgParser, MetaItemParser, PathParser}; +use crate::parser::{ArgParser, PathParser}; use crate::session_diagnostics::{AttributeParseError, AttributeParseErrorReason, UnknownMetaItem}; +use crate::target_checking::AllowedTargets; type GroupType<S> = LazyLock<GroupTypeInner<S>>; -struct GroupTypeInner<S: Stage> { - accepters: BTreeMap<&'static [Symbol], Vec<GroupTypeInnerAccept<S>>>, - finalizers: Vec<FinalizeFn<S>>, +pub(super) struct GroupTypeInner<S: Stage> { + pub(super) accepters: BTreeMap<&'static [Symbol], Vec<GroupTypeInnerAccept<S>>>, + pub(super) finalizers: Vec<FinalizeFn<S>>, } -struct GroupTypeInnerAccept<S: Stage> { - template: AttributeTemplate, - accept_fn: AcceptFn<S>, +pub(super) struct GroupTypeInnerAccept<S: Stage> { + pub(super) template: AttributeTemplate, + pub(super) accept_fn: AcceptFn<S>, + pub(super) allowed_targets: AllowedTargets, + pub(super) attribute_type: AttributeType, } type AcceptFn<S> = @@ -121,7 +128,9 @@ macro_rules! attribute_parsers { STATE_OBJECT.with_borrow_mut(|s| { accept_fn(s, cx, args) }) - }) + }), + allowed_targets: <$names as crate::attributes::AttributeParser<$stage>>::ALLOWED_TARGETS, + attribute_type: <$names as crate::attributes::AttributeParser<$stage>>::TYPE, }); } @@ -152,6 +161,8 @@ attribute_parsers!( // tidy-alphabetical-start Combine<AllowConstFnUnstableParser>, Combine<AllowInternalUnstableParser>, + Combine<ForceTargetFeatureParser>, + Combine<LinkParser>, Combine<ReprParser>, Combine<TargetFeatureParser>, Combine<UnstableFeatureBoundParser>, @@ -159,6 +170,8 @@ attribute_parsers!( // tidy-alphabetical-start Single<CoverageParser>, + Single<CrateNameParser>, + Single<CustomMirParser>, Single<DeprecationParser>, Single<DummyParser>, Single<ExportNameParser>, @@ -167,6 +180,7 @@ attribute_parsers!( Single<LinkNameParser>, Single<LinkOrdinalParser>, Single<LinkSectionParser>, + Single<LinkageParser>, Single<MustUseParser>, Single<OptimizeParser>, Single<PathAttributeParser>, @@ -176,6 +190,7 @@ attribute_parsers!( Single<RustcLayoutScalarValidRangeEnd>, Single<RustcLayoutScalarValidRangeStart>, Single<RustcObjectLifetimeDefaultParser>, + Single<SanitizeParser>, Single<ShouldPanicParser>, Single<SkipDuringMethodDispatchParser>, Single<TransparencyParser>, @@ -238,6 +253,8 @@ pub trait Stage: Sized + 'static + Sealed { ) -> ErrorGuaranteed; fn should_emit(&self) -> ShouldEmit; + + fn id_is_crate_root(id: Self::Id) -> bool; } // allow because it's a sealed trait @@ -253,16 +270,16 @@ impl Stage for Early { sess: &'sess Session, diag: impl for<'x> Diagnostic<'x>, ) -> ErrorGuaranteed { - if self.emit_errors.should_emit() { - sess.dcx().emit_err(diag) - } else { - sess.dcx().create_err(diag).delay_as_bug() - } + self.should_emit().emit_err(sess.dcx().create_err(diag)) } fn should_emit(&self) -> ShouldEmit { self.emit_errors 
} + + fn id_is_crate_root(id: Self::Id) -> bool { + id == CRATE_NODE_ID + } } // allow because it's a sealed trait @@ -284,6 +301,10 @@ impl Stage for Late { fn should_emit(&self) -> ShouldEmit { ShouldEmit::ErrorsAndLints } + + fn id_is_crate_root(id: Self::Id) -> bool { + id == CRATE_HIR_ID + } } /// used when parsing attributes for miscellaneous things *before* ast lowering @@ -304,6 +325,9 @@ pub struct AcceptContext<'f, 'sess, S: Stage> { /// The span of the attribute currently being parsed pub(crate) attr_span: Span, + /// Whether it is an inner or outer attribute + pub(crate) attr_style: AttrStyle, + /// The expected structure of the attribute. /// /// Used in reporting errors to give a hint to users what the attribute *should* look like. @@ -322,7 +346,7 @@ impl<'f, 'sess: 'f, S: Stage> SharedContext<'f, 'sess, S> { /// must be delayed until after HIR is built. This method will take care of the details of /// that. pub(crate) fn emit_lint(&mut self, lint: AttributeLintKind, span: Span) { - if !self.stage.should_emit().should_emit() { + if matches!(self.stage.should_emit(), ShouldEmit::Nothing) { return; } let id = self.target_id; @@ -385,6 +409,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { i.kind.is_bytestr().then(|| self.sess().source_map().start_point(i.span)) }), }, + attr_style: self.attr_style, }) } @@ -395,6 +420,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedIntegerLiteral, + attr_style: self.attr_style, }) } @@ -405,6 +431,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedList, + attr_style: self.attr_style, }) } @@ -415,6 +442,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedNoArgs, + attr_style: self.attr_style, }) } @@ -426,6 +454,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedIdentifier, + attr_style: self.attr_style, }) } @@ -438,6 +467,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedNameValue(name), + attr_style: self.attr_style, }) } @@ -449,6 +479,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::DuplicateKey(key), + attr_style: self.attr_style, }) } @@ -461,6 +492,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::UnexpectedLiteral, + attr_style: self.attr_style, }) } @@ -471,6 +503,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedSingleArgument, + attr_style: self.attr_style, }) } @@ -481,13 +514,15 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { template: self.template.clone(), attribute: self.attr_path.clone(), reason: AttributeParseErrorReason::ExpectedAtLeastOneArgument, + attr_style: self.attr_style, }) } + /// produces an error along the lines 
of `expected one of [foo, meow]` pub(crate) fn expected_specific_argument( &self, span: Span, - possibilities: Vec<&'static str>, + possibilities: &[Symbol], ) -> ErrorGuaranteed { self.emit_err(AttributeParseError { span, @@ -499,13 +534,16 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { strings: false, list: false, }, + attr_style: self.attr_style, }) } + /// produces an error along the lines of `expected one of [foo, meow] as an argument`. + /// i.e. slightly different wording to [`expected_specific_argument`](Self::expected_specific_argument). pub(crate) fn expected_specific_argument_and_list( &self, span: Span, - possibilities: Vec<&'static str>, + possibilities: &[Symbol], ) -> ErrorGuaranteed { self.emit_err(AttributeParseError { span, @@ -517,13 +555,15 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { strings: false, list: true, }, + attr_style: self.attr_style, }) } + /// produces an error along the lines of `expected one of ["foo", "meow"]` pub(crate) fn expected_specific_argument_strings( &self, span: Span, - possibilities: Vec<&'static str>, + possibilities: &[Symbol], ) -> ErrorGuaranteed { self.emit_err(AttributeParseError { span, @@ -535,6 +575,7 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { strings: true, list: false, }, + attr_style: self.attr_style, }) } @@ -570,7 +611,7 @@ pub struct SharedContext<'p, 'sess, S: Stage> { /// The id ([`NodeId`] if `S` is `Early`, [`HirId`] if `S` is `Late`) of the syntactical component this attribute was applied to pub(crate) target_id: S::Id, - emit_lint: &'p mut dyn FnMut(AttributeLint<S::Id>), + pub(crate) emit_lint: &'p mut dyn FnMut(AttributeLint<S::Id>), } /// Context given to every attribute parser during finalization. @@ -623,8 +664,13 @@ pub enum OmitDoc { Skip, } -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] pub enum ShouldEmit { + /// The operations will emit errors, and lints, and errors are fatal. + /// + /// Only relevant when early parsing, in late parsing equivalent to `ErrorsAndLints`. + /// Late parsing is never fatal, and instead tries to emit as many diagnostics as possible. + EarlyFatal, /// The operation will emit errors and lints. /// This is usually what you need. ErrorsAndLints, @@ -634,332 +680,12 @@ pub enum ShouldEmit { } impl ShouldEmit { - pub fn should_emit(&self) -> bool { + pub(crate) fn emit_err(&self, diag: Diag<'_>) -> ErrorGuaranteed { match self { - ShouldEmit::ErrorsAndLints => true, - ShouldEmit::Nothing => false, - } - } -} - -/// Context created once, for example as part of the ast lowering -/// context, through which all attributes can be lowered. -pub struct AttributeParser<'sess, S: Stage = Late> { - pub(crate) tools: Vec<Symbol>, - features: Option<&'sess Features>, - sess: &'sess Session, - stage: S, - - /// *Only* parse attributes with this symbol. - /// - /// Used in cases where we want the lowering infrastructure for parse just a single attribute. - parse_only: Option<Symbol>, -} - -impl<'sess> AttributeParser<'sess, Early> { - /// This method allows you to parse attributes *before* you have access to features or tools. - /// One example where this is necessary, is to parse `feature` attributes themselves for - /// example. - /// - /// Try to use this as little as possible. Attributes *should* be lowered during - /// `rustc_ast_lowering`. Some attributes require access to features to parse, which would - /// crash if you tried to do so through [`parse_limited`](Self::parse_limited). 
- /// - /// To make sure use is limited, supply a `Symbol` you'd like to parse. Only attributes with - /// that symbol are picked out of the list of instructions and parsed. Those are returned. - /// - /// No diagnostics will be emitted when parsing limited. Lints are not emitted at all, while - /// errors will be emitted as a delayed bugs. in other words, we *expect* attributes parsed - /// with `parse_limited` to be reparsed later during ast lowering where we *do* emit the errors - pub fn parse_limited( - sess: &'sess Session, - attrs: &[ast::Attribute], - sym: Symbol, - target_span: Span, - target_node_id: NodeId, - features: Option<&'sess Features>, - ) -> Option<Attribute> { - let mut p = Self { - features, - tools: Vec::new(), - parse_only: Some(sym), - sess, - stage: Early { emit_errors: ShouldEmit::Nothing }, - }; - let mut parsed = p.parse_attribute_list( - attrs, - target_span, - target_node_id, - OmitDoc::Skip, - std::convert::identity, - |_lint| { - panic!("can't emit lints here for now (nothing uses this atm)"); - }, - ); - assert!(parsed.len() <= 1); - - parsed.pop() - } - - pub fn parse_single<T>( - sess: &'sess Session, - attr: &ast::Attribute, - target_span: Span, - target_node_id: NodeId, - features: Option<&'sess Features>, - emit_errors: ShouldEmit, - parse_fn: fn(cx: &mut AcceptContext<'_, '_, Early>, item: &ArgParser<'_>) -> T, - template: &AttributeTemplate, - ) -> T { - let mut parser = Self { - features, - tools: Vec::new(), - parse_only: None, - sess, - stage: Early { emit_errors }, - }; - let ast::AttrKind::Normal(normal_attr) = &attr.kind else { - panic!("parse_single called on a doc attr") - }; - let meta_parser = MetaItemParser::from_attr(normal_attr, parser.dcx()); - let path = meta_parser.path(); - let args = meta_parser.args(); - let mut cx: AcceptContext<'_, 'sess, Early> = AcceptContext { - shared: SharedContext { - cx: &mut parser, - target_span, - target_id: target_node_id, - emit_lint: &mut |_lint| { - panic!("can't emit lints here for now (nothing uses this atm)"); - }, - }, - attr_span: attr.span, - template, - attr_path: path.get_attribute_path(), - }; - parse_fn(&mut cx, args) - } -} - -impl<'sess, S: Stage> AttributeParser<'sess, S> { - pub fn new( - sess: &'sess Session, - features: &'sess Features, - tools: Vec<Symbol>, - stage: S, - ) -> Self { - Self { features: Some(features), tools, parse_only: None, sess, stage } - } - - pub(crate) fn sess(&self) -> &'sess Session { - &self.sess - } - - pub(crate) fn features(&self) -> &'sess Features { - self.features.expect("features not available at this point in the compiler") - } - - pub(crate) fn features_option(&self) -> Option<&'sess Features> { - self.features - } - - pub(crate) fn dcx(&self) -> DiagCtxtHandle<'sess> { - self.sess().dcx() - } - - /// Parse a list of attributes. - /// - /// `target_span` is the span of the thing this list of attributes is applied to, - /// and when `omit_doc` is set, doc attributes are filtered out. - pub fn parse_attribute_list( - &mut self, - attrs: &[ast::Attribute], - target_span: Span, - target_id: S::Id, - omit_doc: OmitDoc, - - lower_span: impl Copy + Fn(Span) -> Span, - mut emit_lint: impl FnMut(AttributeLint<S::Id>), - ) -> Vec<Attribute> { - let mut attributes = Vec::new(); - let mut attr_paths = Vec::new(); - - for attr in attrs { - // If we're only looking for a single attribute, skip all the ones we don't care about. 
- if let Some(expected) = self.parse_only { - if !attr.has_name(expected) { - continue; - } - } - - // Sometimes, for example for `#![doc = include_str!("readme.md")]`, - // doc still contains a non-literal. You might say, when we're lowering attributes - // that's expanded right? But no, sometimes, when parsing attributes on macros, - // we already use the lowering logic and these are still there. So, when `omit_doc` - // is set we *also* want to ignore these. - if omit_doc == OmitDoc::Skip && attr.has_name(sym::doc) { - continue; - } - - match &attr.kind { - ast::AttrKind::DocComment(comment_kind, symbol) => { - if omit_doc == OmitDoc::Skip { - continue; - } - - attributes.push(Attribute::Parsed(AttributeKind::DocComment { - style: attr.style, - kind: *comment_kind, - span: lower_span(attr.span), - comment: *symbol, - })) - } - // // FIXME: make doc attributes go through a proper attribute parser - // ast::AttrKind::Normal(n) if n.has_name(sym::doc) => { - // let p = GenericMetaItemParser::from_attr(&n, self.dcx()); - // - // attributes.push(Attribute::Parsed(AttributeKind::DocComment { - // style: attr.style, - // kind: CommentKind::Line, - // span: attr.span, - // comment: p.args().name_value(), - // })) - // } - ast::AttrKind::Normal(n) => { - attr_paths.push(PathParser::Ast(&n.item.path)); - - let parser = MetaItemParser::from_attr(n, self.dcx()); - let path = parser.path(); - let args = parser.args(); - let parts = path.segments().map(|i| i.name).collect::<Vec<_>>(); - - if let Some(accepts) = S::parsers().accepters.get(parts.as_slice()) { - for accept in accepts { - let mut cx: AcceptContext<'_, 'sess, S> = AcceptContext { - shared: SharedContext { - cx: self, - target_span, - target_id, - emit_lint: &mut emit_lint, - }, - attr_span: lower_span(attr.span), - template: &accept.template, - attr_path: path.get_attribute_path(), - }; - - (accept.accept_fn)(&mut cx, args) - } - } else { - // If we're here, we must be compiling a tool attribute... Or someone - // forgot to parse their fancy new attribute. Let's warn them in any case. - // If you are that person, and you really think your attribute should - // remain unparsed, carefully read the documentation in this module and if - // you still think so you can add an exception to this assertion. 
- - // FIXME(jdonszelmann): convert other attributes, and check with this that - // we caught em all - // const FIXME_TEMPORARY_ATTR_ALLOWLIST: &[Symbol] = &[sym::cfg]; - // assert!( - // self.tools.contains(&parts[0]) || true, - // // || FIXME_TEMPORARY_ATTR_ALLOWLIST.contains(&parts[0]), - // "attribute {path} wasn't parsed and isn't a know tool attribute", - // ); - - attributes.push(Attribute::Unparsed(Box::new(AttrItem { - path: AttrPath::from_ast(&n.item.path), - args: self.lower_attr_args(&n.item.args, lower_span), - id: HashIgnoredAttrId { attr_id: attr.id }, - style: attr.style, - span: lower_span(attr.span), - }))); - } - } - } - } - - let mut parsed_attributes = Vec::new(); - for f in &S::parsers().finalizers { - if let Some(attr) = f(&mut FinalizeContext { - shared: SharedContext { - cx: self, - target_span, - target_id, - emit_lint: &mut emit_lint, - }, - all_attrs: &attr_paths, - }) { - parsed_attributes.push(Attribute::Parsed(attr)); - } - } - - attributes.extend(parsed_attributes); - - attributes - } - - /// Returns whether there is a parser for an attribute with this name - pub fn is_parsed_attribute(path: &[Symbol]) -> bool { - Late::parsers().accepters.contains_key(path) - } - - fn lower_attr_args(&self, args: &ast::AttrArgs, lower_span: impl Fn(Span) -> Span) -> AttrArgs { - match args { - ast::AttrArgs::Empty => AttrArgs::Empty, - ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(args.clone()), - // This is an inert key-value attribute - it will never be visible to macros - // after it gets lowered to HIR. Therefore, we can extract literals to handle - // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). - ast::AttrArgs::Eq { eq_span, expr } => { - // In valid code the value always ends up as a single literal. Otherwise, a dummy - // literal suffices because the error is handled elsewhere. - let lit = if let ast::ExprKind::Lit(token_lit) = expr.kind - && let Ok(lit) = - ast::MetaItemLit::from_token_lit(token_lit, lower_span(expr.span)) - { - lit - } else { - let guar = self.dcx().span_delayed_bug( - args.span().unwrap_or(DUMMY_SP), - "expr in place where literal is expected (builtin attr parsing)", - ); - ast::MetaItemLit { - symbol: sym::dummy, - suffix: None, - kind: ast::LitKind::Err(guar), - span: DUMMY_SP, - } - }; - AttrArgs::Eq { eq_span: lower_span(*eq_span), expr: lit } - } + ShouldEmit::EarlyFatal if diag.level() == Level::DelayedBug => diag.emit(), + ShouldEmit::EarlyFatal => diag.upgrade_to_fatal().emit(), + ShouldEmit::ErrorsAndLints => diag.emit(), + ShouldEmit::Nothing => diag.delay_as_bug(), } } } - -/// Parse a single integer. -/// -/// Used by attributes that take a single integer as argument, such as -/// `#[link_ordinal]` and `#[rustc_layout_scalar_valid_range_start]`. -/// `cx` is the context given to the attribute. -/// `args` is the parser for the attribute arguments. 
-pub(crate) fn parse_single_integer<S: Stage>( - cx: &mut AcceptContext<'_, '_, S>, - args: &ArgParser<'_>, -) -> Option<u128> { - let Some(list) = args.list() else { - cx.expected_list(cx.attr_span); - return None; - }; - let Some(single) = list.single() else { - cx.expected_single_argument(list.span); - return None; - }; - let Some(lit) = single.lit() else { - cx.expected_integer_literal(single.span()); - return None; - }; - let LitKind::Int(num, _ty) = lit.kind else { - cx.expected_integer_literal(single.span()); - return None; - }; - Some(num.0) -} diff --git a/compiler/rustc_attr_parsing/src/interface.rs b/compiler/rustc_attr_parsing/src/interface.rs new file mode 100644 index 00000000000..0fe3c209421 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/interface.rs @@ -0,0 +1,363 @@ +use std::borrow::Cow; + +use rustc_ast as ast; +use rustc_ast::NodeId; +use rustc_errors::DiagCtxtHandle; +use rustc_feature::{AttributeTemplate, Features}; +use rustc_hir::attrs::AttributeKind; +use rustc_hir::lints::AttributeLint; +use rustc_hir::{AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId, Target}; +use rustc_session::Session; +use rustc_span::{DUMMY_SP, Span, Symbol, sym}; + +use crate::context::{AcceptContext, FinalizeContext, SharedContext, Stage}; +use crate::parser::{ArgParser, MetaItemParser, PathParser}; +use crate::{Early, Late, OmitDoc, ShouldEmit}; + +/// Context created once, for example as part of the ast lowering +/// context, through which all attributes can be lowered. +pub struct AttributeParser<'sess, S: Stage = Late> { + pub(crate) tools: Vec<Symbol>, + pub(crate) features: Option<&'sess Features>, + pub(crate) sess: &'sess Session, + pub(crate) stage: S, + + /// *Only* parse attributes with this symbol. + /// + /// Used in cases where we want the lowering infrastructure for parse just a single attribute. + parse_only: Option<Symbol>, +} + +impl<'sess> AttributeParser<'sess, Early> { + /// This method allows you to parse attributes *before* you have access to features or tools. + /// One example where this is necessary, is to parse `feature` attributes themselves for + /// example. + /// + /// Try to use this as little as possible. Attributes *should* be lowered during + /// `rustc_ast_lowering`. Some attributes require access to features to parse, which would + /// crash if you tried to do so through [`parse_limited`](Self::parse_limited). + /// + /// To make sure use is limited, supply a `Symbol` you'd like to parse. Only attributes with + /// that symbol are picked out of the list of instructions and parsed. Those are returned. + /// + /// No diagnostics will be emitted when parsing limited. Lints are not emitted at all, while + /// errors will be emitted as a delayed bugs. in other words, we *expect* attributes parsed + /// with `parse_limited` to be reparsed later during ast lowering where we *do* emit the errors + pub fn parse_limited( + sess: &'sess Session, + attrs: &[ast::Attribute], + sym: Symbol, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + ) -> Option<Attribute> { + Self::parse_limited_should_emit( + sess, + attrs, + sym, + target_span, + target_node_id, + features, + ShouldEmit::Nothing, + ) + } + + /// Usually you want `parse_limited`, which defaults to no errors. 
+ pub fn parse_limited_should_emit( + sess: &'sess Session, + attrs: &[ast::Attribute], + sym: Symbol, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + should_emit: ShouldEmit, + ) -> Option<Attribute> { + let mut parsed = Self::parse_limited_all( + sess, + attrs, + Some(sym), + Target::Crate, // Does not matter, we're not going to emit errors anyways + target_span, + target_node_id, + features, + should_emit, + ); + assert!(parsed.len() <= 1); + parsed.pop() + } + + pub fn parse_limited_all( + sess: &'sess Session, + attrs: &[ast::Attribute], + parse_only: Option<Symbol>, + target: Target, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + emit_errors: ShouldEmit, + ) -> Vec<Attribute> { + let mut p = + Self { features, tools: Vec::new(), parse_only, sess, stage: Early { emit_errors } }; + p.parse_attribute_list( + attrs, + target_span, + target_node_id, + target, + OmitDoc::Skip, + std::convert::identity, + |lint| { + crate::lints::emit_attribute_lint(&lint, sess); + }, + ) + } + + pub fn parse_single<T>( + sess: &'sess Session, + attr: &ast::Attribute, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + emit_errors: ShouldEmit, + parse_fn: fn(cx: &mut AcceptContext<'_, '_, Early>, item: &ArgParser<'_>) -> Option<T>, + template: &AttributeTemplate, + ) -> Option<T> { + let mut parser = Self { + features, + tools: Vec::new(), + parse_only: None, + sess, + stage: Early { emit_errors }, + }; + let ast::AttrKind::Normal(normal_attr) = &attr.kind else { + panic!("parse_single called on a doc attr") + }; + let parts = + normal_attr.item.path.segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>(); + let meta_parser = MetaItemParser::from_attr(normal_attr, &parts, &sess.psess, emit_errors)?; + let path = meta_parser.path(); + let args = meta_parser.args(); + let mut cx: AcceptContext<'_, 'sess, Early> = AcceptContext { + shared: SharedContext { + cx: &mut parser, + target_span, + target_id: target_node_id, + emit_lint: &mut |lint| { + crate::lints::emit_attribute_lint(&lint, sess); + }, + }, + attr_span: attr.span, + attr_style: attr.style, + template, + attr_path: path.get_attribute_path(), + }; + parse_fn(&mut cx, args) + } +} + +impl<'sess, S: Stage> AttributeParser<'sess, S> { + pub fn new( + sess: &'sess Session, + features: &'sess Features, + tools: Vec<Symbol>, + stage: S, + ) -> Self { + Self { features: Some(features), tools, parse_only: None, sess, stage } + } + + pub(crate) fn sess(&self) -> &'sess Session { + &self.sess + } + + pub(crate) fn features(&self) -> &'sess Features { + self.features.expect("features not available at this point in the compiler") + } + + pub(crate) fn features_option(&self) -> Option<&'sess Features> { + self.features + } + + pub(crate) fn dcx(&self) -> DiagCtxtHandle<'sess> { + self.sess().dcx() + } + + /// Parse a list of attributes. + /// + /// `target_span` is the span of the thing this list of attributes is applied to, + /// and when `omit_doc` is set, doc attributes are filtered out. + pub fn parse_attribute_list( + &mut self, + attrs: &[ast::Attribute], + target_span: Span, + target_id: S::Id, + target: Target, + omit_doc: OmitDoc, + + lower_span: impl Copy + Fn(Span) -> Span, + mut emit_lint: impl FnMut(AttributeLint<S::Id>), + ) -> Vec<Attribute> { + let mut attributes = Vec::new(); + let mut attr_paths = Vec::new(); + + for attr in attrs { + // If we're only looking for a single attribute, skip all the ones we don't care about. 
+ if let Some(expected) = self.parse_only { + if !attr.has_name(expected) { + continue; + } + } + + // Sometimes, for example for `#![doc = include_str!("readme.md")]`, + // doc still contains a non-literal. You might say, when we're lowering attributes + // that's expanded right? But no, sometimes, when parsing attributes on macros, + // we already use the lowering logic and these are still there. So, when `omit_doc` + // is set we *also* want to ignore these. + if omit_doc == OmitDoc::Skip && attr.has_name(sym::doc) { + continue; + } + + match &attr.kind { + ast::AttrKind::DocComment(comment_kind, symbol) => { + if omit_doc == OmitDoc::Skip { + continue; + } + + attributes.push(Attribute::Parsed(AttributeKind::DocComment { + style: attr.style, + kind: *comment_kind, + span: lower_span(attr.span), + comment: *symbol, + })) + } + // // FIXME: make doc attributes go through a proper attribute parser + // ast::AttrKind::Normal(n) if n.has_name(sym::doc) => { + // let p = GenericMetaItemParser::from_attr(&n, self.dcx()); + // + // attributes.push(Attribute::Parsed(AttributeKind::DocComment { + // style: attr.style, + // kind: CommentKind::Line, + // span: attr.span, + // comment: p.args().name_value(), + // })) + // } + ast::AttrKind::Normal(n) => { + attr_paths.push(PathParser(Cow::Borrowed(&n.item.path))); + + let parts = + n.item.path.segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>(); + + if let Some(accepts) = S::parsers().accepters.get(parts.as_slice()) { + let Some(parser) = MetaItemParser::from_attr( + n, + &parts, + &self.sess.psess, + self.stage.should_emit(), + ) else { + continue; + }; + let path = parser.path(); + let args = parser.args(); + for accept in accepts { + let mut cx: AcceptContext<'_, 'sess, S> = AcceptContext { + shared: SharedContext { + cx: self, + target_span, + target_id, + emit_lint: &mut emit_lint, + }, + attr_span: lower_span(attr.span), + attr_style: attr.style, + template: &accept.template, + attr_path: path.get_attribute_path(), + }; + + (accept.accept_fn)(&mut cx, args); + if !matches!(cx.stage.should_emit(), ShouldEmit::Nothing) { + Self::check_type(accept.attribute_type, target, &mut cx); + Self::check_target(&accept.allowed_targets, target, &mut cx); + } + } + } else { + // If we're here, we must be compiling a tool attribute... Or someone + // forgot to parse their fancy new attribute. Let's warn them in any case. + // If you are that person, and you really think your attribute should + // remain unparsed, carefully read the documentation in this module and if + // you still think so you can add an exception to this assertion. 
+ + // FIXME(jdonszelmann): convert other attributes, and check with this that + // we caught em all + // const FIXME_TEMPORARY_ATTR_ALLOWLIST: &[Symbol] = &[sym::cfg]; + // assert!( + // self.tools.contains(&parts[0]) || true, + // // || FIXME_TEMPORARY_ATTR_ALLOWLIST.contains(&parts[0]), + // "attribute {path} wasn't parsed and isn't a know tool attribute", + // ); + + attributes.push(Attribute::Unparsed(Box::new(AttrItem { + path: AttrPath::from_ast(&n.item.path), + args: self.lower_attr_args(&n.item.args, lower_span), + id: HashIgnoredAttrId { attr_id: attr.id }, + style: attr.style, + span: lower_span(attr.span), + }))); + } + } + } + } + + let mut parsed_attributes = Vec::new(); + for f in &S::parsers().finalizers { + if let Some(attr) = f(&mut FinalizeContext { + shared: SharedContext { + cx: self, + target_span, + target_id, + emit_lint: &mut emit_lint, + }, + all_attrs: &attr_paths, + }) { + parsed_attributes.push(Attribute::Parsed(attr)); + } + } + + attributes.extend(parsed_attributes); + + attributes + } + + /// Returns whether there is a parser for an attribute with this name + pub fn is_parsed_attribute(path: &[Symbol]) -> bool { + Late::parsers().accepters.contains_key(path) + } + + fn lower_attr_args(&self, args: &ast::AttrArgs, lower_span: impl Fn(Span) -> Span) -> AttrArgs { + match args { + ast::AttrArgs::Empty => AttrArgs::Empty, + ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(args.clone()), + // This is an inert key-value attribute - it will never be visible to macros + // after it gets lowered to HIR. Therefore, we can extract literals to handle + // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). + ast::AttrArgs::Eq { eq_span, expr } => { + // In valid code the value always ends up as a single literal. Otherwise, a dummy + // literal suffices because the error is handled elsewhere. + let lit = if let ast::ExprKind::Lit(token_lit) = expr.kind + && let Ok(lit) = + ast::MetaItemLit::from_token_lit(token_lit, lower_span(expr.span)) + { + lit + } else { + let guar = self.dcx().span_delayed_bug( + args.span().unwrap_or(DUMMY_SP), + "expr in place where literal is expected (builtin attr parsing)", + ); + ast::MetaItemLit { + symbol: sym::dummy, + suffix: None, + kind: ast::LitKind::Err(guar), + span: DUMMY_SP, + } + }; + AttrArgs::Eq { eq_span: lower_span(*eq_span), expr: lit } + } + } + } +} diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs index fc1377e5314..f51cc8c4e8b 100644 --- a/compiler/rustc_attr_parsing/src/lib.rs +++ b/compiler/rustc_attr_parsing/src/lib.rs @@ -79,23 +79,37 @@ // tidy-alphabetical-start #![allow(internal_features)] #![doc(rust_logo)] +#![feature(decl_macro)] #![feature(rustdoc_internals)] #![recursion_limit = "256"] // tidy-alphabetical-end #[macro_use] +/// All the individual attribute parsers for each of rustc's built-in attributes. mod attributes; + +/// All the important types given to attribute parsers when parsing pub(crate) mod context; -mod lints; + +/// Code that other crates interact with, to actually parse a list (or sometimes single) +/// attribute. +mod interface; + +/// Despite this entire module called attribute parsing and the term being a little overloaded, +/// in this module the code lives that actually breaks up tokenstreams into semantic pieces of attributes, +/// like lists or name-value pairs. 
pub mod parser; + +mod lints; mod session_diagnostics; +mod target_checking; +pub mod validate_attr; pub use attributes::cfg::{CFG_TEMPLATE, EvalConfigResult, eval_config_entry, parse_cfg_attr}; pub use attributes::cfg_old::*; -pub use attributes::util::{ - find_crate_name, is_builtin_attr, is_doc_alias_attrs_contain_symbol, parse_version, -}; -pub use context::{AttributeParser, Early, Late, OmitDoc, ShouldEmit}; +pub use attributes::util::{is_builtin_attr, is_doc_alias_attrs_contain_symbol, parse_version}; +pub use context::{Early, Late, OmitDoc, ShouldEmit}; +pub use interface::AttributeParser; pub use lints::emit_attribute_lint; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } diff --git a/compiler/rustc_attr_parsing/src/lints.rs b/compiler/rustc_attr_parsing/src/lints.rs index 22f5531bc80..b1a971eec32 100644 --- a/compiler/rustc_attr_parsing/src/lints.rs +++ b/compiler/rustc_attr_parsing/src/lints.rs @@ -1,10 +1,13 @@ +use std::borrow::Cow; + use rustc_errors::{DiagArgValue, LintEmitter}; -use rustc_hir::HirId; +use rustc_hir::Target; use rustc_hir::lints::{AttributeLint, AttributeLintKind}; +use rustc_span::sym; use crate::session_diagnostics; -pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<HirId>, lint_emitter: L) { +pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<L::Id>, lint_emitter: L) { let AttributeLint { id, span, kind } = lint; match kind { @@ -34,5 +37,48 @@ pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<HirId>, lint_emi *first_span, session_diagnostics::EmptyAttributeList { attr_span: *first_span }, ), + AttributeLintKind::InvalidTarget { name, target, applied, only } => lint_emitter + .emit_node_span_lint( + // This check is here because `deprecated` had its own lint group and removing this would be a breaking change + if name.segments[0].name == sym::deprecated + && ![ + Target::Closure, + Target::Expression, + Target::Statement, + Target::Arm, + Target::MacroCall, + ] + .contains(target) + { + rustc_session::lint::builtin::USELESS_DEPRECATED + } else { + rustc_session::lint::builtin::UNUSED_ATTRIBUTES + }, + *id, + *span, + session_diagnostics::InvalidTargetLint { + name: name.clone(), + target: target.plural_name(), + applied: DiagArgValue::StrListSepByAnd( + applied.into_iter().map(|i| Cow::Owned(i.to_string())).collect(), + ), + only, + attr_span: *span, + }, + ), + + &AttributeLintKind::InvalidStyle { ref name, is_used_as_inner, target, target_span } => { + lint_emitter.emit_node_span_lint( + rustc_session::lint::builtin::UNUSED_ATTRIBUTES, + *id, + *span, + session_diagnostics::InvalidAttrStyle { + name: name.clone(), + is_used_as_inner, + target_span: (!is_used_as_inner).then_some(target_span), + target, + }, + ) + } } } diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs index aecaae947c9..6d3cf684296 100644 --- a/compiler/rustc_attr_parsing/src/parser.rs +++ b/compiler/rustc_attr_parsing/src/parser.rs @@ -3,45 +3,30 @@ //! //! 
FIXME(jdonszelmann): delete `rustc_ast/attr/mod.rs` +use std::borrow::Cow; use std::fmt::{Debug, Display}; -use std::iter::Peekable; -use rustc_ast::token::{self, Delimiter, Token}; -use rustc_ast::tokenstream::{TokenStreamIter, TokenTree}; +use rustc_ast::token::{self, Delimiter, MetaVarKind}; +use rustc_ast::tokenstream::TokenStream; use rustc_ast::{AttrArgs, DelimArgs, Expr, ExprKind, LitKind, MetaItemLit, NormalAttr, Path}; use rustc_ast_pretty::pprust; -use rustc_errors::DiagCtxtHandle; +use rustc_errors::PResult; use rustc_hir::{self as hir, AttrPath}; -use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; - -pub struct SegmentIterator<'a> { - offset: usize, - path: &'a PathParser<'a>, -} - -impl<'a> Iterator for SegmentIterator<'a> { - type Item = &'a Ident; - - fn next(&mut self) -> Option<Self::Item> { - if self.offset >= self.path.len() { - return None; - } - - let res = match self.path { - PathParser::Ast(ast_path) => &ast_path.segments[self.offset].ident, - PathParser::Attr(attr_path) => &attr_path.segments[self.offset], - }; - - self.offset += 1; - Some(res) - } -} +use rustc_parse::exp; +use rustc_parse::parser::{Parser, PathStyle, token_descr}; +use rustc_session::errors::report_lit_error; +use rustc_session::parse::ParseSess; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, sym}; +use thin_vec::ThinVec; + +use crate::ShouldEmit; +use crate::session_diagnostics::{ + InvalidMetaItem, InvalidMetaItemQuoteIdentSugg, InvalidMetaItemRemoveNegSugg, MetaBadDelim, + MetaBadDelimSugg, SuffixedLiteralInAttribute, +}; #[derive(Clone, Debug)] -pub enum PathParser<'a> { - Ast(&'a Path), - Attr(AttrPath), -} +pub struct PathParser<'a>(pub Cow<'a, Path>); impl<'a> PathParser<'a> { pub fn get_attribute_path(&self) -> hir::AttrPath { @@ -52,21 +37,15 @@ impl<'a> PathParser<'a> { } pub fn segments(&'a self) -> impl Iterator<Item = &'a Ident> { - SegmentIterator { offset: 0, path: self } + self.0.segments.iter().map(|seg| &seg.ident) } pub fn span(&self) -> Span { - match self { - PathParser::Ast(path) => path.span, - PathParser::Attr(attr_path) => attr_path.span, - } + self.0.span } pub fn len(&self) -> usize { - match self { - PathParser::Ast(path) => path.segments.len(), - PathParser::Attr(attr_path) => attr_path.segments.len(), - } + self.0.segments.len() } pub fn segments_is(&self, segments: &[Symbol]) -> bool { @@ -99,10 +78,7 @@ impl<'a> PathParser<'a> { impl Display for PathParser<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - PathParser::Ast(path) => write!(f, "{}", pprust::path_to_string(path)), - PathParser::Attr(attr_path) => write!(f, "{attr_path}"), - } + write!(f, "{}", pprust::path_to_string(&self.0)) } } @@ -123,21 +99,39 @@ impl<'a> ArgParser<'a> { } } - pub fn from_attr_args<'sess>(value: &'a AttrArgs, dcx: DiagCtxtHandle<'sess>) -> Self { - match value { + pub fn from_attr_args<'sess>( + value: &'a AttrArgs, + parts: &[Symbol], + psess: &'sess ParseSess, + should_emit: ShouldEmit, + ) -> Option<Self> { + Some(match value { AttrArgs::Empty => Self::NoArgs, - AttrArgs::Delimited(args) if args.delim == Delimiter::Parenthesis => { - Self::List(MetaItemListParser::new(args, dcx)) - } AttrArgs::Delimited(args) => { - Self::List(MetaItemListParser { sub_parsers: vec![], span: args.dspan.entire() }) + // The arguments of rustc_dummy are not validated if the arguments are delimited + if parts == &[sym::rustc_dummy] { + return Some(ArgParser::List(MetaItemListParser { + sub_parsers: ThinVec::new(), + span: 
args.dspan.entire(), + })); + } + + if args.delim != Delimiter::Parenthesis { + psess.dcx().emit_err(MetaBadDelim { + span: args.dspan.entire(), + sugg: MetaBadDelimSugg { open: args.dspan.open, close: args.dspan.close }, + }); + return None; + } + + Self::List(MetaItemListParser::new(args, psess, should_emit)?) } AttrArgs::Eq { eq_span, expr } => Self::NameValue(NameValueParser { eq_span: *eq_span, - value: expr_to_lit(dcx, &expr, *eq_span), + value: expr_to_lit(psess, &expr, expr.span, should_emit)?, value_span: expr.span, }), - } + }) } /// Asserts that this MetaItem is a list @@ -249,11 +243,16 @@ impl<'a> Debug for MetaItemParser<'a> { impl<'a> MetaItemParser<'a> { /// Create a new parser from a [`NormalAttr`], which is stored inside of any /// [`ast::Attribute`](rustc_ast::Attribute) - pub fn from_attr<'sess>(attr: &'a NormalAttr, dcx: DiagCtxtHandle<'sess>) -> Self { - Self { - path: PathParser::Ast(&attr.item.path), - args: ArgParser::from_attr_args(&attr.item.args, dcx), - } + pub fn from_attr<'sess>( + attr: &'a NormalAttr, + parts: &[Symbol], + psess: &'sess ParseSess, + should_emit: ShouldEmit, + ) -> Option<Self> { + Some(Self { + path: PathParser(Cow::Borrowed(&attr.item.path)), + args: ArgParser::from_attr_args(&attr.item.args, parts, psess, should_emit)?, + }) } } @@ -318,215 +317,235 @@ impl NameValueParser { } } -fn expr_to_lit(dcx: DiagCtxtHandle<'_>, expr: &Expr, span: Span) -> MetaItemLit { - // In valid code the value always ends up as a single literal. Otherwise, a dummy - // literal suffices because the error is handled elsewhere. - if let ExprKind::Lit(token_lit) = expr.kind - && let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span) - { - lit +fn expr_to_lit( + psess: &ParseSess, + expr: &Expr, + span: Span, + should_emit: ShouldEmit, +) -> Option<MetaItemLit> { + if let ExprKind::Lit(token_lit) = expr.kind { + let res = MetaItemLit::from_token_lit(token_lit, expr.span); + match res { + Ok(lit) => { + if token_lit.suffix.is_some() { + should_emit.emit_err( + psess.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span }), + ); + None + } else { + if !lit.kind.is_unsuffixed() { + // Emit error and continue, we can still parse the attribute as if the suffix isn't there + should_emit.emit_err( + psess.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span }), + ); + } + + Some(lit) + } + } + Err(err) => { + let guar = report_lit_error(psess, err, token_lit, expr.span); + let lit = MetaItemLit { + symbol: token_lit.symbol, + suffix: token_lit.suffix, + kind: LitKind::Err(guar), + span: expr.span, + }; + Some(lit) + } + } } else { - let guar = dcx.span_delayed_bug( - span, - "expr in place where literal is expected (builtin attr parsing)", - ); - MetaItemLit { symbol: sym::dummy, suffix: None, kind: LitKind::Err(guar), span } + if matches!(should_emit, ShouldEmit::Nothing) { + return None; + } + + // Example cases: + // - `#[foo = 1+1]`: results in `ast::ExprKind::BinOp`. + // - `#[foo = include_str!("nonexistent-file.rs")]`: + // results in `ast::ExprKind::Err`. In that case we delay + // the error because an earlier error will have already + // been reported. 
+ let msg = "attribute value must be a literal"; + let err = psess.dcx().struct_span_err(span, msg); + should_emit.emit_err(err); + None } } struct MetaItemListParserContext<'a, 'sess> { - // the tokens inside the delimiters, so `#[some::attr(a b c)]` would have `a b c` inside - inside_delimiters: Peekable<TokenStreamIter<'a>>, - dcx: DiagCtxtHandle<'sess>, + parser: &'a mut Parser<'sess>, + should_emit: ShouldEmit, } impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> { - fn done(&mut self) -> bool { - self.inside_delimiters.peek().is_none() - } - - fn next_path(&mut self) -> Option<AttrPath> { - // FIXME: Share code with `parse_path`. - let tt = self.inside_delimiters.next().map(|tt| TokenTree::uninterpolate(tt)); - - match tt.as_deref()? { - &TokenTree::Token( - Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span }, - _, - ) => { - // here we have either an ident or pathsep `::`. - - let mut segments = if let &token::Ident(name, _) = kind { - // when we lookahead another pathsep, more path's coming - if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = - self.inside_delimiters.peek() - { - self.inside_delimiters.next(); - vec![Ident::new(name, span)] - } else { - // else we have a single identifier path, that's all - return Some(AttrPath { - segments: vec![Ident::new(name, span)].into_boxed_slice(), - span, - }); - } - } else { - // if `::` is all we get, we just got a path root - vec![Ident::new(kw::PathRoot, span)] - }; + fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'sess, MetaItemLit> { + let uninterpolated_span = self.parser.token_uninterpolated_span(); + let Some(token_lit) = self.parser.eat_token_lit() else { + return self.parser.handle_missing_lit(Parser::mk_meta_item_lit_char); + }; - // one segment accepted. accept n more - loop { - // another ident? - if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = - self.inside_delimiters - .next() - .map(|tt| TokenTree::uninterpolate(tt)) - .as_deref() - { - segments.push(Ident::new(name, span)); - } else { - return None; - } - // stop unless we see another `::` - if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = - self.inside_delimiters.peek() - { - self.inside_delimiters.next(); - } else { - break; - } - } - let span = span.with_hi(segments.last().unwrap().span.hi()); - Some(AttrPath { segments: segments.into_boxed_slice(), span }) - } - TokenTree::Token(Token { kind, .. }, _) if kind.is_delim() => None, - _ => { - // malformed attributes can get here. We can't crash, but somewhere else should've - // already warned for this. - None + let lit = match MetaItemLit::from_token_lit(token_lit, self.parser.prev_token.span) { + Ok(lit) => lit, + Err(err) => { + let guar = + report_lit_error(&self.parser.psess, err, token_lit, uninterpolated_span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. 
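                // The literal is re-rendered without its suffix so that quote characters and
                // prefixes (e.g. `b"..."`) survive in the interned symbol, while the original
                // suffix is kept separately on the resulting `token::Err` literal.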
+ let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix); + MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap() } + }; + + if !lit.kind.is_unsuffixed() { + // Emit error and continue, we can still parse the attribute as if the suffix isn't there + self.should_emit.emit_err( + self.parser.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span }), + ); } - } - fn value(&mut self) -> Option<MetaItemLit> { - match self.inside_delimiters.next() { - Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => { - MetaItemListParserContext { - inside_delimiters: inner_tokens.iter().peekable(), - dcx: self.dcx, - } - .value() - } - Some(TokenTree::Token(token, _)) => MetaItemLit::from_token(token), - _ => None, + Ok(lit) + } + + fn parse_attr_item(&mut self) -> PResult<'sess, MetaItemParser<'static>> { + if let Some(MetaVarKind::Meta { has_meta_form }) = self.parser.token.is_metavar_seq() { + return if has_meta_form { + let attr_item = self + .parser + .eat_metavar_seq(MetaVarKind::Meta { has_meta_form: true }, |this| { + MetaItemListParserContext { parser: this, should_emit: self.should_emit } + .parse_attr_item() + }) + .unwrap(); + Ok(attr_item) + } else { + self.parser.unexpected_any() + }; } + + let path = self.parser.parse_path(PathStyle::Mod)?; + + // Check style of arguments that this meta item has + let args = if self.parser.check(exp!(OpenParen)) { + let start = self.parser.token.span; + let (sub_parsers, _) = self.parser.parse_paren_comma_seq(|parser| { + MetaItemListParserContext { parser, should_emit: self.should_emit } + .parse_meta_item_inner() + })?; + let end = self.parser.prev_token.span; + ArgParser::List(MetaItemListParser { sub_parsers, span: start.with_hi(end.hi()) }) + } else if self.parser.eat(exp!(Eq)) { + let eq_span = self.parser.prev_token.span; + let value = self.parse_unsuffixed_meta_item_lit()?; + + ArgParser::NameValue(NameValueParser { eq_span, value, value_span: value.span }) + } else { + ArgParser::NoArgs + }; + + Ok(MetaItemParser { path: PathParser(Cow::Owned(path)), args }) } - /// parses one element on the inside of a list attribute like `#[my_attr( <insides> )]` - /// - /// parses a path followed be either: - /// 1. nothing (a word attr) - /// 2. a parenthesized list - /// 3. an equals sign and a literal (name-value) - /// - /// Can also parse *just* a literal. 
This is for cases like as `#[my_attr("literal")]` - /// where no path is given before the literal - /// - /// Some exceptions too for interpolated attributes which are already pre-processed - fn next(&mut self) -> Option<MetaItemOrLitParser<'a>> { - // a list element is either a literal - if let Some(TokenTree::Token(token, _)) = self.inside_delimiters.peek() - && let Some(lit) = MetaItemLit::from_token(token) - { - self.inside_delimiters.next(); - return Some(MetaItemOrLitParser::Lit(lit)); - } else if let Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) = - self.inside_delimiters.peek() + fn parse_meta_item_inner(&mut self) -> PResult<'sess, MetaItemOrLitParser<'static>> { + match self.parse_unsuffixed_meta_item_lit() { + Ok(lit) => return Ok(MetaItemOrLitParser::Lit(lit)), + Err(err) => err.cancel(), // we provide a better error below + } + + match self.parse_attr_item() { + Ok(mi) => return Ok(MetaItemOrLitParser::MetaItemParser(mi)), + Err(err) => err.cancel(), // we provide a better error below + } + + let mut err = InvalidMetaItem { + span: self.parser.token.span, + descr: token_descr(&self.parser.token), + quote_ident_sugg: None, + remove_neg_sugg: None, + }; + + // Suggest quoting idents, e.g. in `#[cfg(key = value)]`. We don't use `Token::ident` and + // don't `uninterpolate` the token to avoid suggesting anything butchered or questionable + // when macro metavariables are involved. + if self.parser.prev_token == token::Eq + && let token::Ident(..) = self.parser.token.kind { - self.inside_delimiters.next(); - return MetaItemListParserContext { - inside_delimiters: inner_tokens.iter().peekable(), - dcx: self.dcx, + let before = self.parser.token.span.shrink_to_lo(); + while let token::Ident(..) = self.parser.token.kind { + self.parser.bump(); } - .next(); + err.quote_ident_sugg = Some(InvalidMetaItemQuoteIdentSugg { + before, + after: self.parser.prev_token.span.shrink_to_hi(), + }); } - // or a path. - let path = self.next_path()?; - - // Paths can be followed by: - // - `(more meta items)` (another list) - // - `= lit` (a name-value) - // - nothing - Some(MetaItemOrLitParser::MetaItemParser(match self.inside_delimiters.peek() { - Some(TokenTree::Delimited(dspan, _, Delimiter::Parenthesis, inner_tokens)) => { - self.inside_delimiters.next(); - - MetaItemParser { - path: PathParser::Attr(path), - args: ArgParser::List(MetaItemListParser::new_tts( - inner_tokens.iter(), - dspan.entire(), - self.dcx, - )), - } - } - Some(TokenTree::Delimited(_, ..)) => { - self.inside_delimiters.next(); - // self.dcx.span_delayed_bug(span.entire(), "wrong delimiters"); - return None; - } - Some(TokenTree::Token(Token { kind: token::Eq, span }, _)) => { - self.inside_delimiters.next(); - let value = self.value()?; - MetaItemParser { - path: PathParser::Attr(path), - args: ArgParser::NameValue(NameValueParser { - eq_span: *span, - value_span: value.span, - value, - }), - } - } - _ => MetaItemParser { path: PathParser::Attr(path), args: ArgParser::NoArgs }, - })) + if self.parser.token == token::Minus + && self + .parser + .look_ahead(1, |t| matches!(t.kind, rustc_ast::token::TokenKind::Literal { .. 
})) + { + err.remove_neg_sugg = + Some(InvalidMetaItemRemoveNegSugg { negative_sign: self.parser.token.span }); + self.parser.bump(); + self.parser.bump(); + } + + Err(self.parser.dcx().create_err(err)) } - fn parse(mut self, span: Span) -> MetaItemListParser<'a> { - let mut sub_parsers = Vec::new(); + fn parse( + tokens: TokenStream, + psess: &'sess ParseSess, + span: Span, + should_emit: ShouldEmit, + ) -> PResult<'sess, MetaItemListParser<'static>> { + let mut parser = Parser::new(psess, tokens, None); + let mut this = MetaItemListParserContext { parser: &mut parser, should_emit }; - while !self.done() { - let Some(n) = self.next() else { - continue; - }; - sub_parsers.push(n); + // Presumably, the majority of the time there will only be one attr. + let mut sub_parsers = ThinVec::with_capacity(1); + while this.parser.token != token::Eof { + sub_parsers.push(this.parse_meta_item_inner()?); - match self.inside_delimiters.peek() { - None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => { - self.inside_delimiters.next(); - } - Some(_) => {} + if !this.parser.eat(exp!(Comma)) { + break; } } - MetaItemListParser { sub_parsers, span } + if parser.token != token::Eof { + parser.unexpected()?; + } + + Ok(MetaItemListParser { sub_parsers, span }) } } #[derive(Debug, Clone)] pub struct MetaItemListParser<'a> { - sub_parsers: Vec<MetaItemOrLitParser<'a>>, + sub_parsers: ThinVec<MetaItemOrLitParser<'a>>, pub span: Span, } impl<'a> MetaItemListParser<'a> { - fn new<'sess>(delim: &'a DelimArgs, dcx: DiagCtxtHandle<'sess>) -> Self { - MetaItemListParser::new_tts(delim.tokens.iter(), delim.dspan.entire(), dcx) - } - - fn new_tts<'sess>(tts: TokenStreamIter<'a>, span: Span, dcx: DiagCtxtHandle<'sess>) -> Self { - MetaItemListParserContext { inside_delimiters: tts.peekable(), dcx }.parse(span) + fn new<'sess>( + delim: &'a DelimArgs, + psess: &'sess ParseSess, + should_emit: ShouldEmit, + ) -> Option<Self> { + match MetaItemListParserContext::parse( + delim.tokens.clone(), + psess, + delim.dspan.entire(), + should_emit, + ) { + Ok(s) => Some(s), + Err(e) => { + should_emit.emit_err(e); + None + } + } } /// Lets you pick and choose as what you want to parse each element in the list diff --git a/compiler/rustc_attr_parsing/src/session_diagnostics.rs b/compiler/rustc_attr_parsing/src/session_diagnostics.rs index 41179844152..a9dee23bf6a 100644 --- a/compiler/rustc_attr_parsing/src/session_diagnostics.rs +++ b/compiler/rustc_attr_parsing/src/session_diagnostics.rs @@ -1,12 +1,12 @@ use std::num::IntErrorKind; -use rustc_ast as ast; +use rustc_ast::{self as ast, AttrStyle, Path}; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, }; use rustc_feature::AttributeTemplate; -use rustc_hir::AttrPath; +use rustc_hir::{AttrPath, Target}; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_span::{Span, Symbol}; @@ -480,6 +480,32 @@ pub(crate) struct EmptyAttributeList { pub attr_span: Span, } +#[derive(LintDiagnostic)] +#[diag(attr_parsing_invalid_target_lint)] +#[warning] +#[help] +pub(crate) struct InvalidTargetLint { + pub name: AttrPath, + pub target: &'static str, + pub applied: DiagArgValue, + pub only: &'static str, + #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")] + pub attr_span: Span, +} + +#[derive(Diagnostic)] +#[help] +#[diag(attr_parsing_invalid_target)] +pub(crate) struct InvalidTarget { + #[primary_span] + #[suggestion(code = "", applicability = 
"machine-applicable", style = "tool-only")] + pub span: Span, + pub name: AttrPath, + pub target: &'static str, + pub applied: DiagArgValue, + pub only: &'static str, +} + #[derive(Diagnostic)] #[diag(attr_parsing_invalid_alignment_value, code = E0589)] pub(crate) struct InvalidAlignmentValue { @@ -532,7 +558,7 @@ pub(crate) struct LinkOrdinalOutOfRange { pub ordinal: u128, } -pub(crate) enum AttributeParseErrorReason { +pub(crate) enum AttributeParseErrorReason<'a> { ExpectedNoArgs, ExpectedStringLiteral { byte_string: Option<Span>, @@ -545,7 +571,7 @@ pub(crate) enum AttributeParseErrorReason { ExpectedNameValue(Option<Symbol>), DuplicateKey(Symbol), ExpectedSpecificArgument { - possibilities: Vec<&'static str>, + possibilities: &'a [Symbol], strings: bool, /// Should we tell the user to write a list when they didn't? list: bool, @@ -553,15 +579,16 @@ pub(crate) enum AttributeParseErrorReason { ExpectedIdentifier, } -pub(crate) struct AttributeParseError { +pub(crate) struct AttributeParseError<'a> { pub(crate) span: Span, pub(crate) attr_span: Span, + pub(crate) attr_style: AttrStyle, pub(crate) template: AttributeTemplate, pub(crate) attribute: AttrPath, - pub(crate) reason: AttributeParseErrorReason, + pub(crate) reason: AttributeParseErrorReason<'a>, } -impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { +impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> { fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> { let name = self.attribute.to_string(); @@ -630,7 +657,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { list: false, } => { let quote = if strings { '"' } else { '`' }; - match possibilities.as_slice() { + match possibilities { &[] => {} &[x] => { diag.span_label( @@ -660,7 +687,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { list: true, } => { let quote = if strings { '"' } else { '`' }; - match possibilities.as_slice() { + match possibilities { &[] => {} &[x] => { diag.span_label( @@ -694,7 +721,8 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { if let Some(link) = self.template.docs { diag.note(format!("for more information, visit <{link}>")); } - let suggestions = self.template.suggestions(false, &name); + let suggestions = self.template.suggestions(self.attr_style, &name); + diag.span_suggestions( self.attr_span, if suggestions.len() == 1 { @@ -709,3 +737,196 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { diag } } + +#[derive(Diagnostic)] +#[diag(attr_parsing_invalid_attr_unsafe)] +#[note] +pub(crate) struct InvalidAttrUnsafe { + #[primary_span] + #[label] + pub span: Span, + pub name: Path, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_unsafe_attr_outside_unsafe)] +pub(crate) struct UnsafeAttrOutsideUnsafe { + #[primary_span] + #[label] + pub span: Span, + #[subdiagnostic] + pub suggestion: UnsafeAttrOutsideUnsafeSuggestion, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + attr_parsing_unsafe_attr_outside_unsafe_suggestion, + applicability = "machine-applicable" +)] +pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion { + #[suggestion_part(code = "unsafe(")] + pub left: Span, + #[suggestion_part(code = ")")] + pub right: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_meta_bad_delim)] +pub(crate) struct MetaBadDelim { + #[primary_span] + pub span: Span, + #[subdiagnostic] + pub sugg: MetaBadDelimSugg, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + 
attr_parsing_meta_bad_delim_suggestion, + applicability = "machine-applicable" +)] +pub(crate) struct MetaBadDelimSugg { + #[suggestion_part(code = "(")] + pub open: Span, + #[suggestion_part(code = ")")] + pub close: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_invalid_meta_item)] +pub(crate) struct InvalidMetaItem { + #[primary_span] + pub span: Span, + pub descr: String, + #[subdiagnostic] + pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>, + #[subdiagnostic] + pub remove_neg_sugg: Option<InvalidMetaItemRemoveNegSugg>, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(attr_parsing_quote_ident_sugg, applicability = "machine-applicable")] +pub(crate) struct InvalidMetaItemQuoteIdentSugg { + #[suggestion_part(code = "\"")] + pub before: Span, + #[suggestion_part(code = "\"")] + pub after: Span, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(attr_parsing_remove_neg_sugg, applicability = "machine-applicable")] +pub(crate) struct InvalidMetaItemRemoveNegSugg { + #[suggestion_part(code = "")] + pub negative_sign: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_suffixed_literal_in_attribute)] +#[help] +pub(crate) struct SuffixedLiteralInAttribute { + #[primary_span] + pub span: Span, +} + +#[derive(LintDiagnostic)] +#[diag(attr_parsing_invalid_style)] +pub(crate) struct InvalidAttrStyle { + pub name: AttrPath, + pub is_used_as_inner: bool, + #[note] + pub target_span: Option<Span>, + pub target: Target, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_empty_link_name, code = E0454)] +pub(crate) struct EmptyLinkName { + #[primary_span] + #[label] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_link_framework_apple, code = E0455)] +pub(crate) struct LinkFrameworkApple { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_incompatible_wasm_link)] +pub(crate) struct IncompatibleWasmLink { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_link_requires_name, code = E0459)] +pub(crate) struct LinkRequiresName { + #[primary_span] + #[label] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_raw_dylib_no_nul)] +pub(crate) struct RawDylibNoNul { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_raw_dylib_only_windows, code = E0455)] +pub(crate) struct RawDylibOnlyWindows { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_invalid_link_modifier)] +pub(crate) struct InvalidLinkModifier { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_multiple_modifiers)] +pub(crate) struct MultipleModifiers { + #[primary_span] + pub span: Span, + pub modifier: Symbol, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_import_name_type_x86)] +pub(crate) struct ImportNameTypeX86 { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_bundle_needs_static)] +pub(crate) struct BundleNeedsStatic { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_whole_archive_needs_static)] +pub(crate) struct WholeArchiveNeedsStatic { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_as_needed_compatibility)] +pub(crate) struct AsNeededCompatibility { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_import_name_type_raw)] +pub(crate) struct ImportNameTypeRaw { + #[primary_span] + pub span: Span, +} diff --git a/compiler/rustc_attr_parsing/src/target_checking.rs 
b/compiler/rustc_attr_parsing/src/target_checking.rs new file mode 100644 index 00000000000..edc496b460c --- /dev/null +++ b/compiler/rustc_attr_parsing/src/target_checking.rs @@ -0,0 +1,269 @@ +use std::borrow::Cow; + +use rustc_ast::AttrStyle; +use rustc_errors::DiagArgValue; +use rustc_feature::{AttributeType, Features}; +use rustc_hir::lints::AttributeLintKind; +use rustc_hir::{MethodKind, Target}; + +use crate::AttributeParser; +use crate::context::{AcceptContext, Stage}; +use crate::session_diagnostics::InvalidTarget; + +#[derive(Debug)] +pub(crate) enum AllowedTargets { + AllowList(&'static [Policy]), + AllowListWarnRest(&'static [Policy]), +} + +pub(crate) enum AllowedResult { + Allowed, + Warn, + Error, +} + +impl AllowedTargets { + pub(crate) fn is_allowed(&self, target: Target) -> AllowedResult { + match self { + AllowedTargets::AllowList(list) => { + if list.contains(&Policy::Allow(target)) { + AllowedResult::Allowed + } else if list.contains(&Policy::Warn(target)) { + AllowedResult::Warn + } else { + AllowedResult::Error + } + } + AllowedTargets::AllowListWarnRest(list) => { + if list.contains(&Policy::Allow(target)) { + AllowedResult::Allowed + } else if list.contains(&Policy::Error(target)) { + AllowedResult::Error + } else { + AllowedResult::Warn + } + } + } + } + + pub(crate) fn allowed_targets(&self) -> Vec<Target> { + match self { + AllowedTargets::AllowList(list) => list, + AllowedTargets::AllowListWarnRest(list) => list, + } + .iter() + .filter_map(|target| match target { + Policy::Allow(target) => Some(*target), + Policy::Warn(_) => None, + Policy::Error(_) => None, + }) + .collect() + } +} + +#[derive(Debug, Eq, PartialEq)] +pub(crate) enum Policy { + Allow(Target), + Warn(Target), + Error(Target), +} + +impl<'sess, S: Stage> AttributeParser<'sess, S> { + pub(crate) fn check_target( + allowed_targets: &AllowedTargets, + target: Target, + cx: &mut AcceptContext<'_, 'sess, S>, + ) { + match allowed_targets.is_allowed(target) { + AllowedResult::Allowed => {} + AllowedResult::Warn => { + let allowed_targets = allowed_targets.allowed_targets(); + let (applied, only) = allowed_targets_applied(allowed_targets, target, cx.features); + let name = cx.attr_path.clone(); + let attr_span = cx.attr_span; + cx.emit_lint( + AttributeLintKind::InvalidTarget { + name, + target, + only: if only { "only " } else { "" }, + applied, + }, + attr_span, + ); + } + AllowedResult::Error => { + let allowed_targets = allowed_targets.allowed_targets(); + let (applied, only) = allowed_targets_applied(allowed_targets, target, cx.features); + let name = cx.attr_path.clone(); + cx.dcx().emit_err(InvalidTarget { + span: cx.attr_span.clone(), + name, + target: target.plural_name(), + only: if only { "only " } else { "" }, + applied: DiagArgValue::StrListSepByAnd( + applied.into_iter().map(Cow::Owned).collect(), + ), + }); + } + } + } + + pub(crate) fn check_type( + attribute_type: AttributeType, + target: Target, + cx: &mut AcceptContext<'_, 'sess, S>, + ) { + let is_crate_root = S::id_is_crate_root(cx.target_id); + + if is_crate_root { + return; + } + + if attribute_type != AttributeType::CrateLevel { + return; + } + + let lint = AttributeLintKind::InvalidStyle { + name: cx.attr_path.clone(), + is_used_as_inner: cx.attr_style == AttrStyle::Inner, + target, + target_span: cx.target_span, + }; + let attr_span = cx.attr_span; + + cx.emit_lint(lint, attr_span); + } +} + +/// Takes a list of `allowed_targets` for an attribute, and the `target` the attribute was applied to. 
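As a rough, self-contained illustration of the allow/warn/error policy encoded by `AllowedTargets` and `Policy` above: the sketch below uses a heavily trimmed stand-in `Target` enum (not rustc's actual `rustc_hir::Target`) and simply mirrors the lookup logic so it can be run and poked at in isolation. `check_target` above maps a `Warn` result to the `InvalidTargetLint` lint and an `Error` result to the `InvalidTarget` hard error.

// Sketch only: trimmed-down stand-ins for rustc's `Target`, `Policy` and `AllowedTargets`.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Target { Fn, Struct, Static }

#[derive(PartialEq, Eq)]
enum Policy { Allow(Target), Warn(Target), Error(Target) }

enum AllowedTargets {
    // Anything not listed is an error.
    AllowList(&'static [Policy]),
    // Anything not explicitly allowed or denied only warns.
    AllowListWarnRest(&'static [Policy]),
}

#[derive(Debug, PartialEq, Eq)]
enum AllowedResult { Allowed, Warn, Error }

impl AllowedTargets {
    fn is_allowed(&self, target: Target) -> AllowedResult {
        match self {
            AllowedTargets::AllowList(list) => {
                if list.contains(&Policy::Allow(target)) {
                    AllowedResult::Allowed
                } else if list.contains(&Policy::Warn(target)) {
                    AllowedResult::Warn
                } else {
                    AllowedResult::Error
                }
            }
            AllowedTargets::AllowListWarnRest(list) => {
                if list.contains(&Policy::Allow(target)) {
                    AllowedResult::Allowed
                } else if list.contains(&Policy::Error(target)) {
                    AllowedResult::Error
                } else {
                    AllowedResult::Warn
                }
            }
        }
    }
}

fn main() {
    const STRICT: &[Policy] = &[Policy::Allow(Target::Fn), Policy::Warn(Target::Static)];
    const LENIENT: &[Policy] = &[Policy::Allow(Target::Fn), Policy::Error(Target::Static)];
    assert_eq!(AllowedTargets::AllowList(STRICT).is_allowed(Target::Fn), AllowedResult::Allowed);
    assert_eq!(AllowedTargets::AllowList(STRICT).is_allowed(Target::Static), AllowedResult::Warn);
    assert_eq!(AllowedTargets::AllowList(STRICT).is_allowed(Target::Struct), AllowedResult::Error);
    assert_eq!(AllowedTargets::AllowListWarnRest(LENIENT).is_allowed(Target::Struct), AllowedResult::Warn);
}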
+/// Does some heuristic-based filtering to remove uninteresting targets, and formats the targets into a string +pub(crate) fn allowed_targets_applied( + mut allowed_targets: Vec<Target>, + target: Target, + features: Option<&Features>, +) -> (Vec<String>, bool) { + // Remove unstable targets from `allowed_targets` if their features are not enabled + if let Some(features) = features { + if !features.fn_delegation() { + allowed_targets.retain(|t| !matches!(t, Target::Delegation { .. })); + } + if !features.stmt_expr_attributes() { + allowed_targets.retain(|t| !matches!(t, Target::Expression | Target::Statement)); + } + if !features.extern_types() { + allowed_targets.retain(|t| !matches!(t, Target::ForeignTy)); + } + } + + // We define groups of "similar" targets. + // If at least two of the targets are allowed, and the `target` is not in the group, + // we collapse the entire group to a single entry to simplify the target list + const FUNCTION_LIKE: &[Target] = &[ + Target::Fn, + Target::Closure, + Target::ForeignFn, + Target::Method(MethodKind::Inherent), + Target::Method(MethodKind::Trait { body: false }), + Target::Method(MethodKind::Trait { body: true }), + Target::Method(MethodKind::TraitImpl), + ]; + const METHOD_LIKE: &[Target] = &[ + Target::Method(MethodKind::Inherent), + Target::Method(MethodKind::Trait { body: false }), + Target::Method(MethodKind::Trait { body: true }), + Target::Method(MethodKind::TraitImpl), + ]; + const IMPL_LIKE: &[Target] = + &[Target::Impl { of_trait: false }, Target::Impl { of_trait: true }]; + const ADT_LIKE: &[Target] = &[Target::Struct, Target::Enum]; + + let mut added_fake_targets = Vec::new(); + filter_targets( + &mut allowed_targets, + FUNCTION_LIKE, + "functions", + target, + &mut added_fake_targets, + ); + filter_targets(&mut allowed_targets, METHOD_LIKE, "methods", target, &mut added_fake_targets); + filter_targets(&mut allowed_targets, IMPL_LIKE, "impl blocks", target, &mut added_fake_targets); + filter_targets(&mut allowed_targets, ADT_LIKE, "data types", target, &mut added_fake_targets); + + // If there is now only 1 target left, show that as the only possible target + ( + added_fake_targets + .iter() + .copied() + .chain(allowed_targets.iter().map(|t| t.plural_name())) + .map(|i| i.to_string()) + .collect(), + allowed_targets.len() + added_fake_targets.len() == 1, + ) +} + +fn filter_targets( + allowed_targets: &mut Vec<Target>, + target_group: &'static [Target], + target_group_name: &'static str, + target: Target, + added_fake_targets: &mut Vec<&'static str>, +) { + if target_group.contains(&target) { + return; + } + if allowed_targets.iter().filter(|at| target_group.contains(at)).count() < 2 { + return; + } + allowed_targets.retain(|t| !target_group.contains(t)); + added_fake_targets.push(target_group_name); +} + +/// This is the list of all targets to which a attribute can be applied +/// This is used for: +/// - `rustc_dummy`, which can be applied to all targets +/// - Attributes that are not parted to the new target system yet can use this list as a placeholder +pub(crate) const ALL_TARGETS: &'static [Policy] = { + use Policy::Allow; + &[ + Allow(Target::ExternCrate), + Allow(Target::Use), + Allow(Target::Static), + Allow(Target::Const), + Allow(Target::Fn), + Allow(Target::Closure), + Allow(Target::Mod), + Allow(Target::ForeignMod), + Allow(Target::GlobalAsm), + Allow(Target::TyAlias), + Allow(Target::Enum), + Allow(Target::Variant), + Allow(Target::Struct), + Allow(Target::Field), + Allow(Target::Union), + Allow(Target::Trait), + 
Allow(Target::TraitAlias), + Allow(Target::Impl { of_trait: false }), + Allow(Target::Impl { of_trait: true }), + Allow(Target::Expression), + Allow(Target::Statement), + Allow(Target::Arm), + Allow(Target::AssocConst), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::AssocTy), + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + Allow(Target::ForeignTy), + Allow(Target::MacroDef), + Allow(Target::Param), + Allow(Target::PatField), + Allow(Target::ExprField), + Allow(Target::WherePredicate), + Allow(Target::MacroCall), + Allow(Target::Crate), + Allow(Target::Delegation { mac: false }), + Allow(Target::Delegation { mac: true }), + ] +}; diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_attr_parsing/src/validate_attr.rs index 68ef6d6f32c..7a7624893bd 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_attr_parsing/src/validate_attr.rs @@ -8,16 +8,16 @@ use rustc_ast::{ self as ast, AttrArgs, Attribute, DelimArgs, MetaItem, MetaItemInner, MetaItemKind, NodeId, Path, Safety, }; -use rustc_attr_parsing::{AttributeParser, Late}; use rustc_errors::{Applicability, DiagCtxtHandle, FatalError, PResult}; use rustc_feature::{AttributeSafety, AttributeTemplate, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute}; +use rustc_parse::parse_in; use rustc_session::errors::report_lit_error; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{ILL_FORMED_ATTRIBUTE_INPUT, UNSAFE_ATTR_OUTSIDE_UNSAFE}; use rustc_session::parse::ParseSess; use rustc_span::{Span, Symbol, sym}; -use crate::{errors, parse_in}; +use crate::{AttributeParser, Late, session_diagnostics as errors}; pub fn check_attr(psess: &ParseSess, attr: &Attribute, id: NodeId) { if attr.is_doc_comment() || attr.has_name(sym::cfg_trace) || attr.has_name(sym::cfg_attr_trace) @@ -33,7 +33,10 @@ pub fn check_attr(psess: &ParseSess, attr: &Attribute, id: NodeId) { // Check input tokens for built-in and key-value attributes. match builtin_attr_info { // `rustc_dummy` doesn't have any restrictions specific to built-in attributes. - Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => { + Some(BuiltinAttribute { name, template, .. }) => { + if AttributeParser::<Late>::is_parsed_attribute(slice::from_ref(&name)) { + return; + } match parse_meta(psess, attr) { // Don't check safety again, we just did that Ok(meta) => { @@ -133,16 +136,6 @@ fn check_meta_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) { }); } -pub(super) fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) { - if let Delimiter::Parenthesis = delim { - return; - } - psess.dcx().emit_err(errors::CfgAttrBadDelim { - span: span.entire(), - sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close }, - }); -} - /// Checks that the given meta-item is compatible with this `AttributeTemplate`. 
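Back in `target_checking.rs`, the `filter_targets` helper collapses a whole group of related targets (for example the function-like ones) into a single label such as "functions" whenever at least two members of the group are allowed and the attribute was not applied to a member of that group. The following is a minimal, self-contained sketch of that heuristic, using `&str` stand-ins instead of the real `Target` enum:

// Sketch of the group-collapsing heuristic, with `&str` stand-ins for `Target`.
fn filter_targets(
    allowed: &mut Vec<&'static str>,
    group: &[&'static str],
    group_name: &'static str,
    applied_to: &'static str,
    collapsed: &mut Vec<&'static str>,
) {
    // Keep the group expanded if the attribute was applied to one of its members...
    if group.contains(&applied_to) {
        return;
    }
    // ...or if fewer than two of its members are allowed anyway.
    if allowed.iter().filter(|t| group.contains(t)).count() < 2 {
        return;
    }
    // Otherwise swap the individual members for one collective label.
    allowed.retain(|t| !group.contains(t));
    collapsed.push(group_name);
}

fn main() {
    const FUNCTION_LIKE: &[&str] = &["fn", "closure", "foreign fn", "inherent method"];
    let mut allowed = vec!["fn", "closure", "inherent method", "statics"];
    let mut collapsed = Vec::new();
    // An attribute wrongly applied to a struct: the three function-like entries
    // collapse into the single label "functions".
    filter_targets(&mut allowed, FUNCTION_LIKE, "functions", "struct", &mut collapsed);
    assert_eq!(allowed, ["statics"]);
    assert_eq!(collapsed, ["functions"]);
}

In the real helper the collapsed group names are chained with the remaining `Target::plural_name()` strings to build the `applied` list that appears in the `InvalidTarget` and `InvalidTargetLint` diagnostics.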
fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaItemKind) -> bool { let is_one_allowed_subword = |items: &[MetaItemInner]| match items { @@ -269,9 +262,6 @@ pub fn check_builtin_meta_item( ) { if !is_attr_template_compatible(&template, &meta.kind) { // attrs with new parsers are locally validated so excluded here - if AttributeParser::<Late>::is_parsed_attribute(slice::from_ref(&name)) { - return; - } emit_malformed_attribute(psess, style, meta.span, name, template); } diff --git a/compiler/rustc_baked_icu_data/Cargo.toml b/compiler/rustc_baked_icu_data/Cargo.toml index cb0e145386b..2f1ab7df379 100644 --- a/compiler/rustc_baked_icu_data/Cargo.toml +++ b/compiler/rustc_baked_icu_data/Cargo.toml @@ -5,9 +5,8 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -icu_list = "1.2" -icu_locid = "1.2" -icu_locid_transform = "1.3.2" -icu_provider = { version = "1.2", features = ["sync"] } -zerovec = "0.10.0" +icu_list = { version = "2.0", default-features = false } +icu_locale = { version = "2.0", default-features = false, features = ["compiled_data"] } +icu_provider = { version = "2.0", features = ["baked", "sync"] } +zerovec = "0.11.0" # tidy-alphabetical-end diff --git a/compiler/rustc_baked_icu_data/src/data/any.rs b/compiler/rustc_baked_icu_data/src/data/any.rs deleted file mode 100644 index 23028876676..00000000000 --- a/compiler/rustc_baked_icu_data/src/data/any.rs +++ /dev/null @@ -1,2 +0,0 @@ -// @generated -impl_any_provider!(BakedDataProvider); diff --git a/compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data b/compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data new file mode 100644 index 00000000000..1d60e0085fc --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data @@ -0,0 +1,71 @@ +// @generated +/// Implement `DataProvider<ListAndV1>` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +/// +/// Using this implementation will embed the following data in the binary's data segment: +/// * 179B for the lookup data structure (33 data identifiers) +/// * 4183B[^1] for the actual data (11 unique structs) +/// +/// [^1]: these numbers can be smaller in practice due to linker deduplication +/// +/// This macro requires the following crates: +/// * `icu_list` +/// * `icu_locale/compiled_data` +/// * `icu_provider` +/// * `icu_provider/baked` +/// * `zerovec` +#[doc(hidden)] +#[macro_export] +macro_rules! 
__impl_list_and_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.82"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.82"] + impl $provider { + const DATA_LIST_AND_V1: icu_provider::baked::zerotrie::Data<icu_list::provider::ListAndV1> = { + const TRIE: icu_provider::baked::zerotrie::ZeroTrieSimpleAscii<&'static [u8]> = icu_provider::baked::zerotrie::ZeroTrieSimpleAscii { store: b"\xC8efijprtz\x18#.9DOZ\xC2ns\n\x1E\xC3NSW\x01\x02\x80\x85\x8A\x1E\xC3NSW\x01\x02\x81\x81\x81r\x1E\xC3NSW\x01\x02\x80\x86\x86t\x1E\xC3NSW\x01\x02\x82\x82\x82a\x1E\xC3NSW\x01\x02\x83\x83\x83t\x1E\xC3NSW\x01\x02\x80\x82\x82u\x1E\xC3NSW\x01\x02\x80\x87\x87r\x1E\xC3NSW\x01\x02\x80\x88\x88h\xC2\x1E-\t\xC3NSW\x01\x02\x83\x89\x89Han\xC2st\n\x1E\xC3NSW\x01\x02\x83\x89\x89\x1E\xC3NSW\x01\x02\x84\x89\x89" }; + const VALUES: &'static [<icu_list::provider::ListAndV1 as icu_provider::baked::zerotrie::DynamicDataMarker>::DataStruct] = &[icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" y ") }, 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x02\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\0\0\0\0\0\0\0\0\0\0\0\0\x03\0\0\x04\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\x01\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x06\0\0\0\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF`\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\x12\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x02\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\x80\x01\0\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\x05\x05\x05\x06\x06\x0C\x0C\r\r\0\0\0\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\x02\0\x1B\0\0\0\0\0\x12\0\0\0\x12\0\0\x03\x06\x06\r\r\0\0\0\0\0h\0\0\0h\0\0\0\0\0\0\x0E\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\x01\n\0\0\x01\x19\0\0\0\x12\0\0\x02\x0F\x11\0\0\0\0\0D\0\0\0\0\0\0\x02\x11\x11\0\0\0\0\0\xBF\0\0\0\0\0\0\x02\x0F\x11\0\0\0\0\0\xBF\0\0\0\0\0\0\x02\x0F\x10\0\0\0\0\0\xBF\0\0\0\0\0\0\x02\x10\x11\0\0\0\0\0\xDD\0\0\0\0\0\0\x02\x0F\x11\0\0\0\0\0\xDD\0\0\0\0\0\0\x02\x0F\x0F\0\0\0\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\0\0\0\0\0\0\0\0\0\x03\0\0\x04\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\x01\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x06\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\0\0\0`\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\x12\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" e ") }, 3u8) }) }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" e ") }, 3u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, end: 
icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE5\x92\x8C") }, 3u8), special_case: None }, pair: Some(icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), special_case: None }) }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", & ") }, 4u8), special_case: None }, pair: Some(icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" & ") }, 3u8), special_case: None }) }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" et ") }, 4u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" \xD0\xB8 ") }, 4u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" ve ") }, 4u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE5\x92\x8C") }, 3u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { 
zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", and ") }, 6u8), special_case: None }, pair: Some(icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" and ") }, 5u8), special_case: None }) }]; + unsafe { icu_provider::baked::zerotrie::Data::from_trie_and_values_unchecked(TRIE, VALUES) } + }; + } + #[clippy::msrv = "1.82"] + impl icu_provider::DataProvider<icu_list::provider::ListAndV1> for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_list::provider::ListAndV1>, icu_provider::DataError> { + let mut metadata = icu_provider::DataResponseMetadata::default(); + let payload = if let Some(payload) = icu_provider::baked::DataStore::get(&Self::DATA_LIST_AND_V1, req.id, req.metadata.attributes_prefix_match) { + payload + } else { + const FALLBACKER: icu_locale::fallback::LocaleFallbackerWithConfig<'static> = icu_locale::fallback::LocaleFallbacker::new().for_config(<icu_list::provider::ListAndV1 as icu_provider::DataMarker>::INFO.fallback_config); + let mut fallback_iterator = FALLBACKER.fallback_for(req.id.locale.clone()); + loop { + if let Some(payload) = icu_provider::baked::DataStore::get(&Self::DATA_LIST_AND_V1, icu_provider::DataIdentifierBorrowed::for_marker_attributes_and_locale(req.id.marker_attributes, fallback_iterator.get()), req.metadata.attributes_prefix_match) { + metadata.locale = Some(fallback_iterator.take()); + break payload; + } + if fallback_iterator.get().is_unknown() { + return Err(icu_provider::DataErrorKind::IdentifierNotFound.with_req(<icu_list::provider::ListAndV1 as icu_provider::DataMarker>::INFO, req)); + } + fallback_iterator.step(); + } + }; + Ok(icu_provider::DataResponse { payload, metadata }) + } + } + }; + ($ provider : ty , ITER) => { + __impl_list_and_v1!($provider); + #[clippy::msrv = "1.82"] + impl icu_provider::IterableDataProvider<icu_list::provider::ListAndV1> for $provider { + fn iter_ids(&self) -> Result<std::collections::BTreeSet<icu_provider::DataIdentifierCow<'static>>, icu_provider::DataError> { + Ok(icu_provider::baked::DataStore::iter(&Self::DATA_LIST_AND_V1).collect()) + } + } + }; + ($ provider : ty , DRY) => {}; + ($ provider : ty , DRY , ITER) => { + __impl_list_and_v1!($provider, ITER); + }; +} +#[doc(inline)] +pub use __impl_list_and_v1 as impl_list_and_v1; diff --git a/compiler/rustc_baked_icu_data/src/data/macros.rs b/compiler/rustc_baked_icu_data/src/data/macros.rs deleted file mode 100644 index bee309f9b81..00000000000 --- a/compiler/rustc_baked_icu_data/src/data/macros.rs +++ /dev/null @@ -1,46 +0,0 @@ -// @generated -/// Marks a type as a data provider. You can then use macros like -/// `impl_core_helloworld_v1` to add implementations. -/// -/// ```ignore -/// struct MyProvider; -/// const _: () = { -/// include!("path/to/generated/macros.rs"); -/// make_provider!(MyProvider); -/// impl_core_helloworld_v1!(MyProvider); -/// } -/// ``` -#[doc(hidden)] -#[macro_export] -macro_rules! 
__make_provider { - ($ name : ty) => { - #[clippy::msrv = "1.66"] - impl $name { - #[doc(hidden)] - #[allow(dead_code)] - pub const MUST_USE_MAKE_PROVIDER_MACRO: () = (); - } - }; -} -#[doc(inline)] -pub use __make_provider as make_provider; -#[macro_use] -#[path = "macros/fallback_likelysubtags_v1.data.rs"] -mod fallback_likelysubtags_v1; -#[doc(inline)] -pub use __impl_fallback_likelysubtags_v1 as impl_fallback_likelysubtags_v1; -#[macro_use] -#[path = "macros/fallback_parents_v1.data.rs"] -mod fallback_parents_v1; -#[doc(inline)] -pub use __impl_fallback_parents_v1 as impl_fallback_parents_v1; -#[macro_use] -#[path = "macros/fallback_supplement_co_v1.data.rs"] -mod fallback_supplement_co_v1; -#[doc(inline)] -pub use __impl_fallback_supplement_co_v1 as impl_fallback_supplement_co_v1; -#[macro_use] -#[path = "macros/list_and_v1.data.rs"] -mod list_and_v1; -#[doc(inline)] -pub use __impl_list_and_v1 as impl_list_and_v1; diff --git a/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs deleted file mode 100644 index 1adb58743f7..00000000000 --- a/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs +++ /dev/null @@ -1,40 +0,0 @@ -// @generated -/// Implement `DataProvider<LocaleFallbackLikelySubtagsV1Marker>` on the given struct using the data -/// hardcoded in this file. This allows the struct to be used with -/// `icu`'s `_unstable` constructors. -#[doc(hidden)] -#[macro_export] -macro_rules! __impl_fallback_likelysubtags_v1 { - ($ provider : ty) => { - #[clippy::msrv = "1.66"] - const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; - #[clippy::msrv = "1.66"] - impl $provider { - #[doc(hidden)] - pub const SINGLETON_FALLBACK_LIKELYSUBTAGS_V1: &'static <icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1 { - l2s: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"am\0ar\0as\0be\0bg\0bgcbhobn\0brxchrcv\0doiel\0fa\0gu\0he\0hi\0hy\0ja\0ka\0kk\0km\0kn\0ko\0kokks\0ky\0lo\0maimk\0ml\0mn\0mnimr\0my\0ne\0or\0pa\0ps\0rajru\0sa\0satsd\0si\0sr\0ta\0te\0tg\0th\0ti\0tt\0uk\0ur\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"EthiArabBengCyrlCyrlDevaDevaBengDevaCherCyrlDevaGrekArabGujrHebrDevaArmnJpanGeorCyrlKhmrKndaKoreDevaArabCyrlLaooDevaCyrlMlymCyrlBengDevaMymrDevaOryaGuruArabDevaCyrlDevaOlckArabSinhCyrlTamlTeluCyrlThaiEthiCyrlCyrlArabHantHans") }) - }, - lr2s: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0ha\0kk\0ky\0mn\0ms\0pa\0sd\0sr\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x03\0\0\0\x05\0\0\0\t\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x0F\0\0\0\x13\0\0\0\x14\0\0\0\x16\0\0\0\x17\0\0\0&\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IQ\0IR\0RU\0CM\0SD\0AF\0CN\0IR\0MN\0CN\0TR\0CN\0CC\0PK\0IN\0ME\0RO\0RU\0TR\0PK\0AF\0CN\0CN\0AU\0BN\0GB\0GF\0HK\0ID\0MO\0PA\0PF\0PH\0SR\0TH\0TW\0US\0VN\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabArabCyrlArabArabArabArabArabArabArabLatnMongArabArabDevaLatnLatnLatnLatnArabArabCyrlHansHantHantHantHantHantHantHantHantHantHantHantHantHantHantHant") }) - }, - l2r: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap::from_parts_unchecked(unsafe { 
zerovec::ZeroVec::from_bytes_unchecked(b"af\0am\0ar\0as\0astaz\0be\0bg\0bgcbhobn\0br\0brxbs\0ca\0cebchrcs\0cv\0cy\0da\0de\0doidsbel\0en\0es\0et\0eu\0fa\0ff\0fi\0filfo\0fr\0ga\0gd\0gl\0gu\0ha\0he\0hi\0hr\0hsbhu\0hy\0ia\0id\0ig\0is\0it\0ja\0jv\0ka\0keakgpkk\0km\0kn\0ko\0kokks\0ky\0lo\0lt\0lv\0maimi\0mk\0ml\0mn\0mnimr\0ms\0my\0ne\0nl\0nn\0no\0or\0pa\0pcmpl\0ps\0pt\0qu\0rajrm\0ro\0ru\0sa\0satsc\0sd\0si\0sk\0sl\0so\0sq\0sr\0su\0sv\0sw\0ta\0te\0tg\0th\0ti\0tk\0to\0tr\0tt\0uk\0ur\0uz\0vi\0wo\0xh\0yo\0yrlyuezh\0zu\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ZA\0ET\0EG\0IN\0ES\0AZ\0BY\0BG\0IN\0IN\0BD\0FR\0IN\0BA\0ES\0PH\0US\0CZ\0RU\0GB\0DK\0DE\0IN\0DE\0GR\0US\0ES\0EE\0ES\0IR\0SN\0FI\0PH\0FO\0FR\0IE\0GB\0ES\0IN\0NG\0IL\0IN\0HR\0DE\0HU\0AM\x00001ID\0NG\0IS\0IT\0JP\0ID\0GE\0CV\0BR\0KZ\0KH\0IN\0KR\0IN\0IN\0KG\0LA\0LT\0LV\0IN\0NZ\0MK\0IN\0MN\0IN\0IN\0MY\0MM\0NP\0NL\0NO\0NO\0IN\0IN\0NG\0PL\0AF\0BR\0PE\0IN\0CH\0RO\0RU\0IN\0IN\0IT\0PK\0LK\0SK\0SI\0SO\0AL\0RS\0ID\0SE\0TZ\0IN\0IN\0TJ\0TH\0ET\0TM\0TO\0TR\0RU\0UA\0PK\0UZ\0VN\0SN\0ZA\0NG\0BR\0HK\0CN\0ZA\0") }) - }, - ls2r: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0en\0ff\0kk\0ky\0mn\0pa\0sd\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x01\0\0\0\x02\0\0\0\x03\0\0\0\x04\0\0\0\x06\0\0\0\x07\0\0\0\x08\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x11\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabShawAdlmArabArabLatnMongArabDevaKhojSindArabArabHansBopoHanbHant") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IR\0GB\0GN\0CN\0CN\0TR\0CN\0PK\0IN\0IN\0IN\0PK\0AF\0CN\0TW\0TW\0TW\0") }) - }, - }; - } - #[clippy::msrv = "1.66"] - impl icu_provider::DataProvider<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker> for $provider { - fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker>, icu_provider::DataError> { - if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_LIKELYSUBTAGS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) } - } - } - }; -} diff --git a/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs deleted file mode 100644 index 6f8d6590b08..00000000000 --- a/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs +++ /dev/null @@ -1,28 +0,0 @@ -// @generated -/// Implement `DataProvider<LocaleFallbackParentsV1Marker>` on the given struct using the data -/// hardcoded in this file. This allows the struct to be used with -/// `icu`'s `_unstable` constructors. -#[doc(hidden)] -#[macro_export] -macro_rules! 
__impl_fallback_parents_v1 { - ($ provider : ty) => { - #[clippy::msrv = "1.66"] - const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; - #[clippy::msrv = "1.66"] - impl $provider { - #[doc(hidden)] - pub const SINGLETON_FALLBACK_PARENTS_V1: &'static <icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackParentsV1 { - parents: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x84\0\0\0\0\0\x06\0\x0B\0\x10\0\x15\0\x1A\0\x1F\0$\0)\0.\x003\08\0=\0B\0G\0L\0Q\0V\0[\0`\0e\0j\0o\0t\0y\0~\0\x83\0\x88\0\x8D\0\x92\0\x97\0\x9C\0\xA1\0\xA6\0\xAB\0\xB0\0\xB5\0\xBA\0\xBF\0\xC4\0\xC9\0\xCE\0\xD3\0\xD8\0\xDD\0\xE2\0\xE7\0\xEC\0\xF1\0\xF6\0\xFB\0\0\x01\x05\x01\n\x01\x0F\x01\x14\x01\x19\x01\x1E\x01#\x01(\x01-\x012\x017\x01<\x01A\x01F\x01K\x01P\x01U\x01Z\x01_\x01d\x01i\x01n\x01s\x01x\x01}\x01\x82\x01\x87\x01\x8C\x01\x91\x01\x96\x01\x9B\x01\xA0\x01\xA5\x01\xAA\x01\xAF\x01\xB4\x01\xB9\x01\xBE\x01\xC3\x01\xC8\x01\xCD\x01\xD2\x01\xD7\x01\xDC\x01\xE1\x01\xE6\x01\xEB\x01\xF0\x01\xF5\x01\xFA\x01\xFF\x01\x04\x02\t\x02\x0E\x02\x13\x02\x18\x02\x1D\x02\"\x02'\x02,\x021\x026\x02;\x02@\x02G\x02I\x02K\x02M\x02R\x02W\x02\\\x02a\x02f\x02k\x02p\x02u\x02z\x02\x7F\x02\x84\x02\x89\x02en-150en-AGen-AIen-ATen-AUen-BBen-BEen-BMen-BSen-BWen-BZen-CCen-CHen-CKen-CMen-CXen-CYen-DEen-DGen-DKen-DMen-ERen-FIen-FJen-FKen-FMen-GBen-GDen-GGen-GHen-GIen-GMen-GYen-HKen-IEen-ILen-IMen-INen-IOen-JEen-JMen-KEen-KIen-KNen-KYen-LCen-LRen-LSen-MGen-MOen-MSen-MTen-MUen-MVen-MWen-MYen-NAen-NFen-NGen-NLen-NRen-NUen-NZen-PGen-PKen-PNen-PWen-RWen-SBen-SCen-SDen-SEen-SGen-SHen-SIen-SLen-SSen-SXen-SZen-TCen-TKen-TOen-TTen-TVen-TZen-UGen-VCen-VGen-VUen-WSen-ZAen-ZMen-ZWes-ARes-BOes-BRes-BZes-CLes-COes-CRes-CUes-DOes-ECes-GTes-HNes-MXes-NIes-PAes-PEes-PRes-PYes-SVes-USes-UYes-VEhi-Latnhtnbnnno-NOpt-AOpt-CHpt-CVpt-FRpt-GQpt-GWpt-LUpt-MOpt-MZpt-STpt-TLzh-Hant-MO") }, unsafe { 
zerovec::ZeroVec::from_bytes_unchecked(b"en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419en\0\0\0\0\0\0\x01IN\0fr\0\0\0\0\0\0\x01HT\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0zh\0\x01Hant\x01HK\0") }) - }, - }; - } - #[clippy::msrv = "1.66"] - impl icu_provider::DataProvider<icu_locid_transform::provider::LocaleFallbackParentsV1Marker> for $provider { - fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::LocaleFallbackParentsV1Marker>, icu_provider::DataError> { - if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_PARENTS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) } - } - } - }; -} diff 
--git a/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs deleted file mode 100644 index 02eec37ee09..00000000000 --- a/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs +++ /dev/null @@ -1,32 +0,0 @@ -// @generated -/// Implement `DataProvider<CollationFallbackSupplementV1Marker>` on the given struct using the data -/// hardcoded in this file. This allows the struct to be used with -/// `icu`'s `_unstable` constructors. -#[doc(hidden)] -#[macro_export] -macro_rules! __impl_fallback_supplement_co_v1 { - ($ provider : ty) => { - #[clippy::msrv = "1.66"] - const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; - #[clippy::msrv = "1.66"] - impl $provider { - #[doc(hidden)] - pub const SINGLETON_FALLBACK_SUPPLEMENT_CO_V1: &'static <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackSupplementV1 { - parents: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x01\0\0\0\0\0yue") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"zh\0\x01Hant\0\0\0\0") }) - }, - unicode_extension_defaults: unsafe { - #[allow(unused_unsafe)] - zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"co") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x02\0\0\0") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x02\0zhzh-Hant") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x06\0pinyinstroke") }) - }, - }; - } - #[clippy::msrv = "1.66"] - impl icu_provider::DataProvider<icu_locid_transform::provider::CollationFallbackSupplementV1Marker> for $provider { - fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>, icu_provider::DataError> { - if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_SUPPLEMENT_CO_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) } - } - } - }; -} diff --git a/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs deleted file mode 100644 index 186f706cdb2..00000000000 --- a/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs +++ /dev/null @@ -1,35 +0,0 @@ -// @generated -/// Implement `DataProvider<AndListV1Marker>` on the given struct using the data -/// hardcoded in this file. This allows the struct to be used with -/// `icu`'s `_unstable` constructors. -#[doc(hidden)] -#[macro_export] -macro_rules! 
__impl_list_and_v1 { - ($ provider : ty) => { - #[clippy::msrv = "1.66"] - const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; - #[clippy::msrv = "1.66"] - impl icu_provider::DataProvider<icu_list::provider::AndListV1Marker> for $provider { - fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_list::provider::AndListV1Marker>, icu_provider::DataError> { - static EN_001: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static EN_IN: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 
5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static IT: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }]); - static PT: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static FR: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static TR: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static ES: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\
0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }]); - static RU: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static UND: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static EN: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); - static HI_LATN: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }]); - static JA: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]); - static ZH_HK: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }]); - static ZH: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, 
icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]); - static ZH_HANT: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }]); - static VALUES: [&<icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable; 215usize] = [&EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_IN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, 
&EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &HI_LATN, &IT, &IT, &IT, &IT, &JA, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &RU, &RU, &RU, &RU, &RU, &RU, &TR, &TR, &UND, &ZH, &ZH_HK, &ZH, &ZH, &ZH, &ZH_HANT, &ZH_HK, &ZH]; - static KEYS: [&str; 215usize] = ["en", "en-001", "en-150", "en-AE", "en-AG", "en-AI", "en-AS", "en-AT", "en-AU", "en-BB", "en-BE", "en-BI", "en-BM", "en-BS", "en-BW", "en-BZ", "en-CA", "en-CC", "en-CH", "en-CK", "en-CM", "en-CX", "en-CY", "en-DE", "en-DG", "en-DK", "en-DM", "en-ER", "en-FI", "en-FJ", "en-FK", "en-FM", "en-GB", "en-GD", "en-GG", "en-GH", "en-GI", "en-GM", "en-GU", "en-GY", "en-HK", "en-IE", "en-IL", "en-IM", "en-IN", "en-IO", "en-JE", "en-JM", "en-KE", "en-KI", "en-KN", "en-KY", "en-LC", "en-LR", "en-LS", "en-MG", "en-MH", "en-MO", "en-MP", "en-MS", "en-MT", "en-MU", "en-MV", "en-MW", "en-MY", "en-NA", "en-NF", "en-NG", "en-NL", "en-NR", "en-NU", "en-NZ", "en-PG", "en-PH", "en-PK", "en-PN", "en-PR", "en-PW", "en-RW", "en-SB", "en-SC", "en-SD", "en-SE", "en-SG", "en-SH", "en-SI", "en-SL", "en-SS", "en-SX", "en-SZ", "en-TC", "en-TK", "en-TO", "en-TT", "en-TV", "en-TZ", "en-UG", "en-UM", "en-VC", "en-VG", "en-VI", "en-VU", "en-WS", "en-ZA", "en-ZM", "en-ZW", "es", "es-419", "es-AR", "es-BO", "es-BR", "es-BZ", "es-CL", "es-CO", "es-CR", "es-CU", "es-DO", "es-EA", "es-EC", "es-GQ", "es-GT", "es-HN", "es-IC", "es-MX", "es-NI", "es-PA", "es-PE", "es-PH", "es-PR", "es-PY", "es-SV", "es-US", "es-UY", "es-VE", "fr", "fr-BE", "fr-BF", "fr-BI", "fr-BJ", "fr-BL", "fr-CA", "fr-CD", "fr-CF", "fr-CG", "fr-CH", "fr-CI", "fr-CM", "fr-DJ", "fr-DZ", "fr-GA", "fr-GF", "fr-GN", "fr-GP", "fr-GQ", "fr-HT", "fr-KM", "fr-LU", "fr-MA", "fr-MC", "fr-MF", "fr-MG", "fr-ML", "fr-MQ", "fr-MR", "fr-MU", "fr-NC", "fr-NE", "fr-PF", "fr-PM", "fr-RE", "fr-RW", "fr-SC", "fr-SN", "fr-SY", "fr-TD", "fr-TG", "fr-TN", "fr-VU", "fr-WF", "fr-YT", "hi-Latn", "it", "it-CH", "it-SM", "it-VA", "ja", "pt", "pt-AO", "pt-CH", "pt-CV", "pt-GQ", "pt-GW", "pt-LU", "pt-MO", "pt-MZ", "pt-PT", "pt-ST", "pt-TL", "ru", "ru-BY", "ru-KG", "ru-KZ", "ru-MD", "ru-UA", "tr", "tr-CY", "und", "zh", "zh-HK", "zh-Hans", "zh-Hans-HK", "zh-Hans-MO", "zh-Hant", "zh-MO", "zh-SG"]; - if let Ok(payload) = KEYS.binary_search_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(payload)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::MissingLocale.with_req(<icu_list::provider::AndListV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) } - } - } - }; -} diff --git a/compiler/rustc_baked_icu_data/src/data/mod.rs b/compiler/rustc_baked_icu_data/src/data/mod.rs index 465689f0cb8..3146188a8e7 100644 --- a/compiler/rustc_baked_icu_data/src/data/mod.rs +++ b/compiler/rustc_baked_icu_data/src/data/mod.rs @@ -1,31 +1,40 @@ // @generated -include!("macros.rs"); -macro_rules! 
impl_data_provider { - ($ provider : ty) => { - make_provider!($provider); - impl_fallback_likelysubtags_v1!($provider); - impl_fallback_parents_v1!($provider); - impl_fallback_supplement_co_v1!($provider); - impl_list_and_v1!($provider); +include!("list_and_v1.rs.data"); +/// Marks a type as a data provider. You can then use macros like +/// `impl_core_helloworld_v1` to add implementations. +/// +/// ```ignore +/// struct MyProvider; +/// const _: () = { +/// include!("path/to/generated/macros.rs"); +/// make_provider!(MyProvider); +/// impl_core_helloworld_v1!(MyProvider); +/// } +/// ``` +#[doc(hidden)] +#[macro_export] +macro_rules! __make_provider { + ($ name : ty) => { + #[clippy::msrv = "1.82"] + impl $name { + #[allow(dead_code)] + pub(crate) const MUST_USE_MAKE_PROVIDER_MACRO: () = (); + } + icu_provider::marker::impl_data_provider_never_marker!($name); }; } +#[doc(inline)] +pub use __make_provider as make_provider; +/// This macro requires the following crates: +/// * `icu_list` +/// * `icu_locale/compiled_data` +/// * `icu_provider` +/// * `icu_provider/baked` +/// * `zerovec` #[allow(unused_macros)] -macro_rules! impl_any_provider { +macro_rules! impl_data_provider { ($ provider : ty) => { - #[clippy::msrv = "1.66"] - impl icu_provider::AnyProvider for $provider { - fn load_any(&self, key: icu_provider::DataKey, req: icu_provider::DataRequest) -> Result<icu_provider::AnyResponse, icu_provider::DataError> { - match key.hashed() { - h if h == <icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), - h if h == <icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::LocaleFallbackParentsV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), - h if h == <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), - h if h == <icu_list::provider::AndListV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_list::provider::AndListV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), - _ => Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req)), - } - } - } + make_provider!($provider); + impl_list_and_v1!($provider); }; } -#[clippy::msrv = "1.66"] -pub struct BakedDataProvider; -impl_data_provider!(BakedDataProvider); diff --git a/compiler/rustc_baked_icu_data/src/lib.rs b/compiler/rustc_baked_icu_data/src/lib.rs index f3f6522f575..ea4c8242c62 100644 --- a/compiler/rustc_baked_icu_data/src/lib.rs +++ b/compiler/rustc_baked_icu_data/src/lib.rs @@ -14,10 +14,9 @@ //! To regenerate the data, run this command: //! //! ```text -//! icu4x-datagen -W --pretty --fingerprint --use-separate-crates \ -//! --format mod -l en es fr it ja pt ru tr zh zh-Hans zh-Hant \ -//! -k list/and@1 fallback/likelysubtags@1 fallback/parents@1 fallback/supplement/co@1 \ -//! --cldr-tag latest --icuexport-tag latest -o src/data +//! icu4x-datagen -W --pretty --use-separate-crates \ +//! 
--format baked --locales @en @es @fr @it @ja @pt @ru @tr @zh @zh-Hans @zh-Hant \ +//! -m ListAndV1 -o src/data //! ``` // tidy-alphabetical-start @@ -29,26 +28,26 @@ // #![warn(unreachable_pub)] // don't use because this crate is mostly generated code // tidy-alphabetical-end -mod data { - include!("data/mod.rs"); - include!("data/any.rs"); -} +pub struct BakedDataProvider; -pub use data::BakedDataProvider; +include!("data/mod.rs"); +const _: () = { + impl_data_provider!(BakedDataProvider); +}; pub const fn baked_data_provider() -> BakedDataProvider { - data::BakedDataProvider + BakedDataProvider } pub mod supported_locales { - pub const EN: icu_locid::Locale = icu_locid::locale!("en"); - pub const ES: icu_locid::Locale = icu_locid::locale!("es"); - pub const FR: icu_locid::Locale = icu_locid::locale!("fr"); - pub const IT: icu_locid::Locale = icu_locid::locale!("it"); - pub const JA: icu_locid::Locale = icu_locid::locale!("ja"); - pub const PT: icu_locid::Locale = icu_locid::locale!("pt"); - pub const RU: icu_locid::Locale = icu_locid::locale!("ru"); - pub const TR: icu_locid::Locale = icu_locid::locale!("tr"); - pub const ZH_HANS: icu_locid::Locale = icu_locid::locale!("zh-Hans"); - pub const ZH_HANT: icu_locid::Locale = icu_locid::locale!("zh-Hant"); + pub const EN: icu_locale::Locale = icu_locale::locale!("en"); + pub const ES: icu_locale::Locale = icu_locale::locale!("es"); + pub const FR: icu_locale::Locale = icu_locale::locale!("fr"); + pub const IT: icu_locale::Locale = icu_locale::locale!("it"); + pub const JA: icu_locale::Locale = icu_locale::locale!("ja"); + pub const PT: icu_locale::Locale = icu_locale::locale!("pt"); + pub const RU: icu_locale::Locale = icu_locale::locale!("ru"); + pub const TR: icu_locale::Locale = icu_locale::locale!("tr"); + pub const ZH_HANS: icu_locale::Locale = icu_locale::locale!("zh-Hans"); + pub const ZH_HANT: icu_locale::Locale = icu_locale::locale!("zh-Hant"); } diff --git a/compiler/rustc_borrowck/Cargo.toml b/compiler/rustc_borrowck/Cargo.toml index 9e7d55180a2..3162e9da1ba 100644 --- a/compiler/rustc_borrowck/Cargo.toml +++ b/compiler/rustc_borrowck/Cargo.toml @@ -6,7 +6,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start either = "1.5.0" -itertools = "0.12" +itertools.workspace = true polonius-engine = "0.13.0" rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } @@ -25,5 +25,5 @@ rustc_span = { path = "../rustc_span" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_traits = { path = "../rustc_traits" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_borrowck/messages.ftl b/compiler/rustc_borrowck/messages.ftl index 33b80c4b03d..f59e106c7ac 100644 --- a/compiler/rustc_borrowck/messages.ftl +++ b/compiler/rustc_borrowck/messages.ftl @@ -90,7 +90,7 @@ borrowck_lifetime_constraints_error = lifetime may not live long enough borrowck_limitations_implies_static = - due to current limitations in the borrow checker, this implies a `'static` lifetime + due to a current limitation of the type system, this implies a `'static` lifetime borrowck_move_closure_suggestion = consider adding 'move' keyword before the nested closure diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs index 57db2e9fb57..d3f6c01ab8c 100644 --- a/compiler/rustc_borrowck/src/dataflow.rs +++ b/compiler/rustc_borrowck/src/dataflow.rs @@ -1,7 +1,6 @@ use 
std::fmt; use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::graph; use rustc_index::bit_set::{DenseBitSet, MixedBitSet}; use rustc_middle::mir::{ self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges, @@ -317,9 +316,8 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { loans_out_of_scope_at_location: FxIndexMap::default(), }; for (loan_idx, loan_data) in borrow_set.iter_enumerated() { - let issuing_region = loan_data.region; let loan_issued_at = loan_data.reserve_location; - prec.precompute_loans_out_of_scope(loan_idx, issuing_region, loan_issued_at); + prec.precompute_loans_out_of_scope(loan_idx, loan_issued_at); } prec.loans_out_of_scope_at_location @@ -328,45 +326,7 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { /// Loans are in scope while they are live: whether they are contained within any live region. /// In the location-insensitive analysis, a loan will be contained in a region if the issuing /// region can reach it in the subset graph. So this is a reachability problem. - fn precompute_loans_out_of_scope( - &mut self, - loan_idx: BorrowIndex, - issuing_region: RegionVid, - loan_issued_at: Location, - ) { - let sccs = self.regioncx.constraint_sccs(); - let universal_regions = self.regioncx.universal_regions(); - - // The loop below was useful for the location-insensitive analysis but shouldn't be - // impactful in the location-sensitive case. It seems that it does, however, as without it a - // handful of tests fail. That likely means some liveness or outlives data related to choice - // regions is missing - // FIXME: investigate the impact of loans traversing applied member constraints and why some - // tests fail otherwise. - // - // We first handle the cases where the loan doesn't go out of scope, depending on the - // issuing region's successors. - for successor in graph::depth_first_search(&self.regioncx.region_graph(), issuing_region) { - // Via applied member constraints - // - // The issuing region can flow into the choice regions, and they are either: - // - placeholders or free regions themselves, - // - or also transitively outlive a free region. - // - // That is to say, if there are applied member constraints here, the loan escapes the - // function and cannot go out of scope. We could early return here. - // - // For additional insurance via fuzzing and crater, we verify that the constraint's min - // choice indeed escapes the function. In the future, we could e.g. turn this check into - // a debug assert and early return as an optimization. 
- let scc = sccs.scc(successor); - for constraint in self.regioncx.applied_member_constraints(scc) { - if universal_regions.is_universal_region(constraint.min_choice) { - return; - } - } - } - + fn precompute_loans_out_of_scope(&mut self, loan_idx: BorrowIndex, loan_issued_at: Location) { let first_block = loan_issued_at.block; let first_bb_data = &self.body.basic_blocks[first_block]; diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index fdca6b56540..fda96dde826 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -565,7 +565,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { let (blame_constraint, path) = self.regioncx.best_blame_constraint( borrow_region, NllRegionVariableOrigin::FreeRegion, - |r| self.regioncx.provides_universal_region(r, borrow_region, outlived_region), + outlived_region, ); let BlameConstraint { category, from_closure, cause, .. } = blame_constraint; diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs index b67dba3af96..5642cdf87fd 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mod.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs @@ -6,7 +6,9 @@ use rustc_abi::{FieldIdx, VariantIdx}; use rustc_data_structures::fx::FxIndexMap; use rustc_errors::{Applicability, Diag, EmissionGuarantee, MultiSpan, listify}; use rustc_hir::def::{CtorKind, Namespace}; -use rustc_hir::{self as hir, CoroutineKind, LangItem}; +use rustc_hir::{ + self as hir, CoroutineKind, GenericBound, LangItem, WhereBoundPredicate, WherePredicateKind, +}; use rustc_index::{IndexSlice, IndexVec}; use rustc_infer::infer::{BoundRegionConversionTime, NllRegionVariableOrigin}; use rustc_infer::traits::SelectionError; @@ -658,25 +660,66 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { /// Add a note to region errors and borrow explanations when higher-ranked regions in predicates /// implicitly introduce an "outlives `'static`" constraint. + /// + /// This is very similar to `fn suggest_static_lifetime_for_gat_from_hrtb` which handles this + /// note for failed type tests instead of outlives errors. fn add_placeholder_from_predicate_note<G: EmissionGuarantee>( &self, - err: &mut Diag<'_, G>, + diag: &mut Diag<'_, G>, path: &[OutlivesConstraint<'tcx>], ) { - let predicate_span = path.iter().find_map(|constraint| { + let tcx = self.infcx.tcx; + let Some((gat_hir_id, generics)) = path.iter().find_map(|constraint| { let outlived = constraint.sub; if let Some(origin) = self.regioncx.definitions.get(outlived) - && let NllRegionVariableOrigin::Placeholder(_) = origin.origin - && let ConstraintCategory::Predicate(span) = constraint.category + && let NllRegionVariableOrigin::Placeholder(placeholder) = origin.origin + && let Some(id) = placeholder.bound.kind.get_id() + && let Some(placeholder_id) = id.as_local() + && let gat_hir_id = tcx.local_def_id_to_hir_id(placeholder_id) + && let Some(generics_impl) = + tcx.parent_hir_node(tcx.parent_hir_id(gat_hir_id)).generics() { - Some(span) + Some((gat_hir_id, generics_impl)) } else { None } - }); + }) else { + return; + }; - if let Some(span) = predicate_span { - err.span_note(span, "due to current limitations in the borrow checker, this implies a `'static` lifetime"); + // Look for the where-bound which introduces the placeholder. + // As we're using the HIR, we need to handle both `for<'a> T: Trait<'a>` + // and `T: for<'a> Trait<'a>`.
+ for pred in generics.predicates { + let WherePredicateKind::BoundPredicate(WhereBoundPredicate { + bound_generic_params, + bounds, + .. + }) = pred.kind + else { + continue; + }; + if bound_generic_params + .iter() + .rfind(|bgp| tcx.local_def_id_to_hir_id(bgp.def_id) == gat_hir_id) + .is_some() + { + diag.span_note(pred.span, fluent::borrowck_limitations_implies_static); + return; + } + for bound in bounds.iter() { + if let GenericBound::Trait(bound) = bound { + if bound + .bound_generic_params + .iter() + .rfind(|bgp| tcx.local_def_id_to_hir_id(bgp.def_id) == gat_hir_id) + .is_some() + { + diag.span_note(bound.span, fluent::borrowck_limitations_implies_static); + return; + } + } + } } } diff --git a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs index af71db69483..0b3151fd8b8 100644 --- a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs @@ -518,7 +518,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { .with_span_help( self.get_closure_bound_clause_span(*def_id), "`Fn` and `FnMut` closures require captured values to be able to be \ - consumed multiple times, but an `FnOnce` consume them only once", + consumed multiple times, but `FnOnce` closures may consume them only once", ) } _ => { diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index 5d9416b59fc..ea264c8064a 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -3,6 +3,7 @@ use core::ops::ControlFlow; +use either::Either; use hir::{ExprKind, Param}; use rustc_abi::FieldIdx; use rustc_errors::{Applicability, Diag}; @@ -12,15 +13,16 @@ use rustc_middle::bug; use rustc_middle::hir::place::PlaceBase; use rustc_middle::mir::visit::PlaceContext; use rustc_middle::mir::{ - self, BindingForm, Local, LocalDecl, LocalInfo, LocalKind, Location, Mutability, Place, - PlaceRef, ProjectionElem, + self, BindingForm, Body, BorrowKind, Local, LocalDecl, LocalInfo, LocalKind, Location, + Mutability, Operand, Place, PlaceRef, ProjectionElem, RawPtrKind, Rvalue, Statement, + StatementKind, TerminatorKind, }; use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, Upcast}; use rustc_span::{BytePos, DesugaringKind, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits; -use tracing::debug; +use tracing::{debug, trace}; use crate::diagnostics::BorrowedContentSource; use crate::{MirBorrowckCtxt, session_diagnostics}; @@ -31,6 +33,33 @@ pub(crate) enum AccessKind { Mutate, } +/// Finds all statements that assign directly to local (i.e., X = ...) and returns their +/// locations. 
+fn find_assignments(body: &Body<'_>, local: Local) -> Vec<Location> { + use rustc_middle::mir::visit::Visitor; + + struct FindLocalAssignmentVisitor { + needle: Local, + locations: Vec<Location>, + } + + impl<'tcx> Visitor<'tcx> for FindLocalAssignmentVisitor { + fn visit_local(&mut self, local: Local, place_context: PlaceContext, location: Location) { + if self.needle != local { + return; + } + + if place_context.is_place_assignment() { + self.locations.push(location); + } + } + } + + let mut visitor = FindLocalAssignmentVisitor { needle: local, locations: vec![] }; + visitor.visit_body(body); + visitor.locations +} + impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { pub(crate) fn report_mutability_error( &mut self, @@ -384,7 +413,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - // Also suggest adding mut for upvars + // Also suggest adding mut for upvars. PlaceRef { local, projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)], @@ -438,9 +467,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - // complete hack to approximate old AST-borrowck - // diagnostic: if the span starts with a mutable borrow of - // a local variable, then just suggest the user remove it. + // Complete hack to approximate old AST-borrowck diagnostic: if the span starts + // with a mutable borrow of a local variable, then just suggest the user remove it. PlaceRef { local: _, projection: [] } if self .infcx @@ -677,12 +705,13 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { /// - is the trait from the local crate? If not, we can't suggest changing signatures /// - `Span` of the argument in the trait definition fn is_error_in_trait(&self, local: Local) -> (bool, bool, Option<Span>) { + let tcx = self.infcx.tcx; if self.body.local_kind(local) != LocalKind::Arg { return (false, false, None); } let my_def = self.body.source.def_id(); let Some(td) = - self.infcx.tcx.impl_of_assoc(my_def).and_then(|x| self.infcx.tcx.trait_id_of_impl(x)) + tcx.trait_impl_of_assoc(my_def).and_then(|id| self.infcx.tcx.trait_id_of_impl(id)) else { return (false, false, None); }; @@ -768,7 +797,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { ); } - // point to span of upvar making closure call require mutable borrow + // Point to span of upvar making closure call that requires a mutable borrow fn show_mutating_upvar( &self, tcx: TyCtxt<'_>, @@ -824,7 +853,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } else { bug!("not an upvar") }; - // sometimes we deliberately don't store the name of a place when coming from a macro in + // Sometimes we deliberately don't store the name of a place when coming from a macro in // another crate. We generally want to limit those diagnostics a little, to hide // implementation details (such as those from pin!() or format!()). In that case show a // slightly different error message, or none at all if something else happened. In other @@ -935,8 +964,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let def_id = tcx.hir_enclosing_body_owner(fn_call_id); let mut look_at_return = true; - // If the HIR node is a function or method call gets the def ID - // of the called function or method and the span and args of the call expr + // If the HIR node is a function or method call, get the DefId + // of the callee function or method, the span, and args of the call expr let get_call_details = || { let hir::Node::Expr(hir::Expr { hir_id, kind, .. 
}) = node else { return None; @@ -1050,7 +1079,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let mut cur_expr = expr; while let ExprKind::MethodCall(path_segment, recv, _, _) = cur_expr.kind { if path_segment.ident.name == sym::iter { - // check `_ty` has `iter_mut` method + // Check that the type has an `iter_mut` method. let res = self .infcx .tcx @@ -1080,38 +1109,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - /// Finds all statements that assign directly to local (i.e., X = ...) and returns their - /// locations. - fn find_assignments(&self, local: Local) -> Vec<Location> { - use rustc_middle::mir::visit::Visitor; - - struct FindLocalAssignmentVisitor { - needle: Local, - locations: Vec<Location>, - } - - impl<'tcx> Visitor<'tcx> for FindLocalAssignmentVisitor { - fn visit_local( - &mut self, - local: Local, - place_context: PlaceContext, - location: Location, - ) { - if self.needle != local { - return; - } - - if place_context.is_place_assignment() { - self.locations.push(location); - } - } - } - - let mut visitor = FindLocalAssignmentVisitor { needle: local, locations: vec![] }; - visitor.visit_body(self.body); - visitor.locations - } - fn suggest_make_local_mut(&self, err: &mut Diag<'_>, local: Local, name: Symbol) { let local_decl = &self.body.local_decls[local]; @@ -1121,7 +1118,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let (is_trait_sig, is_local, local_trait) = self.is_error_in_trait(local); if is_trait_sig && !is_local { - // Do not suggest to change the signature when the trait comes from another crate. + // Do not suggest changing the signature when the trait comes from another crate. err.span_label( local_decl.source_info.span, format!("this is an immutable {pointer_desc}"), @@ -1130,11 +1127,11 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } let decl_span = local_decl.source_info.span; - let amp_mut_sugg = match *local_decl.local_info() { + let (amp_mut_sugg, local_var_ty_info) = match *local_decl.local_info() { LocalInfo::User(mir::BindingForm::ImplicitSelf(_)) => { let (span, suggestion) = suggest_ampmut_self(self.infcx.tcx, decl_span); let additional = local_trait.map(|span| suggest_ampmut_self(self.infcx.tcx, span)); - Some(AmpMutSugg { has_sugg: true, span, suggestion, additional }) + (AmpMutSugg::Type { span, suggestion, additional }, None) } LocalInfo::User(mir::BindingForm::Var(mir::VarBindingForm { @@ -1142,79 +1139,54 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { opt_ty_info, .. })) => { - // check if the RHS is from desugaring + // Check if the RHS is from desugaring. + let first_assignment = find_assignments(&self.body, local).first().copied(); + let first_assignment_stmt = first_assignment + .and_then(|loc| self.body[loc.block].statements.get(loc.statement_index)); + trace!(?first_assignment_stmt); let opt_assignment_rhs_span = - self.find_assignments(local).first().map(|&location| { - if let Some(mir::Statement { - source_info: _, - kind: - mir::StatementKind::Assign(box ( - _, - mir::Rvalue::Use(mir::Operand::Copy(place)), - )), - .. 
- }) = self.body[location.block].statements.get(location.statement_index) - { - self.body.local_decls[place.local].source_info.span - } else { - self.body.source_info(location).span - } - }); - match opt_assignment_rhs_span.and_then(|s| s.desugaring_kind()) { - // on for loops, RHS points to the iterator part - Some(DesugaringKind::ForLoop) => { - let span = opt_assignment_rhs_span.unwrap(); - self.suggest_similar_mut_method_for_for_loop(err, span); + first_assignment.map(|loc| self.body.source_info(loc).span); + let mut source_span = opt_assignment_rhs_span; + if let Some(mir::Statement { + source_info: _, + kind: + mir::StatementKind::Assign(box (_, mir::Rvalue::Use(mir::Operand::Copy(place)))), + .. + }) = first_assignment_stmt + { + let local_span = self.body.local_decls[place.local].source_info.span; + // `&self` in async functions have a `desugaring_kind`, but the local we assign + // it with does not, so use the local_span for our checks later. + source_span = Some(local_span); + if let Some(DesugaringKind::ForLoop) = local_span.desugaring_kind() { + // On for loops, RHS points to the iterator part. + self.suggest_similar_mut_method_for_for_loop(err, local_span); err.span_label( - span, + local_span, format!("this iterator yields `{pointer_sigil}` {pointer_desc}s",), ); - None - } - // don't create labels for compiler-generated spans - Some(_) => None, - // don't create labels for the span not from user's code - None if opt_assignment_rhs_span - .is_some_and(|span| self.infcx.tcx.sess.source_map().is_imported(span)) => - { - None - } - None => { - if name != kw::SelfLower { - suggest_ampmut( - self.infcx.tcx, - local_decl.ty, - decl_span, - opt_assignment_rhs_span, - opt_ty_info, - ) - } else { - match local_decl.local_info() { - LocalInfo::User(mir::BindingForm::Var(mir::VarBindingForm { - opt_ty_info: None, - .. - })) => { - let (span, sugg) = - suggest_ampmut_self(self.infcx.tcx, decl_span); - Some(AmpMutSugg { - has_sugg: true, - span, - suggestion: sugg, - additional: None, - }) - } - // explicit self (eg `self: &'a Self`) - _ => suggest_ampmut( - self.infcx.tcx, - local_decl.ty, - decl_span, - opt_assignment_rhs_span, - opt_ty_info, - ), - } - } + return; } } + + // Don't create labels for compiler-generated spans or spans not from users' code. + if source_span.is_some_and(|s| { + s.desugaring_kind().is_some() || self.infcx.tcx.sess.source_map().is_imported(s) + }) { + return; + } + + // This could be because we're in an `async fn`. + if name == kw::SelfLower && opt_ty_info.is_none() { + let (span, suggestion) = suggest_ampmut_self(self.infcx.tcx, decl_span); + (AmpMutSugg::Type { span, suggestion, additional: None }, None) + } else if let Some(sugg) = + suggest_ampmut(self.infcx, self.body(), first_assignment_stmt) + { + (sugg, opt_ty_info) + } else { + return; + } } LocalInfo::User(mir::BindingForm::Var(mir::VarBindingForm { @@ -1222,181 +1194,238 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { .. 
})) => { let pattern_span: Span = local_decl.source_info.span; - suggest_ref_mut(self.infcx.tcx, pattern_span).map(|span| AmpMutSugg { - has_sugg: true, - span, - suggestion: "mut ".to_owned(), - additional: None, - }) + let Some(span) = suggest_ref_mut(self.infcx.tcx, pattern_span) else { + return; + }; + (AmpMutSugg::Type { span, suggestion: "mut ".to_owned(), additional: None }, None) } _ => unreachable!(), }; - match amp_mut_sugg { - Some(AmpMutSugg { - has_sugg: true, - span: err_help_span, - suggestion: suggested_code, - additional, - }) => { - let mut sugg = vec![(err_help_span, suggested_code)]; - if let Some(s) = additional { - sugg.push(s); - } + let mut suggest = |suggs: Vec<_>, applicability, extra| { + if suggs.iter().any(|(span, _)| self.infcx.tcx.sess.source_map().is_imported(*span)) { + return; + } - if sugg.iter().all(|(span, _)| !self.infcx.tcx.sess.source_map().is_imported(*span)) - { - err.multipart_suggestion_verbose( - format!( - "consider changing this to be a mutable {pointer_desc}{}", - if is_trait_sig { - " in the `impl` method and the `trait` definition" - } else { - "" - } - ), - sugg, - Applicability::MachineApplicable, - ); + err.multipart_suggestion_verbose( + format!( + "consider changing this to be a mutable {pointer_desc}{}{extra}", + if is_trait_sig { + " in the `impl` method and the `trait` definition" + } else { + "" + } + ), + suggs, + applicability, + ); + }; + + let (mut sugg, add_type_annotation_if_not_exists) = match amp_mut_sugg { + AmpMutSugg::Type { span, suggestion, additional } => { + let mut sugg = vec![(span, suggestion)]; + sugg.extend(additional); + suggest(sugg, Applicability::MachineApplicable, ""); + return; + } + AmpMutSugg::MapGetMut { span, suggestion } => { + if self.infcx.tcx.sess.source_map().is_imported(span) { + return; } + err.multipart_suggestion_verbose( + "consider using `get_mut`", + vec![(span, suggestion)], + Applicability::MaybeIncorrect, + ); + return; } - Some(AmpMutSugg { - has_sugg: false, span: err_label_span, suggestion: message, .. - }) => { - let def_id = self.body.source.def_id(); - let hir_id = if let Some(local_def_id) = def_id.as_local() - && let Some(body) = self.infcx.tcx.hir_maybe_body_owned_by(local_def_id) - { - BindingFinder { span: err_label_span }.visit_body(&body).break_value() - } else { - None - }; + AmpMutSugg::Expr { span, suggestion } => { + // `Expr` suggestions should change type annotations if they already exist (probably immut), + // but do not add new type annotations. + (vec![(span, suggestion)], false) + } + AmpMutSugg::ChangeBinding => (vec![], true), + }; - if let Some(hir_id) = hir_id - && let hir::Node::LetStmt(local) = self.infcx.tcx.hir_node(hir_id) - { - let tables = self.infcx.tcx.typeck(def_id.as_local().unwrap()); - if let Some(clone_trait) = self.infcx.tcx.lang_items().clone_trait() - && let Some(expr) = local.init - && let ty = tables.node_type_opt(expr.hir_id) - && let Some(ty) = ty - && let ty::Ref(..) = ty.kind() + // Find a binding's type to make mutable. + let (binding_exists, span) = match local_var_ty_info { + // If this is a variable binding with an explicit type, + // then we will suggest changing it to be mutable. + // This is `Applicability::MachineApplicable`. + Some(ty_span) => (true, ty_span), + + // Otherwise, we'll suggest *adding* an annotated type, we'll suggest + // the RHS's type for that. + // This is `Applicability::HasPlaceholders`. 
+ None => (false, decl_span), + }; + + if !binding_exists && !add_type_annotation_if_not_exists { + suggest(sugg, Applicability::MachineApplicable, ""); + return; + } + + // If the binding already exists and is a reference with an explicit + // lifetime, then we can suggest adding ` mut`. This is special-cased from + // the path without an explicit lifetime. + let (sugg_span, sugg_str, suggest_now) = if let Ok(src) = self.infcx.tcx.sess.source_map().span_to_snippet(span) + && src.starts_with("&'") + // Note that `&' a T` is invalid so this is correct. + && let Some(ws_pos) = src.find(char::is_whitespace) + { + let span = span.with_lo(span.lo() + BytePos(ws_pos as u32)).shrink_to_lo(); + (span, " mut".to_owned(), true) + // If there is already a binding, we modify it to be `mut`. + } else if binding_exists { + // Shrink the span to just after the `&` in `&variable`. + let span = span.with_lo(span.lo() + BytePos(1)).shrink_to_lo(); + (span, "mut ".to_owned(), true) + } else { + // Otherwise, suggest that the user annotates the binding; We provide the + // type of the local. + let ty = local_decl.ty.builtin_deref(true).unwrap(); + + (span, format!("{}mut {}", if local_decl.ty.is_ref() { "&" } else { "*" }, ty), false) + }; + + if suggest_now { + // Suggest changing `&x` to `&mut x` and changing `&T` to `&mut T` at the same time. + let has_change = !sugg.is_empty(); + sugg.push((sugg_span, sugg_str)); + suggest( + sugg, + Applicability::MachineApplicable, + // FIXME(fee1-dead) this somehow doesn't fire + if has_change { " and changing the binding's type" } else { "" }, + ); + return; + } else if !sugg.is_empty() { + suggest(sugg, Applicability::MachineApplicable, ""); + return; + } + + let def_id = self.body.source.def_id(); + let hir_id = if let Some(local_def_id) = def_id.as_local() + && let Some(body) = self.infcx.tcx.hir_maybe_body_owned_by(local_def_id) + { + BindingFinder { span: sugg_span }.visit_body(&body).break_value() + } else { + None + }; + let node = hir_id.map(|hir_id| self.infcx.tcx.hir_node(hir_id)); + + let Some(hir::Node::LetStmt(local)) = node else { + err.span_label( + sugg_span, + format!("consider changing this binding's type to be: `{sugg_str}`"), + ); + return; + }; + + let tables = self.infcx.tcx.typeck(def_id.as_local().unwrap()); + if let Some(clone_trait) = self.infcx.tcx.lang_items().clone_trait() + && let Some(expr) = local.init + && let ty = tables.node_type_opt(expr.hir_id) + && let Some(ty) = ty + && let ty::Ref(..) = ty.kind() + { + match self + .infcx + .type_implements_trait_shallow(clone_trait, ty.peel_refs(), self.infcx.param_env) + .as_deref() + { + Some([]) => { + // FIXME: This error message isn't useful, since we're just + // vaguely suggesting to clone a value that already + // implements `Clone`. + // + // A correct suggestion here would take into account the fact + // that inference may be affected by missing types on bindings, + // etc., to improve "tests/ui/borrowck/issue-91206.stderr", for + // example. + } + None => { + if let hir::ExprKind::MethodCall(segment, _rcvr, [], span) = expr.kind + && segment.ident.name == sym::clone { - match self - .infcx - .type_implements_trait_shallow( - clone_trait, - ty.peel_refs(), - self.infcx.param_env, - ) - .as_deref() - { - Some([]) => { - // FIXME: This error message isn't useful, since we're just - // vaguely suggesting to clone a value that already - // implements `Clone`. 
- // - // A correct suggestion here would take into account the fact - // that inference may be affected by missing types on bindings, - // etc., to improve "tests/ui/borrowck/issue-91206.stderr", for - // example. - } - None => { - if let hir::ExprKind::MethodCall(segment, _rcvr, [], span) = - expr.kind - && segment.ident.name == sym::clone - { - err.span_help( - span, - format!( - "`{}` doesn't implement `Clone`, so this call clones \ + err.span_help( + span, + format!( + "`{}` doesn't implement `Clone`, so this call clones \ the reference `{ty}`", - ty.peel_refs(), - ), - ); - } - // The type doesn't implement Clone. - let trait_ref = ty::Binder::dummy(ty::TraitRef::new( - self.infcx.tcx, - clone_trait, - [ty.peel_refs()], - )); - let obligation = traits::Obligation::new( - self.infcx.tcx, - traits::ObligationCause::dummy(), - self.infcx.param_env, - trait_ref, - ); - self.infcx.err_ctxt().suggest_derive( - &obligation, - err, - trait_ref.upcast(self.infcx.tcx), - ); - } - Some(errors) => { - if let hir::ExprKind::MethodCall(segment, _rcvr, [], span) = - expr.kind - && segment.ident.name == sym::clone - { - err.span_help( - span, - format!( - "`{}` doesn't implement `Clone` because its \ + ty.peel_refs(), + ), + ); + } + // The type doesn't implement Clone. + let trait_ref = ty::Binder::dummy(ty::TraitRef::new( + self.infcx.tcx, + clone_trait, + [ty.peel_refs()], + )); + let obligation = traits::Obligation::new( + self.infcx.tcx, + traits::ObligationCause::dummy(), + self.infcx.param_env, + trait_ref, + ); + self.infcx.err_ctxt().suggest_derive( + &obligation, + err, + trait_ref.upcast(self.infcx.tcx), + ); + } + Some(errors) => { + if let hir::ExprKind::MethodCall(segment, _rcvr, [], span) = expr.kind + && segment.ident.name == sym::clone + { + err.span_help( + span, + format!( + "`{}` doesn't implement `Clone` because its \ implementations trait bounds could not be met, so \ this call clones the reference `{ty}`", - ty.peel_refs(), - ), - ); - err.note(format!( - "the following trait bounds weren't met: {}", - errors - .iter() - .map(|e| e.obligation.predicate.to_string()) - .collect::<Vec<_>>() - .join("\n"), - )); - } - // The type doesn't implement Clone because of unmet obligations. - for error in errors { - if let traits::FulfillmentErrorCode::Select( - traits::SelectionError::Unimplemented, - ) = error.code - && let ty::PredicateKind::Clause(ty::ClauseKind::Trait( - pred, - )) = error.obligation.predicate.kind().skip_binder() - { - self.infcx.err_ctxt().suggest_derive( - &error.obligation, - err, - error.obligation.predicate.kind().rebind(pred), - ); - } - } - } - } + ty.peel_refs(), + ), + ); + err.note(format!( + "the following trait bounds weren't met: {}", + errors + .iter() + .map(|e| e.obligation.predicate.to_string()) + .collect::<Vec<_>>() + .join("\n"), + )); } - let (changing, span, sugg) = match local.ty { - Some(ty) => ("changing", ty.span, message), - None => { - ("specifying", local.pat.span.shrink_to_hi(), format!(": {message}")) + // The type doesn't implement Clone because of unmet obligations. 
+ for error in errors { + if let traits::FulfillmentErrorCode::Select( + traits::SelectionError::Unimplemented, + ) = error.code + && let ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) = + error.obligation.predicate.kind().skip_binder() + { + self.infcx.err_ctxt().suggest_derive( + &error.obligation, + err, + error.obligation.predicate.kind().rebind(pred), + ); } - }; - err.span_suggestion_verbose( - span, - format!("consider {changing} this binding's type"), - sugg, - Applicability::HasPlaceholders, - ); - } else { - err.span_label( - err_label_span, - format!("consider changing this binding's type to be: `{message}`"), - ); + } } } - None => {} } + let (changing, span, sugg) = match local.ty { + Some(ty) => ("changing", ty.span, sugg_str), + None => ("specifying", local.pat.span.shrink_to_hi(), format!(": {sugg_str}")), + }; + err.span_suggestion_verbose( + span, + format!("consider {changing} this binding's type"), + sugg, + Applicability::HasPlaceholders, + ); } } @@ -1463,11 +1492,25 @@ fn suggest_ampmut_self(tcx: TyCtxt<'_>, span: Span) -> (Span, String) { } } -struct AmpMutSugg { - has_sugg: bool, - span: Span, - suggestion: String, - additional: Option<(Span, String)>, +enum AmpMutSugg { + /// Type suggestion. Changes `&self` to `&mut self`, `x: &T` to `x: &mut T`, + /// `ref x` to `ref mut x`, etc. + Type { + span: Span, + suggestion: String, + additional: Option<(Span, String)>, + }, + /// Suggestion for expressions, `&x` to `&mut x`, `&x[i]` to `&mut x[i]`, etc. + Expr { + span: Span, + suggestion: String, + }, + /// Suggests `.get_mut` in the case of `&map[&key]` for Hash/BTreeMap. + MapGetMut { + span: Span, + suggestion: String, + }, + ChangeBinding, } // When we want to suggest a user change a local variable to be a `&mut`, there @@ -1486,110 +1529,111 @@ struct AmpMutSugg { // This implementation attempts to emulate AST-borrowck prioritization // by trying (3.), then (2.) and finally falling back on (1.). fn suggest_ampmut<'tcx>( - tcx: TyCtxt<'tcx>, - decl_ty: Ty<'tcx>, - decl_span: Span, - opt_assignment_rhs_span: Option<Span>, - opt_ty_info: Option<Span>, + infcx: &crate::BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + opt_assignment_rhs_stmt: Option<&Statement<'tcx>>, ) -> Option<AmpMutSugg> { - // if there is a RHS and it starts with a `&` from it, then check if it is + let tcx = infcx.tcx; + // If there is a RHS and it starts with a `&` from it, then check if it is // mutable, and if not, put suggest putting `mut ` to make it mutable. - // we don't have to worry about lifetime annotations here because they are + // We don't have to worry about lifetime annotations here because they are // not valid when taking a reference. For example, the following is not valid Rust: // // let x: &i32 = &'a 5; // ^^ lifetime annotation not allowed // - if let Some(rhs_span) = opt_assignment_rhs_span - && let Ok(rhs_str) = tcx.sess.source_map().span_to_snippet(rhs_span) - && let Some(rhs_str_no_amp) = rhs_str.strip_prefix('&') + if let Some(rhs_stmt) = opt_assignment_rhs_stmt + && let StatementKind::Assign(box (lhs, rvalue)) = &rhs_stmt.kind + && let mut rhs_span = rhs_stmt.source_info.span + && let Ok(mut rhs_str) = tcx.sess.source_map().span_to_snippet(rhs_span) { - // Suggest changing `&raw const` to `&raw mut` if applicable. 
- if rhs_str_no_amp.trim_start().strip_prefix("raw const").is_some() { - let const_idx = rhs_str.find("const").unwrap() as u32; - let const_span = rhs_span - .with_lo(rhs_span.lo() + BytePos(const_idx)) - .with_hi(rhs_span.lo() + BytePos(const_idx + "const".len() as u32)); - - return Some(AmpMutSugg { - has_sugg: true, - span: const_span, - suggestion: "mut".to_owned(), - additional: None, - }); + let mut rvalue = rvalue; + + // Take some special care when handling `let _x = &*_y`: + // We want to know if this is part of an overloaded index, so `let x = &a[0]`, + // or whether this is a usertype ascription (`let _x: &T = y`). + if let Rvalue::Ref(_, BorrowKind::Shared, place) = rvalue + && place.projection.len() == 1 + && place.projection[0] == ProjectionElem::Deref + && let Some(assign) = find_assignments(&body, place.local).first() + { + // If this is a usertype ascription (`let _x: &T = _y`) then pierce through it as either we want + // to suggest `&mut` on the expression (handled here) or we return `None` and let the caller + // suggest `&mut` on the type if the expression seems fine (e.g. `let _x: &T = &mut _y`). + if let Some(user_ty_projs) = body.local_decls[lhs.local].user_ty.as_ref() + && let [user_ty_proj] = user_ty_projs.contents.as_slice() + && user_ty_proj.projs.is_empty() + && let Either::Left(rhs_stmt_new) = body.stmt_at(*assign) + && let StatementKind::Assign(box (_, rvalue_new)) = &rhs_stmt_new.kind + && let rhs_span_new = rhs_stmt_new.source_info.span + && let Ok(rhs_str_new) = tcx.sess.source_map().span_to_snippet(rhs_span) + { + (rvalue, rhs_span, rhs_str) = (rvalue_new, rhs_span_new, rhs_str_new); + } + + if let Either::Right(call) = body.stmt_at(*assign) + && let TerminatorKind::Call { + func: Operand::Constant(box const_operand), args, .. + } = &call.kind + && let ty::FnDef(method_def_id, method_args) = *const_operand.ty().kind() + && let Some(trait_) = tcx.trait_of_assoc(method_def_id) + && tcx.is_lang_item(trait_, hir::LangItem::Index) + { + let trait_ref = ty::TraitRef::from_assoc( + tcx, + tcx.require_lang_item(hir::LangItem::IndexMut, rhs_span), + method_args, + ); + // The type only implements `Index` but not `IndexMut`, we must not suggest `&mut`. + if !infcx + .type_implements_trait(trait_ref.def_id, trait_ref.args, infcx.param_env) + .must_apply_considering_regions() + { + // Suggest `get_mut` if type is a `BTreeMap` or `HashMap`. + if let ty::Adt(def, _) = trait_ref.self_ty().kind() + && [sym::BTreeMap, sym::HashMap] + .into_iter() + .any(|s| tcx.is_diagnostic_item(s, def.did())) + && let [map, key] = &**args + && let Ok(map) = tcx.sess.source_map().span_to_snippet(map.span) + && let Ok(key) = tcx.sess.source_map().span_to_snippet(key.span) + { + let span = rhs_span; + let suggestion = format!("{map}.get_mut({key}).unwrap()"); + return Some(AmpMutSugg::MapGetMut { span, suggestion }); + } + return None; + } + } } - // Figure out if rhs already is `&mut`. - let is_mut = if let Some(rest) = rhs_str_no_amp.trim_start().strip_prefix("mut") { - match rest.chars().next() { - // e.g. `&mut x` - Some(c) if c.is_whitespace() => true, - // e.g. `&mut(x)` - Some('(') => true, - // e.g. `&mut{x}` - Some('{') => true, - // e.g. `&mutablevar` - _ => false, + let sugg = match rvalue { + Rvalue::Ref(_, BorrowKind::Shared, _) if let Some(ref_idx) = rhs_str.find('&') => { + // Shrink the span to just after the `&` in `&variable`. 
+ Some(( + rhs_span.with_lo(rhs_span.lo() + BytePos(ref_idx as u32 + 1)).shrink_to_lo(), + "mut ".to_owned(), + )) } - } else { - false + Rvalue::RawPtr(RawPtrKind::Const, _) if let Some(const_idx) = rhs_str.find("const") => { + // Suggest changing `&raw const` to `&raw mut` if applicable. + let const_idx = const_idx as u32; + Some(( + rhs_span + .with_lo(rhs_span.lo() + BytePos(const_idx)) + .with_hi(rhs_span.lo() + BytePos(const_idx + "const".len() as u32)), + "mut".to_owned(), + )) + } + _ => None, }; - // if the reference is already mutable then there is nothing we can do - // here. - if !is_mut { - // shrink the span to just after the `&` in `&variable` - let span = rhs_span.with_lo(rhs_span.lo() + BytePos(1)).shrink_to_lo(); - - // FIXME(Ezrashaw): returning is bad because we still might want to - // update the annotated type, see #106857. - return Some(AmpMutSugg { - has_sugg: true, - span, - suggestion: "mut ".to_owned(), - additional: None, - }); + + if let Some((span, suggestion)) = sugg { + return Some(AmpMutSugg::Expr { span, suggestion }); } } - let (binding_exists, span) = match opt_ty_info { - // if this is a variable binding with an explicit type, - // then we will suggest changing it to be mutable. - // this is `Applicability::MachineApplicable`. - Some(ty_span) => (true, ty_span), - - // otherwise, we'll suggest *adding* an annotated type, we'll suggest - // the RHS's type for that. - // this is `Applicability::HasPlaceholders`. - None => (false, decl_span), - }; - - // if the binding already exists and is a reference with an explicit - // lifetime, then we can suggest adding ` mut`. this is special-cased from - // the path without an explicit lifetime. - if let Ok(src) = tcx.sess.source_map().span_to_snippet(span) - && src.starts_with("&'") - // note that `& 'a T` is invalid so this is correct. - && let Some(ws_pos) = src.find(char::is_whitespace) - { - let span = span.with_lo(span.lo() + BytePos(ws_pos as u32)).shrink_to_lo(); - Some(AmpMutSugg { has_sugg: true, span, suggestion: " mut".to_owned(), additional: None }) - // if there is already a binding, we modify it to be `mut` - } else if binding_exists { - // shrink the span to just after the `&` in `&variable` - let span = span.with_lo(span.lo() + BytePos(1)).shrink_to_lo(); - Some(AmpMutSugg { has_sugg: true, span, suggestion: "mut ".to_owned(), additional: None }) - } else { - // otherwise, suggest that the user annotates the binding; we provide the - // type of the local. 
- let ty = decl_ty.builtin_deref(true).unwrap(); - - Some(AmpMutSugg { - has_sugg: false, - span, - suggestion: format!("{}mut {}", if decl_ty.is_ref() { "&" } else { "*" }, ty), - additional: None, - }) - } + Some(AmpMutSugg::ChangeBinding) } /// If the type is a `Coroutine`, `Closure`, or `CoroutineClosure` diff --git a/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs b/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs index 83fe4a9f98f..99913626418 100644 --- a/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs +++ b/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs @@ -13,6 +13,8 @@ use rustc_middle::mir::{self, ConstraintCategory, Location}; use rustc_middle::ty::{ self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, }; +use rustc_span::Span; +use rustc_trait_selection::error_reporting::infer::region::unexpected_hidden_region_diagnostic; use rustc_trait_selection::errors::impl_trait_overcapture_suggestion; use crate::MirBorrowckCtxt; @@ -26,13 +28,61 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { if errors.is_empty() { return; } + + let infcx = self.infcx; let mut guar = None; + let mut last_unexpected_hidden_region: Option<(Span, Ty<'_>, ty::OpaqueTypeKey<'tcx>)> = + None; for error in errors { guar = Some(match error { - DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err) => err.report(self.infcx), + DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err) => err.report(infcx), DeferredOpaqueTypeError::LifetimeMismatchOpaqueParam(err) => { - self.infcx.dcx().emit_err(err) + infcx.dcx().emit_err(err) + } + DeferredOpaqueTypeError::UnexpectedHiddenRegion { + opaque_type_key, + hidden_type, + member_region, + } => { + let named_ty = + self.regioncx.name_regions_for_member_constraint(infcx.tcx, hidden_type.ty); + let named_key = self + .regioncx + .name_regions_for_member_constraint(infcx.tcx, opaque_type_key); + let named_region = + self.regioncx.name_regions_for_member_constraint(infcx.tcx, member_region); + let diag = unexpected_hidden_region_diagnostic( + infcx, + self.mir_def_id(), + hidden_type.span, + named_ty, + named_region, + named_key, + ); + if last_unexpected_hidden_region + != Some((hidden_type.span, named_ty, named_key)) + { + last_unexpected_hidden_region = + Some((hidden_type.span, named_ty, named_key)); + diag.emit() + } else { + diag.delay_as_bug() + } } + DeferredOpaqueTypeError::NonDefiningUseInDefiningScope { + span, + opaque_type_key, + } => infcx.dcx().span_err( + span, + format!( + "non-defining use of `{}` in the defining scope", + Ty::new_opaque( + infcx.tcx, + opaque_type_key.def_id.to_def_id(), + opaque_type_key.args + ) + ), + ), }); } let guar = guar.unwrap(); @@ -193,10 +243,9 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for FindOpaqueRegion<'_, 'tcx> { let opaque_region_vid = self.regioncx.to_region_vid(opaque_region); // Find a path between the borrow region and our opaque capture. - if let Some((path, _)) = - self.regioncx.find_constraint_paths_between_regions(self.borrow_region, |r| { - r == opaque_region_vid - }) + if let Some(path) = self + .regioncx + .constraint_path_between_regions(self.borrow_region, opaque_region_vid) { for constraint in path { // If we find a call in this path, then check if it defines the opaque. 
diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 2b74f1a48f7..bec4c934502 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -23,7 +23,6 @@ use rustc_trait_selection::error_reporting::infer::nice_region_error::{ self, HirTraitObjectVisitor, NiceRegionError, TraitObjectVisitor, find_anon_type, find_param_with_region, suggest_adding_lifetime_params, }; -use rustc_trait_selection::error_reporting::infer::region::unexpected_hidden_region_diagnostic; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{Obligation, ObligationCtxt}; use tracing::{debug, instrument, trace}; @@ -62,7 +61,7 @@ impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> { | ConstraintCategory::Boring | ConstraintCategory::BoringNoLocation | ConstraintCategory::Internal - | ConstraintCategory::IllegalUniverse => "", + | ConstraintCategory::OutlivesUnnameablePlaceholder(..) => "", } } } @@ -105,18 +104,6 @@ pub(crate) enum RegionErrorKind<'tcx> { /// A generic bound failure for a type test (`T: 'a`). TypeTestError { type_test: TypeTest<'tcx> }, - /// An unexpected hidden region for an opaque type. - UnexpectedHiddenRegion { - /// The span for the member constraint. - span: Span, - /// The hidden type. - hidden_ty: Ty<'tcx>, - /// The opaque type. - key: ty::OpaqueTypeKey<'tcx>, - /// The unexpected region. - member_region: ty::Region<'tcx>, - }, - /// Higher-ranked subtyping error. BoundUniversalRegionError { /// The placeholder free region. @@ -215,7 +202,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { diag: &mut Diag<'_>, lower_bound: RegionVid, ) { - let mut suggestions = vec![]; let tcx = self.infcx.tcx; // find generic associated types in the given region 'lower_bound' @@ -237,9 +223,11 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { .collect::<Vec<_>>(); debug!(?gat_id_and_generics); - // find higher-ranked trait bounds bounded to the generic associated types + // Look for the where-bound which introduces the placeholder. + // As we're using the HIR, we need to handle both `for<'a> T: Trait<'a>` + // and `T: for<'a> Trait<'a>`. let mut hrtb_bounds = vec![]; - gat_id_and_generics.iter().flatten().for_each(|(gat_hir_id, generics)| { + gat_id_and_generics.iter().flatten().for_each(|&(gat_hir_id, generics)| { for pred in generics.predicates { let BoundPredicate(WhereBoundPredicate { bound_generic_params, bounds, .. }) = pred.kind else { continue; }; if bound_generic_params .iter() - .rfind(|bgp| tcx.local_def_id_to_hir_id(bgp.def_id) == *gat_hir_id) + .rfind(|bgp| tcx.local_def_id_to_hir_id(bgp.def_id) == gat_hir_id) .is_some() { for bound in *bounds { hrtb_bounds.push(bound); } + } else { + for bound in *bounds { + if let Trait(trait_bound) = bound { + if trait_bound + .bound_generic_params + .iter() + .rfind(|bgp| tcx.local_def_id_to_hir_id(bgp.def_id) == gat_hir_id) + .is_some() + { + hrtb_bounds.push(bound); + return; + } + } + } } } }); debug!(?hrtb_bounds); + let mut suggestions = vec![]; hrtb_bounds.iter().for_each(|bound| { let Trait(PolyTraitRef { trait_ref, span: trait_span, .. }) = bound else { return; @@ -307,11 +310,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { pub(crate) fn report_region_errors(&mut self, nll_errors: RegionErrors<'tcx>) { // Iterate through all the errors, producing a diagnostic for each one.
The diagnostics are // buffered in the `MirBorrowckCtxt`. - let mut outlives_suggestion = OutlivesSuggestionBuilder::default(); - let mut last_unexpected_hidden_region: Option<(Span, Ty<'_>, ty::OpaqueTypeKey<'tcx>)> = - None; - for (nll_error, _) in nll_errors.into_iter() { match nll_error { RegionErrorKind::TypeTestError { type_test } => { @@ -362,30 +361,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - RegionErrorKind::UnexpectedHiddenRegion { span, hidden_ty, key, member_region } => { - let named_ty = - self.regioncx.name_regions_for_member_constraint(self.infcx.tcx, hidden_ty); - let named_key = - self.regioncx.name_regions_for_member_constraint(self.infcx.tcx, key); - let named_region = self - .regioncx - .name_regions_for_member_constraint(self.infcx.tcx, member_region); - let diag = unexpected_hidden_region_diagnostic( - self.infcx, - self.mir_def_id(), - span, - named_ty, - named_region, - named_key, - ); - if last_unexpected_hidden_region != Some((span, named_ty, named_key)) { - self.buffer_error(diag); - last_unexpected_hidden_region = Some((span, named_ty, named_key)); - } else { - diag.delay_as_bug(); - } - } - RegionErrorKind::BoundUniversalRegionError { longer_fr, placeholder, @@ -394,11 +369,15 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let error_vid = self.regioncx.region_from_element(longer_fr, &error_element); // Find the code to blame for the fact that `longer_fr` outlives `error_fr`. - let (_, cause) = self.regioncx.find_outlives_blame_span( - longer_fr, - NllRegionVariableOrigin::Placeholder(placeholder), - error_vid, - ); + let cause = self + .regioncx + .best_blame_constraint( + longer_fr, + NllRegionVariableOrigin::Placeholder(placeholder), + error_vid, + ) + .0 + .cause; let universe = placeholder.universe; let universe_info = self.regioncx.universe_info(universe); @@ -454,9 +433,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { ) { debug!("report_region_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr); - let (blame_constraint, path) = self.regioncx.best_blame_constraint(fr, fr_origin, |r| { - self.regioncx.provides_universal_region(r, fr, outlived_fr) - }); + let (blame_constraint, path) = + self.regioncx.best_blame_constraint(fr, fr_origin, outlived_fr); let BlameConstraint { category, cause, variance_info, .. } = blame_constraint; debug!("report_region_error: category={:?} {:?} {:?}", category, cause, variance_info); diff --git a/compiler/rustc_borrowck/src/handle_placeholders.rs b/compiler/rustc_borrowck/src/handle_placeholders.rs index 34599ac55b8..94379cdebf7 100644 --- a/compiler/rustc_borrowck/src/handle_placeholders.rs +++ b/compiler/rustc_borrowck/src/handle_placeholders.rs @@ -1,7 +1,6 @@ //! Logic for lowering higher-kinded outlives constraints //! (with placeholders and universes) and turn them into regular //! outlives constraints. 
- use rustc_data_structures::frozen::Frozen; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::graph::scc; @@ -10,12 +9,11 @@ use rustc_index::IndexVec; use rustc_infer::infer::RegionVariableOrigin; use rustc_middle::mir::ConstraintCategory; use rustc_middle::ty::{RegionVid, UniverseIndex}; -use tracing::debug; +use tracing::{debug, trace}; use crate::constraints::{ConstraintSccIndex, OutlivesConstraintSet}; use crate::consumers::OutlivesConstraint; use crate::diagnostics::UniverseInfo; -use crate::member_constraints::MemberConstraintSet; use crate::region_infer::values::{LivenessValues, PlaceholderIndices}; use crate::region_infer::{ConstraintSccs, RegionDefinition, Representative, TypeTest}; use crate::ty::VarianceDiagInfo; @@ -30,7 +28,6 @@ pub(crate) struct LoweredConstraints<'tcx> { pub(crate) constraint_sccs: Sccs<RegionVid, ConstraintSccIndex>, pub(crate) definitions: Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>, pub(crate) scc_annotations: IndexVec<ConstraintSccIndex, RegionTracker>, - pub(crate) member_constraints: MemberConstraintSet<'tcx, RegionVid>, pub(crate) outlives_constraints: Frozen<OutlivesConstraintSet<'tcx>>, pub(crate) type_tests: Vec<TypeTest<'tcx>>, pub(crate) liveness_constraints: LivenessValues, @@ -64,18 +61,71 @@ impl scc::Annotations<RegionVid> for SccAnnotations<'_, '_, RegionTracker> { type SccIdx = ConstraintSccIndex; } +#[derive(Copy, Debug, Clone, PartialEq, Eq)] +enum PlaceholderReachability { + /// This SCC reaches no placeholders. + NoPlaceholders, + /// This SCC reaches at least one placeholder. + Placeholders { + /// The largest-universed placeholder we can reach + max_universe: (UniverseIndex, RegionVid), + + /// The placeholder with the smallest ID + min_placeholder: RegionVid, + + /// The placeholder with the largest ID + max_placeholder: RegionVid, + }, +} + +impl PlaceholderReachability { + /// Merge the reachable placeholders of two graph components. + fn merge(self, other: PlaceholderReachability) -> PlaceholderReachability { + use PlaceholderReachability::*; + match (self, other) { + (NoPlaceholders, NoPlaceholders) => NoPlaceholders, + (NoPlaceholders, p @ Placeholders { .. }) + | (p @ Placeholders { .. }, NoPlaceholders) => p, + ( + Placeholders { + min_placeholder: min_pl, + max_placeholder: max_pl, + max_universe: max_u, + }, + Placeholders { min_placeholder, max_placeholder, max_universe }, + ) => Placeholders { + min_placeholder: min_pl.min(min_placeholder), + max_placeholder: max_pl.max(max_placeholder), + max_universe: max_u.max(max_universe), + }, + } + } + + fn max_universe(&self) -> Option<(UniverseIndex, RegionVid)> { + match self { + Self::NoPlaceholders => None, + Self::Placeholders { max_universe, .. } => Some(*max_universe), + } + } + + /// If we have reached placeholders, determine if they can + /// be named from this universe. + fn can_be_named_by(&self, from: UniverseIndex) -> bool { + self.max_universe() + .is_none_or(|(max_placeholder_universe, _)| from.can_name(max_placeholder_universe)) + } +} + /// An annotation for region graph SCCs that tracks /// the values of its elements. This annotates a single SCC. #[derive(Copy, Debug, Clone)] pub(crate) struct RegionTracker { - /// The largest universe of a placeholder reached from this SCC. - /// This includes placeholders within this SCC. - max_placeholder_universe_reached: UniverseIndex, + reachable_placeholders: PlaceholderReachability, /// The largest universe nameable from this SCC. 
/// It is the smallest nameable universes of all - /// existential regions reachable from it. - max_nameable_universe: UniverseIndex, + /// existential regions reachable from it. Small Rvids are preferred. + max_nameable_universe: (UniverseIndex, RegionVid), /// The representative Region Variable Id for this SCC. pub(crate) representative: Representative, @@ -83,69 +133,82 @@ pub(crate) struct RegionTracker { impl RegionTracker { pub(crate) fn new(rvid: RegionVid, definition: &RegionDefinition<'_>) -> Self { - let placeholder_universe = + let reachable_placeholders = if matches!(definition.origin, NllRegionVariableOrigin::Placeholder(_)) { - definition.universe + PlaceholderReachability::Placeholders { + max_universe: (definition.universe, rvid), + min_placeholder: rvid, + max_placeholder: rvid, + } } else { - UniverseIndex::ROOT + PlaceholderReachability::NoPlaceholders }; Self { - max_placeholder_universe_reached: placeholder_universe, - max_nameable_universe: definition.universe, + reachable_placeholders, + max_nameable_universe: (definition.universe, rvid), representative: Representative::new(rvid, definition), } } /// The largest universe this SCC can name. It's the smallest - /// largest nameable uninverse of any reachable region. + /// largest nameable universe of any reachable region, or + /// `max_nameable(r) = min (max_nameable(r') for r' reachable from r)` pub(crate) fn max_nameable_universe(self) -> UniverseIndex { - self.max_nameable_universe + self.max_nameable_universe.0 } - fn merge_min_max_seen(&mut self, other: &Self) { - self.max_placeholder_universe_reached = std::cmp::max( - self.max_placeholder_universe_reached, - other.max_placeholder_universe_reached, - ); - - self.max_nameable_universe = - std::cmp::min(self.max_nameable_universe, other.max_nameable_universe); - } - - /// Returns `true` if during the annotated SCC reaches a placeholder - /// with a universe larger than the smallest nameable universe of any - /// reachable existential region. - pub(crate) fn has_incompatible_universes(&self) -> bool { - self.max_nameable_universe().cannot_name(self.max_placeholder_universe_reached) + pub(crate) fn max_placeholder_universe_reached(self) -> UniverseIndex { + if let Some((universe, _)) = self.reachable_placeholders.max_universe() { + universe + } else { + UniverseIndex::ROOT + } } /// Determine if the tracked universes of the two SCCs are compatible. pub(crate) fn universe_compatible_with(&self, other: Self) -> bool { + // HACK: We first check whether we can name the highest existential universe + // of `other`. This only exists to avoid errors in case that scc already + // depends on a placeholder it cannot name itself. self.max_nameable_universe().can_name(other.max_nameable_universe()) - || self.max_nameable_universe().can_name(other.max_placeholder_universe_reached) + || other.reachable_placeholders.can_be_named_by(self.max_nameable_universe()) + } + + /// If this SCC reaches a placeholder it can't name, return it. 
+ fn unnameable_placeholder(&self) -> Option<(UniverseIndex, RegionVid)> { + self.reachable_placeholders.max_universe().filter(|&(placeholder_universe, _)| { + !self.max_nameable_universe().can_name(placeholder_universe) + }) } } impl scc::Annotation for RegionTracker { - fn merge_scc(mut self, other: Self) -> Self { - self.representative = self.representative.merge_scc(other.representative); - self.merge_min_max_seen(&other); - self + fn merge_scc(self, other: Self) -> Self { + trace!("{:?} << {:?}", self.representative, other.representative); + + Self { + representative: self.representative.min(other.representative), + max_nameable_universe: self.max_nameable_universe.min(other.max_nameable_universe), + reachable_placeholders: self.reachable_placeholders.merge(other.reachable_placeholders), + } } - fn merge_reached(mut self, other: Self) -> Self { - // No update to in-component values, only add seen values. - self.merge_min_max_seen(&other); - self + fn merge_reached(self, other: Self) -> Self { + Self { + max_nameable_universe: self.max_nameable_universe.min(other.max_nameable_universe), + reachable_placeholders: self.reachable_placeholders.merge(other.reachable_placeholders), + representative: self.representative, + } } } /// Determines if the region variable definitions contain /// placeholders, and compute them for later use. -fn region_definitions<'tcx>( - universal_regions: &UniversalRegions<'tcx>, +// FIXME: This is also used by opaque type handling. Move it to a separate file. +pub(super) fn region_definitions<'tcx>( infcx: &BorrowckInferCtxt<'tcx>, + universal_regions: &UniversalRegions<'tcx>, ) -> (Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>, bool) { let var_infos = infcx.get_region_var_infos(); // Create a RegionDefinition for each inference variable. This happens here because @@ -209,14 +272,13 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>( infcx: &BorrowckInferCtxt<'tcx>, ) -> LoweredConstraints<'tcx> { let universal_regions = &universal_region_relations.universal_regions; - let (definitions, has_placeholders) = region_definitions(universal_regions, infcx); + let (definitions, has_placeholders) = region_definitions(infcx, universal_regions); let MirTypeckRegionConstraints { placeholder_indices, placeholder_index_to_region: _, liveness_constraints, mut outlives_constraints, - member_constraints, universe_causes, type_tests, } = constraints; @@ -242,7 +304,6 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>( return LoweredConstraints { type_tests, - member_constraints, constraint_sccs, scc_annotations: scc_annotations.scc_to_annotation, definitions, @@ -279,7 +340,6 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>( constraint_sccs, definitions, scc_annotations, - member_constraints, outlives_constraints: Frozen::freeze(outlives_constraints), type_tests, liveness_constraints, @@ -310,28 +370,52 @@ fn rewrite_placeholder_outlives<'tcx>( let annotation = annotations[scc]; - // If this SCC participates in a universe violation, - // e.g. if it reaches a region with a universe smaller than - // the largest region reached, add a requirement that it must - // outlive `'static`. - if annotation.has_incompatible_universes() { - // Optimisation opportunity: this will add more constraints than - // needed for correctness, since an SCC upstream of another with - // a universe violation will "infect" its downstream SCCs to also - // outlive static. 
- let scc_representative_outlives_static = OutlivesConstraint { - sup: annotation.representative.rvid(), - sub: fr_static, - category: ConstraintCategory::IllegalUniverse, - locations: Locations::All(rustc_span::DUMMY_SP), - span: rustc_span::DUMMY_SP, - variance_info: VarianceDiagInfo::None, - from_closure: false, - }; - outlives_constraints.push(scc_representative_outlives_static); - added_constraints = true; - debug!("Added {:?}: 'static!", annotation.representative.rvid()); - } + let Some((max_u, max_u_rvid)) = annotation.unnameable_placeholder() else { + continue; + }; + + debug!( + "Placeholder universe {max_u:?} is too large for its SCC, represented by {:?}", + annotation.representative + ); + + // We only add one `r: 'static` constraint per SCC, where `r` is the SCC representative. + // That constraint is annotated with some placeholder `unnameable` where + // `unnameable` is unnameable from `r` and there is a path in the constraint graph + // between them. + // + // There is one exception; if some other region in this SCC can't name `'r`, then + // we pick the region with the smallest universe in the SCC, so that a path can + // always start in `'r` to find a motivation that isn't cyclic. + let blame_to = if annotation.representative.rvid() == max_u_rvid { + // Assertion: the region that lowered our universe is an existential one and we are a placeholder! + + // The SCC's representative is not nameable from some region + // that ends up in the SCC. + let small_universed_rvid = annotation.max_nameable_universe.1; + debug!( + "{small_universed_rvid:?} lowered our universe to {:?}", + annotation.max_nameable_universe() + ); + small_universed_rvid + } else { + // `max_u_rvid` is not nameable by the SCC's representative. + max_u_rvid + }; + + // FIXME: if we can extract a useful blame span here, future error + // reporting and constraint search can be simplified. + + added_constraints = true; + outlives_constraints.push(OutlivesConstraint { + sup: annotation.representative.rvid(), + sub: fr_static, + category: ConstraintCategory::OutlivesUnnameablePlaceholder(blame_to), + locations: Locations::All(rustc_span::DUMMY_SP), + span: rustc_span::DUMMY_SP, + variance_info: VarianceDiagInfo::None, + from_closure: false, + }); } added_constraints } diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index d76d6a04e6e..ce78ae203a4 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -78,7 +78,6 @@ mod dataflow; mod def_use; mod diagnostics; mod handle_placeholders; -mod member_constraints; mod nll; mod path_utils; mod place_ext; @@ -301,7 +300,7 @@ fn do_mir_borrowck<'tcx>( def: LocalDefId, ) -> PropagatedBorrowCheckResults<'tcx> { let tcx = root_cx.tcx; - let infcx = BorrowckInferCtxt::new(tcx, def); + let infcx = BorrowckInferCtxt::new(tcx, def, root_cx.root_def_id()); let (input_body, promoted) = tcx.mir_promoted(def); let input_body: &Body<'_> = &input_body.borrow(); let input_promoted: &IndexSlice<_, _> = &promoted.borrow(); @@ -335,9 +334,10 @@ fn do_mir_borrowck<'tcx>( // Run the MIR type-checker. 
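    // Opaque-type uses are handled immediately after MIR type-checking (see
    // `handle_opaque_type_uses` below), before region inference runs, instead
    // of being inferred from the finished `RegionInferenceContext`.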
let MirTypeckResults { - constraints, + mut constraints, universal_region_relations, - opaque_type_values, + region_bound_pairs, + known_type_outlives_obligations, polonius_context, } = type_check::type_check( root_cx, @@ -352,6 +352,17 @@ fn do_mir_borrowck<'tcx>( Rc::clone(&location_map), ); + let opaque_type_errors = region_infer::opaque_types::handle_opaque_type_uses( + root_cx, + &infcx, + &body, + &universal_region_relations, + ®ion_bound_pairs, + &known_type_outlives_obligations, + &location_map, + &mut constraints, + ); + // Compute non-lexical lifetimes using the constraints computed // by typechecking the MIR body. let nll::NllOutput { @@ -375,8 +386,6 @@ fn do_mir_borrowck<'tcx>( polonius_context, ); - let opaque_type_errors = regioncx.infer_opaque_types(root_cx, &infcx, opaque_type_values); - // Dump MIR results into a file, if that is enabled. This lets us // write unit-tests, as well as helping with debugging. nll::dump_nll_mir(&infcx, body, ®ioncx, &opt_closure_req, &borrow_set); @@ -581,12 +590,13 @@ fn get_flow_results<'a, 'tcx>( pub(crate) struct BorrowckInferCtxt<'tcx> { pub(crate) infcx: InferCtxt<'tcx>, - pub(crate) reg_var_to_origin: RefCell<FxIndexMap<ty::RegionVid, RegionCtxt>>, + pub(crate) root_def_id: LocalDefId, pub(crate) param_env: ParamEnv<'tcx>, + pub(crate) reg_var_to_origin: RefCell<FxIndexMap<ty::RegionVid, RegionCtxt>>, } impl<'tcx> BorrowckInferCtxt<'tcx> { - pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Self { + pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId, root_def_id: LocalDefId) -> Self { let typing_mode = if tcx.use_typing_mode_borrowck() { TypingMode::borrowck(tcx, def_id) } else { @@ -594,7 +604,12 @@ impl<'tcx> BorrowckInferCtxt<'tcx> { }; let infcx = tcx.infer_ctxt().build(typing_mode); let param_env = tcx.param_env(def_id); - BorrowckInferCtxt { infcx, reg_var_to_origin: RefCell::new(Default::default()), param_env } + BorrowckInferCtxt { + infcx, + root_def_id, + reg_var_to_origin: RefCell::new(Default::default()), + param_env, + } } pub(crate) fn next_region_var<F>( diff --git a/compiler/rustc_borrowck/src/member_constraints.rs b/compiler/rustc_borrowck/src/member_constraints.rs deleted file mode 100644 index bdd0f6fe11e..00000000000 --- a/compiler/rustc_borrowck/src/member_constraints.rs +++ /dev/null @@ -1,226 +0,0 @@ -use std::hash::Hash; -use std::ops::Index; - -use rustc_data_structures::fx::FxIndexMap; -use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::ty::{self, Ty}; -use rustc_span::Span; -use tracing::instrument; - -/// Compactly stores a set of `R0 member of [R1...Rn]` constraints, -/// indexed by the region `R0`. -#[derive(Debug)] -pub(crate) struct MemberConstraintSet<'tcx, R> -where - R: Copy + Eq, -{ - /// Stores the first "member" constraint for a given `R0`. This is an - /// index into the `constraints` vector below. - first_constraints: FxIndexMap<R, NllMemberConstraintIndex>, - - /// Stores the data about each `R0 member of [R1..Rn]` constraint. - /// These are organized into a linked list, so each constraint - /// contains the index of the next constraint with the same `R0`. - constraints: IndexVec<NllMemberConstraintIndex, MemberConstraint<'tcx>>, - - /// Stores the `R1..Rn` regions for *all* sets. For any given - /// constraint, we keep two indices so that we can pull out a - /// slice. 
- choice_regions: Vec<ty::RegionVid>, -} - -/// Represents a `R0 member of [R1..Rn]` constraint -#[derive(Debug)] -pub(crate) struct MemberConstraint<'tcx> { - next_constraint: Option<NllMemberConstraintIndex>, - - /// The span where the hidden type was instantiated. - pub(crate) definition_span: Span, - - /// The hidden type in which `R0` appears. (Used in error reporting.) - pub(crate) hidden_ty: Ty<'tcx>, - - pub(crate) key: ty::OpaqueTypeKey<'tcx>, - - /// The region `R0`. - pub(crate) member_region_vid: ty::RegionVid, - - /// Index of `R1` in `choice_regions` vector from `MemberConstraintSet`. - start_index: usize, - - /// Index of `Rn` in `choice_regions` vector from `MemberConstraintSet`. - end_index: usize, -} - -rustc_index::newtype_index! { - #[debug_format = "MemberConstraintIndex({})"] - pub(crate) struct NllMemberConstraintIndex {} -} - -impl Default for MemberConstraintSet<'_, ty::RegionVid> { - fn default() -> Self { - Self { - first_constraints: Default::default(), - constraints: Default::default(), - choice_regions: Default::default(), - } - } -} - -impl<'tcx> MemberConstraintSet<'tcx, ty::RegionVid> { - pub(crate) fn is_empty(&self) -> bool { - self.constraints.is_empty() - } - - /// Pushes a member constraint into the set. - #[instrument(level = "debug", skip(self))] - pub(crate) fn add_member_constraint( - &mut self, - key: ty::OpaqueTypeKey<'tcx>, - hidden_ty: Ty<'tcx>, - definition_span: Span, - member_region_vid: ty::RegionVid, - choice_regions: &[ty::RegionVid], - ) { - let next_constraint = self.first_constraints.get(&member_region_vid).cloned(); - let start_index = self.choice_regions.len(); - self.choice_regions.extend(choice_regions); - let end_index = self.choice_regions.len(); - let constraint_index = self.constraints.push(MemberConstraint { - next_constraint, - member_region_vid, - definition_span, - hidden_ty, - key, - start_index, - end_index, - }); - self.first_constraints.insert(member_region_vid, constraint_index); - } -} - -impl<'tcx, R1> MemberConstraintSet<'tcx, R1> -where - R1: Copy + Hash + Eq, -{ - /// Remap the "member region" key using `map_fn`, producing a new - /// member constraint set. This is used in the NLL code to map from - /// the original `RegionVid` to an scc index. In some cases, we - /// may have multiple `R1` values mapping to the same `R2` key -- that - /// is ok, the two sets will be merged. - pub(crate) fn into_mapped<R2>( - self, - mut map_fn: impl FnMut(R1) -> R2, - ) -> MemberConstraintSet<'tcx, R2> - where - R2: Copy + Hash + Eq, - { - // We can re-use most of the original data, just tweaking the - // linked list links a bit. - // - // For example if we had two keys `Ra` and `Rb` that both now - // wind up mapped to the same key `S`, we would append the - // linked list for `Ra` onto the end of the linked list for - // `Rb` (or vice versa) -- this basically just requires - // rewriting the final link from one list to point at the other - // other (see `append_list`). 
- - let MemberConstraintSet { first_constraints, mut constraints, choice_regions } = self; - - let mut first_constraints2 = FxIndexMap::default(); - first_constraints2.reserve(first_constraints.len()); - - for (r1, start1) in first_constraints { - let r2 = map_fn(r1); - if let Some(&start2) = first_constraints2.get(&r2) { - append_list(&mut constraints, start1, start2); - } - first_constraints2.insert(r2, start1); - } - - MemberConstraintSet { first_constraints: first_constraints2, constraints, choice_regions } - } -} - -impl<'tcx, R> MemberConstraintSet<'tcx, R> -where - R: Copy + Hash + Eq, -{ - pub(crate) fn all_indices(&self) -> impl Iterator<Item = NllMemberConstraintIndex> { - self.constraints.indices() - } - - /// Iterate down the constraint indices associated with a given - /// peek-region. You can then use `choice_regions` and other - /// methods to access data. - pub(crate) fn indices( - &self, - member_region_vid: R, - ) -> impl Iterator<Item = NllMemberConstraintIndex> { - let mut next = self.first_constraints.get(&member_region_vid).cloned(); - std::iter::from_fn(move || -> Option<NllMemberConstraintIndex> { - if let Some(current) = next { - next = self.constraints[current].next_constraint; - Some(current) - } else { - None - } - }) - } - - /// Returns the "choice regions" for a given member - /// constraint. This is the `R1..Rn` from a constraint like: - /// - /// ```text - /// R0 member of [R1..Rn] - /// ``` - pub(crate) fn choice_regions(&self, pci: NllMemberConstraintIndex) -> &[ty::RegionVid] { - let MemberConstraint { start_index, end_index, .. } = &self.constraints[pci]; - &self.choice_regions[*start_index..*end_index] - } -} - -impl<'tcx, R> Index<NllMemberConstraintIndex> for MemberConstraintSet<'tcx, R> -where - R: Copy + Eq, -{ - type Output = MemberConstraint<'tcx>; - - fn index(&self, i: NllMemberConstraintIndex) -> &MemberConstraint<'tcx> { - &self.constraints[i] - } -} - -/// Given a linked list starting at `source_list` and another linked -/// list starting at `target_list`, modify `target_list` so that it is -/// followed by `source_list`. 
-/// -/// Before: -/// -/// ```text -/// target_list: A -> B -> C -> (None) -/// source_list: D -> E -> F -> (None) -/// ``` -/// -/// After: -/// -/// ```text -/// target_list: A -> B -> C -> D -> E -> F -> (None) -/// ``` -fn append_list( - constraints: &mut IndexSlice<NllMemberConstraintIndex, MemberConstraint<'_>>, - target_list: NllMemberConstraintIndex, - source_list: NllMemberConstraintIndex, -) { - let mut p = target_list; - loop { - let r = &mut constraints[p]; - match r.next_constraint { - Some(q) => p = q, - None => { - r.next_constraint = Some(source_list); - return; - } - } - } -} diff --git a/compiler/rustc_borrowck/src/region_infer/graphviz.rs b/compiler/rustc_borrowck/src/region_infer/graphviz.rs index b2f67c43125..526e1850c2e 100644 --- a/compiler/rustc_borrowck/src/region_infer/graphviz.rs +++ b/compiler/rustc_borrowck/src/region_infer/graphviz.rs @@ -12,9 +12,13 @@ use rustc_middle::ty::UniverseIndex; use super::*; fn render_outlives_constraint(constraint: &OutlivesConstraint<'_>) -> String { - match constraint.locations { - Locations::All(_) => "All(...)".to_string(), - Locations::Single(loc) => format!("{loc:?}"), + if let ConstraintCategory::OutlivesUnnameablePlaceholder(unnameable) = constraint.category { + format!("{unnameable:?} unnameable") + } else { + match constraint.locations { + Locations::All(_) => "All(...)".to_string(), + Locations::Single(loc) => format!("{loc:?}"), + } } } diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index b0c31ac9601..9990f4cb3f2 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -1,11 +1,9 @@ -use std::cell::OnceCell; use std::collections::VecDeque; use std::rc::Rc; -use rustc_data_structures::binary_search_util; use rustc_data_structures::frozen::Frozen; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; -use rustc_data_structures::graph::scc::{self, Sccs}; +use rustc_data_structures::graph::scc::Sccs; use rustc_errors::Diag; use rustc_hir::def_id::CRATE_DEF_ID; use rustc_index::IndexVec; @@ -24,15 +22,13 @@ use rustc_span::hygiene::DesugaringKind; use rustc_span::{DUMMY_SP, Span}; use tracing::{Level, debug, enabled, instrument, trace}; -use crate::constraints::graph::{self, NormalConstraintGraph, RegionGraph}; +use crate::constraints::graph::NormalConstraintGraph; use crate::constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet}; use crate::dataflow::BorrowIndex; use crate::diagnostics::{RegionErrorKind, RegionErrors, UniverseInfo}; use crate::handle_placeholders::{LoweredConstraints, RegionTracker}; -use crate::member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}; use crate::polonius::LiveLoans; use crate::polonius::legacy::PoloniusOutput; -use crate::region_infer::reverse_sccs::ReverseSccGraph; use crate::region_infer::values::{LivenessValues, RegionElement, RegionValues, ToElementIndex}; use crate::type_check::Locations; use crate::type_check::free_region_relations::UniversalRegionRelations; @@ -78,18 +74,6 @@ impl Representative { } } -impl scc::Annotation for Representative { - fn merge_scc(self, other: Self) -> Self { - // Just pick the smallest one. Note that we order by tag first! - std::cmp::min(self, other) - } - - // For reachability, we do nothing since the representative doesn't change. 
- fn merge_reached(self, _other: Self) -> Self { - self - } -} - pub(crate) type ConstraintSccs = Sccs<RegionVid, ConstraintSccIndex>; pub struct RegionInferenceContext<'tcx> { @@ -120,20 +104,6 @@ pub struct RegionInferenceContext<'tcx> { scc_annotations: IndexVec<ConstraintSccIndex, RegionTracker>, - /// Reverse of the SCC constraint graph -- i.e., an edge `A -> B` exists if - /// `B: A`. This is used to compute the universal regions that are required - /// to outlive a given SCC. - rev_scc_graph: OnceCell<ReverseSccGraph>, - - /// The "R0 member of [R1..Rn]" constraints, indexed by SCC. - member_constraints: Rc<MemberConstraintSet<'tcx, ConstraintSccIndex>>, - - /// Records the member constraints that we applied to each scc. - /// This is useful for error reporting. Once constraint - /// propagation is done, this vector is sorted according to - /// `member_region_scc`. - member_constraints_applied: Vec<AppliedMemberConstraint>, - /// Map universe indexes to information on why we created it. universe_causes: FxIndexMap<ty::UniverseIndex, UniverseInfo<'tcx>>, @@ -150,32 +120,6 @@ pub struct RegionInferenceContext<'tcx> { universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>, } -/// Each time that `apply_member_constraint` is successful, it appends -/// one of these structs to the `member_constraints_applied` field. -/// This is used in error reporting to trace out what happened. -/// -/// The way that `apply_member_constraint` works is that it effectively -/// adds a new lower bound to the SCC it is analyzing: so you wind up -/// with `'R: 'O` where `'R` is the pick-region and `'O` is the -/// minimal viable option. -#[derive(Debug)] -pub(crate) struct AppliedMemberConstraint { - /// The SCC that was affected. (The "member region".) - /// - /// The vector if `AppliedMemberConstraint` elements is kept sorted - /// by this field. - pub(crate) member_region_scc: ConstraintSccIndex, - - /// The "best option" that `apply_member_constraint` found -- this was - /// added as an "ad-hoc" lower-bound to `member_region_scc`. - pub(crate) min_choice: ty::RegionVid, - - /// The "member constraint index" -- we can find out details about - /// the constraint from - /// `set.member_constraints[member_constraint_index]`. - pub(crate) member_constraint_index: NllMemberConstraintIndex, -} - #[derive(Debug)] pub(crate) struct RegionDefinition<'tcx> { /// What kind of variable is this -- a free region? 
existential @@ -268,7 +212,6 @@ enum Trace<'a, 'tcx> { StartRegion, FromGraph(&'a OutlivesConstraint<'tcx>), FromStatic(RegionVid), - FromMember(RegionVid, RegionVid, Span), NotVisited, } @@ -363,7 +306,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { liveness_constraints, universe_causes, placeholder_indices, - member_constraints, } = lowered_constraints; debug!("universal_regions: {:#?}", universal_region_relations.universal_regions); @@ -385,9 +327,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { scc_values.merge_liveness(scc, region, &liveness_constraints); } - let member_constraints = - Rc::new(member_constraints.into_mapped(|r| constraint_sccs.scc(r))); - let mut result = Self { definitions, liveness_constraints, @@ -395,9 +334,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { constraint_graph, constraint_sccs, scc_annotations, - rev_scc_graph: OnceCell::new(), - member_constraints, - member_constraints_applied: Vec::new(), universe_causes, scc_values, type_tests, @@ -550,19 +486,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.scc_values.placeholders_contained_in(scc) } - /// Once region solving has completed, this function will return the member constraints that - /// were applied to the value of a given SCC `scc`. See `AppliedMemberConstraint`. - pub(crate) fn applied_member_constraints( - &self, - scc: ConstraintSccIndex, - ) -> &[AppliedMemberConstraint] { - binary_search_util::binary_search_slice( - &self.member_constraints_applied, - |applied| applied.member_region_scc, - &scc, - ) - } - /// Performs region inference and report errors if we see any /// unsatisfiable constraints. If this is a closure, returns the /// region requirements to propagate to our creator, if any. @@ -607,12 +530,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { debug!(?errors_buffer); - if errors_buffer.is_empty() { - self.check_member_constraints(infcx, &mut errors_buffer); - } - - debug!(?errors_buffer); - let outlives_requirements = outlives_requirements.unwrap_or_default(); if outlives_requirements.is_empty() { @@ -642,146 +559,15 @@ impl<'tcx> RegionInferenceContext<'tcx> { }); // To propagate constraints, we walk the DAG induced by the - // SCC. For each SCC, we visit its successors and compute + // SCC. For each SCC `A`, we visit its successors and compute // their values, then we union all those values to get our // own. - for scc in self.constraint_sccs.all_sccs() { - self.compute_value_for_scc(scc); - } - - // Sort the applied member constraints so we can binary search - // through them later. - self.member_constraints_applied.sort_by_key(|applied| applied.member_region_scc); - } - - /// Computes the value of the SCC `scc_a`, which has not yet been - /// computed, by unioning the values of its successors. - /// Assumes that all successors have been computed already - /// (which is assured by iterating over SCCs in dependency order). - #[instrument(skip(self), level = "debug")] - fn compute_value_for_scc(&mut self, scc_a: ConstraintSccIndex) { - // Walk each SCC `B` such that `A: B`... - for &scc_b in self.constraint_sccs.successors(scc_a) { - debug!(?scc_b); - self.scc_values.add_region(scc_a, scc_b); - } - - // Now take member constraints into account. - let member_constraints = Rc::clone(&self.member_constraints); - for m_c_i in member_constraints.indices(scc_a) { - self.apply_member_constraint(scc_a, m_c_i, member_constraints.choice_regions(m_c_i)); - } - - debug!(value = ?self.scc_values.region_value_str(scc_a)); - } - - /// Invoked for each `R0 member of [R1..Rn]` constraint. 
- /// - /// `scc` is the SCC containing R0, and `choice_regions` are the - /// `R1..Rn` regions -- they are always known to be universal - /// regions (and if that's not true, we just don't attempt to - /// enforce the constraint). - /// - /// The current value of `scc` at the time the method is invoked - /// is considered a *lower bound*. If possible, we will modify - /// the constraint to set it equal to one of the option regions. - /// If we make any changes, returns true, else false. - /// - /// This function only adds the member constraints to the region graph, - /// it does not check them. They are later checked in - /// `check_member_constraints` after the region graph has been computed. - #[instrument(skip(self, member_constraint_index), level = "debug")] - fn apply_member_constraint( - &mut self, - scc: ConstraintSccIndex, - member_constraint_index: NllMemberConstraintIndex, - choice_regions: &[ty::RegionVid], - ) { - // Create a mutable vector of the options. We'll try to winnow - // them down. - let mut choice_regions: Vec<ty::RegionVid> = choice_regions.to_vec(); - - // Convert to the SCC representative: sometimes we have inference - // variables in the member constraint that wind up equated with - // universal regions. The scc representative is the minimal numbered - // one from the corresponding scc so it will be the universal region - // if one exists. - for c_r in &mut choice_regions { - let scc = self.constraint_sccs.scc(*c_r); - *c_r = self.scc_representative(scc); - } - - // If the member region lives in a higher universe, we currently choose - // the most conservative option by leaving it unchanged. - if !self.max_nameable_universe(scc).is_root() { - return; - } - - // The existing value for `scc` is a lower-bound. This will - // consist of some set `{P} + {LB}` of points `{P}` and - // lower-bound free regions `{LB}`. As each choice region `O` - // is a free region, it will outlive the points. But we can - // only consider the option `O` if `O: LB`. - choice_regions.retain(|&o_r| { - self.scc_values - .universal_regions_outlived_by(scc) - .all(|lb| self.universal_region_relations.outlives(o_r, lb)) - }); - debug!(?choice_regions, "after lb"); - - // Now find all the *upper bounds* -- that is, each UB is a - // free region that must outlive the member region `R0` (`UB: - // R0`). Therefore, we need only keep an option `O` if `UB: O` - // for all UB. - let universal_region_relations = &self.universal_region_relations; - for ub in self.reverse_scc_graph().upper_bounds(scc) { - debug!(?ub); - choice_regions.retain(|&o_r| universal_region_relations.outlives(ub, o_r)); - } - debug!(?choice_regions, "after ub"); - - // At this point we can pick any member of `choice_regions` and would like to choose - // it to be a small as possible. To avoid potential non-determinism we will pick the - // smallest such choice. - // - // Because universal regions are only partially ordered (i.e, not every two regions are - // comparable), we will ignore any region that doesn't compare to all others when picking - // the minimum choice. - // - // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where - // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`. - // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`. 
- let totally_ordered_subset = choice_regions.iter().copied().filter(|&r1| { - choice_regions.iter().all(|&r2| { - self.universal_region_relations.outlives(r1, r2) - || self.universal_region_relations.outlives(r2, r1) - }) - }); - // Now we're left with `['static, 'c]`. Pick `'c` as the minimum! - let Some(min_choice) = totally_ordered_subset.reduce(|r1, r2| { - let r1_outlives_r2 = self.universal_region_relations.outlives(r1, r2); - let r2_outlives_r1 = self.universal_region_relations.outlives(r2, r1); - match (r1_outlives_r2, r2_outlives_r1) { - (true, true) => r1.min(r2), - (true, false) => r2, - (false, true) => r1, - (false, false) => bug!("incomparable regions in total order"), + for scc_a in self.constraint_sccs.all_sccs() { + // Walk each SCC `B` such that `A: B`... + for &scc_b in self.constraint_sccs.successors(scc_a) { + debug!(?scc_b); + self.scc_values.add_region(scc_a, scc_b); } - }) else { - debug!("no unique minimum choice"); - return; - }; - - // As we require `'scc: 'min_choice`, we have definitely already computed - // its `scc_values` at this point. - let min_choice_scc = self.constraint_sccs.scc(min_choice); - debug!(?min_choice, ?min_choice_scc); - if self.scc_values.add_region(scc, min_choice_scc) { - self.member_constraints_applied.push(AppliedMemberConstraint { - member_region_scc: scc, - min_choice, - member_constraint_index, - }); } } @@ -1487,11 +1273,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { { debug!("try_propagate_universal_region_error: fr_minus={:?}", fr_minus); - let blame_span_category = self.find_outlives_blame_span( - longer_fr, - NllRegionVariableOrigin::FreeRegion, - shorter_fr, - ); + let blame_constraint = self + .best_blame_constraint(longer_fr, NllRegionVariableOrigin::FreeRegion, shorter_fr) + .0; // Grow `shorter_fr` until we find some non-local regions. (We // always will.) We'll call them `shorter_fr+` -- they're ever @@ -1504,8 +1288,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { propagated_outlives_requirements.push(ClosureOutlivesRequirement { subject: ClosureOutlivesSubject::Region(fr_minus), outlived_free_region: fr, - blame_span: blame_span_category.1.span, - category: blame_span_category.0, + blame_span: blame_constraint.cause.span, + category: blame_constraint.category, }); } return RegionRelationCheckResult::Propagated; @@ -1544,118 +1328,67 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - #[instrument(level = "debug", skip(self, infcx, errors_buffer))] - fn check_member_constraints( - &self, - infcx: &InferCtxt<'tcx>, - errors_buffer: &mut RegionErrors<'tcx>, - ) { - let member_constraints = Rc::clone(&self.member_constraints); - for m_c_i in member_constraints.all_indices() { - debug!(?m_c_i); - let m_c = &member_constraints[m_c_i]; - let member_region_vid = m_c.member_region_vid; - debug!( - ?member_region_vid, - value = ?self.region_value_str(member_region_vid), - ); - let choice_regions = member_constraints.choice_regions(m_c_i); - debug!(?choice_regions); - - // Did the member region wind up equal to any of the option regions? - if let Some(o) = - choice_regions.iter().find(|&&o_r| self.eval_equal(o_r, m_c.member_region_vid)) - { - debug!("evaluated as equal to {:?}", o); - continue; - } - - // If not, report an error. 
- let member_region = ty::Region::new_var(infcx.tcx, member_region_vid); - errors_buffer.push(RegionErrorKind::UnexpectedHiddenRegion { - span: m_c.definition_span, - hidden_ty: m_c.hidden_ty, - key: m_c.key, - member_region, - }); - } - } - - /// We have a constraint `fr1: fr2` that is not satisfied, where - /// `fr2` represents some universal region. Here, `r` is some - /// region where we know that `fr1: r` and this function has the - /// job of determining whether `r` is "to blame" for the fact that - /// `fr1: fr2` is required. - /// - /// This is true under two conditions: - /// - /// - `r == fr2` - /// - `fr2` is `'static` and `r` is some placeholder in a universe - /// that cannot be named by `fr1`; in that case, we will require - /// that `fr1: 'static` because it is the only way to `fr1: r` to - /// be satisfied. (See `add_incompatible_universe`.) - pub(crate) fn provides_universal_region( + pub(crate) fn constraint_path_between_regions( &self, - r: RegionVid, - fr1: RegionVid, - fr2: RegionVid, - ) -> bool { - debug!("provides_universal_region(r={:?}, fr1={:?}, fr2={:?})", r, fr1, fr2); - let result = { - r == fr2 || { - fr2 == self.universal_regions().fr_static && self.cannot_name_placeholder(fr1, r) - } - }; - debug!("provides_universal_region: result = {:?}", result); - result - } - - /// If `r2` represents a placeholder region, then this returns - /// `true` if `r1` cannot name that placeholder in its - /// value; otherwise, returns `false`. - pub(crate) fn cannot_name_placeholder(&self, r1: RegionVid, r2: RegionVid) -> bool { - match self.definitions[r2].origin { - NllRegionVariableOrigin::Placeholder(placeholder) => { - let r1_universe = self.definitions[r1].universe; - debug!( - "cannot_name_value_of: universe1={r1_universe:?} placeholder={:?}", - placeholder - ); - r1_universe.cannot_name(placeholder.universe) - } - - NllRegionVariableOrigin::FreeRegion | NllRegionVariableOrigin::Existential { .. } => { - false - } + from_region: RegionVid, + to_region: RegionVid, + ) -> Option<Vec<OutlivesConstraint<'tcx>>> { + if from_region == to_region { + bug!("Tried to find a path between {from_region:?} and itself!"); } - } - - /// Finds a good `ObligationCause` to blame for the fact that `fr1` outlives `fr2`. - pub(crate) fn find_outlives_blame_span( - &self, - fr1: RegionVid, - fr1_origin: NllRegionVariableOrigin, - fr2: RegionVid, - ) -> (ConstraintCategory<'tcx>, ObligationCause<'tcx>) { - let BlameConstraint { category, cause, .. } = self - .best_blame_constraint(fr1, fr1_origin, |r| self.provides_universal_region(r, fr1, fr2)) - .0; - (category, cause) + self.constraint_path_to(from_region, |to| to == to_region, true).map(|o| o.0) } /// Walks the graph of constraints (where `'a: 'b` is considered - /// an edge `'a -> 'b`) to find all paths from `from_region` to - /// `to_region`. The paths are accumulated into the vector - /// `results`. The paths are stored as a series of - /// `ConstraintIndex` values -- in other words, a list of *edges*. + /// an edge `'a -> 'b`) to find a path from `from_region` to + /// `to_region`. /// /// Returns: a series of constraints as well as the region `R` /// that passed the target test. + /// If `include_static_outlives_all` is `true`, then the synthetic + /// outlives constraints `'static -> a` for every region `a` are + /// considered in the search, otherwise they are ignored. 
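    /// Illustrative note (not from this change): given the constraints
    /// `'a: 'b` and `'b: 'c`, searching from `'a` with a target test that
    /// matches `'c` yields the edges `['a -> 'b, 'b -> 'c]` together with `'c`.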
#[instrument(skip(self, target_test), ret)] - pub(crate) fn find_constraint_paths_between_regions( + pub(crate) fn constraint_path_to( + &self, + from_region: RegionVid, + target_test: impl Fn(RegionVid) -> bool, + include_placeholder_static: bool, + ) -> Option<(Vec<OutlivesConstraint<'tcx>>, RegionVid)> { + self.find_constraint_path_between_regions_inner( + true, + from_region, + &target_test, + include_placeholder_static, + ) + .or_else(|| { + self.find_constraint_path_between_regions_inner( + false, + from_region, + &target_test, + include_placeholder_static, + ) + }) + } + + /// The constraints we get from equating the hidden type of each use of an opaque + /// with its final concrete type may end up getting preferred over other, potentially + /// longer constraint paths. + /// + /// Given that we compute the final concrete type by relying on this existing constraint + /// path, this can easily end up hiding the actual reason for why we require these regions + /// to be equal. + /// + /// To handle this, we first look at the path while ignoring these constraints and then + /// retry while considering them. This is not perfect, as the `from_region` may have already + /// been partially related to its argument region, so while we rely on a member constraint + /// to get a complete path, the most relevant step of that path already existed before then. + fn find_constraint_path_between_regions_inner( &self, + ignore_opaque_type_constraints: bool, from_region: RegionVid, target_test: impl Fn(RegionVid) -> bool, + include_placeholder_static: bool, ) -> Option<(Vec<OutlivesConstraint<'tcx>>, RegionVid)> { let mut context = IndexVec::from_elem(Trace::NotVisited, &self.definitions); context[from_region] = Trace::StartRegion; @@ -1670,7 +1403,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { while let Some(r) = deque.pop_front() { debug!( - "find_constraint_paths_between_regions: from_region={:?} r={:?} value={}", + "constraint_path_to: from_region={:?} r={:?} value={}", from_region, r, self.region_value_str(r), @@ -1704,20 +1437,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { result.push(c); } - Trace::FromMember(sup, sub, span) => { - let c = OutlivesConstraint { - sup, - sub, - locations: Locations::All(span), - span, - category: ConstraintCategory::OpaqueType, - variance_info: ty::VarianceDiagInfo::default(), - from_closure: false, - }; - p = c.sup; - result.push(c); - } - Trace::StartRegion => { result.reverse(); return Some((result, r)); @@ -1756,23 +1475,24 @@ impl<'tcx> RegionInferenceContext<'tcx> { let edges = self.constraint_graph.outgoing_edges_from_graph(r, &self.constraints); // This loop can be hot. for constraint in edges { - if matches!(constraint.category, ConstraintCategory::IllegalUniverse) { - debug!("Ignoring illegal universe constraint: {constraint:?}"); - continue; + match constraint.category { + ConstraintCategory::OutlivesUnnameablePlaceholder(_) + if !include_placeholder_static => + { + debug!("Ignoring illegal placeholder constraint: {constraint:?}"); + continue; + } + ConstraintCategory::OpaqueType if ignore_opaque_type_constraints => { + debug!("Ignoring member constraint: {constraint:?}"); + continue; + } + _ => {} } + debug_assert_eq!(constraint.sup, r); handle_trace(constraint.sub, Trace::FromGraph(constraint)); } } - - // Member constraints can also give rise to `'r: 'x` edges that - // were not part of the graph initially, so watch out for those. - // (But they are extremely rare; this loop is very cold.) 
- for constraint in self.applied_member_constraints(self.constraint_sccs.scc(r)) { - let sub = constraint.min_choice; - let p_c = &self.member_constraints[constraint.member_constraint_index]; - handle_trace(sub, Trace::FromMember(r, sub, p_c.definition_span)); - } } None @@ -1783,37 +1503,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { pub(crate) fn find_sub_region_live_at(&self, fr1: RegionVid, location: Location) -> RegionVid { trace!(scc = ?self.constraint_sccs.scc(fr1)); trace!(universe = ?self.max_nameable_universe(self.constraint_sccs.scc(fr1))); - self.find_constraint_paths_between_regions(fr1, |r| { - // First look for some `r` such that `fr1: r` and `r` is live at `location` + self.constraint_path_to(fr1, |r| { trace!(?r, liveness_constraints=?self.liveness_constraints.pretty_print_live_points(r)); self.liveness_constraints.is_live_at(r, location) - }) - .or_else(|| { - // If we fail to find that, we may find some `r` such that - // `fr1: r` and `r` is a placeholder from some universe - // `fr1` cannot name. This would force `fr1` to be - // `'static`. - self.find_constraint_paths_between_regions(fr1, |r| { - self.cannot_name_placeholder(fr1, r) - }) - }) - .or_else(|| { - // If we fail to find THAT, it may be that `fr1` is a - // placeholder that cannot "fit" into its SCC. In that - // case, there should be some `r` where `fr1: r` and `fr1` is a - // placeholder that `r` cannot name. We can blame that - // edge. - // - // Remember that if `R1: R2`, then the universe of R1 - // must be able to name the universe of R2, because R2 will - // be at least `'empty(Universe(R2))`, and `R1` must be at - // larger than that. - self.find_constraint_paths_between_regions(fr1, |r| { - self.cannot_name_placeholder(r, fr1) - }) - }) - .map(|(_path, r)| r) - .unwrap() + }, true).unwrap().1 } /// Get the region outlived by `longer_fr` and live at `element`. @@ -1857,22 +1550,38 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// creating a constraint path that forces `R` to outlive /// `from_region`, and then finding the best choices within that /// path to blame. - #[instrument(level = "debug", skip(self, target_test))] + #[instrument(level = "debug", skip(self))] pub(crate) fn best_blame_constraint( &self, from_region: RegionVid, from_region_origin: NllRegionVariableOrigin, - target_test: impl Fn(RegionVid) -> bool, + to_region: RegionVid, ) -> (BlameConstraint<'tcx>, Vec<OutlivesConstraint<'tcx>>) { - // Find all paths - let (path, target_region) = self - .find_constraint_paths_between_regions(from_region, target_test) - .or_else(|| { - self.find_constraint_paths_between_regions(from_region, |r| { - self.cannot_name_placeholder(from_region, r) - }) - }) - .unwrap(); + assert!(from_region != to_region, "Trying to blame a region for itself!"); + + let path = self.constraint_path_between_regions(from_region, to_region).unwrap(); + + // If we are passing through a constraint added because we reached an unnameable placeholder `'unnameable`, + // redirect search towards `'unnameable`. + let due_to_placeholder_outlives = path.iter().find_map(|c| { + if let ConstraintCategory::OutlivesUnnameablePlaceholder(unnameable) = c.category { + Some(unnameable) + } else { + None + } + }); + + // Edge case: it's possible that `'from_region` is an unnameable placeholder. + let path = if let Some(unnameable) = due_to_placeholder_outlives + && unnameable != from_region + { + // We ignore the extra edges due to unnameable placeholders to get + // an explanation that was present in the original constraint graph. 
+ self.constraint_path_to(from_region, |r| r == unnameable, false).unwrap().0 + } else { + path + }; + debug!( "path={:#?}", path.iter() @@ -1980,7 +1689,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { ConstraintCategory::Cast { unsize_to: Some(unsize_ty), is_implicit_coercion: true, - } if target_region == self.universal_regions().fr_static + } if to_region == self.universal_regions().fr_static // Mirror the note's condition, to minimize how often this diverts blame. && let ty::Adt(_, args) = unsize_ty.kind() && args.iter().any(|arg| arg.as_type().is_some_and(|ty| ty.is_trait())) @@ -2018,7 +1727,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // specific, and are not used for relations that would make sense to blame. ConstraintCategory::BoringNoLocation => 6, // Do not blame internal constraints. - ConstraintCategory::IllegalUniverse => 7, + ConstraintCategory::OutlivesUnnameablePlaceholder(_) => 7, ConstraintCategory::Internal => 8, }; @@ -2055,6 +1764,14 @@ impl<'tcx> RegionInferenceContext<'tcx> { path[best_choice] }; + assert!( + !matches!( + best_constraint.category, + ConstraintCategory::OutlivesUnnameablePlaceholder(_) + ), + "Illegal placeholder constraint blamed; should have redirected to other region relation" + ); + let blame_constraint = BlameConstraint { category: best_constraint.category, from_closure: best_constraint.from_closure, @@ -2104,11 +1821,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { &self.constraint_sccs } - /// Access to the region graph, built from the outlives constraints. - pub(crate) fn region_graph(&self) -> RegionGraph<'_, 'tcx, graph::Normal> { - self.constraint_graph.region_graph(&self.constraints, self.universal_regions().fr_static) - } - /// Returns the representative `RegionVid` for a given SCC. /// See `RegionTracker` for how a region variable ID is chosen. /// diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs deleted file mode 100644 index 23c4554aa15..00000000000 --- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs +++ /dev/null @@ -1,299 +0,0 @@ -use rustc_data_structures::fx::FxIndexMap; -use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin}; -use rustc_macros::extension; -use rustc_middle::ty::{ - self, DefiningScopeKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, - TypeVisitableExt, fold_regions, -}; -use rustc_span::Span; -use rustc_trait_selection::opaque_types::{ - InvalidOpaqueTypeArgs, check_opaque_type_parameter_valid, -}; -use tracing::{debug, instrument}; - -use super::RegionInferenceContext; -use crate::BorrowCheckRootCtxt; -use crate::session_diagnostics::LifetimeMismatchOpaqueParam; -use crate::universal_regions::RegionClassification; - -pub(crate) enum DeferredOpaqueTypeError<'tcx> { - InvalidOpaqueTypeArgs(InvalidOpaqueTypeArgs<'tcx>), - LifetimeMismatchOpaqueParam(LifetimeMismatchOpaqueParam<'tcx>), -} - -impl<'tcx> RegionInferenceContext<'tcx> { - /// Resolve any opaque types that were encountered while borrow checking - /// this item. This is then used to get the type in the `type_of` query. - /// - /// For example consider `fn f<'a>(x: &'a i32) -> impl Sized + 'a { x }`. - /// This is lowered to give HIR something like - /// - /// type f<'a>::_Return<'_x> = impl Sized + '_x; - /// fn f<'a>(x: &'a i32) -> f<'a>::_Return<'a> { x } - /// - /// When checking the return type record the type from the return and the - /// type used in the return value. 
In this case they might be `_Return<'1>` - /// and `&'2 i32` respectively. - /// - /// Once we to this method, we have completed region inference and want to - /// call `infer_opaque_definition_from_instantiation` to get the inferred - /// type of `_Return<'_x>`. `infer_opaque_definition_from_instantiation` - /// compares lifetimes directly, so we need to map the inference variables - /// back to concrete lifetimes: `'static`, `ReEarlyParam` or `ReLateParam`. - /// - /// First we map the regions in the generic parameters `_Return<'1>` to - /// their `external_name` giving `_Return<'a>`. This step is a bit involved. - /// See the [rustc-dev-guide chapter] for more info. - /// - /// Then we map all the lifetimes in the concrete type to an equal - /// universal region that occurs in the opaque type's args, in this case - /// this would result in `&'a i32`. We only consider regions in the args - /// in case there is an equal region that does not. For example, this should - /// be allowed: - /// `fn f<'a: 'b, 'b: 'a>(x: *mut &'b i32) -> impl Sized + 'a { x }` - /// - /// This will then allow `infer_opaque_definition_from_instantiation` to - /// determine that `_Return<'_x> = &'_x i32`. - /// - /// There's a slight complication around closures. Given - /// `fn f<'a: 'a>() { || {} }` the closure's type is something like - /// `f::<'a>::{{closure}}`. The region parameter from f is essentially - /// ignored by type checking so ends up being inferred to an empty region. - /// Calling `universal_upper_bound` for such a region gives `fr_fn_body`, - /// which has no `external_name` in which case we use `'{erased}` as the - /// region to pass to `infer_opaque_definition_from_instantiation`. - /// - /// [rustc-dev-guide chapter]: - /// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html - #[instrument(level = "debug", skip(self, root_cx, infcx))] - pub(crate) fn infer_opaque_types( - &self, - root_cx: &mut BorrowCheckRootCtxt<'tcx>, - infcx: &InferCtxt<'tcx>, - opaque_ty_decls: FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>>, - ) -> Vec<DeferredOpaqueTypeError<'tcx>> { - let mut errors = Vec::new(); - let mut decls_modulo_regions: FxIndexMap<OpaqueTypeKey<'tcx>, (OpaqueTypeKey<'tcx>, Span)> = - FxIndexMap::default(); - - for (opaque_type_key, concrete_type) in opaque_ty_decls { - debug!(?opaque_type_key, ?concrete_type); - - let mut arg_regions: Vec<(ty::RegionVid, ty::Region<'_>)> = - vec![(self.universal_regions().fr_static, infcx.tcx.lifetimes.re_static)]; - - let opaque_type_key = - opaque_type_key.fold_captured_lifetime_args(infcx.tcx, |region| { - // Use the SCC representative instead of directly using `region`. - // See [rustc-dev-guide chapter] § "Strict lifetime equality". - let scc = self.constraint_sccs.scc(region.as_var()); - let vid = self.scc_representative(scc); - let named = match self.definitions[vid].origin { - // Iterate over all universal regions in a consistent order and find the - // *first* equal region. This makes sure that equal lifetimes will have - // the same name and simplifies subsequent handling. - // See [rustc-dev-guide chapter] § "Semantic lifetime equality". - NllRegionVariableOrigin::FreeRegion => self - .universal_regions() - .universal_regions_iter() - .filter(|&ur| { - // See [rustc-dev-guide chapter] § "Closure restrictions". 
- !matches!( - self.universal_regions().region_classification(ur), - Some(RegionClassification::External) - ) - }) - .find(|&ur| self.universal_region_relations.equal(vid, ur)) - .map(|ur| self.definitions[ur].external_name.unwrap()), - NllRegionVariableOrigin::Placeholder(placeholder) => { - Some(ty::Region::new_placeholder(infcx.tcx, placeholder)) - } - NllRegionVariableOrigin::Existential { .. } => None, - } - .unwrap_or_else(|| { - ty::Region::new_error_with_message( - infcx.tcx, - concrete_type.span, - "opaque type with non-universal region args", - ) - }); - - arg_regions.push((vid, named)); - named - }); - debug!(?opaque_type_key, ?arg_regions); - - let concrete_type = fold_regions(infcx.tcx, concrete_type, |region, _| { - arg_regions - .iter() - .find(|&&(arg_vid, _)| self.eval_equal(region.as_var(), arg_vid)) - .map(|&(_, arg_named)| arg_named) - .unwrap_or(infcx.tcx.lifetimes.re_erased) - }); - debug!(?concrete_type); - - let ty = match infcx - .infer_opaque_definition_from_instantiation(opaque_type_key, concrete_type) - { - Ok(ty) => ty, - Err(err) => { - errors.push(DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err)); - continue; - } - }; - - // Sometimes, when the hidden type is an inference variable, it can happen that - // the hidden type becomes the opaque type itself. In this case, this was an opaque - // usage of the opaque type and we can ignore it. This check is mirrored in typeck's - // writeback. - if !infcx.next_trait_solver() { - if let ty::Alias(ty::Opaque, alias_ty) = ty.kind() - && alias_ty.def_id == opaque_type_key.def_id.to_def_id() - && alias_ty.args == opaque_type_key.args - { - continue; - } - } - - root_cx.add_concrete_opaque_type( - opaque_type_key.def_id, - OpaqueHiddenType { span: concrete_type.span, ty }, - ); - - // Check that all opaque types have the same region parameters if they have the same - // non-region parameters. This is necessary because within the new solver we perform - // various query operations modulo regions, and thus could unsoundly select some impls - // that don't hold. - if let Some((prev_decl_key, prev_span)) = decls_modulo_regions.insert( - infcx.tcx.erase_regions(opaque_type_key), - (opaque_type_key, concrete_type.span), - ) && let Some((arg1, arg2)) = std::iter::zip( - prev_decl_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg), - opaque_type_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg), - ) - .find(|(arg1, arg2)| arg1 != arg2) - { - errors.push(DeferredOpaqueTypeError::LifetimeMismatchOpaqueParam( - LifetimeMismatchOpaqueParam { - arg: arg1, - prev: arg2, - span: prev_span, - prev_span: concrete_type.span, - }, - )); - } - } - - errors - } - - /// Map the regions in the type to named regions. This is similar to what - /// `infer_opaque_types` does, but can infer any universal region, not only - /// ones from the args for the opaque type. It also doesn't double check - /// that the regions produced are in fact equal to the named region they are - /// replaced with. This is fine because this function is only to improve the - /// region names in error messages. - /// - /// This differs from `MirBorrowckCtxt::name_regions` since it is particularly - /// lax with mapping region vids that are *shorter* than a universal region to - /// that universal region. This is useful for member region constraints since - /// we want to suggest a universal region name to capture even if it's technically - /// not equal to the error region. 
- pub(crate) fn name_regions_for_member_constraint<T>(&self, tcx: TyCtxt<'tcx>, ty: T) -> T - where - T: TypeFoldable<TyCtxt<'tcx>>, - { - fold_regions(tcx, ty, |region, _| match region.kind() { - ty::ReVar(vid) => { - let scc = self.constraint_sccs.scc(vid); - - // Special handling of higher-ranked regions. - if !self.max_nameable_universe(scc).is_root() { - match self.scc_values.placeholders_contained_in(scc).enumerate().last() { - // If the region contains a single placeholder then they're equal. - Some((0, placeholder)) => { - return ty::Region::new_placeholder(tcx, placeholder); - } - - // Fallback: this will produce a cryptic error message. - _ => return region, - } - } - - // Find something that we can name - let upper_bound = self.approx_universal_upper_bound(vid); - if let Some(universal_region) = self.definitions[upper_bound].external_name { - return universal_region; - } - - // Nothing exact found, so we pick a named upper bound, if there's only one. - // If there's >1 universal region, then we probably are dealing w/ an intersection - // region which cannot be mapped back to a universal. - // FIXME: We could probably compute the LUB if there is one. - let scc = self.constraint_sccs.scc(vid); - let upper_bounds: Vec<_> = self - .reverse_scc_graph() - .upper_bounds(scc) - .filter_map(|vid| self.definitions[vid].external_name) - .filter(|r| !r.is_static()) - .collect(); - match &upper_bounds[..] { - [universal_region] => *universal_region, - _ => region, - } - } - _ => region, - }) - } -} - -#[extension(pub trait InferCtxtExt<'tcx>)] -impl<'tcx> InferCtxt<'tcx> { - /// Given the fully resolved, instantiated type for an opaque - /// type, i.e., the value of an inference variable like C1 or C2 - /// (*), computes the "definition type" for an opaque type - /// definition -- that is, the inferred value of `Foo1<'x>` or - /// `Foo2<'x>` that we would conceptually use in its definition: - /// ```ignore (illustrative) - /// type Foo1<'x> = impl Bar<'x> = AAA; // <-- this type AAA - /// type Foo2<'x> = impl Bar<'x> = BBB; // <-- or this type BBB - /// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } - /// ``` - /// Note that these values are defined in terms of a distinct set of - /// generic parameters (`'x` instead of `'a`) from C1 or C2. The main - /// purpose of this function is to do that translation. - /// - /// (*) C1 and C2 were introduced in the comments on - /// `register_member_constraints`. Read that comment for more context. 
- /// - /// # Parameters - /// - /// - `def_id`, the `impl Trait` type - /// - `args`, the args used to instantiate this opaque type - /// - `instantiated_ty`, the inferred type C1 -- fully resolved, lifted version of - /// `opaque_defn.concrete_ty` - #[instrument(level = "debug", skip(self))] - fn infer_opaque_definition_from_instantiation( - &self, - opaque_type_key: OpaqueTypeKey<'tcx>, - instantiated_ty: OpaqueHiddenType<'tcx>, - ) -> Result<Ty<'tcx>, InvalidOpaqueTypeArgs<'tcx>> { - check_opaque_type_parameter_valid( - self, - opaque_type_key, - instantiated_ty.span, - DefiningScopeKind::MirBorrowck, - )?; - - let definition_ty = instantiated_ty - .remap_generic_params_to_declaration_params( - opaque_type_key, - self.tcx, - DefiningScopeKind::MirBorrowck, - ) - .ty; - - definition_ty.error_reported()?; - Ok(definition_ty) - } -} diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/member_constraints.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/member_constraints.rs new file mode 100644 index 00000000000..667fc440ac0 --- /dev/null +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/member_constraints.rs @@ -0,0 +1,194 @@ +use rustc_data_structures::fx::FxHashMap; +use rustc_hir::def_id::DefId; +use rustc_middle::bug; +use rustc_middle::ty::{ + self, GenericArgsRef, Region, RegionVid, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, + TypeVisitor, +}; +use tracing::{debug, instrument}; + +use super::DefiningUse; +use super::region_ctxt::RegionCtxt; +use crate::constraints::ConstraintSccIndex; + +pub(super) fn apply_member_constraints<'tcx>( + rcx: &mut RegionCtxt<'_, 'tcx>, + defining_uses: &[DefiningUse<'tcx>], +) { + // Start by collecting the member constraints of all defining uses. + // + // Applying member constraints can influence other member constraints, + // so we first collect and then apply them. + let mut member_constraints = Default::default(); + for defining_use in defining_uses { + let mut visitor = CollectMemberConstraintsVisitor { + rcx, + defining_use, + member_constraints: &mut member_constraints, + }; + defining_use.hidden_type.ty.visit_with(&mut visitor); + } + + // Now walk over the region graph, visiting the smallest regions first and then all + // regions which have to outlive that one. + // + // Whenever we encounter a member region, we mutate the value of this SCC. This is + // as if we'd introduce new outlives constraints. However, we discard these region + // values after we've inferred the hidden types of opaques and apply the region + // constraints by simply equating the actual hidden type with the inferred one. + debug!(?member_constraints); + for scc_a in rcx.constraint_sccs.all_sccs() { + debug!(?scc_a); + // Start by adding the region values required by outlives constraints. This + // matches how we compute the final region values in `fn compute_regions`. + // + // We need to do this here to get a lower bound when applying member constraints. + // This propagates the region values added by previous member constraints. 
+ for &scc_b in rcx.constraint_sccs.successors(scc_a) {
+ debug!(?scc_b);
+ rcx.scc_values.add_region(scc_a, scc_b);
+ }
+
+ for defining_use in member_constraints.get(&scc_a).into_iter().flatten() {
+ apply_member_constraint(rcx, scc_a, &defining_use.arg_regions);
+ }
+ }
+}
+
+#[instrument(level = "debug", skip(rcx))]
+fn apply_member_constraint<'tcx>(
+ rcx: &mut RegionCtxt<'_, 'tcx>,
+ member: ConstraintSccIndex,
+ arg_regions: &[RegionVid],
+) {
+ // If the member region lives in a higher universe, we currently choose
+ // the most conservative option by leaving it unchanged.
+ if !rcx.max_placeholder_universe_reached(member).is_root() {
+ return;
+ }
+
+ // The existing value of `'member` is a lower-bound. If it is already larger than
+ // some universal region, we cannot equate it with that region. Said differently, we
+ // ignore choice regions which are smaller than this member region.
+ let mut choice_regions = arg_regions
+ .iter()
+ .copied()
+ .map(|r| rcx.representative(r).rvid())
+ .filter(|&choice_region| {
+ rcx.scc_values.universal_regions_outlived_by(member).all(|lower_bound| {
+ rcx.universal_region_relations.outlives(choice_region, lower_bound)
+ })
+ })
+ .collect::<Vec<_>>();
+ debug!(?choice_regions, "after enforcing lower-bound");
+
+ // Now find all the *upper bounds* -- that is, each UB is a
+ // free region that must outlive the member region `R0` (`UB:
+ // R0`). Therefore, we need only keep an option `O` if `UB: O`
+ // for all UB.
+ //
+ // If we have a requirement `'upper_bound: 'member`, equating `'member`
+ // with some region `'choice` means we now also require `'upper_bound: 'choice`.
+ // Avoid choice regions for which this does not hold.
+ for ub in rcx.rev_scc_graph.upper_bounds(member) {
+ choice_regions
+ .retain(|&choice_region| rcx.universal_region_relations.outlives(ub, choice_region));
+ }
+ debug!(?choice_regions, "after enforcing upper-bound");
+
+ // At this point we can pick any member of `choice_regions` and would like to choose
+ // it to be as small as possible. To avoid potential non-determinism we will pick the
+ // smallest such choice.
+ //
+ // Because universal regions are only partially ordered (i.e., not every two regions are
+ // comparable), we will ignore any region that doesn't compare to all others when picking
+ // the minimum choice.
+ //
+ // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where
+ // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`.
+ // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`.
+ let totally_ordered_subset = choice_regions.iter().copied().filter(|&r1| {
+ choice_regions.iter().all(|&r2| {
+ rcx.universal_region_relations.outlives(r1, r2)
+ || rcx.universal_region_relations.outlives(r2, r1)
+ })
+ });
+ // Now we're left with `['static, 'c]`. Pick `'c` as the minimum!
+ let Some(min_choice) = totally_ordered_subset.reduce(|r1, r2| {
+ let r1_outlives_r2 = rcx.universal_region_relations.outlives(r1, r2);
+ let r2_outlives_r1 = rcx.universal_region_relations.outlives(r2, r1);
+ match (r1_outlives_r2, r2_outlives_r1) {
+ (true, true) => r1.min(r2),
+ (true, false) => r2,
+ (false, true) => r1,
+ (false, false) => bug!("incomparable regions in total order"),
+ }
+ }) else {
+ debug!("no unique minimum choice");
+ return;
+ };
+
+ debug!(?min_choice);
+ // Lift the member region to be at least as large as this `min_choice` by directly
+ // mutating the `scc_values` as we compute it.
This acts as if we've added a + // `'member: 'min_choice` while not recomputing sccs. This means different sccs + // may now actually be equal. + let min_choice_scc = rcx.constraint_sccs.scc(min_choice); + rcx.scc_values.add_region(member, min_choice_scc); +} + +struct CollectMemberConstraintsVisitor<'a, 'b, 'tcx> { + rcx: &'a RegionCtxt<'a, 'tcx>, + defining_use: &'b DefiningUse<'tcx>, + member_constraints: &'a mut FxHashMap<ConstraintSccIndex, Vec<&'b DefiningUse<'tcx>>>, +} +impl<'tcx> CollectMemberConstraintsVisitor<'_, '_, 'tcx> { + fn cx(&self) -> TyCtxt<'tcx> { + self.rcx.infcx.tcx + } + fn visit_closure_args(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) { + let generics = self.cx().generics_of(def_id); + for arg in args.iter().skip(generics.parent_count) { + arg.visit_with(self); + } + } +} +impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for CollectMemberConstraintsVisitor<'_, '_, 'tcx> { + fn visit_region(&mut self, r: Region<'tcx>) { + match r.kind() { + ty::ReBound(..) => return, + ty::ReVar(vid) => { + let scc = self.rcx.constraint_sccs.scc(vid); + self.member_constraints.entry(scc).or_default().push(self.defining_use); + } + _ => unreachable!(), + } + } + + fn visit_ty(&mut self, ty: Ty<'tcx>) { + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return; + } + + match *ty.kind() { + ty::Closure(def_id, args) + | ty::CoroutineClosure(def_id, args) + | ty::Coroutine(def_id, args) => self.visit_closure_args(def_id, args), + + ty::Alias(kind, ty::AliasTy { def_id, args, .. }) + if let Some(variances) = self.cx().opt_alias_variances(kind, def_id) => + { + // Skip lifetime parameters that are not captured, since they do + // not need member constraints registered for them; we'll erase + // them (and hopefully in the future replace them with placeholders). 
+ for (&v, arg) in std::iter::zip(variances, args.iter()) { + if v != ty::Bivariant { + arg.visit_with(self) + } + } + } + + _ => ty.super_visit_with(self), + } + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs new file mode 100644 index 00000000000..bee82e17835 --- /dev/null +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs @@ -0,0 +1,697 @@ +use std::iter; +use std::rc::Rc; + +use rustc_data_structures::frozen::Frozen; +use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::def_id::DefId; +use rustc_infer::infer::outlives::env::RegionBoundPairs; +use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, OpaqueTypeStorageEntries}; +use rustc_infer::traits::ObligationCause; +use rustc_macros::extension; +use rustc_middle::mir::{Body, ConstraintCategory}; +use rustc_middle::ty::{ + self, DefiningScopeKind, FallibleTypeFolder, GenericArg, GenericArgsRef, OpaqueHiddenType, + OpaqueTypeKey, Region, RegionVid, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable, + TypeVisitableExt, fold_regions, +}; +use rustc_mir_dataflow::points::DenseLocationMap; +use rustc_span::Span; +use rustc_trait_selection::opaque_types::{ + NonDefiningUseReason, opaque_type_has_defining_use_args, +}; +use rustc_trait_selection::solve::NoSolution; +use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp; +use tracing::{debug, instrument}; + +use super::reverse_sccs::ReverseSccGraph; +use crate::consumers::RegionInferenceContext; +use crate::session_diagnostics::LifetimeMismatchOpaqueParam; +use crate::type_check::canonical::fully_perform_op_raw; +use crate::type_check::free_region_relations::UniversalRegionRelations; +use crate::type_check::{Locations, MirTypeckRegionConstraints}; +use crate::universal_regions::{RegionClassification, UniversalRegions}; +use crate::{BorrowCheckRootCtxt, BorrowckInferCtxt}; + +mod member_constraints; +mod region_ctxt; + +use member_constraints::apply_member_constraints; +use region_ctxt::RegionCtxt; + +/// We defer errors from [fn handle_opaque_type_uses] and only report them +/// if there are no `RegionErrors`. If there are region errors, it's likely +/// that errors here are caused by them and don't need to be handled separately. +pub(crate) enum DeferredOpaqueTypeError<'tcx> { + InvalidOpaqueTypeArgs(NonDefiningUseReason<'tcx>), + LifetimeMismatchOpaqueParam(LifetimeMismatchOpaqueParam<'tcx>), + UnexpectedHiddenRegion { + /// The opaque type. + opaque_type_key: OpaqueTypeKey<'tcx>, + /// The hidden type containing the member region. + hidden_type: OpaqueHiddenType<'tcx>, + /// The unexpected region. + member_region: Region<'tcx>, + }, + NonDefiningUseInDefiningScope { + span: Span, + opaque_type_key: OpaqueTypeKey<'tcx>, + }, +} + +/// This looks at all uses of opaque types in their defining scope inside +/// of this function. +/// +/// It first uses all defining uses to compute the actual concrete type of each +/// opaque type definition. +/// +/// We then apply this inferred type to actually check all uses of the opaque. 
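For orientation, a minimal illustration of the kind of opaque type this pass processes (invented for this write-up, not part of the patch; the function name and body are assumptions):

```rust
// Borrowck sees one defining use of the opaque `impl Iterator<Item = char> + 'a`:
// the returned expression, whose hidden type is inferred as `std::str::Chars<'a>`.
// Every use of the opaque in this body is afterwards checked against that
// inferred concrete type.
fn chars_of<'a>(s: &'a str) -> impl Iterator<Item = char> + 'a {
    s.chars()
}

fn main() {
    assert_eq!(chars_of("hi").count(), 2);
}
```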
+pub(crate) fn handle_opaque_type_uses<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>, + region_bound_pairs: &RegionBoundPairs<'tcx>, + known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>], + location_map: &Rc<DenseLocationMap>, + constraints: &mut MirTypeckRegionConstraints<'tcx>, +) -> Vec<DeferredOpaqueTypeError<'tcx>> { + let tcx = infcx.tcx; + let opaque_types = infcx.clone_opaque_types(); + if opaque_types.is_empty() { + return Vec::new(); + } + + // We need to eagerly map all regions to NLL vars here, as we need to make sure we've + // introduced nll vars for all used placeholders. + // + // We need to resolve inference vars as even though we're in MIR typeck, we may still + // encounter inference variables, e.g. when checking user types. + let opaque_types_storage_num_entries = infcx.inner.borrow_mut().opaque_types().num_entries(); + let opaque_types = opaque_types + .into_iter() + .map(|entry| { + fold_regions(tcx, infcx.resolve_vars_if_possible(entry), |r, _| { + let vid = if let ty::RePlaceholder(placeholder) = r.kind() { + constraints.placeholder_region(infcx, placeholder).as_var() + } else { + universal_region_relations.universal_regions.to_region_vid(r) + }; + Region::new_var(tcx, vid) + }) + }) + .collect::<Vec<_>>(); + + debug!(?opaque_types); + + let errors = compute_concrete_opaque_types( + root_cx, + infcx, + constraints, + universal_region_relations, + Rc::clone(location_map), + &opaque_types, + ); + + if !errors.is_empty() { + return errors; + } + + let errors = apply_computed_concrete_opaque_types( + root_cx, + infcx, + body, + &universal_region_relations.universal_regions, + region_bound_pairs, + known_type_outlives_obligations, + constraints, + &opaque_types, + ); + + detect_opaque_types_added_while_handling_opaque_types(infcx, opaque_types_storage_num_entries); + + errors +} + +/// Maps an NLL var to a deterministically chosen equal universal region. +/// +/// See the corresponding [rustc-dev-guide chapter] for more details. This +/// ignores changes to the region values due to member constraints. Applying +/// member constraints does not impact the result of this function. +/// +/// [rustc-dev-guide chapter]: https://rustc-dev-guide.rust-lang.org/borrow_check/opaque-types-region-inference-restrictions.html +fn nll_var_to_universal_region<'tcx>( + rcx: &RegionCtxt<'_, 'tcx>, + r: RegionVid, +) -> Option<Region<'tcx>> { + // Use the SCC representative instead of directly using `region`. + // See [rustc-dev-guide chapter] § "Strict lifetime equality". + let vid = rcx.representative(r).rvid(); + match rcx.definitions[vid].origin { + // Iterate over all universal regions in a consistent order and find the + // *first* equal region. This makes sure that equal lifetimes will have + // the same name and simplifies subsequent handling. + // See [rustc-dev-guide chapter] § "Semantic lifetime equality". + NllRegionVariableOrigin::FreeRegion => rcx + .universal_regions() + .universal_regions_iter() + .filter(|&ur| { + // See [rustc-dev-guide chapter] § "Closure restrictions". 
+ !matches!( + rcx.universal_regions().region_classification(ur), + Some(RegionClassification::External) + ) + }) + .find(|&ur| rcx.universal_region_relations.equal(vid, ur)) + .map(|ur| rcx.definitions[ur].external_name.unwrap()), + NllRegionVariableOrigin::Placeholder(placeholder) => { + Some(ty::Region::new_placeholder(rcx.infcx.tcx, placeholder)) + } + // If `r` were equal to any universal region, its SCC representative + // would have been set to a free region. + NllRegionVariableOrigin::Existential { .. } => None, + } +} + +#[derive(Debug)] +struct DefiningUse<'tcx> { + /// The opaque type using non NLL vars. This uses the actual + /// free regions and placeholders. This is necessary + /// to interact with code outside of `rustc_borrowck`. + opaque_type_key: OpaqueTypeKey<'tcx>, + arg_regions: Vec<RegionVid>, + hidden_type: OpaqueHiddenType<'tcx>, +} + +/// This computes the actual hidden types of the opaque types and maps them to their +/// definition sites. Outside of registering the computed concrete types this function +/// does not mutate the current borrowck state. +/// +/// While it may fail to infer the hidden type and return errors, we always apply +/// the computed concrete hidden type to all opaque type uses to check whether they +/// are correct. This is necessary to support non-defining uses of opaques in their +/// defining scope. +/// +/// It also means that this whole function is not really soundness critical as we +/// recheck all uses of the opaques regardless. +fn compute_concrete_opaque_types<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, + constraints: &MirTypeckRegionConstraints<'tcx>, + universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>, + location_map: Rc<DenseLocationMap>, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], +) -> Vec<DeferredOpaqueTypeError<'tcx>> { + let mut errors = Vec::new(); + // When computing the hidden type we need to track member constraints. + // We don't mutate the region graph used by `fn compute_regions` but instead + // manually track region information via a `RegionCtxt`. We discard this + // information at the end of this function. + let mut rcx = RegionCtxt::new(infcx, universal_region_relations, location_map, constraints); + + // We start by checking each use of an opaque type during type check and + // check whether the generic arguments of the opaque type are fully + // universal, if so, it's a defining use. + let defining_uses = collect_defining_uses(root_cx, &mut rcx, opaque_types, &mut errors); + + // We now compute and apply member constraints for all regions in the hidden + // types of each defining use. This mutates the region values of the `rcx` which + // is used when mapping the defining uses to the definition site. + apply_member_constraints(&mut rcx, &defining_uses); + + // After applying member constraints, we now check whether all member regions ended + // up equal to one of their choice regions and compute the actual concrete type of + // the opaque type definition. This is stored in the `root_cx`. 
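As a hedged illustration of the member constraints being solved here (example invented, not from the patch), the member region in the hidden type below must end up equal to one of the opaque's choice regions, and the smallest valid choice is taken:

```rust
// The hidden type is `&'? u8`. The member region `'?` must equal one of the
// choice regions `{'static, 'a}`: returning `x` requires `'a: '?`, and the
// `+ 'a` bound on the opaque requires `'?: 'a`, so `'a` is the minimal choice.
fn pick<'a, 'b: 'a>(x: &'a u8, y: &'b u8, flag: bool) -> impl Sized + 'a {
    if flag { x } else { y }
}

fn main() {
    let v = 1u8;
    let _ = pick(&v, &v, true);
}
```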
+ compute_concrete_types_from_defining_uses(root_cx, &rcx, &defining_uses, &mut errors); + errors +} + +#[instrument(level = "debug", skip_all, ret)] +fn collect_defining_uses<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + rcx: &mut RegionCtxt<'_, 'tcx>, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], + errors: &mut Vec<DeferredOpaqueTypeError<'tcx>>, +) -> Vec<DefiningUse<'tcx>> { + let infcx = rcx.infcx; + let mut defining_uses = vec![]; + for &(opaque_type_key, hidden_type) in opaque_types { + let non_nll_opaque_type_key = opaque_type_key.fold_captured_lifetime_args(infcx.tcx, |r| { + nll_var_to_universal_region(&rcx, r.as_var()).unwrap_or(r) + }); + if let Err(err) = opaque_type_has_defining_use_args( + infcx, + non_nll_opaque_type_key, + hidden_type.span, + DefiningScopeKind::MirBorrowck, + ) { + // A non-defining use. This is a hard error on stable and gets ignored + // with `TypingMode::Borrowck`. + if infcx.tcx.use_typing_mode_borrowck() { + match err { + NonDefiningUseReason::Tainted(guar) => root_cx.add_concrete_opaque_type( + opaque_type_key.def_id, + OpaqueHiddenType::new_error(infcx.tcx, guar), + ), + _ => debug!(?non_nll_opaque_type_key, ?err, "ignoring non-defining use"), + } + } else { + errors.push(DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err)); + } + continue; + } + + // We use the original `opaque_type_key` to compute the `arg_regions`. + let arg_regions = iter::once(rcx.universal_regions().fr_static) + .chain( + opaque_type_key + .iter_captured_args(infcx.tcx) + .filter_map(|(_, arg)| arg.as_region()) + .map(Region::as_var), + ) + .collect(); + defining_uses.push(DefiningUse { + opaque_type_key: non_nll_opaque_type_key, + arg_regions, + hidden_type, + }); + } + + defining_uses +} + +fn compute_concrete_types_from_defining_uses<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + rcx: &RegionCtxt<'_, 'tcx>, + defining_uses: &[DefiningUse<'tcx>], + errors: &mut Vec<DeferredOpaqueTypeError<'tcx>>, +) { + let infcx = rcx.infcx; + let tcx = infcx.tcx; + let mut decls_modulo_regions: FxIndexMap<OpaqueTypeKey<'tcx>, (OpaqueTypeKey<'tcx>, Span)> = + FxIndexMap::default(); + for &DefiningUse { opaque_type_key, ref arg_regions, hidden_type } in defining_uses { + // After applying member constraints, we now map all regions in the hidden type + // to the `arg_regions` of this defining use. In case a region in the hidden type + // ended up not being equal to any such region, we error. + let hidden_type = + match hidden_type.try_fold_with(&mut ToArgRegionsFolder::new(rcx, arg_regions)) { + Ok(hidden_type) => hidden_type, + Err(r) => { + errors.push(DeferredOpaqueTypeError::UnexpectedHiddenRegion { + hidden_type, + opaque_type_key, + member_region: ty::Region::new_var(tcx, r), + }); + let guar = tcx.dcx().span_delayed_bug( + hidden_type.span, + "opaque type with non-universal region args", + ); + ty::OpaqueHiddenType::new_error(tcx, guar) + } + }; + + // Now that we mapped the member regions to their final value, + // map the arguments of the opaque type key back to the parameters + // of the opaque type definition. + let ty = infcx + .infer_opaque_definition_from_instantiation(opaque_type_key, hidden_type) + .unwrap_or_else(|_| { + Ty::new_error_with_message( + rcx.infcx.tcx, + hidden_type.span, + "deferred invalid opaque type args", + ) + }); + + // Sometimes, when the hidden type is an inference variable, it can happen that + // the hidden type becomes the opaque type itself. 
In this case, this was an opaque
+ // usage of the opaque type and we can ignore it. This check is mirrored in typeck's
+ // writeback.
+ if !rcx.infcx.tcx.use_typing_mode_borrowck() {
+ if let ty::Alias(ty::Opaque, alias_ty) = ty.kind()
+ && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
+ && alias_ty.args == opaque_type_key.args
+ {
+ continue;
+ }
+ }
+
+ // Check that all opaque types have the same region parameters if they have the same
+ // non-region parameters. This is necessary because within the new solver we perform
+ // various query operations modulo regions, and thus could unsoundly select some impls
+ // that don't hold.
+ //
+ // FIXME(-Znext-solver): This isn't necessary after all. We can remove this check again.
+ if let Some((prev_decl_key, prev_span)) = decls_modulo_regions.insert(
+ rcx.infcx.tcx.erase_regions(opaque_type_key),
+ (opaque_type_key, hidden_type.span),
+ ) && let Some((arg1, arg2)) = std::iter::zip(
+ prev_decl_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg),
+ opaque_type_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg),
+ )
+ .find(|(arg1, arg2)| arg1 != arg2)
+ {
+ errors.push(DeferredOpaqueTypeError::LifetimeMismatchOpaqueParam(
+ LifetimeMismatchOpaqueParam {
+ arg: arg1,
+ prev: arg2,
+ span: prev_span,
+ prev_span: hidden_type.span,
+ },
+ ));
+ }
+ root_cx.add_concrete_opaque_type(
+ opaque_type_key.def_id,
+ OpaqueHiddenType { span: hidden_type.span, ty },
+ );
+ }
+}
+
+/// A folder to map the regions in the hidden type to their corresponding `arg_regions`.
+///
+/// This folder has to differentiate between member regions and other regions in the hidden
+/// type. Member regions have to be equal to one of the `arg_regions` while other regions simply
+/// get treated as an existential region in the opaque if they are not. Existential
+/// regions are currently represented using `'erased`.
+struct ToArgRegionsFolder<'a, 'tcx> {
+ rcx: &'a RegionCtxt<'a, 'tcx>,
+ // When folding closure args or bivariant alias arguments, we simply
+ // ignore non-member regions. However, we still need to map member
+ // regions to their arg region even if it's in a closure argument.
+ //
+ // See tests/ui/type-alias-impl-trait/closure_wf_outlives.rs for an example.
+ erase_unknown_regions: bool, + arg_regions: &'a [RegionVid], +} + +impl<'a, 'tcx> ToArgRegionsFolder<'a, 'tcx> { + fn new( + rcx: &'a RegionCtxt<'a, 'tcx>, + arg_regions: &'a [RegionVid], + ) -> ToArgRegionsFolder<'a, 'tcx> { + ToArgRegionsFolder { rcx, erase_unknown_regions: false, arg_regions } + } + + fn fold_non_member_arg(&mut self, arg: GenericArg<'tcx>) -> GenericArg<'tcx> { + let prev = self.erase_unknown_regions; + self.erase_unknown_regions = true; + let res = arg.try_fold_with(self).unwrap(); + self.erase_unknown_regions = prev; + res + } + + fn fold_closure_args( + &mut self, + def_id: DefId, + args: GenericArgsRef<'tcx>, + ) -> Result<GenericArgsRef<'tcx>, RegionVid> { + let generics = self.cx().generics_of(def_id); + self.cx().mk_args_from_iter(args.iter().enumerate().map(|(index, arg)| { + if index < generics.parent_count { + Ok(self.fold_non_member_arg(arg)) + } else { + arg.try_fold_with(self) + } + })) + } +} +impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for ToArgRegionsFolder<'_, 'tcx> { + type Error = RegionVid; + fn cx(&self) -> TyCtxt<'tcx> { + self.rcx.infcx.tcx + } + + fn try_fold_region(&mut self, r: Region<'tcx>) -> Result<Region<'tcx>, RegionVid> { + match r.kind() { + // ignore bound regions, keep visiting + ty::ReBound(_, _) => Ok(r), + _ => { + let r = r.as_var(); + if let Some(arg_region) = self + .arg_regions + .iter() + .copied() + .find(|&arg_vid| self.rcx.eval_equal(r, arg_vid)) + .and_then(|r| nll_var_to_universal_region(self.rcx, r)) + { + Ok(arg_region) + } else if self.erase_unknown_regions { + Ok(self.cx().lifetimes.re_erased) + } else { + Err(r) + } + } + } + } + + fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, RegionVid> { + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return Ok(ty); + } + + let tcx = self.cx(); + Ok(match *ty.kind() { + ty::Closure(def_id, args) => { + Ty::new_closure(tcx, def_id, self.fold_closure_args(def_id, args)?) + } + + ty::CoroutineClosure(def_id, args) => { + Ty::new_coroutine_closure(tcx, def_id, self.fold_closure_args(def_id, args)?) + } + + ty::Coroutine(def_id, args) => { + Ty::new_coroutine(tcx, def_id, self.fold_closure_args(def_id, args)?) + } + + ty::Alias(kind, ty::AliasTy { def_id, args, .. }) + if let Some(variances) = tcx.opt_alias_variances(kind, def_id) => + { + let args = tcx.mk_args_from_iter(std::iter::zip(variances, args.iter()).map( + |(&v, s)| { + if v == ty::Bivariant { + Ok(self.fold_non_member_arg(s)) + } else { + s.try_fold_with(self) + } + }, + ))?; + ty::AliasTy::new_from_args(tcx, def_id, args).to_ty(tcx) + } + + _ => ty.try_super_fold_with(self)?, + }) + } +} + +/// This function is what actually applies member constraints to the borrowck +/// state. It is also responsible to check all uses of the opaques in their +/// defining scope. +/// +/// It does this by equating the hidden type of each use with the instantiated final +/// hidden type of the opaque. 
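A hedged illustration of what equating the hidden type of each use means in practice (example invented, not from the patch): the concrete type is computed from the uses, and every individual use is then re-checked against it.

```rust
// The concrete type of the opaque is computed as `&'a str`. Each use of the
// opaque (both `return` sites) is then checked by equating its own hidden
// type with that concrete type.
fn name_or<'a>(custom: &'a str, use_default: bool) -> impl AsRef<str> + 'a {
    if use_default {
        return "default";
    }
    custom
}

fn main() {
    assert_eq!(name_or("alice", false).as_ref(), "alice");
}
```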
+fn apply_computed_concrete_opaque_types<'tcx>(
+ root_cx: &mut BorrowCheckRootCtxt<'tcx>,
+ infcx: &BorrowckInferCtxt<'tcx>,
+ body: &Body<'tcx>,
+ universal_regions: &UniversalRegions<'tcx>,
+ region_bound_pairs: &RegionBoundPairs<'tcx>,
+ known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>],
+ constraints: &mut MirTypeckRegionConstraints<'tcx>,
+ opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)],
+) -> Vec<DeferredOpaqueTypeError<'tcx>> {
+ let tcx = infcx.tcx;
+ let mut errors = Vec::new();
+ for &(key, hidden_type) in opaque_types {
+ let Some(expected) = root_cx.get_concrete_opaque_type(key.def_id) else {
+ assert!(tcx.use_typing_mode_borrowck(), "non-defining use in defining scope");
+ errors.push(DeferredOpaqueTypeError::NonDefiningUseInDefiningScope {
+ span: hidden_type.span,
+ opaque_type_key: key,
+ });
+ let guar = tcx.dcx().span_delayed_bug(
+ hidden_type.span,
+ "non-defining use in the defining scope with no defining uses",
+ );
+ root_cx.add_concrete_opaque_type(key.def_id, OpaqueHiddenType::new_error(tcx, guar));
+ continue;
+ };
+
+ // We erase all non-member regions of the opaque and need to treat these as existentials.
+ let expected = ty::fold_regions(tcx, expected.instantiate(tcx, key.args), |re, _dbi| {
+ match re.kind() {
+ ty::ReErased => infcx.next_nll_region_var(
+ NllRegionVariableOrigin::Existential { name: None },
+ || crate::RegionCtxt::Existential(None),
+ ),
+ _ => re,
+ }
+ });
+
+ // We now simply equate the expected with the actual hidden type.
+ let locations = Locations::All(hidden_type.span);
+ if let Err(guar) = fully_perform_op_raw(
+ infcx,
+ body,
+ universal_regions,
+ region_bound_pairs,
+ known_type_outlives_obligations,
+ constraints,
+ locations,
+ ConstraintCategory::OpaqueType,
+ CustomTypeOp::new(
+ |ocx| {
+ let cause = ObligationCause::misc(
+ hidden_type.span,
+ body.source.def_id().expect_local(),
+ );
+ // We need to normalize both types in the old solver before equating them.
+ let actual_ty = ocx.normalize(&cause, infcx.param_env, hidden_type.ty);
+ let expected_ty = ocx.normalize(&cause, infcx.param_env, expected.ty);
+ ocx.eq(&cause, infcx.param_env, actual_ty, expected_ty).map_err(|_| NoSolution)
+ },
+ "equating opaque types",
+ ),
+ ) {
+ root_cx.add_concrete_opaque_type(key.def_id, OpaqueHiddenType::new_error(tcx, guar));
+ }
+ }
+ errors
+}
+
+/// In theory `apply_computed_concrete_opaque_types` could introduce new uses of opaque types.
+/// We do not check these new uses, so this could be unsound.
+///
+/// We detect any new uses and simply delay a bug if they occur. If this results in
+/// an ICE we can properly handle this, but we haven't encountered any such test yet.
+///
+/// See the related comment in `FnCtxt::detect_opaque_types_added_during_writeback`.
+fn detect_opaque_types_added_while_handling_opaque_types<'tcx>(
+ infcx: &InferCtxt<'tcx>,
+ opaque_types_storage_num_entries: OpaqueTypeStorageEntries,
+) {
+ for (key, hidden_type) in infcx
+ .inner
+ .borrow_mut()
+ .opaque_types()
+ .opaque_types_added_since(opaque_types_storage_num_entries)
+ {
+ let opaque_type_string = infcx.tcx.def_path_str(key.def_id);
+ let msg = format!("unexpected cyclic definition of `{opaque_type_string}`");
+ infcx.dcx().span_delayed_bug(hidden_type.span, msg);
+ }
+
+ let _ = infcx.take_opaque_types();
+}
+
+impl<'tcx> RegionInferenceContext<'tcx> {
+ /// Map the regions in the type to named regions.
This is similar to what + /// `infer_opaque_types` does, but can infer any universal region, not only + /// ones from the args for the opaque type. It also doesn't double check + /// that the regions produced are in fact equal to the named region they are + /// replaced with. This is fine because this function is only to improve the + /// region names in error messages. + /// + /// This differs from `MirBorrowckCtxt::name_regions` since it is particularly + /// lax with mapping region vids that are *shorter* than a universal region to + /// that universal region. This is useful for member region constraints since + /// we want to suggest a universal region name to capture even if it's technically + /// not equal to the error region. + pub(crate) fn name_regions_for_member_constraint<T>(&self, tcx: TyCtxt<'tcx>, ty: T) -> T + where + T: TypeFoldable<TyCtxt<'tcx>>, + { + fold_regions(tcx, ty, |region, _| match region.kind() { + ty::ReVar(vid) => { + let scc = self.constraint_sccs.scc(vid); + + // Special handling of higher-ranked regions. + if !self.max_nameable_universe(scc).is_root() { + match self.scc_values.placeholders_contained_in(scc).enumerate().last() { + // If the region contains a single placeholder then they're equal. + Some((0, placeholder)) => { + return ty::Region::new_placeholder(tcx, placeholder); + } + + // Fallback: this will produce a cryptic error message. + _ => return region, + } + } + + // Find something that we can name + let upper_bound = self.approx_universal_upper_bound(vid); + if let Some(universal_region) = self.definitions[upper_bound].external_name { + return universal_region; + } + + // Nothing exact found, so we pick a named upper bound, if there's only one. + // If there's >1 universal region, then we probably are dealing w/ an intersection + // region which cannot be mapped back to a universal. + // FIXME: We could probably compute the LUB if there is one. + let scc = self.constraint_sccs.scc(vid); + let rev_scc_graph = + ReverseSccGraph::compute(&self.constraint_sccs, self.universal_regions()); + let upper_bounds: Vec<_> = rev_scc_graph + .upper_bounds(scc) + .filter_map(|vid| self.definitions[vid].external_name) + .filter(|r| !r.is_static()) + .collect(); + match &upper_bounds[..] { + [universal_region] => *universal_region, + _ => region, + } + } + _ => region, + }) + } +} + +#[extension(pub trait InferCtxtExt<'tcx>)] +impl<'tcx> InferCtxt<'tcx> { + /// Given the fully resolved, instantiated type for an opaque + /// type, i.e., the value of an inference variable like C1 or C2 + /// (*), computes the "definition type" for an opaque type + /// definition -- that is, the inferred value of `Foo1<'x>` or + /// `Foo2<'x>` that we would conceptually use in its definition: + /// ```ignore (illustrative) + /// type Foo1<'x> = impl Bar<'x> = AAA; // <-- this type AAA + /// type Foo2<'x> = impl Bar<'x> = BBB; // <-- or this type BBB + /// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } + /// ``` + /// Note that these values are defined in terms of a distinct set of + /// generic parameters (`'x` instead of `'a`) from C1 or C2. The main + /// purpose of this function is to do that translation. + /// + /// (*) C1 and C2 were introduced in the comments on + /// `register_member_constraints`. Read that comment for more context. 
+ /// + /// # Parameters + /// + /// - `def_id`, the `impl Trait` type + /// - `args`, the args used to instantiate this opaque type + /// - `instantiated_ty`, the inferred type C1 -- fully resolved, lifted version of + /// `opaque_defn.concrete_ty` + #[instrument(level = "debug", skip(self))] + fn infer_opaque_definition_from_instantiation( + &self, + opaque_type_key: OpaqueTypeKey<'tcx>, + instantiated_ty: OpaqueHiddenType<'tcx>, + ) -> Result<Ty<'tcx>, NonDefiningUseReason<'tcx>> { + opaque_type_has_defining_use_args( + self, + opaque_type_key, + instantiated_ty.span, + DefiningScopeKind::MirBorrowck, + )?; + + let definition_ty = instantiated_ty + .remap_generic_params_to_declaration_params( + opaque_type_key, + self.tcx, + DefiningScopeKind::MirBorrowck, + ) + .ty; + + definition_ty.error_reported()?; + Ok(definition_ty) + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/region_ctxt.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/region_ctxt.rs new file mode 100644 index 00000000000..88326e4eebf --- /dev/null +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/region_ctxt.rs @@ -0,0 +1,114 @@ +use std::rc::Rc; + +use rustc_data_structures::frozen::Frozen; +use rustc_index::IndexVec; +use rustc_infer::infer::NllRegionVariableOrigin; +use rustc_middle::ty::{RegionVid, UniverseIndex}; +use rustc_mir_dataflow::points::DenseLocationMap; + +use crate::BorrowckInferCtxt; +use crate::constraints::ConstraintSccIndex; +use crate::handle_placeholders::{SccAnnotations, region_definitions}; +use crate::region_infer::reverse_sccs::ReverseSccGraph; +use crate::region_infer::values::RegionValues; +use crate::region_infer::{ConstraintSccs, RegionDefinition, RegionTracker, Representative}; +use crate::type_check::MirTypeckRegionConstraints; +use crate::type_check::free_region_relations::UniversalRegionRelations; +use crate::universal_regions::UniversalRegions; + +/// A slimmed down version of [crate::region_infer::RegionInferenceContext] used +/// only by opaque type handling. +pub(super) struct RegionCtxt<'a, 'tcx> { + pub(super) infcx: &'a BorrowckInferCtxt<'tcx>, + pub(super) definitions: Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>, + pub(super) universal_region_relations: &'a UniversalRegionRelations<'tcx>, + pub(super) constraint_sccs: ConstraintSccs, + pub(super) scc_annotations: IndexVec<ConstraintSccIndex, RegionTracker>, + pub(super) rev_scc_graph: ReverseSccGraph, + pub(super) scc_values: RegionValues<ConstraintSccIndex>, +} + +impl<'a, 'tcx> RegionCtxt<'a, 'tcx> { + /// Creates a new `RegionCtxt` used to compute defining opaque type uses. + /// + /// This does not yet propagate region values. This is instead done lazily + /// when applying member constraints. 
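The lazy propagation mentioned above can be sketched with a toy model (the graph, names, and set representation are invented for illustration; this is not the compiler's implementation): each SCC's value set is completed by unioning in the values of its successors, visiting SCCs so that successors come first.

```rust
use std::collections::BTreeSet;

// Toy model of `scc_values.add_region(scc_a, scc_b)` over `successors(scc_a)`:
// assumes SCC indices are numbered so that an SCC's successors have smaller
// indices, so iterating in index order visits successors first.
fn propagate(values: &mut Vec<BTreeSet<usize>>, successors: &[Vec<usize>]) {
    for scc in 0..values.len() {
        for &succ in &successors[scc] {
            let inherited = values[succ].clone();
            values[scc].extend(inherited);
        }
    }
}

fn main() {
    // SCC 2 outlives SCC 1, which outlives SCC 0.
    let mut values = vec![BTreeSet::from([0]), BTreeSet::from([1]), BTreeSet::from([2])];
    let successors = vec![vec![], vec![0], vec![1]];
    propagate(&mut values, &successors);
    assert_eq!(values[2], BTreeSet::from([0, 1, 2]));
}
```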
+ pub(super) fn new( + infcx: &'a BorrowckInferCtxt<'tcx>, + universal_region_relations: &'a Frozen<UniversalRegionRelations<'tcx>>, + location_map: Rc<DenseLocationMap>, + constraints: &MirTypeckRegionConstraints<'tcx>, + ) -> RegionCtxt<'a, 'tcx> { + let universal_regions = &universal_region_relations.universal_regions; + let (definitions, _has_placeholders) = region_definitions(infcx, universal_regions); + let mut scc_annotations = SccAnnotations::init(&definitions); + let constraint_sccs = ConstraintSccs::new_with_annotation( + &constraints + .outlives_constraints + .graph(definitions.len()) + .region_graph(&constraints.outlives_constraints, universal_regions.fr_static), + &mut scc_annotations, + ); + let scc_annotations = scc_annotations.scc_to_annotation; + + // Unlike the `RegionInferenceContext`, we only care about free regions + // and fully ignore liveness and placeholders. + let placeholder_indices = Default::default(); + let mut scc_values = + RegionValues::new(location_map, universal_regions.len(), placeholder_indices); + for variable in definitions.indices() { + let scc = constraint_sccs.scc(variable); + match definitions[variable].origin { + NllRegionVariableOrigin::FreeRegion => { + scc_values.add_element(scc, variable); + } + _ => {} + } + } + + let rev_scc_graph = ReverseSccGraph::compute(&constraint_sccs, universal_regions); + RegionCtxt { + infcx, + definitions, + universal_region_relations, + constraint_sccs, + scc_annotations, + rev_scc_graph, + scc_values, + } + } + + pub(super) fn representative(&self, vid: RegionVid) -> Representative { + let scc = self.constraint_sccs.scc(vid); + self.scc_annotations[scc].representative + } + + pub(crate) fn max_placeholder_universe_reached( + &self, + scc: ConstraintSccIndex, + ) -> UniverseIndex { + self.scc_annotations[scc].max_placeholder_universe_reached() + } + + pub(super) fn universal_regions(&self) -> &UniversalRegions<'tcx> { + &self.universal_region_relations.universal_regions + } + + pub(super) fn eval_equal(&self, r1_vid: RegionVid, r2_vid: RegionVid) -> bool { + let r1 = self.constraint_sccs.scc(r1_vid); + let r2 = self.constraint_sccs.scc(r2_vid); + + if r1 == r2 { + return true; + } + + let universal_outlives = |sub, sup| { + self.scc_values.universal_regions_outlived_by(sub).all(|r1| { + self.scc_values + .universal_regions_outlived_by(sup) + .any(|r2| self.universal_region_relations.outlives(r2, r1)) + }) + }; + universal_outlives(r1, r2) && universal_outlives(r2, r1) + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs index 604265f8940..e8da85eccef 100644 --- a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs +++ b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs @@ -5,7 +5,6 @@ use rustc_data_structures::graph; use rustc_data_structures::graph::vec_graph::VecGraph; use rustc_middle::ty::RegionVid; -use crate::RegionInferenceContext; use crate::constraints::ConstraintSccIndex; use crate::region_infer::ConstraintSccs; use crate::universal_regions::UniversalRegions; @@ -57,12 +56,3 @@ impl ReverseSccGraph { .filter(move |r| duplicates.insert(*r)) } } - -impl RegionInferenceContext<'_> { - /// Return the reverse graph of the region SCCs, initialising it if needed. 
- pub(super) fn reverse_scc_graph(&self) -> &ReverseSccGraph { - self.rev_scc_graph.get_or_init(|| { - ReverseSccGraph::compute(&self.constraint_sccs, self.universal_regions()) - }) - } -} diff --git a/compiler/rustc_borrowck/src/root_cx.rs b/compiler/rustc_borrowck/src/root_cx.rs index 9b1d12aede5..4e90ae391bb 100644 --- a/compiler/rustc_borrowck/src/root_cx.rs +++ b/compiler/rustc_borrowck/src/root_cx.rs @@ -2,7 +2,7 @@ use rustc_abi::FieldIdx; use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::LocalDefId; use rustc_middle::bug; -use rustc_middle::ty::{OpaqueHiddenType, Ty, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{EarlyBinder, OpaqueHiddenType, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::ErrorGuaranteed; use smallvec::SmallVec; @@ -19,7 +19,7 @@ pub(super) struct BorrowCheckRootCtxt<'tcx> { tainted_by_errors: Option<ErrorGuaranteed>, /// This should be `None` during normal compilation. See [`crate::consumers`] for more /// information on how this is used. - pub(crate) consumer: Option<BorrowckConsumer<'tcx>>, + pub consumer: Option<BorrowckConsumer<'tcx>>, } impl<'tcx> BorrowCheckRootCtxt<'tcx> { @@ -38,6 +38,10 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> { } } + pub(super) fn root_def_id(&self) -> LocalDefId { + self.root_def_id + } + /// Collect all defining uses of opaque types inside of this typeck root. This /// expects the hidden type to be mapped to the definition parameters of the opaque /// and errors if we end up with distinct hidden types. @@ -67,6 +71,13 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> { } } + pub(super) fn get_concrete_opaque_type( + &mut self, + def_id: LocalDefId, + ) -> Option<EarlyBinder<'tcx, OpaqueHiddenType<'tcx>>> { + self.concrete_opaque_types.0.get(&def_id).map(|ty| EarlyBinder::bind(*ty)) + } + pub(super) fn set_tainted_by_errors(&mut self, guar: ErrorGuaranteed) { self.tainted_by_errors = Some(guar); } diff --git a/compiler/rustc_borrowck/src/type_check/canonical.rs b/compiler/rustc_borrowck/src/type_check/canonical.rs index b3fa786a517..2627ed899a9 100644 --- a/compiler/rustc_borrowck/src/type_check/canonical.rs +++ b/compiler/rustc_borrowck/src/type_check/canonical.rs @@ -2,8 +2,9 @@ use std::fmt; use rustc_errors::ErrorGuaranteed; use rustc_infer::infer::canonical::Canonical; +use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_middle::bug; -use rustc_middle::mir::ConstraintCategory; +use rustc_middle::mir::{Body, ConstraintCategory}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, Upcast}; use rustc_span::Span; use rustc_span::def_id::DefId; @@ -14,7 +15,68 @@ use rustc_trait_selection::traits::query::type_op::{self, TypeOpOutput}; use tracing::{debug, instrument}; use super::{Locations, NormalizeLocation, TypeChecker}; +use crate::BorrowckInferCtxt; use crate::diagnostics::ToUniverseInfo; +use crate::type_check::{MirTypeckRegionConstraints, constraint_conversion}; +use crate::universal_regions::UniversalRegions; + +#[instrument(skip(infcx, constraints, op), level = "trace")] +pub(crate) fn fully_perform_op_raw<'tcx, R: fmt::Debug, Op>( + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + universal_regions: &UniversalRegions<'tcx>, + region_bound_pairs: &RegionBoundPairs<'tcx>, + known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>], + constraints: &mut MirTypeckRegionConstraints<'tcx>, + locations: Locations, + category: ConstraintCategory<'tcx>, + op: Op, +) -> Result<R, ErrorGuaranteed> +where + Op: type_op::TypeOp<'tcx, Output = R>, + Op::ErrorInfo: ToUniverseInfo<'tcx>, +{ + let 
old_universe = infcx.universe(); + + let TypeOpOutput { output, constraints: query_constraints, error_info } = + op.fully_perform(infcx, infcx.root_def_id, locations.span(body))?; + if cfg!(debug_assertions) { + let data = infcx.take_and_reset_region_constraints(); + if !data.is_empty() { + panic!("leftover region constraints: {data:#?}"); + } + } + + debug!(?output, ?query_constraints); + + if let Some(data) = query_constraints { + constraint_conversion::ConstraintConversion::new( + infcx, + universal_regions, + region_bound_pairs, + known_type_outlives_obligations, + locations, + locations.span(body), + category, + constraints, + ) + .convert_all(data); + } + + // If the query has created new universes and errors are going to be emitted, register the + // cause of these new universes for improved diagnostics. + let universe = infcx.universe(); + if old_universe != universe + && let Some(error_info) = error_info + { + let universe_info = error_info.to_universe_info(old_universe); + for u in (old_universe + 1)..=universe { + constraints.universe_causes.insert(u, universe_info.clone()); + } + } + + Ok(output) +} impl<'a, 'tcx> TypeChecker<'a, 'tcx> { /// Given some operation `op` that manipulates types, proves @@ -38,36 +100,17 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Op: type_op::TypeOp<'tcx, Output = R>, Op::ErrorInfo: ToUniverseInfo<'tcx>, { - let old_universe = self.infcx.universe(); - - let TypeOpOutput { output, constraints, error_info } = - op.fully_perform(self.infcx, locations.span(self.body))?; - if cfg!(debug_assertions) { - let data = self.infcx.take_and_reset_region_constraints(); - if !data.is_empty() { - panic!("leftover region constraints: {data:#?}"); - } - } - - debug!(?output, ?constraints); - - if let Some(data) = constraints { - self.push_region_constraints(locations, category, data); - } - - // If the query has created new universes and errors are going to be emitted, register the - // cause of these new universes for improved diagnostics. 
- let universe = self.infcx.universe(); - if old_universe != universe - && let Some(error_info) = error_info - { - let universe_info = error_info.to_universe_info(old_universe); - for u in (old_universe + 1)..=universe { - self.constraints.universe_causes.insert(u, universe_info.clone()); - } - } - - Ok(output) + fully_perform_op_raw( + self.infcx, + self.body, + self.universal_regions, + self.region_bound_pairs, + self.known_type_outlives_obligations, + self.constraints, + locations, + category, + op, + ) } pub(super) fn instantiate_canonical<T>( diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index 9bb96b94506..703223e2e54 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -1,10 +1,10 @@ use rustc_data_structures::fx::FxHashSet; use rustc_hir::def_id::LocalDefId; +use rustc_infer::infer::SubregionOrigin; use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate}; use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound}; -use rustc_infer::infer::{InferCtxt, SubregionOrigin}; use rustc_infer::traits::query::type_op::DeeplyNormalize; use rustc_middle::bug; use rustc_middle::ty::{ @@ -18,10 +18,12 @@ use crate::constraints::OutlivesConstraint; use crate::region_infer::TypeTest; use crate::type_check::{Locations, MirTypeckRegionConstraints}; use crate::universal_regions::UniversalRegions; -use crate::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory}; +use crate::{ + BorrowckInferCtxt, ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory, +}; pub(crate) struct ConstraintConversion<'a, 'tcx> { - infcx: &'a InferCtxt<'tcx>, + infcx: &'a BorrowckInferCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, /// Each RBP `GK: 'a` is assumed to be true. These encode /// relationships like `T: 'a` that are added via implicit bounds @@ -34,7 +36,6 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> { /// logic expecting to see (e.g.) `ReStatic`, and if we supplied /// our special inference variable there, we would mess that up. region_bound_pairs: &'a RegionBoundPairs<'tcx>, - param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, span: Span, @@ -45,10 +46,9 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> { impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { pub(crate) fn new( - infcx: &'a InferCtxt<'tcx>, + infcx: &'a BorrowckInferCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, region_bound_pairs: &'a RegionBoundPairs<'tcx>, - param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, span: Span, @@ -59,7 +59,6 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { infcx, universal_regions, region_bound_pairs, - param_env, known_type_outlives_obligations, locations, span, @@ -286,8 +285,11 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ConstraintCategory<'tcx>, )>, ) -> Ty<'tcx> { - match self.param_env.and(DeeplyNormalize { value: ty }).fully_perform(self.infcx, self.span) - { + match self.infcx.param_env.and(DeeplyNormalize { value: ty }).fully_perform( + self.infcx, + self.infcx.root_def_id, + self.span, + ) { Ok(TypeOpOutput { output: ty, constraints, .. 
}) => { // FIXME(higher_ranked_auto): What should we do with the assumptions here? if let Some(QueryRegionConstraints { outlives, assumptions: _ }) = constraints { diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index f642d34ea67..7bf2df91470 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -2,9 +2,9 @@ use rustc_data_structures::frozen::Frozen; use rustc_data_structures::transitive_relation::{TransitiveRelation, TransitiveRelationBuilder}; use rustc_hir::def::DefKind; use rustc_infer::infer::canonical::QueryRegionConstraints; +use rustc_infer::infer::outlives; use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::region_constraints::GenericKind; -use rustc_infer::infer::{InferCtxt, outlives}; use rustc_infer::traits::query::type_op::DeeplyNormalize; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::query::OutlivesBound; @@ -14,6 +14,7 @@ use rustc_trait_selection::traits::query::type_op::{self, TypeOp}; use tracing::{debug, instrument}; use type_op::TypeOpOutput; +use crate::BorrowckInferCtxt; use crate::type_check::{Locations, MirTypeckRegionConstraints, constraint_conversion}; use crate::universal_regions::UniversalRegions; @@ -47,14 +48,12 @@ pub(crate) struct CreateResult<'tcx> { } pub(crate) fn create<'tcx>( - infcx: &InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, universal_regions: UniversalRegions<'tcx>, constraints: &mut MirTypeckRegionConstraints<'tcx>, ) -> CreateResult<'tcx> { UniversalRegionRelationsBuilder { infcx, - param_env, constraints, universal_regions, region_bound_pairs: Default::default(), @@ -177,8 +176,7 @@ impl UniversalRegionRelations<'_> { } struct UniversalRegionRelationsBuilder<'a, 'tcx> { - infcx: &'a InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, + infcx: &'a BorrowckInferCtxt<'tcx>, universal_regions: UniversalRegions<'tcx>, constraints: &'a mut MirTypeckRegionConstraints<'tcx>, @@ -205,7 +203,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // Insert the `'a: 'b` we know from the predicates. // This does not consider the type-outlives. - let param_env = self.param_env; + let param_env = self.infcx.param_env; self.add_outlives_bounds(outlives::explicit_outlives_bounds(param_env)); // - outlives is reflexive, so `'r: 'r` for every region `'r` @@ -263,7 +261,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { let TypeOpOutput { output: norm_ty, constraints: constraints_normalize, .. } = param_env .and(DeeplyNormalize { value: ty }) - .fully_perform(self.infcx, span) + .fully_perform(self.infcx, self.infcx.root_def_id, span) .unwrap_or_else(|guar| TypeOpOutput { output: Ty::new_error(self.infcx.tcx, guar), constraints: None, @@ -298,8 +296,9 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // Add implied bounds from impl header. if matches!(tcx.def_kind(defining_ty_def_id), DefKind::AssocFn | DefKind::AssocConst) { for &(ty, _) in tcx.assumed_wf_types(tcx.local_parent(defining_ty_def_id)) { - let result: Result<_, ErrorGuaranteed> = - param_env.and(DeeplyNormalize { value: ty }).fully_perform(self.infcx, span); + let result: Result<_, ErrorGuaranteed> = param_env + .and(DeeplyNormalize { value: ty }) + .fully_perform(self.infcx, self.infcx.root_def_id, span); let Ok(TypeOpOutput { output: norm_ty, constraints: c, .. 
}) = result else { continue; }; @@ -318,7 +317,6 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { self.infcx, &self.universal_regions, &self.region_bound_pairs, - param_env, &known_type_outlives_obligations, Locations::All(span), span, @@ -353,10 +351,11 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { output: normalized_outlives, constraints: constraints_normalize, error_info: _, - }) = self - .param_env - .and(DeeplyNormalize { value: outlives }) - .fully_perform(self.infcx, span) + }) = self.infcx.param_env.and(DeeplyNormalize { value: outlives }).fully_perform( + self.infcx, + self.infcx.root_def_id, + span, + ) else { self.infcx.dcx().delayed_bug(format!("could not normalize {outlives:?}")); return; @@ -381,9 +380,10 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { span: Span, ) -> Option<&'tcx QueryRegionConstraints<'tcx>> { let TypeOpOutput { output: bounds, constraints, .. } = self + .infcx .param_env .and(type_op::ImpliedOutlivesBounds { ty }) - .fully_perform(self.infcx, span) + .fully_perform(self.infcx, self.infcx.root_def_id, span) .map_err(|_: ErrorGuaranteed| debug!("failed to compute implied bounds {:?}", ty)) .ok()?; debug!(?bounds, ?constraints); diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index 5d30fa71e92..b704d8f0a76 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -640,7 +640,7 @@ impl<'tcx> LivenessContext<'_, '_, 'tcx> { let op = typeck.infcx.param_env.and(DropckOutlives { dropped_ty }); - match op.fully_perform(typeck.infcx, DUMMY_SP) { + match op.fully_perform(typeck.infcx, typeck.root_cx.root_def_id(), DUMMY_SP) { Ok(TypeOpOutput { output, constraints, .. }) => { DropData { dropck_result: output, region_constraint_data: constraints } } diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index c3aa205d5aa..0d363935f14 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -26,8 +26,7 @@ use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::cast::CastTy; use rustc_middle::ty::{ self, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, CoroutineArgsExt, - GenericArgsRef, OpaqueHiddenType, OpaqueTypeKey, RegionVid, Ty, TyCtxt, TypeVisitableExt, - UserArgs, UserTypeAnnotationIndex, fold_regions, + GenericArgsRef, Ty, TyCtxt, TypeVisitableExt, UserArgs, UserTypeAnnotationIndex, fold_regions, }; use rustc_middle::{bug, span_bug}; use rustc_mir_dataflow::move_paths::MoveData; @@ -35,6 +34,7 @@ use rustc_mir_dataflow::points::DenseLocationMap; use rustc_span::def_id::CRATE_DEF_ID; use rustc_span::source_map::Spanned; use rustc_span::{Span, sym}; +use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::query::type_op::custom::scrape_region_constraints; use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput}; use tracing::{debug, instrument, trace}; @@ -42,7 +42,6 @@ use tracing::{debug, instrument, trace}; use crate::borrow_set::BorrowSet; use crate::constraints::{OutlivesConstraint, OutlivesConstraintSet}; use crate::diagnostics::UniverseInfo; -use crate::member_constraints::MemberConstraintSet; use crate::polonius::legacy::{PoloniusFacts, PoloniusLocationTable}; use crate::polonius::{PoloniusContext, PoloniusLivenessContext}; use crate::region_infer::TypeTest; @@ -67,12 +66,11 @@ macro_rules! 
span_mirbug { }) } -mod canonical; +pub(crate) mod canonical; mod constraint_conversion; pub(crate) mod free_region_relations; mod input_output; pub(crate) mod liveness; -mod opaque_types; mod relate_tys; /// Type checks the given `mir` in the context of the inference @@ -114,7 +112,6 @@ pub(crate) fn type_check<'tcx>( placeholder_index_to_region: IndexVec::default(), liveness_constraints: LivenessValues::with_specific_points(Rc::clone(&location_map)), outlives_constraints: OutlivesConstraintSet::default(), - member_constraints: MemberConstraintSet::default(), type_tests: Vec::default(), universe_causes: FxIndexMap::default(), }; @@ -124,7 +121,7 @@ pub(crate) fn type_check<'tcx>( region_bound_pairs, normalized_inputs_and_output, known_type_outlives_obligations, - } = free_region_relations::create(infcx, infcx.param_env, universal_regions, &mut constraints); + } = free_region_relations::create(infcx, universal_regions, &mut constraints); let pre_obligations = infcx.take_registered_region_obligations(); assert!( @@ -170,9 +167,6 @@ pub(crate) fn type_check<'tcx>( liveness::generate(&mut typeck, &location_map, move_data); - let opaque_type_values = - opaque_types::take_opaques_and_register_member_constraints(&mut typeck); - // We're done with typeck, we can finalize the polonius liveness context for region inference. let polonius_context = typeck.polonius_liveness.take().map(|liveness_context| { PoloniusContext::create_from_liveness( @@ -187,7 +181,6 @@ pub(crate) fn type_check<'tcx>( if let Some(guar) = universal_region_relations.universal_regions.encountered_re_error() { debug!("encountered an error region; removing constraints!"); constraints.outlives_constraints = Default::default(); - constraints.member_constraints = Default::default(); constraints.type_tests = Default::default(); root_cx.set_tainted_by_errors(guar); infcx.set_tainted_by_errors(guar); @@ -196,7 +189,8 @@ pub(crate) fn type_check<'tcx>( MirTypeckResults { constraints, universal_region_relations, - opaque_type_values, + region_bound_pairs, + known_type_outlives_obligations, polonius_context, } } @@ -245,7 +239,8 @@ struct TypeChecker<'a, 'tcx> { pub(crate) struct MirTypeckResults<'tcx> { pub(crate) constraints: MirTypeckRegionConstraints<'tcx>, pub(crate) universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>, - pub(crate) opaque_type_values: FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>>, + pub(crate) region_bound_pairs: Frozen<RegionBoundPairs<'tcx>>, + pub(crate) known_type_outlives_obligations: Frozen<Vec<ty::PolyTypeOutlivesPredicate<'tcx>>>, pub(crate) polonius_context: Option<PoloniusContext>, } @@ -277,8 +272,6 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> { pub(crate) outlives_constraints: OutlivesConstraintSet<'tcx>, - pub(crate) member_constraints: MemberConstraintSet<'tcx, RegionVid>, - pub(crate) universe_causes: FxIndexMap<ty::UniverseIndex, UniverseInfo<'tcx>>, pub(crate) type_tests: Vec<TypeTest<'tcx>>, @@ -287,7 +280,7 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> { impl<'tcx> MirTypeckRegionConstraints<'tcx> { /// Creates a `Region` for a given `PlaceholderRegion`, or returns the /// region that corresponds to a previously created one. 
- fn placeholder_region( + pub(crate) fn placeholder_region( &mut self, infcx: &InferCtxt<'tcx>, placeholder: ty::PlaceholderRegion, @@ -380,14 +373,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.body } - fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> RegionVid { - if let ty::RePlaceholder(placeholder) = r.kind() { - self.constraints.placeholder_region(self.infcx, placeholder).as_var() - } else { - self.universal_regions.to_region_vid(r) - } - } - fn unsized_feature_enabled(&self) -> bool { self.tcx().features().unsized_fn_params() } @@ -424,7 +409,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.infcx, self.universal_regions, self.region_bound_pairs, - self.infcx.param_env, self.known_type_outlives_obligations, locations, locations.span(self.body), @@ -1471,68 +1455,79 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } CastKind::PtrToPtr => { let ty_from = op.ty(self.body, tcx); - let cast_ty_from = CastTy::from_ty(ty_from); - let cast_ty_to = CastTy::from_ty(*ty); - match (cast_ty_from, cast_ty_to) { - (Some(CastTy::Ptr(src)), Some(CastTy::Ptr(dst))) => { - let src_tail = self.struct_tail(src.ty, location); - let dst_tail = self.struct_tail(dst.ty, location); - - // This checks (lifetime part of) vtable validity for pointer casts, - // which is irrelevant when there are aren't principal traits on - // both sides (aka only auto traits). - // - // Note that other checks (such as denying `dyn Send` -> `dyn - // Debug`) are in `rustc_hir_typeck`. - if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) = *src_tail.kind() - && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) = *dst_tail.kind() - && src_tty.principal().is_some() - && dst_tty.principal().is_some() - { - // Remove auto traits. - // Auto trait checks are handled in `rustc_hir_typeck` as FCW. - let src_obj = Ty::new_dynamic( - tcx, - tcx.mk_poly_existential_predicates( - &src_tty.without_auto_traits().collect::<Vec<_>>(), - ), - // FIXME: Once we disallow casting `*const dyn Trait + 'short` - // to `*const dyn Trait + 'long`, then this can just be `src_lt`. - dst_lt, - ty::Dyn, - ); - let dst_obj = Ty::new_dynamic( - tcx, - tcx.mk_poly_existential_predicates( - &dst_tty.without_auto_traits().collect::<Vec<_>>(), - ), - dst_lt, - ty::Dyn, - ); - - debug!(?src_tty, ?dst_tty, ?src_obj, ?dst_obj); - - self.sub_types( - src_obj, - dst_obj, - location.to_locations(), - ConstraintCategory::Cast { - is_implicit_coercion: false, - unsize_to: None, - }, - ) - .unwrap(); - } - } - _ => { - span_mirbug!( - self, - rvalue, - "Invalid PtrToPtr cast {:?} -> {:?}", - ty_from, - ty - ) - } + let Some(CastTy::Ptr(src)) = CastTy::from_ty(ty_from) else { + unreachable!(); + }; + let Some(CastTy::Ptr(dst)) = CastTy::from_ty(*ty) else { + unreachable!(); + }; + + if self.infcx.type_is_sized_modulo_regions(self.infcx.param_env, dst.ty) { + // Wide to thin ptr cast. This may even occur in an env with + // impossible predicates, such as `where dyn Trait: Sized`. + // In this case, we don't want to fall into the case below, + // since the types may not actually be equatable, but it's + // fine to perform this operation in an impossible env. 
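+ // For example, a wide-to-thin cast such as `*const dyn core::fmt::Debug as *const u8` + // takes this branch: only `u8: Sized` is proven here, and the vtable-related subtyping + // check in the branch below is skipped.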
+ let trait_ref = ty::TraitRef::new( + tcx, + tcx.require_lang_item(LangItem::Sized, self.last_span), + [dst.ty], + ); + self.prove_trait_ref( + trait_ref, + location.to_locations(), + ConstraintCategory::Cast { + is_implicit_coercion: true, + unsize_to: None, + }, + ); + } else if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) = + *self.struct_tail(src.ty, location).kind() + && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) = + *self.struct_tail(dst.ty, location).kind() + && src_tty.principal().is_some() + && dst_tty.principal().is_some() + { + // This checks (lifetime part of) vtable validity for pointer casts, + // which is irrelevant when there aren't principal traits on + // both sides (aka only auto traits). + // + // Note that other checks (such as denying `dyn Send` -> `dyn + // Debug`) are in `rustc_hir_typeck`. + + // Remove auto traits. + // Auto trait checks are handled in `rustc_hir_typeck` as FCW. + let src_obj = Ty::new_dynamic( + tcx, + tcx.mk_poly_existential_predicates( + &src_tty.without_auto_traits().collect::<Vec<_>>(), + ), + // FIXME: Once we disallow casting `*const dyn Trait + 'short` + // to `*const dyn Trait + 'long`, then this can just be `src_lt`. + dst_lt, + ty::Dyn, + ); + let dst_obj = Ty::new_dynamic( + tcx, + tcx.mk_poly_existential_predicates( + &dst_tty.without_auto_traits().collect::<Vec<_>>(), + ), + dst_lt, + ty::Dyn, + ); + + debug!(?src_tty, ?dst_tty, ?src_obj, ?dst_obj); + + self.sub_types( + src_obj, + dst_obj, + location.to_locations(), + ConstraintCategory::Cast { + is_implicit_coercion: false, + unsize_to: None, + }, + ) + .unwrap(); } } CastKind::Transmute => { @@ -1773,10 +1768,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { locations, ); - assert!(!matches!( - tcx.impl_of_assoc(def_id).map(|imp| tcx.def_kind(imp)), - Some(DefKind::Impl { of_trait: true }) - )); + assert_eq!(tcx.trait_impl_of_assoc(def_id), None); self.prove_predicates( args.types().map(|ty| ty::ClauseKind::WellFormed(ty.into())), locations, @@ -1898,7 +1890,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { if !output_ty .is_privately_uninhabited(self.tcx(), self.infcx.typing_env(self.infcx.param_env)) { - span_mirbug!(self, term, "call to converging function {:?} w/o dest", sig); + span_mirbug!(self, term, "call to non-diverging function {:?} w/o dest", sig); } } else { let dest_ty = destination.ty(self.body, tcx).ty; @@ -2477,12 +2469,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { args: GenericArgsRef<'tcx>, locations: Locations, ) -> ty::InstantiatedPredicates<'tcx> { + let root_def_id = self.root_cx.root_def_id(); if let Some(closure_requirements) = &self.root_cx.closure_requirements(def_id) { constraint_conversion::ConstraintConversion::new( self.infcx, self.universal_regions, self.region_bound_pairs, - self.infcx.param_env, self.known_type_outlives_obligations, locations, self.body.span, // irrelevant; will be overridden. @@ -2492,9 +2484,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { .apply_closure_requirements(closure_requirements, def_id, args); } - // Now equate closure args to regions inherited from `typeck_root_def_id`. Fixes #98589. - let typeck_root_def_id = tcx.typeck_root_def_id(self.body.source.def_id()); - let typeck_root_args = ty::GenericArgs::identity_for_item(tcx, typeck_root_def_id); + // Now equate closure args to regions inherited from `root_def_id`. Fixes #98589.
+ let typeck_root_args = ty::GenericArgs::identity_for_item(tcx, root_def_id); let parent_args = match tcx.def_kind(def_id) { // We don't want to dispatch on 3 different kind of closures here, so take @@ -2569,17 +2560,14 @@ impl<'tcx> TypeOp<'tcx> for InstantiateOpaqueType<'tcx> { fn fully_perform( mut self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> { - let (mut output, region_constraints) = scrape_region_constraints( - infcx, - |ocx| { + let (mut output, region_constraints) = + scrape_region_constraints(infcx, root_def_id, "InstantiateOpaqueType", span, |ocx| { ocx.register_obligations(self.obligations.clone()); Ok(()) - }, - "InstantiateOpaqueType", - span, - )?; + })?; self.region_constraints = Some(region_constraints); output.error_info = Some(self); Ok(output) diff --git a/compiler/rustc_borrowck/src/type_check/opaque_types.rs b/compiler/rustc_borrowck/src/type_check/opaque_types.rs deleted file mode 100644 index 5a422483eef..00000000000 --- a/compiler/rustc_borrowck/src/type_check/opaque_types.rs +++ /dev/null @@ -1,333 +0,0 @@ -use std::iter; - -use rustc_data_structures::fx::FxIndexMap; -use rustc_middle::span_bug; -use rustc_middle::ty::{ - self, GenericArgKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeSuperVisitable, - TypeVisitable, TypeVisitableExt, TypeVisitor, fold_regions, -}; -use tracing::{debug, trace}; - -use super::{MemberConstraintSet, TypeChecker}; - -/// Once we're done with typechecking the body, we take all the opaque types -/// defined by this function and add their 'member constraints'. -pub(super) fn take_opaques_and_register_member_constraints<'tcx>( - typeck: &mut TypeChecker<'_, 'tcx>, -) -> FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>> { - let infcx = typeck.infcx; - // Annoying: to invoke `typeck.to_region_vid`, we need access to - // `typeck.constraints`, but we also want to be mutating - // `typeck.member_constraints`. For now, just swap out the value - // we want and replace at the end. - let mut member_constraints = std::mem::take(&mut typeck.constraints.member_constraints); - let opaque_types = infcx - .take_opaque_types() - .into_iter() - .map(|(opaque_type_key, hidden_type)| { - let hidden_type = infcx.resolve_vars_if_possible(hidden_type); - register_member_constraints( - typeck, - &mut member_constraints, - opaque_type_key, - hidden_type, - ); - trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind()); - if hidden_type.has_non_region_infer() { - span_bug!(hidden_type.span, "could not resolve {:?}", hidden_type.ty); - } - - // Convert all regions to nll vars. - let (opaque_type_key, hidden_type) = - fold_regions(infcx.tcx, (opaque_type_key, hidden_type), |r, _| { - ty::Region::new_var(infcx.tcx, typeck.to_region_vid(r)) - }); - - (opaque_type_key, hidden_type) - }) - .collect(); - assert!(typeck.constraints.member_constraints.is_empty()); - typeck.constraints.member_constraints = member_constraints; - opaque_types -} - -/// Given the map `opaque_types` containing the opaque -/// `impl Trait` types whose underlying, hidden types are being -/// inferred, this method adds constraints to the regions -/// appearing in those underlying hidden types to ensure that they -/// at least do not refer to random scopes within the current -/// function. These constraints are not (quite) sufficient to -/// guarantee that the regions are actually legal values; that -/// final condition is imposed after region inference is done. 
-/// -/// # The Problem -/// -/// Let's work through an example to explain how it works. Assume -/// the current function is as follows: -/// -/// ```text -/// fn foo<'a, 'b>(..) -> (impl Bar<'a>, impl Bar<'b>) -/// ``` -/// -/// Here, we have two `impl Trait` types whose values are being -/// inferred (the `impl Bar<'a>` and the `impl -/// Bar<'b>`). Conceptually, this is sugar for a setup where we -/// define underlying opaque types (`Foo1`, `Foo2`) and then, in -/// the return type of `foo`, we *reference* those definitions: -/// -/// ```text -/// type Foo1<'x> = impl Bar<'x>; -/// type Foo2<'x> = impl Bar<'x>; -/// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } -/// // ^^^^ ^^ -/// // | | -/// // | args -/// // def_id -/// ``` -/// -/// As indicating in the comments above, each of those references -/// is (in the compiler) basically generic parameters (`args`) -/// applied to the type of a suitable `def_id` (which identifies -/// `Foo1` or `Foo2`). -/// -/// Now, at this point in compilation, what we have done is to -/// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with -/// fresh inference variables C1 and C2. We wish to use the values -/// of these variables to infer the underlying types of `Foo1` and -/// `Foo2`. That is, this gives rise to higher-order (pattern) unification -/// constraints like: -/// -/// ```text -/// for<'a> (Foo1<'a> = C1) -/// for<'b> (Foo1<'b> = C2) -/// ``` -/// -/// For these equation to be satisfiable, the types `C1` and `C2` -/// can only refer to a limited set of regions. For example, `C1` -/// can only refer to `'static` and `'a`, and `C2` can only refer -/// to `'static` and `'b`. The job of this function is to impose that -/// constraint. -/// -/// Up to this point, C1 and C2 are basically just random type -/// inference variables, and hence they may contain arbitrary -/// regions. In fact, it is fairly likely that they do! Consider -/// this possible definition of `foo`: -/// -/// ```text -/// fn foo<'a, 'b>(x: &'a i32, y: &'b i32) -> (impl Bar<'a>, impl Bar<'b>) { -/// (&*x, &*y) -/// } -/// ``` -/// -/// Here, the values for the concrete types of the two impl -/// traits will include inference variables: -/// -/// ```text -/// &'0 i32 -/// &'1 i32 -/// ``` -/// -/// Ordinarily, the subtyping rules would ensure that these are -/// sufficiently large. But since `impl Bar<'a>` isn't a specific -/// type per se, we don't get such constraints by default. This -/// is where this function comes into play. It adds extra -/// constraints to ensure that all the regions which appear in the -/// inferred type are regions that could validly appear. -/// -/// This is actually a bit of a tricky constraint in general. We -/// want to say that each variable (e.g., `'0`) can only take on -/// values that were supplied as arguments to the opaque type -/// (e.g., `'a` for `Foo1<'a>`) or `'static`, which is always in -/// scope. We don't have a constraint quite of this kind in the current -/// region checker. -/// -/// # The Solution -/// -/// We generally prefer to make `<=` constraints, since they -/// integrate best into the region solver. To do that, we find the -/// "minimum" of all the arguments that appear in the args: that -/// is, some region which is less than all the others. In the case -/// of `Foo1<'a>`, that would be `'a` (it's the only choice, after -/// all). Then we apply that as a least bound to the variables -/// (e.g., `'a <= '0`). -/// -/// In some cases, there is no minimum. 
Consider this example: -/// -/// ```text -/// fn baz<'a, 'b>() -> impl Trait<'a, 'b> { ... } -/// ``` -/// -/// Here we would report a more complex "in constraint", like `'r -/// in ['a, 'b, 'static]` (where `'r` is some region appearing in -/// the hidden type). -/// -/// # Constrain regions, not the hidden concrete type -/// -/// Note that generating constraints on each region `Rc` is *not* -/// the same as generating an outlives constraint on `Tc` itself. -/// For example, if we had a function like this: -/// -/// ``` -/// # #![feature(type_alias_impl_trait)] -/// # fn main() {} -/// # trait Foo<'a> {} -/// # impl<'a, T> Foo<'a> for (&'a u32, T) {} -/// fn foo<'a, T>(x: &'a u32, y: T) -> impl Foo<'a> { -/// (x, y) -/// } -/// -/// // Equivalent to: -/// # mod dummy { use super::*; -/// type FooReturn<'a, T> = impl Foo<'a>; -/// #[define_opaque(FooReturn)] -/// fn foo<'a, T>(x: &'a u32, y: T) -> FooReturn<'a, T> { -/// (x, y) -/// } -/// # } -/// ``` -/// -/// then the hidden type `Tc` would be `(&'0 u32, T)` (where `'0` -/// is an inference variable). If we generated a constraint that -/// `Tc: 'a`, then this would incorrectly require that `T: 'a` -- -/// but this is not necessary, because the opaque type we -/// create will be allowed to reference `T`. So we only generate a -/// constraint that `'0: 'a`. -fn register_member_constraints<'tcx>( - typeck: &mut TypeChecker<'_, 'tcx>, - member_constraints: &mut MemberConstraintSet<'tcx, ty::RegionVid>, - opaque_type_key: OpaqueTypeKey<'tcx>, - OpaqueHiddenType { span, ty: hidden_ty }: OpaqueHiddenType<'tcx>, -) { - let tcx = typeck.tcx(); - let hidden_ty = typeck.infcx.resolve_vars_if_possible(hidden_ty); - debug!(?hidden_ty); - - let variances = tcx.variances_of(opaque_type_key.def_id); - debug!(?variances); - - // For a case like `impl Foo<'a, 'b>`, we would generate a constraint - // `'r in ['a, 'b, 'static]` for each region `'r` that appears in the - // hidden type (i.e., it must be equal to `'a`, `'b`, or `'static`). - // - // `conflict1` and `conflict2` are the two region bounds that we - // detected which were unrelated. They are used for diagnostics. - - // Create the set of choice regions: each region in the hidden - // type can be equal to any of the region parameters of the - // opaque type definition. - let fr_static = typeck.universal_regions.fr_static; - let choice_regions: Vec<_> = opaque_type_key - .args - .iter() - .enumerate() - .filter(|(i, _)| variances[*i] == ty::Invariant) - .filter_map(|(_, arg)| match arg.kind() { - GenericArgKind::Lifetime(r) => Some(typeck.to_region_vid(r)), - GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, - }) - .chain(iter::once(fr_static)) - .collect(); - - // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's - // not currently sound until we have existential regions. - hidden_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { - tcx, - op: |r| { - member_constraints.add_member_constraint( - opaque_type_key, - hidden_ty, - span, - typeck.to_region_vid(r), - &choice_regions, - ) - }, - }); -} - -/// Visitor that requires that (almost) all regions in the type visited outlive -/// `least_region`. We cannot use `push_outlives_components` because regions in -/// closure signatures are not included in their outlives components. 
We need to -/// ensure all regions outlive the given bound so that we don't end up with, -/// say, `ReVar` appearing in a return type and causing ICEs when other -/// functions end up with region constraints involving regions from other -/// functions. -/// -/// We also cannot use `for_each_free_region` because for closures it includes -/// the regions parameters from the enclosing item. -/// -/// We ignore any type parameters because impl trait values are assumed to -/// capture all the in-scope type parameters. -struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { - tcx: TyCtxt<'tcx>, - op: OP, -} - -impl<'tcx, OP> TypeVisitor<TyCtxt<'tcx>> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP> -where - OP: FnMut(ty::Region<'tcx>), -{ - fn visit_region(&mut self, r: ty::Region<'tcx>) { - match r.kind() { - // ignore bound regions, keep visiting - ty::ReBound(_, _) => {} - _ => (self.op)(r), - } - } - - fn visit_ty(&mut self, ty: Ty<'tcx>) { - // We're only interested in types involving regions - if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { - return; - } - - match *ty.kind() { - ty::Closure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_closure().upvar_tys() { - upvar.visit_with(self); - } - args.as_closure().sig_as_fn_ptr_ty().visit_with(self); - } - - ty::CoroutineClosure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_coroutine_closure().upvar_tys() { - upvar.visit_with(self); - } - - args.as_coroutine_closure().signature_parts_ty().visit_with(self); - } - - ty::Coroutine(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - // Also skip the witness type, because that has no free regions. - - for upvar in args.as_coroutine().upvar_tys() { - upvar.visit_with(self); - } - args.as_coroutine().return_ty().visit_with(self); - args.as_coroutine().yield_ty().visit_with(self); - args.as_coroutine().resume_ty().visit_with(self); - } - - ty::Alias(kind, ty::AliasTy { def_id, args, .. }) - if let Some(variances) = self.tcx.opt_alias_variances(kind, def_id) => - { - // Skip lifetime parameters that are not captured, since they do - // not need member constraints registered for them; we'll erase - // them (and hopefully in the future replace them with placeholders). - for (v, s) in std::iter::zip(variances, args.iter()) { - if *v != ty::Bivariant { - s.visit_with(self); - } - } - } - - _ => { - ty.super_visit_with(self); - } - } - } -} diff --git a/compiler/rustc_borrowck/src/type_check/relate_tys.rs b/compiler/rustc_borrowck/src/type_check/relate_tys.rs index 84ca9bad2c1..7ac2dff12f7 100644 --- a/compiler/rustc_borrowck/src/type_check/relate_tys.rs +++ b/compiler/rustc_borrowck/src/type_check/relate_tys.rs @@ -124,8 +124,13 @@ impl<'a, 'b, 'tcx> NllTypeRelating<'a, 'b, 'tcx> { // by using `ty_vid rel B` and then finally and end by equating `ty_vid` to // the opaque. let mut enable_subtyping = |ty, opaque_is_expected| { - let ty_vid = infcx.next_ty_var_id_in_universe(self.span(), ty::UniverseIndex::ROOT); - + // We create the fresh inference variable in the highest universe. + // In theory we could limit it to the highest universe in the args of + // the opaque but that isn't really worth the effort. + // + // We'll make sure that the opaque type can actually name everything + // in its hidden type later on. 
+ let ty_vid = infcx.next_ty_vid(self.span()); let variance = if opaque_is_expected { self.ambient_variance } else { diff --git a/compiler/rustc_builtin_macros/Cargo.toml b/compiler/rustc_builtin_macros/Cargo.toml index e56b9e641a1..9ca44d67150 100644 --- a/compiler/rustc_builtin_macros/Cargo.toml +++ b/compiler/rustc_builtin_macros/Cargo.toml @@ -30,6 +30,6 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_builtin_macros/messages.ftl b/compiler/rustc_builtin_macros/messages.ftl index ae186d744c4..358c0d3db46 100644 --- a/compiler/rustc_builtin_macros/messages.ftl +++ b/compiler/rustc_builtin_macros/messages.ftl @@ -222,6 +222,15 @@ builtin_macros_format_unused_args = multiple unused formatting arguments builtin_macros_format_use_positional = consider using a positional formatting argument instead +builtin_macros_derive_from_wrong_target = `#[derive(From)]` used on {$kind} + +builtin_macros_derive_from_wrong_field_count = `#[derive(From)]` used on a struct with {$multiple_fields -> + [true] multiple fields + *[false] no fields +} + +builtin_macros_derive_from_usage_note = `#[derive(From)]` can only be used on structs with exactly one field + builtin_macros_multiple_default_attrs = multiple `#[default]` attributes .note = only one `#[default]` attribute is needed .label = `#[default]` used here @@ -259,8 +268,6 @@ builtin_macros_only_one_argument = {$name} takes 1 argument builtin_macros_proc_macro = `proc-macro` crate types currently cannot export any items other than functions tagged with `#[proc_macro]`, `#[proc_macro_derive]`, or `#[proc_macro_attribute]` -builtin_macros_proc_macro_attribute_only_be_used_on_bare_functions = the `#[{$path}]` attribute may only be used on bare functions - builtin_macros_proc_macro_attribute_only_usable_with_crate_type = the `#[{$path}]` attribute is only usable with crates of the `proc-macro` crate type builtin_macros_requires_cfg_pattern = diff --git a/compiler/rustc_builtin_macros/src/assert.rs b/compiler/rustc_builtin_macros/src/assert.rs index 855da5caa31..013258a1b4e 100644 --- a/compiler/rustc_builtin_macros/src/assert.rs +++ b/compiler/rustc_builtin_macros/src/assert.rs @@ -1,8 +1,8 @@ mod context; -use rustc_ast::token::Delimiter; +use rustc_ast::token::{self, Delimiter}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; -use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment, UnOp, token}; +use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment}; use rustc_ast_pretty::pprust; use rustc_errors::PResult; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult}; @@ -29,7 +29,7 @@ pub(crate) fn expand_assert<'cx>( // `core::panic` and `std::panic` are different macros, so we use call-site // context to pick up whichever is currently in scope. 
- let call_site_span = cx.with_call_site_ctxt(span); + let call_site_span = cx.with_call_site_ctxt(cond_expr.span); let panic_path = || { if use_panic_2021(span) { @@ -63,7 +63,7 @@ pub(crate) fn expand_assert<'cx>( }), })), ); - expr_if_not(cx, call_site_span, cond_expr, then, None) + assert_cond_check(cx, call_site_span, cond_expr, then) } // If `generic_assert` is enabled, generates rich captured outputs // @@ -88,26 +88,33 @@ pub(crate) fn expand_assert<'cx>( )), )], ); - expr_if_not(cx, call_site_span, cond_expr, then, None) + assert_cond_check(cx, call_site_span, cond_expr, then) }; ExpandResult::Ready(MacEager::expr(expr)) } +/// `assert!($cond_expr, $custom_message)` struct Assert { cond_expr: Box<Expr>, custom_message: Option<TokenStream>, } -// if !{ ... } { ... } else { ... } -fn expr_if_not( - cx: &ExtCtxt<'_>, - span: Span, - cond: Box<Expr>, - then: Box<Expr>, - els: Option<Box<Expr>>, -) -> Box<Expr> { - cx.expr_if(span, cx.expr(span, ExprKind::Unary(UnOp::Not, cond)), then, els) +/// `match <cond> { true => {} _ => <then> }` +fn assert_cond_check(cx: &ExtCtxt<'_>, span: Span, cond: Box<Expr>, then: Box<Expr>) -> Box<Expr> { + // Instead of expanding to `if !<cond> { <then> }`, we expand to + // `match <cond> { true => {} _ => <then> }`. + // This allows us to always complain about mismatched types instead of "cannot apply unary + // operator `!` to type `X`" when passing an invalid `<cond>`, while also allowing `<cond>` to + // be `&true`. + let els = cx.expr_block(cx.block(span, thin_vec![])); + let mut arms = thin_vec![]; + arms.push(cx.arm(span, cx.pat_lit(span, cx.expr_bool(span, true)), els)); + arms.push(cx.arm(span, cx.pat_wild(span), then)); + + // We wrap the `match` in a statement to limit the length of any borrows introduced in the + // condition. + cx.expr_block(cx.block(span, [cx.stmt_expr(cx.expr_match(span, cond, arms))].into())) } fn parse_assert<'a>(cx: &ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PResult<'a, Assert> { diff --git a/compiler/rustc_builtin_macros/src/autodiff.rs b/compiler/rustc_builtin_macros/src/autodiff.rs index a662840eda5..48d0795af5e 100644 --- a/compiler/rustc_builtin_macros/src/autodiff.rs +++ b/compiler/rustc_builtin_macros/src/autodiff.rs @@ -15,11 +15,12 @@ mod llvm_enzyme { use rustc_ast::tokenstream::*; use rustc_ast::visit::AssocCtxt::*; use rustc_ast::{ - self as ast, AssocItemKind, BindingMode, ExprKind, FnRetTy, FnSig, Generics, ItemKind, - MetaItemInner, PatKind, QSelf, TyKind, Visibility, + self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocItemKind, BindingMode, + FnRetTy, FnSig, GenericArg, GenericArgs, GenericParamKind, Generics, ItemKind, + MetaItemInner, PatKind, Path, PathSegment, TyKind, Visibility, }; use rustc_expand::base::{Annotatable, ExtCtxt}; - use rustc_span::{Ident, Span, Symbol, kw, sym}; + use rustc_span::{Ident, Span, Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use tracing::{debug, trace}; @@ -179,11 +180,8 @@ mod llvm_enzyme { } /// We expand the autodiff macro to generate a new placeholder function which passes - /// type-checking and can be called by users. The function body of the placeholder function will - /// later be replaced on LLVM-IR level, so the design of the body is less important and for now - /// should just prevent early inlining and optimizations which alter the function signature. - /// The exact signature of the generated function depends on the configuration provided by the - /// user, but here is an example: + /// type-checking and can be called by users. 
The exact signature of the generated function + /// depends on the configuration provided by the user, but here is an example: /// /// ``` /// #[autodiff(cos_box, Reverse, Duplicated, Active)] @@ -194,19 +192,12 @@ mod llvm_enzyme { /// which becomes expanded to: /// ``` /// #[rustc_autodiff] - /// #[inline(never)] /// fn sin(x: &Box<f32>) -> f32 { /// f32::sin(**x) /// } /// #[rustc_autodiff(Reverse, Duplicated, Active)] - /// #[inline(never)] /// fn cos_box(x: &Box<f32>, dx: &mut Box<f32>, dret: f32) -> f32 { - /// unsafe { - /// asm!("NOP"); - /// }; - /// ::core::hint::black_box(sin(x)); - /// ::core::hint::black_box((dx, dret)); - /// ::core::hint::black_box(sin(x)) + /// std::intrinsics::autodiff(sin::<>, cos_box::<>, (x, dx, dret)) /// } /// ``` /// FIXME(ZuseZ4): Once autodiff is enabled by default, make this a doc comment which is checked @@ -227,16 +218,24 @@ mod llvm_enzyme { // first get information about the annotable item: visibility, signature, name and generic // parameters. // these will be used to generate the differentiated version of the function - let Some((vis, sig, primal, generics)) = (match &item { - Annotatable::Item(iitem) => extract_item_info(iitem), + let Some((vis, sig, primal, generics, impl_of_trait)) = (match &item { + Annotatable::Item(iitem) => { + extract_item_info(iitem).map(|(v, s, p, g)| (v, s, p, g, false)) + } Annotatable::Stmt(stmt) => match &stmt.kind { - ast::StmtKind::Item(iitem) => extract_item_info(iitem), + ast::StmtKind::Item(iitem) => { + extract_item_info(iitem).map(|(v, s, p, g)| (v, s, p, g, false)) + } _ => None, }, - Annotatable::AssocItem(assoc_item, Impl { .. }) => match &assoc_item.kind { - ast::AssocItemKind::Fn(box ast::Fn { sig, ident, generics, .. }) => { - Some((assoc_item.vis.clone(), sig.clone(), ident.clone(), generics.clone())) - } + Annotatable::AssocItem(assoc_item, Impl { of_trait }) => match &assoc_item.kind { + ast::AssocItemKind::Fn(box ast::Fn { sig, ident, generics, .. }) => Some(( + assoc_item.vis.clone(), + sig.clone(), + ident.clone(), + generics.clone(), + *of_trait, + )), _ => None, }, _ => None, @@ -254,7 +253,6 @@ mod llvm_enzyme { }; let has_ret = has_ret(&sig.decl.output); - let sig_span = ecx.with_call_site_ctxt(sig.span); // create TokenStream from vec elemtents: // meta_item doesn't have a .tokens field @@ -323,19 +321,23 @@ mod llvm_enzyme { } let span = ecx.with_def_site_ctxt(expand_span); - let n_active: u32 = x - .input_activity - .iter() - .filter(|a| **a == DiffActivity::Active || **a == DiffActivity::ActiveOnly) - .count() as u32; - let (d_sig, new_args, idents, errored) = gen_enzyme_decl(ecx, &sig, &x, span); - let d_body = gen_enzyme_body( - ecx, &x, n_active, &sig, &d_sig, primal, &new_args, span, sig_span, idents, errored, - &generics, + let d_sig = gen_enzyme_decl(ecx, &sig, &x, span); + + let d_body = ecx.block( + span, + thin_vec![call_autodiff( + ecx, + primal, + first_ident(&meta_item_vec[0]), + span, + &d_sig, + &generics, + impl_of_trait, + )], ); // The first element of it is the name of the function to be generated - let asdf = Box::new(ast::Fn { + let d_fn = Box::new(ast::Fn { defaultness: ast::Defaultness::Final, sig: d_sig, ident: first_ident(&meta_item_vec[0]), @@ -368,7 +370,7 @@ mod llvm_enzyme { let new_id = ecx.sess.psess.attr_id_generator.mk_attr_id(); let inline_never = outer_normal_attr(&inline_never_attr, new_id, span); - // We're avoid duplicating the attributes `#[rustc_autodiff]` and `#[inline(never)]`. + // We avoid duplicating the attribute `#[rustc_autodiff]`.
fn same_attribute(attr: &ast::AttrKind, item: &ast::AttrKind) -> bool { match (attr, item) { (ast::AttrKind::Normal(a), ast::AttrKind::Normal(b)) => { @@ -381,14 +383,16 @@ mod llvm_enzyme { } } + let mut has_inline_never = false; + // Don't add it multiple times: let orig_annotatable: Annotatable = match item { Annotatable::Item(ref mut iitem) => { if !iitem.attrs.iter().any(|a| same_attribute(&a.kind, &attr.kind)) { iitem.attrs.push(attr); } - if !iitem.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) { - iitem.attrs.push(inline_never.clone()); + if iitem.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) { + has_inline_never = true; } Annotatable::Item(iitem.clone()) } @@ -396,8 +400,8 @@ mod llvm_enzyme { if !assoc_item.attrs.iter().any(|a| same_attribute(&a.kind, &attr.kind)) { assoc_item.attrs.push(attr); } - if !assoc_item.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) { - assoc_item.attrs.push(inline_never.clone()); + if assoc_item.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) { + has_inline_never = true; } Annotatable::AssocItem(assoc_item.clone(), i) } @@ -407,9 +411,8 @@ mod llvm_enzyme { if !iitem.attrs.iter().any(|a| same_attribute(&a.kind, &attr.kind)) { iitem.attrs.push(attr); } - if !iitem.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) - { - iitem.attrs.push(inline_never.clone()); + if iitem.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) { + has_inline_never = true; } } _ => unreachable!("stmt kind checked previously"), @@ -428,12 +431,21 @@ mod llvm_enzyme { tokens: ts, }); + let new_id = ecx.sess.psess.attr_id_generator.mk_attr_id(); let d_attr = outer_normal_attr(&rustc_ad_attr, new_id, span); + + // If the source function has the `#[inline(never)]` attribute, we'll also add it to the diff function + let mut d_attrs = thin_vec![d_attr]; + + if has_inline_never { + d_attrs.push(inline_never); + } + let d_annotatable = match &item { Annotatable::AssocItem(_, _) => { - let assoc_item: AssocItemKind = ast::AssocItemKind::Fn(asdf); + let assoc_item: AssocItemKind = ast::AssocItemKind::Fn(d_fn); let d_fn = Box::new(ast::AssocItem { - attrs: thin_vec![d_attr, inline_never], + attrs: d_attrs, id: ast::DUMMY_NODE_ID, span, vis, @@ -443,13 +455,13 @@ mod llvm_enzyme { Annotatable::AssocItem(d_fn, Impl { of_trait: false }) } Annotatable::Item(_) => { - let mut d_fn = ecx.item(span, thin_vec![d_attr, inline_never], ItemKind::Fn(asdf)); + let mut d_fn = ecx.item(span, d_attrs, ItemKind::Fn(d_fn)); d_fn.vis = vis; Annotatable::Item(d_fn) } Annotatable::Stmt(_) => { - let mut d_fn = ecx.item(span, thin_vec![d_attr, inline_never], ItemKind::Fn(asdf)); + let mut d_fn = ecx.item(span, d_attrs, ItemKind::Fn(d_fn)); d_fn.vis = vis; Annotatable::Stmt(Box::new(ast::Stmt { @@ -484,282 +496,95 @@ mod llvm_enzyme { ty } - // Will generate a body of the type: + // Generate `autodiff` intrinsic call // ``` - // { - // unsafe { - // asm!("NOP"); - // } - // ::core::hint::black_box(primal(args)); - // ::core::hint::black_box((args, ret)); - // <This part remains to be done by following function> - // } + // std::intrinsics::autodiff(source, diff, (args)) // ``` - fn init_body_helper( + fn call_autodiff( ecx: &ExtCtxt<'_>, - span: Span, primal: Ident, - new_names: &[String], - sig_span: Span, - new_decl_span: Span, - idents: &[Ident], - errored: bool, + diff: Ident, + span: Span, + d_sig: &FnSig, generics: &Generics, - ) -> (Box<ast::Block>, Box<ast::Expr>, Box<ast::Expr>, Box<ast::Expr>) { - 
let blackbox_path = ecx.std_path(&[sym::hint, sym::black_box]); - let noop = ast::InlineAsm { - asm_macro: ast::AsmMacro::Asm, - template: vec![ast::InlineAsmTemplatePiece::String("NOP".into())], - template_strs: Box::new([]), - operands: vec![], - clobber_abis: vec![], - options: ast::InlineAsmOptions::PURE | ast::InlineAsmOptions::NOMEM, - line_spans: vec![], - }; - let noop_expr = ecx.expr_asm(span, Box::new(noop)); - let unsf = ast::BlockCheckMode::Unsafe(ast::UnsafeSource::CompilerGenerated); - let unsf_block = ast::Block { - stmts: thin_vec![ecx.stmt_semi(noop_expr)], - id: ast::DUMMY_NODE_ID, - tokens: None, - rules: unsf, + is_impl: bool, + ) -> rustc_ast::Stmt { + let primal_path_expr = gen_turbofish_expr(ecx, primal, generics, span, is_impl); + let diff_path_expr = gen_turbofish_expr(ecx, diff, generics, span, is_impl); + + let tuple_expr = ecx.expr_tuple( span, - }; - let unsf_expr = ecx.expr_block(Box::new(unsf_block)); - let blackbox_call_expr = ecx.expr_path(ecx.path(span, blackbox_path)); - let primal_call = gen_primal_call(ecx, span, primal, idents, generics); - let black_box_primal_call = ecx.expr_call( - new_decl_span, - blackbox_call_expr.clone(), - thin_vec![primal_call.clone()], + d_sig + .decl + .inputs + .iter() + .map(|arg| match arg.pat.kind { + PatKind::Ident(_, ident, _) => ecx.expr_path(ecx.path_ident(span, ident)), + _ => todo!(), + }) + .collect::<ThinVec<_>>() + .into(), ); - let tup_args = new_names - .iter() - .map(|arg| ecx.expr_path(ecx.path_ident(span, Ident::from_str(arg)))) - .collect(); - let black_box_remaining_args = ecx.expr_call( - sig_span, - blackbox_call_expr.clone(), - thin_vec![ecx.expr_tuple(sig_span, tup_args)], + let enzyme_path_idents = ecx.std_path(&[sym::intrinsics, sym::autodiff]); + let enzyme_path = ecx.path(span, enzyme_path_idents); + let call_expr = ecx.expr_call( + span, + ecx.expr_path(enzyme_path), + vec![primal_path_expr, diff_path_expr, tuple_expr].into(), ); - let mut body = ecx.block(span, ThinVec::new()); - body.stmts.push(ecx.stmt_semi(unsf_expr)); - - // This uses primal args which won't be available if we errored before - if !errored { - body.stmts.push(ecx.stmt_semi(black_box_primal_call.clone())); - } - body.stmts.push(ecx.stmt_semi(black_box_remaining_args)); - - (body, primal_call, black_box_primal_call, blackbox_call_expr) + ecx.stmt_expr(call_expr) } - /// We only want this function to type-check, since we will replace the body - /// later on llvm level. Using `loop {}` does not cover all return types anymore, - /// so instead we manually build something that should pass the type checker. - /// We also add a inline_asm line, as one more barrier for rustc to prevent inlining - /// or const propagation. inline_asm will also triggers an Enzyme crash if due to another - /// bug would ever try to accidentally differentiate this placeholder function body. - /// Finally, we also add back_box usages of all input arguments, to prevent rustc - /// from optimizing any arguments away. 
- fn gen_enzyme_body( + // Generate turbofish expression from fn name and generics + // Given `foo` and `<A, B, C>` params, gen `foo::<A, B, C>` + // We use this expression when passing primal and diff function to the autodiff intrinsic + fn gen_turbofish_expr( ecx: &ExtCtxt<'_>, - x: &AutoDiffAttrs, - n_active: u32, - sig: &ast::FnSig, - d_sig: &ast::FnSig, - primal: Ident, - new_names: &[String], - span: Span, - sig_span: Span, - idents: Vec<Ident>, - errored: bool, + ident: Ident, generics: &Generics, - ) -> Box<ast::Block> { - let new_decl_span = d_sig.span; - - // Just adding some default inline-asm and black_box usages to prevent early inlining - // and optimizations which alter the function signature. - // - // The bb_primal_call is the black_box call of the primal function. We keep it around, - // since it has the convenient property of returning the type of the primal function, - // Remember, we only care to match types here. - // No matter which return we pick, we always wrap it into a std::hint::black_box call, - // to prevent rustc from propagating it into the caller. - let (mut body, primal_call, bb_primal_call, bb_call_expr) = init_body_helper( - ecx, - span, - primal, - new_names, - sig_span, - new_decl_span, - &idents, - errored, - generics, - ); - - if !has_ret(&d_sig.decl.output) { - // there is no return type that we have to match, () works fine. - return body; - } - - // Everything from here onwards just tries to fulfil the return type. Fun! - - // having an active-only return means we'll drop the original return type. - // So that can be treated identical to not having one in the first place. - let primal_ret = has_ret(&sig.decl.output) && !x.has_active_only_ret(); - - if primal_ret && n_active == 0 && x.mode.is_rev() { - // We only have the primal ret. - body.stmts.push(ecx.stmt_expr(bb_primal_call)); - return body; - } - - if !primal_ret && n_active == 1 { - // Again no tuple return, so return default float val. - let ty = match d_sig.decl.output { - FnRetTy::Ty(ref ty) => ty.clone(), - FnRetTy::Default(span) => { - panic!("Did not expect Default ret ty: {:?}", span); + span: Span, + is_impl: bool, + ) -> Box<ast::Expr> { + let generic_args = generics + .params + .iter() + .filter_map(|p| match &p.kind { + GenericParamKind::Type { .. } => { + let path = ast::Path::from_ident(p.ident); + let ty = ecx.ty_path(path); + Some(AngleBracketedArg::Arg(GenericArg::Type(ty))) } - }; - let arg = ty.kind.is_simple_path().unwrap(); - let tmp = ecx.def_site_path(&[arg, kw::Default]); - let default_call_expr = ecx.expr_path(ecx.path(span, tmp)); - let default_call_expr = ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); - body.stmts.push(ecx.stmt_expr(default_call_expr)); - return body; - } - - let mut exprs: Box<ast::Expr> = primal_call; - let d_ret_ty = match d_sig.decl.output { - FnRetTy::Ty(ref ty) => ty.clone(), - FnRetTy::Default(span) => { - panic!("Did not expect Default ret ty: {:?}", span); - } - }; - if x.mode.is_fwd() { - // Fwd mode is easy. If the return activity is Const, we support arbitrary types. - // Otherwise, we only support a scalar, a pair of scalars, or an array of scalars. - // We checked that (on a best-effort base) in the preceding gen_enzyme_decl function. - // In all three cases, we can return `std::hint::black_box(<T>::default())`. - if x.ret_activity == DiffActivity::Const { - // Here we call the primal function, since our dummy function has the same return - // type due to the Const return activity. 
- exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![exprs]); - } else { - let q = QSelf { ty: d_ret_ty, path_span: span, position: 0 }; - let y = ExprKind::Path( - Some(Box::new(q)), - ecx.path_ident(span, Ident::with_dummy_span(kw::Default)), - ); - let default_call_expr = ecx.expr(span, y); - let default_call_expr = - ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); - exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![default_call_expr]); - } - } else if x.mode.is_rev() { - if x.width == 1 { - // We either have `-> ArbitraryType` or `-> (ArbitraryType, repeated_float_scalars)`. - match d_ret_ty.kind { - TyKind::Tup(ref args) => { - // We have a tuple return type. We need to create a tuple of the same size - // and fill it with default values. - let mut exprs2 = thin_vec![exprs]; - for arg in args.iter().skip(1) { - let arg = arg.kind.is_simple_path().unwrap(); - let tmp = ecx.def_site_path(&[arg, kw::Default]); - let default_call_expr = ecx.expr_path(ecx.path(span, tmp)); - let default_call_expr = - ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); - exprs2.push(default_call_expr); - } - exprs = ecx.expr_tuple(new_decl_span, exprs2); - } - _ => { - // Interestingly, even the `-> ArbitraryType` case - // ends up getting matched and handled correctly above, - // so we don't have to handle any other case for now. - panic!("Unsupported return type: {:?}", d_ret_ty); - } + GenericParamKind::Const { .. } => { + let expr = ecx.expr_path(ast::Path::from_ident(p.ident)); + let anon_const = AnonConst { id: ast::DUMMY_NODE_ID, value: expr }; + Some(AngleBracketedArg::Arg(GenericArg::Const(anon_const))) } - } - exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![exprs]); - } else { - unreachable!("Unsupported mode: {:?}", x.mode); - } + GenericParamKind::Lifetime { .. } => None, + }) + .collect::<ThinVec<_>>(); - body.stmts.push(ecx.stmt_expr(exprs)); - - body - } + let args: AngleBracketedArgs = AngleBracketedArgs { span, args: generic_args }; - fn gen_primal_call( - ecx: &ExtCtxt<'_>, - span: Span, - primal: Ident, - idents: &[Ident], - generics: &Generics, - ) -> Box<ast::Expr> { - let has_self = idents.len() > 0 && idents[0].name == kw::SelfLower; + let segment = PathSegment { + ident, + id: ast::DUMMY_NODE_ID, + args: Some(Box::new(GenericArgs::AngleBracketed(args))), + }; - if has_self { - let args: ThinVec<_> = - idents[1..].iter().map(|arg| ecx.expr_path(ecx.path_ident(span, *arg))).collect(); - let self_expr = ecx.expr_self(span); - ecx.expr_method_call(span, self_expr, primal, args) + let segments = if is_impl { + thin_vec![ + PathSegment { ident: Ident::from_str("Self"), id: ast::DUMMY_NODE_ID, args: None }, + segment, + ] } else { - let args: ThinVec<_> = - idents.iter().map(|arg| ecx.expr_path(ecx.path_ident(span, *arg))).collect(); - let mut primal_path = ecx.path_ident(span, primal); - - let is_generic = !generics.params.is_empty(); - - match (is_generic, primal_path.segments.last_mut()) { - (true, Some(function_path)) => { - let primal_generic_types = generics - .params - .iter() - .filter(|param| matches!(param.kind, ast::GenericParamKind::Type { .. 
})); - - let generated_generic_types = primal_generic_types - .map(|type_param| { - let generic_param = TyKind::Path( - None, - ast::Path { - span, - segments: thin_vec![ast::PathSegment { - ident: type_param.ident, - args: None, - id: ast::DUMMY_NODE_ID, - }], - tokens: None, - }, - ); - - ast::AngleBracketedArg::Arg(ast::GenericArg::Type(Box::new(ast::Ty { - id: type_param.id, - span, - kind: generic_param, - tokens: None, - }))) - }) - .collect(); - - function_path.args = - Some(Box::new(ast::GenericArgs::AngleBracketed(ast::AngleBracketedArgs { - span, - args: generated_generic_types, - }))); - } - _ => {} - } + thin_vec![segment] + }; - let primal_call_expr = ecx.expr_path(primal_path); - ecx.expr_call(span, primal_call_expr, args) - } + let path = Path { span, segments, tokens: None }; + + ecx.expr_path(path) } // Generate the new function declaration. Const arguments are kept as is. Duplicated arguments must @@ -778,7 +603,7 @@ mod llvm_enzyme { sig: &ast::FnSig, x: &AutoDiffAttrs, span: Span, - ) -> (ast::FnSig, Vec<String>, Vec<Ident>, bool) { + ) -> ast::FnSig { let dcx = ecx.sess.dcx(); let has_ret = has_ret(&sig.decl.output); let sig_args = sig.decl.inputs.len() + if has_ret { 1 } else { 0 }; @@ -790,7 +615,7 @@ mod llvm_enzyme { found: num_activities, }); // This is not the right signature, but we can continue parsing. - return (sig.clone(), vec![], vec![], true); + return sig.clone(); } assert!(sig.decl.inputs.len() == x.input_activity.len()); assert!(has_ret == x.has_ret_activity()); @@ -833,7 +658,7 @@ mod llvm_enzyme { if errors { // This is not the right signature, but we can continue parsing. - return (sig.clone(), new_inputs, idents, true); + return sig.clone(); } let unsafe_activities = x @@ -1047,7 +872,7 @@ mod llvm_enzyme { } let d_sig = FnSig { header: d_header, decl: d_decl, span }; trace!("Generated signature: {:?}", d_sig); - (d_sig, new_inputs, idents, false) + d_sig } } diff --git a/compiler/rustc_builtin_macros/src/cfg_accessible.rs b/compiler/rustc_builtin_macros/src/cfg_accessible.rs index f7d8f4aa783..48d80004cdd 100644 --- a/compiler/rustc_builtin_macros/src/cfg_accessible.rs +++ b/compiler/rustc_builtin_macros/src/cfg_accessible.rs @@ -1,9 +1,9 @@ //! Implementation of the `#[cfg_accessible(path)]` attribute macro. 
use rustc_ast as ast; +use rustc_attr_parsing::validate_attr; use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier}; use rustc_feature::AttributeTemplate; -use rustc_parse::validate_attr; use rustc_span::{Span, sym}; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs index a33eca43de5..09d827b0635 100644 --- a/compiler/rustc_builtin_macros/src/derive.rs +++ b/compiler/rustc_builtin_macros/src/derive.rs @@ -1,10 +1,10 @@ use rustc_ast as ast; use rustc_ast::{GenericParamKind, ItemKind, MetaItemInner, MetaItemKind, StmtKind}; +use rustc_attr_parsing::validate_attr; use rustc_expand::base::{ Annotatable, DeriveResolution, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier, }; use rustc_feature::AttributeTemplate; -use rustc_parse::validate_attr; use rustc_session::Session; use rustc_span::{ErrorGuaranteed, Ident, Span, sym}; diff --git a/compiler/rustc_builtin_macros/src/deriving/from.rs b/compiler/rustc_builtin_macros/src/deriving/from.rs new file mode 100644 index 00000000000..ab25de7c917 --- /dev/null +++ b/compiler/rustc_builtin_macros/src/deriving/from.rs @@ -0,0 +1,133 @@ +use rustc_ast as ast; +use rustc_ast::{ItemKind, VariantData}; +use rustc_errors::MultiSpan; +use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt}; +use rustc_span::{Ident, Span, kw, sym}; +use thin_vec::thin_vec; + +use crate::deriving::generic::ty::{Bounds, Path, PathKind, Ty}; +use crate::deriving::generic::{ + BlockOrExpr, FieldlessVariantsStrategy, MethodDef, SubstructureFields, TraitDef, + combine_substructure, +}; +use crate::deriving::pathvec_std; +use crate::errors; + +/// Generate an implementation of the `From` trait, provided that `item` +/// is a struct or a tuple struct with exactly one field. +pub(crate) fn expand_deriving_from( + cx: &ExtCtxt<'_>, + span: Span, + mitem: &ast::MetaItem, + annotatable: &Annotatable, + push: &mut dyn FnMut(Annotatable), + is_const: bool, +) { + let Annotatable::Item(item) = &annotatable else { + cx.dcx().bug("derive(From) used on something else than an item"); + }; + + let err_span = || { + let item_span = item.kind.ident().map(|ident| ident.span).unwrap_or(item.span); + MultiSpan::from_spans(vec![span, item_span]) + }; + + // `#[derive(From)]` is currently usable only on structs with exactly one field. 
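+ // For example, `struct S(u32)` gets `impl From<u32> for S` (see the generated-code sketch + // further down); enums, unions, and structs with zero or more than one field are rejected + // with a dedicated error below.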
+ let field = match &item.kind { + ItemKind::Struct(_, _, data) => { + if let [field] = data.fields() { + Ok(field.clone()) + } else { + let guar = cx.dcx().emit_err(errors::DeriveFromWrongFieldCount { + span: err_span(), + multiple_fields: data.fields().len() > 1, + }); + Err(guar) + } + } + ItemKind::Enum(_, _, _) | ItemKind::Union(_, _, _) => { + let guar = cx.dcx().emit_err(errors::DeriveFromWrongTarget { + span: err_span(), + kind: &format!("{} {}", item.kind.article(), item.kind.descr()), + }); + Err(guar) + } + _ => cx.dcx().bug("Invalid derive(From) ADT input"), + }; + + let from_type = Ty::AstTy(match field { + Ok(ref field) => field.ty.clone(), + Err(guar) => cx.ty(span, ast::TyKind::Err(guar)), + }); + + let path = + Path::new_(pathvec_std!(convert::From), vec![Box::new(from_type.clone())], PathKind::Std); + + // Generate code like this: + // + // struct S(u32); + // #[automatically_derived] + // impl ::core::convert::From<u32> for S { + // #[inline] + // fn from(value: u32) -> S { + // Self(value) + // } + // } + let from_trait_def = TraitDef { + span, + path, + skip_path_as_bound: true, + needs_copy_as_bound_if_packed: false, + additional_bounds: Vec::new(), + supports_unions: false, + methods: vec![MethodDef { + name: sym::from, + generics: Bounds { bounds: vec![] }, + explicit_self: false, + nonself_args: vec![(from_type, sym::value)], + ret_ty: Ty::Self_, + attributes: thin_vec![cx.attr_word(sym::inline, span)], + fieldless_variants_strategy: FieldlessVariantsStrategy::Default, + combine_substructure: combine_substructure(Box::new(|cx, span, substructure| { + let field = match field { + Ok(ref field) => field, + Err(guar) => { + return BlockOrExpr::new_expr(DummyResult::raw_expr(span, Some(guar))); + } + }; + + let self_kw = Ident::new(kw::SelfUpper, span); + let expr: Box<ast::Expr> = match substructure.fields { + SubstructureFields::StaticStruct(variant, _) => match variant { + // Self { field: value } + VariantData::Struct { .. } => cx.expr_struct_ident( + span, + self_kw, + thin_vec![cx.field_imm( + span, + field.ident.unwrap(), + cx.expr_ident(span, Ident::new(sym::value, span)) + )], + ), + // Self(value) + VariantData::Tuple(_, _) => cx.expr_call_ident( + span, + self_kw, + thin_vec![cx.expr_ident(span, Ident::new(sym::value, span))], + ), + variant => { + cx.dcx().bug(format!("Invalid derive(From) ADT variant: {variant:?}")); + } + }, + _ => cx.dcx().bug("Invalid derive(From) ADT input"), + }; + BlockOrExpr::new_expr(expr) + })), + }], + associated_types: Vec::new(), + is_const, + is_staged_api_crate: cx.ecfg.features.staged_api(), + }; + + from_trait_def.expand(cx, mitem, annotatable, push); +} diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs b/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs index 00e70b21cf4..1458553d492 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs @@ -2,7 +2,7 @@ //! when specifying impls to be derived. pub(crate) use Ty::*; -use rustc_ast::{self as ast, Expr, GenericArg, GenericParamKind, Generics, SelfKind}; +use rustc_ast::{self as ast, Expr, GenericArg, GenericParamKind, Generics, SelfKind, TyKind}; use rustc_expand::base::ExtCtxt; use rustc_span::source_map::respan; use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw}; @@ -65,7 +65,7 @@ impl Path { } } -/// A type. Supports pointers, Self, and literals. +/// A type. Supports pointers, Self, literals, unit or an arbitrary AST path. 
#[derive(Clone)] pub(crate) enum Ty { Self_, @@ -76,6 +76,8 @@ pub(crate) enum Ty { Path(Path), /// For () return types. Unit, + /// An arbitrary type. + AstTy(Box<ast::Ty>), } pub(crate) fn self_ref() -> Ty { @@ -101,6 +103,7 @@ impl Ty { let ty = ast::TyKind::Tup(ThinVec::new()); cx.ty(span, ty) } + AstTy(ty) => ty.clone(), } } @@ -132,6 +135,10 @@ impl Ty { cx.path_all(span, false, vec![self_ty], params) } Path(p) => p.to_path(cx, span, self_ty, generics), + AstTy(ty) => match &ty.kind { + TyKind::Path(_, path) => path.clone(), + _ => cx.dcx().span_bug(span, "non-path in a path in generic `derive`"), + }, Ref(..) => cx.dcx().span_bug(span, "ref in a path in generic `derive`"), Unit => cx.dcx().span_bug(span, "unit in a path in generic `derive`"), } diff --git a/compiler/rustc_builtin_macros/src/deriving/mod.rs b/compiler/rustc_builtin_macros/src/deriving/mod.rs index 1edc2965def..cee6952fa34 100644 --- a/compiler/rustc_builtin_macros/src/deriving/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/mod.rs @@ -23,6 +23,7 @@ pub(crate) mod clone; pub(crate) mod coerce_pointee; pub(crate) mod debug; pub(crate) mod default; +pub(crate) mod from; pub(crate) mod hash; #[path = "cmp/eq.rs"] diff --git a/compiler/rustc_builtin_macros/src/errors.rs b/compiler/rustc_builtin_macros/src/errors.rs index 6bcf4d3e0a2..54e8f750337 100644 --- a/compiler/rustc_builtin_macros/src/errors.rs +++ b/compiler/rustc_builtin_macros/src/errors.rs @@ -447,6 +447,24 @@ pub(crate) struct DefaultHasArg { } #[derive(Diagnostic)] +#[diag(builtin_macros_derive_from_wrong_target)] +#[note(builtin_macros_derive_from_usage_note)] +pub(crate) struct DeriveFromWrongTarget<'a> { + #[primary_span] + pub(crate) span: MultiSpan, + pub(crate) kind: &'a str, +} + +#[derive(Diagnostic)] +#[diag(builtin_macros_derive_from_wrong_field_count)] +#[note(builtin_macros_derive_from_usage_note)] +pub(crate) struct DeriveFromWrongFieldCount { + #[primary_span] + pub(crate) span: MultiSpan, + pub(crate) multiple_fields: bool, +} + +#[derive(Diagnostic)] #[diag(builtin_macros_derive_macro_call)] pub(crate) struct DeriveMacroCall { #[primary_span] @@ -906,14 +924,6 @@ pub(crate) struct TakesNoArguments<'a> { } #[derive(Diagnostic)] -#[diag(builtin_macros_proc_macro_attribute_only_be_used_on_bare_functions)] -pub(crate) struct AttributeOnlyBeUsedOnBareFunctions<'a> { - #[primary_span] - pub span: Span, - pub path: &'a str, -} - -#[derive(Diagnostic)] #[diag(builtin_macros_proc_macro_attribute_only_usable_with_crate_type)] pub(crate) struct AttributeOnlyUsableWithCrateType<'a> { #[primary_span] diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index ec613b7b710..6415e55e0b0 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -10,11 +10,12 @@ use rustc_ast::{ }; use rustc_data_structures::fx::FxHashSet; use rustc_errors::{ - Applicability, Diag, MultiSpan, PResult, SingleLabelManySpans, listify, pluralize, + Applicability, BufferedEarlyLint, Diag, MultiSpan, PResult, SingleLabelManySpans, listify, + pluralize, }; use rustc_expand::base::*; use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY; -use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, LintId}; +use rustc_lint_defs::{BuiltinLintDiag, LintId}; use rustc_parse::exp; use rustc_parse_format as parse; use rustc_span::{BytePos, ErrorGuaranteed, Ident, InnerSpan, Span, Symbol}; @@ -595,7 +596,8 @@ fn make_format_args( named_arg_sp: arg_name.span, named_arg_name: 
arg_name.name.to_string(), is_formatting_arg: matches!(used_as, Width | Precision), - }, + } + .into(), }); } } diff --git a/compiler/rustc_builtin_macros/src/format_foreign.rs b/compiler/rustc_builtin_macros/src/format_foreign.rs index 3e5a26c0556..cf563a53973 100644 --- a/compiler/rustc_builtin_macros/src/format_foreign.rs +++ b/compiler/rustc_builtin_macros/src/format_foreign.rs @@ -416,7 +416,7 @@ pub(crate) mod printf { // Yes, this *is* the parameter. Some(('$', end2)) => { state = Flags; - parameter = Some(at.slice_between(end).unwrap().parse().unwrap()); + parameter = at.slice_between(end).unwrap().parse().ok(); move_to!(end2); } // Wait, no, actually, it's the width. diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs index 7bc448a9acb..1bcea95fbb7 100644 --- a/compiler/rustc_builtin_macros/src/lib.rs +++ b/compiler/rustc_builtin_macros/src/lib.rs @@ -8,7 +8,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] -#![feature(autodiff)] #![feature(box_patterns)] #![feature(decl_macro)] #![feature(if_let_guard)] @@ -139,6 +138,7 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) { PartialEq: partial_eq::expand_deriving_partial_eq, PartialOrd: partial_ord::expand_deriving_partial_ord, CoercePointee: coerce_pointee::expand_deriving_coerce_pointee, + From: from::expand_deriving_from, } let client = rustc_proc_macro::bridge::client::Client::expand1(rustc_proc_macro::quote); diff --git a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs index f440adf6cf0..6ac3e17503d 100644 --- a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs +++ b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs @@ -231,12 +231,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { let fn_ident = if let ast::ItemKind::Fn(fn_) = &item.kind { fn_.ident } else { - self.dcx - .create_err(errors::AttributeOnlyBeUsedOnBareFunctions { - span: attr.span, - path: &pprust::path_to_string(&attr.get_normal_item().path), - }) - .emit(); + // Error handled by general target checking logic return; }; diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index e803f3be82b..a9d91f77560 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -141,7 +141,7 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { if let ast::ItemKind::Mod( _, _, - ModKind::Loaded(.., ast::ModSpans { inner_span: span, .. }, _), + ModKind::Loaded(.., ast::ModSpans { inner_span: span, .. 
}), ) = item.kind { let prev_tests = mem::take(&mut self.tests); diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs index f00c170e485..3a4585d5be9 100644 --- a/compiler/rustc_builtin_macros/src/util.rs +++ b/compiler/rustc_builtin_macros/src/util.rs @@ -1,12 +1,13 @@ use rustc_ast::tokenstream::TokenStream; use rustc_ast::{self as ast, AttrStyle, Attribute, MetaItem, attr, token}; +use rustc_attr_parsing::validate_attr; use rustc_errors::{Applicability, Diag, ErrorGuaranteed}; use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt}; use rustc_expand::expand::AstFragment; use rustc_feature::AttributeTemplate; use rustc_lint_defs::BuiltinLintDiag; use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES; -use rustc_parse::{exp, parser, validate_attr}; +use rustc_parse::{exp, parser}; use rustc_session::errors::report_lit_error; use rustc_span::{BytePos, Span, Symbol}; diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs index bec546badc9..a56466750e7 100644 --- a/compiler/rustc_codegen_cranelift/src/constant.rs +++ b/compiler/rustc_codegen_cranelift/src/constant.rs @@ -281,8 +281,8 @@ fn data_id_for_static( .abi .bytes(); - let linkage = if import_linkage == rustc_middle::mir::mono::Linkage::ExternalWeak - || import_linkage == rustc_middle::mir::mono::Linkage::WeakAny + let linkage = if import_linkage == rustc_hir::attrs::Linkage::ExternalWeak + || import_linkage == rustc_hir::attrs::Linkage::WeakAny { Linkage::Preemptible } else { @@ -332,8 +332,8 @@ fn data_id_for_static( let linkage = if definition { crate::linkage::get_static_linkage(tcx, def_id) - } else if attrs.linkage == Some(rustc_middle::mir::mono::Linkage::ExternalWeak) - || attrs.linkage == Some(rustc_middle::mir::mono::Linkage::WeakAny) + } else if attrs.linkage == Some(rustc_hir::attrs::Linkage::ExternalWeak) + || attrs.linkage == Some(rustc_hir::attrs::Linkage::WeakAny) { Linkage::Preemptible } else { diff --git a/compiler/rustc_codegen_cranelift/src/driver/aot.rs b/compiler/rustc_codegen_cranelift/src/driver/aot.rs index 8ec3599b63d..7e77781dc2f 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/aot.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/aot.rs @@ -18,12 +18,11 @@ use rustc_codegen_ssa::{ use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::{IntoDynSyncSend, par_map}; +use rustc_hir::attrs::Linkage as RLinkage; use rustc_metadata::fs::copy_to_stdout; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::mono::{ - CodegenUnit, Linkage as RLinkage, MonoItem, MonoItemData, Visibility, -}; +use rustc_middle::mir::mono::{CodegenUnit, MonoItem, MonoItemData, Visibility}; use rustc_session::Session; use rustc_session::config::{DebugInfo, OutFileName, OutputFilenames, OutputType}; diff --git a/compiler/rustc_codegen_cranelift/src/linkage.rs b/compiler/rustc_codegen_cranelift/src/linkage.rs index ca853aac158..d76ab9d0109 100644 --- a/compiler/rustc_codegen_cranelift/src/linkage.rs +++ b/compiler/rustc_codegen_cranelift/src/linkage.rs @@ -1,4 +1,5 @@ -use rustc_middle::mir::mono::{Linkage as RLinkage, MonoItem, Visibility}; +use rustc_hir::attrs::Linkage as RLinkage; +use rustc_middle::mir::mono::{MonoItem, Visibility}; use crate::prelude::*; diff --git a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml 
b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml index 759d0d59e26..e49c62d6c93 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml @@ -82,20 +82,16 @@ jobs: - name: Build sample project with target defined as JSON spec run: | ./y.sh prepare --only-libcore --cross - ./y.sh build --sysroot --features compiler-builtins-no-f16-f128 --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json + ./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ./y.sh cargo build --manifest-path=./tests/hello-world/Cargo.toml --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json ./y.sh clean all - name: Build run: | ./y.sh prepare --only-libcore --cross - ./y.sh build --sysroot --features compiler-builtins-no-f16-f128 --target-triple m68k-unknown-linux-gnu + ./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu ./y.sh test --mini-tests --target-triple m68k-unknown-linux-gnu - # FIXME: since https://github.com/rust-lang/rust/pull/140809, we cannot run programs for architectures not - # supported by the object crate, since this adds a dependency on symbols.o for the panic runtime. - # And as such, a wrong order of the object files in the linker command now fails with an undefined reference - # to some symbols like __rustc::rust_panic. - #CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests --target-triple m68k-unknown-linux-gnu + CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests --target-triple m68k-unknown-linux-gnu ./y.sh clean all - name: Prepare dependencies @@ -104,23 +100,21 @@ jobs: git config --global user.name "User" ./y.sh prepare --cross - # FIXME: We cannot run programs for architectures not supported by the object crate. See comment above. - #- name: Run tests - #run: | - #./y.sh test --target-triple m68k-unknown-linux-gnu --release --clean --build-sysroot --sysroot-features compiler-builtins-no-f16-f128 ${{ matrix.commands }} - - # FIXME: We cannot run programs for architectures not supported by the object crate. See comment above. - #- name: Run Hello World! - #run: | - #./y.sh build --target-triple m68k-unknown-linux-gnu - - #vm_dir=$(pwd)/vm - #cd tests/hello-world - #CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ../../y.sh cargo build --target m68k-unknown-linux-gnu - #sudo cp target/m68k-unknown-linux-gnu/debug/hello_world $vm_dir/home/ - #sudo chroot $vm_dir qemu-m68k-static /home/hello_world > hello_world_stdout - #expected_output="40" - #test $(cat hello_world_stdout) == $expected_output || (echo "Output differs. Actual output: $(cat hello_world_stdout)"; exit 1) + - name: Run tests + run: | + ./y.sh test --target-triple m68k-unknown-linux-gnu --release --clean --build-sysroot ${{ matrix.commands }} + + - name: Run Hello World! + run: | + ./y.sh build --target-triple m68k-unknown-linux-gnu + + vm_dir=$(pwd)/vm + cd tests/hello-world + CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ../../y.sh cargo build --target m68k-unknown-linux-gnu + sudo cp target/m68k-unknown-linux-gnu/debug/hello_world $vm_dir/home/ + sudo chroot $vm_dir qemu-m68k-static /home/hello_world > hello_world_stdout + expected_output="40" + test $(cat hello_world_stdout) == $expected_output || (echo "Output differs. 
Actual output: $(cat hello_world_stdout)"; exit 1) # Summary job for the merge queue. # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! diff --git a/compiler/rustc_codegen_gcc/Cargo.lock b/compiler/rustc_codegen_gcc/Cargo.lock index 7f35c1a80bd..a5b972baf98 100644 --- a/compiler/rustc_codegen_gcc/Cargo.lock +++ b/compiler/rustc_codegen_gcc/Cargo.lock @@ -56,18 +56,18 @@ dependencies = [ [[package]] name = "gccjit" -version = "2.7.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99a89184220d967dd300139f2d2ae7d52c1a69d632b24aacc57c54625254ce" +checksum = "4a0e310ef75f396cd11b2443b353d55376656ca92c13cba36f92b7aff346ac1a" dependencies = [ "gccjit_sys", ] [[package]] name = "gccjit_sys" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24edb7bfe2b7b27c6d09ed23eebfcab0b359c8fe978433f902943e6f127a0f1b" +checksum = "95ed7572b30cd32430294dde6fb70822d58e67c6846a548647e8739776a0125b" dependencies = [ "libc", ] diff --git a/compiler/rustc_codegen_gcc/Cargo.toml b/compiler/rustc_codegen_gcc/Cargo.toml index 193348d1ef6..6031933bd2d 100644 --- a/compiler/rustc_codegen_gcc/Cargo.toml +++ b/compiler/rustc_codegen_gcc/Cargo.toml @@ -24,7 +24,7 @@ default = ["master"] [dependencies] object = { version = "0.37.0", default-features = false, features = ["std", "read"] } tempfile = "3.20" -gccjit = "2.7" +gccjit = "2.8" #gccjit = { git = "https://github.com/rust-lang/gccjit.rs" } # Local copy. diff --git a/compiler/rustc_codegen_gcc/build_system/src/fmt.rs b/compiler/rustc_codegen_gcc/build_system/src/fmt.rs index 7e6594f50f9..91535f217e3 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/fmt.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/fmt.rs @@ -1,7 +1,7 @@ use std::ffi::OsStr; use std::path::Path; -use crate::utils::run_command_with_output; +use crate::utils::{run_command_with_output, walk_dir}; fn show_usage() { println!( @@ -32,5 +32,31 @@ pub fn run() -> Result<(), String> { if check { &[&"cargo", &"fmt", &"--check"] } else { &[&"cargo", &"fmt"] }; run_command_with_output(cmd, Some(Path::new(".")))?; - run_command_with_output(cmd, Some(Path::new("build_system"))) + run_command_with_output(cmd, Some(Path::new("build_system")))?; + + run_rustfmt_recursively("tests/run", check) +} + +fn run_rustfmt_recursively<P>(dir: P, check: bool) -> Result<(), String> +where + P: AsRef<Path>, +{ + walk_dir( + dir, + &mut |dir| run_rustfmt_recursively(dir, check), + &mut |file_path| { + if file_path.extension().filter(|ext| ext == &OsStr::new("rs")).is_some() { + let rustfmt_cmd: &[&dyn AsRef<OsStr>] = if check { + &[&"rustfmt", &"--check", &file_path] + } else { + &[&"rustfmt", &file_path] + }; + + run_command_with_output(rustfmt_cmd, Some(Path::new("."))) + } else { + Ok(()) + } + }, + true, + ) } diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs index 2c8271c36a9..3dd3fce2eec 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/test.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs @@ -531,7 +531,7 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> { r#"change-id = 115898 [rust] -codegen-backends = [] +codegen-backends = ["gcc"] deny-warnings = false verbose-tests = true diff --git a/compiler/rustc_codegen_gcc/libgccjit.version b/compiler/rustc_codegen_gcc/libgccjit.version index f62154968d3..dc9a0012864 100644 --- 
a/compiler/rustc_codegen_gcc/libgccjit.version +++ b/compiler/rustc_codegen_gcc/libgccjit.version @@ -1 +1 @@ -04ce66d8c918de9273bd7101638ad8724edf5e21 +4e995bd73c4490edfe5080ec6014d63aa9abed5f diff --git a/compiler/rustc_codegen_gcc/rust-toolchain b/compiler/rustc_codegen_gcc/rust-toolchain index 058e734be5c..04d33dfb116 100644 --- a/compiler/rustc_codegen_gcc/rust-toolchain +++ b/compiler/rustc_codegen_gcc/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2025-08-03" +channel = "nightly-2025-08-25" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff --git a/compiler/rustc_codegen_gcc/src/back/lto.rs b/compiler/rustc_codegen_gcc/src/back/lto.rs index d558dfbc1c4..fcee6b6df62 100644 --- a/compiler/rustc_codegen_gcc/src/back/lto.rs +++ b/compiler/rustc_codegen_gcc/src/back/lto.rs @@ -29,7 +29,7 @@ use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput}; use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; use rustc_data_structures::memmap::Mmap; -use rustc_errors::{DiagCtxtHandle, FatalError}; +use rustc_errors::DiagCtxtHandle; use rustc_middle::bug; use rustc_middle::dep_graph::WorkProduct; use rustc_session::config::Lto; @@ -51,12 +51,11 @@ fn prepare_lto( cgcx: &CodegenContext<GccCodegenBackend>, each_linked_rlib_for_lto: &[PathBuf], dcx: DiagCtxtHandle<'_>, -) -> Result<LtoData, FatalError> { +) -> LtoData { let tmp_path = match tempdir() { Ok(tmp_path) => tmp_path, Err(error) => { - eprintln!("Cannot create temporary directory: {}", error); - return Err(FatalError); + dcx.fatal(format!("Cannot create temporary directory: {}", error)); } }; @@ -91,15 +90,14 @@ fn prepare_lto( upstream_modules.push((module, CString::new(name).unwrap())); } Err(e) => { - dcx.emit_err(e); - return Err(FatalError); + dcx.emit_fatal(e); } } } } } - Ok(LtoData { upstream_modules, tmp_path }) + LtoData { upstream_modules, tmp_path } } fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> { @@ -114,10 +112,10 @@ pub(crate) fn run_fat( cgcx: &CodegenContext<GccCodegenBackend>, each_linked_rlib_for_lto: &[PathBuf], modules: Vec<FatLtoInput<GccCodegenBackend>>, -) -> Result<ModuleCodegen<GccContext>, FatalError> { +) -> ModuleCodegen<GccContext> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); - let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx)?; + let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx); /*let symbols_below_threshold = lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();*/ fat_lto( @@ -137,7 +135,7 @@ fn fat_lto( mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>, tmp_path: TempDir, //symbols_below_threshold: &[String], -) -> Result<ModuleCodegen<GccContext>, FatalError> { +) -> ModuleCodegen<GccContext> { let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module"); info!("going for a fat lto"); @@ -261,7 +259,7 @@ fn fat_lto( // of now. 
module.module_llvm.temp_dir = Some(tmp_path); - Ok(module) + module } pub struct ModuleBuffer(PathBuf); @@ -286,10 +284,10 @@ pub(crate) fn run_thin( each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, ThinBuffer)>, cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>, -) -> Result<(Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>), FatalError> { +) -> (Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>) { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); - let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx)?; + let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx); if cgcx.opts.cg.linker_plugin_lto.enabled() { unreachable!( "We should never reach this case if the LTO step \ @@ -355,7 +353,7 @@ fn thin_lto( tmp_path: TempDir, cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>, //_symbols_below_threshold: &[String], -) -> Result<(Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>), FatalError> { +) -> (Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>) { let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); info!("going for that thin, thin LTO"); @@ -518,13 +516,13 @@ fn thin_lto( // TODO: save the directory so that it gets deleted later. std::mem::forget(tmp_path); - Ok((opt_jobs, copy_jobs)) + (opt_jobs, copy_jobs) } pub fn optimize_thin_module( thin_module: ThinModule<GccCodegenBackend>, _cgcx: &CodegenContext<GccCodegenBackend>, -) -> Result<ModuleCodegen<GccContext>, FatalError> { +) -> ModuleCodegen<GccContext> { //let dcx = cgcx.create_dcx(); //let module_name = &thin_module.shared.module_names[thin_module.idx]; @@ -634,7 +632,8 @@ pub fn optimize_thin_module( save_temp_bitcode(cgcx, &module, "thin-lto-after-pm"); } }*/ - Ok(module) + #[allow(clippy::let_and_return)] + module } pub struct ThinBuffer { diff --git a/compiler/rustc_codegen_gcc/src/back/write.rs b/compiler/rustc_codegen_gcc/src/back/write.rs index c1231142c65..84bc7016271 100644 --- a/compiler/rustc_codegen_gcc/src/back/write.rs +++ b/compiler/rustc_codegen_gcc/src/back/write.rs @@ -6,7 +6,6 @@ use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, Mo use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; use rustc_fs_util::link_or_copy; use rustc_session::config::OutputType; -use rustc_span::fatal_error::FatalError; use rustc_target::spec::SplitDebuginfo; use crate::base::add_pic_option; @@ -17,7 +16,7 @@ pub(crate) fn codegen( cgcx: &CodegenContext<GccCodegenBackend>, module: ModuleCodegen<GccContext>, config: &ModuleConfig, -) -> Result<CompiledModule, FatalError> { +) -> CompiledModule { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); @@ -246,7 +245,7 @@ pub(crate) fn codegen( } } - Ok(module.into_compiled_module( + module.into_compiled_module( config.emit_obj != EmitObj::None, cgcx.target_can_use_split_dwarf && cgcx.split_debuginfo == SplitDebuginfo::Unpacked, config.emit_bc, @@ -254,7 +253,7 @@ pub(crate) fn codegen( config.emit_ir, &cgcx.output_filenames, cgcx.invocation_temp.as_deref(), - )) + ) } pub(crate) fn save_temp_bitcode( diff --git a/compiler/rustc_codegen_gcc/src/base.rs b/compiler/rustc_codegen_gcc/src/base.rs index c105916bbb2..e9d72e457a0 100644 --- a/compiler/rustc_codegen_gcc/src/base.rs +++ b/compiler/rustc_codegen_gcc/src/base.rs @@ -8,8 +8,8 @@ use rustc_codegen_ssa::ModuleCodegen; use rustc_codegen_ssa::base::maybe_create_entry_wrapper; use rustc_codegen_ssa::mono_item::MonoItemExt; use rustc_codegen_ssa::traits::DebugInfoCodegenMethods; +use rustc_hir::attrs::Linkage; use 
rustc_middle::dep_graph; -use rustc_middle::mir::mono::Linkage; #[cfg(feature = "master")] use rustc_middle::mir::mono::Visibility; use rustc_middle::ty::TyCtxt; diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs index 873f1f1951c..619277eba8b 100644 --- a/compiler/rustc_codegen_gcc/src/consts.rs +++ b/compiler/rustc_codegen_gcc/src/consts.rs @@ -5,13 +5,13 @@ use rustc_abi::{self as abi, Align, HasDataLayout, Primitive, Size, WrappingRang use rustc_codegen_ssa::traits::{ BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods, }; +use rustc_hir::attrs::Linkage; use rustc_hir::def::DefKind; use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs}; use rustc_middle::mir::interpret::{ self, ConstAllocation, ErrorHandled, Scalar as InterpScalar, read_target_uint, }; -use rustc_middle::mir::mono::Linkage; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::{self, Instance}; use rustc_middle::{bug, span_bug}; diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs index fdc15d580ef..41363d6313d 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs @@ -1497,7 +1497,6 @@ fn simd_funnel_shift<'a, 'gcc, 'tcx>( let index = bx.context.new_rvalue_from_int(bx.int_type, i as i32); let a_val = bx.context.new_vector_access(None, a, index).to_rvalue(); let a_val = bx.context.new_bitcast(None, a_val, unsigned_type); - // TODO: we probably need to use gcc_int_cast instead. let a_val = bx.gcc_int_cast(a_val, new_int_type); let b_val = bx.context.new_vector_access(None, b, index).to_rvalue(); let b_val = bx.context.new_bitcast(None, b_val, unsigned_type); diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs index b11f11d38e3..2d7df79ba95 100644 --- a/compiler/rustc_codegen_gcc/src/lib.rs +++ b/compiler/rustc_codegen_gcc/src/lib.rs @@ -93,7 +93,6 @@ use gccjit::{CType, Context, OptimizationLevel}; #[cfg(feature = "master")] use gccjit::{TargetInfo, Version}; use rustc_ast::expand::allocator::AllocatorKind; -use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule}; use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn, @@ -111,7 +110,6 @@ use rustc_middle::util::Providers; use rustc_session::Session; use rustc_session::config::{OptLevel, OutputFilenames}; use rustc_span::Symbol; -use rustc_span::fatal_error::FatalError; use rustc_target::spec::RelocModel; use tempfile::TempDir; @@ -363,12 +361,7 @@ impl WriteBackendMethods for GccCodegenBackend { _exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], modules: Vec<FatLtoInput<Self>>, - diff_functions: Vec<AutoDiffItem>, - ) -> Result<ModuleCodegen<Self::Module>, FatalError> { - if !diff_functions.is_empty() { - unimplemented!(); - } - + ) -> ModuleCodegen<Self::Module> { back::lto::run_fat(cgcx, each_linked_rlib_for_lto, modules) } @@ -379,7 +372,7 @@ impl WriteBackendMethods for GccCodegenBackend { each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>, - ) -> Result<(Vec<ThinModule<Self>>, Vec<WorkProduct>), FatalError> { + ) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>) { back::lto::run_thin(cgcx, each_linked_rlib_for_lto, modules, cached_modules) } @@ 
-396,15 +389,14 @@ impl WriteBackendMethods for GccCodegenBackend { _dcx: DiagCtxtHandle<'_>, module: &mut ModuleCodegen<Self::Module>, config: &ModuleConfig, - ) -> Result<(), FatalError> { + ) { module.module_llvm.context.set_optimization_level(to_gcc_opt_level(config.opt_level)); - Ok(()) } fn optimize_thin( cgcx: &CodegenContext<Self>, thin: ThinModule<Self>, - ) -> Result<ModuleCodegen<Self::Module>, FatalError> { + ) -> ModuleCodegen<Self::Module> { back::lto::optimize_thin_module(thin, cgcx) } @@ -412,7 +404,7 @@ impl WriteBackendMethods for GccCodegenBackend { cgcx: &CodegenContext<Self>, module: ModuleCodegen<Self::Module>, config: &ModuleConfig, - ) -> Result<CompiledModule, FatalError> { + ) -> CompiledModule { back::write::codegen(cgcx, module, config) } diff --git a/compiler/rustc_codegen_gcc/src/mono_item.rs b/compiler/rustc_codegen_gcc/src/mono_item.rs index ff188c437da..35d44d21bcb 100644 --- a/compiler/rustc_codegen_gcc/src/mono_item.rs +++ b/compiler/rustc_codegen_gcc/src/mono_item.rs @@ -1,11 +1,12 @@ #[cfg(feature = "master")] use gccjit::{FnAttribute, VarAttribute}; use rustc_codegen_ssa::traits::PreDefineCodegenMethods; +use rustc_hir::attrs::Linkage; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_middle::bug; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::mono::{Linkage, Visibility}; +use rustc_middle::mir::mono::Visibility; use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv, LayoutOf}; use rustc_middle::ty::{self, Instance, TypeVisitableExt}; diff --git a/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt index 29032b321fa..c5e22970c66 100644 --- a/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt +++ b/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt @@ -6,7 +6,6 @@ tests/run-make/doctests-keep-binaries/ tests/run-make/doctests-runtool/ tests/run-make/emit-shared-files/ tests/run-make/exit-code/ -tests/run-make/issue-64153/ tests/run-make/llvm-ident/ tests/run-make/native-link-modifier-bundle/ tests/run-make/remap-path-prefix-dwarf/ @@ -34,8 +33,6 @@ tests/run-make/c-link-to-rust-staticlib/ tests/run-make/foreign-double-unwind/ tests/run-make/foreign-exceptions/ tests/run-make/glibc-staticlib-args/ -tests/run-make/issue-36710/ -tests/run-make/issue-68794-textrel-on-minimal-lib/ tests/run-make/lto-smoke-c/ tests/run-make/return-non-c-like-enum/ diff --git a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt index 41fb4729c07..e2615bce190 100644 --- a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt +++ b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt @@ -20,7 +20,7 @@ tests/ui/drop/dynamic-drop.rs tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs tests/ui/simd/issue-17170.rs tests/ui/simd/issue-39720.rs -tests/ui/issues/issue-14875.rs +tests/ui/drop/panic-during-drop-14875.rs tests/ui/issues/issue-29948.rs tests/ui/process/println-with-broken-pipe.rs tests/ui/lto/thin-lto-inlines2.rs @@ -86,3 +86,5 @@ tests/ui/panics/unwind-force-no-unwind-tables.rs tests/ui/attributes/fn-align-dyn.rs tests/ui/linkage-attr/raw-dylib/elf/glibc-x86_64.rs tests/ui/explicit-tail-calls/recursion-etc.rs +tests/ui/explicit-tail-calls/indexer.rs +tests/ui/explicit-tail-calls/drop-order.rs diff --git a/compiler/rustc_codegen_gcc/tests/run/int.rs b/compiler/rustc_codegen_gcc/tests/run/int.rs index e20ecc23679..78675acb544 100644 --- 
a/compiler/rustc_codegen_gcc/tests/run/int.rs +++ b/compiler/rustc_codegen_gcc/tests/run/int.rs @@ -7,12 +7,10 @@ fn main() { use std::hint::black_box; macro_rules! check { - ($ty:ty, $expr:expr) => { - { - const EXPECTED: $ty = $expr; - assert_eq!($expr, EXPECTED); - } - }; + ($ty:ty, $expr:expr) => {{ + const EXPECTED: $ty = $expr; + assert_eq!($expr, EXPECTED); + }}; } check!(u32, (2220326408_u32 + black_box(1)) >> (32 - 6)); diff --git a/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs b/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs index 78872159f62..78e1cac57e0 100644 --- a/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs +++ b/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs @@ -12,7 +12,7 @@ fn main() { let arg_count = std::env::args().count(); let int = isize::MAX; - let _int = int + arg_count as isize; // overflow + let _int = int + arg_count as isize; // overflow // If overflow checking is disabled, we should reach here. #[cfg(not(debug_assertions))] diff --git a/compiler/rustc_codegen_gcc/tests/run/structs.rs b/compiler/rustc_codegen_gcc/tests/run/structs.rs index da73cbed9ae..e08e67837be 100644 --- a/compiler/rustc_codegen_gcc/tests/run/structs.rs +++ b/compiler/rustc_codegen_gcc/tests/run/structs.rs @@ -27,12 +27,8 @@ fn one() -> isize { #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { - let test = Test { - field: one(), - }; - let two = Two { - two: 2, - }; + let test = Test { field: one() }; + let two = Two { two: 2 }; unsafe { libc::printf(b"%ld\n\0" as *const u8 as *const i8, test.field); libc::printf(b"%ld\n\0" as *const u8 as *const i8, two.two); diff --git a/compiler/rustc_codegen_gcc/tests/run/volatile.rs b/compiler/rustc_codegen_gcc/tests/run/volatile.rs index 94a7bdc5c06..dc11fbfa600 100644 --- a/compiler/rustc_codegen_gcc/tests/run/volatile.rs +++ b/compiler/rustc_codegen_gcc/tests/run/volatile.rs @@ -12,15 +12,11 @@ struct Struct { func: unsafe fn(*const ()), } -fn func(_ptr: *const ()) { -} +fn func(_ptr: *const ()) {} fn main() { let mut x = MaybeUninit::<&Struct>::uninit(); - x.write(&Struct { - pointer: std::ptr::null(), - func, - }); + x.write(&Struct { pointer: std::ptr::null(), func }); let x = unsafe { x.assume_init() }; let value = unsafe { (x as *const Struct).read_volatile() }; println!("{:?}", value); diff --git a/compiler/rustc_codegen_gcc/tests/run/volatile2.rs b/compiler/rustc_codegen_gcc/tests/run/volatile2.rs index bdcb8259878..ada112687d3 100644 --- a/compiler/rustc_codegen_gcc/tests/run/volatile2.rs +++ b/compiler/rustc_codegen_gcc/tests/run/volatile2.rs @@ -7,7 +7,14 @@ mod libc { #[link(name = "c")] extern "C" { pub fn sigaction(signum: i32, act: *const sigaction, oldact: *mut sigaction) -> i32; - pub fn mmap(addr: *mut (), len: usize, prot: i32, flags: i32, fd: i32, offset: i64) -> *mut (); + pub fn mmap( + addr: *mut (), + len: usize, + prot: i32, + flags: i32, + fd: i32, + offset: i64, + ) -> *mut (); pub fn mprotect(addr: *mut (), len: usize, prot: i32) -> i32; } @@ -54,7 +61,8 @@ fn main() { libc::MAP_PRIVATE | libc::MAP_ANONYMOUS, -1, 0, - ).cast(); + ) + .cast(); if STORAGE == libc::MAP_FAILED { panic!("error: mmap failed"); } diff --git a/compiler/rustc_codegen_llvm/Cargo.toml b/compiler/rustc_codegen_llvm/Cargo.toml index 2d11628250c..26d20acbe28 100644 --- a/compiler/rustc_codegen_llvm/Cargo.toml +++ b/compiler/rustc_codegen_llvm/Cargo.toml @@ -8,11 +8,11 @@ test = false [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true # To avoid duplicate 
dependencies, this should match the version of gimli used # by `rustc_codegen_ssa` via its `thorin-dwp` dependency. gimli = "0.31" -itertools = "0.12" +itertools.workspace = true libc = "0.2" measureme = "12.0.1" object = { version = "0.37.0", default-features = false, features = ["std", "read"] } @@ -40,7 +40,7 @@ rustc_target = { path = "../rustc_target" } serde = { version = "1", features = ["derive"] } serde_json = "1" smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 009e7e2487b..ac7583f5666 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -24,6 +24,7 @@ use crate::attributes::{self, llfn_attrs_from_instance}; use crate::builder::Builder; use crate::context::CodegenCx; use crate::llvm::{self, Attribute, AttributePlace}; +use crate::llvm_util; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; @@ -41,12 +42,13 @@ trait ArgAttributesExt { const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] = [(ArgAttribute::InReg, llvm::AttributeKind::InReg)]; -const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 5] = [ +const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 6] = [ (ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias), - (ArgAttribute::NoCapture, llvm::AttributeKind::NoCapture), + (ArgAttribute::CapturesAddress, llvm::AttributeKind::CapturesAddress), (ArgAttribute::NonNull, llvm::AttributeKind::NonNull), (ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly), (ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef), + (ArgAttribute::CapturesReadOnly, llvm::AttributeKind::CapturesReadOnly), ]; fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> { @@ -82,6 +84,12 @@ fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&' } for (attr, llattr) in OPTIMIZATION_ATTRIBUTES { if regular.contains(attr) { + // captures(...) is only available since LLVM 21. 
+ if (attr == ArgAttribute::CapturesReadOnly || attr == ArgAttribute::CapturesAddress) + && llvm_util::get_version() < (21, 0, 0) + { + continue; + } attrs.push(llattr.create_attr(cx.llcx)); } } @@ -500,7 +508,16 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } } PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => { - apply(attrs); + let i = apply(attrs); + if cx.sess().opts.optimize != config::OptLevel::No + && llvm_util::get_version() >= (21, 0, 0) + { + attributes::apply_to_llfn( + llfn, + llvm::AttributePlace::Argument(i), + &[llvm::AttributeKind::DeadOnReturn.create_attr(cx.llcx)], + ); + } } PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => { assert!(!on_stack); diff --git a/compiler/rustc_codegen_llvm/src/allocator.rs b/compiler/rustc_codegen_llvm/src/allocator.rs index 2b5090ed6db..df3e49279d9 100644 --- a/compiler/rustc_codegen_llvm/src/allocator.rs +++ b/compiler/rustc_codegen_llvm/src/allocator.rs @@ -8,11 +8,12 @@ use rustc_middle::bug; use rustc_middle::ty::TyCtxt; use rustc_session::config::{DebugInfo, OomStrategy}; use rustc_symbol_mangling::mangle_internal_symbol; +use smallvec::SmallVec; use crate::builder::SBuilder; use crate::declare::declare_simple_fn; -use crate::llvm::{self, False, True, Type, Value}; -use crate::{SimpleCx, attributes, debuginfo}; +use crate::llvm::{self, FALSE, TRUE, Type, Value}; +use crate::{SimpleCx, attributes, debuginfo, llvm_util}; pub(crate) unsafe fn codegen( tcx: TyCtxt<'_>, @@ -79,7 +80,7 @@ pub(crate) unsafe fn codegen( &cx, &mangle_internal_symbol(tcx, OomStrategy::SYMBOL), &i8, - &llvm::LLVMConstInt(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as u64, False), + &llvm::LLVMConstInt(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as u64, FALSE), ); // __rust_no_alloc_shim_is_unstable_v2 @@ -147,6 +148,20 @@ fn create_wrapper_function( llvm::Visibility::from_generic(tcx.sess.default_visibility()), ty, ); + + let mut attrs = SmallVec::<[_; 2]>::new(); + + let target_cpu = llvm_util::target_cpu(tcx.sess); + let target_cpu_attr = llvm::CreateAttrStringValue(cx.llcx, "target-cpu", target_cpu); + + let tune_cpu_attr = llvm_util::tune_cpu(tcx.sess) + .map(|tune_cpu| llvm::CreateAttrStringValue(cx.llcx, "tune-cpu", tune_cpu)); + + attrs.push(target_cpu_attr); + attrs.extend(tune_cpu_attr); + + attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &attrs); + let no_return = if no_return { // -> ! 
DIFlagNoReturn let no_return = llvm::AttributeKind::NoReturn.create_attr(cx.llcx); @@ -186,7 +201,7 @@ fn create_wrapper_function( .map(|(i, _)| llvm::get_param(llfn, i as c_uint)) .collect::<Vec<_>>(); let ret = bx.call(ty, callee, &args, None); - llvm::LLVMSetTailCall(ret, True); + llvm::LLVMSetTailCall(ret, TRUE); if output.is_some() { bx.ret(ret); } else { diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index a643a91141e..38c1d3b53e8 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -16,6 +16,7 @@ use tracing::debug; use crate::builder::Builder; use crate::common::Funclet; use crate::context::CodegenCx; +use crate::llvm::ToLlvmBool; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; @@ -470,10 +471,6 @@ pub(crate) fn inline_asm_call<'ll>( dest: Option<&'ll llvm::BasicBlock>, catch_funclet: Option<(&'ll llvm::BasicBlock, Option<&Funclet<'ll>>)>, ) -> Option<&'ll Value> { - let volatile = if volatile { llvm::True } else { llvm::False }; - let alignstack = if alignstack { llvm::True } else { llvm::False }; - let can_throw = if unwind { llvm::True } else { llvm::False }; - let argtys = inputs .iter() .map(|v| { @@ -500,10 +497,10 @@ pub(crate) fn inline_asm_call<'ll>( asm.len(), cons.as_ptr(), cons.len(), - volatile, - alignstack, + volatile.to_llvm_bool(), + alignstack.to_llvm_bool(), dia, - can_throw, + unwind.to_llvm_bool(), ) }; diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index c548f467583..5affb26483a 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -28,22 +28,6 @@ pub(crate) fn apply_to_callsite(callsite: &Value, idx: AttributePlace, attrs: &[ } } -pub(crate) fn has_attr(llfn: &Value, idx: AttributePlace, attr: AttributeKind) -> bool { - llvm::HasAttributeAtIndex(llfn, idx, attr) -} - -pub(crate) fn has_string_attr(llfn: &Value, name: &str) -> bool { - llvm::HasStringAttribute(llfn, name) -} - -pub(crate) fn remove_from_llfn(llfn: &Value, place: AttributePlace, kind: AttributeKind) { - llvm::RemoveRustEnumAttributeAtIndex(llfn, place, kind); -} - -pub(crate) fn remove_string_attr_from_llfn(llfn: &Value, name: &str) { - llvm::RemoveStringAttrFromFn(llfn, name); -} - /// Get LLVM attribute for the provided inline heuristic. 
#[inline] fn inline_attr<'ll>(cx: &CodegenCx<'ll, '_>, inline: InlineAttr) -> Option<&'ll Attribute> { @@ -436,6 +420,16 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::ALLOCATOR_ZEROED) { to_add.push(create_alloc_family_attr(cx.llcx)); + if let Some(zv) = + cx.tcx.get_attr(instance.def_id(), rustc_span::sym::rustc_allocator_zeroed_variant) + && let Some(name) = zv.value_str() + { + to_add.push(llvm::CreateAttrStringValue( + cx.llcx, + "alloc-variant-zeroed", + &mangle_internal_symbol(cx.tcx, name.as_str()), + )); + } // apply to argument place instead of function let alloc_align = AttributeKind::AllocAlign.create_attr(cx.llcx); attributes::apply_to_llfn(llfn, AttributePlace::Argument(1), &[alloc_align]); @@ -513,7 +507,7 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( to_add.push(llvm::CreateAttrStringValue(cx.llcx, "wasm-import-module", module)); let name = - codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id())); + codegen_fn_attrs.symbol_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id())); let name = name.as_str(); to_add.push(llvm::CreateAttrStringValue(cx.llcx, "wasm-import-name", name)); } diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs index 0a161442933..7a340ae83f3 100644 --- a/compiler/rustc_codegen_llvm/src/back/archive.rs +++ b/compiler/rustc_codegen_llvm/src/back/archive.rs @@ -1,104 +1,21 @@ //! A helper class for dealing with static archives -use std::ffi::{CStr, CString, c_char, c_void}; -use std::path::{Path, PathBuf}; -use std::{io, mem, ptr, str}; +use std::ffi::{CStr, c_char, c_void}; +use std::io; use rustc_codegen_ssa::back::archive::{ - ArArchiveBuilder, ArchiveBuildFailure, ArchiveBuilder, ArchiveBuilderBuilder, - DEFAULT_OBJECT_READER, ObjectReader, UnknownArchiveKind, try_extract_macho_fat_archive, + ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder, DEFAULT_OBJECT_READER, ObjectReader, }; use rustc_session::Session; -use crate::llvm::archive_ro::{ArchiveRO, Child}; -use crate::llvm::{self, ArchiveKind, last_error}; - -/// Helper for adding many files to an archive. -#[must_use = "must call build() to finish building the archive"] -pub(crate) struct LlvmArchiveBuilder<'a> { - sess: &'a Session, - additions: Vec<Addition>, -} - -enum Addition { - File { path: PathBuf, name_in_archive: String }, - Archive { path: PathBuf, archive: ArchiveRO, skip: Box<dyn FnMut(&str) -> bool> }, -} - -impl Addition { - fn path(&self) -> &Path { - match self { - Addition::File { path, .. } | Addition::Archive { path, .. } => path, - } - } -} - -fn is_relevant_child(c: &Child<'_>) -> bool { - match c.name() { - Some(name) => !name.contains("SYMDEF"), - None => false, - } -} - -impl<'a> ArchiveBuilder for LlvmArchiveBuilder<'a> { - fn add_archive( - &mut self, - archive: &Path, - skip: Box<dyn FnMut(&str) -> bool + 'static>, - ) -> io::Result<()> { - let mut archive = archive.to_path_buf(); - if self.sess.target.llvm_target.contains("-apple-macosx") { - if let Some(new_archive) = try_extract_macho_fat_archive(self.sess, &archive)? 
{ - archive = new_archive - } - } - let archive_ro = match ArchiveRO::open(&archive) { - Ok(ar) => ar, - Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)), - }; - if self.additions.iter().any(|ar| ar.path() == archive) { - return Ok(()); - } - self.additions.push(Addition::Archive { - path: archive, - archive: archive_ro, - skip: Box::new(skip), - }); - Ok(()) - } - - /// Adds an arbitrary file to this archive - fn add_file(&mut self, file: &Path) { - let name = file.file_name().unwrap().to_str().unwrap(); - self.additions - .push(Addition::File { path: file.to_path_buf(), name_in_archive: name.to_owned() }); - } - - /// Combine the provided files, rlibs, and native libraries into a single - /// `Archive`. - fn build(mut self: Box<Self>, output: &Path) -> bool { - match self.build_with_llvm(output) { - Ok(any_members) => any_members, - Err(error) => { - self.sess.dcx().emit_fatal(ArchiveBuildFailure { path: output.to_owned(), error }) - } - } - } -} +use crate::llvm; pub(crate) struct LlvmArchiveBuilderBuilder; impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder { fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a> { - // Keeping LlvmArchiveBuilder around in case of a regression caused by using - // ArArchiveBuilder. - // FIXME(#128955) remove a couple of months after #128936 gets merged in case - // no regression is found. - if false { - Box::new(LlvmArchiveBuilder { sess, additions: Vec::new() }) - } else { - Box::new(ArArchiveBuilder::new(sess, &LLVM_OBJECT_READER)) - } + // Use the `object` crate to build archives, with a little bit of help from LLVM. + Box::new(ArArchiveBuilder::new(sess, &LLVM_OBJECT_READER)) } } @@ -178,91 +95,3 @@ fn llvm_is_64_bit_object_file(buf: &[u8]) -> bool { fn llvm_is_ec_object_file(buf: &[u8]) -> bool { unsafe { llvm::LLVMRustIsECObject(buf.as_ptr(), buf.len()) } } - -impl<'a> LlvmArchiveBuilder<'a> { - fn build_with_llvm(&mut self, output: &Path) -> io::Result<bool> { - let kind = &*self.sess.target.archive_format; - let kind = kind - .parse::<ArchiveKind>() - .map_err(|_| kind) - .unwrap_or_else(|kind| self.sess.dcx().emit_fatal(UnknownArchiveKind { kind })); - - let mut additions = mem::take(&mut self.additions); - // Values in the `members` list below will contain pointers to the strings allocated here. - // So they need to get dropped after all elements of `members` get freed. - let mut strings = Vec::new(); - let mut members = Vec::new(); - - let dst = CString::new(output.to_str().unwrap())?; - - unsafe { - for addition in &mut additions { - match addition { - Addition::File { path, name_in_archive } => { - let path = CString::new(path.to_str().unwrap())?; - let name = CString::new(name_in_archive.as_bytes())?; - members.push(llvm::LLVMRustArchiveMemberNew( - path.as_ptr(), - name.as_ptr(), - None, - )); - strings.push(path); - strings.push(name); - } - Addition::Archive { archive, skip, .. } => { - for child in archive.iter() { - let child = child.map_err(string_to_io_error)?; - if !is_relevant_child(&child) { - continue; - } - let child_name = child.name().unwrap(); - if skip(child_name) { - continue; - } - - // It appears that LLVM's archive writer is a little - // buggy if the name we pass down isn't just the - // filename component, so chop that off here and - // pass it in. - // - // See LLVM bug 25877 for more info. 
- let child_name = - Path::new(child_name).file_name().unwrap().to_str().unwrap(); - let name = CString::new(child_name)?; - let m = llvm::LLVMRustArchiveMemberNew( - ptr::null(), - name.as_ptr(), - Some(child.raw), - ); - members.push(m); - strings.push(name); - } - } - } - } - - let r = llvm::LLVMRustWriteArchive( - dst.as_ptr(), - members.len() as libc::size_t, - members.as_ptr() as *const &_, - true, - kind, - self.sess.target.arch == "arm64ec", - ); - let ret = if r.into_result().is_err() { - let msg = last_error().unwrap_or_else(|| "failed to write archive".into()); - Err(io::Error::new(io::ErrorKind::Other, msg)) - } else { - Ok(!members.is_empty()) - }; - for member in members { - llvm::LLVMRustArchiveMemberFree(member); - } - ret - } - } -} - -fn string_to_io_error(s: String) -> io::Error { - io::Error::new(io::ErrorKind::Other, format!("bad archive: {s}")) -} diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs index c269f11e931..d85f432702c 100644 --- a/compiler/rustc_codegen_llvm/src/back/lto.rs +++ b/compiler/rustc_codegen_llvm/src/back/lto.rs @@ -14,7 +14,7 @@ use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::memmap::Mmap; -use rustc_errors::{DiagCtxtHandle, FatalError}; +use rustc_errors::DiagCtxtHandle; use rustc_middle::bug; use rustc_middle::dep_graph::WorkProduct; use rustc_session::config::{self, Lto}; @@ -24,9 +24,8 @@ use crate::back::write::{ self, CodegenDiagnosticsStage, DiagnosticHandlers, bitcode_section_name, save_temp_bitcode, }; use crate::errors::{LlvmError, LtoBitcodeFromRlib}; -use crate::llvm::AttributePlace::Function; use crate::llvm::{self, build_string}; -use crate::{LlvmCodegenBackend, ModuleLlvm, SimpleCx, attributes}; +use crate::{LlvmCodegenBackend, ModuleLlvm, SimpleCx}; /// We keep track of the computed LTO cache keys from the previous /// session to determine which CGUs we can reuse. 
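Several of the LTO refactors in this patch, on both the GCC and LLVM sides, follow the same shape: helpers that used to return Result<T, FatalError> now return T directly and report unrecoverable errors through a diverging diagnostics call. A minimal, self-contained sketch of that pattern, using a hypothetical Diag type rather than the real rustc_errors API:

// Hypothetical stand-in for a diagnostics handle; not the real rustc_errors API.
struct Diag;

impl Diag {
    // A fatal diagnostic never returns, so callers need no Result plumbing.
    fn fatal(&self, msg: &str) -> ! {
        eprintln!("fatal: {msg}");
        std::process::exit(1)
    }
}

// Before: fn prepare(dcx: &Diag, ...) -> Result<Vec<u8>, FatalError>
// After: the error path diverges, so the function returns its value directly.
fn prepare(dcx: &Diag, input: Option<&[u8]>) -> Vec<u8> {
    match input {
        Some(data) => data.to_vec(),
        None => dcx.fatal("missing LTO input"),
    }
}

fn main() {
    let dcx = Diag;
    let data = prepare(&dcx, Some(&b"bitcode"[..]));
    println!("{} bytes", data.len());
}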
@@ -37,7 +36,7 @@ fn prepare_lto( exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], dcx: DiagCtxtHandle<'_>, -) -> Result<(Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>), FatalError> { +) -> (Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>) { let mut symbols_below_threshold = exported_symbols_for_lto .iter() .map(|symbol| CString::new(symbol.to_owned()).unwrap()) @@ -80,16 +79,13 @@ fn prepare_lto( let module = SerializedModule::FromRlib(data.to_vec()); upstream_modules.push((module, CString::new(name).unwrap())); } - Err(e) => { - dcx.emit_err(e); - return Err(FatalError); - } + Err(e) => dcx.emit_fatal(e), } } } } - Ok((symbols_below_threshold, upstream_modules)) + (symbols_below_threshold, upstream_modules) } fn get_bitcode_slice_from_object_data<'a>( @@ -124,11 +120,11 @@ pub(crate) fn run_fat( exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], modules: Vec<FatLtoInput<LlvmCodegenBackend>>, -) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> { +) -> ModuleCodegen<ModuleLlvm> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); let (symbols_below_threshold, upstream_modules) = - prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx)?; + prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx); let symbols_below_threshold = symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>(); fat_lto(cgcx, dcx, modules, upstream_modules, &symbols_below_threshold) @@ -143,11 +139,11 @@ pub(crate) fn run_thin( each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, ThinBuffer)>, cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>, -) -> Result<(Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> { +) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); let (symbols_below_threshold, upstream_modules) = - prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx)?; + prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx); let symbols_below_threshold = symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>(); if cgcx.opts.cg.linker_plugin_lto.enabled() { @@ -174,7 +170,7 @@ fn fat_lto( modules: Vec<FatLtoInput<LlvmCodegenBackend>>, mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>, symbols_below_threshold: &[*const libc::c_char], -) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> { +) -> ModuleCodegen<ModuleLlvm> { let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module"); info!("going for a fat lto"); @@ -225,7 +221,7 @@ fn fat_lto( assert!(!serialized_modules.is_empty(), "must have at least one serialized module"); let (buffer, name) = serialized_modules.remove(0); info!("no in-memory regular modules to choose from, parsing {:?}", name); - let llvm_module = ModuleLlvm::parse(cgcx, &name, buffer.data(), dcx)?; + let llvm_module = ModuleLlvm::parse(cgcx, &name, buffer.data(), dcx); ModuleCodegen::new_regular(name.into_string().unwrap(), llvm_module) } }; @@ -266,7 +262,9 @@ fn fat_lto( }); info!("linking {:?}", name); let data = bc_decoded.data(); - linker.add(data).map_err(|()| write::llvm_err(dcx, LlvmError::LoadBitcode { name }))?; + linker + .add(data) + .unwrap_or_else(|()| write::llvm_err(dcx, LlvmError::LoadBitcode { name })); } drop(linker); save_temp_bitcode(cgcx, &module, "lto.input"); @@ -283,7 +281,7 @@ fn fat_lto( save_temp_bitcode(cgcx, &module, "lto.after-restriction"); } - 
Ok(module) + module } pub(crate) struct Linker<'a>(&'a mut llvm::Linker<'a>); @@ -353,7 +351,7 @@ fn thin_lto( serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>, cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>, symbols_below_threshold: &[*const libc::c_char], -) -> Result<(Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> { +) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) { let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); unsafe { info!("going for that thin, thin LTO"); @@ -423,7 +421,7 @@ fn thin_lto( symbols_below_threshold.as_ptr(), symbols_below_threshold.len(), ) - .ok_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext))?; + .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext)); let data = ThinData(data); @@ -493,10 +491,10 @@ fn thin_lto( if let Some(path) = key_map_path && let Err(err) = curr_key_map.save_to_file(&path) { - return Err(write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err })); + write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err }); } - Ok((opt_jobs, copy_jobs)) + (opt_jobs, copy_jobs) } } @@ -551,7 +549,7 @@ pub(crate) fn run_pass_manager( dcx: DiagCtxtHandle<'_>, module: &mut ModuleCodegen<ModuleLlvm>, thin: bool, -) -> Result<(), FatalError> { +) { let _timer = cgcx.prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name); let config = cgcx.config(module.kind); @@ -583,7 +581,7 @@ pub(crate) fn run_pass_manager( } unsafe { - write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?; + write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage); } if enable_gpu && !thin { @@ -593,36 +591,11 @@ pub(crate) fn run_pass_manager( } if cfg!(llvm_enzyme) && enable_ad && !thin { - let cx = - SimpleCx::new(module.module_llvm.llmod(), &module.module_llvm.llcx, cgcx.pointer_size); - - for function in cx.get_functions() { - let enzyme_marker = "enzyme_marker"; - if attributes::has_string_attr(function, enzyme_marker) { - // Sanity check: Ensure 'noinline' is present before replacing it. 
- assert!( - attributes::has_attr(function, Function, llvm::AttributeKind::NoInline), - "Expected __enzyme function to have 'noinline' before adding 'alwaysinline'" - ); - - attributes::remove_from_llfn(function, Function, llvm::AttributeKind::NoInline); - attributes::remove_string_attr_from_llfn(function, enzyme_marker); - - assert!( - !attributes::has_string_attr(function, enzyme_marker), - "Expected function to not have 'enzyme_marker'" - ); - - let always_inline = llvm::AttributeKind::AlwaysInline.create_attr(cx.llcx); - attributes::apply_to_llfn(function, Function, &[always_inline]); - } - } - let opt_stage = llvm::OptStage::FatLTO; let stage = write::AutodiffStage::PostAD; if !config.autodiff.contains(&config::AutoDiff::NoPostopt) { unsafe { - write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?; + write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage); } } @@ -634,7 +607,6 @@ pub(crate) fn run_pass_manager( } debug!("lto done"); - Ok(()) } pub struct ModuleBuffer(&'static mut llvm::ModuleBuffer); @@ -727,7 +699,7 @@ impl Drop for ThinBuffer { pub(crate) fn optimize_thin_module( thin_module: ThinModule<LlvmCodegenBackend>, cgcx: &CodegenContext<LlvmCodegenBackend>, -) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> { +) -> ModuleCodegen<ModuleLlvm> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); @@ -738,7 +710,7 @@ pub(crate) fn optimize_thin_module( // into that context. One day, however, we may do this for upstream // crates but for locally codegened modules we may be able to reuse // that LLVM Context and Module. - let module_llvm = ModuleLlvm::parse(cgcx, module_name, thin_module.data(), dcx)?; + let module_llvm = ModuleLlvm::parse(cgcx, module_name, thin_module.data(), dcx); let mut module = ModuleCodegen::new_regular(thin_module.name(), module_llvm); // Given that the newly created module lacks a thinlto buffer for embedding, we need to re-add it here. if cgcx.config(ModuleKind::Regular).embed_bitcode() { @@ -772,7 +744,7 @@ pub(crate) fn optimize_thin_module( .generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name()); if unsafe { !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) } { - return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule)); + write::llvm_err(dcx, LlvmError::PrepareThinLtoModule); } save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve"); } @@ -783,7 +755,7 @@ pub(crate) fn optimize_thin_module( .generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name()); if unsafe { !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) } { - return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule)); + write::llvm_err(dcx, LlvmError::PrepareThinLtoModule); } save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize"); } @@ -794,7 +766,7 @@ pub(crate) fn optimize_thin_module( if unsafe { !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod, target.raw()) } { - return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule)); + write::llvm_err(dcx, LlvmError::PrepareThinLtoModule); } save_temp_bitcode(cgcx, &module, "thin-lto-after-import"); } @@ -806,11 +778,11 @@ pub(crate) fn optimize_thin_module( // little differently. 
{ info!("running thin lto passes over {}", module.name); - run_pass_manager(cgcx, dcx, &mut module, true)?; + run_pass_manager(cgcx, dcx, &mut module, true); save_temp_bitcode(cgcx, &module, "thin-lto-after-pm"); } } - Ok(module) + module } /// Maps LLVM module identifiers to their corresponding LLVM LTO cache keys @@ -876,9 +848,9 @@ pub(crate) fn parse_module<'a>( name: &CStr, data: &[u8], dcx: DiagCtxtHandle<'_>, -) -> Result<&'a llvm::Module, FatalError> { +) -> &'a llvm::Module { unsafe { llvm::LLVMRustParseBitcodeForLTO(cx, data.as_ptr(), data.len(), name.as_ptr()) - .ok_or_else(|| write::llvm_err(dcx, LlvmError::ParseBitcode)) + .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::ParseBitcode)) } } diff --git a/compiler/rustc_codegen_llvm/src/back/mod.rs b/compiler/rustc_codegen_llvm/src/back/mod.rs new file mode 100644 index 00000000000..6cb89f80ab8 --- /dev/null +++ b/compiler/rustc_codegen_llvm/src/back/mod.rs @@ -0,0 +1,5 @@ +pub(crate) mod archive; +pub(crate) mod lto; +pub(crate) mod owned_target_machine; +mod profiling; +pub(crate) mod write; diff --git a/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs b/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs index 8e82013e94a..6d8178320fe 100644 --- a/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs +++ b/compiler/rustc_codegen_llvm/src/back/owned_target_machine.rs @@ -1,4 +1,5 @@ -use std::ffi::{CStr, c_char}; +use std::assert_matches::assert_matches; +use std::ffi::CStr; use std::marker::PhantomData; use std::ptr::NonNull; @@ -41,11 +42,9 @@ impl OwnedTargetMachine { args_cstr_buff: &[u8], use_wasm_eh: bool, ) -> Result<Self, LlvmError<'static>> { - assert!(args_cstr_buff.len() > 0); - assert!( - *args_cstr_buff.last().unwrap() == 0, - "The last character must be a null terminator." - ); + // The argument list is passed as the concatenation of one or more C strings. + // This implies that there must be a last byte, and it must be 0. + assert_matches!(args_cstr_buff, [.., b'\0'], "the last byte must be a NUL terminator"); // SAFETY: llvm::LLVMRustCreateTargetMachine copies pointed to data let tm_ptr = unsafe { @@ -71,7 +70,7 @@ impl OwnedTargetMachine { output_obj_file.as_ptr(), debug_info_compression.as_ptr(), use_emulated_tls, - args_cstr_buff.as_ptr() as *const c_char, + args_cstr_buff.as_ptr(), args_cstr_buff.len(), use_wasm_eh, ) @@ -99,7 +98,7 @@ impl Drop for OwnedTargetMachine { // llvm::LLVMRustCreateTargetMachine OwnedTargetMachine is not copyable so there is no // double free or use after free. 
unsafe { - llvm::LLVMRustDisposeTargetMachine(self.tm_unique.as_mut()); + llvm::LLVMRustDisposeTargetMachine(self.tm_unique.as_ptr()); } } } diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs index 85a06f457eb..7ea2ae6673b 100644 --- a/compiler/rustc_codegen_llvm/src/back/write.rs +++ b/compiler/rustc_codegen_llvm/src/back/write.rs @@ -20,7 +20,7 @@ use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind}; use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::small_c_str::SmallCStr; -use rustc_errors::{DiagCtxtHandle, FatalError, Level}; +use rustc_errors::{DiagCtxtHandle, Level}; use rustc_fs_util::{link_or_copy, path_to_c_string}; use rustc_middle::ty::TyCtxt; use rustc_session::Session; @@ -46,10 +46,10 @@ use crate::llvm::{self, DiagnosticInfo}; use crate::type_::Type; use crate::{LlvmCodegenBackend, ModuleLlvm, base, common, llvm_util}; -pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> FatalError { +pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> ! { match llvm::last_error() { - Some(llvm_err) => dcx.emit_almost_fatal(WithLlvmError(err, llvm_err)), - None => dcx.emit_almost_fatal(err), + Some(llvm_err) => dcx.emit_fatal(WithLlvmError(err, llvm_err)), + None => dcx.emit_fatal(err), } } @@ -63,7 +63,7 @@ fn write_output_file<'ll>( file_type: llvm::FileType, self_profiler_ref: &SelfProfilerRef, verify_llvm_ir: bool, -) -> Result<(), FatalError> { +) { debug!("write_output_file output={:?} dwo_output={:?}", output, dwo_output); let output_c = path_to_c_string(output); let dwo_output_c; @@ -100,7 +100,7 @@ fn write_output_file<'ll>( } } - result.into_result().map_err(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output })) + result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output })) } pub(crate) fn create_informational_target_machine( @@ -112,7 +112,7 @@ pub(crate) fn create_informational_target_machine( // system/tcx is set up. let features = llvm_util::global_llvm_features(sess, false, only_base_features); target_machine_factory(sess, config::OptLevel::No, &features)(config) - .unwrap_or_else(|err| llvm_err(sess.dcx(), err).raise()) + .unwrap_or_else(|err| llvm_err(sess.dcx(), err)) } pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMachine { @@ -139,7 +139,7 @@ pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTar tcx.backend_optimization_level(()), tcx.global_backend_features(()), )(config) - .unwrap_or_else(|err| llvm_err(tcx.dcx(), err).raise()) + .unwrap_or_else(|err| llvm_err(tcx.dcx(), err)) } fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) { @@ -565,7 +565,7 @@ pub(crate) unsafe fn llvm_optimize( opt_level: config::OptLevel, opt_stage: llvm::OptStage, autodiff_stage: AutodiffStage, -) -> Result<(), FatalError> { +) { // Enzyme: // The whole point of compiler based AD is to differentiate optimized IR instead of unoptimized // source code. However, benchmarks show that optimizations increasing the code size @@ -704,7 +704,7 @@ pub(crate) unsafe fn llvm_optimize( llvm_plugins.len(), ) }; - result.into_result().map_err(|()| llvm_err(dcx, LlvmError::RunLlvmPasses)) + result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::RunLlvmPasses)) } // Unsafe due to LLVM calls. 
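The OwnedTargetMachine::new change above validates the concatenated C-string argument buffer with a slice pattern instead of manual indexing. The in-tree code uses the unstable assert_matches! macro; a stable, standalone sketch of the same check (function name hypothetical):

// A buffer holding one or more concatenated C strings must end in a NUL byte.
fn check_args_buf(args_cstr_buff: &[u8]) {
    assert!(
        matches!(args_cstr_buff, [.., b'\0']),
        "the last byte must be a NUL terminator"
    );
}

fn main() {
    // Two concatenated NUL-terminated arguments.
    check_args_buf(b"-O2\0--target=x86_64\0");
}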
@@ -713,7 +713,7 @@ pub(crate) fn optimize( dcx: DiagCtxtHandle<'_>, module: &mut ModuleCodegen<ModuleLlvm>, config: &ModuleConfig, -) -> Result<(), FatalError> { +) { let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name); let llcx = &*module.module_llvm.llcx; @@ -765,7 +765,7 @@ pub(crate) fn optimize( opt_stage, autodiff_stage, ) - }?; + }; if let Some(thin_lto_buffer) = thin_lto_buffer { let thin_lto_buffer = unsafe { ThinBuffer::from_raw_ptr(thin_lto_buffer) }; module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec()); @@ -793,14 +793,13 @@ pub(crate) fn optimize( } } } - Ok(()) } pub(crate) fn codegen( cgcx: &CodegenContext<LlvmCodegenBackend>, module: ModuleCodegen<ModuleLlvm>, config: &ModuleConfig, -) -> Result<CompiledModule, FatalError> { +) -> CompiledModule { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); @@ -862,7 +861,7 @@ pub(crate) fn codegen( .generic_activity_with_arg("LLVM_module_codegen_embed_bitcode", &*module.name); let thin_bc = module.thin_lto_buffer.as_deref().expect("cannot find embedded bitcode"); - embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, &thin_bc); + embed_bitcode(cgcx, llcx, llmod, &thin_bc); } } @@ -909,7 +908,9 @@ pub(crate) fn codegen( record_artifact_size(&cgcx.prof, "llvm_ir", &out); } - result.into_result().map_err(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out }))?; + result + .into_result() + .unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out })); } if config.emit_asm { @@ -940,7 +941,7 @@ pub(crate) fn codegen( llvm::FileType::AssemblyFile, &cgcx.prof, config.verify_llvm_ir, - )?; + ); } match config.emit_obj { @@ -976,7 +977,7 @@ pub(crate) fn codegen( llvm::FileType::ObjectFile, &cgcx.prof, config.verify_llvm_ir, - )?; + ); } EmitObj::Bitcode => { @@ -1009,7 +1010,7 @@ pub(crate) fn codegen( && cgcx.target_can_use_split_dwarf && cgcx.split_debuginfo != SplitDebuginfo::Off && cgcx.split_dwarf_kind == SplitDwarfKind::Split; - Ok(module.into_compiled_module( + module.into_compiled_module( config.emit_obj != EmitObj::None, dwarf_object_emitted, config.emit_bc, @@ -1017,7 +1018,7 @@ pub(crate) fn codegen( config.emit_ir, &cgcx.output_filenames, cgcx.invocation_temp.as_deref(), - )) + ) } fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data: &[u8]) -> Vec<u8> { @@ -1058,7 +1059,6 @@ fn embed_bitcode( cgcx: &CodegenContext<LlvmCodegenBackend>, llcx: &llvm::Context, llmod: &llvm::Module, - cmdline: &str, bitcode: &[u8], ) { // We're adding custom sections to the output object file, but we definitely @@ -1074,7 +1074,9 @@ fn embed_bitcode( // * Mach-O - this is for macOS. Inspecting the source code for the native // linker here shows that the `.llvmbc` and `.llvmcmd` sections are // automatically skipped by the linker. In that case there's nothing extra - // that we need to do here. + // that we need to do here. We do need to make sure that the + // `__LLVM,__cmdline` section exists even though it is empty as otherwise + // ld64 rejects the object file. // // * Wasm - the native LLD linker is hard-coded to skip `.llvmbc` and // `.llvmcmd` sections, so there's nothing extra we need to do. 
@@ -1109,9 +1111,9 @@ fn embed_bitcode( llvm::set_section(llglobal, bitcode_section_name(cgcx)); llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage); - llvm::LLVMSetGlobalConstant(llglobal, llvm::True); + llvm::LLVMSetGlobalConstant(llglobal, llvm::TRUE); - let llconst = common::bytes_in_context(llcx, cmdline.as_bytes()); + let llconst = common::bytes_in_context(llcx, &[]); let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline"); llvm::set_initializer(llglobal, llconst); let section = if cgcx.target_is_like_darwin { @@ -1128,7 +1130,7 @@ fn embed_bitcode( let section_flags = if cgcx.is_pe_coff { "n" } else { "e" }; let asm = create_section_with_flags_asm(".llvmbc", section_flags, bitcode); llvm::append_module_inline_asm(llmod, &asm); - let asm = create_section_with_flags_asm(".llvmcmd", section_flags, cmdline.as_bytes()); + let asm = create_section_with_flags_asm(".llvmcmd", section_flags, &[]); llvm::append_module_inline_asm(llmod, &asm); } } diff --git a/compiler/rustc_codegen_llvm/src/base.rs b/compiler/rustc_codegen_llvm/src/base.rs index 5dda836988c..9cc5d8dbc21 100644 --- a/compiler/rustc_codegen_llvm/src/base.rs +++ b/compiler/rustc_codegen_llvm/src/base.rs @@ -18,9 +18,10 @@ use rustc_codegen_ssa::base::maybe_create_entry_wrapper; use rustc_codegen_ssa::mono_item::MonoItemExt; use rustc_codegen_ssa::traits::*; use rustc_data_structures::small_c_str::SmallCStr; +use rustc_hir::attrs::Linkage; use rustc_middle::dep_graph; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; -use rustc_middle::mir::mono::{Linkage, Visibility}; +use rustc_middle::mir::mono::Visibility; use rustc_middle::ty::TyCtxt; use rustc_session::config::DebugInfo; use rustc_span::Symbol; diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 917d07e3c61..37379586d58 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -35,7 +35,7 @@ use crate::attributes; use crate::common::Funclet; use crate::context::{CodegenCx, FullCx, GenericCx, SCx}; use crate::llvm::{ - self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, GEPNoWrapFlags, Metadata, True, + self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, GEPNoWrapFlags, Metadata, TRUE, ToLlvmBool, }; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; @@ -493,8 +493,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED); if llvm::LLVMIsAInstruction(add).is_some() { - llvm::LLVMSetNUW(add, True); - llvm::LLVMSetNSW(add, True); + llvm::LLVMSetNUW(add, TRUE); + llvm::LLVMSetNSW(add, TRUE); } add } @@ -503,8 +503,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED); if llvm::LLVMIsAInstruction(sub).is_some() { - llvm::LLVMSetNUW(sub, True); - llvm::LLVMSetNSW(sub, True); + llvm::LLVMSetNUW(sub, TRUE); + llvm::LLVMSetNSW(sub, TRUE); } sub } @@ -513,8 +513,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED); if llvm::LLVMIsAInstruction(mul).is_some() { - llvm::LLVMSetNUW(mul, True); - llvm::LLVMSetNSW(mul, True); + llvm::LLVMSetNUW(mul, TRUE); + llvm::LLVMSetNSW(mul, TRUE); } mul } @@ -528,7 +528,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { // an instruction, so we need to check before setting the flag. 
// (See also `LLVMBuildNUWNeg` which also needs a check.) if llvm::LLVMIsAInstruction(or).is_some() { - llvm::LLVMSetIsDisjoint(or, True); + llvm::LLVMSetIsDisjoint(or, TRUE); } or } @@ -629,7 +629,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value { unsafe { let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED); - llvm::LLVMSetVolatile(load, llvm::True); + llvm::LLVMSetVolatile(load, llvm::TRUE); load } } @@ -717,7 +717,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let mut const_llval = None; let llty = place.layout.llvm_type(self); if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) { - if llvm::LLVMIsGlobalConstant(global) == llvm::True { + if llvm::LLVMIsGlobalConstant(global).is_true() { if let Some(init) = llvm::LLVMGetInitializer(global) { if self.val_ty(init) == llty { const_llval = Some(init); @@ -838,7 +838,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint }; llvm::LLVMSetAlignment(store, align); if flags.contains(MemFlags::VOLATILE) { - llvm::LLVMSetVolatile(store, llvm::True); + llvm::LLVMSetVolatile(store, llvm::TRUE); } if flags.contains(MemFlags::NONTEMPORAL) { // Make sure that the current target architectures supports "sane" non-temporal @@ -956,7 +956,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let trunc = self.trunc(val, dest_ty); unsafe { if llvm::LLVMIsAInstruction(trunc).is_some() { - llvm::LLVMSetNUW(trunc, True); + llvm::LLVMSetNUW(trunc, TRUE); } } trunc @@ -968,7 +968,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let trunc = self.trunc(val, dest_ty); unsafe { if llvm::LLVMIsAInstruction(trunc).is_some() { - llvm::LLVMSetNSW(trunc, True); + llvm::LLVMSetNSW(trunc, TRUE); } } trunc @@ -1067,13 +1067,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value { unsafe { - llvm::LLVMBuildIntCast2( - self.llbuilder, - val, - dest_ty, - if is_signed { True } else { False }, - UNNAMED, - ) + llvm::LLVMBuildIntCast2(self.llbuilder, val, dest_ty, is_signed.to_llvm_bool(), UNNAMED) } } @@ -1229,7 +1223,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false); let landing_pad = self.landing_pad(ty, pers_fn, 0); unsafe { - llvm::LLVMSetCleanup(landing_pad, llvm::True); + llvm::LLVMSetCleanup(landing_pad, llvm::TRUE); } (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1)) } @@ -1317,7 +1311,6 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { failure_order: rustc_middle::ty::AtomicOrdering, weak: bool, ) -> (&'ll Value, &'ll Value) { - let weak = if weak { llvm::True } else { llvm::False }; unsafe { let value = llvm::LLVMBuildAtomicCmpXchg( self.llbuilder, @@ -1326,9 +1319,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { src, AtomicOrdering::from_generic(order), AtomicOrdering::from_generic(failure_order), - llvm::False, // SingleThreaded + llvm::FALSE, // SingleThreaded ); - llvm::LLVMSetWeak(value, weak); + llvm::LLVMSetWeak(value, weak.to_llvm_bool()); let val = self.extract_value(value, 0); let success = self.extract_value(value, 1); (val, success) @@ -1353,7 +1346,7 @@ 
impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { dst, src, AtomicOrdering::from_generic(order), - llvm::False, // SingleThreaded + llvm::FALSE, // SingleThreaded ) }; if ret_ptr && self.val_ty(res) != self.type_ptr() { @@ -1368,14 +1361,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { scope: SynchronizationScope, ) { let single_threaded = match scope { - SynchronizationScope::SingleThread => llvm::True, - SynchronizationScope::CrossThread => llvm::False, + SynchronizationScope::SingleThread => true, + SynchronizationScope::CrossThread => false, }; unsafe { llvm::LLVMBuildFence( self.llbuilder, AtomicOrdering::from_generic(order), - single_threaded, + single_threaded.to_llvm_bool(), UNNAMED, ); } @@ -1453,7 +1446,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { instance: Option<Instance<'tcx>>, ) { let call = self.call(llty, fn_attrs, Some(fn_abi), llfn, args, funclet, instance); - llvm::LLVMRustSetTailCallKind(call, llvm::TailCallKind::MustTail); + llvm::LLVMSetTailCallKind(call, llvm::TailCallKind::MustTail); match &fn_abi.ret.mode { PassMode::Ignore | PassMode::Indirect { .. } => self.ret_void(), @@ -1696,7 +1689,11 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { return; } - self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[self.cx.const_u64(size), ptr]); + if crate::llvm_util::get_version() >= (22, 0, 0) { + self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[ptr]); + } else { + self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[self.cx.const_u64(size), ptr]); + } } } impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> { diff --git a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs index 829b3c513c2..6ddf53cdc87 100644 --- a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs +++ b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs @@ -1,40 +1,92 @@ use std::ptr; -use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, AutoDiffItem, DiffActivity, DiffMode}; -use rustc_codegen_ssa::ModuleCodegen; +use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, DiffActivity, DiffMode}; use rustc_codegen_ssa::common::TypeKind; -use rustc_codegen_ssa::traits::BaseTypeCodegenMethods; -use rustc_errors::FatalError; -use rustc_middle::bug; -use tracing::{debug, trace}; +use rustc_codegen_ssa::traits::{BaseTypeCodegenMethods, BuilderMethods}; +use rustc_middle::ty::{PseudoCanonicalInput, Ty, TyCtxt, TypingEnv}; +use rustc_middle::{bug, ty}; +use tracing::debug; -use crate::back::write::llvm_err; -use crate::builder::{SBuilder, UNNAMED}; +use crate::builder::{Builder, PlaceRef, UNNAMED}; use crate::context::SimpleCx; use crate::declare::declare_simple_fn; -use crate::errors::{AutoDiffWithoutEnable, LlvmError}; -use crate::llvm::AttributePlace::Function; -use crate::llvm::{Metadata, True}; +use crate::llvm; +use crate::llvm::{Metadata, TRUE, Type}; use crate::value::Value; -use crate::{CodegenContext, LlvmCodegenBackend, ModuleLlvm, attributes, llvm}; -fn get_params(fnc: &Value) -> Vec<&Value> { - let param_num = llvm::LLVMCountParams(fnc) as usize; - let mut fnc_args: Vec<&Value> = vec![]; - fnc_args.reserve(param_num); - unsafe { - llvm::LLVMGetParams(fnc, fnc_args.as_mut_ptr()); - fnc_args.set_len(param_num); +pub(crate) fn adjust_activity_to_abi<'tcx>( + tcx: TyCtxt<'tcx>, + fn_ty: Ty<'tcx>, + da: &mut Vec<DiffActivity>, +) { + if !matches!(fn_ty.kind(), ty::FnDef(..)) { + bug!("expected fn def for autodiff, got {:?}", fn_ty); } - fnc_args 
-} -fn has_sret(fnc: &Value) -> bool { - let num_args = llvm::LLVMCountParams(fnc) as usize; - if num_args == 0 { - false - } else { - unsafe { llvm::LLVMRustHasAttributeAtIndex(fnc, 0, llvm::AttributeKind::StructRet) } + // We don't actually pass the types back into the type system. + // All we do is decide how to handle the arguments. + let sig = fn_ty.fn_sig(tcx).skip_binder(); + + let mut new_activities = vec![]; + let mut new_positions = vec![]; + for (i, ty) in sig.inputs().iter().enumerate() { + if let Some(inner_ty) = ty.builtin_deref(true) { + if inner_ty.is_slice() { + // Now we need to figure out the size of each slice element in memory to allow + // safety checks and usability improvements in the backend. + let sty = match inner_ty.builtin_index() { + Some(sty) => sty, + None => { + panic!("slice element type unknown"); + } + }; + let pci = PseudoCanonicalInput { + typing_env: TypingEnv::fully_monomorphized(), + value: sty, + }; + + let layout = tcx.layout_of(pci); + let elem_size = match layout { + Ok(layout) => layout.size, + Err(_) => { + bug!("autodiff failed to compute slice element size"); + } + }; + let elem_size: u32 = elem_size.bytes() as u32; + + // We know that the length will be passed as extra arg. + if !da.is_empty() { + // We are looking at a slice. The length of that slice will become an + // extra integer on llvm level. Integers are always const. + // However, if the slice get's duplicated, we want to know to later check the + // size. So we mark the new size argument as FakeActivitySize. + // There is one FakeActivitySize per slice, so for convenience we store the + // slice element size in bytes in it. We will use the size in the backend. + let activity = match da[i] { + DiffActivity::DualOnly + | DiffActivity::Dual + | DiffActivity::Dualv + | DiffActivity::DuplicatedOnly + | DiffActivity::Duplicated => { + DiffActivity::FakeActivitySize(Some(elem_size)) + } + DiffActivity::Const => DiffActivity::Const, + _ => bug!("unexpected activity for ptr/ref"), + }; + new_activities.push(activity); + new_positions.push(i + 1); + } + + continue; + } + } + } + // now add the extra activities coming from slices + // Reverse order to not invalidate the indices + for _ in 0..new_activities.len() { + let pos = new_positions.pop().unwrap(); + let activity = new_activities.pop().unwrap(); + da.insert(pos, activity); } } @@ -48,14 +100,13 @@ fn has_sret(fnc: &Value) -> bool { // need to match those. // FIXME(ZuseZ4): This logic is a bit more complicated than it should be, can we simplify it // using iterators and peek()? -fn match_args_from_caller_to_enzyme<'ll>( +fn match_args_from_caller_to_enzyme<'ll, 'tcx>( cx: &SimpleCx<'ll>, - builder: &SBuilder<'ll, 'll>, + builder: &mut Builder<'_, 'll, 'tcx>, width: u32, args: &mut Vec<&'ll llvm::Value>, inputs: &[DiffActivity], outer_args: &[&'ll llvm::Value], - has_sret: bool, ) { debug!("matching autodiff arguments"); // We now handle the issue that Rust level arguments not always match the llvm-ir level @@ -67,14 +118,6 @@ fn match_args_from_caller_to_enzyme<'ll>( let mut outer_pos: usize = 0; let mut activity_pos = 0; - if has_sret { - // Then the first outer arg is the sret pointer. Enzyme doesn't know about sret, so the - // inner function will still return something. 
We increase our outer_pos by one, - // and once we're done with all other args we will take the return of the inner call and - // update the sret pointer with it - outer_pos = 1; - } - let enzyme_const = cx.create_metadata(b"enzyme_const"); let enzyme_out = cx.create_metadata(b"enzyme_out"); let enzyme_dup = cx.create_metadata(b"enzyme_dup"); @@ -193,92 +236,6 @@ fn match_args_from_caller_to_enzyme<'ll>( } } -// On LLVM-IR, we can luckily declare __enzyme_ functions without specifying the input -// arguments. We do however need to declare them with their correct return type. -// We already figured the correct return type out in our frontend, when generating the outer_fn, -// so we can now just go ahead and use that. This is not always trivial, e.g. because sret. -// Beyond sret, this article describes our challenges nicely: -// <https://yorickpeterse.com/articles/the-mess-that-is-handling-structure-arguments-and-returns-in-llvm/> -// I.e. (i32, f32) will get merged into i64, but we don't handle that yet. -fn compute_enzyme_fn_ty<'ll>( - cx: &SimpleCx<'ll>, - attrs: &AutoDiffAttrs, - fn_to_diff: &'ll Value, - outer_fn: &'ll Value, -) -> &'ll llvm::Type { - let fn_ty = cx.get_type_of_global(outer_fn); - let mut ret_ty = cx.get_return_type(fn_ty); - - let has_sret = has_sret(outer_fn); - - if has_sret { - // Now we don't just forward the return type, so we have to figure it out based on the - // primal return type, in combination with the autodiff settings. - let fn_ty = cx.get_type_of_global(fn_to_diff); - let inner_ret_ty = cx.get_return_type(fn_ty); - - let void_ty = unsafe { llvm::LLVMVoidTypeInContext(cx.llcx) }; - if inner_ret_ty == void_ty { - // This indicates that even the inner function has an sret. - // Right now I only look for an sret in the outer function. - // This *probably* needs some extra handling, but I never ran - // into such a case. So I'll wait for user reports to have a test case. - bug!("sret in inner function"); - } - - if attrs.width == 1 { - // Enzyme returns a struct of style: - // `{ original_ret(if requested), float, float, ... }` - let mut struct_elements = vec![]; - if attrs.has_primal_ret() { - struct_elements.push(inner_ret_ty); - } - // Next, we push the list of active floats, since they will be lowered to `enzyme_out`, - // and therefore part of the return struct. - let param_tys = cx.func_params_types(fn_ty); - for (act, param_ty) in attrs.input_activity.iter().zip(param_tys) { - if matches!(act, DiffActivity::Active) { - // Now find the float type at position i based on the fn_ty, - // to know what (f16/f32/f64/...) to add to the struct. - struct_elements.push(param_ty); - } - } - ret_ty = cx.type_struct(&struct_elements, false); - } else { - // First we check if we also have to deal with the primal return. - match attrs.mode { - DiffMode::Forward => match attrs.ret_activity { - DiffActivity::Dual => { - let arr_ty = - unsafe { llvm::LLVMArrayType2(inner_ret_ty, attrs.width as u64 + 1) }; - ret_ty = arr_ty; - } - DiffActivity::DualOnly => { - let arr_ty = - unsafe { llvm::LLVMArrayType2(inner_ret_ty, attrs.width as u64) }; - ret_ty = arr_ty; - } - DiffActivity::Const => { - todo!("Not sure, do we need to do something here?"); - } - _ => { - bug!("unreachable"); - } - }, - DiffMode::Reverse => { - todo!("Handle sret for reverse mode"); - } - _ => { - bug!("unreachable"); - } - } - } - } - - // LLVM can figure out the input types on it's own, so we take a shortcut here. 
- unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, True) } -} - /// When differentiating `fn_to_diff`, take a `outer_fn` and generate another /// function with expected naming and calling conventions[^1] which will be /// discovered by the enzyme LLVM pass and its body populated with the differentiated @@ -288,11 +245,15 @@ fn compute_enzyme_fn_ty<'ll>( /// [^1]: <https://enzyme.mit.edu/getting_started/CallingConvention/> // FIXME(ZuseZ4): `outer_fn` should include upstream safety checks to // cover some assumptions of enzyme/autodiff, which could lead to UB otherwise. -fn generate_enzyme_call<'ll>( +pub(crate) fn generate_enzyme_call<'ll, 'tcx>( + builder: &mut Builder<'_, 'll, 'tcx>, cx: &SimpleCx<'ll>, fn_to_diff: &'ll Value, - outer_fn: &'ll Value, + outer_name: &str, + ret_ty: &'ll Type, + fn_args: &[&'ll Value], attrs: AutoDiffAttrs, + dest: PlaceRef<'tcx, &'ll Value>, ) { // We have to pick the name depending on whether we want forward or reverse mode autodiff. let mut ad_name: String = match attrs.mode { @@ -302,11 +263,9 @@ fn generate_enzyme_call<'ll>( } .to_string(); - // add outer_fn name to ad_name to make it unique, in case users apply autodiff to multiple + // add outer_name to ad_name to make it unique, in case users apply autodiff to multiple // functions. Unwrap will only panic, if LLVM gave us an invalid string. - let name = llvm::get_value_name(outer_fn); - let outer_fn_name = std::str::from_utf8(&name).unwrap(); - ad_name.push_str(outer_fn_name); + ad_name.push_str(outer_name); // Let us assume the user wrote the following function square: // @@ -316,14 +275,8 @@ fn generate_enzyme_call<'ll>( // %0 = fmul double %x, %x // ret double %0 // } - // ``` - // - // The user now applies autodiff to the function square, in which case fn_to_diff will be `square`. - // Our macro generates the following placeholder code (slightly simplified): // - // ```llvm // define double @dsquare(double %x) { - // ; placeholder code // return 0.0; // } // ``` @@ -340,175 +293,44 @@ fn generate_enzyme_call<'ll>( // ret double %0 // } // ``` - unsafe { - let enzyme_ty = compute_enzyme_fn_ty(cx, &attrs, fn_to_diff, outer_fn); - - // FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and - // think a bit more about what should go here. - let cc = llvm::LLVMGetFunctionCallConv(outer_fn); - let ad_fn = declare_simple_fn( - cx, - &ad_name, - llvm::CallConv::try_from(cc).expect("invalid callconv"), - llvm::UnnamedAddr::No, - llvm::Visibility::Default, - enzyme_ty, - ); - - // Otherwise LLVM might inline our temporary code before the enzyme pass has a chance to - // do it's work. - let attr = llvm::AttributeKind::NoInline.create_attr(cx.llcx); - attributes::apply_to_llfn(ad_fn, Function, &[attr]); - - // We add a made-up attribute just such that we can recognize it after AD to update - // (no)-inline attributes. We'll then also remove this attribute. 
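The naming logic above builds the Enzyme entry point from a mode-dependent prefix plus the name of the generated wrapper, which is also why the removed comments show a call to `@__enzyme_fwddiff_foo`. Below is a small sketch of that name assembly; the reverse-mode prefix is Enzyme's `__enzyme_autodiff` entry point, and the wrapper name is illustrative.

```rust
enum DiffMode { Forward, Reverse }

/// Sketch of how the Enzyme call-site symbol is assembled: a prefix chosen by
/// differentiation mode, with the outer function's name appended so that
/// differentiating several functions yields distinct symbols.
fn enzyme_symbol(mode: DiffMode, outer_name: &str) -> String {
    let mut name = match mode {
        DiffMode::Forward => "__enzyme_fwddiff",
        DiffMode::Reverse => "__enzyme_autodiff",
    }
    .to_string();
    name.push_str(outer_name);
    name
}

fn main() {
    assert_eq!(enzyme_symbol(DiffMode::Forward, "_foo"), "__enzyme_fwddiff_foo");
}
```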
- let enzyme_marker_attr = llvm::CreateAttrString(cx.llcx, "enzyme_marker"); - attributes::apply_to_llfn(outer_fn, Function, &[enzyme_marker_attr]); - - // first, remove all calls from fnc - let entry = llvm::LLVMGetFirstBasicBlock(outer_fn); - let br = llvm::LLVMRustGetTerminator(entry); - llvm::LLVMRustEraseInstFromParent(br); - - let last_inst = llvm::LLVMRustGetLastInstruction(entry).unwrap(); - let mut builder = SBuilder::build(cx, entry); - - let num_args = llvm::LLVMCountParams(&fn_to_diff); - let mut args = Vec::with_capacity(num_args as usize + 1); - args.push(fn_to_diff); - - let enzyme_primal_ret = cx.create_metadata(b"enzyme_primal_return"); - if matches!(attrs.ret_activity, DiffActivity::Dual | DiffActivity::Active) { - args.push(cx.get_metadata_value(enzyme_primal_ret)); - } - if attrs.width > 1 { - let enzyme_width = cx.create_metadata(b"enzyme_width"); - args.push(cx.get_metadata_value(enzyme_width)); - args.push(cx.get_const_int(cx.type_i64(), attrs.width as u64)); - } - - let has_sret = has_sret(outer_fn); - let outer_args: Vec<&llvm::Value> = get_params(outer_fn); - match_args_from_caller_to_enzyme( - &cx, - &builder, - attrs.width, - &mut args, - &attrs.input_activity, - &outer_args, - has_sret, - ); - - let call = builder.call(enzyme_ty, ad_fn, &args, None); - - // This part is a bit iffy. LLVM requires that a call to an inlineable function has some - // metadata attached to it, but we just created this code oota. Given that the - // differentiated function already has partly confusing metadata, and given that this - // affects nothing but the auttodiff IR, we take a shortcut and just steal metadata from the - // dummy code which we inserted at a higher level. - // FIXME(ZuseZ4): Work with Enzyme core devs to clarify what debug metadata issues we have, - // and how to best improve it for enzyme core and rust-enzyme. - let md_ty = cx.get_md_kind_id("dbg"); - if llvm::LLVMRustHasMetadata(last_inst, md_ty) { - let md = llvm::LLVMRustDIGetInstMetadata(last_inst) - .expect("failed to get instruction metadata"); - let md_todiff = cx.get_metadata_value(md); - llvm::LLVMSetMetadata(call, md_ty, md_todiff); - } else { - // We don't panic, since depending on whether we are in debug or release mode, we might - // have no debug info to copy, which would then be ok. - trace!("no dbg info"); - } - - // Now that we copied the metadata, get rid of dummy code. - llvm::LLVMRustEraseInstUntilInclusive(entry, last_inst); - - if cx.val_ty(call) == cx.type_void() || has_sret { - if has_sret { - // This is what we already have in our outer_fn (shortened): - // define void @_foo(ptr <..> sret([32 x i8]) initializes((0, 32)) %0, <...>) { - // %7 = call [4 x double] (...) @__enzyme_fwddiff_foo(ptr @square, metadata !"enzyme_width", i64 4, <...>) - // <Here we are, we want to add the following two lines> - // store [4 x double] %7, ptr %0, align 8 - // ret void - // } - - // now store the result of the enzyme call into the sret pointer. - let sret_ptr = outer_args[0]; - let call_ty = cx.val_ty(call); - if attrs.width == 1 { - assert_eq!(cx.type_kind(call_ty), TypeKind::Struct); - } else { - assert_eq!(cx.type_kind(call_ty), TypeKind::Array); - } - llvm::LLVMBuildStore(&builder.llbuilder, call, sret_ptr); - } - builder.ret_void(); - } else { - builder.ret(call); - } - - // Let's crash in case that we messed something up above and generated invalid IR. 
- llvm::LLVMRustVerifyFunction( - outer_fn, - llvm::LLVMRustVerifierFailureAction::LLVMAbortProcessAction, - ); - } -} - -pub(crate) fn differentiate<'ll>( - module: &'ll ModuleCodegen<ModuleLlvm>, - cgcx: &CodegenContext<LlvmCodegenBackend>, - diff_items: Vec<AutoDiffItem>, -) -> Result<(), FatalError> { - for item in &diff_items { - trace!("{}", item); + let enzyme_ty = unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, TRUE) }; + + // FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and + // think a bit more about what should go here. + let cc = unsafe { llvm::LLVMGetFunctionCallConv(fn_to_diff) }; + let ad_fn = declare_simple_fn( + cx, + &ad_name, + llvm::CallConv::try_from(cc).expect("invalid callconv"), + llvm::UnnamedAddr::No, + llvm::Visibility::Default, + enzyme_ty, + ); + + let num_args = llvm::LLVMCountParams(&fn_to_diff); + let mut args = Vec::with_capacity(num_args as usize + 1); + args.push(fn_to_diff); + + let enzyme_primal_ret = cx.create_metadata(b"enzyme_primal_return"); + if matches!(attrs.ret_activity, DiffActivity::Dual | DiffActivity::Active) { + args.push(cx.get_metadata_value(enzyme_primal_ret)); } - - let diag_handler = cgcx.create_dcx(); - - let cx = SimpleCx::new(module.module_llvm.llmod(), module.module_llvm.llcx, cgcx.pointer_size); - - // First of all, did the user try to use autodiff without using the -Zautodiff=Enable flag? - if !diff_items.is_empty() - && !cgcx.opts.unstable_opts.autodiff.contains(&rustc_session::config::AutoDiff::Enable) - { - return Err(diag_handler.handle().emit_almost_fatal(AutoDiffWithoutEnable)); - } - - // Here we replace the placeholder code with the actual autodiff code, which calls Enzyme. - for item in diff_items.iter() { - let name = item.source.clone(); - let fn_def: Option<&llvm::Value> = cx.get_function(&name); - let Some(fn_def) = fn_def else { - return Err(llvm_err( - diag_handler.handle(), - LlvmError::PrepareAutoDiff { - src: item.source.clone(), - target: item.target.clone(), - error: "could not find source function".to_owned(), - }, - )); - }; - debug!(?item.target); - let fn_target: Option<&llvm::Value> = cx.get_function(&item.target); - let Some(fn_target) = fn_target else { - return Err(llvm_err( - diag_handler.handle(), - LlvmError::PrepareAutoDiff { - src: item.source.clone(), - target: item.target.clone(), - error: "could not find target function".to_owned(), - }, - )); - }; - - generate_enzyme_call(&cx, fn_def, fn_target, item.attrs.clone()); + if attrs.width > 1 { + let enzyme_width = cx.create_metadata(b"enzyme_width"); + args.push(cx.get_metadata_value(enzyme_width)); + args.push(cx.get_const_int(cx.type_i64(), attrs.width as u64)); } - // FIXME(ZuseZ4): support SanitizeHWAddress and prevent illegal/unsupported opts + match_args_from_caller_to_enzyme( + &cx, + builder, + attrs.width, + &mut args, + &attrs.input_activity, + fn_args, + ); - trace!("done with differentiate()"); + let call = builder.call(enzyme_ty, None, None, ad_fn, &args, None, None); - Ok(()) + builder.store_to_place(call, dest.val); } diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index f29fefb66f0..11b79a7fe68 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -20,7 +20,7 @@ use tracing::debug; use crate::consts::const_alloc_to_llvm; pub(crate) use crate::context::CodegenCx; use crate::context::{GenericCx, SCx}; -use crate::llvm::{self, BasicBlock, Bool, ConstantInt, False, Metadata, True}; +use 
crate::llvm::{self, BasicBlock, ConstantInt, FALSE, Metadata, TRUE, ToLlvmBool}; use crate::type_::Type; use crate::value::Value; @@ -158,7 +158,7 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { self.type_kind(t) == TypeKind::Integer, "only allows integer types in const_int" ); - unsafe { llvm::LLVMConstInt(t, i as u64, True) } + unsafe { llvm::LLVMConstInt(t, i as u64, TRUE) } } fn const_u8(&self, i: u8) -> &'ll Value { @@ -192,7 +192,7 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { self.type_kind(t) == TypeKind::Integer, "only allows integer types in const_uint" ); - unsafe { llvm::LLVMConstInt(t, i, False) } + unsafe { llvm::LLVMConstInt(t, i, FALSE) } } fn const_uint_big(&self, t: &'ll Type, u: u128) -> &'ll Value { @@ -377,7 +377,7 @@ pub(crate) fn val_ty(v: &Value) -> &Type { pub(crate) fn bytes_in_context<'ll>(llcx: &'ll llvm::Context, bytes: &[u8]) -> &'ll Value { unsafe { let ptr = bytes.as_ptr() as *const c_char; - llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), True) + llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), TRUE) } } @@ -392,7 +392,7 @@ fn struct_in_context<'ll>( packed: bool, ) -> &'ll Value { let len = c_uint::try_from(elts.len()).expect("LLVMConstStructInContext elements len overflow"); - unsafe { llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed as Bool) } + unsafe { llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed.to_llvm_bool()) } } #[inline] diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs index 6b06daf3477..9ec7b0f80ae 100644 --- a/compiler/rustc_codegen_llvm/src/consts.rs +++ b/compiler/rustc_codegen_llvm/src/consts.rs @@ -4,6 +4,7 @@ use rustc_abi::{Align, HasDataLayout, Primitive, Scalar, Size, WrappingRange}; use rustc_codegen_ssa::common; use rustc_codegen_ssa::traits::*; use rustc_hir::LangItem; +use rustc_hir::attrs::Linkage; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs}; @@ -11,7 +12,7 @@ use rustc_middle::mir::interpret::{ Allocation, ConstAllocation, ErrorHandled, InitChunk, Pointer, Scalar as InterpScalar, read_target_uint, }; -use rustc_middle::mir::mono::{Linkage, MonoItem}; +use rustc_middle::mir::mono::MonoItem; use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf}; use rustc_middle::ty::{self, Instance}; use rustc_middle::{bug, span_bug}; diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index ee77774c688..4fd6110ac4a 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -8,7 +8,6 @@ use std::str; use rustc_abi::{HasDataLayout, Size, TargetDataLayout, VariantIdx}; use rustc_codegen_ssa::back::versioned_llvm_target; use rustc_codegen_ssa::base::{wants_msvc_seh, wants_wasm_eh}; -use rustc_codegen_ssa::common::TypeKind; use rustc_codegen_ssa::errors as ssa_errors; use rustc_codegen_ssa::traits::*; use rustc_data_structures::base_n::{ALPHANUMERIC_ONLY, ToBaseN}; @@ -213,6 +212,12 @@ pub(crate) unsafe fn create_module<'ll>( target_data_layout = target_data_layout.replace("p8:128:128:128:48", "p8:128:128") } } + if llvm_version < (22, 0, 0) { + if sess.target.arch == "avr" { + // LLVM 22.0 updated the default layout on avr: https://github.com/llvm/llvm-project/pull/153010 + target_data_layout = target_data_layout.replace("n8:16", "n8") + } + } // Ensure the data-layout values hardcoded remain the defaults. 
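The new AVR hunk just above follows the existing pattern in `create_module`: when the LLVM in use is older than the version rustc's hard-coded data-layout strings target, the string is patched down before the defaults check. A trivial standalone sketch of that version-gated fixup; the layout string here is shortened for illustration.

```rust
/// Sketch of the data-layout fixup pattern used in `create_module`: strip the
/// newer `n8:16` native-integer spec back to `n8` when building against an
/// LLVM older than 22 for AVR targets.
fn adjust_data_layout(mut layout: String, llvm_version: (u32, u32, u32), arch: &str) -> String {
    if llvm_version < (22, 0, 0) && arch == "avr" {
        // LLVM 22 updated the default AVR layout (llvm/llvm-project#153010).
        layout = layout.replace("n8:16", "n8");
    }
    layout
}

fn main() {
    // Shortened, illustrative AVR-style layout string.
    let layout = "e-P1-p:16:8-i8:8-n8:16-a:8".to_string();
    let fixed = adjust_data_layout(layout, (21, 1, 0), "avr");
    assert_eq!(fixed, "e-P1-p:16:8-i8:8-n8-a:8");
}
```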
{ @@ -372,6 +377,15 @@ pub(crate) unsafe fn create_module<'ll>( } } + if let Some(regparm_count) = sess.opts.unstable_opts.regparm { + llvm::add_module_flag_u32( + llmod, + llvm::ModuleFlagMergeBehavior::Error, + "NumRegisterParameters", + regparm_count, + ); + } + if let Some(BranchProtection { bti, pac_ret }) = sess.opts.unstable_opts.branch_protection { if sess.target.arch == "aarch64" { llvm::add_module_flag_u32( @@ -457,6 +471,15 @@ pub(crate) unsafe fn create_module<'ll>( } } + if sess.opts.unstable_opts.indirect_branch_cs_prefix { + llvm::add_module_flag_u32( + llmod, + llvm::ModuleFlagMergeBehavior::Override, + "indirect_branch_cs_prefix", + 1, + ); + } + match (sess.opts.unstable_opts.small_data_threshold, sess.target.small_data_threshold_support()) { // Set up the small-data optimization limit for architectures that use @@ -654,10 +677,6 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> { } } impl<'ll> SimpleCx<'ll> { - pub(crate) fn get_return_type(&self, ty: &'ll Type) -> &'ll Type { - assert_eq!(self.type_kind(ty), TypeKind::Function); - unsafe { llvm::LLVMGetReturnType(ty) } - } pub(crate) fn get_type_of_global(&self, val: &'ll Value) -> &'ll Type { unsafe { llvm::LLVMGlobalGetValueType(val) } } @@ -682,7 +701,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { } pub(crate) fn get_const_int(&self, ty: &'ll Type, val: u64) -> &'ll Value { - unsafe { llvm::LLVMConstInt(ty, val, llvm::False) } + unsafe { llvm::LLVMConstInt(ty, val, llvm::FALSE) } } pub(crate) fn get_const_i64(&self, n: u64) -> &'ll Value { @@ -721,16 +740,6 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { llvm::LLVMMDStringInContext2(self.llcx(), name.as_ptr() as *const c_char, name.len()) } } - - pub(crate) fn get_functions(&self) -> Vec<&'ll Value> { - let mut functions = vec![]; - let mut func = unsafe { llvm::LLVMGetFirstFunction(self.llmod()) }; - while let Some(f) = func { - functions.push(f); - func = unsafe { llvm::LLVMGetNextFunction(f) } - } - functions - } } impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs index 6eb7042da61..7a6dc008c7b 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs @@ -72,7 +72,7 @@ pub(crate) fn get_or_insert_gdb_debug_scripts_section_global<'ll>( .unwrap_or_else(|| bug!("symbol `{}` is already defined", section_var_name)); llvm::set_section(section_var, c".debug_gdb_scripts"); llvm::set_initializer(section_var, cx.const_bytes(section_contents)); - llvm::LLVMSetGlobalConstant(section_var, llvm::True); + llvm::LLVMSetGlobalConstant(section_var, llvm::TRUE); llvm::set_unnamed_address(section_var, llvm::UnnamedAddr::Global); llvm::set_linkage(section_var, llvm::Linkage::LinkOnceODRLinkage); // This should make sure that the whole section is not larger than diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs index d1502d2b1e6..18a783a348a 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs @@ -276,7 +276,7 @@ pub(super) fn build_type_with_children<'ll, 'tcx>( && let ty::Adt(adt_def, args) = ty.kind() { let def_id = adt_def.did(); - // If any sub type reference the original type definition and the sub type has a type + // If any child type references the original type definition and the child type has a type 
// parameter that strictly contains the original parameter, the original type is a recursive // type that can expanding indefinitely. Example, // ``` @@ -285,21 +285,43 @@ pub(super) fn build_type_with_children<'ll, 'tcx>( // Item(T), // } // ``` - let is_expanding_recursive = adt_def.is_enum() - && debug_context(cx).adt_stack.borrow().iter().any(|(parent_def_id, parent_args)| { - if def_id == *parent_def_id { - args.iter().zip(parent_args.iter()).any(|(arg, parent_arg)| { - if let (Some(arg), Some(parent_arg)) = (arg.as_type(), parent_arg.as_type()) - { - arg != parent_arg && arg.contains(parent_arg) - } else { - false - } - }) - } else { - false - } - }); + let is_expanding_recursive = { + let stack = debug_context(cx).adt_stack.borrow(); + stack + .iter() + .enumerate() + .rev() + .skip(1) + .filter(|(_, (ancestor_def_id, _))| def_id == *ancestor_def_id) + .any(|(ancestor_index, (_, ancestor_args))| { + args.iter() + .zip(ancestor_args.iter()) + .filter_map(|(arg, ancestor_arg)| arg.as_type().zip(ancestor_arg.as_type())) + .any(|(arg, ancestor_arg)| + // Strictly contains. + (arg != ancestor_arg && arg.contains(ancestor_arg)) + // Check all types between current and ancestor use the + // ancestor_arg. + // Otherwise, duplicate wrappers in normal recursive type may be + // regarded as expanding. + // ``` + // struct Recursive { + // a: Box<Box<Recursive>>, + // } + // ``` + // It can produce an ADT stack like this, + // - Box<Recursive> + // - Recursive + // - Box<Box<Recursive>> + && stack[ancestor_index + 1..stack.len()].iter().all( + |(_, intermediate_args)| + intermediate_args + .iter() + .filter_map(|arg| arg.as_type()) + .any(|mid_arg| mid_arg.contains(ancestor_arg)) + )) + }) + }; if is_expanding_recursive { // FIXME: indicate that this is an expanding recursive type in stub metadata? return DINodeCreationResult::new(stub_info.metadata, false); diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs index 6cbf2dbf7d3..2c3a84499ac 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs @@ -533,31 +533,26 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { // First, let's see if this is a method within an inherent impl. Because // if yes, we want to make the result subroutine DIE a child of the // subroutine's self-type. - if let Some(impl_def_id) = cx.tcx.impl_of_assoc(instance.def_id()) { - // If the method does *not* belong to a trait, proceed - if cx.tcx.trait_id_of_impl(impl_def_id).is_none() { - let impl_self_ty = cx.tcx.instantiate_and_normalize_erasing_regions( - instance.args, - cx.typing_env(), - cx.tcx.type_of(impl_def_id), - ); - - // Only "class" methods are generally understood by LLVM, - // so avoid methods on other types (e.g., `<*mut T>::null`). - if let ty::Adt(def, ..) 
= impl_self_ty.kind() - && !def.is_box() - { - // Again, only create type information if full debuginfo is enabled - if cx.sess().opts.debuginfo == DebugInfo::Full && !impl_self_ty.has_param() - { - return (type_di_node(cx, impl_self_ty), true); - } else { - return (namespace::item_namespace(cx, def.did()), false); - } + // For trait method impls we still use the "parallel namespace" + // strategy + if let Some(imp_def_id) = cx.tcx.inherent_impl_of_assoc(instance.def_id()) { + let impl_self_ty = cx.tcx.instantiate_and_normalize_erasing_regions( + instance.args, + cx.typing_env(), + cx.tcx.type_of(imp_def_id), + ); + + // Only "class" methods are generally understood by LLVM, + // so avoid methods on other types (e.g., `<*mut T>::null`). + if let ty::Adt(def, ..) = impl_self_ty.kind() + && !def.is_box() + { + // Again, only create type information if full debuginfo is enabled + if cx.sess().opts.debuginfo == DebugInfo::Full && !impl_self_ty.has_param() { + return (type_di_node(cx, impl_self_ty), true); + } else { + return (namespace::item_namespace(cx, def.did()), false); } - } else { - // For trait method impls we still use the "parallel namespace" - // strategy } } diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs b/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs index b4d639368b0..1dcf4ff3062 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs @@ -38,7 +38,7 @@ pub(crate) fn item_namespace<'ll>(cx: &CodegenCx<'ll, '_>, def_id: DefId) -> &'l parent_scope, namespace_name_string.as_ptr(), namespace_name_string.len(), - llvm::False, // ExportSymbols (only relevant for C++ anonymous namespaces) + llvm::FALSE, // ExportSymbols (only relevant for C++ anonymous namespaces) ) }; diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 7b27e496986..49d3dedbeab 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -3,24 +3,29 @@ use std::cmp::Ordering; use rustc_abi::{Align, BackendRepr, ExternAbi, Float, HasDataLayout, Primitive, Size}; use rustc_codegen_ssa::base::{compare_simd_types, wants_msvc_seh, wants_wasm_eh}; +use rustc_codegen_ssa::codegen_attrs::autodiff_attrs; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; use rustc_codegen_ssa::errors::{ExpectedPointerMutability, InvalidMonomorphization}; use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue}; use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue}; use rustc_codegen_ssa::traits::*; -use rustc_hir as hir; +use rustc_hir::def_id::LOCAL_CRATE; +use rustc_hir::{self as hir}; use rustc_middle::mir::BinOp; use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, LayoutOf}; -use rustc_middle::ty::{self, GenericArgsRef, Ty}; +use rustc_middle::ty::{self, GenericArgsRef, Instance, Ty, TyCtxt, TypingEnv}; use rustc_middle::{bug, span_bug}; use rustc_span::{Span, Symbol, sym}; -use rustc_symbol_mangling::mangle_internal_symbol; +use rustc_symbol_mangling::{mangle_internal_symbol, symbol_name_for_instance_in_crate}; +use rustc_target::callconv::PassMode; use rustc_target::spec::PanicStrategy; use tracing::debug; use crate::abi::FnAbiLlvmExt; use crate::builder::Builder; +use crate::builder::autodiff::{adjust_activity_to_abi, generate_enzyme_call}; use crate::context::CodegenCx; +use crate::errors::AutoDiffWithoutEnable; use crate::llvm::{self, Metadata}; use crate::type_::Type; use 
crate::type_of::LayoutLlvmExt; @@ -189,6 +194,10 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { &[ptr, args[1].immediate()], ) } + sym::autodiff => { + codegen_autodiff(self, tcx, instance, args, result); + return Ok(()); + } sym::is_val_statically_known => { if let OperandValue::Immediate(imm) = args[0].val { self.call_intrinsic( @@ -321,10 +330,16 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { _ => bug!(), }; let ptr = args[0].immediate(); + let locality = fn_args.const_at(1).to_value().valtree.unwrap_leaf().to_i32(); self.call_intrinsic( "llvm.prefetch", &[self.val_ty(ptr)], - &[ptr, self.const_i32(rw), args[1].immediate(), self.const_i32(cache_type)], + &[ + ptr, + self.const_i32(rw), + self.const_i32(locality), + self.const_i32(cache_type), + ], ) } sym::carrying_mul_add => { @@ -1113,6 +1128,143 @@ fn get_rust_try_fn<'a, 'll, 'tcx>( rust_try } +fn codegen_autodiff<'ll, 'tcx>( + bx: &mut Builder<'_, 'll, 'tcx>, + tcx: TyCtxt<'tcx>, + instance: ty::Instance<'tcx>, + args: &[OperandRef<'tcx, &'ll Value>], + result: PlaceRef<'tcx, &'ll Value>, +) { + if !tcx.sess.opts.unstable_opts.autodiff.contains(&rustc_session::config::AutoDiff::Enable) { + let _ = tcx.dcx().emit_almost_fatal(AutoDiffWithoutEnable); + } + + let fn_args = instance.args; + let callee_ty = instance.ty(tcx, bx.typing_env()); + + let sig = callee_ty.fn_sig(tcx).skip_binder(); + + let ret_ty = sig.output(); + let llret_ty = bx.layout_of(ret_ty).llvm_type(bx); + + // Get source, diff, and attrs + let (source_id, source_args) = match fn_args.into_type_list(tcx)[0].kind() { + ty::FnDef(def_id, source_params) => (def_id, source_params), + _ => bug!("invalid autodiff intrinsic args"), + }; + + let fn_source = match Instance::try_resolve(tcx, bx.cx.typing_env(), *source_id, source_args) { + Ok(Some(instance)) => instance, + Ok(None) => bug!( + "could not resolve ({:?}, {:?}) to a specific autodiff instance", + source_id, + source_args + ), + Err(_) => { + // An error has already been emitted + return; + } + }; + + let source_symbol = symbol_name_for_instance_in_crate(tcx, fn_source.clone(), LOCAL_CRATE); + let Some(fn_to_diff) = bx.cx.get_function(&source_symbol) else { + bug!("could not find source function") + }; + + let (diff_id, diff_args) = match fn_args.into_type_list(tcx)[1].kind() { + ty::FnDef(def_id, diff_args) => (def_id, diff_args), + _ => bug!("invalid args"), + }; + + let fn_diff = match Instance::try_resolve(tcx, bx.cx.typing_env(), *diff_id, diff_args) { + Ok(Some(instance)) => instance, + Ok(None) => bug!( + "could not resolve ({:?}, {:?}) to a specific autodiff instance", + diff_id, + diff_args + ), + Err(_) => { + // An error has already been emitted + return; + } + }; + + let val_arr = get_args_from_tuple(bx, args[2], fn_diff); + let diff_symbol = symbol_name_for_instance_in_crate(tcx, fn_diff.clone(), LOCAL_CRATE); + + let Some(mut diff_attrs) = autodiff_attrs(tcx, fn_diff.def_id()) else { + bug!("could not find autodiff attrs") + }; + + adjust_activity_to_abi( + tcx, + fn_source.ty(tcx, TypingEnv::fully_monomorphized()), + &mut diff_attrs.input_activity, + ); + + // Build body + generate_enzyme_call( + bx, + bx.cx, + fn_to_diff, + &diff_symbol, + llret_ty, + &val_arr, + diff_attrs.clone(), + result, + ); +} + +fn get_args_from_tuple<'ll, 'tcx>( + bx: &mut Builder<'_, 'll, 'tcx>, + tuple_op: OperandRef<'tcx, &'ll Value>, + fn_instance: Instance<'tcx>, +) -> Vec<&'ll Value> { + let cx = bx.cx; + let fn_abi = 
cx.fn_abi_of_instance(fn_instance, ty::List::empty()); + + match tuple_op.val { + OperandValue::Immediate(val) => vec![val], + OperandValue::Pair(v1, v2) => vec![v1, v2], + OperandValue::Ref(ptr) => { + let tuple_place = PlaceRef { val: ptr, layout: tuple_op.layout }; + + let mut result = Vec::with_capacity(fn_abi.args.len()); + let mut tuple_index = 0; + + for arg in &fn_abi.args { + match arg.mode { + PassMode::Ignore => {} + PassMode::Direct(_) | PassMode::Cast { .. } => { + let field = tuple_place.project_field(bx, tuple_index); + let llvm_ty = field.layout.llvm_type(bx.cx); + let val = bx.load(llvm_ty, field.val.llval, field.val.align); + result.push(val); + tuple_index += 1; + } + PassMode::Pair(_, _) => { + let field = tuple_place.project_field(bx, tuple_index); + let llvm_ty = field.layout.llvm_type(bx.cx); + let pair_val = bx.load(llvm_ty, field.val.llval, field.val.align); + result.push(bx.extract_value(pair_val, 0)); + result.push(bx.extract_value(pair_val, 1)); + tuple_index += 1; + } + PassMode::Indirect { .. } => { + let field = tuple_place.project_field(bx, tuple_index); + result.push(field.val.llval); + tuple_index += 1; + } + } + } + + result + } + + OperandValue::ZeroSized => vec![], + } +} + fn generic_simd_intrinsic<'ll, 'tcx>( bx: &mut Builder<'_, 'll, 'tcx>, name: Symbol, diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index ca84b6de8b1..628cb34fd9e 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -30,7 +30,6 @@ use context::SimpleCx; use errors::ParseTargetMachineConfig; use llvm_util::target_config; use rustc_ast::expand::allocator::AllocatorKind; -use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule}; use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryConfig, TargetMachineFactoryFn, @@ -38,7 +37,7 @@ use rustc_codegen_ssa::back::write::{ use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig}; use rustc_data_structures::fx::FxIndexMap; -use rustc_errors::{DiagCtxtHandle, FatalError}; +use rustc_errors::DiagCtxtHandle; use rustc_metadata::EncodedMetadata; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::ty::TyCtxt; @@ -47,18 +46,11 @@ use rustc_session::Session; use rustc_session::config::{OptLevel, OutputFilenames, PrintKind, PrintRequest}; use rustc_span::Symbol; -mod back { - pub(crate) mod archive; - pub(crate) mod lto; - pub(crate) mod owned_target_machine; - mod profiling; - pub(crate) mod write; -} - mod abi; mod allocator; mod asm; mod attributes; +mod back; mod base; mod builder; mod callee; @@ -173,20 +165,15 @@ impl WriteBackendMethods for LlvmCodegenBackend { exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], modules: Vec<FatLtoInput<Self>>, - diff_fncs: Vec<AutoDiffItem>, - ) -> Result<ModuleCodegen<Self::Module>, FatalError> { + ) -> ModuleCodegen<Self::Module> { let mut module = - back::lto::run_fat(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, modules)?; - - if !diff_fncs.is_empty() { - builder::autodiff::differentiate(&module, cgcx, diff_fncs)?; - } + back::lto::run_fat(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, modules); let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); - back::lto::run_pass_manager(cgcx, dcx, &mut module, false)?; + back::lto::run_pass_manager(cgcx, dcx, &mut module, false); - 
Ok(module) + module } fn run_thin_lto( cgcx: &CodegenContext<Self>, @@ -194,7 +181,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>, - ) -> Result<(Vec<ThinModule<Self>>, Vec<WorkProduct>), FatalError> { + ) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>) { back::lto::run_thin( cgcx, exported_symbols_for_lto, @@ -208,20 +195,20 @@ impl WriteBackendMethods for LlvmCodegenBackend { dcx: DiagCtxtHandle<'_>, module: &mut ModuleCodegen<Self::Module>, config: &ModuleConfig, - ) -> Result<(), FatalError> { + ) { back::write::optimize(cgcx, dcx, module, config) } fn optimize_thin( cgcx: &CodegenContext<Self>, thin: ThinModule<Self>, - ) -> Result<ModuleCodegen<Self::Module>, FatalError> { + ) -> ModuleCodegen<Self::Module> { back::lto::optimize_thin_module(thin, cgcx) } fn codegen( cgcx: &CodegenContext<Self>, module: ModuleCodegen<Self::Module>, config: &ModuleConfig, - ) -> Result<CompiledModule, FatalError> { + ) -> CompiledModule { back::write::codegen(cgcx, module, config) } fn prepare_thin( @@ -420,12 +407,12 @@ impl ModuleLlvm { cgcx: &CodegenContext<LlvmCodegenBackend>, name: &str, dcx: DiagCtxtHandle<'_>, - ) -> Result<OwnedTargetMachine, FatalError> { + ) -> OwnedTargetMachine { let tm_factory_config = TargetMachineFactoryConfig::new(cgcx, name); match (cgcx.tm_factory)(tm_factory_config) { - Ok(m) => Ok(m), + Ok(m) => m, Err(e) => { - return Err(dcx.emit_almost_fatal(ParseTargetMachineConfig(e))); + dcx.emit_fatal(ParseTargetMachineConfig(e)); } } } @@ -435,13 +422,13 @@ impl ModuleLlvm { name: &CStr, buffer: &[u8], dcx: DiagCtxtHandle<'_>, - ) -> Result<Self, FatalError> { + ) -> Self { unsafe { let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names); - let llmod_raw = back::lto::parse_module(llcx, name, buffer, dcx)?; - let tm = ModuleLlvm::tm_from_cgcx(cgcx, name.to_str().unwrap(), dcx)?; + let llmod_raw = back::lto::parse_module(llcx, name, buffer, dcx); + let tm = ModuleLlvm::tm_from_cgcx(cgcx, name.to_str().unwrap(), dcx); - Ok(ModuleLlvm { llmod_raw, llcx, tm: ManuallyDrop::new(tm) }) + ModuleLlvm { llmod_raw, llcx, tm: ManuallyDrop::new(tm) } } } diff --git a/compiler/rustc_codegen_llvm/src/llvm/archive_ro.rs b/compiler/rustc_codegen_llvm/src/llvm/archive_ro.rs deleted file mode 100644 index 51bcc4d123d..00000000000 --- a/compiler/rustc_codegen_llvm/src/llvm/archive_ro.rs +++ /dev/null @@ -1,94 +0,0 @@ -//! A wrapper around LLVM's archive (.a) code - -use std::path::Path; -use std::{slice, str}; - -use rustc_fs_util::path_to_c_string; - -pub(crate) struct ArchiveRO { - pub raw: &'static mut super::Archive, -} - -unsafe impl Send for ArchiveRO {} - -pub(crate) struct Iter<'a> { - raw: &'a mut super::ArchiveIterator<'a>, -} - -pub(crate) struct Child<'a> { - pub raw: &'a mut super::ArchiveChild<'a>, -} - -impl ArchiveRO { - /// Opens a static archive for read-only purposes. This is more optimized - /// than the `open` method because it uses LLVM's internal `Archive` class - /// rather than shelling out to `ar` for everything. - /// - /// If this archive is used with a mutable method, then an error will be - /// raised. 
- pub(crate) fn open(dst: &Path) -> Result<ArchiveRO, String> { - unsafe { - let s = path_to_c_string(dst); - let ar = super::LLVMRustOpenArchive(s.as_ptr()).ok_or_else(|| { - super::last_error().unwrap_or_else(|| "failed to open archive".to_owned()) - })?; - Ok(ArchiveRO { raw: ar }) - } - } - - pub(crate) fn iter(&self) -> Iter<'_> { - unsafe { Iter { raw: super::LLVMRustArchiveIteratorNew(self.raw) } } - } -} - -impl Drop for ArchiveRO { - fn drop(&mut self) { - unsafe { - super::LLVMRustDestroyArchive(&mut *(self.raw as *mut _)); - } - } -} - -impl<'a> Iterator for Iter<'a> { - type Item = Result<Child<'a>, String>; - - fn next(&mut self) -> Option<Result<Child<'a>, String>> { - unsafe { - match super::LLVMRustArchiveIteratorNext(self.raw) { - Some(raw) => Some(Ok(Child { raw })), - None => super::last_error().map(Err), - } - } - } -} - -impl<'a> Drop for Iter<'a> { - fn drop(&mut self) { - unsafe { - super::LLVMRustArchiveIteratorFree(&mut *(self.raw as *mut _)); - } - } -} - -impl<'a> Child<'a> { - pub(crate) fn name(&self) -> Option<&'a str> { - unsafe { - let mut name_len = 0; - let name_ptr = super::LLVMRustArchiveChildName(self.raw, &mut name_len); - if name_ptr.is_null() { - None - } else { - let name = slice::from_raw_parts(name_ptr as *const u8, name_len as usize); - str::from_utf8(name).ok().map(|s| s.trim()) - } - } - } -} - -impl<'a> Drop for Child<'a> { - fn drop(&mut self) { - unsafe { - super::LLVMRustArchiveChildFree(&mut *(self.raw as *mut _)); - } - } -} diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 75d3d27f74e..ba590851dbd 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -11,9 +11,8 @@ //! the need for an extra cast from `*const u8` on the Rust side. #![allow(non_camel_case_types)] -#![allow(non_upper_case_globals)] -use std::fmt::Debug; +use std::fmt::{self, Debug}; use std::marker::PhantomData; use std::num::NonZero; use std::ptr; @@ -33,10 +32,59 @@ use crate::llvm; /// In the LLVM-C API, boolean values are passed as `typedef int LLVMBool`, /// which has a different ABI from Rust or C++ `bool`. -pub(crate) type Bool = c_int; +/// +/// This wrapper does not implement `PartialEq`. +/// To test the underlying boolean value, use [`Self::is_true`]. +#[derive(Clone, Copy)] +#[repr(transparent)] +pub(crate) struct Bool { + value: c_int, +} + +pub(crate) const TRUE: Bool = Bool::TRUE; +pub(crate) const FALSE: Bool = Bool::FALSE; + +impl Bool { + pub(crate) const TRUE: Self = Self { value: 1 }; + pub(crate) const FALSE: Self = Self { value: 0 }; + + pub(crate) const fn from_bool(rust_bool: bool) -> Self { + if rust_bool { Self::TRUE } else { Self::FALSE } + } + + /// Converts this LLVM-C boolean to a Rust `bool` + pub(crate) fn is_true(self) -> bool { + // Since we're interacting with a C API, follow the C convention of + // treating any nonzero value as true. + self.value != Self::FALSE.value + } +} -pub(crate) const True: Bool = 1 as Bool; -pub(crate) const False: Bool = 0 as Bool; +impl Debug for Bool { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.value { + 0 => f.write_str("FALSE"), + 1 => f.write_str("TRUE"), + // As with `Self::is_true`, treat any nonzero value as true. + v => write!(f, "TRUE ({v})"), + } + } +} + +/// Convenience trait to convert `bool` to `llvm::Bool` with an explicit method call. 
+/// +/// Being able to write `b.to_llvm_bool()` is less noisy than `llvm::Bool::from(b)`, +/// while being more explicit and less mistake-prone than something like `b.into()`. +pub(crate) trait ToLlvmBool: Copy { + fn to_llvm_bool(self) -> llvm::Bool; +} + +impl ToLlvmBool for bool { + #[inline(always)] + fn to_llvm_bool(self) -> llvm::Bool { + llvm::Bool::from_bool(self) + } +} /// Wrapper for a raw enum value returned from LLVM's C APIs. /// @@ -97,6 +145,7 @@ pub(crate) enum ModuleFlagMergeBehavior { // Consts for the LLVM CallConv type, pre-cast to usize. +/// Must match the layout of `LLVMTailCallKind`. #[derive(Copy, Clone, PartialEq, Debug)] #[repr(C)] #[allow(dead_code)] @@ -214,7 +263,7 @@ pub(crate) enum AttributeKind { MinSize = 4, Naked = 5, NoAlias = 6, - NoCapture = 7, + CapturesAddress = 7, NoInline = 8, NonNull = 9, NoRedZone = 10, @@ -249,6 +298,8 @@ pub(crate) enum AttributeKind { FnRetThunkExtern = 41, Writable = 42, DeadOnUnwind = 43, + DeadOnReturn = 44, + CapturesReadOnly = 45, } /// LLVMIntPredicate @@ -331,10 +382,15 @@ impl RealPredicate { } } -/// LLVMTypeKind -#[derive(Copy, Clone, PartialEq, Debug)] +/// Must match the layout of `LLVMTypeKind`. +/// +/// Use [`RawEnum<TypeKind>`] for values of `LLVMTypeKind` returned from LLVM, +/// to avoid risk of UB if LLVM adds new enum values. +/// +/// All of LLVM's variants should be declared here, even if no Rust-side code refers +/// to them, because unknown variants will cause [`RawEnum::to_rust`] to panic. +#[derive(Copy, Clone, PartialEq, Debug, TryFromU32)] #[repr(C)] -#[expect(dead_code, reason = "Some variants are unused, but are kept to match LLVM-C")] pub(crate) enum TypeKind { Void = 0, Half = 1, @@ -609,17 +665,6 @@ pub(crate) enum DiagnosticLevel { Remark, } -/// LLVMRustArchiveKind -#[derive(Copy, Clone)] -#[repr(C)] -pub(crate) enum ArchiveKind { - K_GNU, - K_BSD, - K_DARWIN, - K_COFF, - K_AIXBIG, -} - unsafe extern "C" { // LLVMRustThinLTOData pub(crate) type ThinLTOData; @@ -768,19 +813,12 @@ pub(crate) struct Builder<'a>(InvariantOpaque<'a>); pub(crate) struct PassManager<'a>(InvariantOpaque<'a>); unsafe extern "C" { pub type TargetMachine; - pub(crate) type Archive; } -#[repr(C)] -pub(crate) struct ArchiveIterator<'a>(InvariantOpaque<'a>); -#[repr(C)] -pub(crate) struct ArchiveChild<'a>(InvariantOpaque<'a>); unsafe extern "C" { pub(crate) type Twine; pub(crate) type DiagnosticInfo; pub(crate) type SMDiagnostic; } -#[repr(C)] -pub(crate) struct RustArchiveMember<'a>(InvariantOpaque<'a>); /// Opaque pointee of `LLVMOperandBundleRef`. #[repr(C)] pub(crate) struct OperandBundle<'a>(InvariantOpaque<'a>); @@ -1045,6 +1083,8 @@ unsafe extern "C" { CanThrow: llvm::Bool, ) -> &'ll Value; + pub(crate) safe fn LLVMGetTypeKind(Ty: &Type) -> RawEnum<TypeKind>; + // Operations on integer types pub(crate) fn LLVMInt1TypeInContext(C: &Context) -> &Type; pub(crate) fn LLVMInt8TypeInContext(C: &Context) -> &Type; @@ -1196,7 +1236,7 @@ unsafe extern "C" { pub(crate) safe fn LLVMIsGlobalConstant(GlobalVar: &Value) -> Bool; pub(crate) safe fn LLVMSetGlobalConstant(GlobalVar: &Value, IsConstant: Bool); pub(crate) safe fn LLVMSetTailCall(CallInst: &Value, IsTailCall: Bool); - pub(crate) safe fn LLVMRustSetTailCallKind(CallInst: &Value, Kind: TailCallKind); + pub(crate) safe fn LLVMSetTailCallKind(CallInst: &Value, kind: TailCallKind); // Operations on attributes pub(crate) fn LLVMCreateStringAttribute( @@ -1840,9 +1880,6 @@ unsafe extern "C" { // Create and destroy contexts. 
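The `Bool` wrapper and the `ToLlvmBool` helper trait above keep the C ABI of LLVM's `typedef int LLVMBool` while forcing conversions to be explicit on the Rust side. As a standalone illustration of the same pattern (an editorial sketch, not part of this diff, written only against `std::ffi::c_int` rather than the rustc crates):

use std::ffi::c_int;

#[derive(Clone, Copy)]
#[repr(transparent)]
struct Bool {
    value: c_int,
}

impl Bool {
    const TRUE: Self = Self { value: 1 };
    const FALSE: Self = Self { value: 0 };

    const fn from_bool(b: bool) -> Self {
        if b { Self::TRUE } else { Self::FALSE }
    }

    // Follow the C convention: any nonzero value counts as true.
    fn is_true(self) -> bool {
        self.value != Self::FALSE.value
    }
}

trait ToLlvmBool: Copy {
    fn to_llvm_bool(self) -> Bool;
}

impl ToLlvmBool for bool {
    fn to_llvm_bool(self) -> Bool {
        Bool::from_bool(self)
    }
}

fn main() {
    assert!(true.to_llvm_bool().is_true());
    assert!(!false.to_llvm_bool().is_true());
}

`#[repr(transparent)]` is what guarantees the wrapper has exactly the size and ABI of the wrapped `c_int`, so it can replace the old `type Bool = c_int;` alias at the FFI boundary.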
pub(crate) fn LLVMRustContextCreate(shouldDiscardNames: bool) -> &'static mut Context; - /// See llvm::LLVMTypeKind::getTypeID. - pub(crate) fn LLVMRustGetTypeKind(Ty: &Type) -> TypeKind; - // Operations on all values pub(crate) fn LLVMRustGlobalAddMetadata<'a>( Val: &'a Value, @@ -1892,11 +1929,17 @@ unsafe extern "C" { C: &Context, effects: MemoryEffects, ) -> &Attribute; + /// ## Safety + /// - Each of `LowerWords` and `UpperWords` must point to an array that is + /// long enough to fully define an integer of size `NumBits`, i.e. each + /// pointer must point to `NumBits.div_ceil(64)` elements or more. + /// - The implementation will make its own copy of the pointed-to `u64` + /// values, so the pointers only need to outlive this function call. pub(crate) fn LLVMRustCreateRangeAttribute( C: &Context, - num_bits: c_uint, - lower_words: *const u64, - upper_words: *const u64, + NumBits: c_uint, + LowerWords: *const u64, + UpperWords: *const u64, ) -> &Attribute; // Operations on functions @@ -2437,7 +2480,7 @@ unsafe extern "C" { OutputObjFile: *const c_char, DebugInfoCompression: *const c_char, UseEmulatedTls: bool, - ArgsCstrBuff: *const c_char, + ArgsCstrBuff: *const c_uchar, // See "PTR_LEN_STR". ArgsCstrBuffLen: usize, UseWasmEH: bool, ) -> *mut TargetMachine; @@ -2504,19 +2547,6 @@ unsafe extern "C" { pub(crate) fn LLVMRustSetNormalizedTarget(M: &Module, triple: *const c_char); pub(crate) fn LLVMRustRunRestrictionPass(M: &Module, syms: *const *const c_char, len: size_t); - pub(crate) fn LLVMRustOpenArchive(path: *const c_char) -> Option<&'static mut Archive>; - pub(crate) fn LLVMRustArchiveIteratorNew(AR: &Archive) -> &mut ArchiveIterator<'_>; - pub(crate) fn LLVMRustArchiveIteratorNext<'a>( - AIR: &ArchiveIterator<'a>, - ) -> Option<&'a mut ArchiveChild<'a>>; - pub(crate) fn LLVMRustArchiveChildName( - ACR: &ArchiveChild<'_>, - size: &mut size_t, - ) -> *const c_char; - pub(crate) fn LLVMRustArchiveChildFree<'a>(ACR: &'a mut ArchiveChild<'a>); - pub(crate) fn LLVMRustArchiveIteratorFree<'a>(AIR: &'a mut ArchiveIterator<'a>); - pub(crate) fn LLVMRustDestroyArchive(AR: &'static mut Archive); - pub(crate) fn LLVMRustWriteTwineToString(T: &Twine, s: &RustString); pub(crate) fn LLVMRustUnpackOptimizationDiagnostic<'a>( @@ -2554,21 +2584,6 @@ unsafe extern "C" { num_ranges: &mut usize, ) -> bool; - pub(crate) fn LLVMRustWriteArchive( - Dst: *const c_char, - NumMembers: size_t, - Members: *const &RustArchiveMember<'_>, - WriteSymbtab: bool, - Kind: ArchiveKind, - isEC: bool, - ) -> LLVMRustResult; - pub(crate) fn LLVMRustArchiveMemberNew<'a>( - Filename: *const c_char, - Name: *const c_char, - Child: Option<&ArchiveChild<'a>>, - ) -> &'a mut RustArchiveMember<'a>; - pub(crate) fn LLVMRustArchiveMemberFree<'a>(Member: &'a mut RustArchiveMember<'a>); - pub(crate) fn LLVMRustSetDataLayoutFromTargetMachine<'a>(M: &'a Module, TM: &'a TargetMachine); pub(crate) fn LLVMRustPositionBuilderPastAllocas<'a>(B: &Builder<'a>, Fn: &'a Value); diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs index 154ba4fd690..d6974e22c85 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs @@ -3,7 +3,6 @@ use std::ffi::{CStr, CString}; use std::num::NonZero; use std::ptr; -use std::str::FromStr; use std::string::FromUtf8Error; use libc::c_uint; @@ -16,7 +15,6 @@ pub(crate) use self::MetadataType::*; pub(crate) use self::ffi::*; use crate::common::AsCCharPtr; -pub(crate) mod archive_ro; pub(crate) mod diagnostic; 
pub(crate) mod enzyme_ffi; mod ffi; @@ -42,32 +40,6 @@ pub(crate) fn AddFunctionAttributes<'ll>( } } -pub(crate) fn HasAttributeAtIndex<'ll>( - llfn: &'ll Value, - idx: AttributePlace, - kind: AttributeKind, -) -> bool { - unsafe { LLVMRustHasAttributeAtIndex(llfn, idx.as_uint(), kind) } -} - -pub(crate) fn HasStringAttribute<'ll>(llfn: &'ll Value, name: &str) -> bool { - unsafe { LLVMRustHasFnAttribute(llfn, name.as_c_char_ptr(), name.len()) } -} - -pub(crate) fn RemoveStringAttrFromFn<'ll>(llfn: &'ll Value, name: &str) { - unsafe { LLVMRustRemoveFnAttribute(llfn, name.as_c_char_ptr(), name.len()) } -} - -pub(crate) fn RemoveRustEnumAttributeAtIndex( - llfn: &Value, - place: AttributePlace, - kind: AttributeKind, -) { - unsafe { - LLVMRustRemoveEnumAttributeAtIndex(llfn, place.as_uint(), kind); - } -} - pub(crate) fn AddCallSiteAttributes<'ll>( callsite: &'ll Value, idx: AttributePlace, @@ -140,16 +112,26 @@ pub(crate) fn CreateAllocKindAttr(llcx: &Context, kind_arg: AllocKindFlags) -> & pub(crate) fn CreateRangeAttr(llcx: &Context, size: Size, range: WrappingRange) -> &Attribute { let lower = range.start; + // LLVM treats the upper bound as exclusive, but allows wrapping. let upper = range.end.wrapping_add(1); - let lower_words = [lower as u64, (lower >> 64) as u64]; - let upper_words = [upper as u64, (upper >> 64) as u64]; + + // Pass each `u128` endpoint value as a `[u64; 2]` array, least-significant part first. + let as_u64_array = |x: u128| [x as u64, (x >> 64) as u64]; + let lower_words: [u64; 2] = as_u64_array(lower); + let upper_words: [u64; 2] = as_u64_array(upper); + + // To ensure that LLVM doesn't try to read beyond the `[u64; 2]` arrays, + // we must explicitly check that `size_bits` does not exceed 128. + let size_bits = size.bits(); + assert!(size_bits <= 128); + // More robust assertions that are redundant with `size_bits <= 128` and + // should be optimized away. 
+ assert!(size_bits.div_ceil(64) <= u64::try_from(lower_words.len()).unwrap()); + assert!(size_bits.div_ceil(64) <= u64::try_from(upper_words.len()).unwrap()); + let size_bits = c_uint::try_from(size_bits).unwrap(); + unsafe { - LLVMRustCreateRangeAttribute( - llcx, - size.bits().try_into().unwrap(), - lower_words.as_ptr(), - upper_words.as_ptr(), - ) + LLVMRustCreateRangeAttribute(llcx, size_bits, lower_words.as_ptr(), upper_words.as_ptr()) } } @@ -178,21 +160,6 @@ pub(crate) enum CodeGenOptSize { CodeGenOptSizeAggressive = 2, } -impl FromStr for ArchiveKind { - type Err = (); - - fn from_str(s: &str) -> Result<Self, Self::Err> { - match s { - "gnu" => Ok(ArchiveKind::K_GNU), - "bsd" => Ok(ArchiveKind::K_BSD), - "darwin" => Ok(ArchiveKind::K_DARWIN), - "coff" => Ok(ArchiveKind::K_COFF), - "aix_big" => Ok(ArchiveKind::K_AIXBIG), - _ => Err(()), - } - } -} - pub(crate) fn SetInstructionCallConv(instr: &Value, cc: CallConv) { unsafe { LLVMSetInstructionCallConv(instr, cc as c_uint); @@ -258,7 +225,7 @@ pub(crate) fn set_initializer(llglobal: &Value, constant_val: &Value) { } pub(crate) fn set_global_constant(llglobal: &Value, is_constant: bool) { - LLVMSetGlobalConstant(llglobal, if is_constant { ffi::True } else { ffi::False }); + LLVMSetGlobalConstant(llglobal, is_constant.to_llvm_bool()); } pub(crate) fn get_linkage(llglobal: &Value) -> Linkage { @@ -272,7 +239,7 @@ pub(crate) fn set_linkage(llglobal: &Value, linkage: Linkage) { } pub(crate) fn is_declaration(llglobal: &Value) -> bool { - unsafe { LLVMIsDeclaration(llglobal) == ffi::True } + unsafe { LLVMIsDeclaration(llglobal) }.is_true() } pub(crate) fn get_visibility(llglobal: &Value) -> Visibility { diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 28d2100f478..d927ffd78c2 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -26,7 +26,7 @@ static INIT: Once = Once::new(); pub(crate) fn init(sess: &Session) { unsafe { // Before we touch LLVM, make sure that multithreading is enabled. - if llvm::LLVMIsMultithreaded() != 1 { + if !llvm::LLVMIsMultithreaded().is_true() { bug!("LLVM compiled without support for threads"); } INIT.call_once(|| { @@ -277,8 +277,9 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option<LLVMFea { None } + ("loongarch32" | "loongarch64", "32s") if get_version().0 < 21 => None, // Filter out features that are not supported by the current LLVM version - ("riscv32" | "riscv64", "zacas") if get_version().0 < 20 => None, + ("riscv32" | "riscv64", "zacas" | "rva23u64" | "supm") if get_version().0 < 20 => None, ( "s390x", "message-security-assist-extension12" diff --git a/compiler/rustc_codegen_llvm/src/mono_item.rs b/compiler/rustc_codegen_llvm/src/mono_item.rs index f9edaded60d..52eefe2d4d2 100644 --- a/compiler/rustc_codegen_llvm/src/mono_item.rs +++ b/compiler/rustc_codegen_llvm/src/mono_item.rs @@ -1,8 +1,9 @@ use rustc_codegen_ssa::traits::*; +use rustc_hir::attrs::Linkage; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_middle::bug; -use rustc_middle::mir::mono::{Linkage, Visibility}; +use rustc_middle::mir::mono::Visibility; use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv, LayoutOf}; use rustc_middle::ty::{self, Instance, TypeVisitableExt}; use rustc_session::config::CrateType; @@ -132,7 +133,7 @@ impl CodegenCx<'_, '_> { // Thread-local variables generally don't support copy relocations. 
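To make the `LLVMRustCreateRangeAttribute` packing above explicit: each 128-bit endpoint is passed as two little-endian `u64` words, and the assertions guarantee LLVM never reads more words than the arrays hold. A self-contained sketch of that packing (editorial, not part of this diff):

fn as_u64_array(x: u128) -> [u64; 2] {
    // Least-significant 64 bits first, then the most-significant 64 bits.
    [x as u64, (x >> 64) as u64]
}

fn main() {
    let endpoint: u128 = (7u128 << 64) | 42;
    assert_eq!(as_u64_array(endpoint), [42, 7]);

    // LLVM reads `size_bits.div_ceil(64)` words from each array, so a 2-word
    // array is only enough for integer types of at most 128 bits.
    let size_bits: u64 = 128;
    assert!(size_bits.div_ceil(64) as usize <= as_u64_array(endpoint).len());
}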
let is_thread_local_var = llvm::LLVMIsAGlobalVariable(llval) - .is_some_and(|v| llvm::LLVMIsThreadLocal(v) == llvm::True); + .is_some_and(|v| llvm::LLVMIsThreadLocal(v).is_true()); if is_thread_local_var { return false; } diff --git a/compiler/rustc_codegen_llvm/src/type_.rs b/compiler/rustc_codegen_llvm/src/type_.rs index 89365503138..9ecaf5f24fe 100644 --- a/compiler/rustc_codegen_llvm/src/type_.rs +++ b/compiler/rustc_codegen_llvm/src/type_.rs @@ -15,7 +15,7 @@ use rustc_target::callconv::{CastTarget, FnAbi}; use crate::abi::{FnAbiLlvmExt, LlvmType}; use crate::context::{CodegenCx, GenericCx, SCx}; pub(crate) use crate::llvm::Type; -use crate::llvm::{Bool, False, Metadata, True}; +use crate::llvm::{FALSE, Metadata, TRUE, ToLlvmBool}; use crate::type_of::LayoutLlvmExt; use crate::value::Value; use crate::{common, llvm}; @@ -53,7 +53,9 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { } pub(crate) fn set_struct_body(&self, ty: &'ll Type, els: &[&'ll Type], packed: bool) { - unsafe { llvm::LLVMStructSetBody(ty, els.as_ptr(), els.len() as c_uint, packed as Bool) } + unsafe { + llvm::LLVMStructSetBody(ty, els.as_ptr(), els.len() as c_uint, packed.to_llvm_bool()) + } } pub(crate) fn type_void(&self) -> &'ll Type { unsafe { llvm::LLVMVoidTypeInContext(self.llcx()) } @@ -139,7 +141,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { } pub(crate) fn type_variadic_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type { - unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, True) } + unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, TRUE) } } pub(crate) fn type_i1(&self) -> &'ll Type { @@ -152,7 +154,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { self.llcx(), els.as_ptr(), els.len() as c_uint, - packed as Bool, + packed.to_llvm_bool(), ) } } @@ -200,11 +202,11 @@ impl<'ll, CX: Borrow<SCx<'ll>>> BaseTypeCodegenMethods for GenericCx<'ll, CX> { } fn type_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type { - unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, False) } + unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, FALSE) } } fn type_kind(&self, ty: &'ll Type) -> TypeKind { - unsafe { llvm::LLVMRustGetTypeKind(ty).to_generic() } + llvm::LLVMGetTypeKind(ty).to_rust().to_generic() } fn type_ptr(&self) -> &'ll Type { diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml index 30956fd1855..6b989a96488 100644 --- a/compiler/rustc_codegen_ssa/Cargo.toml +++ b/compiler/rustc_codegen_ssa/Cargo.toml @@ -6,12 +6,12 @@ edition = "2024" [dependencies] # tidy-alphabetical-start ar_archive_writer = "0.4.2" -bitflags = "2.4.1" +bitflags.workspace = true bstr = "1.11.3" # `cc` updates often break things, so we pin it here. Cargo enforces "max 1 semver-compat version # per crate", so if you change this, you need to also change it in `rustc_llvm`. 
cc = "=1.2.16" -itertools = "0.12" +itertools.workspace = true pathdiff = "0.2.0" regex = "1.4" rustc_abi = { path = "../rustc_abi" } @@ -26,6 +26,7 @@ rustc_hashes = { path = "../rustc_hashes" } rustc_hir = { path = "../rustc_hir" } rustc_incremental = { path = "../rustc_incremental" } rustc_index = { path = "../rustc_index" } +rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } rustc_metadata = { path = "../rustc_metadata" } rustc_middle = { path = "../rustc_middle" } @@ -39,9 +40,9 @@ rustc_trait_selection = { path = "../rustc_trait_selection" } serde_json = "1.0.59" smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tempfile = "3.2" -thin-vec = "0.2.12" +thin-vec.workspace = true thorin-dwp = "0.9" -tracing = "0.1" +tracing.workspace = true wasm-encoder = "0.219" # tidy-alphabetical-end diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index b6cfea88363..44b9941691a 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -171,9 +171,6 @@ codegen_ssa_invalid_monomorphization_unsupported_symbol = invalid monomorphizati codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size = invalid monomorphization of `{$name}` intrinsic: unsupported {$symbol} from `{$in_ty}` with element `{$in_elem}` of size `{$size}` to `{$ret_ty}` -codegen_ssa_invalid_no_sanitize = invalid argument for `no_sanitize` - .note = expected one of: `address`, `cfi`, `hwaddress`, `kcfi`, `memory`, `memtag`, `shadow-call-stack`, or `thread` - codegen_ssa_invalid_windows_subsystem = invalid windows subsystem `{$subsystem}`, only `windows` and `console` are allowed codegen_ssa_ld64_unimplemented_modifier = `as-needed` modifier not implemented yet for ld64 diff --git a/compiler/rustc_codegen_ssa/src/back/apple.rs b/compiler/rustc_codegen_ssa/src/back/apple.rs index 2274450e20e..b1d646d9265 100644 --- a/compiler/rustc_codegen_ssa/src/back/apple.rs +++ b/compiler/rustc_codegen_ssa/src/back/apple.rs @@ -164,7 +164,7 @@ pub(super) fn get_sdk_root(sess: &Session) -> Option<PathBuf> { // // Note that when cross-compiling from e.g. Linux, the `xcrun` binary may sometimes be provided // as a shim by a cross-compilation helper tool. It usually isn't, but we still try nonetheless. - match xcrun_show_sdk_path(sdk_name, sess.verbose_internals()) { + match xcrun_show_sdk_path(sdk_name, false) { Ok((path, stderr)) => { // Emit extra stderr, such as if `-verbose` was passed, or if `xcrun` emitted a warning. 
if !stderr.is_empty() { diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index 4ebe59dc2a7..19c919c0e4e 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -14,11 +14,13 @@ use itertools::Itertools; use regex::Regex; use rustc_arena::TypedArena; use rustc_ast::CRATE_NODE_ID; +use rustc_attr_parsing::{ShouldEmit, eval_config_entry}; use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::memmap::Mmap; use rustc_data_structures::temp_dir::MaybeTempDir; use rustc_errors::{DiagCtxtHandle, LintDiagnostic}; use rustc_fs_util::{TempDirBuilder, fix_windows_verbatim_for_gcc, try_canonicalize}; +use rustc_hir::attrs::NativeLibKind; use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc_macros::LintDiagnostic; use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file}; @@ -38,7 +40,6 @@ use rustc_session::config::{ use rustc_session::lint::builtin::LINKER_MESSAGES; use rustc_session::output::{check_file_is_writeable, invalid_output_for_target, out_filename}; use rustc_session::search_paths::PathKind; -use rustc_session::utils::NativeLibKind; /// For all the linkers we support, and information they might /// need out of the shared crate context before we get rid of it. use rustc_session::{Session, filesearch}; @@ -2435,6 +2436,13 @@ fn linker_with_args( // Passed after compiler-generated options to support manual overriding when necessary. add_user_defined_link_args(cmd, sess); + // ------------ Builtin configurable linker scripts ------------ + // The user's link args should be able to overwrite symbols in the compiler's + // linker script that were weakly defined (i.e. defined with `PROVIDE()`). For this + // to work correctly, the user needs to be able to specify linker arguments like + // `--defsym` and `--script` *before* any builtin linker scripts are evaluated. + add_link_script(cmd, sess, tmpdir, crate_type); + // ------------ Object code and libraries, order-dependent ------------ // Post-link CRT objects. 
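The hunk above moves `add_link_script` so that user-supplied linker arguments are emitted before the compiler's builtin linker script; `--defsym` and `--script` can then override symbols the script only defines weakly via `PROVIDE()`. A minimal sketch of that ordering rule (editorial, not part of this diff; the function name and script file name are made up for illustration):

fn build_link_args(user_args: &[&str], builtin_script: &str) -> Vec<String> {
    let mut args = Vec::new();
    // User arguments (e.g. `--defsym` or an explicit `--script`) go first ...
    args.extend(user_args.iter().map(|s| s.to_string()));
    // ... so that symbols the builtin script only defines weakly via PROVIDE()
    // can still be overridden by the user.
    args.push(format!("--script={builtin_script}"));
    args
}

fn main() {
    let args = build_link_args(&["--defsym=__stack_start=0x2000"], "rustc-generated.ld");
    assert_eq!(args[0], "--defsym=__stack_start=0x2000");
    assert_eq!(args.last().unwrap(), "--script=rustc-generated.ld");
}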
@@ -2469,8 +2477,6 @@ fn add_order_independent_options( let apple_sdk_root = add_apple_sdk(cmd, sess, flavor); - add_link_script(cmd, sess, tmpdir, crate_type); - if sess.target.os == "fuchsia" && crate_type == CrateType::Executable && !matches!(flavor, LinkerFlavor::Gnu(Cc::Yes, _)) @@ -3014,7 +3020,9 @@ fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) { fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool { match lib.cfg { - Some(ref cfg) => rustc_attr_parsing::cfg_matches(cfg, sess, CRATE_NODE_ID, None), + Some(ref cfg) => { + eval_config_entry(sess, cfg, CRATE_NODE_ID, None, ShouldEmit::ErrorsAndLints).as_bool() + } None => true, } } diff --git a/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs b/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs index b9e0c957363..9f42991d4c0 100644 --- a/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs +++ b/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs @@ -7,9 +7,9 @@ use rustc_data_structures::base_n::{CASE_INSENSITIVE, ToBaseN}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_data_structures::stable_hasher::StableHasher; use rustc_hashes::Hash128; +use rustc_hir::attrs::NativeLibKind; use rustc_session::Session; use rustc_session::cstore::DllImport; -use rustc_session::utils::NativeLibKind; use rustc_span::Symbol; use crate::back::archive::ImportLibraryItem; @@ -307,11 +307,14 @@ fn create_elf_raw_dylib_stub(sess: &Session, soname: &str, symbols: &[DllImport] stub.reserve_section_headers(); stub.reserve_dynsym(); stub.reserve_dynstr(); + let verdef_count = 1 + vers.len(); + let mut dynamic_entries = 2; // DT_SONAME, DT_NULL if !vers.is_empty() { stub.reserve_gnu_versym(); - stub.reserve_gnu_verdef(1 + vers.len(), 1 + vers.len()); + stub.reserve_gnu_verdef(verdef_count, verdef_count); + dynamic_entries += 1; // DT_VERDEFNUM } - stub.reserve_dynamic(2); // DT_SONAME, DT_NULL + stub.reserve_dynamic(dynamic_entries); // First write the ELF header with the arch information. let e_machine = match (arch, sub_arch) { @@ -443,9 +446,13 @@ fn create_elf_raw_dylib_stub(sess: &Session, soname: &str, symbols: &[DllImport] // .dynamic // the DT_SONAME will be used by the linker to populate DT_NEEDED // which the loader uses to find the library. - // DT_NULL terminates the .dynamic table. stub.write_align_dynamic(); stub.write_dynamic_string(elf::DT_SONAME, soname); + // LSB section "2.7. Symbol Versioning" requires `DT_VERDEFNUM` to be reliable. + if verdef_count > 1 { + stub.write_dynamic(elf::DT_VERDEFNUM, verdef_count as u64); + } + // DT_NULL terminates the .dynamic table. stub.write_dynamic(elf::DT_NULL, 0); stub_buf diff --git a/compiler/rustc_codegen_ssa/src/back/metadata.rs b/compiler/rustc_codegen_ssa/src/back/metadata.rs index bf38c02e908..10aaadd5688 100644 --- a/compiler/rustc_codegen_ssa/src/back/metadata.rs +++ b/compiler/rustc_codegen_ssa/src/back/metadata.rs @@ -329,12 +329,18 @@ pub(super) fn elf_e_flags(architecture: Architecture, sess: &Session) -> u32 { // Source: https://github.com/riscv-non-isa/riscv-elf-psabi-doc/blob/079772828bd10933d34121117a222b4cc0ee2200/riscv-elf.adoc let mut e_flags: u32 = 0x0; - // Check if compressed is enabled - // `unstable_target_features` is used here because "c" is gated behind riscv_target_feature. - if sess.unstable_target_features.contains(&sym::c) { + // Check if compression is enabled + // `unstable_target_features` is used here because "zca" is gated behind riscv_target_feature. 
+ if sess.unstable_target_features.contains(&sym::zca) { e_flags |= elf::EF_RISCV_RVC; } + // Check if RVTSO is enabled + // `unstable_target_features` is used here because "ztso" is gated behind riscv_target_feature. + if sess.unstable_target_features.contains(&sym::ztso) { + e_flags |= elf::EF_RISCV_TSO; + } + // Set the appropriate flag based on ABI // This needs to match LLVM `RISCVELFStreamer.cpp` match &*sess.target.llvm_abiname { diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs index 7e124f65324..13419bcb22c 100644 --- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs +++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs @@ -306,7 +306,8 @@ fn exported_generic_symbols_provider_local<'tcx>( let mut symbols: Vec<_> = vec![]; if tcx.local_crate_exports_generics() { - use rustc_middle::mir::mono::{Linkage, MonoItem, Visibility}; + use rustc_hir::attrs::Linkage; + use rustc_middle::mir::mono::{MonoItem, Visibility}; use rustc_middle::ty::InstanceKind; // Normally, we require that shared monomorphizations are not hidden, diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index f460c900c08..92582dcc399 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -1,5 +1,6 @@ use std::assert_matches::assert_matches; use std::marker::PhantomData; +use std::panic::AssertUnwindSafe; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::sync::mpsc::{Receiver, Sender, channel}; @@ -7,7 +8,6 @@ use std::{fs, io, mem, str, thread}; use rustc_abi::Size; use rustc_ast::attr; -use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::jobserver::{self, Acquired}; use rustc_data_structures::memmap::Mmap; @@ -15,7 +15,7 @@ use rustc_data_structures::profiling::{SelfProfilerRef, VerboseTimingGuard}; use rustc_errors::emitter::Emitter; use rustc_errors::translation::Translator; use rustc_errors::{ - Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, Level, MultiSpan, Style, + Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalErrorMarker, Level, MultiSpan, Style, Suggestions, }; use rustc_fs_util::link_or_copy; @@ -38,7 +38,7 @@ use tracing::debug; use super::link::{self, ensure_removed}; use super::lto::{self, SerializedModule}; use crate::back::lto::check_lto_allowed; -use crate::errors::{AutodiffWithoutLto, ErrorCreatingRemarkDir}; +use crate::errors::ErrorCreatingRemarkDir; use crate::traits::*; use crate::{ CachedModuleCodegen, CodegenResults, CompiledModule, CrateInfo, ModuleCodegen, ModuleKind, @@ -76,12 +76,9 @@ pub struct ModuleConfig { /// Names of additional optimization passes to run. pub passes: Vec<String>, /// Some(level) to optimize at a certain level, or None to run - /// absolutely no optimizations (used for the metadata module). + /// absolutely no optimizations (used for the allocator module). pub opt_level: Option<config::OptLevel>, - /// Some(level) to optimize binary size, or None to not affect program size. - pub opt_size: Option<config::OptLevel>, - pub pgo_gen: SwitchWithOptPath, pub pgo_use: Option<PathBuf>, pub pgo_sample_use: Option<PathBuf>, @@ -102,7 +99,6 @@ pub struct ModuleConfig { pub emit_obj: EmitObj, pub emit_thin_lto: bool, pub emit_thin_lto_summary: bool, - pub bc_cmdline: String, // Miscellaneous flags. These are mostly copied from command-line // options. 
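For the `elf_e_flags` change above: the RISC-V ELF header flags are now derived from the `zca` and `ztso` target features, which map to the compressed-instruction and total-store-ordering bits respectively. A standalone sketch (editorial, not part of this diff; the flag values are those defined by the RISC-V ELF psABI, and the feature set is reduced to a plain slice):

const EF_RISCV_RVC: u32 = 0x0001; // compressed instructions, implied by `zca`
const EF_RISCV_TSO: u32 = 0x0010; // total store ordering, implied by `ztso`

fn riscv_e_flags(enabled_features: &[&str]) -> u32 {
    let mut e_flags = 0;
    if enabled_features.contains(&"zca") {
        e_flags |= EF_RISCV_RVC;
    }
    if enabled_features.contains(&"ztso") {
        e_flags |= EF_RISCV_TSO;
    }
    e_flags
}

fn main() {
    assert_eq!(riscv_e_flags(&["zca"]), EF_RISCV_RVC);
    assert_eq!(riscv_e_flags(&["zca", "ztso"]), EF_RISCV_RVC | EF_RISCV_TSO);
}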
@@ -110,7 +106,6 @@ pub struct ModuleConfig { pub lint_llvm_ir: bool, pub no_prepopulate_passes: bool, pub no_builtins: bool, - pub time_module: bool, pub vectorize_loop: bool, pub vectorize_slp: bool, pub merge_functions: bool, @@ -160,7 +155,6 @@ impl ModuleConfig { passes: if_regular!(sess.opts.cg.passes.clone(), vec![]), opt_level: opt_level_and_size, - opt_size: opt_level_and_size, pgo_gen: if_regular!( sess.opts.cg.profile_generate.clone(), @@ -210,17 +204,12 @@ impl ModuleConfig { sess.opts.output_types.contains_key(&OutputType::ThinLinkBitcode), false ), - bc_cmdline: sess.target.bitcode_llvm_cmdline.to_string(), verify_llvm_ir: sess.verify_llvm_ir(), lint_llvm_ir: sess.opts.unstable_opts.lint_llvm_ir, no_prepopulate_passes: sess.opts.cg.no_prepopulate_passes, no_builtins: no_builtins || sess.target.no_builtins, - // Exclude metadata and allocator modules from time_passes output, - // since they throw off the "LLVM passes" measurement. - time_module: if_regular!(true, false), - // Copy what clang does by turning on loop vectorization at O2 and // slp vectorization at O3. vectorize_loop: !sess.opts.cg.no_vectorize_loops @@ -396,8 +385,7 @@ fn generate_thin_lto_work<B: ExtraBackendMethods>( each_linked_rlib_for_lto, needs_thin_lto, import_only_modules, - ) - .unwrap_or_else(|e| e.raise()); + ); lto_modules .into_iter() .map(|module| { @@ -443,7 +431,6 @@ pub(crate) fn start_async_codegen<B: ExtraBackendMethods>( backend: B, tcx: TyCtxt<'_>, target_cpu: String, - autodiff_items: &[AutoDiffItem], ) -> OngoingCodegen<B> { let (coordinator_send, coordinator_receive) = channel(); @@ -462,7 +449,6 @@ pub(crate) fn start_async_codegen<B: ExtraBackendMethods>( backend.clone(), tcx, &crate_info, - autodiff_items, shared_emitter, codegen_worker_send, coordinator_receive, @@ -717,7 +703,6 @@ pub(crate) enum WorkItem<B: WriteBackendMethods> { each_linked_rlib_for_lto: Vec<PathBuf>, needs_fat_lto: Vec<FatLtoInput<B>>, import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>, - autodiff: Vec<AutoDiffItem>, }, /// Performs thin-LTO on the given module. 
ThinLto(lto::ThinModule<B>), @@ -848,11 +833,11 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>( cgcx: &CodegenContext<B>, mut module: ModuleCodegen<B::Module>, module_config: &ModuleConfig, -) -> Result<WorkItemResult<B>, FatalError> { +) -> WorkItemResult<B> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); - B::optimize(cgcx, dcx, &mut module, module_config)?; + B::optimize(cgcx, dcx, &mut module, module_config); // After we've done the initial round of optimizations we need to // decide whether to synchronously codegen this module or ship it @@ -872,8 +857,8 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>( match lto_type { ComputedLtoType::No => { - let module = B::codegen(cgcx, module, module_config)?; - Ok(WorkItemResult::Finished(module)) + let module = B::codegen(cgcx, module, module_config); + WorkItemResult::Finished(module) } ComputedLtoType::Thin => { let (name, thin_buffer) = B::prepare_thin(module, false); @@ -882,7 +867,7 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>( panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e); }); } - Ok(WorkItemResult::NeedsThinLto(name, thin_buffer)) + WorkItemResult::NeedsThinLto(name, thin_buffer) } ComputedLtoType::Fat => match bitcode { Some(path) => { @@ -890,12 +875,12 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>( fs::write(&path, buffer.data()).unwrap_or_else(|e| { panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e); }); - Ok(WorkItemResult::NeedsFatLto(FatLtoInput::Serialized { + WorkItemResult::NeedsFatLto(FatLtoInput::Serialized { name, buffer: SerializedModule::Local(buffer), - })) + }) } - None => Ok(WorkItemResult::NeedsFatLto(FatLtoInput::InMemory(module))), + None => WorkItemResult::NeedsFatLto(FatLtoInput::InMemory(module)), }, } } @@ -990,9 +975,8 @@ fn execute_fat_lto_work_item<B: ExtraBackendMethods>( each_linked_rlib_for_lto: &[PathBuf], mut needs_fat_lto: Vec<FatLtoInput<B>>, import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>, - autodiff: Vec<AutoDiffItem>, module_config: &ModuleConfig, -) -> Result<WorkItemResult<B>, FatalError> { +) -> WorkItemResult<B> { for (module, wp) in import_only_modules { needs_fat_lto.push(FatLtoInput::Serialized { name: wp.cgu_name, buffer: module }) } @@ -1002,20 +986,19 @@ fn execute_fat_lto_work_item<B: ExtraBackendMethods>( exported_symbols_for_lto, each_linked_rlib_for_lto, needs_fat_lto, - autodiff, - )?; - let module = B::codegen(cgcx, module, module_config)?; - Ok(WorkItemResult::Finished(module)) + ); + let module = B::codegen(cgcx, module, module_config); + WorkItemResult::Finished(module) } fn execute_thin_lto_work_item<B: ExtraBackendMethods>( cgcx: &CodegenContext<B>, module: lto::ThinModule<B>, module_config: &ModuleConfig, -) -> Result<WorkItemResult<B>, FatalError> { - let module = B::optimize_thin(cgcx, module)?; - let module = B::codegen(cgcx, module, module_config)?; - Ok(WorkItemResult::Finished(module)) +) -> WorkItemResult<B> { + let module = B::optimize_thin(cgcx, module); + let module = B::codegen(cgcx, module, module_config); + WorkItemResult::Finished(module) } /// Messages sent to the coordinator. 
@@ -1094,7 +1077,6 @@ fn start_executing_work<B: ExtraBackendMethods>( backend: B, tcx: TyCtxt<'_>, crate_info: &CrateInfo, - autodiff_items: &[AutoDiffItem], shared_emitter: SharedEmitter, codegen_worker_send: Sender<CguMessage>, coordinator_receive: Receiver<Message<B>>, @@ -1104,7 +1086,6 @@ fn start_executing_work<B: ExtraBackendMethods>( ) -> thread::JoinHandle<Result<CompiledModules, ()>> { let coordinator_send = tx_to_llvm_workers; let sess = tcx.sess; - let autodiff_items = autodiff_items.to_vec(); let mut each_linked_rlib_for_lto = Vec::new(); let mut each_linked_rlib_file_for_lto = Vec::new(); @@ -1437,7 +1418,6 @@ fn start_executing_work<B: ExtraBackendMethods>( each_linked_rlib_for_lto: each_linked_rlib_file_for_lto, needs_fat_lto, import_only_modules, - autodiff: autodiff_items.clone(), }, 0, )); @@ -1445,11 +1425,6 @@ fn start_executing_work<B: ExtraBackendMethods>( helper.request_token(); } } else { - if !autodiff_items.is_empty() { - let dcx = cgcx.create_dcx(); - dcx.handle().emit_fatal(AutodiffWithoutLto {}); - } - for (work, cost) in generate_thin_lto_work( &cgcx, &exported_symbols_for_lto, @@ -1729,44 +1704,17 @@ fn spawn_work<'a, B: ExtraBackendMethods>( llvm_start_time: &mut Option<VerboseTimingGuard<'a>>, work: WorkItem<B>, ) { - if cgcx.config(work.module_kind()).time_module && llvm_start_time.is_none() { + if llvm_start_time.is_none() { *llvm_start_time = Some(cgcx.prof.verbose_generic_activity("LLVM_passes")); } let cgcx = cgcx.clone(); B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || { - // Set up a destructor which will fire off a message that we're done as - // we exit. - struct Bomb<B: ExtraBackendMethods> { - coordinator_send: Sender<Message<B>>, - result: Option<Result<WorkItemResult<B>, FatalError>>, - } - impl<B: ExtraBackendMethods> Drop for Bomb<B> { - fn drop(&mut self) { - let msg = match self.result.take() { - Some(Ok(result)) => Message::WorkItem::<B> { result: Ok(result) }, - Some(Err(FatalError)) => { - Message::WorkItem::<B> { result: Err(Some(WorkerFatalError)) } - } - None => Message::WorkItem::<B> { result: Err(None) }, - }; - drop(self.coordinator_send.send(msg)); - } - } - - let mut bomb = Bomb::<B> { coordinator_send, result: None }; - - // Execute the work itself, and if it finishes successfully then flag - // ourselves as a success as well. - // - // Note that we ignore any `FatalError` coming out of `execute_work_item`, - // as a diagnostic was already sent off to the main thread - just - // surface that there was an error in this worker. 
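The `spawn_work` change in this hunk (the replacement continues just below) drops the Drop-based `Bomb` in favour of `std::panic::catch_unwind`, classifying the panic payload to tell an already-reported fatal error apart from an unexpected panic. A self-contained sketch of that classification (editorial, not part of this diff; `FatalMarker` and `Outcome` are stand-ins for rustc's `FatalErrorMarker` and the coordinator messages):

use std::panic::{self, AssertUnwindSafe};

// Stand-in for rustc_errors::FatalErrorMarker: a fatal diagnostic was already
// emitted, and the panic only unwinds the worker thread.
struct FatalMarker;

enum Outcome {
    Finished(u32),
    WorkerFatalError,
    Panicked,
}

fn run_work(work: impl FnOnce() -> u32) -> Outcome {
    match panic::catch_unwind(AssertUnwindSafe(work)) {
        Ok(result) => Outcome::Finished(result),
        // The error was already reported; only surface that this worker failed.
        Err(payload) if payload.is::<FatalMarker>() => Outcome::WorkerFatalError,
        Err(_) => Outcome::Panicked,
    }
}

fn main() {
    assert!(matches!(run_work(|| 1 + 1), Outcome::Finished(2)));
    assert!(matches!(run_work(|| panic::panic_any(FatalMarker)), Outcome::WorkerFatalError));
}

Classifying the payload type rather than threading a sentinel return value through every work item means a worker that dies for any other reason is still reported, just without the fatal-error classification.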
- bomb.result = { + let result = std::panic::catch_unwind(AssertUnwindSafe(|| { let module_config = cgcx.config(work.module_kind()); - Some(match work { + match work { WorkItem::Optimize(m) => { let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_optimize", &*m.name); @@ -1777,14 +1725,13 @@ fn spawn_work<'a, B: ExtraBackendMethods>( "codegen_copy_artifacts_from_incr_cache", &*m.name, ); - Ok(execute_copy_from_cache_work_item(&cgcx, m, module_config)) + execute_copy_from_cache_work_item(&cgcx, m, module_config) } WorkItem::FatLto { exported_symbols_for_lto, each_linked_rlib_for_lto, needs_fat_lto, import_only_modules, - autodiff, } => { let _timer = cgcx .prof @@ -1795,7 +1742,6 @@ fn spawn_work<'a, B: ExtraBackendMethods>( &each_linked_rlib_for_lto, needs_fat_lto, import_only_modules, - autodiff, module_config, ) } @@ -1804,8 +1750,22 @@ fn spawn_work<'a, B: ExtraBackendMethods>( cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", m.name()); execute_thin_lto_work_item(&cgcx, m, module_config) } - }) + } + })); + + let msg = match result { + Ok(result) => Message::WorkItem::<B> { result: Ok(result) }, + + // We ignore any `FatalError` coming out of `execute_work_item`, as a + // diagnostic was already sent off to the main thread - just surface + // that there was an error in this worker. + Err(err) if err.is::<FatalErrorMarker>() => { + Message::WorkItem::<B> { result: Err(Some(WorkerFatalError)) } + } + + Err(_) => Message::WorkItem::<B> { result: Err(None) }, }; + drop(coordinator_send.send(msg)); }) .expect("failed to spawn work thread"); } @@ -1986,10 +1946,13 @@ impl<B: ExtraBackendMethods> Drop for Coordinator<B> { pub struct OngoingCodegen<B: ExtraBackendMethods> { pub backend: B, pub crate_info: CrateInfo, - pub codegen_worker_receive: Receiver<CguMessage>, - pub shared_emitter_main: SharedEmitterMain, pub output_filenames: Arc<OutputFilenames>, + // Field order below is intended to terminate the coordinator thread before two fields below + // drop and prematurely close channels used by coordinator thread. See `Coordinator`'s + // `Drop` implementation for more info. pub coordinator: Coordinator<B>, + pub codegen_worker_receive: Receiver<CguMessage>, + pub shared_emitter_main: SharedEmitterMain, } impl<B: ExtraBackendMethods> OngoingCodegen<B> { diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index b4556ced0b3..8abaf201aba 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -647,7 +647,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>( ) -> OngoingCodegen<B> { // Skip crate items and just output metadata in -Z no-codegen mode. if tcx.sess.opts.unstable_opts.no_codegen || !tcx.sess.opts.output_types.should_codegen() { - let ongoing_codegen = start_async_codegen(backend, tcx, target_cpu, &[]); + let ongoing_codegen = start_async_codegen(backend, tcx, target_cpu); ongoing_codegen.codegen_finished(tcx); @@ -665,8 +665,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>( // Run the monomorphization collector and partition the collected items into // codegen units. - let MonoItemPartitions { codegen_units, autodiff_items, .. } = - tcx.collect_and_partition_mono_items(()); + let MonoItemPartitions { codegen_units, .. } = tcx.collect_and_partition_mono_items(()); // Force all codegen_unit queries so they are already either red or green // when compile_codegen_unit accesses them. 
We are not able to re-execute @@ -679,34 +678,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>( } } - let ongoing_codegen = start_async_codegen(backend.clone(), tcx, target_cpu, autodiff_items); - - // Codegen an allocator shim, if necessary. - if let Some(kind) = allocator_kind_for_codegen(tcx) { - let llmod_id = - cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string(); - let module_llvm = tcx.sess.time("write_allocator_module", || { - backend.codegen_allocator( - tcx, - &llmod_id, - kind, - // If allocator_kind is Some then alloc_error_handler_kind must - // also be Some. - tcx.alloc_error_handler_kind(()).unwrap(), - ) - }); - - ongoing_codegen.wait_for_signal_to_codegen_item(); - ongoing_codegen.check_for_errors(tcx.sess); - - // These modules are generally cheap and won't throw off scheduling. - let cost = 0; - submit_codegened_module_to_llvm( - &ongoing_codegen.coordinator, - ModuleCodegen::new_allocator(llmod_id, module_llvm), - cost, - ); - } + let ongoing_codegen = start_async_codegen(backend.clone(), tcx, target_cpu); // For better throughput during parallel processing by LLVM, we used to sort // CGUs largest to smallest. This would lead to better thread utilization @@ -823,6 +795,35 @@ pub fn codegen_crate<B: ExtraBackendMethods>( } } + // Codegen an allocator shim, if necessary. + // Do this last to ensure the LLVM_passes timer doesn't start while no CGUs have been codegened + // yet for the backend to optimize. + if let Some(kind) = allocator_kind_for_codegen(tcx) { + let llmod_id = + cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string(); + let module_llvm = tcx.sess.time("write_allocator_module", || { + backend.codegen_allocator( + tcx, + &llmod_id, + kind, + // If allocator_kind is Some then alloc_error_handler_kind must + // also be Some. + tcx.alloc_error_handler_kind(()).unwrap(), + ) + }); + + ongoing_codegen.wait_for_signal_to_codegen_item(); + ongoing_codegen.check_for_errors(tcx.sess); + + // These modules are generally cheap and won't throw off scheduling. 
+ let cost = 0; + submit_codegened_module_to_llvm( + &ongoing_codegen.coordinator, + ModuleCodegen::new_allocator(llmod_id, module_llvm), + cost, + ); + } + ongoing_codegen.codegen_finished(tcx); // Since the main thread is sometimes blocked during codegen, we keep track @@ -857,7 +858,7 @@ pub fn is_call_from_compiler_builtins_to_upstream_monomorphization<'tcx>( instance: Instance<'tcx>, ) -> bool { fn is_llvm_intrinsic(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - if let Some(name) = tcx.codegen_fn_attrs(def_id).link_name { + if let Some(name) = tcx.codegen_fn_attrs(def_id).symbol_name { name.as_str().starts_with("llvm.") } else { false diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index 7f54a47327a..961bb788149 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -6,12 +6,10 @@ use rustc_ast::{LitKind, MetaItem, MetaItemInner, attr}; use rustc_hir::attrs::{AttributeKind, InlineAttr, InstructionSetAttr, UsedBy}; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; -use rustc_hir::weak_lang_items::WEAK_LANG_ITEMS; use rustc_hir::{self as hir, Attribute, LangItem, find_attr, lang_items}; use rustc_middle::middle::codegen_fn_attrs::{ CodegenFnAttrFlags, CodegenFnAttrs, PatchableFunctionEntry, }; -use rustc_middle::mir::mono::Linkage; use rustc_middle::query::Providers; use rustc_middle::span_bug; use rustc_middle::ty::{self as ty, TyCtxt}; @@ -26,31 +24,6 @@ use crate::target_features::{ check_target_feature_trait_unsafe, check_tied_features, from_target_feature_attr, }; -fn linkage_by_name(tcx: TyCtxt<'_>, def_id: LocalDefId, name: &str) -> Linkage { - use rustc_middle::mir::mono::Linkage::*; - - // Use the names from src/llvm/docs/LangRef.rst here. Most types are only - // applicable to variable declarations and may not really make sense for - // Rust code in the first place but allow them anyway and trust that the - // user knows what they're doing. Who knows, unanticipated use cases may pop - // up in the future. - // - // ghost, dllimport, dllexport and linkonce_odr_autohide are not supported - // and don't have to be, LLVM treats them as no-ops. - match name { - "available_externally" => AvailableExternally, - "common" => Common, - "extern_weak" => ExternalWeak, - "external" => External, - "internal" => Internal, - "linkonce" => LinkOnceAny, - "linkonce_odr" => LinkOnceODR, - "weak" => WeakAny, - "weak_odr" => WeakODR, - _ => tcx.dcx().span_fatal(tcx.def_span(def_id), "invalid linkage specified"), - } -} - /// In some cases, attributes are only valid on functions, but it's the `check_attr` /// pass that checks that they aren't used anywhere else, rather than this module. 
/// In these cases, we bail from performing further checks that are only meaningful for @@ -103,39 +76,6 @@ fn parse_instruction_set_attr(tcx: TyCtxt<'_>, attr: &Attribute) -> Option<Instr } } -// FIXME(jdonszelmann): remove when linkage becomes a parsed attr -fn parse_linkage_attr(tcx: TyCtxt<'_>, did: LocalDefId, attr: &Attribute) -> Option<Linkage> { - let val = attr.value_str()?; - let linkage = linkage_by_name(tcx, did, val.as_str()); - Some(linkage) -} - -// FIXME(jdonszelmann): remove when no_sanitize becomes a parsed attr -fn parse_no_sanitize_attr(tcx: TyCtxt<'_>, attr: &Attribute) -> Option<SanitizerSet> { - let list = attr.meta_item_list()?; - let mut sanitizer_set = SanitizerSet::empty(); - - for item in list.iter() { - match item.name() { - Some(sym::address) => { - sanitizer_set |= SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS - } - Some(sym::cfi) => sanitizer_set |= SanitizerSet::CFI, - Some(sym::kcfi) => sanitizer_set |= SanitizerSet::KCFI, - Some(sym::memory) => sanitizer_set |= SanitizerSet::MEMORY, - Some(sym::memtag) => sanitizer_set |= SanitizerSet::MEMTAG, - Some(sym::shadow_call_stack) => sanitizer_set |= SanitizerSet::SHADOWCALLSTACK, - Some(sym::thread) => sanitizer_set |= SanitizerSet::THREAD, - Some(sym::hwaddress) => sanitizer_set |= SanitizerSet::HWADDRESS, - _ => { - tcx.dcx().emit_err(errors::InvalidNoSanitize { span: item.span() }); - } - } - } - - Some(sanitizer_set) -} - // FIXME(jdonszelmann): remove when patchable_function_entry becomes a parsed attr fn parse_patchable_function_entry( tcx: TyCtxt<'_>, @@ -194,7 +134,7 @@ fn parse_patchable_function_entry( #[derive(Default)] struct InterestingAttributeDiagnosticSpans { link_ordinal: Option<Span>, - no_sanitize: Option<Span>, + sanitize: Option<Span>, inline: Option<Span>, no_mangle: Option<Span>, } @@ -210,20 +150,12 @@ fn process_builtin_attrs( let mut interesting_spans = InterestingAttributeDiagnosticSpans::default(); let rust_target_features = tcx.rust_target_features(LOCAL_CRATE); - // If our rustc version supports autodiff/enzyme, then we call our handler - // to check for any `#[rustc_autodiff(...)]` attributes. - // FIXME(jdonszelmann): merge with loop below - if cfg!(llvm_enzyme) { - let ad = autodiff_attrs(tcx, did.into()); - codegen_fn_attrs.autodiff_item = ad; - } - for attr in attrs.iter() { if let hir::Attribute::Parsed(p) = attr { match p { AttributeKind::Cold(_) => codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD, AttributeKind::ExportName { name, .. } => { - codegen_fn_attrs.export_name = Some(*name) + codegen_fn_attrs.symbol_name = Some(*name) } AttributeKind::Inline(inline, span) => { codegen_fn_attrs.inline = *inline; @@ -231,7 +163,13 @@ fn process_builtin_attrs( } AttributeKind::Naked(_) => codegen_fn_attrs.flags |= CodegenFnAttrFlags::NAKED, AttributeKind::Align { align, .. } => codegen_fn_attrs.alignment = Some(*align), - AttributeKind::LinkName { name, .. } => codegen_fn_attrs.link_name = Some(*name), + AttributeKind::LinkName { name, .. 
} => { + // FIXME Remove check for foreign functions once #[link_name] on non-foreign + // functions is a hard error + if tcx.is_foreign_item(did) { + codegen_fn_attrs.symbol_name = Some(*name); + } + } AttributeKind::LinkOrdinal { ordinal, span } => { codegen_fn_attrs.link_ordinal = Some(*ordinal); interesting_spans.link_ordinal = Some(*span); @@ -255,7 +193,7 @@ fn process_builtin_attrs( } } AttributeKind::Optimize(optimize, _) => codegen_fn_attrs.optimize = *optimize, - AttributeKind::TargetFeature(features, attr_span) => { + AttributeKind::TargetFeature { features, attr_span, was_forced } => { let Some(sig) = tcx.hir_node_by_def_id(did).fn_sig() else { tcx.dcx().span_delayed_bug(*attr_span, "target_feature applied to non-fn"); continue; @@ -263,7 +201,7 @@ fn process_builtin_attrs( let safe_target_features = matches!(sig.header.safety, hir::HeaderSafety::SafeTargetFeatures); codegen_fn_attrs.safe_target_features = safe_target_features; - if safe_target_features { + if safe_target_features && !was_forced { if tcx.sess.target.is_like_wasm || tcx.sess.opts.actually_rustdoc { // The `#[target_feature]` attribute is allowed on // WebAssembly targets on all functions. Prior to stabilizing @@ -294,6 +232,7 @@ fn process_builtin_attrs( tcx, did, features, + *was_forced, rust_target_features, &mut codegen_fn_attrs.target_features, ); @@ -332,6 +271,31 @@ fn process_builtin_attrs( AttributeKind::StdInternalSymbol(_) => { codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL } + AttributeKind::Linkage(linkage, _) => { + let linkage = Some(*linkage); + + if tcx.is_foreign_item(did) { + codegen_fn_attrs.import_linkage = linkage; + + if tcx.is_mutable_static(did.into()) { + let mut diag = tcx.dcx().struct_span_err( + attr.span(), + "extern mutable statics are not allowed with `#[linkage]`", + ); + diag.note( + "marking the extern static mutable would allow changing which \ + symbol the static references rather than make the target of the \ + symbol mutable", + ); + diag.emit(); + } + } else { + codegen_fn_attrs.linkage = linkage; + } + } + AttributeKind::Sanitize { span, .. } => { + interesting_spans.sanitize = Some(*span); + } _ => {} } } @@ -349,33 +313,6 @@ fn process_builtin_attrs( codegen_fn_attrs.flags |= CodegenFnAttrFlags::ALLOCATOR_ZEROED } sym::thread_local => codegen_fn_attrs.flags |= CodegenFnAttrFlags::THREAD_LOCAL, - sym::linkage => { - let linkage = parse_linkage_attr(tcx, did, attr); - - if tcx.is_foreign_item(did) { - codegen_fn_attrs.import_linkage = linkage; - - if tcx.is_mutable_static(did.into()) { - let mut diag = tcx.dcx().struct_span_err( - attr.span(), - "extern mutable statics are not allowed with `#[linkage]`", - ); - diag.note( - "marking the extern static mutable would allow changing which \ - symbol the static references rather than make the target of the \ - symbol mutable", - ); - diag.emit(); - } - } else { - codegen_fn_attrs.linkage = linkage; - } - } - sym::no_sanitize => { - interesting_spans.no_sanitize = Some(attr.span()); - codegen_fn_attrs.no_sanitize |= - parse_no_sanitize_attr(tcx, attr).unwrap_or_default(); - } sym::instruction_set => { codegen_fn_attrs.instruction_set = parse_instruction_set_attr(tcx, attr) } @@ -399,6 +336,8 @@ fn apply_overrides(tcx: TyCtxt<'_>, did: LocalDefId, codegen_fn_attrs: &mut Code codegen_fn_attrs.alignment = Ord::max(codegen_fn_attrs.alignment, tcx.sess.opts.unstable_opts.min_function_alignment); + // Compute the disabled sanitizers. 
+ codegen_fn_attrs.no_sanitize |= tcx.disabled_sanitizers_for(did); // On trait methods, inherit the `#[align]` of the trait's method prototype. codegen_fn_attrs.alignment = Ord::max(codegen_fn_attrs.alignment, tcx.inherited_align(did)); @@ -443,6 +382,29 @@ fn apply_overrides(tcx: TyCtxt<'_>, did: LocalDefId, codegen_fn_attrs: &mut Code if tcx.should_inherit_track_caller(did) { codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER; } + + // Foreign items by default use no mangling for their symbol name. + if tcx.is_foreign_item(did) { + codegen_fn_attrs.flags |= CodegenFnAttrFlags::FOREIGN_ITEM; + + // There's a few exceptions to this rule though: + if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) { + // * `#[rustc_std_internal_symbol]` mangles the symbol name in a special way + // both for exports and imports through foreign items. This is handled further, + // during symbol mangling logic. + } else if codegen_fn_attrs.symbol_name.is_some() { + // * This can be overridden with the `#[link_name]` attribute + } else { + // NOTE: there's one more exception that we cannot apply here. On wasm, + // some items cannot be `no_mangle`. + // However, we don't have enough information here to determine that. + // As such, no_mangle foreign items on wasm that have the same defid as some + // import will *still* be mangled despite this. + // + // if none of the exceptions apply; apply no_mangle + codegen_fn_attrs.flags |= CodegenFnAttrFlags::NO_MANGLE; + } + } } fn check_result( @@ -475,17 +437,17 @@ fn check_result( if !codegen_fn_attrs.no_sanitize.is_empty() && codegen_fn_attrs.inline.always() && let (Some(no_sanitize_span), Some(inline_span)) = - (interesting_spans.no_sanitize, interesting_spans.inline) + (interesting_spans.sanitize, interesting_spans.inline) { let hir_id = tcx.local_def_id_to_hir_id(did); tcx.node_span_lint(lint::builtin::INLINE_NO_SANITIZE, hir_id, no_sanitize_span, |lint| { - lint.primary_message("`no_sanitize` will have no effect after inlining"); + lint.primary_message("setting `sanitize` off will have no effect after inlining"); lint.span_note(inline_span, "inlining requested here"); }) } // error when specifying link_name together with link_ordinal - if let Some(_) = codegen_fn_attrs.link_name + if let Some(_) = codegen_fn_attrs.symbol_name && let Some(_) = codegen_fn_attrs.link_ordinal { let msg = "cannot use `#[link_name]` with `#[link_ordinal]`"; @@ -505,7 +467,7 @@ fn check_result( .collect(), ) { let span = - find_attr!(tcx.get_all_attrs(did), AttributeKind::TargetFeature(_, span) => *span) + find_attr!(tcx.get_all_attrs(did), AttributeKind::TargetFeature{attr_span: span, ..} => *span) .unwrap_or_else(|| tcx.def_span(did)); tcx.dcx() @@ -532,14 +494,11 @@ fn handle_lang_items( // strippable by the linker. // // Additionally weak lang items have predetermined symbol names. 
- if let Some(lang_item) = lang_item { - if WEAK_LANG_ITEMS.contains(&lang_item) { - codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL; - } - if let Some(link_name) = lang_item.link_name() { - codegen_fn_attrs.export_name = Some(link_name); - codegen_fn_attrs.link_name = Some(link_name); - } + if let Some(lang_item) = lang_item + && let Some(link_name) = lang_item.link_name() + { + codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL; + codegen_fn_attrs.symbol_name = Some(link_name); + } // error when using no_mangle on a lang item @@ -605,6 +564,32 @@ fn opt_trait_item(tcx: TyCtxt<'_>, def_id: DefId) -> Option<DefId> { } } +fn disabled_sanitizers_for(tcx: TyCtxt<'_>, did: LocalDefId) -> SanitizerSet { + // Backtrack to the crate root. + let mut disabled = match tcx.opt_local_parent(did) { + // Check the parent (recursively). + Some(parent) => tcx.disabled_sanitizers_for(parent), + // We reached the crate root without seeing an attribute, so + // there are no sanitizers to exclude. + None => SanitizerSet::empty(), + }; + + // Check for a sanitize annotation directly on this def. + if let Some((on_set, off_set)) = find_attr!(tcx.get_all_attrs(did), AttributeKind::Sanitize {on_set, off_set, ..} => (on_set, off_set)) + { + // the on set is the set of sanitizers explicitly enabled. + // we mask those out since we want the set of disabled sanitizers here + disabled &= !*on_set; + // the off set is the set of sanitizers explicitly disabled. + // we or those in here. + disabled |= *off_set; + // the on set and off set are disjoint since there's a third option: unset. + // a node may not set the sanitizer setting in which case it inherits from parents. + // the code above in this function does this backtracking + } + disabled +} + /// Checks if the provided DefId is a method in a trait impl for a trait which has track_caller /// applied to the method prototype. fn should_inherit_track_caller(tcx: TyCtxt<'_>, def_id: DefId) -> bool { @@ -624,7 +609,7 @@ fn inherited_align<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<Align> { /// placeholder functions. We wrote the rustc_autodiff attributes ourself, so this should never /// panic, unless we introduced a bug when parsing the autodiff macro. //FIXME(jdonszelmann): put in the main loop. No need to have two..... :/ Let's do that when we make autodiff parsed. 
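The new `disabled_sanitizers_for` query above walks from an item up to the crate root, and each `#[sanitize(...)]` attribute along the way re-enables (`on_set`) or disables (`off_set`) sanitizers inherited from its parent. A standalone sketch of that set algebra (editorial, not part of this diff; plain `u32` masks and an index-based parent chain stand in for `SanitizerSet` and the HIR tree):

const ADDRESS: u32 = 1 << 0;
const THREAD: u32 = 1 << 1;

struct Node {
    parent: Option<usize>,
    // Sanitizers explicitly turned (on, off) at this node, if the attribute is present.
    sanitize_attr: Option<(u32, u32)>,
}

fn disabled_for(nodes: &[Node], idx: usize) -> u32 {
    // Inherit the parent's answer; the crate root starts with nothing disabled.
    let mut disabled = match nodes[idx].parent {
        Some(parent) => disabled_for(nodes, parent),
        None => 0,
    };
    if let Some((on_set, off_set)) = nodes[idx].sanitize_attr {
        disabled &= !on_set; // explicitly re-enabled here
        disabled |= off_set; // explicitly disabled here
    }
    disabled
}

fn main() {
    let nodes = [
        Node { parent: None, sanitize_attr: Some((0, ADDRESS | THREAD)) }, // crate root: both off
        Node { parent: Some(0), sanitize_attr: Some((ADDRESS, 0)) },       // re-enables address
        Node { parent: Some(1), sanitize_attr: None },                     // inherits from above
    ];
    // The leaf still has the thread sanitizer disabled, but not the address sanitizer.
    assert_eq!(disabled_for(&nodes, 2), THREAD);
}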
-fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> { +pub fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> { let attrs = tcx.get_attrs(id, sym::rustc_autodiff); let attrs = attrs.filter(|attr| attr.has_name(sym::rustc_autodiff)).collect::<Vec<_>>(); @@ -729,6 +714,11 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> { } pub(crate) fn provide(providers: &mut Providers) { - *providers = - Providers { codegen_fn_attrs, should_inherit_track_caller, inherited_align, ..*providers }; + *providers = Providers { + codegen_fn_attrs, + should_inherit_track_caller, + inherited_align, + disabled_sanitizers_for, + ..*providers + }; } diff --git a/compiler/rustc_codegen_ssa/src/common.rs b/compiler/rustc_codegen_ssa/src/common.rs index a6fd6c763ed..08e2f355953 100644 --- a/compiler/rustc_codegen_ssa/src/common.rs +++ b/compiler/rustc_codegen_ssa/src/common.rs @@ -1,10 +1,11 @@ #![allow(non_camel_case_types)] use rustc_hir::LangItem; +use rustc_hir::attrs::PeImportNameType; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{self, Instance, TyCtxt}; use rustc_middle::{bug, mir, span_bug}; -use rustc_session::cstore::{DllCallingConvention, DllImport, PeImportNameType}; +use rustc_session::cstore::{DllCallingConvention, DllImport}; use rustc_span::Span; use rustc_target::spec::Target; diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index 7ac830bcda9..fb5a8205140 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -1121,14 +1121,6 @@ impl IntoDiagArg for ExpectedPointerMutability { } #[derive(Diagnostic)] -#[diag(codegen_ssa_invalid_no_sanitize)] -#[note] -pub(crate) struct InvalidNoSanitize { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(codegen_ssa_target_feature_safe_trait)] pub(crate) struct TargetFeatureSafeTrait { #[primary_span] diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 23ed387a3ff..fe0500a5d4c 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -25,10 +25,10 @@ use std::io; use std::path::{Path, PathBuf}; use std::sync::Arc; -use rustc_ast as ast; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_data_structures::unord::UnordMap; use rustc_hir::CRATE_HIR_ID; +use rustc_hir::attrs::{CfgEntry, NativeLibKind}; use rustc_hir::def_id::CrateNum; use rustc_macros::{Decodable, Encodable, HashStable}; use rustc_metadata::EncodedMetadata; @@ -45,7 +45,6 @@ use rustc_session::Session; use rustc_session::config::{CrateType, OutputFilenames, OutputType, RUST_CGU_EXT}; use rustc_session::cstore::{self, CrateSource}; use rustc_session::lint::builtin::LINKER_MESSAGES; -use rustc_session::utils::NativeLibKind; use rustc_span::Symbol; pub mod assert_module_sources; @@ -187,7 +186,7 @@ pub struct NativeLib { pub kind: NativeLibKind, pub name: Symbol, pub filename: Option<Symbol>, - pub cfg: Option<ast::MetaItemInner>, + pub cfg: Option<CfgEntry>, pub verbatim: bool, pub dll_imports: Vec<cstore::DllImport>, } diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index e96590441fa..c3dc3e42b83 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -5,6 +5,7 @@ use rustc_ast as ast; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_data_structures::packed::Pu128; use 
rustc_hir::lang_items::LangItem; +use rustc_lint_defs::builtin::TAIL_CALL_TRACK_CALLER; use rustc_middle::mir::{self, AssertKind, InlineAsmMacro, SwitchTargets, UnwindTerminateReason}; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement}; use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; @@ -35,7 +36,7 @@ enum MergingSucc { True, } -/// Indicates to the call terminator codegen whether a cal +/// Indicates to the call terminator codegen whether a call /// is a normal call or an explicit tail call. #[derive(Debug, PartialEq)] enum CallKind { @@ -906,7 +907,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { fn_span, ); - let instance = match instance.def { + match instance.def { // We don't need AsyncDropGlueCtorShim here because it is not `noop func`, // it is `func returning noop future` ty::InstanceKind::DropGlue(_, None) => { @@ -995,14 +996,35 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { intrinsic.name, ); } - instance + (Some(instance), None) } } } - _ => instance, - }; - (Some(instance), None) + _ if kind == CallKind::Tail + && instance.def.requires_caller_location(bx.tcx()) => + { + if let Some(hir_id) = + terminator.source_info.scope.lint_root(&self.mir.source_scopes) + { + let msg = "tail calling a function marked with `#[track_caller]` has no special effect"; + bx.tcx().node_lint(TAIL_CALL_TRACK_CALLER, hir_id, |d| { + _ = d.primary_message(msg).span(fn_span) + }); + } + + let instance = ty::Instance::resolve_for_fn_ptr( + bx.tcx(), + bx.typing_env(), + def_id, + generic_args, + ) + .unwrap(); + + (None, Some(bx.get_fn_addr(instance))) + } + _ => (Some(instance), None), + } } ty::FnPtr(..) => (None, Some(callee.immediate())), _ => bug!("{} is not callable", callee.layout.ty), diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index 2a9b5c9019b..31784cabf4a 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -1,6 +1,6 @@ use rustc_abi::{BackendRepr, Float, Integer, Primitive, RegKind}; -use rustc_hir::attrs::InstructionSetAttr; -use rustc_middle::mir::mono::{Linkage, MonoItemData, Visibility}; +use rustc_hir::attrs::{InstructionSetAttr, Linkage}; +use rustc_middle::mir::mono::{MonoItemData, Visibility}; use rustc_middle::mir::{InlineAsmOperand, START_BLOCK}; use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout}; use rustc_middle::ty::{Instance, Ty, TyCtxt, TypeVisitableExt}; diff --git a/compiler/rustc_codegen_ssa/src/mono_item.rs b/compiler/rustc_codegen_ssa/src/mono_item.rs index b9040c330fb..8f03dc1e6b5 100644 --- a/compiler/rustc_codegen_ssa/src/mono_item.rs +++ b/compiler/rustc_codegen_ssa/src/mono_item.rs @@ -1,5 +1,6 @@ +use rustc_hir::attrs::Linkage; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::mono::{Linkage, MonoItem, MonoItemData, Visibility}; +use rustc_middle::mir::mono::{MonoItem, MonoItemData, Visibility}; use rustc_middle::ty::layout::HasTyCtxt; use tracing::debug; diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs index 7e4341a8236..54584999d61 100644 --- a/compiler/rustc_codegen_ssa/src/target_features.rs +++ b/compiler/rustc_codegen_ssa/src/target_features.rs @@ -3,7 +3,7 @@ use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_hir::attrs::InstructionSetAttr; use rustc_hir::def::DefKind; use 
rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; -use rustc_middle::middle::codegen_fn_attrs::TargetFeature; +use rustc_middle::middle::codegen_fn_attrs::{TargetFeature, TargetFeatureKind}; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; use rustc_session::Session; @@ -22,6 +22,7 @@ pub(crate) fn from_target_feature_attr( tcx: TyCtxt<'_>, did: LocalDefId, features: &[(Symbol, Span)], + was_forced: bool, rust_target_features: &UnordMap<String, target_features::Stability>, target_features: &mut Vec<TargetFeature>, ) { @@ -88,7 +89,14 @@ pub(crate) fn from_target_feature_attr( } } } - target_features.push(TargetFeature { name, implied: name != feature }) + let kind = if name != feature { + TargetFeatureKind::Implied + } else if was_forced { + TargetFeatureKind::Forced + } else { + TargetFeatureKind::Enabled + }; + target_features.push(TargetFeature { name, kind }) } } } @@ -180,6 +188,7 @@ fn parse_rust_feature_flag<'a>( while let Some(new_feature) = new_features.pop() { if features.insert(new_feature) { if let Some(implied_features) = inverse_implied_features.get(&new_feature) { + #[allow(rustc::potential_query_instability)] new_features.extend(implied_features) } } diff --git a/compiler/rustc_codegen_ssa/src/traits/declare.rs b/compiler/rustc_codegen_ssa/src/traits/declare.rs index 9f735546558..8d5f0a5b939 100644 --- a/compiler/rustc_codegen_ssa/src/traits/declare.rs +++ b/compiler/rustc_codegen_ssa/src/traits/declare.rs @@ -1,5 +1,6 @@ +use rustc_hir::attrs::Linkage; use rustc_hir::def_id::DefId; -use rustc_middle::mir::mono::{Linkage, Visibility}; +use rustc_middle::mir::mono::Visibility; use rustc_middle::ty::Instance; pub trait PreDefineCodegenMethods<'tcx> { diff --git a/compiler/rustc_codegen_ssa/src/traits/write.rs b/compiler/rustc_codegen_ssa/src/traits/write.rs index f391c198e1a..cc7c4e46d7b 100644 --- a/compiler/rustc_codegen_ssa/src/traits/write.rs +++ b/compiler/rustc_codegen_ssa/src/traits/write.rs @@ -1,7 +1,6 @@ use std::path::PathBuf; -use rustc_ast::expand::autodiff_attrs::AutoDiffItem; -use rustc_errors::{DiagCtxtHandle, FatalError}; +use rustc_errors::DiagCtxtHandle; use rustc_middle::dep_graph::WorkProduct; use crate::back::lto::{SerializedModule, ThinModule}; @@ -23,8 +22,7 @@ pub trait WriteBackendMethods: Clone + 'static { exported_symbols_for_lto: &[String], each_linked_rlib_for_lto: &[PathBuf], modules: Vec<FatLtoInput<Self>>, - diff_fncs: Vec<AutoDiffItem>, - ) -> Result<ModuleCodegen<Self::Module>, FatalError>; + ) -> ModuleCodegen<Self::Module>; /// Performs thin LTO by performing necessary global analysis and returning two /// lists, one of the modules that need optimization and another for modules that /// can simply be copied over from the incr. comp. cache. 
@@ -34,7 +32,7 @@ pub trait WriteBackendMethods: Clone + 'static { each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>, - ) -> Result<(Vec<ThinModule<Self>>, Vec<WorkProduct>), FatalError>; + ) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>); fn print_pass_timings(&self); fn print_statistics(&self); fn optimize( @@ -42,16 +40,16 @@ pub trait WriteBackendMethods: Clone + 'static { dcx: DiagCtxtHandle<'_>, module: &mut ModuleCodegen<Self::Module>, config: &ModuleConfig, - ) -> Result<(), FatalError>; + ); fn optimize_thin( cgcx: &CodegenContext<Self>, thin: ThinModule<Self>, - ) -> Result<ModuleCodegen<Self::Module>, FatalError>; + ) -> ModuleCodegen<Self::Module>; fn codegen( cgcx: &CodegenContext<Self>, module: ModuleCodegen<Self::Module>, config: &ModuleConfig, - ) -> Result<CompiledModule, FatalError>; + ) -> CompiledModule; fn prepare_thin( module: ModuleCodegen<Self::Module>, want_summary: bool, diff --git a/compiler/rustc_const_eval/Cargo.toml b/compiler/rustc_const_eval/Cargo.toml index 51dcee8d882..88dfc83f7fc 100644 --- a/compiler/rustc_const_eval/Cargo.toml +++ b/compiler/rustc_const_eval/Cargo.toml @@ -22,5 +22,5 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index 2985eafb63a..60518dafbf2 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ b/compiler/rustc_const_eval/messages.ftl @@ -57,7 +57,7 @@ const_eval_const_context = {$kind -> } const_eval_const_heap_ptr_in_final = encountered `const_allocate` pointer in final value that was not made global - .note = use `const_make_global` to make allocated pointers immutable before returning + .note = use `const_make_global` to turn allocated pointers into immutable globals before returning const_eval_const_make_global_ptr_already_made_global = attempting to call `const_make_global` twice on the same allocation {$alloc} @@ -231,6 +231,9 @@ const_eval_mutable_borrow_escaping = const_eval_mutable_ptr_in_final = encountered mutable pointer in final value of {const_eval_intern_kind} +const_eval_partial_pointer_in_final = encountered partial pointer in final value of {const_eval_intern_kind} + .note = while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value + const_eval_nested_static_in_thread_local = #[thread_local] does not support implicit nested statics, please create explicit static items and refer to them instead const_eval_non_const_await = @@ -299,10 +302,8 @@ const_eval_panic = evaluation panicked: {$msg} const_eval_panic_non_str = argument to `panic!()` in a const context must have type `&str` -const_eval_partial_pointer_copy = - unable to copy parts of a pointer from memory at {$ptr} -const_eval_partial_pointer_overwrite = - unable to overwrite parts of a pointer in memory at {$ptr} +const_eval_partial_pointer_read = + unable to read parts of a pointer from memory at {$ptr} const_eval_pointer_arithmetic_overflow = overflowing pointer arithmetic: the total offset in bytes does not fit in an `isize` diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs 
b/compiler/rustc_const_eval/src/check_consts/check.rs index 8e2c138282a..ca707b50d50 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -827,7 +827,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { // At this point, we are calling a function, `callee`, whose `DefId` is known... - // `begin_panic` and `#[rustc_const_panic_str]` functions accept generic + // `begin_panic` and `panic_display` functions accept generic // types other than str. Check to enforce that only str can be used in // const-eval. @@ -841,8 +841,8 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { return; } - // const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str` - if tcx.has_attr(callee, sym::rustc_const_panic_str) { + // const-eval of `panic_display` assumes the argument is `&&str` + if tcx.is_lang_item(callee, LangItem::PanicDisplay) { match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() { ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) => {} diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index 5835660e1c3..4da2663319c 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -117,6 +117,13 @@ fn eval_body_using_ecx<'tcx, R: InterpretationResult<'tcx>>( ecx.tcx.dcx().emit_err(errors::ConstHeapPtrInFinal { span: ecx.tcx.span }), ))); } + Err(InternError::PartialPointer) => { + throw_inval!(AlreadyReported(ReportedErrorInfo::non_const_eval_error( + ecx.tcx + .dcx() + .emit_err(errors::PartialPtrInFinal { span: ecx.tcx.span, kind: intern_kind }), + ))); + } } interp_ok(R::make_result(ret, ecx)) diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index a18ae79f318..fccb6b171b1 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -237,7 +237,7 @@ impl<'tcx> CompileTimeInterpCx<'tcx> { ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> { let def_id = instance.def_id(); - if self.tcx.has_attr(def_id, sym::rustc_const_panic_str) + if self.tcx.is_lang_item(def_id, LangItem::PanicDisplay) || self.tcx.is_lang_item(def_id, LangItem::BeginPanic) { let args = self.copy_fn_args(args); @@ -280,22 +280,110 @@ impl<'tcx> CompileTimeInterpCx<'tcx> { interp_ok(match (a, b) { // Comparisons between integers are always known. (Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8, - // Comparisons of null with an arbitrary scalar can be known if `scalar_may_be_null` - // indicates that the scalar can definitely *not* be null. - (Scalar::Int(int), ptr) | (ptr, Scalar::Int(int)) - if int.is_null() && !self.scalar_may_be_null(ptr)? => - { - 0 + // Comparing a pointer `ptr` with an integer `int` is equivalent to comparing + // `ptr-int` with null, so we can reduce this case to a `scalar_may_be_null` test. + (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => { + let int = int.to_target_usize(*self.tcx); + // The `wrapping_neg` here may produce a value that is not + // a valid target usize any more... but `wrapping_offset` handles that correctly. + let offset_ptr = ptr.wrapping_offset(Size::from_bytes(int.wrapping_neg()), self); + if !self.scalar_may_be_null(Scalar::from_pointer(offset_ptr, self))? 
{ + // `ptr.wrapping_sub(int)` is definitely not equal to `0`, so `ptr != int` + 0 + } else { + // `ptr.wrapping_sub(int)` could be equal to `0`, but might not be, + // so we cannot know for sure if `ptr == int` or not + 2 + } + } + (Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => { + let (a_prov, a_offset) = a.prov_and_relative_offset(); + let (b_prov, b_offset) = b.prov_and_relative_offset(); + let a_allocid = a_prov.alloc_id(); + let b_allocid = b_prov.alloc_id(); + let a_info = self.get_alloc_info(a_allocid); + let b_info = self.get_alloc_info(b_allocid); + + // Check if the pointers cannot be equal due to alignment. + if a_info.align > Align::ONE && b_info.align > Align::ONE { + let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes()); + let a_residue = a_offset.bytes() % min_align; + let b_residue = b_offset.bytes() % min_align; + if a_residue != b_residue { + // If the two pointers have a different residue modulo their + // common alignment, they cannot be equal. + return interp_ok(0); + } + // The pointers have the same residue modulo their common alignment, + // so they could be equal. Try the other checks. + } + + if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = ( + self.tcx.try_get_global_alloc(a_allocid), + self.tcx.try_get_global_alloc(b_allocid), + ) { + if a_allocid == b_allocid { + debug_assert_eq!( + a_did, b_did, + "different static item DefIds had same AllocId? {a_allocid:?} == {b_allocid:?}, {a_did:?} != {b_did:?}" + ); + // Comparing two pointers into the same static. As per + // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro + // a static cannot be duplicated, so if two pointers are into the same + // static, they are equal if and only if their offsets are equal. + (a_offset == b_offset) as u8 + } else { + debug_assert_ne!( + a_did, b_did, + "same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}" + ); + // Comparing two pointers into different statics. + // We can never determine for sure that two pointers into different statics + // are *equal*, but we can know that they are *unequal* if they are both + // strictly in-bounds (i.e. in-bounds and not one-past-the-end) of + // their respective static, as different non-zero-sized statics cannot + // overlap or be deduplicated as per + // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro + // (non-deduplication), and + // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness + // (non-overlapping). + if a_offset < a_info.size && b_offset < b_info.size { + 0 + } else { + // Otherwise, conservatively say we don't know. + // There are some cases we could still return `0` for, e.g. + // if the pointers being equal would require their statics to overlap + // one or more bytes, but for simplicity we currently only check + // strictly in-bounds pointers. + 2 + } + } + } else { + // In all other cases we conservatively say we don't know. + // + // For comparing statics to non-statics, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness + // immutable statics can overlap with other kinds of allocations sometimes. + // + // FIXME: We could be more decisive for (non-zero-sized) mutable statics, + // which cannot overlap with other kinds of allocations. 
+ // + // Functions and vtables can be duplicated and deduplicated, so we + // cannot be sure of runtime equality of pointers to the same one, or the + // runtime inequality of pointers to different ones (see e.g. #73722), + // so comparing those should return 2, whether they are the same allocation + // or not. + // + // `GlobalAlloc::TypeId` exists mostly to prevent consteval from comparing + // `TypeId`s, so comparing those should always return 2, whether they are the + // same allocation or not. + // + // FIXME: We could revisit comparing pointers into the same + // `GlobalAlloc::Memory` once https://github.com/rust-lang/rust/issues/128775 + // is fixed (but they can be deduplicated, so comparing pointers into different + // ones should return 2). + 2 + } } - // Other ways of comparing integers and pointers can never be known for sure. - (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2, - // FIXME: return a `1` for when both sides are the same pointer, *except* that - // some things (like functions and vtables) do not have stable addresses - // so we need to be careful around them (see e.g. #73722). - // FIXME: return `0` for at least some comparisons where we can reliably - // determine the result of runtime inequality tests at compile-time. - // Examples include comparison of addresses in different static items. - (Scalar::Ptr(..), Scalar::Ptr(..)) => 2, }) } } diff --git a/compiler/rustc_const_eval/src/errors.rs b/compiler/rustc_const_eval/src/errors.rs index a4148cb145f..2d412ee5ec2 100644 --- a/compiler/rustc_const_eval/src/errors.rs +++ b/compiler/rustc_const_eval/src/errors.rs @@ -52,6 +52,15 @@ pub(crate) struct ConstHeapPtrInFinal { } #[derive(Diagnostic)] +#[diag(const_eval_partial_pointer_in_final)] +#[note] +pub(crate) struct PartialPtrInFinal { + #[primary_span] + pub span: Span, + pub kind: InternKind, +} + +#[derive(Diagnostic)] #[diag(const_eval_unstable_in_stable_exposed)] pub(crate) struct UnstableInStableExposed { pub gate: String, @@ -836,8 +845,7 @@ impl ReportErrorExt for UnsupportedOpInfo { UnsupportedOpInfo::Unsupported(s) => s.clone().into(), UnsupportedOpInfo::ExternTypeField => const_eval_extern_type_field, UnsupportedOpInfo::UnsizedLocal => const_eval_unsized_local, - UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite, - UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy, + UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_read, UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int, UnsupportedOpInfo::ThreadLocalStatic(_) => const_eval_thread_local_static, UnsupportedOpInfo::ExternStatic(_) => const_eval_extern_static, @@ -848,7 +856,7 @@ impl ReportErrorExt for UnsupportedOpInfo { use UnsupportedOpInfo::*; use crate::fluent_generated::*; - if let ReadPointerAsInt(_) | OverwritePartialPointer(_) | ReadPartialPointer(_) = self { + if let ReadPointerAsInt(_) | ReadPartialPointer(_) = self { diag.help(const_eval_ptr_as_bytes_1); diag.help(const_eval_ptr_as_bytes_2); } @@ -860,7 +868,7 @@ impl ReportErrorExt for UnsupportedOpInfo { | UnsupportedOpInfo::ExternTypeField | Unsupported(_) | ReadPointerAsInt(_) => {} - OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => { + ReadPartialPointer(ptr) => { diag.arg("ptr", ptr); } ThreadLocalStatic(did) | ExternStatic(did) => rustc_middle::ty::tls::with(|tcx| { diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index 
b1cc0cc2878..4cb88d44e1b 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -27,8 +27,9 @@ use crate::{enter_trace_span, fluent_generated as fluent}; pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> { /// Pass a copy of the given operand. Copy(OpTy<'tcx, Prov>), - /// Allow for the argument to be passed in-place: destroy the value originally stored at that place and - /// make the place inaccessible for the duration of the function call. + /// Allow for the argument to be passed in-place: destroy the value originally stored at that + /// place and make the place inaccessible for the duration of the function call. This *must* be + /// an in-memory place so that we can do the proper alias checks. InPlace(MPlaceTy<'tcx, Prov>), } @@ -379,6 +380,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } + // *Before* pushing the new frame, determine whether the return destination is in memory. + // Need to use `place_to_op` to be *sure* we get the mplace if there is one. + let destination_mplace = self.place_to_op(destination)?.as_mplace_or_imm().left(); + + // Push the "raw" frame -- this leaves locals uninitialized. self.push_stack_frame_raw(instance, body, destination, cont)?; // If an error is raised here, pop the frame again to get an accurate backtrace. @@ -496,7 +502,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Protect return place for in-place return value passing. // We only need to protect anything if this is actually an in-memory place. - if let Left(mplace) = destination.as_mplace_or_local() { + if let Some(mplace) = destination_mplace { M::protect_in_place_function_argument(self, &mplace)?; } @@ -731,18 +737,21 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { ) { let tcx = *self.tcx; - let trait_def_id = tcx.trait_of_assoc(def_id).unwrap(); + let trait_def_id = tcx.parent(def_id); let virtual_trait_ref = ty::TraitRef::from_assoc(tcx, trait_def_id, virtual_instance.args); let existential_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref); let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty); - let concrete_method = Instance::expect_resolve_for_vtable( - tcx, - self.typing_env, - def_id, - virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args), - self.cur_span(), - ); + let concrete_method = { + let _trace = enter_trace_span!(M, resolve::expect_resolve_for_vtable, ?def_id); + Instance::expect_resolve_for_vtable( + tcx, + self.typing_env, + def_id, + virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args), + self.cur_span(), + ) + }; assert_eq!(concrete_instance, concrete_method); } @@ -825,7 +834,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { place } }; - let instance = ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty); + let instance = { + let _trace = + enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty); + ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty) + }; let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?; let arg = self.mplace_to_ref(&place)?; diff --git a/compiler/rustc_const_eval/src/interpret/cast.rs b/compiler/rustc_const_eval/src/interpret/cast.rs index de4fbc7b475..e3afeda5b7c 100644 --- a/compiler/rustc_const_eval/src/interpret/cast.rs +++ b/compiler/rustc_const_eval/src/interpret/cast.rs @@ -16,8 +16,8 @@ use super::{ FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub, 
throw_ub_custom, }; -use crate::fluent_generated as fluent; use crate::interpret::Writeable; +use crate::{enter_trace_span, fluent_generated as fluent}; impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { pub fn cast( @@ -81,13 +81,16 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // The src operand does not matter, just its type match *src.layout.ty.kind() { ty::FnDef(def_id, args) => { - let instance = ty::Instance::resolve_for_fn_ptr( - *self.tcx, - self.typing_env, - def_id, - args, - ) - .ok_or_else(|| err_inval!(TooGeneric))?; + let instance = { + let _trace = enter_trace_span!(M, resolve::resolve_for_fn_ptr, ?def_id); + ty::Instance::resolve_for_fn_ptr( + *self.tcx, + self.typing_env, + def_id, + args, + ) + .ok_or_else(|| err_inval!(TooGeneric))? + }; let fn_ptr = self.fn_ptr(FnVal::Instance(instance)); self.write_pointer(fn_ptr, dest)?; @@ -114,12 +117,15 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // The src operand does not matter, just its type match *src.layout.ty.kind() { ty::Closure(def_id, args) => { - let instance = ty::Instance::resolve_closure( - *self.tcx, - def_id, - args, - ty::ClosureKind::FnOnce, - ); + let instance = { + let _trace = enter_trace_span!(M, resolve::resolve_closure, ?def_id); + ty::Instance::resolve_closure( + *self.tcx, + def_id, + args, + ty::ClosureKind::FnOnce, + ) + }; let fn_ptr = self.fn_ptr(FnVal::Instance(instance)); self.write_pointer(fn_ptr, dest)?; } diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs index d4f2bb8257d..9681d89ce35 100644 --- a/compiler/rustc_const_eval/src/interpret/eval_context.rs +++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs @@ -325,8 +325,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let _trace = enter_trace_span!( M, "instantiate_from_frame_and_normalize_erasing_regions", - "{}", - frame.instance + %frame.instance ); frame .instance @@ -344,6 +343,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { def: DefId, args: GenericArgsRef<'tcx>, ) -> InterpResult<'tcx, ty::Instance<'tcx>> { + let _trace = enter_trace_span!(M, resolve::try_resolve, def = ?def); trace!("resolve: {:?}, {:#?}", def, args); trace!("typing_env: {:#?}", self.typing_env); trace!("args: {:#?}", args); @@ -582,6 +582,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { span: Span, layout: Option<TyAndLayout<'tcx>>, ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> { + let _trace = enter_trace_span!(M, const_eval::eval_mir_constant, ?val); let const_val = val.eval(*self.tcx, self.typing_env, span).map_err(|err| { if M::ALL_CONSTS_ARE_PRECHECKED { match err { diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs index bb59b9f5418..5d510a98352 100644 --- a/compiler/rustc_const_eval/src/interpret/intern.rs +++ b/compiler/rustc_const_eval/src/interpret/intern.rs @@ -19,9 +19,12 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_hir as hir; use rustc_hir::definitions::{DefPathData, DisambiguatorState}; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; -use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult}; +use rustc_middle::mir::interpret::{ + AllocBytes, ConstAllocation, CtfeProvenance, InterpResult, Provenance, +}; use rustc_middle::query::TyCtxtAt; use rustc_middle::span_bug; +use rustc_middle::ty::TyCtxt; use rustc_middle::ty::layout::TyAndLayout; use rustc_span::def_id::LocalDefId; use tracing::{instrument, trace}; @@ -52,39 
+55,30 @@ impl HasStaticRootDefId for const_eval::CompileTimeMachine<'_> { } } -/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory. -/// -/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the -/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be* -/// already mutable (as a sanity check). -/// -/// Returns an iterator over all relocations referred to by this allocation. -fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>( - ecx: &mut InterpCx<'tcx, M>, - alloc_id: AllocId, +fn prepare_alloc<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>( + tcx: TyCtxt<'tcx>, + kind: MemoryKind<const_eval::MemoryKind>, + alloc: &mut Allocation<Prov, Extra, Bytes>, mutability: Mutability, - disambiguator: Option<&mut DisambiguatorState>, -) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, InternError> { - trace!("intern_shallow {:?}", alloc_id); - // remove allocation - // FIXME(#120456) - is `swap_remove` correct? - let Some((kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else { - return Err(InternError::DanglingPointer); - }; - +) -> Result<(), InternError> { match kind { MemoryKind::Machine(const_eval::MemoryKind::Heap { was_made_global }) => { if !was_made_global { // Attempting to intern a `const_allocate`d pointer that was not made global via - // `const_make_global`. We want to error here, but we have to first put the - // allocation back into the `alloc_map` to keep things in a consistent state. - ecx.memory.alloc_map.insert(alloc_id, (kind, alloc)); + // `const_make_global`. + tcx.dcx().delayed_bug("non-global heap allocation in const value"); return Err(InternError::ConstAllocNotGlobal); } } MemoryKind::Stack | MemoryKind::CallerLocation => {} } + if !alloc.provenance_merge_bytes(&tcx) { + // Per-byte provenance is not supported by backends, so we cannot accept it here. + tcx.dcx().delayed_bug("partial pointer in const value"); + return Err(InternError::PartialPointer); + } + // Set allocation mutability as appropriate. This is used by LLVM to put things into // read-only memory, and also by Miri when evaluating other globals that // access this one. @@ -97,6 +91,36 @@ fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>( assert_eq!(alloc.mutability, Mutability::Mut); } } + Ok(()) +} + +/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory. +/// +/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the +/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be* +/// already mutable (as a sanity check). +/// +/// Returns an iterator over all relocations referred to by this allocation. +fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>( + ecx: &mut InterpCx<'tcx, M>, + alloc_id: AllocId, + mutability: Mutability, + disambiguator: Option<&mut DisambiguatorState>, +) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, InternError> { + trace!("intern_shallow {:?}", alloc_id); + // remove allocation + // FIXME(#120456) - is `swap_remove` correct? + let Some((kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else { + return Err(InternError::DanglingPointer); + }; + + if let Err(err) = prepare_alloc(*ecx.tcx, kind, &mut alloc, mutability) { + // We want to error here, but we have to first put the + // allocation back into the `alloc_map` to keep things in a consistent state. 
+ ecx.memory.alloc_map.insert(alloc_id, (kind, alloc)); + return Err(err); + } + // link the alloc id to the actual allocation let alloc = ecx.tcx.mk_const_alloc(alloc); if let Some(static_id) = ecx.machine.static_def_id() { @@ -166,6 +190,7 @@ pub enum InternError { BadMutablePointer, DanglingPointer, ConstAllocNotGlobal, + PartialPointer, } /// Intern `ret` and everything it references. @@ -221,13 +246,11 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>( let mut todo: Vec<_> = if is_static { // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`. // But still change its mutability to match the requested one. - let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap(); - alloc.1.mutability = base_mutability; - alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect() + let (kind, alloc) = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap(); + prepare_alloc(*ecx.tcx, *kind, alloc, base_mutability)?; + alloc.provenance().ptrs().iter().map(|&(_, prov)| prov).collect() } else { - intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator)) - .unwrap() - .collect() + intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))?.collect() }; // We need to distinguish "has just been interned" from "was already in `tcx`", // so we track this in a separate set. @@ -235,7 +258,6 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>( // Whether we encountered a bad mutable pointer. // We want to first report "dangling" and then "mutable", so we need to delay reporting these // errors. - let mut result = Ok(()); let mut found_bad_mutable_ptr = false; // Keep interning as long as there are things to intern. @@ -310,20 +332,15 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>( // okay with losing some potential for immutability here. This can anyway only affect // `static mut`. just_interned.insert(alloc_id); - match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) { - Ok(nested) => todo.extend(nested), - Err(err) => { - ecx.tcx.dcx().delayed_bug("error during const interning"); - result = Err(err); - } - } + let next = intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator))?; + todo.extend(next); } - if found_bad_mutable_ptr && result.is_ok() { + if found_bad_mutable_ptr { // We found a mutable pointer inside a const where inner allocations should be immutable, // and there was no other error. This should usually never happen! However, this can happen // in unleash-miri mode, so report it as a normal error then. if ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you { - result = Err(InternError::BadMutablePointer); + return Err(InternError::BadMutablePointer); } else { span_bug!( ecx.tcx.span, @@ -331,7 +348,7 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>( ); } } - result + Ok(()) } /// Intern `ret`. This function assumes that `ret` references no other allocation. diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 23b40894f16..2c1e5087e1c 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -1314,29 +1314,20 @@ impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> } /// Mark the given sub-range (relative to this allocation reference) as uninitialized. 
- pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> { + pub fn write_uninit(&mut self, range: AllocRange) { let range = self.range.subrange(range); - self.alloc - .write_uninit(&self.tcx, range) - .map_err(|e| e.to_interp_error(self.alloc_id)) - .into() + self.alloc.write_uninit(&self.tcx, range); } /// Mark the entire referenced range as uninitialized - pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> { - self.alloc - .write_uninit(&self.tcx, self.range) - .map_err(|e| e.to_interp_error(self.alloc_id)) - .into() + pub fn write_uninit_full(&mut self) { + self.alloc.write_uninit(&self.tcx, self.range); } /// Remove all provenance in the reference range. - pub fn clear_provenance(&mut self) -> InterpResult<'tcx> { - self.alloc - .clear_provenance(&self.tcx, self.range) - .map_err(|e| e.to_interp_error(self.alloc_id)) - .into() + pub fn clear_provenance(&mut self) { + self.alloc.clear_provenance(&self.tcx, self.range); } } @@ -1427,11 +1418,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Side-step AllocRef and directly access the underlying bytes more efficiently. // (We are staying inside the bounds here and all bytes do get overwritten so all is good.) - let alloc_id = alloc_ref.alloc_id; - let bytes = alloc_ref - .alloc - .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range) - .map_err(move |e| e.to_interp_error(alloc_id))?; + let bytes = + alloc_ref.alloc.get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range); // `zip` would stop when the first iterator ends; we want to definitely // cover all of `bytes`. for dest in bytes { @@ -1513,10 +1501,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // `get_bytes_mut` will clear the provenance, which is correct, // since we don't want to keep any provenance at the target. // This will also error if copying partial provenance is not supported. - let provenance = src_alloc - .provenance() - .prepare_copy(src_range, dest_offset, num_copies, self) - .map_err(|e| e.to_interp_error(src_alloc_id))?; + let provenance = + src_alloc.provenance().prepare_copy(src_range, dest_offset, num_copies, self); // Prepare a copy of the initialization mask. let init = src_alloc.init_mask().prepare_copy(src_range); @@ -1534,10 +1520,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { dest_range, )?; // Yes we do overwrite all bytes in `dest_bytes`. - let dest_bytes = dest_alloc - .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range) - .map_err(|e| e.to_interp_error(dest_alloc_id))? - .as_mut_ptr(); + let dest_bytes = + dest_alloc.get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range).as_mut_ptr(); if init.no_bytes_init() { // Fast path: If all bytes are `uninit` then there is nothing to copy. The target range @@ -1546,9 +1530,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // This also avoids writing to the target bytes so that the backing allocation is never // touched if the bytes stay uninitialized for the whole interpreter execution. On contemporary // operating system this can avoid physically allocating the page. - dest_alloc - .write_uninit(&tcx, dest_range) - .map_err(|e| e.to_interp_error(dest_alloc_id))?; + dest_alloc.write_uninit(&tcx, dest_range); // `write_uninit` also resets the provenance, so we are done. 
return interp_ok(()); } diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 53a440b646b..560b0e1ae4e 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -175,6 +175,16 @@ impl<Prov: Provenance> Immediate<Prov> { } interp_ok(()) } + + pub fn has_provenance(&self) -> bool { + match self { + Immediate::Scalar(scalar) => matches!(scalar, Scalar::Ptr { .. }), + Immediate::ScalarPair(s1, s2) => { + matches!(s1, Scalar::Ptr { .. }) || matches!(s2, Scalar::Ptr { .. }) + } + Immediate::Uninit => false, + } + } } // ScalarPair needs a type to interpret, so we often have an immediate and a type together diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index 6ff50dc700f..a86fdf80f60 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -234,6 +234,12 @@ impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> { } /// A place is either an mplace or some local. + /// + /// Note that the return value can be different even for logically identical places! + /// Specifically, if a local is stored in-memory, this may return `Local` or `MPlaceTy` + /// depending on how the place was constructed. In other words, seeing `Local` here does *not* + /// imply that this place does not point to memory. Every caller must therefore always handle + /// both cases. #[inline(always)] pub fn as_mplace_or_local( &self, @@ -705,7 +711,7 @@ where match value { Immediate::Scalar(scalar) => { - alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar) + alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)?; } Immediate::ScalarPair(a_val, b_val) => { let BackendRepr::ScalarPair(a, b) = layout.backend_repr else { @@ -725,10 +731,10 @@ where alloc.write_scalar(alloc_range(Size::ZERO, a_val.size()), a_val)?; alloc.write_scalar(alloc_range(b_offset, b_val.size()), b_val)?; // We don't have to reset padding here, `write_immediate` will anyway do a validation run. - interp_ok(()) } Immediate::Uninit => alloc.write_uninit_full(), } + interp_ok(()) } pub fn write_uninit( @@ -748,7 +754,7 @@ where // Zero-sized access return interp_ok(()); }; - alloc.write_uninit_full()?; + alloc.write_uninit_full(); } } interp_ok(()) @@ -759,6 +765,13 @@ where &mut self, dest: &impl Writeable<'tcx, M::Provenance>, ) -> InterpResult<'tcx> { + // If this is an efficiently represented local variable without provenance, skip the + // `as_mplace_or_mutable_local` that would otherwise force this local into memory. + if let Right(imm) = dest.to_op(self)?.as_mplace_or_imm() { + if !imm.has_provenance() { + return interp_ok(()); + } + } match self.as_mplace_or_mutable_local(&dest.to_place())? 
{ Right((local_val, _local_layout, local)) => { local_val.clear_provenance()?; @@ -772,7 +785,7 @@ where // Zero-sized access return interp_ok(()); }; - alloc.clear_provenance()?; + alloc.clear_provenance(); } } interp_ok(()) diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 73cc87508ef..7cabfd96121 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -20,7 +20,7 @@ use super::{ MemoryKind, Operand, PlaceTy, Pointer, Provenance, ReturnAction, Scalar, from_known_layout, interp_ok, throw_ub, throw_unsup, }; -use crate::errors; +use crate::{enter_trace_span, errors}; // The Phantomdata exists to prevent this type from being `Send`. If it were sent across a thread // boundary and dropped in the other thread, it would exit the span in the other thread. @@ -386,6 +386,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check). for &const_ in body.required_consts() { + // We can't use `eval_mir_constant` here as that assumes that all required consts have + // already been checked, so we need a separate tracing call. + let _trace = enter_trace_span!(M, const_eval::required_consts, ?const_.const_); let c = self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?; c.eval(*self.tcx, self.typing_env, const_.span).map_err(|err| { diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index 76e470b69dc..23d362de308 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -2,8 +2,11 @@ //! //! The main entry point is the `step` method. +use std::iter; + use either::Either; use rustc_abi::{FIRST_VARIANT, FieldIdx}; +use rustc_data_structures::fx::FxHashSet; use rustc_index::IndexSlice; use rustc_middle::ty::{self, Instance, Ty}; use rustc_middle::{bug, mir, span_bug}; @@ -389,8 +392,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Evaluate the arguments of a function call fn eval_fn_call_argument( - &self, + &mut self, op: &mir::Operand<'tcx>, + move_definitely_disjoint: bool, ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> { interp_ok(match op { mir::Operand::Copy(_) | mir::Operand::Constant(_) => { @@ -399,24 +403,19 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { FnArg::Copy(op) } mir::Operand::Move(place) => { - // If this place lives in memory, preserve its location. - // We call `place_to_op` which will be an `MPlaceTy` whenever there exists - // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local` - // which can return a local even if that has an mplace.) let place = self.eval_place(*place)?; - let op = self.place_to_op(&place)?; - - match op.as_mplace_or_imm() { - Either::Left(mplace) => FnArg::InPlace(mplace), - Either::Right(_imm) => { - // This argument doesn't live in memory, so there's no place - // to make inaccessible during the call. - // We rely on there not being any stray `PlaceTy` that would let the - // caller directly access this local! - // This is also crucial for tail calls, where we want the `FnArg` to - // stay valid when the old stack frame gets popped. - FnArg::Copy(op) + if move_definitely_disjoint { + // We still have to ensure that no *other* pointers are used to access this place, + // so *if* it is in memory then we have to treat it as `InPlace`. 
+ // Use `place_to_op` to guarantee that we notice it being in memory. + let op = self.place_to_op(&place)?; + match op.as_mplace_or_imm() { + Either::Left(mplace) => FnArg::InPlace(mplace), + Either::Right(_imm) => FnArg::Copy(op), } + } else { + // We have to force this into memory to detect aliasing among `Move` arguments. + FnArg::InPlace(self.force_allocation(&place)?) } } }) @@ -425,18 +424,53 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the /// necessary information about callee and arguments to make a call. fn eval_callee_and_args( - &self, + &mut self, terminator: &mir::Terminator<'tcx>, func: &mir::Operand<'tcx>, args: &[Spanned<mir::Operand<'tcx>>], + dest: &mir::Place<'tcx>, ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> { let func = self.eval_operand(func, None)?; + + // Evaluating function call arguments. The tricky part here is dealing with `Move` + // arguments: we have to ensure no two such arguments alias. This would be most easily done + // by just forcing them all into memory and then doing the usual in-place argument + // protection, but then we'd force *a lot* of arguments into memory. So we do some syntactic + // pre-processing here where if all `move` arguments are syntactically distinct local + // variables (and none is indirect), we can skip the in-memory forcing. + // We have to include `dest` in that list so that we can detect aliasing of an in-place + // argument with the return place. + let move_definitely_disjoint = 'move_definitely_disjoint: { + let mut previous_locals = FxHashSet::<mir::Local>::default(); + for place in args + .iter() + .filter_map(|a| { + // We only have to care about `Move` arguments. + if let mir::Operand::Move(place) = &a.node { Some(place) } else { None } + }) + .chain(iter::once(dest)) + { + if place.is_indirect_first_projection() { + // An indirect in-place argument could alias with anything else... + break 'move_definitely_disjoint false; + } + if !previous_locals.insert(place.local) { + // This local is the base for two arguments! They might overlap. + break 'move_definitely_disjoint false; + } + } + // We found no violation so they are all definitely disjoint. 
+ true + }; let args = args .iter() - .map(|arg| self.eval_fn_call_argument(&arg.node)) + .map(|arg| self.eval_fn_call_argument(&arg.node, move_definitely_disjoint)) .collect::<InterpResult<'tcx, Vec<_>>>()?; - let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx); + let fn_sig_binder = { + let _trace = enter_trace_span!(M, "fn_sig", ty = ?func.layout.ty.kind()); + func.layout.ty.fn_sig(*self.tcx) + }; let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder); let extra_args = &args[fn_sig.inputs().len()..]; let extra_args = @@ -519,7 +553,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let old_loc = self.frame().loc; let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = - self.eval_callee_and_args(terminator, func, args)?; + self.eval_callee_and_args(terminator, func, args, &destination)?; let destination = self.eval_place(destination)?; self.init_fn_call( @@ -542,7 +576,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let old_frame_idx = self.frame_idx(); let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = - self.eval_callee_and_args(terminator, func, args)?; + self.eval_callee_and_args(terminator, func, args, &mir::Place::return_place())?; self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?; @@ -560,7 +594,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { "Async Drop must be expanded or reset to sync in runtime MIR" ); let place = self.eval_place(place)?; - let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty); + let instance = { + let _trace = + enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty); + Instance::resolve_drop_in_place(*self.tcx, place.layout.ty) + }; if let ty::InstanceKind::DropGlue(_, None) = instance.def { // This is the branch we enter if and only if the dropped type has no drop glue // whatsoever. This can happen as a result of monomorphizing a drop of a diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index ab0c0665d51..02e3d90f4af 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -949,7 +949,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { let padding_size = offset - padding_cleared_until; let range = alloc_range(padding_start, padding_size); trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty); - alloc.write_uninit(range)?; + alloc.write_uninit(range); } padding_cleared_until = offset + size; } @@ -1239,7 +1239,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, if self.reset_provenance_and_padding { // We can't share this with above as above, we might be looking at read-only memory. let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0"); - alloc.clear_provenance()?; + alloc.clear_provenance(); // Also, mark this as containing data, not padding. 
self.add_data_range(mplace.ptr(), size); } @@ -1418,7 +1418,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let _trace = enter_trace_span!( M, "validate_operand", - "recursive={recursive}, reset_provenance_and_padding={reset_provenance_and_padding}, val={val:?}" + recursive, + reset_provenance_and_padding, + ?val, ); // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 2dc746754f8..2ae6655901b 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -1,26 +1,27 @@ use std::fmt::Write; use rustc_data_structures::intern::Interned; -use rustc_hir::def_id::CrateNum; +use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::DisambiguatedDefPathData; use rustc_middle::bug; use rustc_middle::ty::print::{PrettyPrinter, PrintError, Printer}; -use rustc_middle::ty::{self, GenericArg, GenericArgKind, Ty, TyCtxt}; +use rustc_middle::ty::{self, GenericArg, Ty, TyCtxt}; -struct AbsolutePathPrinter<'tcx> { +struct TypeNamePrinter<'tcx> { tcx: TyCtxt<'tcx>, path: String, } -impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { +impl<'tcx> Printer<'tcx> for TypeNamePrinter<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - // This is reachable (via `pretty_print_dyn_existential`) even though - // `<Self As PrettyPrinter>::should_print_region` returns false. See #144994. - Ok(()) + // FIXME: most regions have been erased by the time this code runs. + // Just printing `'_` is a bit hacky but gives mostly good results, and + // doing better is difficult. See `should_print_optional_region`. 
+ write!(self, "'_") } fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { @@ -75,26 +76,26 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { self.pretty_print_dyn_existential(predicates) } - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.path.push_str(self.tcx.crate_name(cnum).as_str()); Ok(()) } - fn path_qualified( + fn print_path_with_qualified( &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError> { - self.pretty_path_qualified(self_ty, trait_ref) + self.pretty_print_path_with_qualified(self_ty, trait_ref) } - fn path_append_impl( + fn print_path_with_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError> { - self.pretty_path_append_impl( + self.pretty_print_path_with_impl( |cx| { print_prefix(cx)?; @@ -107,7 +108,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { ) } - fn path_append( + fn print_path_with_simple( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, @@ -119,25 +120,59 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { Ok(()) } - fn path_generic_args( + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], ) -> Result<(), PrintError> { print_prefix(self)?; - let args = - args.iter().cloned().filter(|arg| !matches!(arg.kind(), GenericArgKind::Lifetime(_))); - if args.clone().next().is_some() { - self.generic_delimiters(|cx| cx.comma_sep(args)) + if !args.is_empty() { + self.generic_delimiters(|cx| cx.comma_sep(args.iter().copied())) } else { Ok(()) } } + + fn print_coroutine_with_kind( + &mut self, + def_id: DefId, + parent_args: &'tcx [GenericArg<'tcx>], + kind: Ty<'tcx>, + ) -> Result<(), PrintError> { + self.print_def_path(def_id, parent_args)?; + + let ty::Coroutine(_, args) = self.tcx.type_of(def_id).instantiate_identity().kind() else { + // Could be `ty::Error`. + return Ok(()); + }; + + let default_kind = args.as_coroutine().kind_ty(); + + match kind.to_opt_closure_kind() { + _ if kind == default_kind => { + // No need to mark the closure if it's the deduced coroutine kind. + } + Some(ty::ClosureKind::Fn) | None => { + // Should never happen. Just don't mark anything rather than panicking. + } + Some(ty::ClosureKind::FnMut) => self.path.push_str("::{{call_mut}}"), + Some(ty::ClosureKind::FnOnce) => self.path.push_str("::{{call_once}}"), + } + + Ok(()) + } } -impl<'tcx> PrettyPrinter<'tcx> for AbsolutePathPrinter<'tcx> { - fn should_print_region(&self, _region: ty::Region<'_>) -> bool { - false +impl<'tcx> PrettyPrinter<'tcx> for TypeNamePrinter<'tcx> { + fn should_print_optional_region(&self, region: ty::Region<'_>) -> bool { + // Bound regions are always printed (as `'_`), which gives some idea that they are special, + // even though the `for` is omitted by the pretty printer. + // E.g. `for<'a, 'b> fn(&'a u32, &'b u32)` is printed as "fn(&'_ u32, &'_ u32)". + match region.kind() { + ty::ReErased | ty::ReEarlyParam(_) => false, + ty::ReBound(..) 
=> true, + _ => unreachable!(), + } } fn generic_delimiters( @@ -159,7 +194,7 @@ impl<'tcx> PrettyPrinter<'tcx> for AbsolutePathPrinter<'tcx> { } } -impl Write for AbsolutePathPrinter<'_> { +impl Write for TypeNamePrinter<'_> { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.path.push_str(s); Ok(()) @@ -167,7 +202,7 @@ impl Write for AbsolutePathPrinter<'_> { } pub fn type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> String { - let mut p = AbsolutePathPrinter { tcx, path: String::new() }; + let mut p = TypeNamePrinter { tcx, path: String::new() }; p.print_type(ty).unwrap(); p.path } diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index 17204883fb0..0ac9e02508a 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -6,13 +6,14 @@ edition = "2024" [dependencies] # tidy-alphabetical-start arrayvec = { version = "0.7", default-features = false } -bitflags = "2.4.1" +bitflags.workspace = true either = "1.0" elsa = "1.11.0" ena = "0.14.3" indexmap = "2.4.0" jobserver_crate = { version = "0.1.28", package = "jobserver" } measureme = "12.0.1" +parking_lot = "0.12" rustc-hash = "2.0.0" rustc-stable-hash = { version = "0.1.0", features = ["nightly"] } rustc_arena = { path = "../rustc_arena" } @@ -25,8 +26,8 @@ rustc_thread_pool = { path = "../rustc_thread_pool" } smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dangle"] } stacker = "0.1.17" tempfile = "3.2" -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end [dependencies.hashbrown] @@ -34,9 +35,6 @@ version = "0.15.2" default-features = false features = ["nightly"] # for may_dangle -[dependencies.parking_lot] -version = "0.12" - [target.'cfg(windows)'.dependencies.windows] version = "0.61.0" features = [ diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 53178d09348..17da3ea83c8 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -77,6 +77,7 @@ pub mod thinvec; pub mod thousands; pub mod transitive_relation; pub mod unhash; +pub mod union_find; pub mod unord; pub mod vec_cache; pub mod work_queue; diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs b/compiler/rustc_data_structures/src/union_find.rs index a826a953fa6..ef73cd7ab40 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs +++ b/compiler/rustc_data_structures/src/union_find.rs @@ -9,7 +9,7 @@ mod tests; /// Simple implementation of a union-find data structure, i.e. a disjoint-set /// forest. #[derive(Debug)] -pub(crate) struct UnionFind<Key: Idx> { +pub struct UnionFind<Key: Idx> { table: IndexVec<Key, UnionFindEntry<Key>>, } @@ -28,7 +28,7 @@ struct UnionFindEntry<Key> { impl<Key: Idx> UnionFind<Key> { /// Creates a new disjoint-set forest containing the keys `0..num_keys`. /// Initially, every key is part of its own one-element set. - pub(crate) fn new(num_keys: usize) -> Self { + pub fn new(num_keys: usize) -> Self { // Initially, every key is the root of its own set, so its parent is itself. Self { table: IndexVec::from_fn_n(|key| UnionFindEntry { parent: key, rank: 0 }, num_keys) } } @@ -38,7 +38,7 @@ impl<Key: Idx> UnionFind<Key> { /// /// Also updates internal data structures to make subsequent `find` /// operations faster. 
- pub(crate) fn find(&mut self, key: Key) -> Key { + pub fn find(&mut self, key: Key) -> Key { // Loop until we find a key that is its own parent. let mut curr = key; while let parent = self.table[curr].parent @@ -60,7 +60,7 @@ impl<Key: Idx> UnionFind<Key> { /// Merges the set containing `a` and the set containing `b` into one set. /// /// Returns the common root of both keys, after the merge. - pub(crate) fn unify(&mut self, a: Key, b: Key) -> Key { + pub fn unify(&mut self, a: Key, b: Key) -> Key { let mut a = self.find(a); let mut b = self.find(b); @@ -90,7 +90,7 @@ impl<Key: Idx> UnionFind<Key> { /// Takes a "snapshot" of the current state of this disjoint-set forest, in /// the form of a vector that directly maps each key to its current root. - pub(crate) fn snapshot(&mut self) -> IndexVec<Key, Key> { + pub fn snapshot(&mut self) -> IndexVec<Key, Key> { self.table.indices().map(|key| self.find(key)).collect() } } diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs b/compiler/rustc_data_structures/src/union_find/tests.rs index 34a4e4f8e6e..34a4e4f8e6e 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs +++ b/compiler/rustc_data_structures/src/union_find/tests.rs diff --git a/compiler/rustc_driver_impl/Cargo.toml b/compiler/rustc_driver_impl/Cargo.toml index ae1dbd2cf51..7e9af054aff 100644 --- a/compiler/rustc_driver_impl/Cargo.toml +++ b/compiler/rustc_driver_impl/Cargo.toml @@ -51,7 +51,7 @@ rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_ty_utils = { path = "../rustc_ty_utils" } serde_json = "1.0.59" shlex = "1.0" -tracing = { version = "0.1.35" } +tracing.workspace = true # tidy-alphabetical-end [target.'cfg(all(unix, any(target_env = "gnu", target_os = "macos")))'.dependencies] diff --git a/compiler/rustc_error_codes/src/error_codes/E0458.md b/compiler/rustc_error_codes/src/error_codes/E0458.md index 1b280cba44f..651bc378879 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0458.md +++ b/compiler/rustc_error_codes/src/error_codes/E0458.md @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler. + An unknown "kind" was specified for a link attribute. Erroneous code example: -```compile_fail,E0458 +```ignore (no longer emitted) #[link(kind = "wonderful_unicorn")] extern "C" {} // error: unknown kind: `wonderful_unicorn` ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0518.md b/compiler/rustc_error_codes/src/error_codes/E0518.md index f04329bc4e6..87dc231578a 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0518.md +++ b/compiler/rustc_error_codes/src/error_codes/E0518.md @@ -1,9 +1,11 @@ +#### Note: this error code is no longer emitted by the compiler. + An `#[inline(..)]` attribute was incorrectly placed on something other than a function or method. Example of erroneous code: -```compile_fail,E0518 +```ignore (no longer emitted) #[inline(always)] struct Foo; diff --git a/compiler/rustc_error_codes/src/error_codes/E0578.md b/compiler/rustc_error_codes/src/error_codes/E0578.md index fca89757287..78fabe855bb 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0578.md +++ b/compiler/rustc_error_codes/src/error_codes/E0578.md @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler. + A module cannot be found and therefore, the visibility cannot be determined. Erroneous code example: -```compile_fail,E0578,edition2018 +```ignore (no longer emitted) foo!(); pub (in ::Sea) struct Shark; // error! 
diff --git a/compiler/rustc_error_codes/src/error_codes/E0701.md b/compiler/rustc_error_codes/src/error_codes/E0701.md index 4965e643105..e1be0e915f4 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0701.md +++ b/compiler/rustc_error_codes/src/error_codes/E0701.md @@ -1,9 +1,11 @@ +#### Note: this error code is no longer emitted by the compiler. + This error indicates that a `#[non_exhaustive]` attribute was incorrectly placed on something other than a struct or enum. Erroneous code example: -```compile_fail,E0701 +```ignore (no longer emitted) #[non_exhaustive] trait Foo { } ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0739.md b/compiler/rustc_error_codes/src/error_codes/E0739.md index 406d3d52779..5403405ca9d 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0739.md +++ b/compiler/rustc_error_codes/src/error_codes/E0739.md @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler. + `#[track_caller]` must be applied to a function Erroneous code example: -```compile_fail,E0739 +```ignore (no longer emitted) #[track_caller] struct Bar { a: u8, diff --git a/compiler/rustc_error_codes/src/error_codes/E0755.md b/compiler/rustc_error_codes/src/error_codes/E0755.md index b67f078c78e..bd93626a8db 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0755.md +++ b/compiler/rustc_error_codes/src/error_codes/E0755.md @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler. + The `ffi_pure` attribute was used on a non-foreign function. Erroneous code example: -```compile_fail,E0755 +```ignore (no longer emitted) #![feature(ffi_pure)] #[unsafe(ffi_pure)] // error! diff --git a/compiler/rustc_error_codes/src/error_codes/E0756.md b/compiler/rustc_error_codes/src/error_codes/E0756.md index aadde038d12..daafc2a5ac0 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0756.md +++ b/compiler/rustc_error_codes/src/error_codes/E0756.md @@ -1,9 +1,11 @@ +#### Note: this error code is no longer emitted by the compiler. + The `ffi_const` attribute was used on something other than a foreign function declaration. Erroneous code example: -```compile_fail,E0756 +```ignore (no longer emitted) #![feature(ffi_const)] #[unsafe(ffi_const)] // error! diff --git a/compiler/rustc_error_codes/src/error_codes/E0788.md b/compiler/rustc_error_codes/src/error_codes/E0788.md index ba138aed2d1..1afa961f9b7 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0788.md +++ b/compiler/rustc_error_codes/src/error_codes/E0788.md @@ -1,3 +1,5 @@ +#### Note: this error code is no longer emitted by the compiler. + A `#[coverage(off|on)]` attribute was found in a position where it is not allowed. @@ -10,7 +12,7 @@ Coverage attributes can be applied to: Example of erroneous code: -```compile_fail,E0788 +```ignore (no longer emitted) unsafe extern "C" { #[coverage(off)] fn foreign_fn(); diff --git a/compiler/rustc_error_codes/src/error_codes/E0793.md b/compiler/rustc_error_codes/src/error_codes/E0793.md index ccd1b43bd19..2722dac104b 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0793.md +++ b/compiler/rustc_error_codes/src/error_codes/E0793.md @@ -1,4 +1,9 @@ -An unaligned reference to a field of a [packed] struct got created. +An unaligned reference to a field of a [packed] `struct` or `union` was created. + +The `#[repr(packed)]` attribute removes padding between fields, which can +cause fields to be stored at unaligned memory addresses. 
Creating references +to such fields violates Rust's memory safety guarantees and can lead to +undefined behavior in optimized code. Erroneous code example: @@ -45,9 +50,36 @@ unsafe { // For formatting, we can create a copy to avoid the direct reference. let copy = foo.field1; println!("{}", copy); + // Creating a copy can be written in a single line with curly braces. // (This is equivalent to the two lines above.) println!("{}", { foo.field1 }); + + // A reference to a field that will always be sufficiently aligned is safe: + println!("{}", foo.field2); +} +``` + +### Unions + +Although creating a reference to a `union` field is `unsafe`, this error +will still be triggered if the referenced field is not sufficiently +aligned. Use `addr_of!` and raw pointers in the same way as for struct fields. + +```compile_fail,E0793 +#[repr(packed)] +pub union Foo { + field1: u64, + field2: u8, +} + +unsafe { + let foo = Foo { field1: 0 }; + // Accessing the field directly is fine. + let val = foo.field1; + + // A reference to a packed union field causes an error. + let val = &foo.field1; // ERROR } ``` diff --git a/compiler/rustc_error_messages/Cargo.toml b/compiler/rustc_error_messages/Cargo.toml index 0951859fa53..7d2dc20e136 100644 --- a/compiler/rustc_error_messages/Cargo.toml +++ b/compiler/rustc_error_messages/Cargo.toml @@ -7,15 +7,16 @@ edition = "2024" # tidy-alphabetical-start fluent-bundle = "0.16" fluent-syntax = "0.12" -icu_list = "1.2" -icu_locid = "1.2" -icu_provider_adapters = "1.2" +icu_list = { version = "2.0", default-features = false, features = ["alloc"] } +icu_locale = { version = "2.0", default-features = false } intl-memoizer = "0.5.1" +rustc_ast = { path = "../rustc_ast" } +rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_baked_icu_data = { path = "../rustc_baked_icu_data" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } -tracing = "0.1" +tracing.workspace = true unic-langid = { version = "0.9.0", features = ["macros"] } # tidy-alphabetical-end diff --git a/compiler/rustc_error_messages/src/diagnostic_impls.rs b/compiler/rustc_error_messages/src/diagnostic_impls.rs new file mode 100644 index 00000000000..3b664cce577 --- /dev/null +++ b/compiler/rustc_error_messages/src/diagnostic_impls.rs @@ -0,0 +1,205 @@ +use std::backtrace::Backtrace; +use std::borrow::Cow; +use std::fmt; +use std::num::ParseIntError; +use std::path::{Path, PathBuf}; +use std::process::ExitStatus; + +use rustc_ast as ast; +use rustc_ast_pretty::pprust; +use rustc_span::edition::Edition; + +use crate::{DiagArgValue, IntoDiagArg}; + +pub struct DiagArgFromDisplay<'a>(pub &'a dyn fmt::Display); + +impl IntoDiagArg for DiagArgFromDisplay<'_> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.0.to_string().into_diag_arg(path) + } +} + +impl<'a> From<&'a dyn fmt::Display> for DiagArgFromDisplay<'a> { + fn from(t: &'a dyn fmt::Display) -> Self { + DiagArgFromDisplay(t) + } +} + +impl<'a, T: fmt::Display> From<&'a T> for DiagArgFromDisplay<'a> { + fn from(t: &'a T) -> Self { + DiagArgFromDisplay(t) + } +} + +impl<'a, T: Clone + IntoDiagArg> IntoDiagArg for &'a T { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.clone().into_diag_arg(path) + } +} + +#[macro_export] +macro_rules! into_diag_arg_using_display { + ($( $ty:ty ),+ $(,)?) 
=> { + $( + impl $crate::IntoDiagArg for $ty { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> $crate::DiagArgValue { + self.to_string().into_diag_arg(path) + } + } + )+ + } +} + +macro_rules! into_diag_arg_for_number { + ($( $ty:ty ),+ $(,)?) => { + $( + impl $crate::IntoDiagArg for $ty { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> $crate::DiagArgValue { + // Convert to a string if it won't fit into `Number`. + #[allow(irrefutable_let_patterns)] + if let Ok(n) = TryInto::<i32>::try_into(self) { + $crate::DiagArgValue::Number(n) + } else { + self.to_string().into_diag_arg(path) + } + } + } + )+ + } +} + +into_diag_arg_using_display!( + ast::ParamKindOrd, + std::io::Error, + Box<dyn std::error::Error>, + std::num::NonZero<u32>, + Edition, + rustc_span::Ident, + rustc_span::MacroRulesNormalizedIdent, + ParseIntError, + ExitStatus, +); + +into_diag_arg_for_number!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize); + +impl IntoDiagArg for bool { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + if self { + DiagArgValue::Str(Cow::Borrowed("true")) + } else { + DiagArgValue::Str(Cow::Borrowed("false")) + } + } +} + +impl IntoDiagArg for char { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(format!("{self:?}"))) + } +} + +impl IntoDiagArg for Vec<char> { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::StrListSepByAnd( + self.into_iter().map(|c| Cow::Owned(format!("{c:?}"))).collect(), + ) + } +} + +impl IntoDiagArg for rustc_span::Symbol { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_ident_string().into_diag_arg(path) + } +} + +impl<'a> IntoDiagArg for &'a str { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } +} + +impl IntoDiagArg for String { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self)) + } +} + +impl<'a> IntoDiagArg for Cow<'a, str> { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.into_owned())) + } +} + +impl<'a> IntoDiagArg for &'a Path { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.display().to_string())) + } +} + +impl IntoDiagArg for PathBuf { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.display().to_string())) + } +} + +impl IntoDiagArg for ast::Expr { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(pprust::expr_to_string(&self))) + } +} + +impl IntoDiagArg for ast::Path { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(pprust::path_to_string(&self))) + } +} + +impl IntoDiagArg for ast::token::Token { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(pprust::token_to_string(&self)) + } +} + +impl IntoDiagArg for ast::token::TokenKind { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(pprust::token_kind_to_string(&self)) + } +} + +impl IntoDiagArg for std::ffi::CString { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + 
DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) + } +} + +impl IntoDiagArg for rustc_data_structures::small_c_str::SmallCStr { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) + } +} + +impl IntoDiagArg for ast::Visibility { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + let s = pprust::vis_to_string(&self); + let s = s.trim_end().to_string(); + DiagArgValue::Str(Cow::Owned(s)) + } +} + +impl IntoDiagArg for Backtrace { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::from(self.to_string())) + } +} + +impl IntoDiagArg for ast::util::parser::ExprPrecedence { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Number(self as i32) + } +} + +impl IntoDiagArg for ast::FloatTy { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.name_str())) + } +} diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs index 4b3ecad307f..7b7843f6cf3 100644 --- a/compiler/rustc_error_messages/src/lib.rs +++ b/compiler/rustc_error_messages/src/lib.rs @@ -15,7 +15,6 @@ use fluent_bundle::FluentResource; pub use fluent_bundle::types::FluentType; pub use fluent_bundle::{self, FluentArgs, FluentError, FluentValue}; use fluent_syntax::parser::ParserError; -use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker}; use intl_memoizer::concurrent::IntlLangMemoizer; use rustc_data_structures::sync::{DynSend, IntoDynSyncSend}; use rustc_macros::{Decodable, Encodable}; @@ -23,6 +22,9 @@ use rustc_span::Span; use tracing::{instrument, trace}; pub use unic_langid::{LanguageIdentifier, langid}; +mod diagnostic_impls; +pub use diagnostic_impls::DiagArgFromDisplay; + pub type FluentBundle = IntoDynSyncSend<fluent_bundle::bundle::FluentBundle<FluentResource, IntlLangMemoizer>>; @@ -512,8 +514,8 @@ impl From<Vec<Span>> for MultiSpan { } } -fn icu_locale_from_unic_langid(lang: LanguageIdentifier) -> Option<icu_locid::Locale> { - icu_locid::Locale::try_from_bytes(lang.to_string().as_bytes()).ok() +fn icu_locale_from_unic_langid(lang: LanguageIdentifier) -> Option<icu_locale::Locale> { + icu_locale::Locale::try_from_str(&lang.to_string()).ok() } pub fn fluent_value_from_str_list_sep_by_and(l: Vec<Cow<'_, str>>) -> FluentValue<'_> { @@ -565,21 +567,15 @@ pub fn fluent_value_from_str_list_sep_by_and(l: Vec<Cow<'_, str>>) -> FluentValu where Self: Sized, { - let baked_data_provider = rustc_baked_icu_data::baked_data_provider(); - let locale_fallbacker = - LocaleFallbacker::try_new_with_any_provider(&baked_data_provider) - .expect("Failed to create fallback provider"); - let data_provider = - LocaleFallbackProvider::new_with_fallbacker(baked_data_provider, locale_fallbacker); let locale = icu_locale_from_unic_langid(lang) .unwrap_or_else(|| rustc_baked_icu_data::supported_locales::EN); - let list_formatter = - icu_list::ListFormatter::try_new_and_with_length_with_any_provider( - &data_provider, - &locale.into(), - icu_list::ListLength::Wide, - ) - .expect("Failed to create list formatter"); + let list_formatter = icu_list::ListFormatter::try_new_and_unstable( + &rustc_baked_icu_data::BakedDataProvider, + locale.into(), + icu_list::options::ListFormatterOptions::default() + .with_length(icu_list::options::ListLength::Wide), + ) + .expect("Failed to create list 
formatter"); Ok(MemoizableListFormatter(list_formatter)) } @@ -589,3 +585,53 @@ pub fn fluent_value_from_str_list_sep_by_and(l: Vec<Cow<'_, str>>) -> FluentValu FluentValue::Custom(Box::new(FluentStrListSepByAnd(l))) } + +/// Simplified version of `FluentArg` that can implement `Encodable` and `Decodable`. Collection of +/// `DiagArg` are converted to `FluentArgs` (consuming the collection) at the start of diagnostic +/// emission. +pub type DiagArg<'iter> = (&'iter DiagArgName, &'iter DiagArgValue); + +/// Name of a diagnostic argument. +pub type DiagArgName = Cow<'static, str>; + +/// Simplified version of `FluentValue` that can implement `Encodable` and `Decodable`. Converted +/// to a `FluentValue` by the emitter to be used in diagnostic translation. +#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)] +pub enum DiagArgValue { + Str(Cow<'static, str>), + // This gets converted to a `FluentNumber`, which is an `f64`. An `i32` + // safely fits in an `f64`. Any integers bigger than that will be converted + // to strings in `into_diag_arg` and stored using the `Str` variant. + Number(i32), + StrListSepByAnd(Vec<Cow<'static, str>>), +} + +/// Converts a value of a type into a `DiagArg` (typically a field of an `Diag` struct). +/// Implemented as a custom trait rather than `From` so that it is implemented on the type being +/// converted rather than on `DiagArgValue`, which enables types from other `rustc_*` crates to +/// implement this. +pub trait IntoDiagArg { + /// Convert `Self` into a `DiagArgValue` suitable for rendering in a diagnostic. + /// + /// It takes a `path` where "long values" could be written to, if the `DiagArgValue` is too big + /// for displaying on the terminal. This path comes from the `Diag` itself. When rendering + /// values that come from `TyCtxt`, like `Ty<'_>`, they can use `TyCtxt::short_string`. If a + /// value has no shortening logic that could be used, the argument can be safely ignored. 
+ fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue; +} + +impl IntoDiagArg for DiagArgValue { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self + } +} + +impl From<DiagArgValue> for FluentValue<'static> { + fn from(val: DiagArgValue) -> Self { + match val { + DiagArgValue::Str(s) => From::from(s), + DiagArgValue::Number(n) => From::from(n), + DiagArgValue::StrListSepByAnd(l) => fluent_value_from_str_list_sep_by_and(l), + } + } +} diff --git a/compiler/rustc_errors/Cargo.toml b/compiler/rustc_errors/Cargo.toml index 3e8cf6207ae..f5853855673 100644 --- a/compiler/rustc_errors/Cargo.toml +++ b/compiler/rustc_errors/Cargo.toml @@ -9,26 +9,22 @@ annotate-snippets = "0.11" derive_setters = "0.1.6" rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } -rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_codes = { path = "../rustc_error_codes" } rustc_error_messages = { path = "../rustc_error_messages" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hashes = { path = "../rustc_hashes" } -rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } -rustc_target = { path = "../rustc_target" } -rustc_type_ir = { path = "../rustc_type_ir" } serde = { version = "1.0.125", features = ["derive"] } serde_json = "1.0.59" termcolor = "1.2.0" termize = "0.2" -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] diff --git a/compiler/rustc_errors/src/codes.rs b/compiler/rustc_errors/src/codes.rs index 947cf27ca79..787a8af99b1 100644 --- a/compiler/rustc_errors/src/codes.rs +++ b/compiler/rustc_errors/src/codes.rs @@ -20,6 +20,8 @@ impl fmt::Display for ErrCode { } } +rustc_error_messages::into_diag_arg_using_display!(ErrCode); + macro_rules! define_error_code_constants_and_diagnostics_table { ($($name:ident: $num:literal,)*) => ( $( diff --git a/compiler/rustc_errors/src/decorate_diag.rs b/compiler/rustc_errors/src/decorate_diag.rs new file mode 100644 index 00000000000..5aef26ccf97 --- /dev/null +++ b/compiler/rustc_errors/src/decorate_diag.rs @@ -0,0 +1,85 @@ +/// This module provides types and traits for buffering lints until later in compilation. +use rustc_ast::node_id::NodeId; +use rustc_data_structures::fx::FxIndexMap; +use rustc_error_messages::MultiSpan; +use rustc_lint_defs::{BuiltinLintDiag, Lint, LintId}; + +use crate::{DynSend, LintDiagnostic, LintDiagnosticBox}; + +/// We can't implement `LintDiagnostic` for `BuiltinLintDiag`, because decorating some of its +/// variants requires types we don't have yet. So, handle that case separately. 
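(The enum itself follows below.) A rough, self-contained sketch of the dispatch this type enables — hypothetical, heavily simplified types, not the compiler's — in which early lints either carry their own boxed decoration logic (mirroring the new `LintDiagnosticBox` entry point) or stay as plain `BuiltinLintDiag`-style data to be decorated later:

```rust
// Hypothetical, simplified types sketching the two-pronged dispatch: decorators with
// known logic are boxed, builtin variants stay as plain data matched on later.
struct Diag {
    notes: Vec<String>,
}

// Box-consuming entry point, analogous to the added `LintDiagnosticBox`:
// `self: Box<Self>` keeps the trait object-safe while still consuming the value.
trait LintDiagnostic {
    fn decorate_lint(self: Box<Self>, diag: &mut Diag);
}

enum BuiltinLintDiag {
    UnusedImport { name: String },
}

enum DecorateDiagCompat {
    Dynamic(Box<dyn LintDiagnostic>),
    Builtin(BuiltinLintDiag),
}

struct UnknownTool {
    tool: String,
}

impl LintDiagnostic for UnknownTool {
    fn decorate_lint(self: Box<Self>, diag: &mut Diag) {
        diag.notes.push(format!("unknown tool `{}`", self.tool));
    }
}

fn decorate(compat: DecorateDiagCompat, diag: &mut Diag) {
    match compat {
        // Early lints that carry their own decoration logic.
        DecorateDiagCompat::Dynamic(d) => d.decorate_lint(diag),
        // Builtin variants are decorated later, once the needed context exists.
        DecorateDiagCompat::Builtin(BuiltinLintDiag::UnusedImport { name }) => {
            diag.notes.push(format!("remove the unused import `{name}`"));
        }
    }
}

fn main() {
    let mut diag = Diag { notes: Vec::new() };
    decorate(
        DecorateDiagCompat::Dynamic(Box::new(UnknownTool { tool: "clippy".to_string() })),
        &mut diag,
    );
    decorate(
        DecorateDiagCompat::Builtin(BuiltinLintDiag::UnusedImport { name: "std::io".to_string() }),
        &mut diag,
    );
    println!("{:?}", diag.notes);
}
```

In the real code, the blanket `From` impl plus the negative `impl !LintDiagnostic for BuiltinLintDiag` is what lets both construction paths coexist without overlapping.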
+pub enum DecorateDiagCompat { + Dynamic(Box<dyn for<'a> LintDiagnosticBox<'a, ()> + DynSend + 'static>), + Builtin(BuiltinLintDiag), +} + +impl std::fmt::Debug for DecorateDiagCompat { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("DecorateDiagCompat").finish() + } +} + +impl !LintDiagnostic<'_, ()> for BuiltinLintDiag {} + +impl<D: for<'a> LintDiagnostic<'a, ()> + DynSend + 'static> From<D> for DecorateDiagCompat { + #[inline] + fn from(d: D) -> Self { + Self::Dynamic(Box::new(d)) + } +} + +impl From<BuiltinLintDiag> for DecorateDiagCompat { + #[inline] + fn from(b: BuiltinLintDiag) -> Self { + Self::Builtin(b) + } +} + +/// Lints that are buffered up early on in the `Session` before the +/// `LintLevels` is calculated. +#[derive(Debug)] +pub struct BufferedEarlyLint { + /// The span of code that we are linting on. + pub span: Option<MultiSpan>, + + /// The `NodeId` of the AST node that generated the lint. + pub node_id: NodeId, + + /// A lint Id that can be passed to + /// `rustc_lint::early::EarlyContextAndPass::check_id`. + pub lint_id: LintId, + + /// Customization of the `Diag<'_>` for the lint. + pub diagnostic: DecorateDiagCompat, +} + +#[derive(Default, Debug)] +pub struct LintBuffer { + pub map: FxIndexMap<NodeId, Vec<BufferedEarlyLint>>, +} + +impl LintBuffer { + pub fn add_early_lint(&mut self, early_lint: BufferedEarlyLint) { + self.map.entry(early_lint.node_id).or_default().push(early_lint); + } + + pub fn take(&mut self, id: NodeId) -> Vec<BufferedEarlyLint> { + // FIXME(#120456) - is `swap_remove` correct? + self.map.swap_remove(&id).unwrap_or_default() + } + + pub fn buffer_lint( + &mut self, + lint: &'static Lint, + node_id: NodeId, + span: impl Into<MultiSpan>, + decorate: impl Into<DecorateDiagCompat>, + ) { + self.add_early_lint(BufferedEarlyLint { + lint_id: LintId::of(lint), + node_id, + span: Some(span.into()), + diagnostic: decorate.into(), + }); + } +} diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index 98be37fd84b..96a4ed3218f 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -8,7 +8,7 @@ use std::path::PathBuf; use std::thread::panicking; use rustc_data_structures::fx::FxIndexMap; -use rustc_error_messages::{FluentValue, fluent_value_from_str_list_sep_by_and}; +use rustc_error_messages::{DiagArgName, DiagArgValue, IntoDiagArg}; use rustc_lint_defs::{Applicability, LintExpectationId}; use rustc_macros::{Decodable, Encodable}; use rustc_span::source_map::Spanned; @@ -22,26 +22,6 @@ use crate::{ Suggestions, }; -/// Simplified version of `FluentArg` that can implement `Encodable` and `Decodable`. Collection of -/// `DiagArg` are converted to `FluentArgs` (consuming the collection) at the start of diagnostic -/// emission. -pub type DiagArg<'iter> = (&'iter DiagArgName, &'iter DiagArgValue); - -/// Name of a diagnostic argument. -pub type DiagArgName = Cow<'static, str>; - -/// Simplified version of `FluentValue` that can implement `Encodable` and `Decodable`. Converted -/// to a `FluentValue` by the emitter to be used in diagnostic translation. -#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)] -pub enum DiagArgValue { - Str(Cow<'static, str>), - // This gets converted to a `FluentNumber`, which is an `f64`. An `i32` - // safely fits in an `f64`. Any integers bigger than that will be converted - // to strings in `into_diag_arg` and stored using the `Str` variant. 
- Number(i32), - StrListSepByAnd(Vec<Cow<'static, str>>), -} - pub type DiagArgMap = FxIndexMap<DiagArgName, DiagArgValue>; /// Trait for types that `Diag::emit` can return as a "guarantee" (or "proof") @@ -143,36 +123,6 @@ where } } -/// Converts a value of a type into a `DiagArg` (typically a field of an `Diag` struct). -/// Implemented as a custom trait rather than `From` so that it is implemented on the type being -/// converted rather than on `DiagArgValue`, which enables types from other `rustc_*` crates to -/// implement this. -pub trait IntoDiagArg { - /// Convert `Self` into a `DiagArgValue` suitable for rendering in a diagnostic. - /// - /// It takes a `path` where "long values" could be written to, if the `DiagArgValue` is too big - /// for displaying on the terminal. This path comes from the `Diag` itself. When rendering - /// values that come from `TyCtxt`, like `Ty<'_>`, they can use `TyCtxt::short_string`. If a - /// value has no shortening logic that could be used, the argument can be safely ignored. - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue; -} - -impl IntoDiagArg for DiagArgValue { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self - } -} - -impl From<DiagArgValue> for FluentValue<'static> { - fn from(val: DiagArgValue) -> Self { - match val { - DiagArgValue::Str(s) => From::from(s), - DiagArgValue::Number(n) => From::from(n), - DiagArgValue::StrListSepByAnd(l) => fluent_value_from_str_list_sep_by_and(l), - } - } -} - /// Trait implemented by error types. This should not be implemented manually. Instead, use /// `#[derive(Subdiagnostic)]` -- see [rustc_macros::Subdiagnostic]. #[rustc_diagnostic_item = "Subdiagnostic"] @@ -188,10 +138,20 @@ where /// `#[derive(LintDiagnostic)]` -- see [rustc_macros::LintDiagnostic]. #[rustc_diagnostic_item = "LintDiagnostic"] pub trait LintDiagnostic<'a, G: EmissionGuarantee> { - /// Decorate and emit a lint. + /// Decorate a lint with the information from this type. fn decorate_lint<'b>(self, diag: &'b mut Diag<'a, G>); } +pub trait LintDiagnosticBox<'a, G: EmissionGuarantee> { + fn decorate_lint_box<'b>(self: Box<Self>, diag: &'b mut Diag<'a, G>); +} + +impl<'a, G: EmissionGuarantee, D: LintDiagnostic<'a, G>> LintDiagnosticBox<'a, G> for D { + fn decorate_lint_box<'b>(self: Box<Self>, diag: &'b mut Diag<'a, G>) { + self.decorate_lint(diag); + } +} + #[derive(Clone, Debug, Encodable, Decodable)] pub(crate) struct DiagLocation { file: Cow<'static, str>, @@ -617,6 +577,29 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { self.level = Level::DelayedBug; } + /// Make emitting this diagnostic fatal + /// + /// Changes the level of this diagnostic to Fatal, and importantly also changes the emission guarantee. + /// This is sound for errors that would otherwise be printed, but now simply exit the process instead. + /// This function still gives an emission guarantee, the guarantee is now just that it exits fatally. + /// For delayed bugs this is different, since those are buffered. If we upgrade one to fatal, another + /// might now be ignored. + #[rustc_lint_diagnostics] + #[track_caller] + pub fn upgrade_to_fatal(mut self) -> Diag<'a, FatalAbort> { + assert!( + matches!(self.level, Level::Error), + "upgrade_to_fatal: cannot upgrade {:?} to Fatal: not an error", + self.level + ); + self.level = Level::Fatal; + + // Take is okay since we immediately rewrap it in another diagnostic. + // i.e. we do emit it despite defusing the original diagnostic's drop bomb. 
+ let diag = self.diag.take(); + Diag { dcx: self.dcx, diag, _marker: PhantomData } + } + with_fn! { with_span_label, /// Appends a labeled span to the diagnostic. /// @@ -1113,7 +1096,7 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { .map(|snippet| { debug_assert!( !(sp.is_empty() && snippet.is_empty()), - "Span must not be empty and have no suggestion" + "Span `{sp:?}` must not be empty and have no suggestion" ); Substitution { parts: vec![SubstitutionPart { snippet, span: sp }] } }) diff --git a/compiler/rustc_errors/src/diagnostic_impls.rs b/compiler/rustc_errors/src/diagnostic_impls.rs index eca5806fac5..435d16a8380 100644 --- a/compiler/rustc_errors/src/diagnostic_impls.rs +++ b/compiler/rustc_errors/src/diagnostic_impls.rs @@ -1,317 +1,22 @@ -use std::backtrace::Backtrace; use std::borrow::Cow; -use std::fmt; -use std::num::ParseIntError; -use std::path::{Path, PathBuf}; -use std::process::ExitStatus; use rustc_abi::TargetDataLayoutErrors; -use rustc_ast::util::parser::ExprPrecedence; -use rustc_ast_pretty::pprust; -use rustc_hir::RustcVersion; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::Subdiagnostic; -use rustc_span::edition::Edition; -use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol}; -use rustc_target::spec::{PanicStrategy, SplitDebuginfo, StackProtector, TargetTuple}; -use rustc_type_ir::{ClosureKind, FloatTy}; -use {rustc_ast as ast, rustc_hir as hir}; +use rustc_span::{Span, Symbol}; use crate::diagnostic::DiagLocation; use crate::{ - Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, ErrCode, IntoDiagArg, Level, - Subdiagnostic, fluent_generated as fluent, + Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic, + fluent_generated as fluent, }; -pub struct DiagArgFromDisplay<'a>(pub &'a dyn fmt::Display); - -impl IntoDiagArg for DiagArgFromDisplay<'_> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.0.to_string().into_diag_arg(path) - } -} - -impl<'a> From<&'a dyn fmt::Display> for DiagArgFromDisplay<'a> { - fn from(t: &'a dyn fmt::Display) -> Self { - DiagArgFromDisplay(t) - } -} - -impl<'a, T: fmt::Display> From<&'a T> for DiagArgFromDisplay<'a> { - fn from(t: &'a T) -> Self { - DiagArgFromDisplay(t) - } -} - -impl<'a, T: Clone + IntoDiagArg> IntoDiagArg for &'a T { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.clone().into_diag_arg(path) - } -} - -#[macro_export] -macro_rules! into_diag_arg_using_display { - ($( $ty:ty ),+ $(,)?) => { - $( - impl IntoDiagArg for $ty { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } - } - )+ - } -} - -macro_rules! into_diag_arg_for_number { - ($( $ty:ty ),+ $(,)?) => { - $( - impl IntoDiagArg for $ty { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - // Convert to a string if it won't fit into `Number`. 
- #[allow(irrefutable_let_patterns)] - if let Ok(n) = TryInto::<i32>::try_into(self) { - DiagArgValue::Number(n) - } else { - self.to_string().into_diag_arg(path) - } - } - } - )+ - } -} - -into_diag_arg_using_display!( - ast::ParamKindOrd, - std::io::Error, - Box<dyn std::error::Error>, - std::num::NonZero<u32>, - hir::Target, - Edition, - Ident, - MacroRulesNormalizedIdent, - ParseIntError, - StackProtector, - &TargetTuple, - SplitDebuginfo, - ExitStatus, - ErrCode, - rustc_abi::ExternAbi, -); - -impl IntoDiagArg for RustcVersion { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.to_string())) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::TraitRef<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::ExistentialTraitRef<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::UnevaluatedConst<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - format!("{self:?}").into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::FnSig<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - format!("{self:?}").into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner, T> IntoDiagArg for rustc_type_ir::Binder<I, T> -where - T: IntoDiagArg, -{ - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.skip_binder().into_diag_arg(path) - } -} - -into_diag_arg_for_number!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize); - -impl IntoDiagArg for bool { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - if self { - DiagArgValue::Str(Cow::Borrowed("true")) - } else { - DiagArgValue::Str(Cow::Borrowed("false")) - } - } -} - -impl IntoDiagArg for char { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(format!("{self:?}"))) - } -} - -impl IntoDiagArg for Vec<char> { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::StrListSepByAnd( - self.into_iter().map(|c| Cow::Owned(format!("{c:?}"))).collect(), - ) - } -} - -impl IntoDiagArg for Symbol { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_ident_string().into_diag_arg(path) - } -} - -impl<'a> IntoDiagArg for &'a str { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } -} - -impl IntoDiagArg for String { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self)) - } -} - -impl<'a> IntoDiagArg for Cow<'a, str> { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.into_owned())) - } -} - -impl<'a> IntoDiagArg for &'a Path { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.display().to_string())) - } -} - -impl IntoDiagArg for PathBuf { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.display().to_string())) - } -} - -impl IntoDiagArg 
for PanicStrategy { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.desc().to_string())) - } -} - -impl IntoDiagArg for hir::ConstContext { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(match self { - hir::ConstContext::ConstFn => "const_fn", - hir::ConstContext::Static(_) => "static", - hir::ConstContext::Const { .. } => "const", - })) - } -} - -impl IntoDiagArg for ast::Expr { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(pprust::expr_to_string(&self))) - } -} - -impl IntoDiagArg for ast::Path { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(pprust::path_to_string(&self))) - } -} - -impl IntoDiagArg for ast::token::Token { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(pprust::token_to_string(&self)) - } -} - -impl IntoDiagArg for ast::token::TokenKind { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(pprust::token_kind_to_string(&self)) - } -} - -impl IntoDiagArg for FloatTy { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.name_str())) - } -} - -impl IntoDiagArg for std::ffi::CString { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) - } -} - -impl IntoDiagArg for rustc_data_structures::small_c_str::SmallCStr { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) - } -} - -impl IntoDiagArg for ast::Visibility { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - let s = pprust::vis_to_string(&self); - let s = s.trim_end().to_string(); - DiagArgValue::Str(Cow::Owned(s)) - } -} - -impl IntoDiagArg for rustc_lint_defs::Level { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.to_cmd_flag())) - } -} - -impl<Id> IntoDiagArg for hir::def::Res<Id> { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.descr())) - } -} - impl IntoDiagArg for DiagLocation { fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { DiagArgValue::Str(Cow::from(self.to_string())) } } -impl IntoDiagArg for Backtrace { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::from(self.to_string())) - } -} - -impl IntoDiagArg for Level { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::from(self.to_string())) - } -} - -impl IntoDiagArg for ClosureKind { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(self.as_str().into()) - } -} - -impl IntoDiagArg for hir::def::Namespace { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.descr())) - } -} - -impl IntoDiagArg for ExprPrecedence { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Number(self as i32) - } -} - #[derive(Clone)] pub struct DiagSymbolList<S = Symbol>(Vec<S>); diff --git 
a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index 97c47fa9b9a..b94370e8e9b 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -17,7 +17,7 @@ use std::path::Path; use std::sync::Arc; use derive_setters::Setters; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::{DynSend, IntoDynSyncSend}; use rustc_error_messages::{FluentArgs, SpanLabel}; use rustc_lexer; @@ -1462,7 +1462,7 @@ impl HumanEmitter { max_line_num_len: usize, is_secondary: bool, is_cont: bool, - ) -> io::Result<()> { + ) -> io::Result<CodeWindowStatus> { let mut buffer = StyledBuffer::new(); if !msp.has_primary_spans() && !msp.has_span_labels() && is_secondary && !self.short_message @@ -1575,12 +1575,14 @@ impl HumanEmitter { } let mut annotated_files = FileWithAnnotatedLines::collect_annotations(self, args, msp); trace!("{annotated_files:#?}"); + let mut code_window_status = CodeWindowStatus::Open; // Make sure our primary file comes first let primary_span = msp.primary_span().unwrap_or_default(); let (Some(sm), false) = (self.sm.as_ref(), primary_span.is_dummy()) else { // If we don't have span information, emit and exit - return emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message); + return emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message) + .map(|_| code_window_status); }; let primary_lo = sm.lookup_char_pos(primary_span.lo()); if let Ok(pos) = @@ -1589,6 +1591,9 @@ impl HumanEmitter { annotated_files.swap(0, pos); } + // An end column separator should be emitted when a file with with a + // source, is followed by one without a source + let mut col_sep_before_no_show_source = false; let annotated_files_len = annotated_files.len(); // Print out the annotate source lines that correspond with the error for (file_idx, annotated_file) in annotated_files.into_iter().enumerate() { @@ -1599,6 +1604,26 @@ impl HumanEmitter { &annotated_file.file, ) { if !self.short_message { + // Add an end column separator when a file without a source + // comes after one with a source + // ╭▸ $DIR/deriving-meta-unknown-trait.rs:1:10 + // │ + // LL │ #[derive(Eqr)] + // │ ━━━ + // ╰╴ (<- It prints *this* line) + // ╭▸ $SRC_DIR/core/src/cmp.rs:356:0 + // │ + // ╰╴note: similarly named derive macro `Eq` defined here + if col_sep_before_no_show_source { + let buffer_msg_line_offset = buffer.num_lines(); + self.draw_col_separator_end( + &mut buffer, + buffer_msg_line_offset, + max_line_num_len + 1, + ); + } + col_sep_before_no_show_source = false; + // We'll just print an unannotated message. 
for (annotation_id, line) in annotated_file.lines.iter().enumerate() { let mut annotations = line.annotations.clone(); @@ -1639,29 +1664,42 @@ impl HumanEmitter { } line_idx += 1; } - for (label, is_primary) in labels.into_iter() { + if is_cont + && file_idx == annotated_files_len - 1 + && annotation_id == annotated_file.lines.len() - 1 + && !labels.is_empty() + { + code_window_status = CodeWindowStatus::Closed; + } + let labels_len = labels.len(); + for (label_idx, (label, is_primary)) in labels.into_iter().enumerate() { let style = if is_primary { Style::LabelPrimary } else { Style::LabelSecondary }; - let pipe = self.col_separator(); - buffer.prepend(line_idx, &format!(" {pipe}"), Style::LineNumber); - for _ in 0..max_line_num_len { - buffer.prepend(line_idx, " ", Style::NoStyle); - } + self.draw_col_separator_no_space( + &mut buffer, + line_idx, + max_line_num_len + 1, + ); line_idx += 1; - let chr = self.note_separator(); - buffer.append(line_idx, &format!(" {chr} note: "), style); - for _ in 0..max_line_num_len { - buffer.prepend(line_idx, " ", Style::NoStyle); - } + self.draw_note_separator( + &mut buffer, + line_idx, + max_line_num_len + 1, + label_idx != labels_len - 1, + ); + buffer.append(line_idx, "note", Style::MainHeaderMsg); + buffer.append(line_idx, ": ", Style::NoStyle); buffer.append(line_idx, label, style); line_idx += 1; } } } continue; + } else { + col_sep_before_no_show_source = true; } // print out the span location and spacer before we print the annotated source @@ -1853,7 +1891,7 @@ impl HumanEmitter { && line_idx + 1 == annotated_file.lines.len(), ); - let mut to_add = FxHashMap::default(); + let mut to_add = FxIndexMap::default(); for (depth, style) in depths { // FIXME(#120456) - is `swap_remove` correct? @@ -1976,7 +2014,7 @@ impl HumanEmitter { // final step: take our styled buffer, render it, then output it emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?; - Ok(()) + Ok(code_window_status) } fn column_width(&self, code_offset: usize) -> usize { @@ -2491,7 +2529,7 @@ impl HumanEmitter { !children.is_empty() || suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden), ) { - Ok(()) => { + Ok(code_window_status) => { if !children.is_empty() || suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden) { @@ -2502,7 +2540,7 @@ impl HumanEmitter { { // We'll continue the vertical bar to point into the next note. self.draw_col_separator_no_space(&mut buffer, 0, max_line_num_len + 1); - } else { + } else if matches!(code_window_status, CodeWindowStatus::Open) { // We'll close the vertical bar to visually end the code window. 
self.draw_col_separator_end(&mut buffer, 0, max_line_num_len + 1); } @@ -2829,10 +2867,11 @@ impl HumanEmitter { } } - fn note_separator(&self) -> char { + fn note_separator(&self, is_cont: bool) -> &'static str { match self.theme { - OutputTheme::Ascii => '=', - OutputTheme::Unicode => '╰', + OutputTheme::Ascii => "= ", + OutputTheme::Unicode if is_cont => "├ ", + OutputTheme::Unicode => "╰ ", } } @@ -2945,11 +2984,7 @@ impl HumanEmitter { col: usize, is_cont: bool, ) { - let chr = match self.theme { - OutputTheme::Ascii => "= ", - OutputTheme::Unicode if is_cont => "├ ", - OutputTheme::Unicode => "╰ ", - }; + let chr = self.note_separator(is_cont); buffer.puts(line, col, chr, Style::LineNumber); } @@ -3050,6 +3085,12 @@ enum DisplaySuggestion { Add, } +#[derive(Clone, Copy, Debug)] +enum CodeWindowStatus { + Closed, + Open, +} + impl FileWithAnnotatedLines { /// Preprocess all the annotations so that they are grouped by file and by line number /// This helps us quickly iterate over the whole message (including secondary file spans) diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index 2534cddf105..71fc54f0d33 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -40,13 +40,13 @@ use std::{fmt, panic}; use Level::*; pub use codes::*; +pub use decorate_diag::{BufferedEarlyLint, DecorateDiagCompat, LintBuffer}; pub use diagnostic::{ - BugAbort, Diag, DiagArg, DiagArgMap, DiagArgName, DiagArgValue, DiagInner, DiagStyledString, - Diagnostic, EmissionGuarantee, FatalAbort, IntoDiagArg, LintDiagnostic, StringPart, Subdiag, - Subdiagnostic, + BugAbort, Diag, DiagArgMap, DiagInner, DiagStyledString, Diagnostic, EmissionGuarantee, + FatalAbort, LintDiagnostic, LintDiagnosticBox, StringPart, Subdiag, Subdiagnostic, }; pub use diagnostic_impls::{ - DiagArgFromDisplay, DiagSymbolList, ElidedLifetimeInPathSubdiag, ExpectedLifetimeParameter, + DiagSymbolList, ElidedLifetimeInPathSubdiag, ExpectedLifetimeParameter, IndicateAnonymousLifetime, SingleLabelManySpans, }; pub use emitter::ColorConfig; @@ -56,11 +56,11 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::sync::{DynSend, Lock}; pub use rustc_error_messages::{ - DiagMessage, FluentBundle, LanguageIdentifier, LazyFallbackBundle, MultiSpan, SpanLabel, - SubdiagMessage, fallback_fluent_bundle, fluent_bundle, + DiagArg, DiagArgFromDisplay, DiagArgName, DiagArgValue, DiagMessage, FluentBundle, IntoDiagArg, + LanguageIdentifier, LazyFallbackBundle, MultiSpan, SpanLabel, SubdiagMessage, + fallback_fluent_bundle, fluent_bundle, into_diag_arg_using_display, }; use rustc_hashes::Hash128; -use rustc_hir::HirId; pub use rustc_lint_defs::{Applicability, listify, pluralize}; use rustc_lint_defs::{Lint, LintExpectationId}; use rustc_macros::{Decodable, Encodable}; @@ -80,6 +80,7 @@ use crate::timings::TimingRecord; pub mod annotate_snippet_emitter_writer; pub mod codes; +mod decorate_diag; mod diagnostic; mod diagnostic_impls; pub mod emitter; @@ -108,13 +109,14 @@ rustc_data_structures::static_assert_size!(PResult<'_, bool>, 24); /// Used to avoid depending on `rustc_middle` in `rustc_attr_parsing`. /// Always the `TyCtxt`. 
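(The trait definition continues below.) A standalone sketch — hypothetical names, with plain strings and a bare `u32` in place of the real `Lint`, `MultiSpan`, and node-id types — of what the new associated `Id` type buys: each implementer names its own identifier type, so the trait itself never has to mention `HirId` or any other crate-specific id:

```rust
// Hypothetical, simplified sketch of the associated-id pattern; not the real trait.
trait LintEmitter: Copy {
    type Id: Copy;

    fn emit_node_span_lint(self, lint: &'static str, id: Self::Id, msg: &str);
}

#[derive(Clone, Copy)]
struct EarlyEmitter;

impl LintEmitter for EarlyEmitter {
    // An AST-side implementer can use a plain numeric node id...
    type Id = u32;

    fn emit_node_span_lint(self, lint: &'static str, id: Self::Id, msg: &str) {
        println!("[{lint}] node {id}: {msg}");
    }
}

fn main() {
    // ...while a HIR-side implementer would pick its own `Id` type instead.
    EarlyEmitter.emit_node_span_lint("unused_attributes", 7, "attribute has no effect");
}
```

The extra `DynSend + 'static` bound on the decorator matches the boxed form that `DecorateDiagCompat::Dynamic` stores when the lint is buffered.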
pub trait LintEmitter: Copy { + type Id: Copy; #[track_caller] fn emit_node_span_lint( self, lint: &'static Lint, - hir_id: HirId, + hir_id: Self::Id, span: impl Into<MultiSpan>, - decorator: impl for<'a> LintDiagnostic<'a, ()>, + decorator: impl for<'a> LintDiagnostic<'a, ()> + DynSend + 'static, ); } @@ -1999,6 +2001,12 @@ impl Level { } } +impl IntoDiagArg for Level { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::from(self.to_string())) + } +} + // FIXME(eddyb) this doesn't belong here AFAICT, should be moved to callsite. pub fn elided_lifetime_in_path_suggestion( source_map: &SourceMap, diff --git a/compiler/rustc_expand/Cargo.toml b/compiler/rustc_expand/Cargo.toml index f897833d85c..9bb7143af51 100644 --- a/compiler/rustc_expand/Cargo.toml +++ b/compiler/rustc_expand/Cargo.toml @@ -29,6 +29,6 @@ rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_expand/messages.ftl b/compiler/rustc_expand/messages.ftl index 1f8f3be6809..61ba716d082 100644 --- a/compiler/rustc_expand/messages.ftl +++ b/compiler/rustc_expand/messages.ftl @@ -70,7 +70,7 @@ expand_invalid_fragment_specifier = invalid fragment specifier `{$fragment}` .help = {$help} -expand_macro_args_bad_delim = macro attribute argument matchers require parentheses +expand_macro_args_bad_delim = `{$rule_kw}` rule argument matchers require parentheses expand_macro_args_bad_delim_sugg = the delimiters should be `(` and `)` expand_macro_body_stability = diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 7da3bf27eb5..8ff21509f4a 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -13,12 +13,13 @@ use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_data_structures::sync; -use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, PResult}; +use rustc_errors::{BufferedEarlyLint, DiagCtxtHandle, ErrorGuaranteed, PResult}; use rustc_feature::Features; use rustc_hir as hir; use rustc_hir::attrs::{AttributeKind, CfgEntry, Deprecation}; +use rustc_hir::def::MacroKinds; use rustc_hir::{Stability, find_attr}; -use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools}; +use rustc_lint_defs::RegisteredTools; use rustc_parse::MACRO_ARGUMENTS; use rustc_parse::parser::{ForceCollect, Parser}; use rustc_session::config::CollapseMacroDebuginfo; @@ -718,6 +719,9 @@ impl MacResult for DummyResult { /// A syntax extension kind. #[derive(Clone)] pub enum SyntaxExtensionKind { + /// A `macro_rules!` macro that can work as any `MacroKind` + MacroRules(Arc<crate::MacroRulesMacroExpander>), + /// A token-based function-like macro. Bang( /// An expander with signature TokenStream -> TokenStream. @@ -772,9 +776,39 @@ pub enum SyntaxExtensionKind { ), /// A glob delegation. + /// + /// This is for delegated function implementations, and has nothing to do with glob imports. 
GlobDelegation(Arc<dyn GlobDelegationExpander + sync::DynSync + sync::DynSend>), } +impl SyntaxExtensionKind { + /// Returns `Some(expander)` for a macro usable as a `LegacyBang`; otherwise returns `None` + /// + /// This includes a `MacroRules` with function-like rules. + pub fn as_legacy_bang(&self) -> Option<&(dyn TTMacroExpander + sync::DynSync + sync::DynSend)> { + match self { + SyntaxExtensionKind::LegacyBang(exp) => Some(exp.as_ref()), + SyntaxExtensionKind::MacroRules(exp) if exp.kinds().contains(MacroKinds::BANG) => { + Some(exp.as_ref()) + } + _ => None, + } + } + + /// Returns `Some(expander)` for a macro usable as an `Attr`; otherwise returns `None` + /// + /// This includes a `MacroRules` with `attr` rules. + pub fn as_attr(&self) -> Option<&(dyn AttrProcMacro + sync::DynSync + sync::DynSend)> { + match self { + SyntaxExtensionKind::Attr(exp) => Some(exp.as_ref()), + SyntaxExtensionKind::MacroRules(exp) if exp.kinds().contains(MacroKinds::ATTR) => { + Some(exp.as_ref()) + } + _ => None, + } + } +} + /// A struct representing a macro definition in "lowered" form ready for expansion. pub struct SyntaxExtension { /// A syntax extension kind. @@ -804,18 +838,19 @@ pub struct SyntaxExtension { } impl SyntaxExtension { - /// Returns which kind of macro calls this syntax extension. - pub fn macro_kind(&self) -> MacroKind { + /// Returns which kinds of macro call this syntax extension. + pub fn macro_kinds(&self) -> MacroKinds { match self.kind { SyntaxExtensionKind::Bang(..) | SyntaxExtensionKind::LegacyBang(..) - | SyntaxExtensionKind::GlobDelegation(..) => MacroKind::Bang, + | SyntaxExtensionKind::GlobDelegation(..) => MacroKinds::BANG, SyntaxExtensionKind::Attr(..) | SyntaxExtensionKind::LegacyAttr(..) - | SyntaxExtensionKind::NonMacroAttr => MacroKind::Attr, + | SyntaxExtensionKind::NonMacroAttr => MacroKinds::ATTR, SyntaxExtensionKind::Derive(..) | SyntaxExtensionKind::LegacyDerive(..) 
=> { - MacroKind::Derive + MacroKinds::DERIVE } + SyntaxExtensionKind::MacroRules(ref m) => m.kinds(), } } @@ -1024,11 +1059,12 @@ impl SyntaxExtension { parent: LocalExpnId, call_site: Span, descr: Symbol, + kind: MacroKind, macro_def_id: Option<DefId>, parent_module: Option<DefId>, ) -> ExpnData { ExpnData::new( - ExpnKind::Macro(self.macro_kind(), descr), + ExpnKind::Macro(kind, descr), parent.to_expn_id(), call_site, self.span, diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 83a8d601afe..15419ab7423 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -11,8 +11,10 @@ use rustc_ast::{ NodeId, NormalAttr, }; use rustc_attr_parsing as attr; +use rustc_attr_parsing::validate_attr::deny_builtin_meta_unsafety; use rustc_attr_parsing::{ AttributeParser, CFG_TEMPLATE, EvalConfigResult, ShouldEmit, eval_config_entry, parse_cfg_attr, + validate_attr, }; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_feature::{ @@ -20,8 +22,6 @@ use rustc_feature::{ REMOVED_LANG_FEATURES, UNSTABLE_LANG_FEATURES, }; use rustc_lint_defs::BuiltinLintDiag; -use rustc_parse::validate_attr; -use rustc_parse::validate_attr::deny_builtin_meta_unsafety; use rustc_session::Session; use rustc_session::parse::feature_err; use rustc_span::{STDLIB_STABLE_CRATES, Span, Symbol, sym}; @@ -415,16 +415,6 @@ impl<'a> StripUnconfigured<'a> { node: NodeId, emit_errors: ShouldEmit, ) -> EvalConfigResult { - // We need to run this to do basic validation of the attribute, such as that lits are valid, etc - // FIXME(jdonszelmann) this should not be necessary in the future - match validate_attr::parse_meta(&self.sess.psess, attr) { - Ok(_) => {} - Err(err) => { - err.emit(); - return EvalConfigResult::True; - } - } - // Unsafety check needs to be done explicitly here because this attribute will be removed before the normal check deny_builtin_meta_unsafety( self.sess.dcx(), diff --git a/compiler/rustc_expand/src/errors.rs b/compiler/rustc_expand/src/errors.rs index e58269991fc..ba9d76970f0 100644 --- a/compiler/rustc_expand/src/errors.rs +++ b/compiler/rustc_expand/src/errors.rs @@ -490,6 +490,7 @@ pub(crate) struct MacroArgsBadDelim { pub span: Span, #[subdiagnostic] pub sugg: MacroArgsBadDelimSugg, + pub rule_kw: Symbol, } #[derive(Subdiagnostic)] diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index e7ae4416968..dbb4a9de9e0 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; -use std::{iter, mem}; +use std::{iter, mem, slice}; use rustc_ast::mut_visit::*; use rustc_ast::tokenstream::TokenStream; @@ -12,15 +12,17 @@ use rustc_ast::{ MetaItemKind, ModKind, NodeId, PatKind, StmtKind, TyKind, token, }; use rustc_ast_pretty::pprust; -use rustc_attr_parsing::{EvalConfigResult, ShouldEmit}; +use rustc_attr_parsing::{AttributeParser, Early, EvalConfigResult, ShouldEmit, validate_attr}; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::PResult; use rustc_feature::Features; +use rustc_hir::Target; +use rustc_hir::def::MacroKinds; use rustc_parse::parser::{ AttemptLocalParseRecovery, CommaRecoveryMode, ForceCollect, Parser, RecoverColon, RecoverComma, token_descr, }; -use rustc_parse::validate_attr; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{UNUSED_ATTRIBUTES, 
UNUSED_DOC_COMMENTS}; use rustc_session::parse::feature_err; @@ -565,6 +567,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { .map(|DeriveResolution { path, item, exts: _, is_const }| { // FIXME: Consider using the derive resolutions (`_exts`) // instead of enqueuing the derives to be resolved again later. + // Note that this can result in duplicate diagnostics. let expn_id = LocalExpnId::fresh_empty(); derive_invocations.push(( Invocation { @@ -736,8 +739,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let (fragment_kind, span) = (invoc.fragment_kind, invoc.span()); ExpandResult::Ready(match invoc.kind { - InvocationKind::Bang { mac, span } => match ext { - SyntaxExtensionKind::Bang(expander) => { + InvocationKind::Bang { mac, span } => { + if let SyntaxExtensionKind::Bang(expander) = ext { match expander.expand(self.cx, span, mac.args.tokens.clone()) { Ok(tok_result) => { let fragment = @@ -755,8 +758,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)), } - } - SyntaxExtensionKind::LegacyBang(expander) => { + } else if let Some(expander) = ext.as_legacy_bang() { let tok_result = match expander.expand(self.cx, span, mac.args.tokens.clone()) { ExpandResult::Ready(tok_result) => tok_result, ExpandResult::Retry(_) => { @@ -776,11 +778,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let guar = self.error_wrong_fragment_kind(fragment_kind, &mac, span); fragment_kind.dummy(span, guar) } + } else { + unreachable!(); } - _ => unreachable!(), - }, - InvocationKind::Attr { attr, pos, mut item, derives } => match ext { - SyntaxExtensionKind::Attr(expander) => { + } + InvocationKind::Attr { attr, pos, mut item, derives } => { + if let Some(expander) = ext.as_attr() { self.gate_proc_macro_input(&item); self.gate_proc_macro_attr_item(span, &item); let tokens = match &item { @@ -799,7 +802,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ItemKind::Mod( _, _, - ModKind::Unloaded | ModKind::Loaded(_, Inline::No, _, _), + ModKind::Unloaded + | ModKind::Loaded(_, Inline::No { .. }, _), ) ) => { @@ -835,8 +839,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)), } - } - SyntaxExtensionKind::LegacyAttr(expander) => { + } else if let SyntaxExtensionKind::LegacyAttr(expander) = ext { match validate_attr::parse_meta(&self.cx.sess.psess, &attr) { Ok(meta) => { let item_clone = macro_stats.then(|| item.clone()); @@ -878,15 +881,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> { fragment_kind.expect_from_annotatables(iter::once(item)) } } - } - SyntaxExtensionKind::NonMacroAttr => { + } else if let SyntaxExtensionKind::NonMacroAttr = ext { // `-Zmacro-stats` ignores these because they don't do any real expansion. 
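The rewritten dispatch above stops matching `SyntaxExtensionKind` variants directly for bang and attribute invocations and instead asks the extension whether it can act as that kind (`as_legacy_bang`, `as_attr`), because a single `macro_rules!` extension can now carry rules of several kinds. A minimal standalone sketch of that capability-accessor pattern, using simplified stand-ins rather than the real rustc types:

```rust
// Simplified stand-ins for SyntaxExtensionKind / MacroKinds; the real types
// carry trait objects and live in rustc_expand and rustc_hir.
#[derive(Clone, Copy)]
struct Kinds(u8);

impl Kinds {
    const BANG: Kinds = Kinds(1 << 0);
    const ATTR: Kinds = Kinds(1 << 1);

    fn contains(self, other: Kinds) -> bool {
        self.0 & other.0 == other.0
    }
}

#[allow(dead_code)]
enum ExtensionKind {
    LegacyBang,
    Attr,
    // One `macro_rules!` definition can now hold bang, attr, and derive rules.
    MacroRules(Kinds),
}

impl ExtensionKind {
    // Mirrors `as_legacy_bang`: anything usable for a `foo!(...)` invocation.
    fn as_legacy_bang(&self) -> Option<&'static str> {
        match self {
            ExtensionKind::LegacyBang => Some("legacy bang expander"),
            ExtensionKind::MacroRules(kinds) if kinds.contains(Kinds::BANG) => {
                Some("macro_rules! bang rules")
            }
            _ => None,
        }
    }
}

fn main() {
    let ext = ExtensionKind::MacroRules(Kinds(Kinds::BANG.0 | Kinds::ATTR.0));
    // The expander asks for a capability instead of matching on the variant.
    match ext.as_legacy_bang() {
        Some(expander) => println!("expand with {expander}"),
        None => unreachable!("resolution only hands us bang-capable macros here"),
    }
}
```

The `as_attr` accessor follows the same shape, which is why the attribute branch above can treat `Attr` proc macros and attr-capable `macro_rules!` macros uniformly.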
self.cx.expanded_inert_attrs.mark(&attr); item.visit_attrs(|attrs| attrs.insert(pos, attr)); fragment_kind.expect_from_annotatables(iter::once(item)) + } else { + unreachable!(); } - _ => unreachable!(), - }, + } InvocationKind::Derive { path, item, is_const } => match ext { SyntaxExtensionKind::Derive(expander) | SyntaxExtensionKind::LegacyDerive(expander) => { @@ -923,6 +926,35 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } fragment } + SyntaxExtensionKind::MacroRules(expander) + if expander.kinds().contains(MacroKinds::DERIVE) => + { + if is_const { + let guar = self + .cx + .dcx() + .span_err(span, "macro `derive` does not support const derives"); + return ExpandResult::Ready(fragment_kind.dummy(span, guar)); + } + let body = item.to_tokens(); + match expander.expand_derive(self.cx, span, &body) { + Ok(tok_result) => { + let fragment = + self.parse_ast_fragment(tok_result, fragment_kind, &path, span); + if macro_stats { + update_derive_macro_stats( + self.cx, + fragment_kind, + span, + &path, + &fragment, + ); + } + fragment + } + Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)), + } + } _ => unreachable!(), }, InvocationKind::GlobDelegation { item, of_trait } => { @@ -1005,7 +1037,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { fn visit_item(&mut self, item: &'ast ast::Item) { match &item.kind { ItemKind::Mod(_, _, mod_kind) - if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _, _)) => + if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _)) => { feature_err( self.sess, @@ -1316,7 +1348,7 @@ impl InvocationCollectorNode for Box<ast::Item> { let ItemKind::Mod(_, ident, ref mut mod_kind) = node.kind else { unreachable!() }; let ecx = &mut collector.cx; let (file_path, dir_path, dir_ownership) = match mod_kind { - ModKind::Loaded(_, inline, _, _) => { + ModKind::Loaded(_, inline, _) => { // Inline `mod foo { ... }`, but we still need to push directories. let (dir_path, dir_ownership) = mod_dir_path( ecx.sess, @@ -1330,7 +1362,7 @@ impl InvocationCollectorNode for Box<ast::Item> { // This lets `parse_external_mod` catch cycles if it's self-referential. let file_path = match inline { Inline::Yes => None, - Inline::No => mod_file_path_from_attr(ecx.sess, &attrs, &dir_path), + Inline::No { .. 
} => mod_file_path_from_attr(ecx.sess, &attrs, &dir_path), }; node.attrs = attrs; (file_path, dir_path, dir_ownership) @@ -1366,7 +1398,7 @@ impl InvocationCollectorNode for Box<ast::Item> { ); } - *mod_kind = ModKind::Loaded(items, Inline::No, spans, had_parse_error); + *mod_kind = ModKind::Loaded(items, Inline::No { had_parse_error }, spans); node.attrs = attrs; if node.attrs.len() > old_attrs_len { // If we loaded an out-of-line module and added some inner attributes, @@ -2127,6 +2159,16 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { attr, self.cx.current_expansion.lint_node_id, ); + AttributeParser::parse_limited_all( + self.cx.sess, + slice::from_ref(attr), + None, + Target::MacroCall, + call.span(), + self.cx.current_expansion.lint_node_id, + Some(self.cx.ecfg.features), + ShouldEmit::ErrorsAndLints, + ); let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span }; span = Some(current_span); @@ -2142,7 +2184,9 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { self.cx.current_expansion.lint_node_id, BuiltinLintDiag::UnusedDocComment(attr.span), ); - } else if rustc_attr_parsing::is_builtin_attr(attr) { + } else if rustc_attr_parsing::is_builtin_attr(attr) + && !AttributeParser::<Early>::is_parsed_attribute(&attr.path()) + { let attr_name = attr.ident().unwrap().name; // `#[cfg]` and `#[cfg_attr]` are special - they are // eagerly evaluated. @@ -2155,6 +2199,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { attr_name, macro_name: pprust::path_to_string(&call.path), invoc_span: call.path.span, + attr_span: attr.span, }, ); } @@ -2437,7 +2482,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { if let Some(attr) = node.attrs.first() { self.cfg().maybe_emit_expr_attr_err(attr); } - self.visit_node(node) + ensure_sufficient_stack(|| self.visit_node(node)) } fn visit_method_receiver_expr(&mut self, node: &mut ast::Expr) { diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index 5b9d56ee2bc..f5edaf50edd 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -14,14 +14,22 @@ use super::macro_rules::{MacroRule, NoopTracker, parser_from_cx}; use crate::expand::{AstFragmentKind, parse_ast_fragment}; use crate::mbe::macro_parser::ParseResult::*; use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser}; -use crate::mbe::macro_rules::{Tracker, try_match_macro, try_match_macro_attr}; +use crate::mbe::macro_rules::{ + Tracker, try_match_macro, try_match_macro_attr, try_match_macro_derive, +}; + +pub(super) enum FailedMacro<'a> { + Func, + Attr(&'a TokenStream), + Derive, +} pub(super) fn failed_to_match_macro( psess: &ParseSess, sp: Span, def_span: Span, name: Ident, - attr_args: Option<&TokenStream>, + args: FailedMacro<'_>, body: &TokenStream, rules: &[MacroRule], ) -> (Span, ErrorGuaranteed) { @@ -36,10 +44,12 @@ pub(super) fn failed_to_match_macro( // diagnostics. 
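The new `FailedMacro` enum above replaces the `Option<&TokenStream>` parameter of `failed_to_match_macro`, making the derive case explicit alongside the function-like and attribute retries. A small standalone sketch of the same shape, with a placeholder `TokenStream`:

```rust
// Placeholder for rustc's TokenStream; only the shape of the dispatch matters here.
type TokenStream = Vec<String>;

enum FailedMacro<'a> {
    Func,
    Attr(&'a TokenStream),
    Derive,
}

// Before this change the same information was an `Option<&TokenStream>`, which
// could not distinguish the derive case from the function-like case.
fn retry(args: FailedMacro<'_>) -> String {
    match args {
        FailedMacro::Func => "retry against the function-like rules".into(),
        FailedMacro::Attr(attr_args) => {
            format!("retry against the attr rules ({} arg tokens)", attr_args.len())
        }
        FailedMacro::Derive => "retry against the derive rules".into(),
    }
}

fn main() {
    let attr_args: TokenStream = vec!["feature".into()];
    println!("{}", retry(FailedMacro::Attr(&attr_args)));
    println!("{}", retry(FailedMacro::Derive));
}
```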
let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp); - let try_success_result = if let Some(attr_args) = attr_args { - try_match_macro_attr(psess, name, attr_args, body, rules, &mut tracker) - } else { - try_match_macro(psess, name, body, rules, &mut tracker) + let try_success_result = match args { + FailedMacro::Func => try_match_macro(psess, name, body, rules, &mut tracker), + FailedMacro::Attr(attr_args) => { + try_match_macro_attr(psess, name, attr_args, body, rules, &mut tracker) + } + FailedMacro::Derive => try_match_macro_derive(psess, name, body, rules, &mut tracker), }; if try_success_result.is_ok() { @@ -58,18 +68,6 @@ pub(super) fn failed_to_match_macro( let Some(BestFailure { token, msg: label, remaining_matcher, .. }) = tracker.best_failure else { - // FIXME: we should report this at macro resolution time, as we do for - // `resolve_macro_cannot_use_as_attr`. We can do that once we track multiple macro kinds for a - // Def. - if attr_args.is_none() && !rules.iter().any(|rule| matches!(rule, MacroRule::Func { .. })) { - let msg = format!("macro has no rules for function-like invocation `{name}!`"); - let mut err = psess.dcx().struct_span_err(sp, msg); - if !def_head_span.is_dummy() { - let msg = "this macro has no rules for function-like invocation"; - err.span_label(def_head_span, msg); - } - return (sp, err.emit()); - } return (sp, psess.dcx().span_delayed_bug(sp, "failed to match a macro")); }; @@ -102,7 +100,7 @@ pub(super) fn failed_to_match_macro( } // Check whether there's a missing comma in this macro call, like `println!("{}" a);` - if attr_args.is_none() + if let FailedMacro::Func = args && let Some((body, comma_span)) = body.add_comma() { for rule in rules { diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs index 25987a50366..faeae1f494e 100644 --- a/compiler/rustc_expand/src/mbe/macro_check.rs +++ b/compiler/rustc_expand/src/mbe/macro_check.rs @@ -357,10 +357,10 @@ enum NestedMacroState { /// The token `macro_rules` was processed. MacroRules, /// The tokens `macro_rules!` were processed. - MacroRulesNot, + MacroRulesBang, /// The tokens `macro_rules!` followed by a name were processed. The name may be either directly /// an identifier or a meta-variable (that hopefully would be instantiated by an identifier). - MacroRulesNotName, + MacroRulesBangName, /// The keyword `macro` was processed. Macro, /// The keyword `macro` followed by a name was processed. @@ -408,24 +408,24 @@ fn check_nested_occurrences( NestedMacroState::MacroRules, &TokenTree::Token(Token { kind: TokenKind::Bang, .. }), ) => { - state = NestedMacroState::MacroRulesNot; + state = NestedMacroState::MacroRulesBang; } ( - NestedMacroState::MacroRulesNot, + NestedMacroState::MacroRulesBang, &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }), ) => { - state = NestedMacroState::MacroRulesNotName; + state = NestedMacroState::MacroRulesBangName; } - (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => { - state = NestedMacroState::MacroRulesNotName; + (NestedMacroState::MacroRulesBang, &TokenTree::MetaVar(..)) => { + state = NestedMacroState::MacroRulesBangName; // We check that the meta-variable is correctly used. 
check_occurrences(psess, node_id, tt, macros, binders, ops, guar); } - (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(.., del)) + (NestedMacroState::MacroRulesBangName, TokenTree::Delimited(.., del)) | (NestedMacroState::MacroName, TokenTree::Delimited(.., del)) if del.delim == Delimiter::Brace => { - let macro_rules = state == NestedMacroState::MacroRulesNotName; + let macro_rules = state == NestedMacroState::MacroRulesBangName; state = NestedMacroState::Empty; let rest = check_nested_macro(psess, node_id, macro_rules, &del.tts, &nested_macros, guar); diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 08b0efb74a0..8b43f852b26 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -15,6 +15,7 @@ use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan}; use rustc_feature::Features; use rustc_hir as hir; use rustc_hir::attrs::AttributeKind; +use rustc_hir::def::MacroKinds; use rustc_hir::find_attr; use rustc_lint_defs::BuiltinLintDiag; use rustc_lint_defs::builtin::{ @@ -26,10 +27,10 @@ use rustc_session::Session; use rustc_session::parse::{ParseSess, feature_err}; use rustc_span::edition::Edition; use rustc_span::hygiene::Transparency; -use rustc_span::{Ident, Span, kw, sym}; +use rustc_span::{Ident, Span, Symbol, kw, sym}; use tracing::{debug, instrument, trace, trace_span}; -use super::diagnostics::failed_to_match_macro; +use super::diagnostics::{FailedMacro, failed_to_match_macro}; use super::macro_parser::{NamedMatches, NamedParseResult}; use super::{SequenceRepetition, diagnostics}; use crate::base::{ @@ -137,6 +138,8 @@ pub(super) enum MacroRule { body_span: Span, rhs: mbe::TokenTree, }, + /// A derive rule, for use with `#[m]` + Derive { body: Vec<MatcherLoc>, body_span: Span, rhs: mbe::TokenTree }, } pub struct MacroRulesMacroExpander { @@ -144,6 +147,7 @@ pub struct MacroRulesMacroExpander { name: Ident, span: Span, transparency: Transparency, + kinds: MacroKinds, rules: Vec<MacroRule>, } @@ -155,9 +159,71 @@ impl MacroRulesMacroExpander { MacroRule::Attr { args_span, body_span, ref rhs, .. } => { (MultiSpan::from_spans(vec![args_span, body_span]), rhs) } + MacroRule::Derive { body_span, ref rhs, .. } => (MultiSpan::from_span(body_span), rhs), }; if has_compile_error_macro(rhs) { None } else { Some((&self.name, span)) } } + + pub fn kinds(&self) -> MacroKinds { + self.kinds + } + + pub fn expand_derive( + &self, + cx: &mut ExtCtxt<'_>, + sp: Span, + body: &TokenStream, + ) -> Result<TokenStream, ErrorGuaranteed> { + // This is similar to `expand_macro`, but they have very different signatures, and will + // diverge further once derives support arguments. + let Self { name, ref rules, node_id, .. } = *self; + let psess = &cx.sess.psess; + + if cx.trace_macros() { + let msg = format!("expanding `#[derive({name})] {}`", pprust::tts_to_string(body)); + trace_macros_note(&mut cx.expansions, sp, msg); + } + + match try_match_macro_derive(psess, name, body, rules, &mut NoopTracker) { + Ok((rule_index, rule, named_matches)) => { + let MacroRule::Derive { rhs, .. 
} = rule else { + panic!("try_match_macro_derive returned non-derive rule"); + }; + let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else { + cx.dcx().span_bug(sp, "malformed macro derive rhs"); + }; + + let id = cx.current_expansion.id; + let tts = transcribe(psess, &named_matches, rhs, *rhs_span, self.transparency, id) + .map_err(|e| e.emit())?; + + if cx.trace_macros() { + let msg = format!("to `{}`", pprust::tts_to_string(&tts)); + trace_macros_note(&mut cx.expansions, sp, msg); + } + + if is_defined_in_current_crate(node_id) { + cx.resolver.record_macro_rule_usage(node_id, rule_index); + } + + Ok(tts) + } + Err(CanRetry::No(guar)) => Err(guar), + Err(CanRetry::Yes) => { + let (_, guar) = failed_to_match_macro( + cx.psess(), + sp, + self.span, + name, + FailedMacro::Derive, + body, + rules, + ); + cx.macro_error_and_trace_macros_diag(); + Err(guar) + } + } + } } impl TTMacroExpander for MacroRulesMacroExpander { @@ -319,8 +385,15 @@ fn expand_macro<'cx>( } Err(CanRetry::Yes) => { // Retry and emit a better error. - let (span, guar) = - failed_to_match_macro(cx.psess(), sp, def_span, name, None, &arg, rules); + let (span, guar) = failed_to_match_macro( + cx.psess(), + sp, + def_span, + name, + FailedMacro::Func, + &arg, + rules, + ); cx.macro_error_and_trace_macros_diag(); DummyResult::any(span, guar) } @@ -382,8 +455,15 @@ fn expand_macro_attr( Err(CanRetry::No(guar)) => Err(guar), Err(CanRetry::Yes) => { // Retry and emit a better error. - let (_, guar) = - failed_to_match_macro(cx.psess(), sp, def_span, name, Some(&args), &body, rules); + let (_, guar) = failed_to_match_macro( + cx.psess(), + sp, + def_span, + name, + FailedMacro::Attr(&args), + &body, + rules, + ); cx.trace_macros_diag(); Err(guar) } @@ -516,6 +596,7 @@ pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>( match result { Success(body_named_matches) => { psess.gated_spans.merge(gated_spans_snapshot); + #[allow(rustc::potential_query_instability)] named_matches.extend(body_named_matches); return Ok((i, rule, named_matches)); } @@ -530,6 +611,44 @@ pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>( Err(CanRetry::Yes) } +/// Try expanding the macro derive. Returns the index of the successful arm and its +/// named_matches if it was successful, and nothing if it failed. On failure, it's the caller's job +/// to use `track` accordingly to record all errors correctly. +#[instrument(level = "debug", skip(psess, body, rules, track), fields(tracking = %T::description()))] +pub(super) fn try_match_macro_derive<'matcher, T: Tracker<'matcher>>( + psess: &ParseSess, + name: Ident, + body: &TokenStream, + rules: &'matcher [MacroRule], + track: &mut T, +) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> { + // This uses the same strategy as `try_match_macro` + let body_parser = parser_from_cx(psess, body.clone(), T::recovery()); + let mut tt_parser = TtParser::new(name); + for (i, rule) in rules.iter().enumerate() { + let MacroRule::Derive { body, .. 
} = rule else { continue }; + + let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut()); + + let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track); + track.after_arm(true, &result); + + match result { + Success(named_matches) => { + psess.gated_spans.merge(gated_spans_snapshot); + return Ok((i, rule, named_matches)); + } + Failure(_) => { + mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut()) + } + Error(_, _) => return Err(CanRetry::Yes), + ErrorReported(guar) => return Err(CanRetry::No(guar)), + } + } + + Err(CanRetry::Yes) +} + /// Converts a macro item into a syntax extension. pub fn compile_declarative_macro( sess: &Session, @@ -540,13 +659,13 @@ pub fn compile_declarative_macro( span: Span, node_id: NodeId, edition: Edition, -) -> (SyntaxExtension, Option<Arc<SyntaxExtension>>, usize) { +) -> (SyntaxExtension, usize) { let mk_syn_ext = |kind| { let is_local = is_defined_in_current_crate(node_id); SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local) }; - let mk_bang_ext = |expander| mk_syn_ext(SyntaxExtensionKind::LegacyBang(expander)); - let dummy_syn_ext = |guar| (mk_bang_ext(Arc::new(DummyExpander(guar))), None, 0); + let dummy_syn_ext = + |guar| (mk_syn_ext(SyntaxExtensionKind::LegacyBang(Arc::new(DummyExpander(guar)))), 0); let macro_rules = macro_def.macro_rules; let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) }; @@ -559,12 +678,12 @@ pub fn compile_declarative_macro( let mut guar = None; let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err()); - let mut has_attr_rules = false; + let mut kinds = MacroKinds::empty(); let mut rules = Vec::new(); while p.token != token::Eof { - let args = if p.eat_keyword_noexpect(sym::attr) { - has_attr_rules = true; + let (args, is_derive) = if p.eat_keyword_noexpect(sym::attr) { + kinds |= MacroKinds::ATTR; if !features.macro_attr() { feature_err(sess, sym::macro_attr, span, "`macro_rules!` attributes are unstable") .emit(); @@ -573,15 +692,46 @@ pub fn compile_declarative_macro( return dummy_syn_ext(guar); } let args = p.parse_token_tree(); - check_args_parens(sess, &args); + check_args_parens(sess, sym::attr, &args); let args = parse_one_tt(args, RulePart::Pattern, sess, node_id, features, edition); check_emission(check_lhs(sess, node_id, &args)); if let Some(guar) = check_no_eof(sess, &p, "expected macro attr body") { return dummy_syn_ext(guar); } - Some(args) + (Some(args), false) + } else if p.eat_keyword_noexpect(sym::derive) { + kinds |= MacroKinds::DERIVE; + let derive_keyword_span = p.prev_token.span; + if !features.macro_derive() { + feature_err(sess, sym::macro_attr, span, "`macro_rules!` derives are unstable") + .emit(); + } + if let Some(guar) = check_no_eof(sess, &p, "expected `()` after `derive`") { + return dummy_syn_ext(guar); + } + let args = p.parse_token_tree(); + check_args_parens(sess, sym::derive, &args); + let args_empty_result = check_args_empty(sess, &args); + let args_not_empty = args_empty_result.is_err(); + check_emission(args_empty_result); + if let Some(guar) = check_no_eof(sess, &p, "expected macro derive body") { + return dummy_syn_ext(guar); + } + // If the user has `=>` right after the `()`, they might have forgotten the empty + // parentheses. 
+ if p.token == token::FatArrow { + let mut err = sess + .dcx() + .struct_span_err(p.token.span, "expected macro derive body, got `=>`"); + if args_not_empty { + err.span_label(derive_keyword_span, "need `()` after this `derive`"); + } + return dummy_syn_ext(err.emit()); + } + (None, true) } else { - None + kinds |= MacroKinds::BANG; + (None, false) }; let lhs_tt = p.parse_token_tree(); let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition); @@ -612,6 +762,8 @@ pub fn compile_declarative_macro( let args = mbe::macro_parser::compute_locs(&delimited.tts); let body_span = lhs_span; rules.push(MacroRule::Attr { args, args_span, body: lhs, body_span, rhs: rhs_tt }); + } else if is_derive { + rules.push(MacroRule::Derive { body: lhs, body_span: lhs_span, rhs: rhs_tt }); } else { rules.push(MacroRule::Func { lhs, lhs_span, rhs: rhs_tt }); } @@ -627,6 +779,7 @@ pub fn compile_declarative_macro( let guar = sess.dcx().span_err(span, "macros must contain at least one rule"); return dummy_syn_ext(guar); } + assert!(!kinds.is_empty()); let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x) .unwrap_or(Transparency::fallback(macro_rules)); @@ -640,12 +793,8 @@ pub fn compile_declarative_macro( // Return the number of rules for unused rule linting, if this is a local macro. let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 }; - let exp = Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules }); - let opt_attr_ext = has_attr_rules.then(|| { - let exp = Arc::clone(&exp); - Arc::new(mk_syn_ext(SyntaxExtensionKind::Attr(exp))) - }); - (mk_bang_ext(exp), opt_attr_ext, nrules) + let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules }; + (mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules) } fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> { @@ -661,7 +810,7 @@ fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<Err None } -fn check_args_parens(sess: &Session, args: &tokenstream::TokenTree) { +fn check_args_parens(sess: &Session, rule_kw: Symbol, args: &tokenstream::TokenTree) { // This does not handle the non-delimited case; that gets handled separately by `check_lhs`. 
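Taken together, the parsing above accepts `attr(...)` and `derive()` rule prefixes and records the resulting `MacroKinds` on a single `MacroRulesMacroExpander`. A hedged sketch of what a `derive` rule might look like in source, inferred from this diff (keyword `derive`, mandatory empty parentheses, a matcher for the annotated item's tokens, then the usual `=>` body); the feature is unstable and the final surface syntax may differ:

```rust
// Speculative sketch based on the parser changes in this diff; nightly-only and
// not guaranteed to match the syntax that is eventually accepted.
#![feature(macro_derive)] // gate added in this diff

macro_rules! AnswerImpl {
    // `derive` rule: empty `()` args, then a matcher for the annotated item.
    derive() (struct $name:ident { $($rest:tt)* }) => {
        impl $name {
            pub const ANSWER: u32 = 42;
        }
    };
}

// The expansion path in expand.rs above routes `#[derive(...)]` invocations to
// `MacroRule::Derive` rules when the resolved macro has `MacroKinds::DERIVE`.
#[derive(AnswerImpl)]
struct Foo {
    x: u32,
}

fn main() {
    let foo = Foo { x: Foo::ANSWER };
    let _ = foo.x;
}
```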
if let tokenstream::TokenTree::Delimited(dspan, _, delim, _) = args && *delim != Delimiter::Parenthesis @@ -669,10 +818,21 @@ fn check_args_parens(sess: &Session, args: &tokenstream::TokenTree) { sess.dcx().emit_err(errors::MacroArgsBadDelim { span: dspan.entire(), sugg: errors::MacroArgsBadDelimSugg { open: dspan.open, close: dspan.close }, + rule_kw, }); } } +fn check_args_empty(sess: &Session, args: &tokenstream::TokenTree) -> Result<(), ErrorGuaranteed> { + match args { + tokenstream::TokenTree::Delimited(.., delimited) if delimited.is_empty() => Ok(()), + _ => { + let msg = "`derive` rules do not accept arguments; `derive` must be followed by `()`"; + Err(sess.dcx().span_err(args.span(), msg)) + } + } +} + fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> { let e1 = check_lhs_nt_follows(sess, node_id, lhs); let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs)); diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs index 662c67f2d3f..19f3cdbc549 100644 --- a/compiler/rustc_expand/src/module.rs +++ b/compiler/rustc_expand/src/module.rs @@ -2,8 +2,9 @@ use std::iter::once; use std::path::{self, Path, PathBuf}; use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans}; +use rustc_attr_parsing::validate_attr; use rustc_errors::{Diag, ErrorGuaranteed}; -use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal, validate_attr}; +use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal}; use rustc_session::Session; use rustc_session::parse::ParseSess; use rustc_span::{Ident, Span, sym}; @@ -120,7 +121,7 @@ pub(crate) fn mod_dir_path( (dir_path, dir_ownership) } - Inline::No => { + Inline::No { .. } => { // FIXME: This is a subset of `parse_external_mod` without actual parsing, // check whether the logic for unloaded, loaded and inline modules can be unified. let file_path = mod_file_path(sess, ident, attrs, &module.dir_path, dir_ownership) diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index fd71f2ce948..5b1d3d6d35b 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -250,12 +250,14 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre Question => op("?"), SingleQuote => op("'"), - Ident(sym, is_raw) => { - trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span })) - } + Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { + sym, + is_raw: matches!(is_raw, IdentIsRaw::Yes), + span, + })), NtIdent(ident, is_raw) => trees.push(TokenTree::Ident(Ident { sym: ident.name, - is_raw: is_raw.into(), + is_raw: matches!(is_raw, IdentIsRaw::Yes), span: ident.span, })), @@ -263,7 +265,11 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre let ident = rustc_span::Ident::new(name, span).without_first_quote(); trees.extend([ TokenTree::Punct(Punct { ch: b'\'', joint: true, span }), - TokenTree::Ident(Ident { sym: ident.name, is_raw: is_raw.into(), span }), + TokenTree::Ident(Ident { + sym: ident.name, + is_raw: matches!(is_raw, IdentIsRaw::Yes), + span, + }), ]); } NtLifetime(ident, is_raw) => { diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 83be3241b12..9fe55216f93 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -396,6 +396,8 @@ declare_features! 
( (accepted, slice_patterns, "1.42.0", Some(62254)), /// Allows use of `&foo[a..b]` as a slicing syntax. (accepted, slicing_syntax, "1.0.0", None), + /// Allows use of `sse4a` target feature. + (accepted, sse4a_target_feature, "CURRENT_RUSTC_VERSION", Some(44839)), /// Allows elision of `'static` lifetimes in `static`s and `const`s. (accepted, static_in_const, "1.17.0", Some(35897)), /// Allows the definition recursive static items. @@ -408,6 +410,8 @@ declare_features! ( (accepted, target_feature, "1.27.0", None), /// Allows the use of `#[target_feature]` on safe functions. (accepted, target_feature_11, "1.86.0", Some(69098)), + /// Allows use of `tbm` target feature. + (accepted, tbm_target_feature, "CURRENT_RUSTC_VERSION", Some(44839)), /// Allows `fn main()` with return types which implements `Termination` (RFC 1937). (accepted, termination_trait, "1.26.0", Some(43301)), /// Allows `#[test]` functions where the return type implements `Termination` (RFC 1937). diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index ab6b8f92802..e81003b1897 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -6,6 +6,7 @@ use AttributeDuplicates::*; use AttributeGate::*; use AttributeType::*; use rustc_data_structures::fx::FxHashMap; +use rustc_hir::AttrStyle; use rustc_hir::attrs::EncodeCrossCrate; use rustc_span::edition::Edition; use rustc_span::{Symbol, sym}; @@ -132,9 +133,12 @@ pub struct AttributeTemplate { } impl AttributeTemplate { - pub fn suggestions(&self, inner: bool, name: impl std::fmt::Display) -> Vec<String> { + pub fn suggestions(&self, style: AttrStyle, name: impl std::fmt::Display) -> Vec<String> { let mut suggestions = vec![]; - let inner = if inner { "!" 
} else { "" }; + let inner = match style { + AttrStyle::Outer => "", + AttrStyle::Inner => "!", + }; if self.word { suggestions.push(format!("#{inner}[{name}]")); } @@ -741,9 +745,12 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ErrorPreceding, EncodeCrossCrate::No ), gated!( - no_sanitize, Normal, - template!(List: &["address, kcfi, memory, thread"]), DuplicatesOk, - EncodeCrossCrate::No, experimental!(no_sanitize) + unsafe force_target_feature, Normal, template!(List: &[r#"enable = "name""#]), + DuplicatesOk, EncodeCrossCrate::No, effective_target_features, experimental!(force_target_feature) + ), + gated!( + sanitize, Normal, template!(List: &[r#"address = "on|off""#, r#"kernel_address = "on|off""#, r#"cfi = "on|off""#, r#"hwaddress = "on|off""#, r#"kcfi = "on|off""#, r#"memory = "on|off""#, r#"memtag = "on|off""#, r#"shadow_call_stack = "on|off""#, r#"thread = "on|off""#]), ErrorPreceding, + EncodeCrossCrate::No, sanitize, experimental!(sanitize), ), gated!( coverage, Normal, template!(OneOf: &[sym::off, sym::on]), @@ -993,6 +1000,10 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_allocator_zeroed, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No, ), + rustc_attr!( + rustc_allocator_zeroed_variant, Normal, template!(NameValueStr: "function"), ErrorPreceding, + EncodeCrossCrate::Yes, + ), gated!( default_lib_allocator, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No, allocator_internals, experimental!(default_lib_allocator), @@ -1159,10 +1170,6 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ EncodeCrossCrate::Yes, "`#[rustc_do_not_const_check]` skips const-check for this function's body", ), rustc_attr!( - rustc_const_panic_str, Normal, template!(Word), WarnFollowing, - EncodeCrossCrate::Yes, "`#[rustc_const_panic_str]` ensures the argument to this function is &&str during const-check", - ), - rustc_attr!( rustc_const_stable_indirect, Normal, template!(Word), WarnFollowing, diff --git a/compiler/rustc_feature/src/removed.rs b/compiler/rustc_feature/src/removed.rs index 04f261ada06..e37fc6b7bfc 100644 --- a/compiler/rustc_feature/src/removed.rs +++ b/compiler/rustc_feature/src/removed.rs @@ -190,6 +190,9 @@ declare_features! ( (removed, no_coverage, "1.74.0", Some(84605), Some("renamed to `coverage_attribute`"), 114656), /// Allows `#[no_debug]`. (removed, no_debug, "1.43.0", Some(29721), Some("removed due to lack of demand"), 69667), + // Allows the use of `no_sanitize` attribute. + /// The feature was renamed to `sanitize` and the attribute to `#[sanitize(xyz = "on|off")]` + (removed, no_sanitize, "CURRENT_RUSTC_VERSION", Some(39699), Some(r#"renamed to sanitize(xyz = "on|off")"#), 142681), /// Note: this feature was previously recorded in a separate /// `STABLE_REMOVED` list because it, uniquely, was once stable but was /// then removed. But there was no utility storing it separately, so now diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index acc21f6c6d2..92b435b4b01 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -334,8 +334,6 @@ declare_features! 
( (unstable, rtm_target_feature, "1.35.0", Some(44839)), (unstable, s390x_target_feature, "1.82.0", Some(44839)), (unstable, sparc_target_feature, "1.84.0", Some(132783)), - (unstable, sse4a_target_feature, "1.27.0", Some(44839)), - (unstable, tbm_target_feature, "1.27.0", Some(44839)), (unstable, wasm_target_feature, "1.30.0", Some(44839)), (unstable, x87_target_feature, "1.85.0", Some(44839)), // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! @@ -472,6 +470,8 @@ declare_features! ( (unstable, deprecated_suggestion, "1.61.0", Some(94785)), /// Allows deref patterns. (incomplete, deref_patterns, "1.79.0", Some(87121)), + /// Allows deriving the From trait on single-field structs. + (unstable, derive_from, "CURRENT_RUSTC_VERSION", Some(144889)), /// Tells rustdoc to automatically generate `#[doc(cfg(...))]`. (unstable, doc_auto_cfg, "1.58.0", Some(43781)), /// Allows `#[doc(cfg(...))]`. @@ -480,6 +480,8 @@ declare_features! ( (unstable, doc_cfg_hide, "1.57.0", Some(43781)), /// Allows `#[doc(masked)]`. (unstable, doc_masked, "1.21.0", Some(44027)), + /// Allows features to allow target_feature to better interact with traits. + (incomplete, effective_target_features, "CURRENT_RUSTC_VERSION", Some(143352)), /// Allows the .use postfix syntax `x.use` and use closures `use |x| { ... }` (incomplete, ergonomic_clones, "1.87.0", Some(132290)), /// Allows exhaustive pattern matching on types that contain uninhabited types. @@ -556,6 +558,8 @@ declare_features! ( (incomplete, loop_match, "1.90.0", Some(132306)), /// Allow `macro_rules!` attribute rules (unstable, macro_attr, "CURRENT_RUSTC_VERSION", Some(83527)), + /// Allow `macro_rules!` derive rules + (unstable, macro_derive, "CURRENT_RUSTC_VERSION", Some(143549)), /// Give access to additional metadata about declarative macro meta-variables. (unstable, macro_metavar_expr, "1.61.0", Some(83527)), /// Provides a way to concatenate identifiers using metavariable expressions. @@ -592,8 +596,6 @@ declare_features! ( (unstable, new_range, "1.86.0", Some(123741)), /// Allows `#![no_core]`. (unstable, no_core, "1.3.0", Some(29639)), - /// Allows the use of `no_sanitize` attribute. - (unstable, no_sanitize, "1.42.0", Some(39699)), /// Allows using the `non_exhaustive_omitted_patterns` lint. (unstable, non_exhaustive_omitted_patterns_lint, "1.57.0", Some(89554)), /// Allows `for<T>` binders in where-clauses @@ -614,6 +616,7 @@ declare_features! ( (unstable, proc_macro_hygiene, "1.30.0", Some(54727)), /// Allows the use of raw-dylibs on ELF platforms (incomplete, raw_dylib_elf, "1.87.0", Some(135694)), + (unstable, reborrow, "CURRENT_RUSTC_VERSION", Some(145612)), /// Makes `&` and `&mut` patterns eat only one layer of references in Rust 2024. (incomplete, ref_pat_eat_one_layer_2024, "1.79.0", Some(123076)), /// Makes `&` and `&mut` patterns eat only one layer of references in Rust 2024—structural variant @@ -626,6 +629,8 @@ declare_features! ( (unstable, return_type_notation, "1.70.0", Some(109417)), /// Allows `extern "rust-cold"`. (unstable, rust_cold_cc, "1.63.0", Some(97544)), + /// Allows the use of the `sanitize` attribute. + (unstable, sanitize, "CURRENT_RUSTC_VERSION", Some(39699)), /// Allows the use of SIMD types in functions declared in `extern` blocks. (unstable, simd_ffi, "1.0.0", Some(27731)), /// Allows specialization of implementations (RFC 1210). 
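Among the feature-gate changes above, `no_sanitize` is removed in favour of a new `sanitize` feature whose attribute takes `key = "on|off"` values (see the `gated!` template and the removal note). A hedged usage sketch based only on the names appearing in this diff; the attribute is unstable and its semantics may still change:

```rust
#![feature(sanitize)] // unstable gate introduced in this diff

// Previously spelled `#[no_sanitize(address)]`; the new attribute takes explicit
// on/off values, and per the `Sanitize { on_set, off_set, .. }` attribute kind
// elsewhere in this diff, unset sanitizers are inherited from the enclosing scope.
#[sanitize(address = "off")]
fn touches_poisoned_shadow_memory() {
    // AddressSanitizer instrumentation would be suppressed here.
}

fn main() {
    touches_poisoned_shadow_memory();
}
```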
diff --git a/compiler/rustc_hir/Cargo.toml b/compiler/rustc_hir/Cargo.toml index 539d2e6f0b1..ea72ed68c5e 100644 --- a/compiler/rustc_hir/Cargo.toml +++ b/compiler/rustc_hir/Cargo.toml @@ -5,19 +5,22 @@ edition = "2024" [dependencies] # tidy-alphabetical-start +bitflags.workspace = true odht = { version = "0.3.1", features = ["nightly"] } rustc_abi = { path = "../rustc_abi" } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_error_messages = { path = "../rustc_error_messages" } rustc_hashes = { path = "../rustc_hashes" } +rustc_hir_id = { path = "../rustc_hir_id" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_hir/src/attrs/data_structures.rs b/compiler/rustc_hir/src/attrs/data_structures.rs index e02edf5fe24..dd5565d6f90 100644 --- a/compiler/rustc_hir/src/attrs/data_structures.rs +++ b/compiler/rustc_hir/src/attrs/data_structures.rs @@ -1,11 +1,16 @@ +use std::borrow::Cow; +use std::path::PathBuf; + pub use ReprAttr::*; use rustc_abi::Align; use rustc_ast::token::CommentKind; use rustc_ast::{AttrStyle, ast}; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::{Decodable, Encodable, HashStable_Generic, PrintAttribute}; use rustc_span::def_id::DefId; use rustc_span::hygiene::Transparency; use rustc_span::{Ident, Span, Symbol}; +pub use rustc_target::spec::SanitizerSet; use thin_vec::ThinVec; use crate::attrs::pretty_printing::PrintAttribute; @@ -187,6 +192,176 @@ pub enum CfgEntry { Version(Option<RustcVersion>, Span), } +/// Possible values for the `#[linkage]` attribute, allowing to specify the +/// linkage type for a `MonoItem`. +/// +/// See <https://llvm.org/docs/LangRef.html#linkage-types> for more details about these variants. +#[derive(Encodable, Decodable, Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(HashStable_Generic, PrintAttribute)] +pub enum Linkage { + AvailableExternally, + Common, + ExternalWeak, + External, + Internal, + LinkOnceAny, + LinkOnceODR, + WeakAny, + WeakODR, +} + +#[derive(Clone, Copy, Decodable, Debug, Encodable, PartialEq)] +#[derive(HashStable_Generic, PrintAttribute)] +pub enum MirDialect { + Analysis, + Built, + Runtime, +} + +impl IntoDiagArg for MirDialect { + fn into_diag_arg(self, _path: &mut Option<PathBuf>) -> DiagArgValue { + let arg = match self { + MirDialect::Analysis => "analysis", + MirDialect::Built => "built", + MirDialect::Runtime => "runtime", + }; + DiagArgValue::Str(Cow::Borrowed(arg)) + } +} + +#[derive(Clone, Copy, Decodable, Debug, Encodable, PartialEq)] +#[derive(HashStable_Generic, PrintAttribute)] +pub enum MirPhase { + Initial, + PostCleanup, + Optimized, +} + +impl IntoDiagArg for MirPhase { + fn into_diag_arg(self, _path: &mut Option<PathBuf>) -> DiagArgValue { + let arg = match self { + MirPhase::Initial => "initial", + MirPhase::PostCleanup => "post-cleanup", + MirPhase::Optimized => "optimized", + }; + DiagArgValue::Str(Cow::Borrowed(arg)) + } +} + +/// Different ways that the PE Format can decorate a symbol name. 
+/// From <https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-name-type> +#[derive( + Copy, + Clone, + Debug, + Encodable, + Decodable, + HashStable_Generic, + PartialEq, + Eq, + PrintAttribute +)] +pub enum PeImportNameType { + /// IMPORT_ORDINAL + /// Uses the ordinal (i.e., a number) rather than the name. + Ordinal(u16), + /// Same as IMPORT_NAME + /// Name is decorated with all prefixes and suffixes. + Decorated, + /// Same as IMPORT_NAME_NOPREFIX + /// Prefix (e.g., the leading `_` or `@`) is skipped, but suffix is kept. + NoPrefix, + /// Same as IMPORT_NAME_UNDECORATE + /// Prefix (e.g., the leading `_` or `@`) and suffix (the first `@` and all + /// trailing characters) are skipped. + Undecorated, +} + +#[derive( + Copy, + Clone, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Encodable, + Decodable, + PrintAttribute +)] +#[derive(HashStable_Generic)] +pub enum NativeLibKind { + /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC) + Static { + /// Whether to bundle objects from static library into produced rlib + bundle: Option<bool>, + /// Whether to link static library without throwing any object files away + whole_archive: Option<bool>, + }, + /// Dynamic library (e.g. `libfoo.so` on Linux) + /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC). + Dylib { + /// Whether the dynamic library will be linked only if it satisfies some undefined symbols + as_needed: Option<bool>, + }, + /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library. + /// On Linux, it refers to a generated shared library stub. + RawDylib, + /// A macOS-specific kind of dynamic libraries. + Framework { + /// Whether the framework will be linked only if it satisfies some undefined symbols + as_needed: Option<bool>, + }, + /// Argument which is passed to linker, relative order with libraries and other arguments + /// is preserved + LinkArg, + + /// Module imported from WebAssembly + WasmImportModule, + + /// The library kind wasn't specified, `Dylib` is currently used as a default. + Unspecified, +} + +impl NativeLibKind { + pub fn has_modifiers(&self) -> bool { + match self { + NativeLibKind::Static { bundle, whole_archive } => { + bundle.is_some() || whole_archive.is_some() + } + NativeLibKind::Dylib { as_needed } | NativeLibKind::Framework { as_needed } => { + as_needed.is_some() + } + NativeLibKind::RawDylib + | NativeLibKind::Unspecified + | NativeLibKind::LinkArg + | NativeLibKind::WasmImportModule => false, + } + } + + pub fn is_statically_included(&self) -> bool { + matches!(self, NativeLibKind::Static { .. }) + } + + pub fn is_dllimport(&self) -> bool { + matches!( + self, + NativeLibKind::Dylib { .. } | NativeLibKind::RawDylib | NativeLibKind::Unspecified + ) + } +} + +#[derive(Debug, Encodable, Decodable, Clone, HashStable_Generic, PrintAttribute)] +pub struct LinkEntry { + pub span: Span, + pub kind: NativeLibKind, + pub name: Symbol, + pub cfg: Option<CfgEntry>, + pub verbatim: Option<bool>, + pub import_name_type: Option<(PeImportNameType, Span)>, +} + /// Represents parsed *built-in* inert attributes. /// /// ## Overview @@ -306,6 +481,12 @@ pub enum AttributeKind { /// Represents `#[coverage(..)]`. Coverage(Span, CoverageAttrKind), + /// Represents `#[crate_name = ...]` + CrateName { name: Symbol, name_span: Span, attr_span: Span, style: AttrStyle }, + + /// Represents `#[custom_mir]`. 
+ CustomMir(Option<(MirDialect, Span)>, Option<(MirPhase, Span)>, Span), + ///Represents `#[rustc_deny_explicit_impl]`. DenyExplicitImpl(Span), @@ -351,6 +532,9 @@ pub enum AttributeKind { /// Represents `#[inline]` and `#[rustc_force_inline]`. Inline(InlineAttr, Span), + /// Represents `#[link]`. + Link(ThinVec<LinkEntry>, Span), + /// Represents `#[link_name]`. LinkName { name: Symbol, span: Span }, @@ -360,6 +544,9 @@ /// Represents [`#[link_section]`](https://doc.rust-lang.org/reference/abi.html#the-link_section-attribute) LinkSection { name: Symbol, span: Span }, + /// Represents `#[linkage]`. + Linkage(Linkage, Span), + /// Represents `#[loop_match]`. LoopMatch(Span), @@ -439,6 +626,12 @@ pub enum AttributeKind { /// Represents `#[rustc_object_lifetime_default]`. RustcObjectLifetimeDefault, + /// Represents `#[sanitize]` + /// + /// The on set and off set are disjoint, since there's a third option: unset. + /// A node may not set the sanitizer setting, in which case it inherits from its parents. + Sanitize { on_set: SanitizerSet, off_set: SanitizerSet, span: Span }, + /// Represents `#[should_panic]` ShouldPanic { reason: Option<Symbol>, span: Span }, @@ -458,8 +651,9 @@ pub enum AttributeKind { /// Represents `#[rustc_std_internal_symbol]`. StdInternalSymbol(Span), - /// Represents `#[target_feature(enable = "...")]` - TargetFeature(ThinVec<(Symbol, Span)>, Span), + /// Represents `#[target_feature(enable = "...")]` and + /// `#[unsafe(force_target_feature(enable = "..."))]`. + TargetFeature { features: ThinVec<(Symbol, Span)>, attr_span: Span, was_forced: bool }, /// Represents `#[track_caller]` TrackCaller(Span), diff --git a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs index 7ce624dcc55..3810bb6d003 100644 --- a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs +++ b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs @@ -7,6 +7,11 @@ pub enum EncodeCrossCrate { } impl AttributeKind { + /// Whether this attribute should be encoded in metadata files. + /// + /// If this is "Yes", then another crate can do `tcx.get_all_attrs(did)` for a did in this crate, and get the attribute. + /// When this is No, the attribute is filtered out while encoding and other crates won't be able to observe it. + /// This can be unexpectedly good for performance, so unless necessary for cross-crate compilation, prefer No. pub fn encode_cross_crate(&self) -> EncodeCrossCrate { use AttributeKind::*; use EncodeCrossCrate::*; @@ -31,6 +36,8 @@ impl AttributeKind { ConstTrait(..) => No, Coroutine(..) => No, Coverage(..) => No, + CrateName { .. } => No, + CustomMir(_, _, _) => Yes, DenyExplicitImpl(..) => No, Deprecation { .. } => Yes, DoNotImplementViaObject(..) => No, @@ -43,9 +50,11 @@ impl AttributeKind { Fundamental { .. } => Yes, Ignore { .. } => No, Inline(..) => No, + Link(..) => No, LinkName { .. } => Yes, // Needed for rustdoc LinkOrdinal { .. } => No, LinkSection { .. } => Yes, // Needed for rustdoc + Linkage(..) => No, LoopMatch(..) => No, MacroEscape(..) => No, MacroTransparency(..) => Yes, @@ -71,12 +80,13 @@ impl AttributeKind { RustcLayoutScalarValidRangeEnd(..) => Yes, RustcLayoutScalarValidRangeStart(..) => Yes, RustcObjectLifetimeDefault => No, + Sanitize { .. } => No, ShouldPanic { .. } => No, SkipDuringMethodDispatch { .. } => No, SpecializationTrait(..) => No, Stability { .. } => Yes, StdInternalSymbol(..) => No, - TargetFeature(..) => No, + TargetFeature { .. } => No, TrackCaller(..) => Yes, TypeConst(..)
=> Yes, UnsafeSpecializationMarker(..) => No, diff --git a/compiler/rustc_hir/src/attrs/pretty_printing.rs b/compiler/rustc_hir/src/attrs/pretty_printing.rs index e44b29141da..e65de25b451 100644 --- a/compiler/rustc_hir/src/attrs/pretty_printing.rs +++ b/compiler/rustc_hir/src/attrs/pretty_printing.rs @@ -6,6 +6,7 @@ use rustc_ast::{AttrStyle, IntTy, UintTy}; use rustc_ast_pretty::pp::Printer; use rustc_span::hygiene::Transparency; use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol}; +use rustc_target::spec::SanitizerSet; use thin_vec::ThinVec; /// This trait is used to print attributes in `rustc_hir_pretty`. @@ -146,4 +147,14 @@ macro_rules! print_tup { print_tup!(A B C D E F G H); print_skip!(Span, (), ErrorGuaranteed); print_disp!(u16, bool, NonZero<u32>); -print_debug!(Symbol, Ident, UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency); +print_debug!( + Symbol, + Ident, + UintTy, + IntTy, + Align, + AttrStyle, + CommentKind, + Transparency, + SanitizerSet, +); diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs index 339d4e2eab7..8af4740f376 100644 --- a/compiler/rustc_hir/src/def.rs +++ b/compiler/rustc_hir/src/def.rs @@ -1,10 +1,12 @@ use std::array::IntoIter; +use std::borrow::Cow; use std::fmt::Debug; use rustc_ast as ast; use rustc_ast::NodeId; use rustc_data_structures::stable_hasher::ToStableHashKey; use rustc_data_structures::unord::UnordMap; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::Symbol; use rustc_span::def_id::{DefId, LocalDefId}; @@ -31,6 +33,53 @@ pub enum CtorKind { Const, } +/// A set of macro kinds, for macros that can have more than one kind +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encodable, Decodable, Hash, Debug)] +#[derive(HashStable_Generic)] +pub struct MacroKinds(u8); +bitflags::bitflags! { + impl MacroKinds: u8 { + const BANG = 1 << 0; + const ATTR = 1 << 1; + const DERIVE = 1 << 2; + } +} + +impl From<MacroKind> for MacroKinds { + fn from(kind: MacroKind) -> Self { + match kind { + MacroKind::Bang => Self::BANG, + MacroKind::Attr => Self::ATTR, + MacroKind::Derive => Self::DERIVE, + } + } +} + +impl MacroKinds { + /// Convert the MacroKinds to a static string. + /// + /// This hardcodes all the possibilities, in order to return a static string. + pub fn descr(self) -> &'static str { + match self { + // FIXME: change this to "function-like macro" and fix all tests + Self::BANG => "macro", + Self::ATTR => "attribute macro", + Self::DERIVE => "derive macro", + _ if self == (Self::ATTR | Self::BANG) => "attribute/function macro", + _ if self == (Self::DERIVE | Self::BANG) => "derive/function macro", + _ if self == (Self::ATTR | Self::DERIVE) => "attribute/derive macro", + _ if self.is_all() => "attribute/derive/function macro", + _ if self.is_empty() => "useless macro", + _ => unreachable!(), + } + } + + /// Return an indefinite article (a/an) for use with `descr()` + pub fn article(self) -> &'static str { + if self.contains(Self::ATTR) { "an" } else { "a" } + } +} + /// An attribute that is not a macro; e.g., `#[inline]` or `#[rustfmt::skip]`. 
#[derive(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Debug, HashStable_Generic)] pub enum NonMacroAttrKind { @@ -101,7 +150,7 @@ pub enum DefKind { AssocConst, // Macro namespace - Macro(MacroKind), + Macro(MacroKinds), // Not namespaced (or they are, but we don't treat them so) ExternCrate, @@ -177,7 +226,7 @@ impl DefKind { DefKind::AssocConst => "associated constant", DefKind::TyParam => "type parameter", DefKind::ConstParam => "const parameter", - DefKind::Macro(macro_kind) => macro_kind.descr(), + DefKind::Macro(kinds) => kinds.descr(), DefKind::LifetimeParam => "lifetime parameter", DefKind::Use => "import", DefKind::ForeignMod => "foreign module", @@ -208,7 +257,7 @@ impl DefKind { | DefKind::Use | DefKind::InlineConst | DefKind::ExternCrate => "an", - DefKind::Macro(macro_kind) => macro_kind.article(), + DefKind::Macro(kinds) => kinds.article(), _ => "a", } } @@ -539,6 +588,12 @@ pub enum Res<Id = hir::HirId> { Err, } +impl<Id> IntoDiagArg for Res<Id> { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.descr())) + } +} + /// The result of resolving a path before lowering to HIR, /// with "module" segments resolved and associated item /// segments deferred to type checking. @@ -626,6 +681,12 @@ impl Namespace { } } +impl IntoDiagArg for Namespace { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.descr())) + } +} + impl<CTX: crate::HashStableContext> ToStableHashKey<CTX> for Namespace { type KeyType = Namespace; @@ -845,10 +906,10 @@ impl<Id> Res<Id> { ) } - pub fn macro_kind(self) -> Option<MacroKind> { + pub fn macro_kinds(self) -> Option<MacroKinds> { match self { - Res::Def(DefKind::Macro(kind), _) => Some(kind), - Res::NonMacroAttr(..) => Some(MacroKind::Attr), + Res::Def(DefKind::Macro(kinds), _) => Some(kinds), + Res::NonMacroAttr(..) 
=> Some(MacroKinds::ATTR), _ => None, } } diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index b27c223527e..e397c286de2 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -1,4 +1,5 @@ // ignore-tidy-filelength +use std::borrow::Cow; use std::fmt; use rustc_abi::ExternAbi; @@ -17,10 +18,10 @@ pub use rustc_ast::{ use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::tagged_ptr::TaggedRef; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_index::IndexVec; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::def_id::LocalDefId; -use rustc_span::hygiene::MacroKind; use rustc_span::source_map::Spanned; use rustc_span::{BytePos, DUMMY_SP, ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use rustc_target::asm::InlineAsmRegOrRegClass; @@ -30,7 +31,7 @@ use tracing::debug; use crate::LangItem; use crate::attrs::AttributeKind; -use crate::def::{CtorKind, DefKind, PerNS, Res}; +use crate::def::{CtorKind, DefKind, MacroKinds, PerNS, Res}; use crate::def_id::{DefId, LocalDefIdMap}; pub(crate) use crate::hir_id::{HirId, ItemLocalId, ItemLocalMap, OwnerId}; use crate::intravisit::{FnKind, VisitorExt}; @@ -1160,6 +1161,12 @@ pub struct AttrPath { pub span: Span, } +impl IntoDiagArg for AttrPath { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } +} + impl AttrPath { pub fn from_ast(path: &ast::Path) -> Self { AttrPath { @@ -1233,6 +1240,13 @@ impl Attribute { _ => None, } } + + pub fn is_parsed_attr(&self) -> bool { + match self { + Attribute::Parsed(_) => true, + Attribute::Unparsed(_) => false, + } + } } impl AttributeExt for Attribute { @@ -1303,14 +1317,10 @@ impl AttributeExt for Attribute { match &self { Attribute::Unparsed(u) => u.span, // FIXME: should not be needed anymore when all attrs are parsed - Attribute::Parsed(AttributeKind::Deprecation { span, .. }) => *span, Attribute::Parsed(AttributeKind::DocComment { span, .. }) => *span, - Attribute::Parsed(AttributeKind::MacroUse { span, .. }) => *span, - Attribute::Parsed(AttributeKind::MayDangle(span)) => *span, - Attribute::Parsed(AttributeKind::Ignore { span, .. }) => *span, - Attribute::Parsed(AttributeKind::ShouldPanic { span, .. }) => *span, - Attribute::Parsed(AttributeKind::AutomaticallyDerived(span)) => *span, + Attribute::Parsed(AttributeKind::Deprecation { span, .. }) => *span, Attribute::Parsed(AttributeKind::AllowInternalUnsafe(span)) => *span, + Attribute::Parsed(AttributeKind::Linkage(_, span)) => *span, a => panic!("can't get the span of an arbitrary parsed attribute: {a:?}"), } } @@ -2257,8 +2267,15 @@ impl fmt::Display for ConstContext { } } -// NOTE: `IntoDiagArg` impl for `ConstContext` lives in `rustc_errors` -// due to a cyclical dependency between hir and that crate. +impl IntoDiagArg for ConstContext { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(match self { + ConstContext::ConstFn => "const_fn", + ConstContext::Static(_) => "static", + ConstContext::Const { .. } => "const", + })) + } +} /// A literal. pub type Lit = Spanned<LitKind>; @@ -4157,7 +4174,7 @@ impl<'hir> Item<'hir> { expect_fn, (Ident, &FnSig<'hir>, &'hir Generics<'hir>, BodyId), ItemKind::Fn { ident, sig, generics, body, .. 
}, (*ident, sig, generics, *body); - expect_macro, (Ident, &ast::MacroDef, MacroKind), + expect_macro, (Ident, &ast::MacroDef, MacroKinds), ItemKind::Macro(ident, def, mk), (*ident, def, *mk); expect_mod, (Ident, &'hir Mod<'hir>), ItemKind::Mod(ident, m), (*ident, m); @@ -4336,7 +4353,7 @@ pub enum ItemKind<'hir> { has_body: bool, }, /// A MBE macro definition (`macro_rules!` or `macro`). - Macro(Ident, &'hir ast::MacroDef, MacroKind), + Macro(Ident, &'hir ast::MacroDef, MacroKinds), /// A module. Mod(Ident, &'hir Mod<'hir>), /// An external module, e.g. `extern { .. }`. diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index 75c04b23ed6..0464665b41f 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -286,6 +286,7 @@ language_item_table! { Panic, sym::panic, panic_fn, Target::Fn, GenericRequirement::Exact(0); PanicNounwind, sym::panic_nounwind, panic_nounwind, Target::Fn, GenericRequirement::Exact(0); PanicFmt, sym::panic_fmt, panic_fmt, Target::Fn, GenericRequirement::None; + PanicDisplay, sym::panic_display, panic_display, Target::Fn, GenericRequirement::None; ConstPanicFmt, sym::const_panic_fmt, const_panic_fmt, Target::Fn, GenericRequirement::None; PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, Target::Fn, GenericRequirement::Exact(0); PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, Target::Fn, GenericRequirement::Exact(0); @@ -436,6 +437,9 @@ language_item_table! { DefaultTrait1, sym::default_trait1, default_trait1_trait, Target::Trait, GenericRequirement::None; ContractCheckEnsures, sym::contract_check_ensures, contract_check_ensures_fn, Target::Fn, GenericRequirement::None; + + // Reborrowing related lang-items + Reborrow, sym::reborrow, reborrow, Target::Trait, GenericRequirement::Exact(0); } /// The requirement imposed on the generics of a lang item diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs index f1212d07ff6..78fc63753a2 100644 --- a/compiler/rustc_hir/src/lib.rs +++ b/compiler/rustc_hir/src/lib.rs @@ -3,14 +3,11 @@ //! 
[rustc dev guide]: https://rustc-dev-guide.rust-lang.org/hir.html // tidy-alphabetical-start -#![allow(internal_features)] #![feature(associated_type_defaults)] #![feature(closure_track_caller)] #![feature(debug_closure_helpers)] #![feature(exhaustive_patterns)] -#![feature(negative_impls)] #![feature(never_type)] -#![feature(rustc_attrs)] #![feature(variant_count)] #![recursion_limit = "256"] // tidy-alphabetical-end @@ -25,7 +22,7 @@ pub mod definitions; pub mod diagnostic_items; pub use rustc_span::def_id; mod hir; -pub mod hir_id; +pub use rustc_hir_id::{self as hir_id, *}; pub mod intravisit; pub mod lang_items; pub mod lints; @@ -41,7 +38,6 @@ mod tests; #[doc(no_inline)] pub use hir::*; -pub use hir_id::*; pub use lang_items::{LangItem, LanguageItems}; pub use stability::*; pub use stable_hash_impls::HashStableContext; diff --git a/compiler/rustc_hir/src/lints.rs b/compiler/rustc_hir/src/lints.rs index c55a41eb2b7..0b24052b453 100644 --- a/compiler/rustc_hir/src/lints.rs +++ b/compiler/rustc_hir/src/lints.rs @@ -2,7 +2,7 @@ use rustc_data_structures::fingerprint::Fingerprint; use rustc_macros::HashStable_Generic; use rustc_span::Span; -use crate::HirId; +use crate::{AttrPath, HirId, Target}; #[derive(Debug)] pub struct DelayedLints { @@ -34,4 +34,6 @@ pub enum AttributeLintKind { UnusedDuplicate { this: Span, other: Span, warning: bool }, IllFormedAttributeInput { suggestions: Vec<String> }, EmptyAttribute { first_span: Span }, + InvalidTarget { name: AttrPath, target: Target, applied: Vec<String>, only: &'static str }, + InvalidStyle { name: AttrPath, is_used_as_inner: bool, target: Target, target_span: Span }, } diff --git a/compiler/rustc_hir/src/stable_hash_impls.rs b/compiler/rustc_hir/src/stable_hash_impls.rs index ecc608d437b..16e8bac3d8a 100644 --- a/compiler/rustc_hir/src/stable_hash_impls.rs +++ b/compiler/rustc_hir/src/stable_hash_impls.rs @@ -5,7 +5,7 @@ use crate::HashIgnoredAttrId; use crate::hir::{ AttributeMap, BodyId, Crate, ForeignItemId, ImplItemId, ItemId, OwnerNodes, TraitItemId, }; -use crate::hir_id::{HirId, ItemLocalId}; +use crate::hir_id::ItemLocalId; use crate::lints::DelayedLints; /// Requirements for a `StableHashingContext` to be used in this crate. 
@@ -15,25 +15,6 @@ pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStabl
 fn hash_attr_id(&mut self, id: &HashIgnoredAttrId, hasher: &mut StableHasher);
 }
-impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for HirId {
- type KeyType = (DefPathHash, ItemLocalId);
-
- #[inline]
- fn to_stable_hash_key(&self, hcx: &HirCtx) -> (DefPathHash, ItemLocalId) {
- let def_path_hash = self.owner.def_id.to_stable_hash_key(hcx);
- (def_path_hash, self.local_id)
- }
-}
-
-impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for ItemLocalId {
- type KeyType = ItemLocalId;
-
- #[inline]
- fn to_stable_hash_key(&self, _: &HirCtx) -> ItemLocalId {
- *self
- }
-}
-
 impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for BodyId {
 type KeyType = (DefPathHash, ItemLocalId);
diff --git a/compiler/rustc_hir/src/target.rs b/compiler/rustc_hir/src/target.rs
index d617f44f8d8..dcac51b10b4 100644
--- a/compiler/rustc_hir/src/target.rs
+++ b/compiler/rustc_hir/src/target.rs
@@ -6,23 +6,34 @@
 use std::fmt::{self, Display};
+use rustc_ast::visit::AssocCtxt;
+use rustc_ast::{AssocItemKind, ForeignItemKind, ast};
+use rustc_macros::HashStable_Generic;
+
 use crate::def::DefKind;
 use crate::{Item, ItemKind, TraitItem, TraitItemKind, hir};
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone, PartialEq, Debug, Eq, HashStable_Generic)]
 pub enum GenericParamKind {
 Type,
 Lifetime,
 Const,
 }
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone, PartialEq, Debug, Eq, HashStable_Generic)]
 pub enum MethodKind {
- Trait { body: bool },
+ /// Method in a `trait Trait` block
+ Trait {
+ /// Whether a default is provided for this method
+ body: bool,
+ },
+ /// Method in an `impl Trait for Type` block
+ TraitImpl,
+ /// Method in an `impl Type` block
 Inherent,
 }
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone, PartialEq, Debug, Eq, HashStable_Generic)]
 pub enum Target {
 ExternCrate,
 Use,
@@ -57,6 +68,9 @@ pub enum Target {
 PatField,
 ExprField,
 WherePredicate,
+ MacroCall,
+ Crate,
+ Delegation { mac: bool },
 }
 impl Display for Target {
@@ -65,6 +79,8 @@ impl Display for Target {
 }
 }
+rustc_error_messages::into_diag_arg_using_display!(Target);
+
 impl Target {
 pub fn is_associated_item(self) -> bool {
 match self {
@@ -98,7 +114,10 @@ impl Target {
 | Target::Param
 | Target::PatField
 | Target::ExprField
- | Target::WherePredicate => false,
+ | Target::MacroCall
+ | Target::Crate
+ | Target::WherePredicate
+ | Target::Delegation { .. } => false,
 }
 }
@@ -146,6 +165,39 @@ impl Target {
 }
 }
+ pub fn from_ast_item(item: &ast::Item) -> Target {
+ match item.kind {
+ ast::ItemKind::ExternCrate(..) => Target::ExternCrate,
+ ast::ItemKind::Use(..) => Target::Use,
+ ast::ItemKind::Static { .. } => Target::Static,
+ ast::ItemKind::Const(..) => Target::Const,
+ ast::ItemKind::Fn { .. } => Target::Fn,
+ ast::ItemKind::Mod(..) => Target::Mod,
+ ast::ItemKind::ForeignMod { .. } => Target::ForeignMod,
+ ast::ItemKind::GlobalAsm { .. } => Target::GlobalAsm,
+ ast::ItemKind::TyAlias(..) => Target::TyAlias,
+ ast::ItemKind::Enum(..) => Target::Enum,
+ ast::ItemKind::Struct(..) => Target::Struct,
+ ast::ItemKind::Union(..) => Target::Union,
+ ast::ItemKind::Trait(..) => Target::Trait,
+ ast::ItemKind::TraitAlias(..) => Target::TraitAlias,
+ ast::ItemKind::Impl(ref i) => Target::Impl { of_trait: i.of_trait.is_some() },
+ ast::ItemKind::MacCall(..) => Target::MacroCall,
+ ast::ItemKind::MacroDef(..) => Target::MacroDef,
+ ast::ItemKind::Delegation(..)
=> Target::Delegation { mac: false }, + ast::ItemKind::DelegationMac(..) => Target::Delegation { mac: true }, + } + } + + pub fn from_foreign_item_kind(kind: &ast::ForeignItemKind) -> Target { + match kind { + ForeignItemKind::Static(_) => Target::ForeignStatic, + ForeignItemKind::Fn(_) => Target::ForeignFn, + ForeignItemKind::TyAlias(_) => Target::ForeignTy, + ForeignItemKind::MacCall(_) => Target::MacroCall, + } + } + pub fn from_trait_item(trait_item: &TraitItem<'_>) -> Target { match trait_item.kind { TraitItemKind::Const(..) => Target::AssocConst, @@ -183,12 +235,40 @@ impl Target { } } + pub fn from_assoc_item_kind(kind: &ast::AssocItemKind, assoc_ctxt: AssocCtxt) -> Target { + match kind { + AssocItemKind::Const(_) => Target::AssocConst, + AssocItemKind::Fn(f) => Target::Method(match assoc_ctxt { + AssocCtxt::Trait => MethodKind::Trait { body: f.body.is_some() }, + AssocCtxt::Impl { of_trait } => { + if of_trait { + MethodKind::TraitImpl + } else { + MethodKind::Inherent + } + } + }), + AssocItemKind::Type(_) => Target::AssocTy, + AssocItemKind::Delegation(_) => Target::Delegation { mac: false }, + AssocItemKind::DelegationMac(_) => Target::Delegation { mac: true }, + AssocItemKind::MacCall(_) => Target::MacroCall, + } + } + + pub fn from_expr(expr: &ast::Expr) -> Self { + match &expr.kind { + ast::ExprKind::Closure(..) | ast::ExprKind::Gen(..) => Self::Closure, + ast::ExprKind::Paren(e) => Self::from_expr(&e), + _ => Self::Expression, + } + } + pub fn name(self) -> &'static str { match self { Target::ExternCrate => "extern crate", Target::Use => "use", - Target::Static => "static item", - Target::Const => "constant item", + Target::Static => "static", + Target::Const => "constant", Target::Fn => "function", Target::Closure => "closure", Target::Mod => "module", @@ -202,8 +282,7 @@ impl Target { Target::Union => "union", Target::Trait => "trait", Target::TraitAlias => "trait alias", - Target::Impl { of_trait: false } => "inherent implementation block", - Target::Impl { of_trait: true } => "trait implementation block", + Target::Impl { .. } => "implementation block", Target::Expression => "expression", Target::Statement => "statement", Target::Arm => "match arm", @@ -212,12 +291,13 @@ impl Target { MethodKind::Inherent => "inherent method", MethodKind::Trait { body: false } => "required trait method", MethodKind::Trait { body: true } => "provided trait method", + MethodKind::TraitImpl => "trait method in an impl block", }, Target::AssocTy => "associated type", Target::ForeignFn => "foreign function", Target::ForeignStatic => "foreign static item", Target::ForeignTy => "foreign type", - Target::GenericParam { kind, has_default: _ } => match kind { + Target::GenericParam { kind, .. } => match kind { GenericParamKind::Type => "type parameter", GenericParamKind::Lifetime => "lifetime parameter", GenericParamKind::Const => "const parameter", @@ -227,6 +307,60 @@ impl Target { Target::PatField => "pattern field", Target::ExprField => "struct field", Target::WherePredicate => "where predicate", + Target::MacroCall => "macro call", + Target::Crate => "crate", + Target::Delegation { .. 
} => "delegation", + } + } + + pub fn plural_name(self) -> &'static str { + match self { + Target::ExternCrate => "extern crates", + Target::Use => "use statements", + Target::Static => "statics", + Target::Const => "constants", + Target::Fn => "functions", + Target::Closure => "closures", + Target::Mod => "modules", + Target::ForeignMod => "foreign modules", + Target::GlobalAsm => "global asms", + Target::TyAlias => "type aliases", + Target::Enum => "enums", + Target::Variant => "enum variants", + Target::Struct => "structs", + Target::Field => "struct fields", + Target::Union => "unions", + Target::Trait => "traits", + Target::TraitAlias => "trait aliases", + Target::Impl { of_trait: false } => "inherent impl blocks", + Target::Impl { of_trait: true } => "trait impl blocks", + Target::Expression => "expressions", + Target::Statement => "statements", + Target::Arm => "match arms", + Target::AssocConst => "associated consts", + Target::Method(kind) => match kind { + MethodKind::Inherent => "inherent methods", + MethodKind::Trait { body: false } => "required trait methods", + MethodKind::Trait { body: true } => "provided trait methods", + MethodKind::TraitImpl => "trait methods in impl blocks", + }, + Target::AssocTy => "associated types", + Target::ForeignFn => "foreign functions", + Target::ForeignStatic => "foreign statics", + Target::ForeignTy => "foreign types", + Target::GenericParam { kind, has_default: _ } => match kind { + GenericParamKind::Type => "type parameters", + GenericParamKind::Lifetime => "lifetime parameters", + GenericParamKind::Const => "const parameters", + }, + Target::MacroDef => "macro defs", + Target::Param => "function params", + Target::PatField => "pattern fields", + Target::ExprField => "struct fields", + Target::WherePredicate => "where predicates", + Target::MacroCall => "macro calls", + Target::Crate => "crates", + Target::Delegation { .. 
} => "delegations", } } } diff --git a/compiler/rustc_hir/src/version.rs b/compiler/rustc_hir/src/version.rs index ab5ab026b4c..bc2c38a4935 100644 --- a/compiler/rustc_hir/src/version.rs +++ b/compiler/rustc_hir/src/version.rs @@ -1,6 +1,8 @@ +use std::borrow::Cow; use std::fmt::{self, Display}; use std::sync::OnceLock; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::{ Decodable, Encodable, HashStable_Generic, PrintAttribute, current_rustc_version, }; @@ -45,3 +47,9 @@ impl Display for RustcVersion { write!(formatter, "{}.{}.{}", self.major, self.minor, self.patch) } } + +impl IntoDiagArg for RustcVersion { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.to_string())) + } +} diff --git a/compiler/rustc_hir_analysis/Cargo.toml b/compiler/rustc_hir_analysis/Cargo.toml index e5017794d8f..bb86beb2251 100644 --- a/compiler/rustc_hir_analysis/Cargo.toml +++ b/compiler/rustc_hir_analysis/Cargo.toml @@ -9,7 +9,7 @@ doctest = false [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } @@ -29,5 +29,5 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index 2428c1aa29f..06f2ec512ab 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -509,10 +509,6 @@ hir_analysis_supertrait_item_shadowee = item from `{$supertrait}` is shadowed by hir_analysis_supertrait_item_shadowing = trait item `{$item}` from `{$subtrait}` shadows identically named item from supertrait -hir_analysis_tait_forward_compat2 = item does not constrain `{$opaque_type}` - .note = consider removing `#[define_opaque]` or adding an empty `#[define_opaque()]` - .opaque = this opaque type is supposed to be constrained - hir_analysis_target_feature_on_main = `main` function is not allowed to have `#[target_feature]` hir_analysis_too_large_static = extern static is too large for the target architecture diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 161a8566b04..eccb88a938f 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -2053,3 +2053,29 @@ pub(super) fn check_coroutine_obligations( Ok(()) } + +pub(super) fn check_potentially_region_dependent_goals<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: LocalDefId, +) -> Result<(), ErrorGuaranteed> { + if !tcx.next_trait_solver_globally() { + return Ok(()); + } + let typeck_results = tcx.typeck(def_id); + let param_env = tcx.param_env(def_id); + + // We use `TypingMode::Borrowck` as we want to use the opaque types computed by HIR typeck. 
+ let typing_mode = TypingMode::borrowck(tcx, def_id); + let infcx = tcx.infer_ctxt().ignoring_regions().build(typing_mode); + let ocx = ObligationCtxt::new_with_diagnostics(&infcx); + for (predicate, cause) in &typeck_results.potentially_region_dependent_goals { + let predicate = fold_regions(tcx, *predicate, |_, _| { + infcx.next_region_var(RegionVariableOrigin::Misc(cause.span)) + }); + ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate)); + } + + let errors = ocx.select_all_or_error(); + debug!(?errors); + if errors.is_empty() { Ok(()) } else { Err(infcx.err_ctxt().report_fulfillment_errors(errors)) } +} diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs index 6767e5ed88d..e4827256193 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs @@ -445,10 +445,10 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( tcx: TyCtxt<'tcx>, impl_m_def_id: LocalDefId, ) -> Result<&'tcx DefIdMap<ty::EarlyBinder<'tcx, Ty<'tcx>>>, ErrorGuaranteed> { - let impl_m = tcx.opt_associated_item(impl_m_def_id.to_def_id()).unwrap(); - let trait_m = tcx.opt_associated_item(impl_m.trait_item_def_id.unwrap()).unwrap(); + let impl_m = tcx.associated_item(impl_m_def_id.to_def_id()); + let trait_m = tcx.associated_item(impl_m.trait_item_def_id.unwrap()); let impl_trait_ref = - tcx.impl_trait_ref(impl_m.impl_container(tcx).unwrap()).unwrap().instantiate_identity(); + tcx.impl_trait_ref(tcx.parent(impl_m_def_id.to_def_id())).unwrap().instantiate_identity(); // First, check a few of the same things as `compare_impl_method`, // just so we don't ICE during instantiation later. 
check_method_is_structurally_compatible(tcx, impl_m, trait_m, impl_trait_ref, true)?; diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index 4441dd6ebd6..cfc6bc2f3a0 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -135,6 +135,11 @@ fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hi | sym::round_ties_even_f32 | sym::round_ties_even_f64 | sym::round_ties_even_f128 + | sym::autodiff + | sym::prefetch_read_data + | sym::prefetch_write_data + | sym::prefetch_read_instruction + | sym::prefetch_write_instruction | sym::const_eval_select => hir::Safety::Safe, _ => hir::Safety::Unsafe, }; @@ -198,6 +203,7 @@ pub(crate) fn check_intrinsic_type( let safety = intrinsic_operation_unsafety(tcx, intrinsic_id); let n_lts = 0; let (n_tps, n_cts, inputs, output) = match intrinsic_name { + sym::autodiff => (4, 0, vec![param(0), param(1), param(2)], param(3)), sym::abort => (0, 0, vec![], tcx.types.never), sym::unreachable => (0, 0, vec![], tcx.types.never), sym::breakpoint => (0, 0, vec![], tcx.types.unit), @@ -216,7 +222,7 @@ pub(crate) fn check_intrinsic_type( | sym::prefetch_write_data | sym::prefetch_read_instruction | sym::prefetch_write_instruction => { - (1, 0, vec![Ty::new_imm_ptr(tcx, param(0)), tcx.types.i32], tcx.types.unit) + (1, 1, vec![Ty::new_imm_ptr(tcx, param(0))], tcx.types.unit) } sym::needs_drop => (1, 0, vec![], tcx.types.bool), diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index 85445cb3c00..2e4b151d4dc 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -109,6 +109,7 @@ pub(super) fn provide(providers: &mut Providers) { collect_return_position_impl_trait_in_trait_tys, compare_impl_item: compare_impl_item::compare_impl_item, check_coroutine_obligations: check::check_coroutine_obligations, + check_potentially_region_dependent_goals: check::check_potentially_region_dependent_goals, check_type_wf: wfcheck::check_type_wf, check_well_formed: wfcheck::check_well_formed, ..*providers diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index c642435b989..e6a1f6d8d8b 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -2287,8 +2287,7 @@ fn lint_redundant_lifetimes<'tcx>( // Proceed } DefKind::AssocFn | DefKind::AssocTy | DefKind::AssocConst => { - let parent_def_id = tcx.local_parent(owner_id); - if matches!(tcx.def_kind(parent_def_id), DefKind::Impl { of_trait: true }) { + if tcx.trait_impl_of_assoc(owner_id.to_def_id()).is_some() { // Don't check for redundant lifetimes for associated items of trait // implementations, since the signature is required to be compatible // with the trait, even if the implementation implies some lifetimes diff --git a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs index 50e20a19eda..b6d898886ac 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs @@ -4,9 +4,10 @@ use rustc_hir::{self as hir, Expr, ImplItem, Item, Node, TraitItem, def, intravi use rustc_middle::bug; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{self, DefiningScopeKind, Ty, TyCtxt, TypeVisitableExt}; +use 
rustc_trait_selection::opaque_types::report_item_does_not_constrain_error; use tracing::{debug, instrument, trace}; -use crate::errors::{TaitForwardCompat2, UnconstrainedOpaqueType}; +use crate::errors::UnconstrainedOpaqueType; /// Checks "defining uses" of opaque `impl Trait` in associated types. /// These can only be defined by associated items of the same trait. @@ -127,14 +128,11 @@ impl<'tcx> TaitConstraintLocator<'tcx> { } fn non_defining_use_in_defining_scope(&mut self, item_def_id: LocalDefId) { - let guar = self.tcx.dcx().emit_err(TaitForwardCompat2 { - span: self - .tcx - .def_ident_span(item_def_id) - .unwrap_or_else(|| self.tcx.def_span(item_def_id)), - opaque_type_span: self.tcx.def_span(self.def_id), - opaque_type: self.tcx.def_path_str(self.def_id), - }); + // We make sure that all opaque types get defined while + // type checking the defining scope, so this error is unreachable + // with the new solver. + assert!(!self.tcx.next_trait_solver_globally()); + let guar = report_item_does_not_constrain_error(self.tcx, item_def_id, self.def_id, None); self.insert_found(ty::OpaqueHiddenType::new_error(self.tcx, guar)); } @@ -252,9 +250,7 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>( } else if let Some(hidden_ty) = tables.concrete_opaque_types.get(&def_id) { hidden_ty.ty } else { - // FIXME(-Znext-solver): This should not be necessary and we should - // instead rely on inference variable fallback inside of typeck itself. - + assert!(!tcx.next_trait_solver_globally()); // We failed to resolve the opaque type or it // resolves to itself. We interpret this as the // no values of the hidden type ever being constructed, @@ -273,6 +269,7 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>( if let Err(guar) = hir_ty.error_reported() { Ty::new_error(tcx, guar) } else { + assert!(!tcx.next_trait_solver_globally()); hir_ty } } diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 26a98722b34..6565ad7cf53 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -410,17 +410,6 @@ pub(crate) struct UnconstrainedOpaqueType { pub what: &'static str, } -#[derive(Diagnostic)] -#[diag(hir_analysis_tait_forward_compat2)] -#[note] -pub(crate) struct TaitForwardCompat2 { - #[primary_span] - pub span: Span, - #[note(hir_analysis_opaque)] - pub opaque_type_span: Span, - pub opaque_type: String, -} - pub(crate) struct MissingTypeParams { pub span: Span, pub def_span: Span, diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs index 0043f0c7117..b38639ed8c6 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs @@ -390,45 +390,13 @@ fn check_predicates<'tcx>( let mut res = Ok(()); for (clause, span) in impl1_predicates { - if !impl2_predicates.iter().any(|pred2| trait_predicates_eq(clause.as_predicate(), *pred2)) - { + if !impl2_predicates.iter().any(|&pred2| clause.as_predicate() == pred2) { res = res.and(check_specialization_on(tcx, clause, span)) } } res } -/// Checks if some predicate on the specializing impl (`predicate1`) is the same -/// as some predicate on the base impl (`predicate2`). 
-/// -/// This basically just checks syntactic equivalence, but is a little more -/// forgiving since we want to equate `T: Tr` with `T: [const] Tr` so this can work: -/// -/// ```ignore (illustrative) -/// #[rustc_specialization_trait] -/// trait Specialize { } -/// -/// impl<T: Bound> Tr for T { } -/// impl<T: [const] Bound + Specialize> const Tr for T { } -/// ``` -/// -/// However, we *don't* want to allow the reverse, i.e., when the bound on the -/// specializing impl is not as const as the bound on the base impl: -/// -/// ```ignore (illustrative) -/// impl<T: [const] Bound> const Tr for T { } -/// impl<T: Bound + Specialize> const Tr for T { } // should be T: [const] Bound -/// ``` -/// -/// So we make that check in this function and try to raise a helpful error message. -fn trait_predicates_eq<'tcx>( - predicate1: ty::Predicate<'tcx>, - predicate2: ty::Predicate<'tcx>, -) -> bool { - // FIXME(const_trait_impl) - predicate1 == predicate2 -} - #[instrument(level = "debug", skip(tcx))] fn check_specialization_on<'tcx>( tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 3a153ab089a..44a5ceed469 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -82,7 +82,7 @@ mod coherence; mod collect; mod constrained_generic_params; mod delegation; -mod errors; +pub mod errors; pub mod hir_ty_lowering; pub mod hir_wf_check; mod impl_wf_check; diff --git a/compiler/rustc_hir_id/Cargo.toml b/compiler/rustc_hir_id/Cargo.toml new file mode 100644 index 00000000000..c357a4f62d9 --- /dev/null +++ b/compiler/rustc_hir_id/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "rustc_hir_id" +version = "0.0.0" +edition = "2024" + +[dependencies] +# tidy-alphabetical-start +rustc_data_structures = { path = "../rustc_data_structures" } +rustc_index = { path = "../rustc_index" } +rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } +rustc_span = { path = "../rustc_span" } +# tidy-alphabetical-end diff --git a/compiler/rustc_hir/src/hir_id.rs b/compiler/rustc_hir_id/src/lib.rs index b48a081d371..d07bc88e66a 100644 --- a/compiler/rustc_hir/src/hir_id.rs +++ b/compiler/rustc_hir_id/src/lib.rs @@ -1,11 +1,15 @@ +//! Library containing Id types from `rustc_hir`, split out so crates can use it without depending +//! on all of `rustc_hir` (which is large and depends on other large things like `rustc_target`). 
+#![allow(internal_features)] +#![feature(negative_impls)] +#![feature(rustc_attrs)] + use std::fmt::{self, Debug}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; -use rustc_span::HashStableContext; -use rustc_span::def_id::DefPathHash; - -use crate::def_id::{CRATE_DEF_ID, DefId, DefIndex, LocalDefId}; +pub use rustc_span::HashStableContext; +use rustc_span::def_id::{CRATE_DEF_ID, DefId, DefIndex, DefPathHash, LocalDefId}; #[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable)] pub struct OwnerId { @@ -171,3 +175,22 @@ pub const CRATE_HIR_ID: HirId = HirId { owner: OwnerId { def_id: CRATE_DEF_ID }, local_id: ItemLocalId::ZERO }; pub const CRATE_OWNER_ID: OwnerId = OwnerId { def_id: CRATE_DEF_ID }; + +impl<CTX: rustc_span::HashStableContext> ToStableHashKey<CTX> for HirId { + type KeyType = (DefPathHash, ItemLocalId); + + #[inline] + fn to_stable_hash_key(&self, hcx: &CTX) -> (DefPathHash, ItemLocalId) { + let def_path_hash = self.owner.def_id.to_stable_hash_key(hcx); + (def_path_hash, self.local_id) + } +} + +impl<CTX: HashStableContext> ToStableHashKey<CTX> for ItemLocalId { + type KeyType = ItemLocalId; + + #[inline] + fn to_stable_hash_key(&self, _: &CTX) -> ItemLocalId { + *self + } +} diff --git a/compiler/rustc_hir_typeck/Cargo.toml b/compiler/rustc_hir_typeck/Cargo.toml index f00125c3e09..5af0c6134ca 100644 --- a/compiler/rustc_hir_typeck/Cargo.toml +++ b/compiler/rustc_hir_typeck/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_attr_parsing = { path = "../rustc_attr_parsing" } @@ -25,5 +25,5 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 48bb45de53e..4200afb74e6 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -511,7 +511,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Untranslatable diagnostics are okay for rustc internals #[allow(rustc::untranslatable_diagnostic)] #[allow(rustc::diagnostic_outside_of_impl)] - if self.tcx.has_attr(def_id, sym::rustc_evaluate_where_clauses) { + if self.has_rustc_attrs + && self.tcx.has_attr(def_id, sym::rustc_evaluate_where_clauses) + { let predicates = self.tcx.predicates_of(def_id); let predicates = predicates.instantiate(self.tcx, args); for (predicate, predicate_span) in predicates { @@ -894,7 +896,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } // If we have `rustc_do_not_const_check`, do not check `[const]` bounds. - if self.tcx.has_attr(self.body_id, sym::rustc_do_not_const_check) { + if self.has_rustc_attrs && self.tcx.has_attr(self.body_id, sym::rustc_do_not_const_check) { return; } @@ -910,7 +912,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // const stability checking here too, I guess. if self.tcx.is_conditionally_const(callee_did) { let q = self.tcx.const_conditions(callee_did); - // FIXME(const_trait_impl): Use this span with a better cause code. 
for (idx, (cond, pred_span)) in q.instantiate(self.tcx, callee_args).into_iter().enumerate() {
diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs
index 0498a938366..940f0e3708d 100644
--- a/compiler/rustc_hir_typeck/src/expr.rs
+++ b/compiler/rustc_hir_typeck/src/expr.rs
@@ -290,6 +290,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 | ExprKind::Let(..)
 | ExprKind::Loop(..)
 | ExprKind::Match(..) => {}
+ // Do not warn on `as` casts from never to any,
+ // they are sometimes required to appease typeck.
+ ExprKind::Cast(_, _) => {}
 // If `expr` is a result of desugaring the try block and is an ok-wrapped
 // diverging expression (e.g. it arose from desugaring of `try { return }`),
 // we skip issuing a warning because it is autogenerated code.
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
index b80a2af3100..5aec50c8b53 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
@@ -83,14 +83,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 *self.deferred_cast_checks.borrow_mut() = deferred_cast_checks;
 }
- pub(in super::super) fn check_transmutes(&self) {
- let mut deferred_transmute_checks = self.deferred_transmute_checks.borrow_mut();
- debug!("FnCtxt::check_transmutes: {} deferred checks", deferred_transmute_checks.len());
- for (from, to, hir_id) in deferred_transmute_checks.drain(..) {
- self.check_transmute(from, to, hir_id);
- }
- }
-
 pub(in super::super) fn check_asms(&self) {
 let mut deferred_asm_checks = self.deferred_asm_checks.borrow_mut();
 debug!("FnCtxt::check_asm: {} deferred checks", deferred_asm_checks.len());
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
index 0c6226ce71e..74f27e85cba 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
@@ -126,6 +126,10 @@ pub(crate) struct FnCtxt<'a, 'tcx> {
 /// These are stored here so we may collect them when canonicalizing user
 /// type ascriptions later.
 pub(super) trait_ascriptions: RefCell<ItemLocalMap<Vec<ty::Clause<'tcx>>>>,
+
+ /// Whether the current crate enables the `rustc_attrs` feature.
+ /// This allows us to skip processing attributes in many places.
+ pub(super) has_rustc_attrs: bool,
 }
 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@@ -154,6 +158,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 diverging_fallback_behavior,
 diverging_block_behavior,
 trait_ascriptions: Default::default(),
+ has_rustc_attrs: root_ctxt.tcx.features().rustc_attrs(),
 }
 }
@@ -525,10 +530,13 @@ fn parse_never_type_options_attr(
 let mut fallback = None;
 let mut block = None;
- let items = tcx
- .get_attr(CRATE_DEF_ID, sym::rustc_never_type_options)
- .map(|attr| attr.meta_item_list().unwrap())
- .unwrap_or_default();
+ let items = if tcx.features().rustc_attrs() {
+ tcx.get_attr(CRATE_DEF_ID, sym::rustc_never_type_options)
+ .map(|attr| attr.meta_item_list().unwrap())
+ } else {
+ None
+ };
+ let items = items.unwrap_or_default();
 for item in items {
 if item.has_name(sym::fallback) && fallback.is_none() {
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
index 6013430e1ff..aca3840712e 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
@@ -2378,6 +2378,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 .filter_map(|variant| {
 let sole_field = &variant.single_field();
+ // When expected_ty and expr_ty are the same ADT, we prefer to compare their internal generic params.
+ // When the current variant has a sole field whose type is still an unresolved inference variable,
+ // suggestions would often be wrong, so suppress the suggestion. See #145294.
+ if let (ty::Adt(exp_adt, _), ty::Adt(act_adt, _)) = (expected.kind(), expr_ty.kind())
+ && exp_adt.did() == act_adt.did()
+ && sole_field.ty(self.tcx, args).is_ty_var() {
+ return None;
+ }
+
 let field_is_local = sole_field.did.is_local();
 let field_is_accessible =
 sole_field.vis.is_accessible_from(expr.hir_id.owner.def_id, self.tcx)
diff --git a/compiler/rustc_hir_typeck/src/intrinsicck.rs b/compiler/rustc_hir_typeck/src/intrinsicck.rs
index 194e420b606..7567f8ba348 100644
--- a/compiler/rustc_hir_typeck/src/intrinsicck.rs
+++ b/compiler/rustc_hir_typeck/src/intrinsicck.rs
@@ -7,11 +7,10 @@ use rustc_hir as hir;
 use rustc_index::Idx;
 use rustc_middle::bug;
 use rustc_middle::ty::layout::{LayoutError, SizeSkeleton};
-use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_span::def_id::LocalDefId;
 use tracing::trace;
-use super::FnCtxt;
-
 /// If the type is `Option<T>`, it will return `T`, otherwise
 /// the type itself. Works on most `Option`-like types.
 fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
@@ -39,119 +38,117 @@ fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
 ty
 }
-impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
- /// FIXME: Move this check out of typeck, since it'll easily cycle when revealing opaques,
- /// and we shouldn't need to check anything here if the typeck results are tainted.
- pub(crate) fn check_transmute(&self, from: Ty<'tcx>, to: Ty<'tcx>, hir_id: HirId) {
- let tcx = self.tcx;
- let dl = &tcx.data_layout;
- let span = tcx.hir_span(hir_id);
- let normalize = |ty| {
- let ty = self.resolve_vars_if_possible(ty);
- if let Ok(ty) =
- self.tcx.try_normalize_erasing_regions(self.typing_env(self.param_env), ty)
- {
- ty
+/// Try to display a sensible error with as much information as possible.
+fn skeleton_string<'tcx>(
+ ty: Ty<'tcx>,
+ sk: Result<SizeSkeleton<'tcx>, &'tcx LayoutError<'tcx>>,
+) -> String {
+ match sk {
+ Ok(SizeSkeleton::Pointer { tail, ..
}) => format!("pointer to `{tail}`"), + Ok(SizeSkeleton::Known(size, _)) => { + if let Some(v) = u128::from(size.bytes()).checked_mul(8) { + format!("{v} bits") } else { - Ty::new_error_with_message( - tcx, - span, - "tried to normalize non-wf type in check_transmute", - ) + // `u128` should definitely be able to hold the size of different architectures + // larger sizes should be reported as error `are too big for the target architecture` + // otherwise we have a bug somewhere + bug!("{:?} overflow for u128", size) } - }; - let from = normalize(from); - let to = normalize(to); - trace!(?from, ?to); - if from.has_non_region_infer() || to.has_non_region_infer() { - // Note: this path is currently not reached in any test, so any - // example that triggers this would be worth minimizing and - // converting into a test. - self.dcx().span_bug(span, "argument to transmute has inference variables"); } - // Transmutes that are only changing lifetimes are always ok. - if from == to { - return; + Ok(SizeSkeleton::Generic(size)) => { + format!("generic size {size}") + } + Err(LayoutError::TooGeneric(bad)) => { + if *bad == ty { + "this type does not have a fixed size".to_owned() + } else { + format!("size can vary because of {bad}") + } + } + Err(err) => err.to_string(), + } +} + +fn check_transmute<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, + from: Ty<'tcx>, + to: Ty<'tcx>, + hir_id: HirId, +) { + let span = || tcx.hir_span(hir_id); + let normalize = |ty| { + if let Ok(ty) = tcx.try_normalize_erasing_regions(typing_env, ty) { + ty + } else { + Ty::new_error_with_message( + tcx, + span(), + format!("tried to normalize non-wf type {ty:#?} in check_transmute"), + ) } + }; - let skel = |ty| SizeSkeleton::compute(ty, tcx, self.typing_env(self.param_env)); - let sk_from = skel(from); - let sk_to = skel(to); - trace!(?sk_from, ?sk_to); + let from = normalize(from); + let to = normalize(to); + trace!(?from, ?to); - // Check for same size using the skeletons. - if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) { - if sk_from.same_size(sk_to) { - return; - } + // Transmutes that are only changing lifetimes are always ok. + if from == to { + return; + } - // Special-case transmuting from `typeof(function)` and - // `Option<typeof(function)>` to present a clearer error. - let from = unpack_option_like(tcx, from); - if let (&ty::FnDef(..), SizeSkeleton::Known(size_to, _)) = (from.kind(), sk_to) - && size_to == Pointer(dl.instruction_address_space).size(&tcx) - { - struct_span_code_err!(self.dcx(), span, E0591, "can't transmute zero-sized type") - .with_note(format!("source type: {from}")) - .with_note(format!("target type: {to}")) - .with_help("cast with `as` to a pointer instead") - .emit(); - return; - } + let sk_from = SizeSkeleton::compute(from, tcx, typing_env); + let sk_to = SizeSkeleton::compute(to, tcx, typing_env); + trace!(?sk_from, ?sk_to); + + // Check for same size using the skeletons. + if let Ok(sk_from) = sk_from + && let Ok(sk_to) = sk_to + { + if sk_from.same_size(sk_to) { + return; } - // Try to display a sensible error with as much information as possible. - let skeleton_string = |ty: Ty<'tcx>, sk: Result<_, &_>| match sk { - Ok(SizeSkeleton::Pointer { tail, .. 
}) => format!("pointer to `{tail}`"), - Ok(SizeSkeleton::Known(size, _)) => { - if let Some(v) = u128::from(size.bytes()).checked_mul(8) { - format!("{v} bits") - } else { - // `u128` should definitely be able to hold the size of different architectures - // larger sizes should be reported as error `are too big for the target architecture` - // otherwise we have a bug somewhere - bug!("{:?} overflow for u128", size) - } - } - Ok(SizeSkeleton::Generic(size)) => { - if let Some(size) = - self.try_structurally_resolve_const(span, size).try_to_target_usize(tcx) - { - format!("{size} bytes") - } else { - format!("generic size {size}") - } - } - Err(LayoutError::TooGeneric(bad)) => { - if *bad == ty { - "this type does not have a fixed size".to_owned() - } else { - format!("size can vary because of {bad}") - } - } - Err(err) => err.to_string(), - }; - - let mut err = struct_span_code_err!( - self.dcx(), - span, - E0512, - "cannot transmute between types of different sizes, \ - or dependently-sized types" - ); - if from == to { - err.note(format!("`{from}` does not have a fixed size")); - err.emit(); - } else { - err.note(format!("source type: `{}` ({})", from, skeleton_string(from, sk_from))) - .note(format!("target type: `{}` ({})", to, skeleton_string(to, sk_to))); - if let Err(LayoutError::ReferencesError(_)) = sk_from { - err.delay_as_bug(); - } else if let Err(LayoutError::ReferencesError(_)) = sk_to { - err.delay_as_bug(); - } else { - err.emit(); - } + // Special-case transmuting from `typeof(function)` and + // `Option<typeof(function)>` to present a clearer error. + let from = unpack_option_like(tcx, from); + if let ty::FnDef(..) = from.kind() + && let SizeSkeleton::Known(size_to, _) = sk_to + && size_to == Pointer(tcx.data_layout.instruction_address_space).size(&tcx) + { + struct_span_code_err!(tcx.sess.dcx(), span(), E0591, "can't transmute zero-sized type") + .with_note(format!("source type: {from}")) + .with_note(format!("target type: {to}")) + .with_help("cast with `as` to a pointer instead") + .emit(); + return; } } + + let mut err = struct_span_code_err!( + tcx.sess.dcx(), + span(), + E0512, + "cannot transmute between types of different sizes, or dependently-sized types" + ); + if from == to { + err.note(format!("`{from}` does not have a fixed size")); + err.emit(); + } else { + err.note(format!("source type: `{}` ({})", from, skeleton_string(from, sk_from))); + err.note(format!("target type: `{}` ({})", to, skeleton_string(to, sk_to))); + err.emit(); + } +} + +pub(crate) fn check_transmutes(tcx: TyCtxt<'_>, owner: LocalDefId) { + assert!(!tcx.is_typeck_child(owner.to_def_id())); + let typeck_results = tcx.typeck(owner); + let None = typeck_results.tainted_by_errors else { return }; + + let typing_env = ty::TypingEnv::post_analysis(tcx, owner); + for &(from, to, hir_id) in &typeck_results.transmutes_to_check { + check_transmute(tcx, typing_env, from, to, hir_id); + } } diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index aae870f7ee3..129de32fd4a 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -46,14 +46,14 @@ use rustc_data_structures::unord::UnordSet; use rustc_errors::codes::*; use rustc_errors::{Applicability, ErrorGuaranteed, pluralize, struct_span_code_err}; use rustc_hir as hir; -use rustc_hir::attrs::AttributeKind; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::{HirId, HirIdMap, Node, find_attr}; +use rustc_hir::{HirId, HirIdMap, Node}; use rustc_hir_analysis::check::{check_abi, 
check_custom_abi}; use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer; use rustc_infer::traits::{ObligationCauseCode, ObligationInspector, WellFormedLoc}; +use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::query::Providers; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_session::config; use rustc_span::Span; @@ -174,7 +174,7 @@ fn typeck_with_inspect<'tcx>( .map(|(idx, ty)| fcx.normalize(arg_span(idx), ty)), ); - if find_attr!(tcx.get_all_attrs(def_id), AttributeKind::Naked(..)) { + if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::NAKED) { naked_functions::typeck_naked_fn(tcx, def_id, body); } @@ -247,33 +247,21 @@ fn typeck_with_inspect<'tcx>( debug!(pending_obligations = ?fcx.fulfillment_cx.borrow().pending_obligations()); - if let None = fcx.infcx.tainted_by_errors() { - fcx.report_ambiguity_errors(); + // We need to handle opaque types before emitting ambiguity errors as applying + // defining uses may guide type inference. + if fcx.next_trait_solver() { + fcx.handle_opaque_type_uses_next(); } + fcx.select_obligations_where_possible(|_| {}); if let None = fcx.infcx.tainted_by_errors() { - fcx.check_transmutes(); + fcx.report_ambiguity_errors(); } fcx.check_asms(); let typeck_results = fcx.resolve_type_vars_in_body(body); - // Handle potentially region dependent goals, see `InferCtxt::in_hir_typeck`. - if let None = fcx.infcx.tainted_by_errors() { - for obligation in fcx.take_hir_typeck_potentially_region_dependent_goals() { - let obligation = fcx.resolve_vars_if_possible(obligation); - if obligation.has_non_region_infer() { - bug!("unexpected inference variable after writeback: {obligation:?}"); - } - fcx.register_predicate(obligation); - } - fcx.select_obligations_where_possible(|_| {}); - if let None = fcx.infcx.tainted_by_errors() { - fcx.report_ambiguity_errors(); - } - } - fcx.detect_opaque_types_added_during_writeback(); // Consistency check our TypeckResults instance can hold all ItemLocalIds @@ -555,6 +543,7 @@ pub fn provide(providers: &mut Providers) { method_autoderef_steps: method::probe::method_autoderef_steps, typeck, used_trait_imports, + check_transmutes: intrinsicck::check_transmutes, ..*providers }; } diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index 3ca3b56b87e..bb4748b0565 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -777,31 +777,16 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { self.assemble_inherent_candidates_from_object(generalized_self_ty); self.assemble_inherent_impl_candidates_for_type(p.def_id(), receiver_steps); - if self.tcx.has_attr(p.def_id(), sym::rustc_has_incoherent_inherent_impls) { - self.assemble_inherent_candidates_for_incoherent_ty( - raw_self_ty, - receiver_steps, - ); - } + self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps); } ty::Adt(def, _) => { let def_id = def.did(); self.assemble_inherent_impl_candidates_for_type(def_id, receiver_steps); - if self.tcx.has_attr(def_id, sym::rustc_has_incoherent_inherent_impls) { - self.assemble_inherent_candidates_for_incoherent_ty( - raw_self_ty, - receiver_steps, - ); - } + self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps); } ty::Foreign(did) => { self.assemble_inherent_impl_candidates_for_type(did, receiver_steps); - if self.tcx.has_attr(did, 
sym::rustc_has_incoherent_inherent_impls) { - self.assemble_inherent_candidates_for_incoherent_ty( - raw_self_ty, - receiver_steps, - ); - } + self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps); } ty::Param(_) => { self.assemble_inherent_candidates_from_param(raw_self_ty); @@ -909,6 +894,10 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { debug_assert_matches!(param_ty.kind(), ty::Param(_)); let tcx = self.tcx; + + // We use `DeepRejectCtxt` here which may return false positive on where clauses + // with alias self types. We need to later on reject these as inherent candidates + // in `consider_probe`. let bounds = self.param_env.caller_bounds().iter().filter_map(|predicate| { let bound_predicate = predicate.kind(); match bound_predicate.skip_binder() { @@ -1945,6 +1934,29 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { ); (xform_self_ty, xform_ret_ty) = self.xform_self_ty(probe.item, trait_ref.self_ty(), trait_ref.args); + + if matches!(probe.kind, WhereClauseCandidate(_)) { + // `WhereClauseCandidate` requires that the self type is a param, + // because it has special behavior with candidate preference as an + // inherent pick. + match ocx.structurally_normalize_ty( + cause, + self.param_env, + trait_ref.self_ty(), + ) { + Ok(ty) => { + if !matches!(ty.kind(), ty::Param(_)) { + debug!("--> not a param ty: {xform_self_ty:?}"); + return ProbeResult::NoMatch; + } + } + Err(errors) => { + debug!("--> cannot relate self-types {:?}", errors); + return ProbeResult::NoMatch; + } + } + } + xform_self_ty = ocx.normalize(cause, self.param_env, xform_self_ty); match ocx.relate(cause, self.param_env, self.variance(), self_ty, xform_self_ty) { @@ -2373,17 +2385,14 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { if !self.is_relevant_kind_for_mode(x.kind) { return false; } - if self.matches_by_doc_alias(x.def_id) { - return true; - } - match edit_distance_with_substrings( + if let Some(d) = edit_distance_with_substrings( name.as_str(), x.name().as_str(), max_dist, ) { - Some(d) => d > 0, - None => false, + return d > 0; } + self.matches_by_doc_alias(x.def_id) }) .copied() .collect() diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index 824d592fa6c..c8f6c06b720 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -13,9 +13,7 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::unord::UnordSet; use rustc_errors::codes::*; -use rustc_errors::{ - Applicability, Diag, DiagStyledString, MultiSpan, StashKey, pluralize, struct_span_code_err, -}; +use rustc_errors::{Applicability, Diag, MultiSpan, StashKey, pluralize, struct_span_code_err}; use rustc_hir::attrs::AttributeKind; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::DefId; @@ -1560,11 +1558,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } - if rcvr_ty.is_numeric() && rcvr_ty.is_fresh() - || restrict_type_params - || suggested_derive - || self.lookup_alternative_tuple_impls(&mut err, &unsatisfied_predicates) - { + if rcvr_ty.is_numeric() && rcvr_ty.is_fresh() || restrict_type_params || suggested_derive { } else { self.suggest_traits_to_import( &mut err, @@ -1741,119 +1735,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.emit() } - /// If the predicate failure is caused by an unmet bound on a tuple, recheck if the bound would - /// succeed if all the types on the tuple had no borrows. 
This is a common problem for libraries - /// like Bevy and ORMs, which rely heavily on traits being implemented on tuples. - fn lookup_alternative_tuple_impls( - &self, - err: &mut Diag<'_>, - unsatisfied_predicates: &[( - ty::Predicate<'tcx>, - Option<ty::Predicate<'tcx>>, - Option<ObligationCause<'tcx>>, - )], - ) -> bool { - let mut found_tuple = false; - for (pred, root, _ob) in unsatisfied_predicates { - let mut preds = vec![pred]; - if let Some(root) = root { - // We will look at both the current predicate and the root predicate that caused it - // to be needed. If calling something like `<(A, &B)>::default()`, then `pred` is - // `&B: Default` and `root` is `(A, &B): Default`, which is the one we are checking - // for further down, so we check both. - preds.push(root); - } - for pred in preds { - if let Some(clause) = pred.as_clause() - && let Some(clause) = clause.as_trait_clause() - && let ty = clause.self_ty().skip_binder() - && let ty::Tuple(types) = ty.kind() - { - let path = clause.skip_binder().trait_ref.print_only_trait_path(); - let def_id = clause.def_id(); - let ty = Ty::new_tup( - self.tcx, - self.tcx.mk_type_list_from_iter(types.iter().map(|ty| ty.peel_refs())), - ); - let args = ty::GenericArgs::for_item(self.tcx, def_id, |param, _| { - if param.index == 0 { - ty.into() - } else { - self.infcx.var_for_def(DUMMY_SP, param) - } - }); - if self - .infcx - .type_implements_trait(def_id, args, self.param_env) - .must_apply_modulo_regions() - { - // "`Trait` is implemented for `(A, B)` but not for `(A, &B)`" - let mut msg = DiagStyledString::normal(format!("`{path}` ")); - msg.push_highlighted("is"); - msg.push_normal(" implemented for `("); - let len = types.len(); - for (i, t) in types.iter().enumerate() { - msg.push( - format!("{}", with_forced_trimmed_paths!(t.peel_refs())), - t.peel_refs() != t, - ); - if i < len - 1 { - msg.push_normal(", "); - } - } - msg.push_normal(")` but "); - msg.push_highlighted("not"); - msg.push_normal(" for `("); - for (i, t) in types.iter().enumerate() { - msg.push( - format!("{}", with_forced_trimmed_paths!(t)), - t.peel_refs() != t, - ); - if i < len - 1 { - msg.push_normal(", "); - } - } - msg.push_normal(")`"); - - // Find the span corresponding to the impl that was found to point at it. - if let Some(impl_span) = self - .tcx - .all_impls(def_id) - .filter(|&impl_def_id| { - let header = self.tcx.impl_trait_header(impl_def_id).unwrap(); - let trait_ref = header.trait_ref.instantiate( - self.tcx, - self.infcx.fresh_args_for_item(DUMMY_SP, impl_def_id), - ); - - let value = ty::fold_regions(self.tcx, ty, |_, _| { - self.tcx.lifetimes.re_erased - }); - // FIXME: Don't bother dealing with non-lifetime binders here... - if value.has_escaping_bound_vars() { - return false; - } - self.infcx.can_eq(ty::ParamEnv::empty(), trait_ref.self_ty(), value) - && header.polarity == ty::ImplPolarity::Positive - }) - .map(|impl_def_id| self.tcx.def_span(impl_def_id)) - .next() - { - err.highlighted_span_note(impl_span, msg.0); - } else { - err.highlighted_note(msg.0); - } - found_tuple = true; - } - // If `pred` was already on the tuple, we don't need to look at the root - // obligation too. 
- break;
- }
- }
- }
- found_tuple
- }
-
 /// If an appropriate error source is not found, check method chain for possible candidates
 fn lookup_segments_chain_for_no_match_method(
 &self,
diff --git a/compiler/rustc_hir_typeck/src/opaque_types.rs b/compiler/rustc_hir_typeck/src/opaque_types.rs
index e0224f8c6e1..97feac3d009 100644
--- a/compiler/rustc_hir_typeck/src/opaque_types.rs
+++ b/compiler/rustc_hir_typeck/src/opaque_types.rs
@@ -1,5 +1,218 @@
-use super::FnCtxt;
+use rustc_hir::def::DefKind;
+use rustc_infer::traits::ObligationCause;
+use rustc_middle::ty::{
+ self, DefiningScopeKind, EarlyBinder, OpaqueHiddenType, OpaqueTypeKey, TypeVisitableExt,
+ TypingMode,
+};
+use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
+use rustc_trait_selection::opaque_types::{
+ NonDefiningUseReason, opaque_type_has_defining_use_args, report_item_does_not_constrain_error,
+};
+use rustc_trait_selection::solve;
+use tracing::{debug, instrument};
+
+use crate::FnCtxt;
+
 impl<'tcx> FnCtxt<'_, 'tcx> {
+ /// This takes all the opaque type uses during HIR typeck. It first computes
+ /// the concrete hidden type by iterating over all defining uses.
+ ///
+ /// A use during HIR typeck is defining if all non-lifetime arguments are
+ /// unique generic parameters and the hidden type does not reference any
+ /// inference variables.
+ ///
+ /// It then uses these defining uses to guide inference for all other uses.
+ #[instrument(level = "debug", skip(self))]
+ pub(super) fn handle_opaque_type_uses_next(&mut self) {
+ // We clone the opaques instead of stealing them here as they are still used for
+ // normalization in the next generation trait solver.
+ let mut opaque_types: Vec<_> = self.infcx.clone_opaque_types();
+ let num_entries = self.inner.borrow_mut().opaque_types().num_entries();
+ let prev = self.checked_opaque_types_storage_entries.replace(Some(num_entries));
+ debug_assert_eq!(prev, None);
+ for entry in &mut opaque_types {
+ *entry = self.resolve_vars_if_possible(*entry);
+ }
+ debug!(?opaque_types);
+
+ self.compute_concrete_opaque_types(&opaque_types);
+ self.apply_computed_concrete_opaque_types(&opaque_types);
+ }
+}
+
+enum UsageKind<'tcx> {
+ None,
+ NonDefiningUse(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>),
+ UnconstrainedHiddenType(OpaqueHiddenType<'tcx>),
+ HasDefiningUse,
+}
+
+impl<'tcx> UsageKind<'tcx> {
+ fn merge(&mut self, other: UsageKind<'tcx>) {
+ match (&*self, &other) {
+ (UsageKind::HasDefiningUse, _) | (_, UsageKind::None) => unreachable!(),
+ (UsageKind::None, _) => *self = other,
+ // When merging non-defining uses, prefer earlier ones. This means
+ // the error happens as early as possible.
+ (
+ UsageKind::NonDefiningUse(..) | UsageKind::UnconstrainedHiddenType(..),
+ UsageKind::NonDefiningUse(..),
+ ) => {}
+ // When merging unconstrained hidden types, we prefer later ones. This is
+ // because in most cases, the defining use is the final return statement
+ // of our function, and other uses with defining arguments are likely not
+ // intended to be defining.
+ (
+ UsageKind::NonDefiningUse(..) | UsageKind::UnconstrainedHiddenType(..),
+ UsageKind::UnconstrainedHiddenType(..)
| UsageKind::HasDefiningUse, + ) => *self = other, + } + } +} + +impl<'tcx> FnCtxt<'_, 'tcx> { + fn compute_concrete_opaque_types( + &mut self, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], + ) { + let tcx = self.tcx; + let TypingMode::Analysis { defining_opaque_types_and_generators } = self.typing_mode() + else { + unreachable!(); + }; + + for def_id in defining_opaque_types_and_generators { + match tcx.def_kind(def_id) { + DefKind::OpaqueTy => {} + DefKind::Closure => continue, + _ => unreachable!("not opaque or generator: {def_id:?}"), + } + + let mut usage_kind = UsageKind::None; + for &(opaque_type_key, hidden_type) in opaque_types { + if opaque_type_key.def_id != def_id { + continue; + } + + usage_kind.merge(self.consider_opaque_type_use(opaque_type_key, hidden_type)); + if let UsageKind::HasDefiningUse = usage_kind { + break; + } + } + + let guar = match usage_kind { + UsageKind::None => { + if let Some(guar) = self.tainted_by_errors() { + guar + } else { + report_item_does_not_constrain_error(self.tcx, self.body_id, def_id, None) + } + } + UsageKind::NonDefiningUse(opaque_type_key, hidden_type) => { + report_item_does_not_constrain_error( + self.tcx, + self.body_id, + def_id, + Some((opaque_type_key, hidden_type.span)), + ) + } + UsageKind::UnconstrainedHiddenType(hidden_type) => { + let infer_var = hidden_type + .ty + .walk() + .filter_map(ty::GenericArg::as_term) + .find(|term| term.is_infer()) + .unwrap_or_else(|| hidden_type.ty.into()); + self.err_ctxt() + .emit_inference_failure_err( + self.body_id, + hidden_type.span, + infer_var, + TypeAnnotationNeeded::E0282, + false, + ) + .emit() + } + UsageKind::HasDefiningUse => continue, + }; + + self.typeck_results + .borrow_mut() + .concrete_opaque_types + .insert(def_id, OpaqueHiddenType::new_error(tcx, guar)); + self.set_tainted_by_errors(guar); + } + } + + fn consider_opaque_type_use( + &mut self, + opaque_type_key: OpaqueTypeKey<'tcx>, + hidden_type: OpaqueHiddenType<'tcx>, + ) -> UsageKind<'tcx> { + if let Err(err) = opaque_type_has_defining_use_args( + &self, + opaque_type_key, + hidden_type.span, + DefiningScopeKind::HirTypeck, + ) { + match err { + NonDefiningUseReason::Tainted(guar) => { + self.typeck_results.borrow_mut().concrete_opaque_types.insert( + opaque_type_key.def_id, + OpaqueHiddenType::new_error(self.tcx, guar), + ); + return UsageKind::HasDefiningUse; + } + _ => return UsageKind::NonDefiningUse(opaque_type_key, hidden_type), + }; + } + + // We ignore uses of the opaque if they have any inference variables + // as this can frequently happen with recursive calls. + // + // See `tests/ui/traits/next-solver/opaques/universal-args-non-defining.rs`. 
+ if hidden_type.ty.has_non_region_infer() { + return UsageKind::UnconstrainedHiddenType(hidden_type); + } + + let cause = ObligationCause::misc(hidden_type.span, self.body_id); + let at = self.at(&cause, self.param_env); + let hidden_type = match solve::deeply_normalize(at, hidden_type) { + Ok(hidden_type) => hidden_type, + Err(errors) => { + let guar = self.err_ctxt().report_fulfillment_errors(errors); + OpaqueHiddenType::new_error(self.tcx, guar) + } + }; + let hidden_type = hidden_type.remap_generic_params_to_declaration_params( + opaque_type_key, + self.tcx, + DefiningScopeKind::HirTypeck, + ); + + let prev = self + .typeck_results + .borrow_mut() + .concrete_opaque_types + .insert(opaque_type_key.def_id, hidden_type); + assert!(prev.is_none()); + UsageKind::HasDefiningUse + } + + fn apply_computed_concrete_opaque_types( + &mut self, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], + ) { + let tcx = self.tcx; + for &(key, hidden_type) in opaque_types { + let expected = + *self.typeck_results.borrow_mut().concrete_opaque_types.get(&key.def_id).unwrap(); + + let expected = EarlyBinder::bind(expected.ty).instantiate(tcx, key.args); + self.demand_eqtype(hidden_type.span, expected, hidden_type.ty); + } + } + /// We may in theory add further uses of an opaque after cloning the opaque /// types storage during writeback when computing the defining uses. /// diff --git a/compiler/rustc_hir_typeck/src/upvar.rs b/compiler/rustc_hir_typeck/src/upvar.rs index 50fa3f2cc9d..06acff06a51 100644 --- a/compiler/rustc_hir_typeck/src/upvar.rs +++ b/compiler/rustc_hir_typeck/src/upvar.rs @@ -1747,7 +1747,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } fn should_log_capture_analysis(&self, closure_def_id: LocalDefId) -> bool { - self.tcx.has_attr(closure_def_id, sym::rustc_capture_analysis) + self.has_rustc_attrs && self.tcx.has_attr(closure_def_id, sym::rustc_capture_analysis) } fn log_capture_analysis_first_pass( diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs index 093de950d63..d75bc9edab2 100644 --- a/compiler/rustc_hir_typeck/src/writeback.rs +++ b/compiler/rustc_hir_typeck/src/writeback.rs @@ -27,7 +27,7 @@ use rustc_middle::ty::{ }; use rustc_span::{Span, sym}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; -use rustc_trait_selection::opaque_types::check_opaque_type_parameter_valid; +use rustc_trait_selection::opaque_types::opaque_type_has_defining_use_args; use rustc_trait_selection::solve; use tracing::{debug, instrument}; @@ -45,7 +45,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // This attribute causes us to dump some writeback information // in the form of errors, which is used for unit tests. 
- let rustc_dump_user_args = self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args); + let rustc_dump_user_args = + self.has_rustc_attrs && self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args); let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_args); for param in body.params { @@ -74,7 +75,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { wbcx.visit_user_provided_tys(); wbcx.visit_user_provided_sigs(); wbcx.visit_coroutine_interior(); + wbcx.visit_transmutes(); wbcx.visit_offset_of_container_types(); + wbcx.visit_potentially_region_dependent_goals(); wbcx.typeck_results.rvalue_scopes = mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes); @@ -532,8 +535,36 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } } + fn visit_transmutes(&mut self) { + let tcx = self.tcx(); + let fcx_typeck_results = self.fcx.typeck_results.borrow(); + assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner); + for &(from, to, hir_id) in self.fcx.deferred_transmute_checks.borrow().iter() { + let span = tcx.hir_span(hir_id); + let from = self.resolve(from, &span); + let to = self.resolve(to, &span); + self.typeck_results.transmutes_to_check.push((from, to, hir_id)); + } + } + + fn visit_opaque_types_next(&mut self) { + let mut fcx_typeck_results = self.fcx.typeck_results.borrow_mut(); + assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner); + for hidden_ty in fcx_typeck_results.concrete_opaque_types.values() { + assert!(!hidden_ty.has_infer()); + } + + assert_eq!(self.typeck_results.concrete_opaque_types.len(), 0); + self.typeck_results.concrete_opaque_types = + mem::take(&mut fcx_typeck_results.concrete_opaque_types); + } + #[instrument(skip(self), level = "debug")] fn visit_opaque_types(&mut self) { + if self.fcx.next_trait_solver() { + return self.visit_opaque_types_next(); + } + let tcx = self.tcx(); // We clone the opaques instead of stealing them here as they are still used for // normalization in the next generation trait solver. 
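The `visit_transmutes` hook added above only resolves and records the deferred `(from, to)` pairs; the actual check runs later, via the new `tcx.ensure_ok().check_transmutes(def_id)` call added in the `rustc_interface` hunk further down. The property being guarded is the usual transmute rule that source and target have the same size; a minimal illustration of that rule (the values are arbitrary):

```rust
fn main() {
    // Accepted: `u32` and `[u8; 4]` have the same size.
    let bytes: [u8; 4] = unsafe { std::mem::transmute(0xDEAD_BEEFu32) };
    assert_eq!(u32::from_ne_bytes(bytes), 0xDEAD_BEEF);

    // Rejected at compile time (E0512, differently sized types):
    // let _: [u8; 8] = unsafe { std::mem::transmute(0u32) };
}
```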
@@ -544,17 +575,14 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { for (opaque_type_key, hidden_type) in opaque_types { let hidden_type = self.resolve(hidden_type, &hidden_type.span); let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span); - - if !self.fcx.next_trait_solver() { - if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind() - && alias_ty.def_id == opaque_type_key.def_id.to_def_id() - && alias_ty.args == opaque_type_key.args - { - continue; - } + if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind() + && alias_ty.def_id == opaque_type_key.def_id.to_def_id() + && alias_ty.args == opaque_type_key.args + { + continue; } - if let Err(err) = check_opaque_type_parameter_valid( + if let Err(err) = opaque_type_has_defining_use_args( &self.fcx, opaque_type_key, hidden_type.span, @@ -762,6 +790,32 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } } + fn visit_potentially_region_dependent_goals(&mut self) { + let obligations = self.fcx.take_hir_typeck_potentially_region_dependent_goals(); + if let None = self.fcx.tainted_by_errors() { + for obligation in obligations { + let (predicate, mut cause) = + self.fcx.resolve_vars_if_possible((obligation.predicate, obligation.cause)); + if predicate.has_non_region_infer() { + self.fcx.dcx().span_delayed_bug( + cause.span, + format!("unexpected inference variable after writeback: {predicate:?}"), + ); + } else { + let predicate = self.tcx().erase_regions(predicate); + if cause.has_infer() || cause.has_placeholders() { + // We can't use the the obligation cause as it references + // information local to this query. + cause = self.fcx.misc(cause.span); + } + self.typeck_results + .potentially_region_dependent_goals + .insert((predicate, cause)); + } + } + } + } + fn resolve<T>(&mut self, value: T, span: &dyn Locatable) -> T where T: TypeFoldable<TyCtxt<'tcx>>, @@ -883,6 +937,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { } } + #[instrument(level = "debug", skip(self, outer_exclusive_binder, new_err))] fn handle_term<T>( &mut self, value: T, diff --git a/compiler/rustc_incremental/Cargo.toml b/compiler/rustc_incremental/Cargo.toml index db0a5841887..8d7f2eb8414 100644 --- a/compiler/rustc_incremental/Cargo.toml +++ b/compiler/rustc_incremental/Cargo.toml @@ -19,6 +19,6 @@ rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs index 645d95b1dba..684bd34f909 100644 --- a/compiler/rustc_index/src/bit_set.rs +++ b/compiler/rustc_index/src/bit_set.rs @@ -491,19 +491,15 @@ pub struct ChunkedBitSet<T> { marker: PhantomData<T>, } -// Note: the chunk domain size is duplicated in each variant. This is a bit -// inconvenient, but it allows the type size to be smaller than if we had an -// outer struct containing a chunk domain size plus the `Chunk`, because the -// compiler can place the chunk domain size after the tag. +// NOTE: The chunk size is computed on-the-fly on each manipulation of a chunk. +// This avoids storing it, as it's almost always CHUNK_BITS except for the last one. #[derive(Clone, Debug, PartialEq, Eq)] enum Chunk { /// A chunk that is all zeros; we don't represent the zeros explicitly. - /// The `ChunkSize` is always non-zero. 
- Zeros(ChunkSize), + Zeros, /// A chunk that is all ones; we don't represent the ones explicitly. - /// `ChunkSize` is always non-zero. - Ones(ChunkSize), + Ones, /// A chunk that has a mix of zeros and ones, which are represented /// explicitly and densely. It never has all zeros or all ones. @@ -514,16 +510,14 @@ enum Chunk { /// to store the length, which would make this type larger. These excess /// words are always zero, as are any excess bits in the final in-use word. /// - /// The first `ChunkSize` field is always non-zero. - /// - /// The second `ChunkSize` field is the count of 1s set in the chunk, and + /// The `ChunkSize` field is the count of 1s set in the chunk, and /// must satisfy `0 < count < chunk_domain_size`. /// /// The words are within an `Rc` because it's surprisingly common to /// duplicate an entire chunk, e.g. in `ChunkedBitSet::clone_from()`, or /// when a `Mixed` chunk is union'd into a `Zeros` chunk. When we do need /// to modify a chunk we use `Rc::make_mut`. - Mixed(ChunkSize, ChunkSize, Rc<[Word; CHUNK_WORDS]>), + Mixed(ChunkSize, Rc<[Word; CHUNK_WORDS]>), } // This type is used a lot. Make sure it doesn't unintentionally get bigger. @@ -535,6 +529,22 @@ impl<T> ChunkedBitSet<T> { self.domain_size } + #[inline] + fn last_chunk_size(&self) -> ChunkSize { + let n = self.domain_size % CHUNK_BITS; + if n == 0 { CHUNK_BITS as ChunkSize } else { n as ChunkSize } + } + + /// All the chunks have a chunk_domain_size of `CHUNK_BITS` except the final one. + #[inline] + fn chunk_domain_size(&self, chunk: usize) -> ChunkSize { + if chunk == self.chunks.len() - 1 { + self.last_chunk_size() + } else { + CHUNK_BITS as ChunkSize + } + } + #[cfg(test)] fn assert_valid(&self) { if self.domain_size == 0 { @@ -544,8 +554,9 @@ impl<T> ChunkedBitSet<T> { assert!((self.chunks.len() - 1) * CHUNK_BITS <= self.domain_size); assert!(self.chunks.len() * CHUNK_BITS >= self.domain_size); - for chunk in self.chunks.iter() { - chunk.assert_valid(); + for (chunk_index, chunk) in self.chunks.iter().enumerate() { + let chunk_domain_size = self.chunk_domain_size(chunk_index); + chunk.assert_valid(chunk_domain_size); } } } @@ -556,16 +567,7 @@ impl<T: Idx> ChunkedBitSet<T> { let chunks = if domain_size == 0 { Box::new([]) } else { - // All the chunks have a chunk_domain_size of `CHUNK_BITS` except - // the final one. - let final_chunk_domain_size = { - let n = domain_size % CHUNK_BITS; - if n == 0 { CHUNK_BITS } else { n } - }; - let mut chunks = - vec![Chunk::new(CHUNK_BITS, is_empty); num_chunks(domain_size)].into_boxed_slice(); - *chunks.last_mut().unwrap() = Chunk::new(final_chunk_domain_size, is_empty); - chunks + vec![if is_empty { Zeros } else { Ones }; num_chunks(domain_size)].into_boxed_slice() }; ChunkedBitSet { domain_size, chunks, marker: PhantomData } } @@ -594,11 +596,15 @@ impl<T: Idx> ChunkedBitSet<T> { /// Count the number of bits in the set. pub fn count(&self) -> usize { - self.chunks.iter().map(|chunk| chunk.count()).sum() + self.chunks + .iter() + .enumerate() + .map(|(index, chunk)| chunk.count(self.chunk_domain_size(index))) + .sum() } pub fn is_empty(&self) -> bool { - self.chunks.iter().all(|chunk| matches!(chunk, Zeros(..))) + self.chunks.iter().all(|chunk| matches!(chunk, Zeros)) } /// Returns `true` if `self` contains `elem`. 
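The arithmetic behind the new `last_chunk_size` and `chunk_domain_size` helpers is simple enough to check standalone. A sketch, assuming the 2048-bit chunks that `ChunkedBitSet` uses; the helper names and `main` here are only for illustration:

```rust
const CHUNK_BITS: usize = 2048;

// Number of chunks needed for a given domain size.
fn num_chunks(domain_size: usize) -> usize {
    domain_size.div_ceil(CHUNK_BITS)
}

// Every chunk spans CHUNK_BITS elements except possibly the last one.
fn last_chunk_size(domain_size: usize) -> usize {
    let n = domain_size % CHUNK_BITS;
    if n == 0 { CHUNK_BITS } else { n }
}

fn main() {
    // Matches the `chunk_domain_size` assertions added to the tests later in this diff.
    assert_eq!((num_chunks(2548), last_chunk_size(2548)), (2, 500));
    assert_eq!((num_chunks(4096), last_chunk_size(4096)), (2, 2048));
    assert_eq!((num_chunks(10000), last_chunk_size(10000)), (5, 1808));
}
```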
@@ -607,9 +613,9 @@ impl<T: Idx> ChunkedBitSet<T> { assert!(elem.index() < self.domain_size); let chunk = &self.chunks[chunk_index(elem)]; match &chunk { - Zeros(_) => false, - Ones(_) => true, - Mixed(_, _, words) => { + Zeros => false, + Ones => true, + Mixed(_, words) => { let (word_index, mask) = chunk_word_index_and_mask(elem); (words[word_index] & mask) != 0 } @@ -625,9 +631,10 @@ impl<T: Idx> ChunkedBitSet<T> { pub fn insert(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); let chunk_index = chunk_index(elem); + let chunk_domain_size = self.chunk_domain_size(chunk_index); let chunk = &mut self.chunks[chunk_index]; match *chunk { - Zeros(chunk_domain_size) => { + Zeros => { if chunk_domain_size > 1 { #[cfg(feature = "nightly")] let mut words = { @@ -649,14 +656,14 @@ impl<T: Idx> ChunkedBitSet<T> { let (word_index, mask) = chunk_word_index_and_mask(elem); words_ref[word_index] |= mask; - *chunk = Mixed(chunk_domain_size, 1, words); + *chunk = Mixed(1, words); } else { - *chunk = Ones(chunk_domain_size); + *chunk = Ones; } true } - Ones(_) => false, - Mixed(chunk_domain_size, ref mut count, ref mut words) => { + Ones => false, + Mixed(ref mut count, ref mut words) => { // We skip all the work if the bit is already set. let (word_index, mask) = chunk_word_index_and_mask(elem); if (words[word_index] & mask) == 0 { @@ -665,7 +672,7 @@ impl<T: Idx> ChunkedBitSet<T> { let words = Rc::make_mut(words); words[word_index] |= mask; } else { - *chunk = Ones(chunk_domain_size); + *chunk = Ones; } true } else { @@ -678,11 +685,7 @@ impl<T: Idx> ChunkedBitSet<T> { /// Sets all bits to true. pub fn insert_all(&mut self) { for chunk in self.chunks.iter_mut() { - *chunk = match *chunk { - Zeros(chunk_domain_size) - | Ones(chunk_domain_size) - | Mixed(chunk_domain_size, ..) => Ones(chunk_domain_size), - } + *chunk = Ones; } } @@ -690,10 +693,11 @@ impl<T: Idx> ChunkedBitSet<T> { pub fn remove(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); let chunk_index = chunk_index(elem); + let chunk_domain_size = self.chunk_domain_size(chunk_index); let chunk = &mut self.chunks[chunk_index]; match *chunk { - Zeros(_) => false, - Ones(chunk_domain_size) => { + Zeros => false, + Ones => { if chunk_domain_size > 1 { #[cfg(feature = "nightly")] let mut words = { @@ -722,13 +726,13 @@ impl<T: Idx> ChunkedBitSet<T> { ); let (word_index, mask) = chunk_word_index_and_mask(elem); words_ref[word_index] &= !mask; - *chunk = Mixed(chunk_domain_size, chunk_domain_size - 1, words); + *chunk = Mixed(chunk_domain_size - 1, words); } else { - *chunk = Zeros(chunk_domain_size); + *chunk = Zeros; } true } - Mixed(chunk_domain_size, ref mut count, ref mut words) => { + Mixed(ref mut count, ref mut words) => { // We skip all the work if the bit is already clear. 
let (word_index, mask) = chunk_word_index_and_mask(elem); if (words[word_index] & mask) != 0 { @@ -737,7 +741,7 @@ impl<T: Idx> ChunkedBitSet<T> { let words = Rc::make_mut(words); words[word_index] &= !mask; } else { - *chunk = Zeros(chunk_domain_size); + *chunk = Zeros } true } else { @@ -748,11 +752,12 @@ impl<T: Idx> ChunkedBitSet<T> { } fn chunk_iter(&self, chunk_index: usize) -> ChunkIter<'_> { + let chunk_domain_size = self.chunk_domain_size(chunk_index); match self.chunks.get(chunk_index) { - Some(Zeros(_chunk_domain_size)) => ChunkIter::Zeros, - Some(Ones(chunk_domain_size)) => ChunkIter::Ones(0..*chunk_domain_size as usize), - Some(Mixed(chunk_domain_size, _, words)) => { - let num_words = num_words(*chunk_domain_size as usize); + Some(Zeros) => ChunkIter::Zeros, + Some(Ones) => ChunkIter::Ones(0..chunk_domain_size as usize), + Some(Mixed(_, words)) => { + let num_words = num_words(chunk_domain_size as usize); ChunkIter::Mixed(BitIter::new(&words[0..num_words])) } None => ChunkIter::Finished, @@ -765,23 +770,33 @@ impl<T: Idx> ChunkedBitSet<T> { impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { fn union(&mut self, other: &ChunkedBitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); - debug_assert_eq!(self.chunks.len(), other.chunks.len()); + + let num_chunks = self.chunks.len(); + debug_assert_eq!(num_chunks, other.chunks.len()); + + let last_chunk_size = self.last_chunk_size(); + debug_assert_eq!(last_chunk_size, other.last_chunk_size()); let mut changed = false; - for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) { + for (chunk_index, (mut self_chunk, other_chunk)) in + self.chunks.iter_mut().zip(other.chunks.iter()).enumerate() + { + let chunk_domain_size = if chunk_index + 1 == num_chunks { + last_chunk_size + } else { + CHUNK_BITS as ChunkSize + }; + match (&mut self_chunk, &other_chunk) { - (_, Zeros(_)) | (Ones(_), _) => {} - (Zeros(self_chunk_domain_size), Ones(other_chunk_domain_size)) - | (Mixed(self_chunk_domain_size, ..), Ones(other_chunk_domain_size)) - | (Zeros(self_chunk_domain_size), Mixed(other_chunk_domain_size, ..)) => { + (_, Zeros) | (Ones, _) => {} + (Zeros, Ones) | (Mixed(..), Ones) | (Zeros, Mixed(..)) => { // `other_chunk` fully overwrites `self_chunk` - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); *self_chunk = other_chunk.clone(); changed = true; } ( - Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words), - Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words), + Mixed(self_chunk_count, self_chunk_words), + Mixed(_other_chunk_count, other_chunk_words), ) => { // First check if the operation would change // `self_chunk.words`. If not, we can avoid allocating some @@ -789,7 +804,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { // performance win. Also, we only need to operate on the // in-use words, hence the slicing. 
let op = |a, b| a | b; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); if bitwise_changes( &self_chunk_words[0..num_words], &other_chunk_words[0..num_words], @@ -806,8 +821,8 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .iter() .map(|w| w.count_ones() as ChunkSize) .sum(); - if *self_chunk_count == *self_chunk_domain_size { - *self_chunk = Ones(*self_chunk_domain_size); + if *self_chunk_count == chunk_domain_size { + *self_chunk = Ones; } changed = true; } @@ -819,36 +834,41 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { fn subtract(&mut self, other: &ChunkedBitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); - debug_assert_eq!(self.chunks.len(), other.chunks.len()); + + let num_chunks = self.chunks.len(); + debug_assert_eq!(num_chunks, other.chunks.len()); + + let last_chunk_size = self.last_chunk_size(); + debug_assert_eq!(last_chunk_size, other.last_chunk_size()); let mut changed = false; - for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) { + for (chunk_index, (mut self_chunk, other_chunk)) in + self.chunks.iter_mut().zip(other.chunks.iter()).enumerate() + { + let chunk_domain_size = if chunk_index + 1 == num_chunks { + last_chunk_size + } else { + CHUNK_BITS as ChunkSize + }; + match (&mut self_chunk, &other_chunk) { - (Zeros(..), _) | (_, Zeros(..)) => {} - ( - Ones(self_chunk_domain_size) | Mixed(self_chunk_domain_size, _, _), - Ones(other_chunk_domain_size), - ) => { - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); + (Zeros, _) | (_, Zeros) => {} + (Ones | Mixed(_, _), Ones) => { changed = true; - *self_chunk = Zeros(*self_chunk_domain_size); + *self_chunk = Zeros; } - ( - Ones(self_chunk_domain_size), - Mixed(other_chunk_domain_size, other_chunk_count, other_chunk_words), - ) => { - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); + (Ones, Mixed(other_chunk_count, other_chunk_words)) => { changed = true; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); debug_assert!(num_words > 0 && num_words <= CHUNK_WORDS); let mut tail_mask = - 1 << (*other_chunk_domain_size - ((num_words - 1) * WORD_BITS) as u16) - 1; + 1 << (chunk_domain_size - ((num_words - 1) * WORD_BITS) as u16) - 1; let mut self_chunk_words = **other_chunk_words; for word in self_chunk_words[0..num_words].iter_mut().rev() { *word = !*word & tail_mask; tail_mask = u64::MAX; } - let self_chunk_count = *self_chunk_domain_size - *other_chunk_count; + let self_chunk_count = chunk_domain_size - *other_chunk_count; debug_assert_eq!( self_chunk_count, self_chunk_words[0..num_words] @@ -856,16 +876,15 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .map(|w| w.count_ones() as ChunkSize) .sum() ); - *self_chunk = - Mixed(*self_chunk_domain_size, self_chunk_count, Rc::new(self_chunk_words)); + *self_chunk = Mixed(self_chunk_count, Rc::new(self_chunk_words)); } ( - Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words), - Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words), + Mixed(self_chunk_count, self_chunk_words), + Mixed(_other_chunk_count, other_chunk_words), ) => { // See [`<Self as BitRelations<ChunkedBitSet<T>>>::union`] for the explanation let op = |a: u64, b: u64| a & !b; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); if 
bitwise_changes( &self_chunk_words[0..num_words], &other_chunk_words[0..num_words], @@ -883,7 +902,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .map(|w| w.count_ones() as ChunkSize) .sum(); if *self_chunk_count == 0 { - *self_chunk = Zeros(*self_chunk_domain_size); + *self_chunk = Zeros; } changed = true; } @@ -895,28 +914,36 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { fn intersect(&mut self, other: &ChunkedBitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); - debug_assert_eq!(self.chunks.len(), other.chunks.len()); + + let num_chunks = self.chunks.len(); + debug_assert_eq!(num_chunks, other.chunks.len()); + + let last_chunk_size = self.last_chunk_size(); + debug_assert_eq!(last_chunk_size, other.last_chunk_size()); let mut changed = false; - for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) { + for (chunk_index, (mut self_chunk, other_chunk)) in + self.chunks.iter_mut().zip(other.chunks.iter()).enumerate() + { + let chunk_domain_size = if chunk_index + 1 == num_chunks { + last_chunk_size + } else { + CHUNK_BITS as ChunkSize + }; + match (&mut self_chunk, &other_chunk) { - (Zeros(..), _) | (_, Ones(..)) => {} - ( - Ones(self_chunk_domain_size), - Zeros(other_chunk_domain_size) | Mixed(other_chunk_domain_size, ..), - ) - | (Mixed(self_chunk_domain_size, ..), Zeros(other_chunk_domain_size)) => { - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); + (Zeros, _) | (_, Ones) => {} + (Ones, Zeros | Mixed(..)) | (Mixed(..), Zeros) => { changed = true; *self_chunk = other_chunk.clone(); } ( - Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words), - Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words), + Mixed(self_chunk_count, self_chunk_words), + Mixed(_other_chunk_count, other_chunk_words), ) => { // See [`<Self as BitRelations<ChunkedBitSet<T>>>::union`] for the explanation let op = |a, b| a & b; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); if bitwise_changes( &self_chunk_words[0..num_words], &other_chunk_words[0..num_words], @@ -934,7 +961,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .map(|w| w.count_ones() as ChunkSize) .sum(); if *self_chunk_count == 0 { - *self_chunk = Zeros(*self_chunk_domain_size); + *self_chunk = Zeros; } changed = true; } @@ -964,7 +991,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for DenseBitSet<T> { words = &mut words[..CHUNK_WORDS]; } match chunk { - Zeros(..) => { + Zeros => { for word in words { if *word != 0 { changed = true; @@ -972,8 +999,8 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for DenseBitSet<T> { } } } - Ones(..) => (), - Mixed(_, _, data) => { + Ones => (), + Mixed(_, data) => { for (i, word) in words.iter_mut().enumerate() { let new_val = *word & data[i]; if new_val != *word { @@ -1053,13 +1080,11 @@ impl<'a, T: Idx> Iterator for ChunkedBitIter<'a, T> { impl Chunk { #[cfg(test)] - fn assert_valid(&self) { + fn assert_valid(&self, chunk_domain_size: ChunkSize) { + assert!(chunk_domain_size as usize <= CHUNK_BITS); match *self { - Zeros(chunk_domain_size) | Ones(chunk_domain_size) => { - assert!(chunk_domain_size as usize <= CHUNK_BITS); - } - Mixed(chunk_domain_size, count, ref words) => { - assert!(chunk_domain_size as usize <= CHUNK_BITS); + Zeros | Ones => {} + Mixed(count, ref words) => { assert!(0 < count && count < chunk_domain_size); // Check the number of set bits matches `count`. 
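The invariant asserted here, and re-established by the `count == 0` / `count == chunk_domain_size` checks in `union`, `subtract`, and `intersect` above, is that a chunk which becomes all zeros or all ones never stays `Mixed`. A self-contained sketch of that normalization; the enum and helper below are local to the sketch, not the real `Chunk` type:

```rust
#[derive(Debug, PartialEq)]
enum Chunk {
    Zeros,
    Ones,
    Mixed(u32, Vec<u64>), // (number of set bits, backing words)
}

// A chunk that ends up all zeros or all ones is folded back to `Zeros`/`Ones`.
fn normalize(count: u32, words: Vec<u64>, chunk_domain_size: u32) -> Chunk {
    if count == 0 {
        Chunk::Zeros
    } else if count == chunk_domain_size {
        Chunk::Ones
    } else {
        Chunk::Mixed(count, words)
    }
}

fn main() {
    assert_eq!(normalize(0, vec![0; 4], 256), Chunk::Zeros);
    assert_eq!(normalize(256, vec![u64::MAX; 4], 256), Chunk::Ones);
    assert!(matches!(normalize(3, vec![0b111, 0, 0, 0], 256), Chunk::Mixed(3, _)));
}
```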
@@ -1083,18 +1108,12 @@ impl Chunk { } } - fn new(chunk_domain_size: usize, is_empty: bool) -> Self { - debug_assert!(0 < chunk_domain_size && chunk_domain_size <= CHUNK_BITS); - let chunk_domain_size = chunk_domain_size as ChunkSize; - if is_empty { Zeros(chunk_domain_size) } else { Ones(chunk_domain_size) } - } - /// Count the number of 1s in the chunk. - fn count(&self) -> usize { + fn count(&self, chunk_domain_size: ChunkSize) -> usize { match *self { - Zeros(_) => 0, - Ones(chunk_domain_size) => chunk_domain_size as usize, - Mixed(_, count, _) => count as usize, + Zeros => 0, + Ones => chunk_domain_size as usize, + Mixed(count, _) => count as usize, } } } diff --git a/compiler/rustc_index/src/bit_set/tests.rs b/compiler/rustc_index/src/bit_set/tests.rs index 9ce4cf4293f..deddc872614 100644 --- a/compiler/rustc_index/src/bit_set/tests.rs +++ b/compiler/rustc_index/src/bit_set/tests.rs @@ -120,8 +120,9 @@ fn chunked_bitset() { let mut b1 = ChunkedBitSet::<usize>::new_empty(1); assert_eq!( b1, - ChunkedBitSet { domain_size: 1, chunks: Box::new([Zeros(1)]), marker: PhantomData } + ChunkedBitSet { domain_size: 1, chunks: Box::new([Zeros]), marker: PhantomData } ); + assert_eq!(b1.chunk_domain_size(0), 1); b1.assert_valid(); assert!(!b1.contains(0)); @@ -129,12 +130,12 @@ fn chunked_bitset() { assert!(b1.insert(0)); assert!(b1.contains(0)); assert_eq!(b1.count(), 1); - assert_eq!(b1.chunks(), [Ones(1)]); + assert_eq!(b1.chunks(), [Ones]); assert!(!b1.insert(0)); assert!(b1.remove(0)); assert!(!b1.contains(0)); assert_eq!(b1.count(), 0); - assert_eq!(b1.chunks(), [Zeros(1)]); + assert_eq!(b1.chunks(), [Zeros]); b1.assert_valid(); //----------------------------------------------------------------------- @@ -142,8 +143,9 @@ fn chunked_bitset() { let mut b100 = ChunkedBitSet::<usize>::new_filled(100); assert_eq!( b100, - ChunkedBitSet { domain_size: 100, chunks: Box::new([Ones(100)]), marker: PhantomData } + ChunkedBitSet { domain_size: 100, chunks: Box::new([Ones]), marker: PhantomData } ); + assert_eq!(b100.chunk_domain_size(0), 100); b100.assert_valid(); for i in 0..100 { @@ -152,7 +154,7 @@ fn chunked_bitset() { assert_eq!(b100.count(), 100); assert!(b100.remove(3)); assert!(b100.insert(3)); - assert_eq!(b100.chunks(), vec![Ones(100)]); + assert_eq!(b100.chunks(), vec![Ones]); assert!( b100.remove(20) && b100.remove(30) && b100.remove(40) && b100.remove(99) && b100.insert(30) ); @@ -161,7 +163,6 @@ fn chunked_bitset() { assert_eq!( b100.chunks(), vec![Mixed( - 100, 97, #[rustfmt::skip] Rc::new([ @@ -180,7 +181,7 @@ fn chunked_bitset() { } } assert_eq!(num_removed, 97); - assert_eq!(b100.chunks(), vec![Zeros(100)]); + assert_eq!(b100.chunks(), vec![Zeros]); b100.assert_valid(); //----------------------------------------------------------------------- @@ -188,23 +189,21 @@ fn chunked_bitset() { let mut b2548 = ChunkedBitSet::<usize>::new_empty(2548); assert_eq!( b2548, - ChunkedBitSet { - domain_size: 2548, - chunks: Box::new([Zeros(2048), Zeros(500)]), - marker: PhantomData, - } + ChunkedBitSet { domain_size: 2548, chunks: Box::new([Zeros, Zeros]), marker: PhantomData } ); + assert_eq!(b2548.chunk_domain_size(0), 2048); + assert_eq!(b2548.chunk_domain_size(1), 500); b2548.assert_valid(); b2548.insert(14); b2548.remove(14); - assert_eq!(b2548.chunks(), vec![Zeros(2048), Zeros(500)]); + assert_eq!(b2548.chunks(), vec![Zeros, Zeros]); b2548.insert_all(); for i in 0..2548 { assert!(b2548.contains(i)); } assert_eq!(b2548.count(), 2548); - assert_eq!(b2548.chunks(), vec![Ones(2048), Ones(500)]); + 
assert_eq!(b2548.chunks(), vec![Ones, Ones]); b2548.assert_valid(); //----------------------------------------------------------------------- @@ -212,12 +211,10 @@ fn chunked_bitset() { let mut b4096 = ChunkedBitSet::<usize>::new_empty(4096); assert_eq!( b4096, - ChunkedBitSet { - domain_size: 4096, - chunks: Box::new([Zeros(2048), Zeros(2048)]), - marker: PhantomData, - } + ChunkedBitSet { domain_size: 4096, chunks: Box::new([Zeros, Zeros]), marker: PhantomData } ); + assert_eq!(b4096.chunk_domain_size(0), 2048); + assert_eq!(b4096.chunk_domain_size(1), 2048); b4096.assert_valid(); for i in 0..4096 { @@ -231,11 +228,11 @@ fn chunked_bitset() { b4096.chunks(), #[rustfmt::skip] vec![ - Mixed(2048, 1, Rc::new([ + Mixed(1, Rc::new([ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ])), - Mixed(2048, 1, Rc::new([ + Mixed(1, Rc::new([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x8000_0000_0000_0000 ])), @@ -251,10 +248,15 @@ fn chunked_bitset() { b10000, ChunkedBitSet { domain_size: 10000, - chunks: Box::new([Zeros(2048), Zeros(2048), Zeros(2048), Zeros(2048), Zeros(1808),]), + chunks: Box::new([Zeros, Zeros, Zeros, Zeros, Zeros,]), marker: PhantomData, } ); + assert_eq!(b10000.chunk_domain_size(0), 2048); + assert_eq!(b10000.chunk_domain_size(1), 2048); + assert_eq!(b10000.chunk_domain_size(2), 2048); + assert_eq!(b10000.chunk_domain_size(3), 2048); + assert_eq!(b10000.chunk_domain_size(4), 1808); b10000.assert_valid(); assert!(b10000.insert(3000) && b10000.insert(5000)); @@ -262,17 +264,17 @@ fn chunked_bitset() { b10000.chunks(), #[rustfmt::skip] vec![ - Zeros(2048), - Mixed(2048, 1, Rc::new([ + Zeros, + Mixed(1, Rc::new([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0100_0000_0000_0000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])), - Mixed(2048, 1, Rc::new([ + Mixed(1, Rc::new([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])), - Zeros(2048), - Zeros(1808), + Zeros, + Zeros, ], ); let mut b10000b = ChunkedBitSet::<usize>::new_empty(10000); diff --git a/compiler/rustc_infer/Cargo.toml b/compiler/rustc_infer/Cargo.toml index 08c03614884..6d97fa6af1f 100644 --- a/compiler/rustc_infer/Cargo.toml +++ b/compiler/rustc_infer/Cargo.toml @@ -18,6 +18,6 @@ rustc_middle = { path = "../rustc_middle" } rustc_span = { path = "../rustc_span" } rustc_type_ir = { path = "../rustc_type_ir" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_infer/src/infer/context.rs b/compiler/rustc_infer/src/infer/context.rs index 21e999b080d..bb9c8850093 100644 --- a/compiler/rustc_infer/src/infer/context.rs +++ b/compiler/rustc_infer/src/infer/context.rs @@ -22,10 +22,6 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { self.next_trait_solver } - fn in_hir_typeck(&self) -> bool { - self.in_hir_typeck - } - fn typing_mode(&self) -> ty::TypingMode<'tcx> { self.typing_mode() } diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 82d4856df39..d1507f08c06 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -158,7 +158,8 @@ pub struct InferCtxtInner<'tcx> { region_assumptions: Vec<ty::ArgOutlivesPredicate<'tcx>>, /// `-Znext-solver`: Successfully proven goals during HIR typeck which - /// reference inference 
variables and get reproven after writeback. + /// reference inference variables and get reproven in case MIR type check + /// fails to prove something. /// /// See the documentation of `InferCtxt::in_hir_typeck` for more details. hir_typeck_potentially_region_dependent_goals: Vec<PredicateObligation<'tcx>>, @@ -782,22 +783,30 @@ impl<'tcx> InferCtxt<'tcx> { self.inner.borrow_mut().type_variables().num_vars() } - pub fn next_ty_var(&self, span: Span) -> Ty<'tcx> { - self.next_ty_var_with_origin(TypeVariableOrigin { span, param_def_id: None }) + pub fn next_ty_vid(&self, span: Span) -> TyVid { + self.next_ty_vid_with_origin(TypeVariableOrigin { span, param_def_id: None }) } - pub fn next_ty_var_with_origin(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { - let vid = self.inner.borrow_mut().type_variables().new_var(self.universe(), origin); - Ty::new_var(self.tcx, vid) + pub fn next_ty_vid_with_origin(&self, origin: TypeVariableOrigin) -> TyVid { + self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) } - pub fn next_ty_var_id_in_universe(&self, span: Span, universe: ty::UniverseIndex) -> TyVid { + pub fn next_ty_vid_in_universe(&self, span: Span, universe: ty::UniverseIndex) -> TyVid { let origin = TypeVariableOrigin { span, param_def_id: None }; self.inner.borrow_mut().type_variables().new_var(universe, origin) } + pub fn next_ty_var(&self, span: Span) -> Ty<'tcx> { + self.next_ty_var_with_origin(TypeVariableOrigin { span, param_def_id: None }) + } + + pub fn next_ty_var_with_origin(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { + let vid = self.next_ty_vid_with_origin(origin); + Ty::new_var(self.tcx, vid) + } + pub fn next_ty_var_in_universe(&self, span: Span, universe: ty::UniverseIndex) -> Ty<'tcx> { - let vid = self.next_ty_var_id_in_universe(span, universe); + let vid = self.next_ty_vid_in_universe(span, universe); Ty::new_var(self.tcx, vid) } diff --git a/compiler/rustc_interface/Cargo.toml b/compiler/rustc_interface/Cargo.toml index 473ac5e0cea..40152e78f8a 100644 --- a/compiler/rustc_interface/Cargo.toml +++ b/compiler/rustc_interface/Cargo.toml @@ -46,7 +46,7 @@ rustc_thread_pool = { path = "../rustc_thread_pool" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_traits = { path = "../rustc_traits" } rustc_ty_utils = { path = "../rustc_ty_utils" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [dev-dependencies] diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index c46e879b976..8f131f45bbd 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -285,7 +285,9 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> Ch .expecteds .entry(name.name) .and_modify(|v| match v { - ExpectedValues::Some(v) if !values_any_specified => { + ExpectedValues::Some(v) if !values_any_specified => + { + #[allow(rustc::potential_query_instability)] v.extend(values.clone()) } ExpectedValues::Some(_) => *v = ExpectedValues::Any, diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 3ba224723e3..bc5ef04079e 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -6,6 +6,7 @@ use std::sync::{Arc, LazyLock, OnceLock}; use std::{env, fs, iter}; use rustc_ast as ast; +use rustc_attr_parsing::{AttributeParser, ShouldEmit, validate_attr}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::jobserver::Proxy; use 
rustc_data_structures::steal::Steal; @@ -15,6 +16,7 @@ use rustc_errors::timings::TimingSection; use rustc_expand::base::{ExtCtxt, LintStoreExpand}; use rustc_feature::Features; use rustc_fs_util::try_canonicalize; +use rustc_hir::attrs::AttributeKind; use rustc_hir::def_id::{LOCAL_CRATE, StableCrateId, StableCrateIdMap}; use rustc_hir::definitions::Definitions; use rustc_incremental::setup_dep_graph; @@ -25,9 +27,7 @@ use rustc_middle::arena::Arena; use rustc_middle::dep_graph::DepsType; use rustc_middle::ty::{self, CurrentGcx, GlobalCtxt, RegisteredTools, TyCtxt}; use rustc_middle::util::Providers; -use rustc_parse::{ - new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal, validate_attr, -}; +use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal}; use rustc_passes::{abi_test, input_stats, layout_test}; use rustc_resolve::{Resolver, ResolverOutputs}; use rustc_session::config::{CrateType, Input, OutFileName, OutputFilenames, OutputType}; @@ -1081,7 +1081,8 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { if !tcx.is_typeck_child(def_id.to_def_id()) { // Child unsafety and borrowck happens together with the parent tcx.ensure_ok().check_unsafety(def_id); - tcx.ensure_ok().mir_borrowck(def_id) + tcx.ensure_ok().mir_borrowck(def_id); + tcx.ensure_ok().check_transmutes(def_id); } tcx.ensure_ok().has_ffi_unwind_calls(def_id); @@ -1244,8 +1245,7 @@ pub fn get_crate_name(sess: &Session, krate_attrs: &[ast::Attribute]) -> Symbol // in all code paths that require the crate name very early on, namely before // macro expansion. - let attr_crate_name = - validate_and_find_value_str_builtin_attr(sym::crate_name, sess, krate_attrs); + let attr_crate_name = parse_crate_name(sess, krate_attrs, ShouldEmit::EarlyFatal); let validate = |name, span| { rustc_session::output::validate_crate_name(sess, name, span); @@ -1283,6 +1283,28 @@ pub fn get_crate_name(sess: &Session, krate_attrs: &[ast::Attribute]) -> Symbol sym::rust_out } +pub(crate) fn parse_crate_name( + sess: &Session, + attrs: &[ast::Attribute], + emit_errors: ShouldEmit, +) -> Option<(Symbol, Span)> { + let rustc_hir::Attribute::Parsed(AttributeKind::CrateName { name, name_span, .. }) = + AttributeParser::parse_limited_should_emit( + sess, + &attrs, + sym::crate_name, + DUMMY_SP, + rustc_ast::node_id::CRATE_NODE_ID, + None, + emit_errors, + )? 
+ else { + unreachable!("crate_name is the only attr we could've parsed here"); + }; + + Some((name, name_span)) +} + fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit { // We don't permit macro calls inside of the attribute (e.g., #![recursion_limit = `expand!()`]) // because that would require expanding this while in the middle of expansion, which needs to diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 16474b231e0..7730bddc0f1 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -8,6 +8,7 @@ use rustc_abi::Align; use rustc_data_structures::profiling::TimePassesFormat; use rustc_errors::emitter::HumanReadableErrorType; use rustc_errors::{ColorConfig, registry}; +use rustc_hir::attrs::NativeLibKind; use rustc_session::config::{ AutoDiff, BranchProtection, CFGuard, Cfg, CollapseMacroDebuginfo, CoverageLevel, CoverageOptions, DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation, @@ -20,7 +21,7 @@ use rustc_session::config::{ }; use rustc_session::lint::Level; use rustc_session::search_paths::SearchPath; -use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind}; +use rustc_session::utils::{CanonicalizedPath, NativeLib}; use rustc_session::{CompilerIO, EarlyDiagCtxt, Session, build_session, getopts}; use rustc_span::edition::{DEFAULT_EDITION, Edition}; use rustc_span::source_map::{RealFileLoader, SourceMapInputs}; @@ -689,6 +690,7 @@ fn test_unstable_options_tracking_hash() { // Make sure that changing an [UNTRACKED] option leaves the hash unchanged. // tidy-alphabetical-start untracked!(assert_incr_state, Some(String::from("loaded"))); + untracked!(codegen_source_order, true); untracked!(deduplicate_diagnostics, false); untracked!(dump_dep_graph, true); untracked!(dump_mir, Some(String::from("abc"))); @@ -806,6 +808,7 @@ fn test_unstable_options_tracking_hash() { tracked!(hint_mostly_unused, true); tracked!(human_readable_cgu_names, true); tracked!(incremental_ignore_spans, true); + tracked!(indirect_branch_cs_prefix, true); tracked!(inline_mir, Some(true)); tracked!(inline_mir_hint_threshold, Some(123)); tracked!(inline_mir_threshold, Some(123)); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 0ca4fcc66ca..04006f3e446 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -5,14 +5,15 @@ use std::sync::{Arc, OnceLock}; use std::{env, thread}; use rustc_ast as ast; +use rustc_attr_parsing::{ShouldEmit, validate_attr}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::sync; +use rustc_errors::LintBuffer; use rustc_metadata::{DylibError, load_symbol_from_dylib}; use rustc_middle::ty::CurrentGcx; -use rustc_parse::validate_attr; use rustc_session::config::{Cfg, OutFileName, OutputFilenames, OutputTypes, Sysroot, host_tuple}; -use rustc_session::lint::{self, BuiltinLintDiag, LintBuffer}; +use rustc_session::lint::{self, BuiltinLintDiag}; use rustc_session::output::{CRATE_TYPES, categorize_crate_type}; use rustc_session::{EarlyDiagCtxt, Session, filesearch}; use rustc_span::edit_distance::find_best_match_for_name; @@ -23,6 +24,7 @@ use rustc_target::spec::Target; use tracing::info; use crate::errors; +use crate::passes::parse_crate_name; /// Function pointer type that constructs a new CodegenBackend. 
type MakeBackendFn = fn() -> Box<dyn CodegenBackend>; @@ -519,11 +521,10 @@ pub fn build_output_filenames(attrs: &[ast::Attribute], sess: &Session) -> Outpu sess.dcx().emit_fatal(errors::MultipleOutputTypesToStdout); } - let crate_name = sess - .opts - .crate_name - .clone() - .or_else(|| rustc_attr_parsing::find_crate_name(attrs).map(|n| n.to_string())); + let crate_name = + sess.opts.crate_name.clone().or_else(|| { + parse_crate_name(sess, attrs, ShouldEmit::Nothing).map(|i| i.0.to_string()) + }); match sess.io.output_file { None => { diff --git a/compiler/rustc_lexer/Cargo.toml b/compiler/rustc_lexer/Cargo.toml index 448a50faf45..e0019fb1821 100644 --- a/compiler/rustc_lexer/Cargo.toml +++ b/compiler/rustc_lexer/Cargo.toml @@ -14,13 +14,9 @@ Rust lexer used by rustc. No stability guarantees are provided. # Note that this crate purposefully does not depend on other rustc crates [dependencies] -memchr = "2.7.4" +memchr.workspace = true +unicode-properties = { version = "0.1.0", default-features = false, features = ["emoji"] } unicode-xid = "0.2.0" -[dependencies.unicode-properties] -version = "0.1.0" -default-features = false -features = ["emoji"] - [dev-dependencies] expect-test = "1.4.0" diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs index e80196ed567..483cc3e93dc 100644 --- a/compiler/rustc_lexer/src/lib.rs +++ b/compiler/rustc_lexer/src/lib.rs @@ -540,11 +540,11 @@ impl Cursor<'_> { // whitespace between the opening and the infostring. self.eat_while(|ch| ch != '\n' && is_whitespace(ch)); - // copied from `eat_identifier`, but allows `.` in infostring to allow something like + // copied from `eat_identifier`, but allows `-` and `.` in infostring to allow something like // `---Cargo.toml` as a valid opener if is_id_start(self.first()) { self.bump(); - self.eat_while(|c| is_id_continue(c) || c == '.'); + self.eat_while(|c| is_id_continue(c) || c == '-' || c == '.'); } self.eat_while(|ch| ch != '\n' && is_whitespace(ch)); diff --git a/compiler/rustc_lint/Cargo.toml b/compiler/rustc_lint/Cargo.toml index 7718f16984d..7900e4b9ab2 100644 --- a/compiler/rustc_lint/Cargo.toml +++ b/compiler/rustc_lint/Cargo.toml @@ -24,6 +24,6 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true unicode-security = "0.1.0" # tidy-alphabetical-end diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 776d8d35e05..417e5a97069 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -205,8 +205,6 @@ lint_confusable_identifier_pair = found both `{$existing_sym}` and `{$sym}` as i .current_use = this identifier can be confused with `{$existing_sym}` .other_use = other identifier used here -lint_custom_inner_attribute_unstable = custom inner attributes are unstable - lint_dangling_pointers_from_locals = a dangling pointer will be produced because the local variable `{$local_var_name}` will be dropped .ret_ty = return type of the {$fn_kind} is `{$ret_ty}` .local_var = `{$local_var_name}` is part the {$fn_kind} and will be dropped at the end of the {$fn_kind} @@ -271,10 +269,6 @@ lint_expectation = this lint expectation is unfulfilled lint_extern_crate_not_idiomatic = `extern crate` is not idiomatic in the new edition .suggestion = convert it to a `use` -lint_extern_without_abi = `extern` declarations without an 
explicit ABI are deprecated - .label = ABI should be specified here - .suggestion = explicitly specify the {$default_abi} ABI - lint_for_loops_over_fallibles = for loop over {$article} `{$ref_prefix}{$ty}`. This is more readably written as an `if let` statement .suggestion = consider using `if let` to clear intent @@ -294,19 +288,6 @@ lint_hidden_glob_reexport = private item shadows public glob re-export lint_hidden_lifetime_parameters = hidden lifetime parameters in types are deprecated -lint_hidden_unicode_codepoints = unicode codepoint changing visible direction of text present in {$label} - .label = this {$label} contains {$count -> - [one] an invisible - *[other] invisible - } unicode text flow control {$count -> - [one] codepoint - *[other] codepoints - } - .note = these kind of unicode codepoints change the way text flows on applications that support them, but can cause confusion because they change the order of characters on the screen - .suggestion_remove = if their presence wasn't intentional, you can remove them - .suggestion_escape = if you want to keep them but make them visible in your source code, you can escape them - .no_suggestion_note_escape = if you want to keep them but make them visible in your source code, you can escape them: {$escaped} - lint_identifier_non_ascii_char = identifier contains non-ASCII characters lint_identifier_uncommon_codepoints = identifier contains {$codepoints_len -> @@ -431,8 +412,6 @@ lint_improper_ctypes_union_non_exhaustive = this union is non-exhaustive lint_incomplete_include = include macro expected single expression in source -lint_inner_macro_attribute_unstable = inner macro attributes are unstable - lint_invalid_asm_label_binary = avoid using labels containing only the digits `0` and `1` in inline assembly .label = use a different label that doesn't start with `0` or `1` .help = start numbering with `2` instead @@ -483,6 +462,14 @@ lint_invalid_reference_casting_note_book = for more information, visit <https:// lint_invalid_reference_casting_note_ty_has_interior_mutability = even for types with interior mutability, the only legal way to obtain a mutable pointer from a shared reference is through `UnsafeCell::get` +lint_int_to_ptr_transmutes = transmuting an integer to a pointer creates a pointer without provenance + .note = this is dangerous because dereferencing the resulting pointer is undefined behavior + .note_exposed_provenance = exposed provenance semantics can be used to create a pointer based on some previously exposed provenance + .help_transmute = for more information about transmute, see <https://doc.rust-lang.org/std/mem/fn.transmute.html#transmutation-between-pointers-and-integers> + .help_exposed_provenance = for more information about exposed provenance, see <https://doc.rust-lang.org/std/ptr/index.html#exposed-provenance> + .suggestion_with_exposed_provenance = use `std::ptr::with_exposed_provenance{$suffix}` instead to use a previously exposed provenance + .suggestion_without_provenance_mut = if you truly mean to create a pointer without provenance, use `std::ptr::without_provenance_mut` + lint_legacy_derive_helpers = derive helper attribute is used before it is introduced .label = the attribute is introduced here @@ -732,7 +719,7 @@ lint_pattern_in_foreign = patterns aren't allowed in foreign function declaratio lint_private_extern_crate_reexport = extern crate `{$ident}` is private and cannot be re-exported .suggestion = consider making the `extern crate` item publicly accessible 
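The new `lint_int_to_ptr_transmutes` messages above concern integer-to-pointer transmutes. A minimal sketch of the linted pattern and of the standard-library alternatives the suggestions point to; the address and pointee type are arbitrary:

```rust
fn main() {
    let addr: usize = 0x1000;

    // The linted pattern: the resulting pointer has no provenance, so
    // dereferencing it is undefined behavior.
    let _bad: *const u8 = unsafe { std::mem::transmute(addr) };

    // Suggested alternative when `addr` comes from a previously exposed pointer.
    let _from_exposed: *const u8 = std::ptr::with_exposed_provenance(addr);

    // Suggested alternative when a provenance-free pointer is truly intended.
    let _no_provenance: *mut u8 = std::ptr::without_provenance_mut(addr);
}
```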
-lint_proc_macro_derive_resolution_fallback = cannot find {$ns} `{$ident}` in this scope +lint_proc_macro_derive_resolution_fallback = cannot find {$ns_descr} `{$ident}` in this scope .label = names from parent modules are not accessible without an explicit import lint_query_instability = using `{$query}` can result in unstable query results @@ -870,10 +857,6 @@ lint_undropped_manually_drops = calls to `std::mem::drop` with `std::mem::Manual .label = argument has type `{$arg_ty}` .suggestion = use `std::mem::ManuallyDrop::into_inner` to get the inner value -lint_unexpected_builtin_cfg = unexpected `--cfg {$cfg}` flag - .controlled_by = config `{$cfg_name}` is only supposed to be controlled by `{$controlled_by}` - .incoherent = manually setting a built-in cfg can and does create incoherent behaviors - lint_unexpected_cfg_add_build_rs_println = or consider adding `{$build_rs_println}` to the top of the `build.rs` lint_unexpected_cfg_add_cargo_feature = consider using a Cargo feature instead lint_unexpected_cfg_add_cargo_toml_lint_cfg = or consider adding in `Cargo.toml` the `check-cfg` lint config for the lint:{$cargo_toml_lint_cfg} @@ -983,6 +966,7 @@ lint_unused_allocation_mut = unnecessary allocation, use `&mut` instead lint_unused_builtin_attribute = unused attribute `{$attr_name}` .note = the built-in attribute `{$attr_name}` will be ignored, since it's applied to the macro invocation `{$macro_name}` + .suggestion = remove the attribute lint_unused_closure = unused {$pre}{$count -> diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 8006cfcf30f..c3c0a34df71 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -3097,7 +3097,7 @@ impl EarlyLintPass for SpecialModuleName { if let ast::ItemKind::Mod( _, ident, - ast::ModKind::Unloaded | ast::ModKind::Loaded(_, ast::Inline::No, _, _), + ast::ModKind::Unloaded | ast::ModKind::Loaded(_, ast::Inline::No { .. 
}, _), ) = item.kind { if item.attrs.iter().any(|a| a.has_name(sym::path)) { diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index 11181d10af5..0669da1a025 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -11,7 +11,7 @@ use rustc_ast::util::parser::ExprPrecedence; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sync; use rustc_data_structures::unord::UnordMap; -use rustc_errors::{Diag, LintDiagnostic, MultiSpan}; +use rustc_errors::{Diag, LintBuffer, LintDiagnostic, MultiSpan}; use rustc_feature::Features; use rustc_hir::def::Res; use rustc_hir::def_id::{CrateNum, DefId}; @@ -23,8 +23,8 @@ use rustc_middle::middle::privacy::EffectiveVisibilities; use rustc_middle::ty::layout::{LayoutError, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::print::{PrintError, PrintTraitRefExt as _, Printer, with_no_trimmed_paths}; use rustc_middle::ty::{self, GenericArg, RegisteredTools, Ty, TyCtxt, TypingEnv, TypingMode}; -use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintBuffer, LintExpectationId, LintId}; -use rustc_session::{LintStoreMarker, Session}; +use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintExpectationId, LintId}; +use rustc_session::{DynLintStore, Session}; use rustc_span::edit_distance::find_best_match_for_names; use rustc_span::{Ident, Span, Symbol, sym}; use tracing::debug; @@ -62,7 +62,13 @@ pub struct LintStore { lint_groups: FxIndexMap<&'static str, LintGroup>, } -impl LintStoreMarker for LintStore {} +impl DynLintStore for LintStore { + fn lint_groups_iter(&self) -> Box<dyn Iterator<Item = rustc_session::LintGroup> + '_> { + Box::new(self.get_lint_groups().map(|(name, lints, is_externally_loaded)| { + rustc_session::LintGroup { name, lints, is_externally_loaded } + })) + } +} /// The target of the `by_name` map, which accounts for renaming/deprecation. 
#[derive(Debug)] @@ -745,41 +751,41 @@ impl<'tcx> LateContext<'tcx> { /// } /// ``` pub fn get_def_path(&self, def_id: DefId) -> Vec<Symbol> { - struct AbsolutePathPrinter<'tcx> { + struct LintPathPrinter<'tcx> { tcx: TyCtxt<'tcx>, path: Vec<Symbol>, } - impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { + impl<'tcx> Printer<'tcx> for LintPathPrinter<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } fn print_type(&mut self, _ty: Ty<'tcx>) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } fn print_dyn_existential( &mut self, _predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>, ) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } fn print_const(&mut self, _ct: ty::Const<'tcx>) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.path = vec![self.tcx.crate_name(cnum)]; Ok(()) } - fn path_qualified( + fn print_path_with_qualified( &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, @@ -800,7 +806,7 @@ impl<'tcx> LateContext<'tcx> { }) } - fn path_append_impl( + fn print_path_with_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, @@ -825,7 +831,7 @@ impl<'tcx> LateContext<'tcx> { Ok(()) } - fn path_append( + fn print_path_with_simple( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, @@ -844,7 +850,7 @@ impl<'tcx> LateContext<'tcx> { Ok(()) } - fn path_generic_args( + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _args: &[GenericArg<'tcx>], @@ -853,7 +859,7 @@ impl<'tcx> LateContext<'tcx> { } } - let mut p = AbsolutePathPrinter { tcx: self.tcx, path: vec![] }; + let mut p = LintPathPrinter { tcx: self.tcx, path: vec![] }; p.print_def_path(def_id, &[]).unwrap(); p.path } diff --git a/compiler/rustc_lint/src/drop_forget_useless.rs b/compiler/rustc_lint/src/drop_forget_useless.rs index 7f098893f7d..c2d137986ce 100644 --- a/compiler/rustc_lint/src/drop_forget_useless.rs +++ b/compiler/rustc_lint/src/drop_forget_useless.rs @@ -151,7 +151,7 @@ impl<'tcx> LateLintPass<'tcx> for DropForgetUseless { && let Node::Stmt(stmt) = node && let StmtKind::Semi(e) = stmt.kind && e.hir_id == expr.hir_id - && let Some(arg_span) = arg.span.find_ancestor_inside(expr.span) + && let Some(arg_span) = arg.span.find_ancestor_inside_same_ctxt(expr.span) { UseLetUnderscoreIgnoreSuggestion::Suggestion { start_span: expr.span.shrink_to_lo().until(arg_span), diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs index 58205087def..dff1fc43670 100644 --- a/compiler/rustc_lint/src/early.rs +++ b/compiler/rustc_lint/src/early.rs @@ -7,10 +7,11 @@ use rustc_ast::visit::{self as ast_visit, 
Visitor, walk_list}; use rustc_ast::{self as ast, HasAttrs}; use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_errors::{BufferedEarlyLint, DecorateDiagCompat, LintBuffer}; use rustc_feature::Features; use rustc_middle::ty::{RegisteredTools, TyCtxt}; use rustc_session::Session; -use rustc_session::lint::{BufferedEarlyLint, LintBuffer, LintPass}; +use rustc_session::lint::LintPass; use rustc_span::{Ident, Span}; use tracing::debug; @@ -36,8 +37,11 @@ impl<'ecx, 'tcx, T: EarlyLintPass> EarlyContextAndPass<'ecx, 'tcx, T> { fn check_id(&mut self, id: ast::NodeId) { for early_lint in self.context.buffered.take(id) { let BufferedEarlyLint { span, node_id: _, lint_id, diagnostic } = early_lint; - self.context.opt_span_lint(lint_id.lint, span, |diag| { - diagnostics::decorate_builtin_lint(self.context.sess(), self.tcx, diagnostic, diag); + self.context.opt_span_lint(lint_id.lint, span, |diag| match diagnostic { + DecorateDiagCompat::Builtin(b) => { + diagnostics::decorate_builtin_lint(self.context.sess(), self.tcx, b, diag); + } + DecorateDiagCompat::Dynamic(d) => d.decorate_lint_box(diag), }); } } diff --git a/compiler/rustc_lint/src/early/diagnostics.rs b/compiler/rustc_lint/src/early/diagnostics.rs index 1e4bc79ce70..7300490b838 100644 --- a/compiler/rustc_lint/src/early/diagnostics.rs +++ b/compiler/rustc_lint/src/early/diagnostics.rs @@ -64,10 +64,12 @@ pub fn decorate_builtin_lint( } .decorate_lint(diag); } - BuiltinLintDiag::ProcMacroDeriveResolutionFallback { span: macro_span, ns, ident } => { - lints::ProcMacroDeriveResolutionFallback { span: macro_span, ns, ident } - .decorate_lint(diag) - } + BuiltinLintDiag::ProcMacroDeriveResolutionFallback { + span: macro_span, + ns_descr, + ident, + } => lints::ProcMacroDeriveResolutionFallback { span: macro_span, ns_descr, ident } + .decorate_lint(diag), BuiltinLintDiag::MacroExpandedMacroExportsAccessedByAbsolutePaths(span_def) => { lints::MacroExpandedMacroExportsAccessedByAbsolutePaths { definition: span_def } .decorate_lint(diag) @@ -156,9 +158,6 @@ pub fn decorate_builtin_lint( } .decorate_lint(diag); } - BuiltinLintDiag::MissingAbi(label_span, default_abi) => { - lints::MissingAbi { span: label_span, default_abi }.decorate_lint(diag); - } BuiltinLintDiag::LegacyDeriveHelpers(label_span) => { lints::LegacyDeriveHelpers { span: label_span }.decorate_lint(diag); } @@ -184,29 +183,14 @@ pub fn decorate_builtin_lint( lints::ReservedMultihash { suggestion }.decorate_lint(diag); } } - BuiltinLintDiag::HiddenUnicodeCodepoints { - label, - count, - span_label, - labels, - escape, - spans, + BuiltinLintDiag::UnusedBuiltinAttribute { + attr_name, + macro_name, + invoc_span, + attr_span, } => { - lints::HiddenUnicodeCodepointsDiag { - label: &label, - count, - span_label, - labels: labels.map(|spans| lints::HiddenUnicodeCodepointsDiagLabels { spans }), - sub: if escape { - lints::HiddenUnicodeCodepointsDiagSub::Escape { spans } - } else { - lints::HiddenUnicodeCodepointsDiagSub::NoEscape { spans } - }, - } - .decorate_lint(diag); - } - BuiltinLintDiag::UnusedBuiltinAttribute { attr_name, macro_name, invoc_span } => { - lints::UnusedBuiltinAttribute { invoc_span, attr_name, macro_name }.decorate_lint(diag); + lints::UnusedBuiltinAttribute { invoc_span, attr_name, macro_name, attr_span } + .decorate_lint(diag); } BuiltinLintDiag::TrailingMacro(is_trailing, name) => { lints::TrailingMacro { is_trailing, name }.decorate_lint(diag); @@ -458,17 +442,8 @@ pub fn decorate_builtin_lint( } .decorate_lint(diag) } - 
BuiltinLintDiag::InnerAttributeUnstable { is_macro } => if is_macro { - lints::InnerAttributeUnstable::InnerMacroAttribute - } else { - lints::InnerAttributeUnstable::CustomInnerAttribute - } - .decorate_lint(diag), BuiltinLintDiag::OutOfScopeMacroCalls { span, path, location } => { lints::OutOfScopeMacroCalls { span, path, location }.decorate_lint(diag) } - BuiltinLintDiag::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by } => { - lints::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by }.decorate_lint(diag) - } } } diff --git a/compiler/rustc_lint/src/foreign_modules.rs b/compiler/rustc_lint/src/foreign_modules.rs index 759e6c927b8..3267e70f1de 100644 --- a/compiler/rustc_lint/src/foreign_modules.rs +++ b/compiler/rustc_lint/src/foreign_modules.rs @@ -179,7 +179,7 @@ impl ClashingExternDeclarations { /// symbol's name. fn name_of_extern_decl(tcx: TyCtxt<'_>, fi: hir::OwnerId) -> SymbolName { if let Some((overridden_link_name, overridden_link_name_span)) = - tcx.codegen_fn_attrs(fi).link_name.map(|overridden_link_name| { + tcx.codegen_fn_attrs(fi).symbol_name.map(|overridden_link_name| { // FIXME: Instead of searching through the attributes again to get span // information, we could have codegen_fn_attrs also give span information back for // where the attribute was defined. However, until this is found to be a diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs index 016ff17f5d7..929fc8207b0 100644 --- a/compiler/rustc_lint/src/internal.rs +++ b/compiler/rustc_lint/src/internal.rs @@ -1,10 +1,10 @@ //! Some lints that are only useful in the compiler or crates that use compiler internals, such as //! Clippy. -use rustc_hir::HirId; use rustc_hir::def::Res; use rustc_hir::def_id::DefId; -use rustc_middle::ty::{self, GenericArgsRef, Ty as MiddleTy}; +use rustc_hir::{Expr, ExprKind, HirId}; +use rustc_middle::ty::{self, GenericArgsRef, PredicatePolarity, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::hygiene::{ExpnKind, MacroKind}; use rustc_span::{Span, sym}; @@ -56,25 +56,6 @@ impl LateLintPass<'_> for DefaultHashTypes { } } -/// Helper function for lints that check for expressions with calls and use typeck results to -/// get the `DefId` and `GenericArgsRef` of the function. -fn typeck_results_of_method_fn<'tcx>( - cx: &LateContext<'tcx>, - expr: &hir::Expr<'_>, -) -> Option<(Span, DefId, ty::GenericArgsRef<'tcx>)> { - match expr.kind { - hir::ExprKind::MethodCall(segment, ..) - if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) => - { - Some((segment.ident.span, def_id, cx.typeck_results().node_args(expr.hir_id))) - } - _ => match cx.typeck_results().node_type(expr.hir_id).kind() { - &ty::FnDef(def_id, args) => Some((expr.span, def_id, args)), - _ => None, - }, - } -} - declare_tool_lint! { /// The `potential_query_instability` lint detects use of methods which can lead to /// potential query instability, such as iterating over a `HashMap`. @@ -101,10 +82,12 @@ declare_tool_lint! 
{ declare_lint_pass!(QueryStability => [POTENTIAL_QUERY_INSTABILITY, UNTRACKED_QUERY_INFORMATION]); -impl LateLintPass<'_> for QueryStability { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { - let Some((span, def_id, args)) = typeck_results_of_method_fn(cx, expr) else { return }; - if let Ok(Some(instance)) = ty::Instance::try_resolve(cx.tcx, cx.typing_env(), def_id, args) +impl<'tcx> LateLintPass<'tcx> for QueryStability { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { + if let Some((callee_def_id, span, generic_args, _recv, _args)) = + get_callee_span_generic_args_and_args(cx, expr) + && let Ok(Some(instance)) = + ty::Instance::try_resolve(cx.tcx, cx.typing_env(), callee_def_id, generic_args) { let def_id = instance.def_id(); if cx.tcx.has_attr(def_id, sym::rustc_lint_query_instability) { @@ -113,7 +96,15 @@ impl LateLintPass<'_> for QueryStability { span, QueryInstability { query: cx.tcx.item_name(def_id) }, ); + } else if has_unstable_into_iter_predicate(cx, callee_def_id, generic_args) { + let call_span = span.with_hi(expr.span.hi()); + cx.emit_span_lint( + POTENTIAL_QUERY_INSTABILITY, + call_span, + QueryInstability { query: sym::into_iter }, + ); } + if cx.tcx.has_attr(def_id, sym::rustc_lint_untracked_query_information) { cx.emit_span_lint( UNTRACKED_QUERY_INFORMATION, @@ -125,6 +116,69 @@ impl LateLintPass<'_> for QueryStability { } } +fn has_unstable_into_iter_predicate<'tcx>( + cx: &LateContext<'tcx>, + callee_def_id: DefId, + generic_args: GenericArgsRef<'tcx>, +) -> bool { + let Some(into_iterator_def_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator) else { + return false; + }; + let Some(into_iter_fn_def_id) = cx.tcx.lang_items().into_iter_fn() else { + return false; + }; + let predicates = cx.tcx.predicates_of(callee_def_id).instantiate(cx.tcx, generic_args); + for (predicate, _) in predicates { + let Some(trait_pred) = predicate.as_trait_clause() else { + continue; + }; + if trait_pred.def_id() != into_iterator_def_id + || trait_pred.polarity() != PredicatePolarity::Positive + { + continue; + } + // `IntoIterator::into_iter` has no additional method args. + let into_iter_fn_args = + cx.tcx.instantiate_bound_regions_with_erased(trait_pred).trait_ref.args; + let Ok(Some(instance)) = ty::Instance::try_resolve( + cx.tcx, + cx.typing_env(), + into_iter_fn_def_id, + into_iter_fn_args, + ) else { + continue; + }; + // Does the input type's `IntoIterator` implementation have the + // `rustc_lint_query_instability` attribute on its `into_iter` method? + if cx.tcx.has_attr(instance.def_id(), sym::rustc_lint_query_instability) { + return true; + } + } + false +} + +/// Checks whether an expression is a function or method call and, if so, returns its `DefId`, +/// `Span`, `GenericArgs`, and arguments. This is a slight augmentation of a similarly named Clippy +/// function, `get_callee_generic_args_and_args`. 
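The extended `POTENTIAL_QUERY_INSTABILITY` check above is easiest to see on a concrete call shape. Below is a minimal, self-contained sketch (using `std::collections::HashMap` as a stand-in for the compiler's query-unstable map types; all names are illustrative and not part of this diff) of a call whose instantiated `IntoIterator` bound resolves to an iteration-order-dependent impl, which is what `has_unstable_into_iter_predicate` looks for.

```rust
use std::collections::HashMap;

/// A generic callee whose `IntoIterator` bound is chosen by the caller.
fn keys_of<I>(iter: I) -> Vec<u32>
where
    I: IntoIterator<Item = (u32, u32)>,
{
    iter.into_iter().map(|(k, _)| k).collect()
}

fn main() {
    let map: HashMap<u32, u32> = HashMap::from([(1, 10), (2, 20)]);
    // With `I = HashMap<u32, u32>`, the callee's `IntoIterator` predicate
    // resolves to the map's iteration-order-dependent `into_iter`; inside
    // rustc that resolved method carries `#[rustc_lint_query_instability]`,
    // so the whole call span would now be reported.
    let _keys = keys_of(map);
}
```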
+fn get_callee_span_generic_args_and_args<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'tcx>, +) -> Option<(DefId, Span, GenericArgsRef<'tcx>, Option<&'tcx Expr<'tcx>>, &'tcx [Expr<'tcx>])> { + if let ExprKind::Call(callee, args) = expr.kind + && let callee_ty = cx.typeck_results().expr_ty(callee) + && let ty::FnDef(callee_def_id, generic_args) = callee_ty.kind() + { + return Some((*callee_def_id, callee.span, generic_args, None, args)); + } + if let ExprKind::MethodCall(segment, recv, args, _) = expr.kind + && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) + { + let generic_args = cx.typeck_results().node_args(expr.hir_id); + return Some((method_def_id, segment.ident.span, generic_args, Some(recv), args)); + } + None +} + declare_tool_lint! { /// The `usage_of_ty_tykind` lint detects usages of `ty::TyKind::<kind>`, /// where `ty::<kind>` would suffice. @@ -461,33 +515,22 @@ declare_tool_lint! { declare_lint_pass!(Diagnostics => [UNTRANSLATABLE_DIAGNOSTIC, DIAGNOSTIC_OUTSIDE_OF_IMPL]); impl LateLintPass<'_> for Diagnostics { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { + fn check_expr<'tcx>(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { let collect_args_tys_and_spans = |args: &[hir::Expr<'_>], reserve_one_extra: bool| { let mut result = Vec::with_capacity(args.len() + usize::from(reserve_one_extra)); result.extend(args.iter().map(|arg| (cx.typeck_results().expr_ty(arg), arg.span))); result }; // Only check function calls and method calls. - let (span, def_id, fn_gen_args, arg_tys_and_spans) = match expr.kind { - hir::ExprKind::Call(callee, args) => { - match cx.typeck_results().node_type(callee.hir_id).kind() { - &ty::FnDef(def_id, fn_gen_args) => { - (callee.span, def_id, fn_gen_args, collect_args_tys_and_spans(args, false)) - } - _ => return, // occurs for fns passed as args - } - } - hir::ExprKind::MethodCall(_segment, _recv, args, _span) => { - let Some((span, def_id, fn_gen_args)) = typeck_results_of_method_fn(cx, expr) - else { - return; - }; - let mut args = collect_args_tys_and_spans(args, true); - args.insert(0, (cx.tcx.types.self_param, _recv.span)); // dummy inserted for `self` - (span, def_id, fn_gen_args, args) - } - _ => return, + let Some((def_id, span, fn_gen_args, recv, args)) = + get_callee_span_generic_args_and_args(cx, expr) + else { + return; }; + let mut arg_tys_and_spans = collect_args_tys_and_spans(args, recv.is_some()); + if let Some(recv) = recv { + arg_tys_and_spans.insert(0, (cx.tcx.types.self_param, recv.span)); // dummy inserted for `self` + } Self::diagnostic_outside_of_impl(cx, span, expr.hir_id, def_id, fn_gen_args); Self::untranslatable_diagnostic(cx, def_id, &arg_tys_and_spans); @@ -496,7 +539,7 @@ impl LateLintPass<'_> for Diagnostics { impl Diagnostics { // Is the type `{D,Subd}iagMessage`? 
- fn is_diag_message<'cx>(cx: &LateContext<'cx>, ty: MiddleTy<'cx>) -> bool { + fn is_diag_message<'cx>(cx: &LateContext<'cx>, ty: Ty<'cx>) -> bool { if let Some(adt_def) = ty.ty_adt_def() && let Some(name) = cx.tcx.get_diagnostic_name(adt_def.did()) && matches!(name, sym::DiagMessage | sym::SubdiagMessage) @@ -510,7 +553,7 @@ impl Diagnostics { fn untranslatable_diagnostic<'cx>( cx: &LateContext<'cx>, def_id: DefId, - arg_tys_and_spans: &[(MiddleTy<'cx>, Span)], + arg_tys_and_spans: &[(Ty<'cx>, Span)], ) { let fn_sig = cx.tcx.fn_sig(def_id).instantiate_identity().skip_binder(); let predicates = cx.tcx.predicates_of(def_id).instantiate_identity(cx.tcx).predicates; diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index f06757b3c23..bdbac7fc4d1 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -133,10 +133,9 @@ pub use early::{EarlyCheckNode, check_ast_node}; pub use late::{check_crate, late_lint_mod, unerased_lint_store}; pub use levels::LintLevelsBuilder; pub use passes::{EarlyLintPass, LateLintPass}; +pub use rustc_errors::BufferedEarlyLint; pub use rustc_session::lint::Level::{self, *}; -pub use rustc_session::lint::{ - BufferedEarlyLint, FutureIncompatibleInfo, Lint, LintId, LintPass, LintVec, -}; +pub use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintId, LintPass, LintVec}; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } diff --git a/compiler/rustc_lint/src/lifetime_syntax.rs b/compiler/rustc_lint/src/lifetime_syntax.rs index 464f4fc34b9..413525eb6e5 100644 --- a/compiler/rustc_lint/src/lifetime_syntax.rs +++ b/compiler/rustc_lint/src/lifetime_syntax.rs @@ -214,9 +214,9 @@ impl<T> LifetimeSyntaxCategories<Vec<T>> { } } - pub fn flatten(&self) -> impl Iterator<Item = &T> { - let Self { hidden, elided, named } = self; - [hidden.iter(), elided.iter(), named.iter()].into_iter().flatten() + pub fn iter_unnamed(&self) -> impl Iterator<Item = &T> { + let Self { hidden, elided, named: _ } = self; + [hidden.iter(), elided.iter()].into_iter().flatten() } } @@ -495,7 +495,7 @@ fn emit_mismatch_diagnostic<'tcx>( cx.emit_span_lint( MISMATCHED_LIFETIME_SYNTAXES, - inputs.flatten().copied().collect::<Vec<_>>(), + inputs.iter_unnamed().chain(outputs.iter_unnamed()).copied().collect::<Vec<_>>(), lints::MismatchedLifetimeSyntaxes { inputs, outputs, suggestions }, ); } diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index ba0112c8ac6..56d65ed08f9 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1,14 +1,14 @@ +// ignore-tidy-filelength + #![allow(rustc::untranslatable_diagnostic)] use std::num::NonZero; -use rustc_abi::ExternAbi; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagMessage, DiagStyledString, ElidedLifetimeInPathSubdiag, EmissionGuarantee, LintDiagnostic, MultiSpan, Subdiagnostic, SuggestionStyle, }; use rustc_hir as hir; -use rustc_hir::def::Namespace; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::VisitorExt; use rustc_macros::{LintDiagnostic, Subdiagnostic}; @@ -817,80 +817,6 @@ pub(crate) enum InvalidReferenceCastingDiag<'tcx> { }, } -// hidden_unicode_codepoints.rs -#[derive(LintDiagnostic)] -#[diag(lint_hidden_unicode_codepoints)] -#[note] -pub(crate) struct HiddenUnicodeCodepointsDiag<'a> { - pub label: &'a str, - pub count: usize, - #[label] - pub span_label: Span, - #[subdiagnostic] - pub labels: Option<HiddenUnicodeCodepointsDiagLabels>, - #[subdiagnostic] - pub sub: 
HiddenUnicodeCodepointsDiagSub, -} - -pub(crate) struct HiddenUnicodeCodepointsDiagLabels { - pub spans: Vec<(char, Span)>, -} - -impl Subdiagnostic for HiddenUnicodeCodepointsDiagLabels { - fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { - for (c, span) in self.spans { - diag.span_label(span, format!("{c:?}")); - } - } -} - -pub(crate) enum HiddenUnicodeCodepointsDiagSub { - Escape { spans: Vec<(char, Span)> }, - NoEscape { spans: Vec<(char, Span)> }, -} - -// Used because of multiple multipart_suggestion and note -impl Subdiagnostic for HiddenUnicodeCodepointsDiagSub { - fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { - match self { - HiddenUnicodeCodepointsDiagSub::Escape { spans } => { - diag.multipart_suggestion_with_style( - fluent::lint_suggestion_remove, - spans.iter().map(|(_, span)| (*span, "".to_string())).collect(), - Applicability::MachineApplicable, - SuggestionStyle::HideCodeAlways, - ); - diag.multipart_suggestion( - fluent::lint_suggestion_escape, - spans - .into_iter() - .map(|(c, span)| { - let c = format!("{c:?}"); - (span, c[1..c.len() - 1].to_string()) - }) - .collect(), - Applicability::MachineApplicable, - ); - } - HiddenUnicodeCodepointsDiagSub::NoEscape { spans } => { - // FIXME: in other suggestions we've reversed the inner spans of doc comments. We - // should do the same here to provide the same good suggestions as we do for - // literals above. - diag.arg( - "escaped", - spans - .into_iter() - .map(|(c, _)| format!("{c:?}")) - .collect::<Vec<String>>() - .join(", "), - ); - diag.note(fluent::lint_suggestion_remove); - diag.note(fluent::lint_no_suggestion_note_escape); - } - } - } -} - // map_unit_fn.rs #[derive(LintDiagnostic)] #[diag(lint_map_unit_fn)] @@ -1618,6 +1544,48 @@ impl<'a> LintDiagnostic<'a, ()> for DropGlue<'_> { } } +// transmute.rs +#[derive(LintDiagnostic)] +#[diag(lint_int_to_ptr_transmutes)] +#[note] +#[note(lint_note_exposed_provenance)] +#[help(lint_suggestion_without_provenance_mut)] +#[help(lint_help_transmute)] +#[help(lint_help_exposed_provenance)] +pub(crate) struct IntegerToPtrTransmutes<'tcx> { + #[subdiagnostic] + pub suggestion: Option<IntegerToPtrTransmutesSuggestion<'tcx>>, +} + +#[derive(Subdiagnostic)] +pub(crate) enum IntegerToPtrTransmutesSuggestion<'tcx> { + #[multipart_suggestion( + lint_suggestion_with_exposed_provenance, + applicability = "machine-applicable", + style = "verbose" + )] + ToPtr { + dst: Ty<'tcx>, + suffix: &'static str, + #[suggestion_part(code = "std::ptr::with_exposed_provenance{suffix}::<{dst}>(")] + start_call: Span, + }, + #[multipart_suggestion( + lint_suggestion_with_exposed_provenance, + applicability = "machine-applicable", + style = "verbose" + )] + ToRef { + dst: Ty<'tcx>, + suffix: &'static str, + ref_mutbl: &'static str, + #[suggestion_part( + code = "&{ref_mutbl}*std::ptr::with_exposed_provenance{suffix}::<{dst}>(" + )] + start_call: Span, + }, +} + // types.rs #[derive(LintDiagnostic)] #[diag(lint_range_endpoint_out_of_range)] @@ -2568,16 +2536,6 @@ pub(crate) mod unexpected_cfg_value { } #[derive(LintDiagnostic)] -#[diag(lint_unexpected_builtin_cfg)] -#[note(lint_controlled_by)] -#[note(lint_incoherent)] -pub(crate) struct UnexpectedBuiltinCfg { - pub(crate) cfg: String, - pub(crate) cfg_name: Symbol, - pub(crate) controlled_by: &'static str, -} - -#[derive(LintDiagnostic)] #[diag(lint_macro_use_deprecated)] #[help] pub(crate) struct MacroUseDeprecated; @@ -2691,14 +2649,6 @@ pub(crate) struct IllFormedAttributeInput { } #[derive(LintDiagnostic)] 
-pub(crate) enum InnerAttributeUnstable { - #[diag(lint_inner_macro_attribute_unstable)] - InnerMacroAttribute, - #[diag(lint_custom_inner_attribute_unstable)] - CustomInnerAttribute, -} - -#[derive(LintDiagnostic)] #[diag(lint_unknown_diagnostic_attribute)] pub(crate) struct UnknownDiagnosticAttribute { #[subdiagnostic] @@ -2769,7 +2719,7 @@ pub(crate) struct AbsPathWithModuleSugg { pub(crate) struct ProcMacroDeriveResolutionFallback { #[label] pub span: Span, - pub ns: Namespace, + pub ns_descr: &'static str, pub ident: Ident, } @@ -2891,14 +2841,6 @@ pub(crate) struct PatternsInFnsWithoutBodySub { } #[derive(LintDiagnostic)] -#[diag(lint_extern_without_abi)] -pub(crate) struct MissingAbi { - #[suggestion(code = "extern {default_abi}", applicability = "machine-applicable")] - pub span: Span, - pub default_abi: ExternAbi, -} - -#[derive(LintDiagnostic)] #[diag(lint_legacy_derive_helpers)] pub(crate) struct LegacyDeriveHelpers { #[label] @@ -2938,9 +2880,10 @@ pub(crate) struct RawPrefix { pub(crate) struct UnusedBuiltinAttribute { #[note] pub invoc_span: Span, - pub attr_name: Symbol, pub macro_name: String, + #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")] + pub attr_span: Span, } #[derive(LintDiagnostic)] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index 2dd3425e66c..dca22b986ff 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -5,7 +5,7 @@ use rustc_hir::{Body, HirId, Item, ItemKind, Node, Path, TyKind}; use rustc_middle::ty::TyCtxt; use rustc_session::{declare_lint, impl_lint_pass}; use rustc_span::def_id::{DefId, LOCAL_CRATE}; -use rustc_span::{ExpnKind, MacroKind, Span, kw, sym}; +use rustc_span::{ExpnKind, Span, kw, sym}; use crate::lints::{NonLocalDefinitionsCargoUpdateNote, NonLocalDefinitionsDiag}; use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent}; @@ -240,7 +240,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { }, ) } - ItemKind::Macro(_, _macro, MacroKind::Bang) + ItemKind::Macro(_, _macro, _kinds) if cx.tcx.has_attr(item.owner_id.def_id, sym::macro_export) => { cx.emit_span_lint( diff --git a/compiler/rustc_lint/src/nonstandard_style.rs b/compiler/rustc_lint/src/nonstandard_style.rs index 8fafaa33d0c..65075cfecfa 100644 --- a/compiler/rustc_lint/src/nonstandard_style.rs +++ b/compiler/rustc_lint/src/nonstandard_style.rs @@ -5,7 +5,7 @@ use rustc_hir::attrs::{AttributeKind, ReprAttr}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{FnKind, Visitor}; -use rustc_hir::{AttrArgs, AttrItem, Attribute, GenericParamKind, PatExprKind, PatKind, find_attr}; +use rustc_hir::{Attribute, GenericParamKind, PatExprKind, PatKind, find_attr}; use rustc_middle::hir::nested_filter::All; use rustc_middle::ty; use rustc_session::config::CrateType; @@ -343,35 +343,27 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase { let crate_ident = if let Some(name) = &cx.tcx.sess.opts.crate_name { Some(Ident::from_str(name)) } else { - ast::attr::find_by_name(cx.tcx.hir_attrs(hir::CRATE_HIR_ID), sym::crate_name).and_then( - |attr| { - if let Attribute::Unparsed(n) = attr - && let AttrItem { args: AttrArgs::Eq { eq_span: _, expr: lit }, .. } = - n.as_ref() - && let ast::LitKind::Str(name, ..) = lit.kind - { - // Discard the double quotes surrounding the literal. 
- let sp = cx - .sess() - .source_map() - .span_to_snippet(lit.span) - .ok() - .and_then(|snippet| { - let left = snippet.find('"')?; - let right = snippet.rfind('"').map(|pos| snippet.len() - pos)?; - - Some( - lit.span - .with_lo(lit.span.lo() + BytePos(left as u32 + 1)) - .with_hi(lit.span.hi() - BytePos(right as u32)), - ) - }) - .unwrap_or(lit.span); - - Some(Ident::new(name, sp)) - } else { - None - } + find_attr!(cx.tcx.hir_attrs(hir::CRATE_HIR_ID), AttributeKind::CrateName{name, name_span,..} => (name, name_span)).map( + |(&name, &span)| { + // Discard the double quotes surrounding the literal. + let sp = cx + .sess() + .source_map() + .span_to_snippet(span) + .ok() + .and_then(|snippet| { + let left = snippet.find('"')?; + let right = snippet.rfind('"').map(|pos| snippet.len() - pos)?; + + Some( + span + .with_lo(span.lo() + BytePos(left as u32 + 1)) + .with_hi(span.hi() - BytePos(right as u32)), + ) + }) + .unwrap_or(span); + + Ident::new(name, sp) }, ) }; diff --git a/compiler/rustc_lint/src/pass_by_value.rs b/compiler/rustc_lint/src/pass_by_value.rs index 4f65acd8001..29006732aad 100644 --- a/compiler/rustc_lint/src/pass_by_value.rs +++ b/compiler/rustc_lint/src/pass_by_value.rs @@ -24,10 +24,8 @@ impl<'tcx> LateLintPass<'tcx> for PassByValue { fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx hir::Ty<'tcx, AmbigArg>) { match &ty.kind { TyKind::Ref(_, hir::MutTy { ty: inner_ty, mutbl: hir::Mutability::Not }) => { - if let Some(impl_did) = cx.tcx.impl_of_assoc(ty.hir_id.owner.to_def_id()) { - if cx.tcx.impl_trait_ref(impl_did).is_some() { - return; - } + if cx.tcx.trait_impl_of_assoc(ty.hir_id.owner.to_def_id()).is_some() { + return; } if let Some(t) = path_for_pass_by_value(cx, inner_ty) { cx.emit_span_lint( diff --git a/compiler/rustc_lint/src/transmute.rs b/compiler/rustc_lint/src/transmute.rs index bc1d4587d07..98510eea73b 100644 --- a/compiler/rustc_lint/src/transmute.rs +++ b/compiler/rustc_lint/src/transmute.rs @@ -1,3 +1,4 @@ +use rustc_ast::LitKind; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::LocalDefId; @@ -7,6 +8,7 @@ use rustc_middle::ty::{self, Ty}; use rustc_session::{declare_lint, impl_lint_pass}; use rustc_span::sym; +use crate::lints::{IntegerToPtrTransmutes, IntegerToPtrTransmutesSuggestion}; use crate::{LateContext, LateLintPass}; declare_lint! { @@ -67,9 +69,44 @@ declare_lint! { "detects transmutes that can also be achieved by other operations" } +declare_lint! { + /// The `integer_to_ptr_transmutes` lint detects integer to pointer + /// transmutes where the resulting pointers are undefined behavior to dereference. + /// + /// ### Example + /// + /// ```rust + /// fn foo(a: usize) -> *const u8 { + /// unsafe { + /// std::mem::transmute::<usize, *const u8>(a) + /// } + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Any attempt to use the resulting pointers are undefined behavior as the resulting + /// pointers won't have any provenance. + /// + /// Alternatively, [`std::ptr::with_exposed_provenance`] should be used, as they do not + /// carry the provenance requirement. If wanting to create pointers without provenance + /// [`std::ptr::without_provenance`] should be used instead. + /// + /// See [`std::mem::transmute`] in the reference for more details. 
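To make the alternatives named in the explanation concrete, here is a small, self-contained sketch (illustrative only, not part of the diff) of the rewrite the lint's suggestion points towards: recovering a pointer through `expose_provenance`/`with_exposed_provenance` rather than `transmute`, and using `without_provenance` for addresses that are never dereferenced.

```rust
fn main() {
    let x: u8 = 7;
    // Expose the address of a live allocation so it can be reconstituted later.
    let addr: usize = (&x as *const u8).expose_provenance();

    // Instead of `transmute::<usize, *const u8>(addr)`, rebuild the pointer
    // with explicitly exposed provenance, as the suggestion proposes:
    let p: *const u8 = std::ptr::with_exposed_provenance(addr);
    assert_eq!(unsafe { *p }, 7);

    // For an address that will never be dereferenced (e.g. a sentinel),
    // `without_provenance` states that intent directly:
    let sentinel: *const u8 = std::ptr::without_provenance(0x1000);
    assert!(!sentinel.is_null());
}
```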
+ /// + /// [`std::mem::transmute`]: https://doc.rust-lang.org/std/mem/fn.transmute.html + /// [`std::ptr::with_exposed_provenance`]: https://doc.rust-lang.org/std/ptr/fn.with_exposed_provenance.html + /// [`std::ptr::without_provenance`]: https://doc.rust-lang.org/std/ptr/fn.without_provenance.html + pub INTEGER_TO_PTR_TRANSMUTES, + Warn, + "detects integer to pointer transmutes", +} + pub(crate) struct CheckTransmutes; -impl_lint_pass!(CheckTransmutes => [PTR_TO_INTEGER_TRANSMUTE_IN_CONSTS, UNNECESSARY_TRANSMUTES]); +impl_lint_pass!(CheckTransmutes => [PTR_TO_INTEGER_TRANSMUTE_IN_CONSTS, UNNECESSARY_TRANSMUTES, INTEGER_TO_PTR_TRANSMUTES]); impl<'tcx> LateLintPass<'tcx> for CheckTransmutes { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { @@ -94,9 +131,68 @@ impl<'tcx> LateLintPass<'tcx> for CheckTransmutes { check_ptr_transmute_in_const(cx, expr, body_owner_def_id, const_context, src, dst); check_unnecessary_transmute(cx, expr, callee, arg, const_context, src, dst); + check_int_to_ptr_transmute(cx, expr, arg, src, dst); } } +/// Check for transmutes from integer to pointers (*const/*mut and &/&mut). +/// +/// Using the resulting pointers would be undefined behavior. +fn check_int_to_ptr_transmute<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx hir::Expr<'tcx>, + arg: &'tcx hir::Expr<'tcx>, + src: Ty<'tcx>, + dst: Ty<'tcx>, +) { + if !matches!(src.kind(), ty::Uint(_) | ty::Int(_)) { + return; + } + let (ty::Ref(_, inner_ty, mutbl) | ty::RawPtr(inner_ty, mutbl)) = dst.kind() else { + return; + }; + // bail-out if the argument is literal 0 as we have other lints for those cases + if matches!(arg.kind, hir::ExprKind::Lit(hir::Lit { node: LitKind::Int(v, _), .. }) if v == 0) { + return; + } + // bail-out if the inner type is a ZST + let Ok(layout_inner_ty) = cx.tcx.layout_of(cx.typing_env().as_query_input(*inner_ty)) else { + return; + }; + if layout_inner_ty.is_1zst() { + return; + } + + let suffix = if mutbl.is_mut() { "_mut" } else { "" }; + cx.tcx.emit_node_span_lint( + INTEGER_TO_PTR_TRANSMUTES, + expr.hir_id, + expr.span, + IntegerToPtrTransmutes { + suggestion: if layout_inner_ty.is_sized() { + Some(if dst.is_ref() { + IntegerToPtrTransmutesSuggestion::ToRef { + dst: *inner_ty, + suffix, + ref_mutbl: mutbl.prefix_str(), + start_call: expr.span.shrink_to_lo().until(arg.span), + } + } else { + IntegerToPtrTransmutesSuggestion::ToPtr { + dst: *inner_ty, + suffix, + start_call: expr.span.shrink_to_lo().until(arg.span), + } + }) + } else { + // We can't suggest using `with_exposed_provenance` for unsized type + // so don't suggest anything. + None + }, + }, + ); +} + /// Check for transmutes that exhibit undefined behavior. /// For example, transmuting pointers to integers in a const context. 
/// diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index b0afc333ebe..f8a692313f0 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -1904,10 +1904,9 @@ impl InvalidAtomicOrdering { if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind && recognized_names.contains(&method_path.ident.name) && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_did) = cx.tcx.impl_of_assoc(m_def_id) - && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() // skip extension traits, only lint functions from the standard library - && cx.tcx.trait_id_of_impl(impl_did).is_none() + && let Some(impl_did) = cx.tcx.inherent_impl_of_assoc(m_def_id) + && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() && let parent = cx.tcx.parent(adt.did()) && cx.tcx.is_diagnostic_item(sym::atomic_mod, parent) && ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did())) diff --git a/compiler/rustc_lint_defs/Cargo.toml b/compiler/rustc_lint_defs/Cargo.toml index 9ab350daf69..c8201d5ea8c 100644 --- a/compiler/rustc_lint_defs/Cargo.toml +++ b/compiler/rustc_lint_defs/Cargo.toml @@ -5,11 +5,10 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_messages = { path = "../rustc_error_messages" } -rustc_hir = { path = "../rustc_hir" } +rustc_hir_id = { path = "../rustc_hir_id" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index bee80335f74..97aa1065967 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -2301,18 +2301,18 @@ declare_lint! { declare_lint! { /// The `inline_no_sanitize` lint detects incompatible use of - /// [`#[inline(always)]`][inline] and [`#[no_sanitize(...)]`][no_sanitize]. + /// [`#[inline(always)]`][inline] and [`#[sanitize(xyz = "off")]`][sanitize]. /// /// [inline]: https://doc.rust-lang.org/reference/attributes/codegen.html#the-inline-attribute - /// [no_sanitize]: https://doc.rust-lang.org/nightly/unstable-book/language-features/no-sanitize.html + /// [sanitize]: https://doc.rust-lang.org/nightly/unstable-book/language-features/no-sanitize.html /// /// ### Example /// /// ```rust - /// #![feature(no_sanitize)] + /// #![feature(sanitize)] /// /// #[inline(always)] - /// #[no_sanitize(address)] + /// #[sanitize(address = "off")] /// fn x() {} /// /// fn main() { @@ -2325,11 +2325,11 @@ declare_lint! { /// ### Explanation /// /// The use of the [`#[inline(always)]`][inline] attribute prevents the - /// the [`#[no_sanitize(...)]`][no_sanitize] attribute from working. + /// the [`#[sanitize(xyz = "off")]`][sanitize] attribute from working. /// Consider temporarily removing `inline` attribute. pub INLINE_NO_SANITIZE, Warn, - "detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`", + r#"detects incompatible use of `#[inline(always)]` and `#[sanitize(... = "off")]`"#, } declare_lint! { @@ -5105,3 +5105,36 @@ declare_lint! { report_in_deps: true, }; } + +declare_lint! { + /// The `tail_call_track_caller` lint detects usage of `become` attempting to tail call + /// a function marked with `#[track_caller]`. 
+ /// + /// ### Example + /// + /// ```rust + /// #![feature(explicit_tail_calls)] + /// #![expect(incomplete_features)] + /// + /// #[track_caller] + /// fn f() {} + /// + /// fn g() { + /// become f(); + /// } + /// + /// g(); + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Due to implementation details of tail calls and `#[track_caller]` attribute, calls to + /// functions marked with `#[track_caller]` cannot become tail calls. As such using `become` + /// is no different than a normal call (except for changes in drop order). + pub TAIL_CALL_TRACK_CALLER, + Warn, + "detects tail calls of functions marked with `#[track_caller]`", + @feature_gate = explicit_tail_calls; +} diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs index dc5ea3922f1..2e84233e5a5 100644 --- a/compiler/rustc_lint_defs/src/lib.rs +++ b/compiler/rustc_lint_defs/src/lib.rs @@ -1,16 +1,15 @@ -use rustc_abi::ExternAbi; +use std::borrow::Cow; + use rustc_ast::AttrId; use rustc_ast::attr::AttributeExt; -use rustc_ast::node_id::NodeId; -use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::stable_hasher::{ HashStable, StableCompare, StableHasher, ToStableHashKey, }; -use rustc_error_messages::{DiagMessage, MultiSpan}; -use rustc_hir::def::Namespace; -use rustc_hir::def_id::DefPathHash; -use rustc_hir::{HashStableContext, HirId, ItemLocalId}; +use rustc_error_messages::{DiagArgValue, DiagMessage, IntoDiagArg, MultiSpan}; +use rustc_hir_id::{HashStableContext, HirId, ItemLocalId}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; +use rustc_span::def_id::DefPathHash; pub use rustc_span::edition::Edition; use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol, sym}; use serde::{Deserialize, Serialize}; @@ -138,7 +137,7 @@ impl LintExpectationId { } } -impl<HCX: rustc_hir::HashStableContext> HashStable<HCX> for LintExpectationId { +impl<HCX: HashStableContext> HashStable<HCX> for LintExpectationId { #[inline] fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { match self { @@ -156,7 +155,7 @@ impl<HCX: rustc_hir::HashStableContext> HashStable<HCX> for LintExpectationId { } } -impl<HCX: rustc_hir::HashStableContext> ToStableHashKey<HCX> for LintExpectationId { +impl<HCX: HashStableContext> ToStableHashKey<HCX> for LintExpectationId { type KeyType = (DefPathHash, ItemLocalId, u16, u16); #[inline] @@ -297,6 +296,12 @@ impl Level { } } +impl IntoDiagArg for Level { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.to_cmd_flag())) + } +} + /// Specification of a single lint. 
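A minimal sketch of what the new `IntoDiagArg` impl for `Level` enables, assuming the usual behaviour of rustc's diagnostic derives (the struct name, fluent slug, and field are hypothetical and not part of this change): plain fields of a `#[derive(LintDiagnostic)]` struct are emitted as message arguments via `IntoDiagArg`, so a `Level` can now be carried directly and rendered through its command-line flag form.

```rust
use rustc_lint_defs::Level;
use rustc_macros::LintDiagnostic;

// Hypothetical diagnostic; only the `level: Level` field is the point here.
#[derive(LintDiagnostic)]
#[diag(lint_example_level_note)] // assumed fluent slug, for illustration
pub(crate) struct ExampleLevelNote {
    // Fields without a subdiagnostic attribute become diagnostic arguments
    // via `IntoDiagArg`; with the impl above, `Level` renders as its flag.
    pub level: Level,
}
```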
#[derive(Copy, Clone, Debug)] pub struct Lint { @@ -617,7 +622,7 @@ pub enum BuiltinLintDiag { AbsPathWithModule(Span), ProcMacroDeriveResolutionFallback { span: Span, - ns: Namespace, + ns_descr: &'static str, ident: Ident, }, MacroExpandedMacroExportsAccessedByAbsolutePaths(Span), @@ -641,12 +646,12 @@ pub enum BuiltinLintDiag { path: String, since_kind: DeprecatedSinceKind, }, - MissingAbi(Span, ExternAbi), UnusedDocComment(Span), UnusedBuiltinAttribute { attr_name: Symbol, macro_name: String, invoc_span: Span, + attr_span: Span, }, PatternsInFnsWithoutBody { span: Span, @@ -663,14 +668,6 @@ pub enum BuiltinLintDiag { is_string: bool, suggestion: Span, }, - HiddenUnicodeCodepoints { - label: String, - count: usize, - span_label: Span, - labels: Option<Vec<(char, Span)>>, - escape: bool, - spans: Vec<(char, Span)>, - }, TrailingMacro(bool, Ident), BreakWithLabelAndLoop(Span), UnicodeTextFlow(Span, String), @@ -795,68 +792,11 @@ pub enum BuiltinLintDiag { suggestions: Vec<String>, docs: Option<&'static str>, }, - InnerAttributeUnstable { - is_macro: bool, - }, OutOfScopeMacroCalls { span: Span, path: String, location: String, }, - UnexpectedBuiltinCfg { - cfg: String, - cfg_name: Symbol, - controlled_by: &'static str, - }, -} - -/// Lints that are buffered up early on in the `Session` before the -/// `LintLevels` is calculated. -#[derive(Debug)] -pub struct BufferedEarlyLint { - /// The span of code that we are linting on. - pub span: Option<MultiSpan>, - - /// The `NodeId` of the AST node that generated the lint. - pub node_id: NodeId, - - /// A lint Id that can be passed to - /// `rustc_lint::early::EarlyContextAndPass::check_id`. - pub lint_id: LintId, - - /// Customization of the `Diag<'_>` for the lint. - pub diagnostic: BuiltinLintDiag, -} - -#[derive(Default, Debug)] -pub struct LintBuffer { - pub map: FxIndexMap<NodeId, Vec<BufferedEarlyLint>>, -} - -impl LintBuffer { - pub fn add_early_lint(&mut self, early_lint: BufferedEarlyLint) { - self.map.entry(early_lint.node_id).or_default().push(early_lint); - } - - pub fn take(&mut self, id: NodeId) -> Vec<BufferedEarlyLint> { - // FIXME(#120456) - is `swap_remove` correct? 
- self.map.swap_remove(&id).unwrap_or_default() - } - - pub fn buffer_lint( - &mut self, - lint: &'static Lint, - node_id: NodeId, - span: impl Into<MultiSpan>, - diagnostic: BuiltinLintDiag, - ) { - self.add_early_lint(BufferedEarlyLint { - lint_id: LintId::of(lint), - node_id, - span: Some(span.into()), - diagnostic, - }); - } } pub type RegisteredTools = FxIndexSet<Ident>; diff --git a/compiler/rustc_llvm/build.rs b/compiler/rustc_llvm/build.rs index 1394edcee6b..d01f79dcade 100644 --- a/compiler/rustc_llvm/build.rs +++ b/compiler/rustc_llvm/build.rs @@ -226,7 +226,6 @@ fn main() { rerun_if_changed_anything_in_dir(Path::new("llvm-wrapper")); cfg.file("llvm-wrapper/PassWrapper.cpp") .file("llvm-wrapper/RustWrapper.cpp") - .file("llvm-wrapper/ArchiveWrapper.cpp") .file("llvm-wrapper/CoverageMappingWrapper.cpp") .file("llvm-wrapper/SymbolWrapper.cpp") .file("llvm-wrapper/Linker.cpp") diff --git a/compiler/rustc_llvm/llvm-wrapper/ArchiveWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/ArchiveWrapper.cpp deleted file mode 100644 index feac6a5649c..00000000000 --- a/compiler/rustc_llvm/llvm-wrapper/ArchiveWrapper.cpp +++ /dev/null @@ -1,208 +0,0 @@ -#include "LLVMWrapper.h" - -#include "llvm/Object/Archive.h" -#include "llvm/Object/ArchiveWriter.h" -#include "llvm/Support/Path.h" - -using namespace llvm; -using namespace llvm::object; - -struct RustArchiveMember { - const char *Filename; - const char *Name; - Archive::Child Child; - - RustArchiveMember() - : Filename(nullptr), Name(nullptr), Child(nullptr, nullptr, nullptr) {} - ~RustArchiveMember() {} -}; - -struct RustArchiveIterator { - bool First; - Archive::child_iterator Cur; - Archive::child_iterator End; - std::unique_ptr<Error> Err; - - RustArchiveIterator(Archive::child_iterator Cur, Archive::child_iterator End, - std::unique_ptr<Error> Err) - : First(true), Cur(Cur), End(End), Err(std::move(Err)) {} -}; - -enum class LLVMRustArchiveKind { - GNU, - BSD, - DARWIN, - COFF, - AIX_BIG, -}; - -static Archive::Kind fromRust(LLVMRustArchiveKind Kind) { - switch (Kind) { - case LLVMRustArchiveKind::GNU: - return Archive::K_GNU; - case LLVMRustArchiveKind::BSD: - return Archive::K_BSD; - case LLVMRustArchiveKind::DARWIN: - return Archive::K_DARWIN; - case LLVMRustArchiveKind::COFF: - return Archive::K_COFF; - case LLVMRustArchiveKind::AIX_BIG: - return Archive::K_AIXBIG; - default: - report_fatal_error("Bad ArchiveKind."); - } -} - -typedef OwningBinary<Archive> *LLVMRustArchiveRef; -typedef RustArchiveMember *LLVMRustArchiveMemberRef; -typedef Archive::Child *LLVMRustArchiveChildRef; -typedef Archive::Child const *LLVMRustArchiveChildConstRef; -typedef RustArchiveIterator *LLVMRustArchiveIteratorRef; - -extern "C" LLVMRustArchiveRef LLVMRustOpenArchive(char *Path) { - ErrorOr<std::unique_ptr<MemoryBuffer>> BufOr = MemoryBuffer::getFile( - Path, /*IsText*/ false, /*RequiresNullTerminator=*/false); - if (!BufOr) { - LLVMRustSetLastError(BufOr.getError().message().c_str()); - return nullptr; - } - - Expected<std::unique_ptr<Archive>> ArchiveOr = - Archive::create(BufOr.get()->getMemBufferRef()); - - if (!ArchiveOr) { - LLVMRustSetLastError(toString(ArchiveOr.takeError()).c_str()); - return nullptr; - } - - OwningBinary<Archive> *Ret = new OwningBinary<Archive>( - std::move(ArchiveOr.get()), std::move(BufOr.get())); - - return Ret; -} - -extern "C" void LLVMRustDestroyArchive(LLVMRustArchiveRef RustArchive) { - delete RustArchive; -} - -extern "C" LLVMRustArchiveIteratorRef -LLVMRustArchiveIteratorNew(LLVMRustArchiveRef RustArchive) { - Archive 
*Archive = RustArchive->getBinary(); - std::unique_ptr<Error> Err = std::make_unique<Error>(Error::success()); - auto Cur = Archive->child_begin(*Err); - if (*Err) { - LLVMRustSetLastError(toString(std::move(*Err)).c_str()); - return nullptr; - } - auto End = Archive->child_end(); - return new RustArchiveIterator(Cur, End, std::move(Err)); -} - -extern "C" LLVMRustArchiveChildConstRef -LLVMRustArchiveIteratorNext(LLVMRustArchiveIteratorRef RAI) { - if (RAI->Cur == RAI->End) - return nullptr; - - // Advancing the iterator validates the next child, and this can - // uncover an error. LLVM requires that we check all Errors, - // so we only advance the iterator if we actually need to fetch - // the next child. - // This means we must not advance the iterator in the *first* call, - // but instead advance it *before* fetching the child in all later calls. - if (!RAI->First) { - ++RAI->Cur; - if (*RAI->Err) { - LLVMRustSetLastError(toString(std::move(*RAI->Err)).c_str()); - return nullptr; - } - } else { - RAI->First = false; - } - - if (RAI->Cur == RAI->End) - return nullptr; - - const Archive::Child &Child = *RAI->Cur.operator->(); - Archive::Child *Ret = new Archive::Child(Child); - - return Ret; -} - -extern "C" void LLVMRustArchiveChildFree(LLVMRustArchiveChildRef Child) { - delete Child; -} - -extern "C" void LLVMRustArchiveIteratorFree(LLVMRustArchiveIteratorRef RAI) { - delete RAI; -} - -extern "C" const char * -LLVMRustArchiveChildName(LLVMRustArchiveChildConstRef Child, size_t *Size) { - Expected<StringRef> NameOrErr = Child->getName(); - if (!NameOrErr) { - // rustc_codegen_llvm currently doesn't use this error string, but it might - // be useful in the future, and in the meantime this tells LLVM that the - // error was not ignored and that it shouldn't abort the process. - LLVMRustSetLastError(toString(NameOrErr.takeError()).c_str()); - return nullptr; - } - StringRef Name = NameOrErr.get(); - *Size = Name.size(); - return Name.data(); -} - -extern "C" LLVMRustArchiveMemberRef -LLVMRustArchiveMemberNew(char *Filename, char *Name, - LLVMRustArchiveChildRef Child) { - RustArchiveMember *Member = new RustArchiveMember; - Member->Filename = Filename; - Member->Name = Name; - if (Child) - Member->Child = *Child; - return Member; -} - -extern "C" void LLVMRustArchiveMemberFree(LLVMRustArchiveMemberRef Member) { - delete Member; -} - -extern "C" LLVMRustResult LLVMRustWriteArchive( - char *Dst, size_t NumMembers, const LLVMRustArchiveMemberRef *NewMembers, - bool WriteSymbtab, LLVMRustArchiveKind RustKind, bool isEC) { - - std::vector<NewArchiveMember> Members; - auto Kind = fromRust(RustKind); - - for (size_t I = 0; I < NumMembers; I++) { - auto Member = NewMembers[I]; - assert(Member->Name); - if (Member->Filename) { - Expected<NewArchiveMember> MOrErr = - NewArchiveMember::getFile(Member->Filename, true); - if (!MOrErr) { - LLVMRustSetLastError(toString(MOrErr.takeError()).c_str()); - return LLVMRustResult::Failure; - } - MOrErr->MemberName = sys::path::filename(MOrErr->MemberName); - Members.push_back(std::move(*MOrErr)); - } else { - Expected<NewArchiveMember> MOrErr = - NewArchiveMember::getOldMember(Member->Child, true); - if (!MOrErr) { - LLVMRustSetLastError(toString(MOrErr.takeError()).c_str()); - return LLVMRustResult::Failure; - } - Members.push_back(std::move(*MOrErr)); - } - } - - auto SymtabMode = WriteSymbtab ? 
SymtabWritingMode::NormalSymtab - : SymtabWritingMode::NoSymtab; - auto Result = - writeArchive(Dst, Members, SymtabMode, Kind, true, false, nullptr, isEC); - if (!Result) - return LLVMRustResult::Success; - LLVMRustSetLastError(toString(std::move(Result)).c_str()); - - return LLVMRustResult::Failure; -} diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index 588d867bbbf..361a5f76551 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -242,7 +242,7 @@ enum class LLVMRustAttributeKind { MinSize = 4, Naked = 5, NoAlias = 6, - NoCapture = 7, + CapturesAddress = 7, NoInline = 8, NonNull = 9, NoRedZone = 10, @@ -277,6 +277,8 @@ enum class LLVMRustAttributeKind { FnRetThunkExtern = 41, Writable = 42, DeadOnUnwind = 43, + DeadOnReturn = 44, + CapturesReadOnly = 45, }; static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) { @@ -295,12 +297,6 @@ static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) { return Attribute::Naked; case LLVMRustAttributeKind::NoAlias: return Attribute::NoAlias; - case LLVMRustAttributeKind::NoCapture: -#if LLVM_VERSION_GE(21, 0) - report_fatal_error("NoCapture doesn't exist in LLVM 21"); -#else - return Attribute::NoCapture; -#endif case LLVMRustAttributeKind::NoCfCheck: return Attribute::NoCfCheck; case LLVMRustAttributeKind::NoInline: @@ -369,6 +365,15 @@ static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) { return Attribute::Writable; case LLVMRustAttributeKind::DeadOnUnwind: return Attribute::DeadOnUnwind; + case LLVMRustAttributeKind::DeadOnReturn: +#if LLVM_VERSION_GE(21, 0) + return Attribute::DeadOnReturn; +#else + report_fatal_error("DeadOnReturn attribute requires LLVM 21 or later"); +#endif + case LLVMRustAttributeKind::CapturesAddress: + case LLVMRustAttributeKind::CapturesReadOnly: + report_fatal_error("Should be handled separately"); } report_fatal_error("bad LLVMRustAttributeKind"); } @@ -419,9 +424,14 @@ extern "C" void LLVMRustEraseInstFromParent(LLVMValueRef Instr) { extern "C" LLVMAttributeRef LLVMRustCreateAttrNoValue(LLVMContextRef C, LLVMRustAttributeKind RustAttr) { #if LLVM_VERSION_GE(21, 0) - // LLVM 21 replaced the NoCapture attribute with Captures(none). - if (RustAttr == LLVMRustAttributeKind::NoCapture) { - return wrap(Attribute::getWithCaptureInfo(*unwrap(C), CaptureInfo::none())); + if (RustAttr == LLVMRustAttributeKind::CapturesAddress) { + return wrap(Attribute::getWithCaptureInfo( + *unwrap(C), CaptureInfo(CaptureComponents::Address))); + } + if (RustAttr == LLVMRustAttributeKind::CapturesReadOnly) { + return wrap(Attribute::getWithCaptureInfo( + *unwrap(C), CaptureInfo(CaptureComponents::Address | + CaptureComponents::ReadProvenance))); } #endif return wrap(Attribute::get(*unwrap(C), fromRust(RustAttr))); @@ -473,6 +483,9 @@ extern "C" LLVMAttributeRef LLVMRustCreateRangeAttribute(LLVMContextRef C, unsigned NumBits, const uint64_t LowerWords[], const uint64_t UpperWords[]) { + // FIXME(Zalathar): There appears to be no stable guarantee that C++ + // `AttrKind` values correspond directly to the `unsigned KindID` values + // accepted by LLVM-C API functions, though in practice they currently do. 
return LLVMCreateConstantRangeAttribute(C, Attribute::Range, NumBits, LowerWords, UpperWords); } @@ -1453,60 +1466,6 @@ LLVMRustGetDiagInfoKind(LLVMDiagnosticInfoRef DI) { return toRust((DiagnosticKind)unwrap(DI)->getKind()); } -// This is kept distinct from LLVMGetTypeKind, because when -// a new type kind is added, the Rust-side enum must be -// updated or UB will result. -extern "C" LLVMTypeKind LLVMRustGetTypeKind(LLVMTypeRef Ty) { - switch (unwrap(Ty)->getTypeID()) { - case Type::VoidTyID: - return LLVMVoidTypeKind; - case Type::HalfTyID: - return LLVMHalfTypeKind; - case Type::FloatTyID: - return LLVMFloatTypeKind; - case Type::DoubleTyID: - return LLVMDoubleTypeKind; - case Type::X86_FP80TyID: - return LLVMX86_FP80TypeKind; - case Type::FP128TyID: - return LLVMFP128TypeKind; - case Type::PPC_FP128TyID: - return LLVMPPC_FP128TypeKind; - case Type::LabelTyID: - return LLVMLabelTypeKind; - case Type::MetadataTyID: - return LLVMMetadataTypeKind; - case Type::IntegerTyID: - return LLVMIntegerTypeKind; - case Type::FunctionTyID: - return LLVMFunctionTypeKind; - case Type::StructTyID: - return LLVMStructTypeKind; - case Type::ArrayTyID: - return LLVMArrayTypeKind; - case Type::PointerTyID: - return LLVMPointerTypeKind; - case Type::FixedVectorTyID: - return LLVMVectorTypeKind; - case Type::TokenTyID: - return LLVMTokenTypeKind; - case Type::ScalableVectorTyID: - return LLVMScalableVectorTypeKind; - case Type::BFloatTyID: - return LLVMBFloatTypeKind; - case Type::X86_AMXTyID: - return LLVMX86_AMXTypeKind; - default: { - std::string error; - auto stream = llvm::raw_string_ostream(error); - stream << "Rust does not support the TypeID: " << unwrap(Ty)->getTypeID() - << " for the type: " << *unwrap(Ty); - stream.flush(); - report_fatal_error(error.c_str()); - } - } -} - DEFINE_SIMPLE_CONVERSION_FUNCTIONS(SMDiagnostic, LLVMSMDiagnosticRef) extern "C" LLVMSMDiagnosticRef LLVMRustGetSMDiagnostic(LLVMDiagnosticInfoRef DI, @@ -1986,29 +1945,3 @@ extern "C" void LLVMRustSetNoSanitizeHWAddress(LLVMValueRef Global) { MD.NoHWAddress = true; GV.setSanitizerMetadata(MD); } - -enum class LLVMRustTailCallKind { - None = 0, - Tail = 1, - MustTail = 2, - NoTail = 3 -}; - -extern "C" void LLVMRustSetTailCallKind(LLVMValueRef Call, - LLVMRustTailCallKind Kind) { - CallInst *CI = unwrap<CallInst>(Call); - switch (Kind) { - case LLVMRustTailCallKind::None: - CI->setTailCallKind(CallInst::TCK_None); - break; - case LLVMRustTailCallKind::Tail: - CI->setTailCallKind(CallInst::TCK_Tail); - break; - case LLVMRustTailCallKind::MustTail: - CI->setTailCallKind(CallInst::TCK_MustTail); - break; - case LLVMRustTailCallKind::NoTail: - CI->setTailCallKind(CallInst::TCK_NoTail); - break; - } -} diff --git a/compiler/rustc_log/Cargo.toml b/compiler/rustc_log/Cargo.toml index c673d51a1d4..e9d7b4723f8 100644 --- a/compiler/rustc_log/Cargo.toml +++ b/compiler/rustc_log/Cargo.toml @@ -5,10 +5,10 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -tracing = "0.1.28" tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635 tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] } tracing-tree = "0.3.1" +tracing.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_macros/src/diagnostics/utils.rs b/compiler/rustc_macros/src/diagnostics/utils.rs index 060799e981d..c310b99d535 100644 --- a/compiler/rustc_macros/src/diagnostics/utils.rs +++ 
b/compiler/rustc_macros/src/diagnostics/utils.rs @@ -146,7 +146,7 @@ impl<'ty> FieldInnerTy<'ty> { }; let path = &ty_path.path; - let ty = path.segments.iter().last().unwrap(); + let ty = path.segments.last().unwrap(); let syn::PathArguments::AngleBracketed(bracketed) = &ty.arguments else { panic!("expected bracketed generic arguments"); }; diff --git a/compiler/rustc_macros/src/print_attribute.rs b/compiler/rustc_macros/src/print_attribute.rs index 9023520c750..0114e0dfde0 100644 --- a/compiler/rustc_macros/src/print_attribute.rs +++ b/compiler/rustc_macros/src/print_attribute.rs @@ -4,7 +4,7 @@ use syn::spanned::Spanned; use syn::{Data, Fields, Ident}; use synstructure::Structure; -fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, TokenStream) { +fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream) { let string_name = name.to_string(); let mut disps = vec![quote! {let mut __printed_anything = false;}]; @@ -43,7 +43,6 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok #(#disps)* __p.word("}"); }, - quote! { true }, ) } Fields::Unnamed(fields_unnamed) => { @@ -76,10 +75,9 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok #(#disps)* __p.pclose(); }, - quote! { true }, ) } - Fields::Unit => (quote! {}, quote! { __p.word(#string_name) }, quote! { true }), + Fields::Unit => (quote! {}, quote! { __p.word(#string_name) }), } } @@ -89,51 +87,33 @@ pub(crate) fn print_attribute(input: Structure<'_>) -> TokenStream { }; // Must be applied to an enum type. - let (code, printed) = match &input.ast().data { + let code = match &input.ast().data { Data::Enum(e) => { - let (arms, printed) = e + let arms = e .variants .iter() .map(|x| { let ident = &x.ident; - let (pat, code, printed) = print_fields(ident, &x.fields); + let (pat, code) = print_fields(ident, &x.fields); - ( - quote! { - Self::#ident #pat => {#code} - }, - quote! { - Self::#ident #pat => {#printed} - }, - ) + quote! { + Self::#ident #pat => {#code} + } }) - .unzip::<_, _, Vec<_>, Vec<_>>(); + .collect::<Vec<_>>(); - ( - quote! { - match self { - #(#arms)* - } - }, - quote! { - match self { - #(#printed)* - } - }, - ) + quote! { + match self { + #(#arms)* + } + } } Data::Struct(s) => { - let (pat, code, printed) = print_fields(&input.ast().ident, &s.fields); - ( - quote! { - let Self #pat = self; - #code - }, - quote! { - let Self #pat = self; - #printed - }, - ) + let (pat, code) = print_fields(&input.ast().ident, &s.fields); + quote! { + let Self #pat = self; + #code + } } Data::Union(u) => { return span_error(u.union_token.span(), "can't derive PrintAttribute on unions"); @@ -144,7 +124,7 @@ pub(crate) fn print_attribute(input: Structure<'_>) -> TokenStream { input.gen_impl(quote! 
{ #[allow(unused)] gen impl PrintAttribute for @Self { - fn should_render(&self) -> bool { #printed } + fn should_render(&self) -> bool { true } fn print_attribute(&self, __p: &mut rustc_ast_pretty::pp::Printer) { #code } } }) diff --git a/compiler/rustc_metadata/Cargo.toml b/compiler/rustc_metadata/Cargo.toml index 1b40d9f684e..4bb9e49ccce 100644 --- a/compiler/rustc_metadata/Cargo.toml +++ b/compiler/rustc_metadata/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true libloading = "0.8.0" odht = { version = "0.3.1", features = ["nightly"] } rustc_abi = { path = "../rustc_abi" } @@ -31,7 +31,7 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } tempfile = "3.7.1" -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [target.'cfg(target_os = "aix")'.dependencies] diff --git a/compiler/rustc_metadata/messages.ftl b/compiler/rustc_metadata/messages.ftl index 4d3e879a098..e104be2c466 100644 --- a/compiler/rustc_metadata/messages.ftl +++ b/compiler/rustc_metadata/messages.ftl @@ -1,6 +1,3 @@ -metadata_as_needed_compatibility = - linking modifier `as-needed` is only compatible with `dylib` and `framework` linking kinds - metadata_async_drop_types_in_dependency = found async drop types in dependency `{$extern_crate}`, but async_drop feature is disabled for `{$local_crate}` .help = if async drop type will be dropped in a crate without `feature(async_drop)`, sync Drop will be used @@ -11,9 +8,6 @@ metadata_bad_panic_strategy = metadata_binary_output_to_tty = option `-o` or `--emit` is used to write binary output type `metadata` to stdout, but stdout is a tty -metadata_bundle_needs_static = - linking modifier `bundle` is only compatible with `static` linking kind - metadata_cannot_find_crate = can't find crate for `{$crate_name}`{$add_info} @@ -60,10 +54,6 @@ metadata_crate_not_panic_runtime = metadata_dl_error = {$path}{$err} -metadata_empty_link_name = - link name must not be empty - .label = empty link name - metadata_empty_renaming_target = an empty renaming target was specified for library `{$lib_name}` @@ -108,15 +98,6 @@ metadata_full_metadata_not_found = metadata_global_alloc_required = no global memory allocator found but one is required; link to std or add `#[global_allocator]` to a static item that implements the GlobalAlloc trait -metadata_import_name_type_form = - import name type must be of the form `import_name_type = "string"` - -metadata_import_name_type_raw = - import name type can only be used with link kind `raw-dylib` - -metadata_import_name_type_x86 = - import name type is only supported on x86 - metadata_incompatible_panic_in_drop_strategy = the crate `{$crate_name}` is compiled with the panic-in-drop strategy `{$found_strategy}` which is incompatible with this crate's strategy of `{$desired_strategy}` @@ -143,15 +124,10 @@ metadata_incompatible_target_modifiers_r_missed = mixing `{$flag_name_prefixed}` will cause an ABI mismatch in crate `{$local_crate}` .note = `{$flag_name_prefixed}={$local_value}` in this crate is incompatible with unset `{$flag_name_prefixed}` in dependency `{$extern_crate}` .help = the `{$flag_name_prefixed}` flag modifies the ABI so Rust crates compiled with different values of this flag cannot be used together safely -metadata_incompatible_wasm_link = - `wasm_import_module` is incompatible with other arguments in `#[link]` attributes metadata_install_missing_components = maybe you 
need to install the missing components with: `rustup component add rust-src rustc-dev llvm-tools-preview` -metadata_invalid_link_modifier = - invalid linking modifier syntax, expected '+' or '-' prefix before one of: bundle, verbatim, whole-archive, as-needed - metadata_invalid_meta_files = found invalid metadata files for crate `{$crate_name}`{$add_info} @@ -164,67 +140,18 @@ metadata_lib_framework_apple = metadata_lib_required = crate `{$crate_name}` required to be available in {$kind} format, but was not found in this form -metadata_link_arg_unstable = - link kind `link-arg` is unstable - -metadata_link_cfg_form = - link cfg must be of the form `cfg(/* predicate */)` - -metadata_link_cfg_single_predicate = - link cfg must have a single predicate argument - -metadata_link_cfg_unstable = - link cfg is unstable - -metadata_link_framework_apple = - link kind `framework` is only supported on Apple targets - -metadata_link_kind_form = - link kind must be of the form `kind = "string"` - -metadata_link_modifiers_form = - link modifiers must be of the form `modifiers = "string"` - -metadata_link_name_form = - link name must be of the form `name = "string"` - metadata_link_ordinal_raw_dylib = `#[link_ordinal]` is only supported if link kind is `raw-dylib` -metadata_link_requires_name = - `#[link]` attribute requires a `name = "string"` argument - .label = missing `name` argument - metadata_missing_native_library = could not find native static library `{$libname}`, perhaps an -L flag is missing? metadata_multiple_candidates = multiple candidates for `{$flavor}` dependency `{$crate_name}` found -metadata_multiple_cfgs = - multiple `cfg` arguments in a single `#[link]` attribute - -metadata_multiple_import_name_type = - multiple `import_name_type` arguments in a single `#[link]` attribute - -metadata_multiple_kinds_in_link = - multiple `kind` arguments in a single `#[link]` attribute - -metadata_multiple_link_modifiers = - multiple `modifiers` arguments in a single `#[link]` attribute - -metadata_multiple_modifiers = - multiple `{$modifier}` modifiers in a single `modifiers` argument - -metadata_multiple_names_in_link = - multiple `name` arguments in a single `#[link]` attribute - metadata_multiple_renamings = multiple renamings were specified for library `{$lib_name}` -metadata_multiple_wasm_import = - multiple `wasm_import_module` arguments in a single `#[link]` attribute - metadata_newer_crate_version = found possibly newer version of crate `{$crate_name}`{$add_info} .note = perhaps that crate needs to be recompiled? 
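For context on the removed `link_cfg` messages: the `cfg(...)` clause they validated has the shape sketched below (nightly-only; the library name and predicate are made up for the example). The syntactic checks these messages backed appear to move out of `rustc_metadata`, while `relevant_lib` later in this diff still evaluates the predicate when deciding whether a native library applies.

```rust
#![feature(link_cfg)]

// Sketch of a conditional native-library request; only the attribute shape
// matters here, the library itself is fictional.
#[link(name = "examplelib", cfg(target_env = "musl"))]
unsafe extern "C" {
    fn example_native_fn();
}

fn main() {}
```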
@@ -263,15 +190,6 @@ metadata_prev_alloc_error_handler = metadata_prev_global_alloc = previous global allocator defined here -metadata_raw_dylib_elf_unstable = - link kind `raw-dylib` is unstable on ELF platforms - -metadata_raw_dylib_no_nul = - link name must not contain NUL characters if link kind is `raw-dylib` - -metadata_raw_dylib_only_windows = - link kind `raw-dylib` is only supported on Windows targets - metadata_raw_dylib_unsupported_abi = ABI not supported by `#[link(kind = "raw-dylib")]` on this architecture @@ -307,19 +225,6 @@ metadata_target_not_installed = metadata_two_panic_runtimes = cannot link together two panic runtimes: {$prev_name} and {$cur_name} -metadata_unexpected_link_arg = - unexpected `#[link]` argument, expected one of: name, kind, modifiers, cfg, wasm_import_module, import_name_type - -metadata_unknown_import_name_type = - unknown import name type `{$import_name_type}`, expected one of: decorated, noprefix, undecorated - -metadata_unknown_link_kind = - unknown link kind `{$kind}`, expected one of: static, dylib, framework, raw-dylib, link-arg - .label = unknown link kind - -metadata_unknown_link_modifier = - unknown linking modifier `{$modifier}`, expected one of: bundle, verbatim, whole-archive, as-needed - metadata_unknown_target_modifier_unsafe_allowed = unknown target modifier `{$flag_name}`, requested by `-Cunsafe-allow-abi-mismatch={$flag_name}` metadata_wasm_c_abi = diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs index 6bfb3769f24..5d776ea581d 100644 --- a/compiler/rustc_metadata/src/creader.rs +++ b/compiler/rustc_metadata/src/creader.rs @@ -32,6 +32,7 @@ use rustc_session::cstore::{CrateDepKind, CrateSource, ExternCrate, ExternCrateS use rustc_session::lint::{self, BuiltinLintDiag}; use rustc_session::output::validate_crate_name; use rustc_session::search_paths::PathKind; +use rustc_span::def_id::DefId; use rustc_span::edition::Edition; use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym}; use rustc_target::spec::{PanicStrategy, Target}; @@ -275,6 +276,10 @@ impl CStore { .filter_map(|(cnum, data)| data.as_deref().map(|data| (cnum, data))) } + pub fn all_proc_macro_def_ids(&self) -> impl Iterator<Item = DefId> { + self.iter_crate_data().flat_map(|(krate, data)| data.proc_macros_for_crate(krate, self)) + } + fn push_dependencies_in_postorder(&self, deps: &mut IndexSet<CrateNum>, cnum: CrateNum) { if !deps.contains(&cnum) { let data = self.get_crate_data(cnum); diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs index 0332dba1077..e5a4fd48353 100644 --- a/compiler/rustc_metadata/src/errors.rs +++ b/compiler/rustc_metadata/src/errors.rs @@ -84,187 +84,6 @@ pub struct IncompatiblePanicInDropStrategy { } #[derive(Diagnostic)] -#[diag(metadata_multiple_names_in_link)] -pub struct MultipleNamesInLink { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_multiple_kinds_in_link)] -pub struct MultipleKindsInLink { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_name_form)] -pub struct LinkNameForm { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_kind_form)] -pub struct LinkKindForm { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_modifiers_form)] -pub struct LinkModifiersForm { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_cfg_form)] -pub struct LinkCfgForm { - #[primary_span] - pub span: Span, 
-} - -#[derive(Diagnostic)] -#[diag(metadata_wasm_import_form)] -pub struct WasmImportForm { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_empty_link_name, code = E0454)] -pub struct EmptyLinkName { - #[primary_span] - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_framework_apple, code = E0455)] -pub struct LinkFrameworkApple { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_raw_dylib_only_windows, code = E0455)] -pub struct RawDylibOnlyWindows { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_unknown_link_kind, code = E0458)] -pub struct UnknownLinkKind<'a> { - #[primary_span] - #[label] - pub span: Span, - pub kind: &'a str, -} - -#[derive(Diagnostic)] -#[diag(metadata_multiple_link_modifiers)] -pub struct MultipleLinkModifiers { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_multiple_cfgs)] -pub struct MultipleCfgs { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_cfg_single_predicate)] -pub struct LinkCfgSinglePredicate { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_multiple_wasm_import)] -pub struct MultipleWasmImport { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_unexpected_link_arg)] -pub struct UnexpectedLinkArg { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_invalid_link_modifier)] -pub struct InvalidLinkModifier { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_multiple_modifiers)] -pub struct MultipleModifiers<'a> { - #[primary_span] - pub span: Span, - pub modifier: &'a str, -} - -#[derive(Diagnostic)] -#[diag(metadata_bundle_needs_static)] -pub struct BundleNeedsStatic { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_whole_archive_needs_static)] -pub struct WholeArchiveNeedsStatic { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_as_needed_compatibility)] -pub struct AsNeededCompatibility { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_unknown_link_modifier)] -pub struct UnknownLinkModifier<'a> { - #[primary_span] - pub span: Span, - pub modifier: &'a str, -} - -#[derive(Diagnostic)] -#[diag(metadata_incompatible_wasm_link)] -pub struct IncompatibleWasmLink { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_link_requires_name, code = E0459)] -pub struct LinkRequiresName { - #[primary_span] - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_raw_dylib_no_nul)] -pub struct RawDylibNoNul { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(metadata_link_ordinal_raw_dylib)] pub struct LinkOrdinalRawDylib { #[primary_span] @@ -707,42 +526,6 @@ pub struct LibFilenameForm<'a> { } #[derive(Diagnostic)] -#[diag(metadata_multiple_import_name_type)] -pub struct MultipleImportNameType { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_import_name_type_form)] -pub struct ImportNameTypeForm { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_import_name_type_x86)] -pub struct ImportNameTypeX86 { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(metadata_unknown_import_name_type)] -pub struct UnknownImportNameType<'a> { - #[primary_span] - pub span: Span, - pub import_name_type: &'a str, -} - 
-#[derive(Diagnostic)] -#[diag(metadata_import_name_type_raw)] -pub struct ImportNameTypeRaw { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(metadata_wasm_c_abi)] pub(crate) struct WasmCAbi { #[primary_span] diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index 958e314efab..82738c68c59 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -3,25 +3,21 @@ use std::path::{Path, PathBuf}; use rustc_abi::ExternAbi; use rustc_ast::CRATE_NODE_ID; -use rustc_attr_parsing as attr; +use rustc_attr_parsing::{ShouldEmit, eval_config_entry}; use rustc_data_structures::fx::FxHashSet; -use rustc_hir::attrs::AttributeKind; +use rustc_hir::attrs::{AttributeKind, NativeLibKind, PeImportNameType}; use rustc_hir::find_attr; use rustc_middle::query::LocalCrate; use rustc_middle::ty::{self, List, Ty, TyCtxt}; use rustc_session::Session; use rustc_session::config::CrateType; -use rustc_session::cstore::{ - DllCallingConvention, DllImport, ForeignModule, NativeLib, PeImportNameType, -}; -use rustc_session::parse::feature_err; +use rustc_session::cstore::{DllCallingConvention, DllImport, ForeignModule, NativeLib}; use rustc_session::search_paths::PathKind; -use rustc_session::utils::NativeLibKind; +use rustc_span::Symbol; use rustc_span::def_id::{DefId, LOCAL_CRATE}; -use rustc_span::{Symbol, sym}; use rustc_target::spec::{BinaryFormat, LinkSelfContainedComponents}; -use crate::{errors, fluent_generated}; +use crate::errors; /// The fallback directories are passed to linker, but not used when rustc does the search, /// because in the latter case the set of fallback directories cannot always be determined @@ -192,7 +188,9 @@ pub(crate) fn collect(tcx: TyCtxt<'_>, LocalCrate: LocalCrate) -> Vec<NativeLib> pub(crate) fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool { match lib.cfg { - Some(ref cfg) => attr::cfg_matches(cfg, sess, CRATE_NODE_ID, None), + Some(ref cfg) => { + eval_config_entry(sess, cfg, CRATE_NODE_ID, None, ShouldEmit::ErrorsAndLints).as_bool() + } None => true, } } @@ -213,289 +211,23 @@ impl<'tcx> Collector<'tcx> { return; } - // Process all of the #[link(..)]-style arguments - let features = self.tcx.features(); - - for m in self.tcx.get_attrs(def_id, sym::link) { - let Some(items) = m.meta_item_list() else { - continue; - }; - - let mut name = None; - let mut kind = None; - let mut modifiers = None; - let mut cfg = None; - let mut wasm_import_module = None; - let mut import_name_type = None; - for item in items.iter() { - match item.name() { - Some(sym::name) => { - if name.is_some() { - sess.dcx().emit_err(errors::MultipleNamesInLink { span: item.span() }); - continue; - } - let Some(link_name) = item.value_str() else { - sess.dcx().emit_err(errors::LinkNameForm { span: item.span() }); - continue; - }; - let span = item.name_value_literal_span().unwrap(); - if link_name.is_empty() { - sess.dcx().emit_err(errors::EmptyLinkName { span }); - } - name = Some((link_name, span)); - } - Some(sym::kind) => { - if kind.is_some() { - sess.dcx().emit_err(errors::MultipleKindsInLink { span: item.span() }); - continue; - } - let Some(link_kind) = item.value_str() else { - sess.dcx().emit_err(errors::LinkKindForm { span: item.span() }); - continue; - }; - - let span = item.name_value_literal_span().unwrap(); - let link_kind = match link_kind.as_str() { - "static" => NativeLibKind::Static { bundle: None, whole_archive: None }, - "dylib" => NativeLibKind::Dylib { as_needed: 
None }, - "framework" => { - if !sess.target.is_like_darwin { - sess.dcx().emit_err(errors::LinkFrameworkApple { span }); - } - NativeLibKind::Framework { as_needed: None } - } - "raw-dylib" => { - if sess.target.is_like_windows { - // raw-dylib is stable and working on Windows - } else if sess.target.binary_format == BinaryFormat::Elf - && features.raw_dylib_elf() - { - // raw-dylib is unstable on ELF, but the user opted in - } else if sess.target.binary_format == BinaryFormat::Elf - && sess.is_nightly_build() - { - feature_err( - sess, - sym::raw_dylib_elf, - span, - fluent_generated::metadata_raw_dylib_elf_unstable, - ) - .emit(); - } else { - sess.dcx().emit_err(errors::RawDylibOnlyWindows { span }); - } - - NativeLibKind::RawDylib - } - "link-arg" => { - if !features.link_arg_attribute() { - feature_err( - sess, - sym::link_arg_attribute, - span, - fluent_generated::metadata_link_arg_unstable, - ) - .emit(); - } - NativeLibKind::LinkArg - } - kind => { - sess.dcx().emit_err(errors::UnknownLinkKind { span, kind }); - continue; - } - }; - kind = Some(link_kind); - } - Some(sym::modifiers) => { - if modifiers.is_some() { - sess.dcx() - .emit_err(errors::MultipleLinkModifiers { span: item.span() }); - continue; - } - let Some(link_modifiers) = item.value_str() else { - sess.dcx().emit_err(errors::LinkModifiersForm { span: item.span() }); - continue; - }; - modifiers = Some((link_modifiers, item.name_value_literal_span().unwrap())); - } - Some(sym::cfg) => { - if cfg.is_some() { - sess.dcx().emit_err(errors::MultipleCfgs { span: item.span() }); - continue; - } - let Some(link_cfg) = item.meta_item_list() else { - sess.dcx().emit_err(errors::LinkCfgForm { span: item.span() }); - continue; - }; - let [link_cfg] = link_cfg else { - sess.dcx() - .emit_err(errors::LinkCfgSinglePredicate { span: item.span() }); - continue; - }; - let Some(link_cfg) = link_cfg.meta_item_or_bool() else { - sess.dcx() - .emit_err(errors::LinkCfgSinglePredicate { span: item.span() }); - continue; - }; - if !features.link_cfg() { - feature_err( - sess, - sym::link_cfg, - item.span(), - fluent_generated::metadata_link_cfg_unstable, - ) - .emit(); - } - cfg = Some(link_cfg.clone()); - } - Some(sym::wasm_import_module) => { - if wasm_import_module.is_some() { - sess.dcx().emit_err(errors::MultipleWasmImport { span: item.span() }); - continue; - } - let Some(link_wasm_import_module) = item.value_str() else { - sess.dcx().emit_err(errors::WasmImportForm { span: item.span() }); - continue; - }; - wasm_import_module = Some((link_wasm_import_module, item.span())); - } - Some(sym::import_name_type) => { - if import_name_type.is_some() { - sess.dcx() - .emit_err(errors::MultipleImportNameType { span: item.span() }); - continue; - } - let Some(link_import_name_type) = item.value_str() else { - sess.dcx().emit_err(errors::ImportNameTypeForm { span: item.span() }); - continue; - }; - if self.tcx.sess.target.arch != "x86" { - sess.dcx().emit_err(errors::ImportNameTypeX86 { span: item.span() }); - continue; - } - - let link_import_name_type = match link_import_name_type.as_str() { - "decorated" => PeImportNameType::Decorated, - "noprefix" => PeImportNameType::NoPrefix, - "undecorated" => PeImportNameType::Undecorated, - import_name_type => { - sess.dcx().emit_err(errors::UnknownImportNameType { - span: item.span(), - import_name_type, - }); - continue; - } - }; - import_name_type = Some((link_import_name_type, item.span())); - } - _ => { - sess.dcx().emit_err(errors::UnexpectedLinkArg { span: item.span() }); - } - } - } - - // Do 
this outside the above loop so we don't depend on modifiers coming after kinds - let mut verbatim = None; - if let Some((modifiers, span)) = modifiers { - for modifier in modifiers.as_str().split(',') { - let (modifier, value) = match modifier.strip_prefix(&['+', '-']) { - Some(m) => (m, modifier.starts_with('+')), - None => { - sess.dcx().emit_err(errors::InvalidLinkModifier { span }); - continue; - } - }; - - macro report_unstable_modifier($feature: ident) { - if !features.$feature() { - // FIXME: make this translatable - #[expect(rustc::untranslatable_diagnostic)] - feature_err( - sess, - sym::$feature, - span, - format!("linking modifier `{modifier}` is unstable"), - ) - .emit(); - } - } - let assign_modifier = |dst: &mut Option<bool>| { - if dst.is_some() { - sess.dcx().emit_err(errors::MultipleModifiers { span, modifier }); - } else { - *dst = Some(value); - } - }; - match (modifier, &mut kind) { - ("bundle", Some(NativeLibKind::Static { bundle, .. })) => { - assign_modifier(bundle) - } - ("bundle", _) => { - sess.dcx().emit_err(errors::BundleNeedsStatic { span }); - } - - ("verbatim", _) => assign_modifier(&mut verbatim), - - ("whole-archive", Some(NativeLibKind::Static { whole_archive, .. })) => { - assign_modifier(whole_archive) - } - ("whole-archive", _) => { - sess.dcx().emit_err(errors::WholeArchiveNeedsStatic { span }); - } - - ("as-needed", Some(NativeLibKind::Dylib { as_needed })) - | ("as-needed", Some(NativeLibKind::Framework { as_needed })) => { - report_unstable_modifier!(native_link_modifiers_as_needed); - assign_modifier(as_needed) - } - ("as-needed", _) => { - sess.dcx().emit_err(errors::AsNeededCompatibility { span }); - } - - _ => { - sess.dcx().emit_err(errors::UnknownLinkModifier { span, modifier }); - } - } - } - } - - if let Some((_, span)) = wasm_import_module { - if name.is_some() || kind.is_some() || modifiers.is_some() || cfg.is_some() { - sess.dcx().emit_err(errors::IncompatibleWasmLink { span }); - } - } - - if wasm_import_module.is_some() { - (name, kind) = (wasm_import_module, Some(NativeLibKind::WasmImportModule)); - } - let Some((name, name_span)) = name else { - sess.dcx().emit_err(errors::LinkRequiresName { span: m.span() }); - continue; - }; - - // Do this outside of the loop so that `import_name_type` can be specified before `kind`. 
- if let Some((_, span)) = import_name_type { - if kind != Some(NativeLibKind::RawDylib) { - sess.dcx().emit_err(errors::ImportNameTypeRaw { span }); - } - } - - let dll_imports = match kind { - Some(NativeLibKind::RawDylib) => { - if name.as_str().contains('\0') { - sess.dcx().emit_err(errors::RawDylibNoNul { span: name_span }); - } - foreign_items - .iter() - .map(|&child_item| { - self.build_dll_import( - abi, - import_name_type.map(|(import_name_type, _)| import_name_type), - child_item, - ) - }) - .collect() - } + for attr in + find_attr!(self.tcx.get_all_attrs(def_id), AttributeKind::Link(links, _) => links) + .iter() + .map(|v| v.iter()) + .flatten() + { + let dll_imports = match attr.kind { + NativeLibKind::RawDylib => foreign_items + .iter() + .map(|&child_item| { + self.build_dll_import( + abi, + attr.import_name_type.map(|(import_name_type, _)| import_name_type), + child_item, + ) + }) + .collect(), _ => { for &child_item in foreign_items { if let Some(span) = find_attr!(self.tcx.get_all_attrs(child_item), AttributeKind::LinkOrdinal {span, ..} => *span) @@ -508,15 +240,20 @@ impl<'tcx> Collector<'tcx> { } }; - let kind = kind.unwrap_or(NativeLibKind::Unspecified); - let filename = find_bundled_library(name, verbatim, kind, cfg.is_some(), self.tcx); + let filename = find_bundled_library( + attr.name, + attr.verbatim, + attr.kind, + attr.cfg.is_some(), + self.tcx, + ); self.libs.push(NativeLib { - name, + name: attr.name, filename, - kind, - cfg, + kind: attr.kind, + cfg: attr.cfg.clone(), foreign_module: Some(def_id.to_def_id()), - verbatim, + verbatim: attr.verbatim, dll_imports, }); } @@ -701,7 +438,7 @@ impl<'tcx> Collector<'tcx> { .link_ordinal .map_or(import_name_type, |ord| Some(PeImportNameType::Ordinal(ord))); - let name = codegen_fn_attrs.link_name.unwrap_or_else(|| self.tcx.item_name(item)); + let name = codegen_fn_attrs.symbol_name.unwrap_or_else(|| self.tcx.item_name(item)); if self.tcx.sess.target.binary_format == BinaryFormat::Elf { let name = name.as_str(); diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 548c56a97bc..110b26c62ef 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -2014,6 +2014,22 @@ impl CrateMetadata { self.root.is_proc_macro_crate() } + pub(crate) fn proc_macros_for_crate( + &self, + krate: CrateNum, + cstore: &CStore, + ) -> impl Iterator<Item = DefId> { + gen move { + for def_id in self.root.proc_macro_data.as_ref().into_iter().flat_map(move |data| { + data.macros + .decode(CrateMetadataRef { cdata: self, cstore }) + .map(move |index| DefId { index, krate }) + }) { + yield def_id; + } + } + } + pub(crate) fn name(&self) -> Symbol { self.root.header.name } diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index d42c8b947a4..a7e7e9985f4 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1981,7 +1981,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { def_key.disambiguated_data.data = DefPathData::MacroNs(name); let def_id = id.to_def_id(); - self.tables.def_kind.set_some(def_id.index, DefKind::Macro(macro_kind)); + self.tables.def_kind.set_some(def_id.index, DefKind::Macro(macro_kind.into())); self.tables.proc_macro.set_some(def_id.index, macro_kind); self.encode_attrs(id); record!(self.tables.def_keys[def_id] <- def_key); diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs 
b/compiler/rustc_metadata/src/rmeta/mod.rs index 99174e4ad2f..1f7d142d330 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -11,7 +11,7 @@ use rustc_abi::{FieldIdx, ReprOptions, VariantIdx}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::svh::Svh; use rustc_hir::attrs::StrippedCfgItem; -use rustc_hir::def::{CtorKind, DefKind, DocLinkResMap}; +use rustc_hir::def::{CtorKind, DefKind, DocLinkResMap, MacroKinds}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIndex, DefPathHash, StableCrateId}; use rustc_hir::definitions::DefKey; use rustc_hir::lang_items::LangItem; diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 0671aa20399..2cb07a28a8a 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -81,7 +81,7 @@ impl FixedSizeEncoding for u64 { } macro_rules! fixed_size_enum { - ($ty:ty { $(($($pat:tt)*))* }) => { + ($ty:ty { $(($($pat:tt)*))* } $( unreachable { $(($($upat:tt)*))+ } )?) => { impl FixedSizeEncoding for Option<$ty> { type ByteArray = [u8;1]; @@ -103,12 +103,24 @@ macro_rules! fixed_size_enum { b[0] = match self { None => unreachable!(), $(Some($($pat)*) => 1 + ${index()},)* + $(Some($($($upat)*)|+) => unreachable!(),)? } } } } } +// Workaround; need const traits to construct bitflags in a const +macro_rules! const_macro_kinds { + ($($name:ident),+$(,)?) => (MacroKinds::from_bits_truncate($(MacroKinds::$name.bits())|+)) +} +const MACRO_KINDS_ATTR_BANG: MacroKinds = const_macro_kinds!(ATTR, BANG); +const MACRO_KINDS_DERIVE_BANG: MacroKinds = const_macro_kinds!(DERIVE, BANG); +const MACRO_KINDS_DERIVE_ATTR: MacroKinds = const_macro_kinds!(DERIVE, ATTR); +const MACRO_KINDS_DERIVE_ATTR_BANG: MacroKinds = const_macro_kinds!(DERIVE, ATTR, BANG); +// Ensure that we get a compilation error if MacroKinds gets extended without updating metadata. +const _: () = assert!(MACRO_KINDS_DERIVE_ATTR_BANG.is_all()); + fixed_size_enum! { DefKind { ( Mod ) @@ -151,10 +163,16 @@ fixed_size_enum! 
{ ( Ctor(CtorOf::Struct, CtorKind::Const) ) ( Ctor(CtorOf::Variant, CtorKind::Fn) ) ( Ctor(CtorOf::Variant, CtorKind::Const) ) - ( Macro(MacroKind::Bang) ) - ( Macro(MacroKind::Attr) ) - ( Macro(MacroKind::Derive) ) + ( Macro(MacroKinds::BANG) ) + ( Macro(MacroKinds::ATTR) ) + ( Macro(MacroKinds::DERIVE) ) + ( Macro(MACRO_KINDS_ATTR_BANG) ) + ( Macro(MACRO_KINDS_DERIVE_ATTR) ) + ( Macro(MACRO_KINDS_DERIVE_BANG) ) + ( Macro(MACRO_KINDS_DERIVE_ATTR_BANG) ) ( SyntheticCoroutineBody ) + } unreachable { + ( Macro(_) ) } } diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index fbcce16cedc..782066981c9 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true either = "1.5.0" gsgdt = "0.1.2" polonius-engine = "0.13.0" @@ -34,8 +34,8 @@ rustc_target = { path = "../rustc_target" } rustc_thread_pool = { path = "../rustc_thread_pool" } rustc_type_ir = { path = "../rustc_type_ir" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index e5cc23c213d..cf0549fa668 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -60,6 +60,7 @@ #![feature(try_trait_v2_residual)] #![feature(try_trait_v2_yeet)] #![feature(type_alias_impl_trait)] +#![feature(unwrap_infallible)] #![feature(yeet_expr)] #![recursion_limit = "256"] // tidy-alphabetical-end diff --git a/compiler/rustc_middle/src/lint.rs b/compiler/rustc_middle/src/lint.rs index 341a735f88f..f70b7b6fad7 100644 --- a/compiler/rustc_middle/src/lint.rs +++ b/compiler/rustc_middle/src/lint.rs @@ -211,11 +211,28 @@ impl LintExpectation { } fn explain_lint_level_source( + sess: &Session, lint: &'static Lint, level: Level, src: LintLevelSource, err: &mut Diag<'_, ()>, ) { + // Find the name of the lint group that contains the given lint. + // Assumes the lint only belongs to one group. 
+ let lint_group_name = |lint| { + let lint_groups_iter = sess.lint_groups_iter(); + let lint_id = LintId::of(lint); + lint_groups_iter + .filter(|lint_group| !lint_group.is_externally_loaded) + .find(|lint_group| { + lint_group + .lints + .iter() + .find(|lint_group_lint| **lint_group_lint == lint_id) + .is_some() + }) + .map(|lint_group| lint_group.name) + }; let name = lint.name_lower(); if let Level::Allow = level { // Do not point at `#[allow(compat_lint)]` as the reason for a compatibility lint @@ -224,7 +241,15 @@ fn explain_lint_level_source( } match src { LintLevelSource::Default => { - err.note_once(format!("`#[{}({})]` on by default", level.as_str(), name)); + let level_str = level.as_str(); + match lint_group_name(lint) { + Some(group_name) => { + err.note_once(format!("`#[{level_str}({name})]` (part of `#[{level_str}({group_name})]`) on by default")); + } + None => { + err.note_once(format!("`#[{level_str}({name})]` on by default")); + } + } } LintLevelSource::CommandLine(lint_flag_val, orig_level) => { let flag = orig_level.to_cmd_flag(); @@ -427,7 +452,7 @@ pub fn lint_level( decorate(&mut err); } - explain_lint_level_source(lint, level, src, &mut err); + explain_lint_level_source(sess, lint, level, src, &mut err); err.emit() } lint_level_impl(sess, lint, level, span, Box::new(decorate)) diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index 94384e64afd..866736f74a0 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -1,13 +1,11 @@ use std::borrow::Cow; use rustc_abi::Align; -use rustc_ast::expand::autodiff_attrs::AutoDiffAttrs; -use rustc_hir::attrs::{InlineAttr, InstructionSetAttr, OptimizeAttr}; +use rustc_hir::attrs::{InlineAttr, InstructionSetAttr, Linkage, OptimizeAttr}; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; use rustc_span::Symbol; use rustc_target::spec::SanitizerSet; -use crate::mir::mono::Linkage; use crate::ty::{InstanceKind, TyCtxt}; impl<'tcx> TyCtxt<'tcx> { @@ -36,14 +34,10 @@ pub struct CodegenFnAttrs { pub inline: InlineAttr, /// Parsed representation of the `#[optimize]` attribute pub optimize: OptimizeAttr, - /// The `#[export_name = "..."]` attribute, indicating a custom symbol a - /// function should be exported under - pub export_name: Option<Symbol>, - /// The `#[link_name = "..."]` attribute, indicating a custom symbol an - /// imported function should be imported as. Note that `export_name` - /// probably isn't set when this is set, this is for foreign items while - /// `#[export_name]` is for Rust-defined functions. - pub link_name: Option<Symbol>, + /// The name this function will be imported/exported under. This can be set + /// using the `#[export_name = "..."]` or `#[link_name = "..."]` attribute + /// depending on if this is a function definition or foreign function. + pub symbol_name: Option<Symbol>, /// The `#[link_ordinal = "..."]` attribute, indicating an ordinal an /// imported function has in the dynamic library. Note that this must not /// be set when `link_name` is set. This is for foreign items with the @@ -62,8 +56,8 @@ pub struct CodegenFnAttrs { /// The `#[link_section = "..."]` attribute, or what executable section this /// should be placed in. pub link_section: Option<Symbol>, - /// The `#[no_sanitize(...)]` attribute. Indicates sanitizers for which - /// instrumentation should be disabled inside the annotated function. + /// The `#[sanitize(xyz = "off")]` attribute. 
Indicates sanitizers for which + /// instrumentation should be disabled inside the function. pub no_sanitize: SanitizerSet, /// The `#[instruction_set(set)]` attribute. Indicates if the generated code should /// be generated against a specific instruction set. Only usable on architectures which allow @@ -75,17 +69,25 @@ pub struct CodegenFnAttrs { /// The `#[patchable_function_entry(...)]` attribute. Indicates how many nops should be around /// the function entry. pub patchable_function_entry: Option<PatchableFunctionEntry>, - /// For the `#[autodiff]` macros. - pub autodiff_item: Option<AutoDiffAttrs>, +} + +#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable, PartialEq, Eq)] +pub enum TargetFeatureKind { + /// The feature is implied by another feature, rather than explicitly added by the + /// `#[target_feature]` attribute + Implied, + /// The feature is added by the regular `target_feature` attribute. + Enabled, + /// The feature is added by the unsafe `force_target_feature` attribute. + Forced, } #[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] pub struct TargetFeature { /// The name of the target feature (e.g. "avx") pub name: Symbol, - /// The feature is implied by another feature, rather than explicitly added by the - /// `#[target_feature]` attribute - pub implied: bool, + /// The way this feature was enabled. + pub kind: TargetFeatureKind, } #[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] @@ -158,6 +160,8 @@ bitflags::bitflags! { const ALLOCATOR_ZEROED = 1 << 14; /// `#[no_builtins]`: indicates that disable implicit builtin knowledge of functions for the function. const NO_BUILTINS = 1 << 15; + /// Marks foreign items, to make `contains_extern_indicator` cheaper. + const FOREIGN_ITEM = 1 << 16; } } rustc_data_structures::external_bitflags_debug! { CodegenFnAttrFlags } @@ -170,8 +174,7 @@ impl CodegenFnAttrs { flags: CodegenFnAttrFlags::empty(), inline: InlineAttr::None, optimize: OptimizeAttr::Default, - export_name: None, - link_name: None, + symbol_name: None, link_ordinal: None, target_features: vec![], safe_target_features: false, @@ -182,7 +185,6 @@ impl CodegenFnAttrs { instruction_set: None, alignment: None, patchable_function_entry: None, - autodiff_item: None, } } @@ -194,9 +196,13 @@ impl CodegenFnAttrs { /// /// Keep this in sync with the logic for the unused_attributes for `#[inline]` lint. pub fn contains_extern_indicator(&self) -> bool { + if self.flags.contains(CodegenFnAttrFlags::FOREIGN_ITEM) { + return false; + } + self.flags.contains(CodegenFnAttrFlags::NO_MANGLE) || self.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) - || self.export_name.is_some() + || self.symbol_name.is_some() || match self.linkage { // These are private, so make sure we don't try to consider // them external. diff --git a/compiler/rustc_middle/src/middle/privacy.rs b/compiler/rustc_middle/src/middle/privacy.rs index 785ddd1ee29..e3e04c9d180 100644 --- a/compiler/rustc_middle/src/middle/privacy.rs +++ b/compiler/rustc_middle/src/middle/privacy.rs @@ -181,11 +181,7 @@ impl EffectiveVisibilities { // nominal visibility. For some items nominal visibility doesn't make sense so we // don't check this condition for them. let is_impl = matches!(tcx.def_kind(def_id), DefKind::Impl { .. 
}); - let is_associated_item_in_trait_impl = tcx - .impl_of_assoc(def_id.to_def_id()) - .and_then(|impl_id| tcx.trait_id_of_impl(impl_id)) - .is_some(); - if !is_impl && !is_associated_item_in_trait_impl { + if !is_impl && tcx.trait_impl_of_assoc(def_id.to_def_id()).is_none() { let nominal_vis = tcx.visibility(def_id); if !nominal_vis.is_at_least(ev.reachable, tcx) { span_bug!( diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index 4a19cf1563c..18520089e3e 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -4,7 +4,7 @@ use std::num::NonZero; use rustc_ast::NodeId; -use rustc_errors::{Applicability, Diag, EmissionGuarantee}; +use rustc_errors::{Applicability, Diag, EmissionGuarantee, LintBuffer}; use rustc_feature::GateIssue; use rustc_hir::attrs::{DeprecatedSince, Deprecation}; use rustc_hir::def_id::{DefId, LocalDefId}; @@ -12,7 +12,7 @@ use rustc_hir::{self as hir, ConstStability, DefaultBodyStability, HirId, Stabil use rustc_macros::{Decodable, Encodable, HashStable, Subdiagnostic}; use rustc_session::Session; use rustc_session::lint::builtin::{DEPRECATED, DEPRECATED_IN_FUTURE, SOFT_UNSTABLE}; -use rustc_session::lint::{BuiltinLintDiag, DeprecatedSinceKind, Level, Lint, LintBuffer}; +use rustc_session::lint::{BuiltinLintDiag, DeprecatedSinceKind, Level, Lint}; use rustc_session::parse::feature_err_issue; use rustc_span::{Span, Symbol, sym}; use tracing::debug; diff --git a/compiler/rustc_middle/src/mir/consts.rs b/compiler/rustc_middle/src/mir/consts.rs index 96131d47a17..164433aede2 100644 --- a/compiler/rustc_middle/src/mir/consts.rs +++ b/compiler/rustc_middle/src/mir/consts.rs @@ -448,6 +448,11 @@ impl<'tcx> Const<'tcx> { Self::Val(val, ty) } + #[inline] + pub fn from_ty_value(tcx: TyCtxt<'tcx>, val: ty::Value<'tcx>) -> Self { + Self::Ty(val.ty, ty::Const::new_value(tcx, val.valtree, val.ty)) + } + pub fn from_bits( tcx: TyCtxt<'tcx>, bits: u128, diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs index 27ead514531..2ea92a39d48 100644 --- a/compiler/rustc_middle/src/mir/interpret/allocation.rs +++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs @@ -306,8 +306,6 @@ pub enum AllocError { ScalarSizeMismatch(ScalarSizeMismatch), /// Encountered a pointer where we needed raw bytes. ReadPointerAsInt(Option<BadBytesAccess>), - /// Partially overwriting a pointer. - OverwritePartialPointer(Size), /// Partially copying a pointer. ReadPartialPointer(Size), /// Using uninitialized data where it is not allowed. 
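Illustrative aside (plain std types, not the interpreter's `ProvenanceMap`): the `OverwritePartialPointer` error can be removed because a partial overwrite no longer has to be rejected; the surviving bytes of the pointer keep their provenance as per-byte fragments tagged with their position inside the original pointer. A standalone sketch of that idea, with provenance modelled as a `char`:

use std::collections::BTreeMap;

const PTR_SIZE: u64 = 8;

#[derive(Default)]
struct Map {
    ptrs: BTreeMap<u64, char>,        // whole-pointer provenance starting at `offset`
    bytes: BTreeMap<u64, (char, u8)>, // (provenance, index of this byte in its pointer)
}

impl Map {
    // Clear provenance in `start..end`; pointer parts outside the range become fragments.
    fn clear(&mut self, start: u64, end: u64) {
        let overlapping: Vec<(u64, char)> = self
            .ptrs
            .range(start.saturating_sub(PTR_SIZE - 1)..end)
            .map(|(&o, &p)| (o, p))
            .collect();
        for (ptr_start, prov) in overlapping {
            self.ptrs.remove(&ptr_start);
            for byte in ptr_start..ptr_start + PTR_SIZE {
                if byte < start || byte >= end {
                    // Keep the surviving byte, remembering which fragment of the pointer it was.
                    self.bytes.insert(byte, (prov, (byte - ptr_start) as u8));
                }
            }
        }
        self.bytes.retain(|&o, _| o < start || o >= end);
    }
}

fn main() {
    let mut m = Map::default();
    m.ptrs.insert(0, 'A'); // a pointer with provenance 'A' at offset 0
    m.clear(4, 6);         // overwrite bytes 4 and 5
    // Bytes 0..4 and 6..8 survive as fragments of 'A'; nothing needs to error.
    println!("{:?}", m.bytes);
}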
@@ -331,9 +329,6 @@ impl AllocError { ReadPointerAsInt(info) => InterpErrorKind::Unsupported( UnsupportedOpInfo::ReadPointerAsInt(info.map(|b| (alloc_id, b))), ), - OverwritePartialPointer(offset) => InterpErrorKind::Unsupported( - UnsupportedOpInfo::OverwritePartialPointer(Pointer::new(alloc_id, offset)), - ), ReadPartialPointer(offset) => InterpErrorKind::Unsupported( UnsupportedOpInfo::ReadPartialPointer(Pointer::new(alloc_id, offset)), ), @@ -633,11 +628,11 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> &mut self, cx: &impl HasDataLayout, range: AllocRange, - ) -> AllocResult<&mut [u8]> { + ) -> &mut [u8] { self.mark_init(range, true); - self.provenance.clear(range, cx)?; + self.provenance.clear(range, cx); - Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]) + &mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()] } /// A raw pointer variant of `get_bytes_unchecked_for_overwrite` that avoids invalidating existing immutable aliases @@ -646,15 +641,15 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> &mut self, cx: &impl HasDataLayout, range: AllocRange, - ) -> AllocResult<*mut [u8]> { + ) -> *mut [u8] { self.mark_init(range, true); - self.provenance.clear(range, cx)?; + self.provenance.clear(range, cx); assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check // Crucially, we go via `AllocBytes::as_mut_ptr`, not `AllocBytes::deref_mut`. let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize()); let len = range.end().bytes_usize() - range.start.bytes_usize(); - Ok(ptr::slice_from_raw_parts_mut(begin_ptr, len)) + ptr::slice_from_raw_parts_mut(begin_ptr, len) } /// This gives direct mutable access to the entire buffer, just exposing their internal state @@ -723,26 +718,45 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> let ptr = Pointer::new(prov, Size::from_bytes(bits)); return Ok(Scalar::from_pointer(ptr, cx)); } - - // If we can work on pointers byte-wise, join the byte-wise provenances. - if Prov::OFFSET_IS_ADDR { - let mut prov = self.provenance.get(range.start, cx); + // The other easy case is total absence of provenance. + if self.provenance.range_empty(range, cx) { + return Ok(Scalar::from_uint(bits, range.size)); + } + // If we get here, we have to check per-byte provenance, and join them together. + let prov = 'prov: { + // Initialize with first fragment. Must have index 0. + let Some((mut joint_prov, 0)) = self.provenance.get_byte(range.start, cx) else { + break 'prov None; + }; + // Update with the remaining fragments. for offset in Size::from_bytes(1)..range.size { - let this_prov = self.provenance.get(range.start + offset, cx); - prov = Prov::join(prov, this_prov); + // Ensure there is provenance here and it has the right index. + let Some((frag_prov, frag_idx)) = + self.provenance.get_byte(range.start + offset, cx) + else { + break 'prov None; + }; + // Wildcard provenance is allowed to come with any index (this is needed + // for Miri's native-lib mode to work). + if u64::from(frag_idx) != offset.bytes() && Some(frag_prov) != Prov::WILDCARD { + break 'prov None; + } + // Merge this byte's provenance with the previous ones. + joint_prov = match Prov::join(joint_prov, frag_prov) { + Some(prov) => prov, + None => break 'prov None, + }; } - // Now use this provenance. 
- let ptr = Pointer::new(prov, Size::from_bytes(bits)); - return Ok(Scalar::from_maybe_pointer(ptr, cx)); - } else { - // Without OFFSET_IS_ADDR, the only remaining case we can handle is total absence of - // provenance. - if self.provenance.range_empty(range, cx) { - return Ok(Scalar::from_uint(bits, range.size)); - } - // Else we have mixed provenance, that doesn't work. + break 'prov Some(joint_prov); + }; + if prov.is_none() && !Prov::OFFSET_IS_ADDR { + // There are some bytes with provenance here but overall the provenance does not add up. + // We need `OFFSET_IS_ADDR` to fall back to no-provenance here; without that option, we must error. return Err(AllocError::ReadPartialPointer(range.start)); } + // We can use this provenance. + let ptr = Pointer::new(prov, Size::from_bytes(bits)); + return Ok(Scalar::from_maybe_pointer(ptr, cx)); } else { // We are *not* reading a pointer. // If we can just ignore provenance or there is none, that's easy. @@ -782,7 +796,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> let endian = cx.data_layout().endian; // Yes we do overwrite all the bytes in `dst`. - let dst = self.get_bytes_unchecked_for_overwrite(cx, range)?; + let dst = self.get_bytes_unchecked_for_overwrite(cx, range); write_target_uint(endian, dst, bytes).unwrap(); // See if we have to also store some provenance. @@ -795,10 +809,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> } /// Write "uninit" to the given memory range. - pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult { + pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) { self.mark_init(range, false); - self.provenance.clear(range, cx)?; - Ok(()) + self.provenance.clear(range, cx); } /// Mark all bytes in the given range as initialised and reset the provenance @@ -817,9 +830,12 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> } /// Remove all provenance in the given memory range. - pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult { - self.provenance.clear(range, cx)?; - return Ok(()); + pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) { + self.provenance.clear(range, cx); + } + + pub fn provenance_merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool { + self.provenance.merge_bytes(cx) } /// Applies a previously prepared provenance copy. diff --git a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs index 119d4be64e6..dbbd95408c8 100644 --- a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs +++ b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs @@ -10,7 +10,7 @@ use rustc_macros::HashStable; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use tracing::trace; -use super::{AllocError, AllocRange, AllocResult, CtfeProvenance, Provenance, alloc_range}; +use super::{AllocRange, CtfeProvenance, Provenance, alloc_range}; /// Stores the provenance information of pointers stored in memory. #[derive(Clone, PartialEq, Eq, Hash, Debug)] @@ -19,25 +19,25 @@ pub struct ProvenanceMap<Prov = CtfeProvenance> { /// `Provenance` in this map applies from the given offset for an entire pointer-size worth of /// bytes. Two entries in this map are always at least a pointer size apart. 
ptrs: SortedMap<Size, Prov>, - /// Provenance in this map only applies to the given single byte. - /// This map is disjoint from the previous. It will always be empty when - /// `Prov::OFFSET_IS_ADDR` is false. - bytes: Option<Box<SortedMap<Size, Prov>>>, + /// This stores byte-sized provenance fragments. + /// The `u8` indicates the position of this byte inside its original pointer. + /// If the bytes are re-assembled in their original order, the pointer can be used again. + /// Wildcard provenance is allowed to have index 0 everywhere. + bytes: Option<Box<SortedMap<Size, (Prov, u8)>>>, } // These impls are generic over `Prov` since `CtfeProvenance` is only decodable/encodable // for some particular `D`/`S`. impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> { fn decode(d: &mut D) -> Self { - assert!(!Prov::OFFSET_IS_ADDR); // only `CtfeProvenance` is ever serialized + // `bytes` is not in the serialized format Self { ptrs: Decodable::decode(d), bytes: None } } } impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> { fn encode(&self, s: &mut S) { let Self { ptrs, bytes } = self; - assert!(!Prov::OFFSET_IS_ADDR); // only `CtfeProvenance` is ever serialized - debug_assert!(bytes.is_none()); // without `OFFSET_IS_ADDR`, this is always empty + assert!(bytes.is_none()); // interning refuses allocations with pointer fragments ptrs.encode(s) } } @@ -58,10 +58,10 @@ impl ProvenanceMap { /// Give access to the ptr-sized provenances (which can also be thought of as relocations, and /// indeed that is how codegen treats them). /// - /// Only exposed with `CtfeProvenance` provenance, since it panics if there is bytewise provenance. + /// Only use on interned allocations, as other allocations may have per-byte provenance! #[inline] pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> { - debug_assert!(self.bytes.is_none()); // `CtfeProvenance::OFFSET_IS_ADDR` is false so this cannot fail + assert!(self.bytes.is_none(), "`ptrs()` called on non-interned allocation"); &self.ptrs } } @@ -88,12 +88,12 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { } /// `pm.range_ptrs_is_empty(r, cx)` == `pm.range_ptrs_get(r, cx).is_empty()`, but is faster. - pub(super) fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool { + fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool { self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx)) } /// Returns all byte-wise provenance in the given range. - fn range_bytes_get(&self, range: AllocRange) -> &[(Size, Prov)] { + fn range_bytes_get(&self, range: AllocRange) -> &[(Size, (Prov, u8))] { if let Some(bytes) = self.bytes.as_ref() { bytes.range(range.start..range.end()) } else { @@ -107,19 +107,58 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { } /// Get the provenance of a single byte. - pub fn get(&self, offset: Size, cx: &impl HasDataLayout) -> Option<Prov> { + pub fn get_byte(&self, offset: Size, cx: &impl HasDataLayout) -> Option<(Prov, u8)> { let prov = self.range_ptrs_get(alloc_range(offset, Size::from_bytes(1)), cx); debug_assert!(prov.len() <= 1); if let Some(entry) = prov.first() { // If it overlaps with this byte, it is on this byte. debug_assert!(self.bytes.as_ref().is_none_or(|b| !b.contains_key(&offset))); - Some(entry.1) + Some((entry.1, (offset - entry.0).bytes() as u8)) } else { // Look up per-byte provenance. 
self.bytes.as_ref().and_then(|b| b.get(&offset).copied()) } } + /// Gets the provenances of all bytes (including from pointers) in a range. + pub fn get_range( + &self, + cx: &impl HasDataLayout, + range: AllocRange, + ) -> impl Iterator<Item = Prov> { + let ptr_provs = self.range_ptrs_get(range, cx).iter().map(|(_, p)| *p); + let byte_provs = self.range_bytes_get(range).iter().map(|(_, (p, _))| *p); + ptr_provs.chain(byte_provs) + } + + /// Attempt to merge per-byte provenance back into ptr chunks, if the right fragments + /// sit next to each other. Return `false` is that is not possible due to partial pointers. + pub fn merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool { + let Some(bytes) = self.bytes.as_deref_mut() else { + return true; + }; + let ptr_size = cx.data_layout().pointer_size(); + while let Some((offset, (prov, _))) = bytes.iter().next().copied() { + // Check if this fragment starts a pointer. + let range = offset..offset + ptr_size; + let frags = bytes.range(range.clone()); + if frags.len() != ptr_size.bytes_usize() { + return false; + } + for (idx, (_offset, (frag_prov, frag_idx))) in frags.iter().copied().enumerate() { + if frag_prov != prov || frag_idx != idx as u8 { + return false; + } + } + // Looks like a pointer! Move it over to the ptr provenance map. + bytes.remove_range(range); + self.ptrs.insert(offset, prov); + } + // We managed to convert everything into whole pointers. + self.bytes = None; + true + } + /// Check if there is ptr-sized provenance at the given index. /// Does not mean anything for bytewise provenance! But can be useful as an optimization. pub fn get_ptr(&self, offset: Size) -> Option<Prov> { @@ -137,7 +176,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { /// Yields all the provenances stored in this map. pub fn provenances(&self) -> impl Iterator<Item = Prov> { - let bytes = self.bytes.iter().flat_map(|b| b.values()); + let bytes = self.bytes.iter().flat_map(|b| b.values().map(|(p, _i)| p)); self.ptrs.values().chain(bytes).copied() } @@ -148,16 +187,12 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { /// Removes all provenance inside the given range. /// If there is provenance overlapping with the edges, might result in an error. - pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) -> AllocResult { + pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) { let start = range.start; let end = range.end(); // Clear the bytewise part -- this is easy. - if Prov::OFFSET_IS_ADDR { - if let Some(bytes) = self.bytes.as_mut() { - bytes.remove_range(start..end); - } - } else { - debug_assert!(self.bytes.is_none()); + if let Some(bytes) = self.bytes.as_mut() { + bytes.remove_range(start..end); } let pointer_size = cx.data_layout().pointer_size(); @@ -168,7 +203,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { // Find all provenance overlapping the given range. if self.range_ptrs_is_empty(range, cx) { // No provenance in this range, we are done. This is the common case. - return Ok(()); + return; } // This redoes some of the work of `range_get_ptrs_is_empty`, but this path is much @@ -179,28 +214,20 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { // We need to handle clearing the provenance from parts of a pointer. if first < start { - if !Prov::OFFSET_IS_ADDR { - // We can't split up the provenance into less than a pointer. - return Err(AllocError::OverwritePartialPointer(first)); - } // Insert the remaining part in the bytewise provenance. 
let prov = self.ptrs[&first]; let bytes = self.bytes.get_or_insert_with(Box::default); for offset in first..start { - bytes.insert(offset, prov); + bytes.insert(offset, (prov, (offset - first).bytes() as u8)); } } if last > end { let begin_of_last = last - pointer_size; - if !Prov::OFFSET_IS_ADDR { - // We can't split up the provenance into less than a pointer. - return Err(AllocError::OverwritePartialPointer(begin_of_last)); - } // Insert the remaining part in the bytewise provenance. let prov = self.ptrs[&begin_of_last]; let bytes = self.bytes.get_or_insert_with(Box::default); for offset in end..last { - bytes.insert(offset, prov); + bytes.insert(offset, (prov, (offset - begin_of_last).bytes() as u8)); } } @@ -208,8 +235,6 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { // Since provenance do not overlap, we know that removing until `last` (exclusive) is fine, // i.e., this will not remove any other provenance just after the ones we care about. self.ptrs.remove_range(first..last); - - Ok(()) } /// Overwrites all provenance in the given range with wildcard provenance. @@ -218,10 +243,6 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { /// /// Provided for usage in Miri and panics otherwise. pub fn write_wildcards(&mut self, cx: &impl HasDataLayout, range: AllocRange) { - assert!( - Prov::OFFSET_IS_ADDR, - "writing wildcard provenance is not supported when `OFFSET_IS_ADDR` is false" - ); let wildcard = Prov::WILDCARD.unwrap(); let bytes = self.bytes.get_or_insert_with(Box::default); @@ -229,21 +250,22 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { // Remove pointer provenances that overlap with the range, then readd the edge ones bytewise. let ptr_range = Self::adjusted_range_ptrs(range, cx); let ptrs = self.ptrs.range(ptr_range.clone()); - if let Some((offset, prov)) = ptrs.first() { - for byte_ofs in *offset..range.start { - bytes.insert(byte_ofs, *prov); + if let Some((offset, prov)) = ptrs.first().copied() { + for byte_ofs in offset..range.start { + bytes.insert(byte_ofs, (prov, (byte_ofs - offset).bytes() as u8)); } } - if let Some((offset, prov)) = ptrs.last() { - for byte_ofs in range.end()..*offset + cx.data_layout().pointer_size() { - bytes.insert(byte_ofs, *prov); + if let Some((offset, prov)) = ptrs.last().copied() { + for byte_ofs in range.end()..offset + cx.data_layout().pointer_size() { + bytes.insert(byte_ofs, (prov, (byte_ofs - offset).bytes() as u8)); } } self.ptrs.remove_range(ptr_range); // Overwrite bytewise provenance. for offset in range.start..range.end() { - bytes.insert(offset, wildcard); + // The fragment index does not matter for wildcard provenance. + bytes.insert(offset, (wildcard, 0)); } } } @@ -253,7 +275,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { /// Offsets are already adjusted to the destination allocation. 
pub struct ProvenanceCopy<Prov> { dest_ptrs: Option<Box<[(Size, Prov)]>>, - dest_bytes: Option<Box<[(Size, Prov)]>>, + dest_bytes: Option<Box<[(Size, (Prov, u8))]>>, } impl<Prov: Provenance> ProvenanceMap<Prov> { @@ -263,7 +285,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { dest: Size, count: u64, cx: &impl HasDataLayout, - ) -> AllocResult<ProvenanceCopy<Prov>> { + ) -> ProvenanceCopy<Prov> { let shift_offset = move |idx, offset| { // compute offset for current repetition let dest_offset = dest + src.size * idx; // `Size` operations @@ -301,24 +323,16 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { let mut dest_bytes_box = None; let begin_overlap = self.range_ptrs_get(alloc_range(src.start, Size::ZERO), cx).first(); let end_overlap = self.range_ptrs_get(alloc_range(src.end(), Size::ZERO), cx).first(); - if !Prov::OFFSET_IS_ADDR { - // There can't be any bytewise provenance, and we cannot split up the begin/end overlap. - if let Some(entry) = begin_overlap { - return Err(AllocError::ReadPartialPointer(entry.0)); - } - if let Some(entry) = end_overlap { - return Err(AllocError::ReadPartialPointer(entry.0)); - } - debug_assert!(self.bytes.is_none()); - } else { - let mut bytes = Vec::new(); + // We only need to go here if there is some overlap or some bytewise provenance. + if begin_overlap.is_some() || end_overlap.is_some() || self.bytes.is_some() { + let mut bytes: Vec<(Size, (Prov, u8))> = Vec::new(); // First, if there is a part of a pointer at the start, add that. if let Some(entry) = begin_overlap { trace!("start overlapping entry: {entry:?}"); // For really small copies, make sure we don't run off the end of the `src` range. let entry_end = cmp::min(entry.0 + ptr_size, src.end()); for offset in src.start..entry_end { - bytes.push((offset, entry.1)); + bytes.push((offset, (entry.1, (offset - entry.0).bytes() as u8))); } } else { trace!("no start overlapping entry"); @@ -334,8 +348,9 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { let entry_start = cmp::max(entry.0, src.start); for offset in entry_start..src.end() { if bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < offset) { - // The last entry, if it exists, has a lower offset than us. - bytes.push((offset, entry.1)); + // The last entry, if it exists, has a lower offset than us, so we + // can add it at the end and remain sorted. + bytes.push((offset, (entry.1, (offset - entry.0).bytes() as u8))); } else { // There already is an entry for this offset in there! This can happen when the // start and end range checks actually end up hitting the same pointer, so we @@ -358,7 +373,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { dest_bytes_box = Some(dest_bytes.into_boxed_slice()); } - Ok(ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box }) + ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box } } /// Applies a provenance copy. 
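Illustrative aside (standalone sketch, not rustc's data structures): the counterpart of splitting pointers into fragments is the `merge_bytes` method added earlier in this file, which folds fragments back into whole-pointer entries when a full, correctly ordered run lines up. Roughly:

use std::collections::BTreeMap;

const PTR_SIZE: u64 = 8;

fn merge_bytes(
    bytes: &mut BTreeMap<u64, (char, u8)>,
    ptrs: &mut BTreeMap<u64, char>,
) -> bool {
    while let Some((&offset, &(prov, _))) = bytes.iter().next() {
        // Collect the next PTR_SIZE fragments starting at `offset`.
        let frags: Vec<(u64, (char, u8))> =
            bytes.range(offset..offset + PTR_SIZE).map(|(&o, &f)| (o, f)).collect();
        // They only form a pointer if they are contiguous, share provenance,
        // and their fragment indices run 0..PTR_SIZE in order.
        let whole = frags.len() as u64 == PTR_SIZE
            && frags.iter().enumerate().all(|(i, &(o, (p, idx)))| {
                o == offset + i as u64 && p == prov && idx == i as u8
            });
        if !whole {
            return false; // a partial pointer remains; cannot merge
        }
        for (o, _) in frags {
            bytes.remove(&o);
        }
        ptrs.insert(offset, prov);
    }
    true
}

fn main() {
    let mut bytes: BTreeMap<u64, (char, u8)> = (0..8).map(|i| (i, ('A', i as u8))).collect();
    let mut ptrs = BTreeMap::new();
    assert!(merge_bytes(&mut bytes, &mut ptrs));
    assert_eq!(ptrs.get(&0), Some(&'A')); // fragments reassembled into one pointer
}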
@@ -368,14 +383,10 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { if let Some(dest_ptrs) = copy.dest_ptrs { self.ptrs.insert_presorted(dest_ptrs.into()); } - if Prov::OFFSET_IS_ADDR { - if let Some(dest_bytes) = copy.dest_bytes - && !dest_bytes.is_empty() - { - self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into()); - } - } else { - debug_assert!(copy.dest_bytes.is_none()); + if let Some(dest_bytes) = copy.dest_bytes + && !dest_bytes.is_empty() + { + self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into()); } } } diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs index 77427a12d11..7c72a8ec243 100644 --- a/compiler/rustc_middle/src/mir/interpret/error.rs +++ b/compiler/rustc_middle/src/mir/interpret/error.rs @@ -582,9 +582,6 @@ pub enum UnsupportedOpInfo { // // The variants below are only reachable from CTFE/const prop, miri will never emit them. // - /// Overwriting parts of a pointer; without knowing absolute addresses, the resulting state - /// cannot be represented by the CTFE interpreter. - OverwritePartialPointer(Pointer<AllocId>), /// Attempting to read or copy parts of a pointer to somewhere else; without knowing absolute /// addresses, the resulting state cannot be represented by the CTFE interpreter. ReadPartialPointer(Pointer<AllocId>), diff --git a/compiler/rustc_middle/src/mir/interpret/pointer.rs b/compiler/rustc_middle/src/mir/interpret/pointer.rs index e25ff7651f6..c581b2ebf09 100644 --- a/compiler/rustc_middle/src/mir/interpret/pointer.rs +++ b/compiler/rustc_middle/src/mir/interpret/pointer.rs @@ -56,7 +56,7 @@ impl<T: HasDataLayout> PointerArithmetic for T {} /// mostly opaque; the `Machine` trait extends it with some more operations that also have access to /// some global state. /// The `Debug` rendering is used to display bare provenance, and for the default impl of `fmt`. -pub trait Provenance: Copy + fmt::Debug + 'static { +pub trait Provenance: Copy + PartialEq + fmt::Debug + 'static { /// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address. /// - If `false`, the offset *must* be relative. This means the bytes representing a pointer are /// different from what the Abstract Machine prescribes, so the interpreter must prevent any @@ -79,7 +79,7 @@ pub trait Provenance: Copy + fmt::Debug + 'static { fn get_alloc_id(self) -> Option<AllocId>; /// Defines the 'join' of provenance: what happens when doing a pointer load and different bytes have different provenance. - fn join(left: Option<Self>, right: Option<Self>) -> Option<Self>; + fn join(left: Self, right: Self) -> Option<Self>; } /// The type of provenance in the compile-time interpreter. 
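Illustrative aside on the revised `join` contract above (standalone, and ignoring fragment indices for brevity): a pointer-sized read that only sees per-byte provenance keeps provenance exactly when every byte agrees; otherwise the result carries none.

fn join(left: char, right: char) -> Option<char> {
    // The new contract: joining succeeds only for equal provenance.
    if left == right { Some(left) } else { None }
}

fn provenance_of_read(bytes: &[char]) -> Option<char> {
    let mut provs = bytes.iter().copied();
    let first = provs.next()?;
    // Fold the remaining bytes' provenance into the first; any disagreement yields None.
    provs.try_fold(first, join)
}

fn main() {
    assert_eq!(provenance_of_read(&['A'; 8]), Some('A')); // all fragments agree
    assert_eq!(provenance_of_read(&['A', 'A', 'B', 'A', 'A', 'A', 'A', 'A']), None); // mixed
}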
@@ -192,8 +192,8 @@ impl Provenance for CtfeProvenance { Some(self.alloc_id()) } - fn join(_left: Option<Self>, _right: Option<Self>) -> Option<Self> { - panic!("merging provenance is not supported when `OFFSET_IS_ADDR` is false") + fn join(left: Self, right: Self) -> Option<Self> { + if left == right { Some(left) } else { None } } } @@ -224,8 +224,8 @@ impl Provenance for AllocId { Some(self) } - fn join(_left: Option<Self>, _right: Option<Self>) -> Option<Self> { - panic!("merging provenance is not supported when `OFFSET_IS_ADDR` is false") + fn join(_left: Self, _right: Self) -> Option<Self> { + unreachable!() } } diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index c55c7fc6002..c977e5329c2 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -115,48 +115,6 @@ impl MirPhase { MirPhase::Runtime(runtime_phase) => (3, 1 + runtime_phase as usize), } } - - /// Parses a `MirPhase` from a pair of strings. Panics if this isn't possible for any reason. - pub fn parse(dialect: String, phase: Option<String>) -> Self { - match &*dialect.to_ascii_lowercase() { - "built" => { - assert!(phase.is_none(), "Cannot specify a phase for `Built` MIR"); - MirPhase::Built - } - "analysis" => Self::Analysis(AnalysisPhase::parse(phase)), - "runtime" => Self::Runtime(RuntimePhase::parse(phase)), - _ => bug!("Unknown MIR dialect: '{}'", dialect), - } - } -} - -impl AnalysisPhase { - pub fn parse(phase: Option<String>) -> Self { - let Some(phase) = phase else { - return Self::Initial; - }; - - match &*phase.to_ascii_lowercase() { - "initial" => Self::Initial, - "post_cleanup" | "post-cleanup" | "postcleanup" => Self::PostCleanup, - _ => bug!("Unknown analysis phase: '{}'", phase), - } - } -} - -impl RuntimePhase { - pub fn parse(phase: Option<String>) -> Self { - let Some(phase) = phase else { - return Self::Initial; - }; - - match &*phase.to_ascii_lowercase() { - "initial" => Self::Initial, - "post_cleanup" | "post-cleanup" | "postcleanup" => Self::PostCleanup, - "optimized" => Self::Optimized, - _ => bug!("Unknown runtime phase: '{}'", phase), - } - } } /// Where a specific `mir::Body` comes from. 
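For reference only (plain enums rather than the compiler's types, and placeholder error strings where the deleted code called `bug!`): the `MirPhase::parse` helpers removed above accepted the spellings shown in the deleted match arms. A standalone equivalent:

#[derive(Debug, PartialEq)]
enum Phase {
    Built,
    Analysis(&'static str),
    Runtime(&'static str),
}

fn parse_phase(dialect: &str, phase: Option<&str>) -> Result<Phase, String> {
    // Normalize a phase name against an allowed list, defaulting to "initial".
    fn norm(phase: Option<&str>, allowed: &[&'static str]) -> Result<&'static str, String> {
        let p = phase.unwrap_or("initial").to_ascii_lowercase();
        let p = match p.as_str() {
            // The removed parser accepted all three spellings for the same phase.
            "post-cleanup" | "postcleanup" => "post_cleanup",
            other => other,
        };
        allowed.iter().copied().find(|&a| a == p).ok_or_else(|| format!("unknown phase: {p}"))
    }
    match dialect.to_ascii_lowercase().as_str() {
        "built" if phase.is_none() => Ok(Phase::Built),
        "built" => Err("cannot specify a phase for built MIR".to_string()),
        "analysis" => Ok(Phase::Analysis(norm(phase, &["initial", "post_cleanup"])?)),
        "runtime" => Ok(Phase::Runtime(norm(phase, &["initial", "post_cleanup", "optimized"])?)),
        other => Err(format!("unknown MIR dialect: {other}")),
    }
}

fn main() {
    assert_eq!(parse_phase("runtime", Some("post-cleanup")), Ok(Phase::Runtime("post_cleanup")));
    assert!(parse_phase("built", Some("initial")).is_err());
}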
diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index e5864660575..6b45b2dc3ff 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use std::fmt; use std::hash::Hash; -use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_data_structures::base_n::{BaseNString, CASE_INSENSITIVE, ToBaseN}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::FxIndexMap; @@ -10,9 +9,8 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHas use rustc_data_structures::unord::UnordMap; use rustc_hashes::Hash128; use rustc_hir::ItemId; -use rustc_hir::attrs::InlineAttr; +use rustc_hir::attrs::{InlineAttr, Linkage}; use rustc_hir::def_id::{CrateNum, DefId, DefIdSet, LOCAL_CRATE}; -use rustc_index::Idx; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; use rustc_query_system::ich::StableHashingContext; use rustc_session::config::OptLevel; @@ -337,7 +335,6 @@ impl ToStableHashKey<StableHashingContext<'_>> for MonoItem<'_> { pub struct MonoItemPartitions<'tcx> { pub codegen_units: &'tcx [CodegenUnit<'tcx>], pub all_mono_items: &'tcx DefIdSet, - pub autodiff_items: &'tcx [AutoDiffItem], } #[derive(Debug, HashStable)] @@ -369,22 +366,6 @@ pub struct MonoItemData { pub size_estimate: usize, } -/// Specifies the linkage type for a `MonoItem`. -/// -/// See <https://llvm.org/docs/LangRef.html#linkage-types> for more details about these variants. -#[derive(Copy, Clone, PartialEq, Debug, TyEncodable, TyDecodable, HashStable)] -pub enum Linkage { - External, - AvailableExternally, - LinkOnceAny, - LinkOnceODR, - WeakAny, - WeakODR, - Internal, - ExternalWeak, - Common, -} - /// Specifies the symbol visibility with regards to dynamic linking. /// /// Visibility doesn't have any effect when linkage is internal. @@ -526,45 +507,68 @@ impl<'tcx> CodegenUnit<'tcx> { tcx: TyCtxt<'tcx>, ) -> Vec<(MonoItem<'tcx>, MonoItemData)> { // The codegen tests rely on items being process in the same order as - // they appear in the file, so for local items, we sort by node_id first + // they appear in the file, so for local items, we sort by span first #[derive(PartialEq, Eq, PartialOrd, Ord)] - struct ItemSortKey<'tcx>(Option<usize>, SymbolName<'tcx>); - + struct ItemSortKey<'tcx>(Option<Span>, SymbolName<'tcx>); + + // We only want to take HirIds of user-defines instances into account. + // The others don't matter for the codegen tests and can even make item + // order unstable. + fn local_item_id<'tcx>(item: MonoItem<'tcx>) -> Option<DefId> { + match item { + MonoItem::Fn(ref instance) => match instance.def { + InstanceKind::Item(def) => def.as_local().map(|_| def), + InstanceKind::VTableShim(..) + | InstanceKind::ReifyShim(..) + | InstanceKind::Intrinsic(..) + | InstanceKind::FnPtrShim(..) + | InstanceKind::Virtual(..) + | InstanceKind::ClosureOnceShim { .. } + | InstanceKind::ConstructCoroutineInClosureShim { .. } + | InstanceKind::DropGlue(..) + | InstanceKind::CloneShim(..) + | InstanceKind::ThreadLocalShim(..) + | InstanceKind::FnPtrAddrShim(..) + | InstanceKind::AsyncDropGlue(..) + | InstanceKind::FutureDropPollShim(..) + | InstanceKind::AsyncDropGlueCtorShim(..) 
=> None, + }, + MonoItem::Static(def_id) => def_id.as_local().map(|_| def_id), + MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id.to_def_id()), + } + } fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'tcx> { ItemSortKey( - match item { - MonoItem::Fn(ref instance) => { - match instance.def { - // We only want to take HirIds of user-defined - // instances into account. The others don't matter for - // the codegen tests and can even make item order - // unstable. - InstanceKind::Item(def) => def.as_local().map(Idx::index), - InstanceKind::VTableShim(..) - | InstanceKind::ReifyShim(..) - | InstanceKind::Intrinsic(..) - | InstanceKind::FnPtrShim(..) - | InstanceKind::Virtual(..) - | InstanceKind::ClosureOnceShim { .. } - | InstanceKind::ConstructCoroutineInClosureShim { .. } - | InstanceKind::DropGlue(..) - | InstanceKind::CloneShim(..) - | InstanceKind::ThreadLocalShim(..) - | InstanceKind::FnPtrAddrShim(..) - | InstanceKind::AsyncDropGlue(..) - | InstanceKind::FutureDropPollShim(..) - | InstanceKind::AsyncDropGlueCtorShim(..) => None, - } - } - MonoItem::Static(def_id) => def_id.as_local().map(Idx::index), - MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id.index()), - }, + local_item_id(item) + .map(|def_id| tcx.def_span(def_id).find_ancestor_not_from_macro()) + .flatten(), item.symbol_name(tcx), ) } let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect(); - items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i)); + if !tcx.sess.opts.unstable_opts.codegen_source_order { + // In this case, we do not need to keep the items in any specific order, as the input + // is already deterministic. + // + // However, it seems that moving related things (such as different + // monomorphizations of the same function) close to one another is actually beneficial + // for LLVM performance. + // LLVM will codegen the items in the order we pass them to it, and when it handles + // similar things in succession, it seems that it leads to better cache utilization, + // less branch mispredictions and in general to better performance. + // For example, if we have functions `a`, `c::<u32>`, `b`, `c::<i16>`, `d` and + // `c::<bool>`, it seems that it helps LLVM's performance to codegen the three `c` + // instantiations right after one another, as they will likely reference similar types, + // call similar functions, etc. + // + // See https://github.com/rust-lang/rust/pull/145358 for more details. + // + // Sorting by symbol name should not incur any new non-determinism. + items.sort_by_cached_key(|&(i, _)| i.symbol_name(tcx)); + } else { + items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i)); + } items } diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 84abcf550d2..128ae8549f7 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -929,7 +929,10 @@ impl<'tcx> TerminatorKind<'tcx> { } Yield { value, resume_arg, .. } => write!(fmt, "{resume_arg:?} = yield({value:?})"), Unreachable => write!(fmt, "unreachable"), - Drop { place, .. } => write!(fmt, "drop({place:?})"), + Drop { place, async_fut: None, .. } => write!(fmt, "drop({place:?})"), + Drop { place, async_fut: Some(async_fut), .. } => { + write!(fmt, "async drop({place:?}; poll={async_fut:?})") + } Call { func, args, destination, .. 
} => { write!(fmt, "{destination:?} = ")?; write!(fmt, "{func:?}(")?; @@ -1789,7 +1792,7 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>( ascii.push('╼'); i += ptr_size; } - } else if let Some(prov) = alloc.provenance().get(i, &tcx) { + } else if let Some((prov, idx)) = alloc.provenance().get_byte(i, &tcx) { // Memory with provenance must be defined assert!( alloc.init_mask().is_range_initialized(alloc_range(i, Size::from_bytes(1))).is_ok() @@ -1799,7 +1802,7 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>( // Format is similar to "oversized" above. let j = i.bytes_usize(); let c = alloc.inspect_with_uninit_and_ptr_outside_interpreter(j..j + 1)[0]; - write!(w, "╾{c:02x}{prov:#?} (1 ptr byte)╼")?; + write!(w, "╾{c:02x}{prov:#?} (ptr fragment {idx})╼")?; i += Size::from_bytes(1); } else if alloc .init_mask() @@ -1932,7 +1935,7 @@ fn pretty_print_const_value_tcx<'tcx>( let args = tcx.lift(args).unwrap(); let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); p.print_alloc_ids = true; - p.print_value_path(variant_def.def_id, args)?; + p.pretty_print_value_path(variant_def.def_id, args)?; fmt.write_str(&p.into_buffer())?; match variant_def.ctor_kind() { @@ -1974,7 +1977,7 @@ fn pretty_print_const_value_tcx<'tcx>( (ConstValue::ZeroSized, ty::FnDef(d, s)) => { let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); p.print_alloc_ids = true; - p.print_value_path(*d, s)?; + p.pretty_print_value_path(*d, s)?; fmt.write_str(&p.into_buffer())?; return Ok(()); } diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs index a8a95c699d8..466b9c7a3c2 100644 --- a/compiler/rustc_middle/src/mir/query.rs +++ b/compiler/rustc_middle/src/mir/query.rs @@ -149,8 +149,15 @@ pub enum ConstraintCategory<'tcx> { /// A constraint that doesn't correspond to anything the user sees. Internal, - /// An internal constraint derived from an illegal universe relation. - IllegalUniverse, + /// An internal constraint added when a region outlives a placeholder + /// it cannot name and therefore has to outlive `'static`. The argument + /// is the unnameable placeholder and the constraint is always between + /// an SCC representative and `'static`. + OutlivesUnnameablePlaceholder( + #[type_foldable(identity)] + #[type_visitable(ignore)] + ty::RegionVid, + ), } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] diff --git a/compiler/rustc_middle/src/mir/statement.rs b/compiler/rustc_middle/src/mir/statement.rs index 683d7b48617..6e52bc775ef 100644 --- a/compiler/rustc_middle/src/mir/statement.rs +++ b/compiler/rustc_middle/src/mir/statement.rs @@ -160,7 +160,11 @@ impl<'tcx> PlaceTy<'tcx> { /// Convenience wrapper around `projection_ty_core` for `PlaceElem`, /// where we can just use the `Ty` that is already stored inline on /// field projection elems. - pub fn projection_ty(self, tcx: TyCtxt<'tcx>, elem: PlaceElem<'tcx>) -> PlaceTy<'tcx> { + pub fn projection_ty<V: ::std::fmt::Debug>( + self, + tcx: TyCtxt<'tcx>, + elem: ProjectionElem<V, Ty<'tcx>>, + ) -> PlaceTy<'tcx> { self.projection_ty_core(tcx, &elem, |ty| ty, |_, _, _, ty| ty, |ty| ty) } @@ -290,6 +294,36 @@ impl<V, T> ProjectionElem<V, T> { Self::UnwrapUnsafeBinder(..) => false, } } + + /// Returns the `ProjectionKind` associated to this projection. + pub fn kind(self) -> ProjectionKind { + self.try_map(|_| Some(()), |_| ()).unwrap() + } + + /// Apply functions to types and values in this projection and return the result. 
+ pub fn try_map<V2, T2>( + self, + v: impl FnOnce(V) -> Option<V2>, + t: impl FnOnce(T) -> T2, + ) -> Option<ProjectionElem<V2, T2>> { + Some(match self { + ProjectionElem::Deref => ProjectionElem::Deref, + ProjectionElem::Downcast(name, read_variant) => { + ProjectionElem::Downcast(name, read_variant) + } + ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, t(ty)), + ProjectionElem::ConstantIndex { offset, min_length, from_end } => { + ProjectionElem::ConstantIndex { offset, min_length, from_end } + } + ProjectionElem::Subslice { from, to, from_end } => { + ProjectionElem::Subslice { from, to, from_end } + } + ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(t(ty)), + ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(t(ty)), + ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(t(ty)), + ProjectionElem::Index(val) => ProjectionElem::Index(v(val)?), + }) + } } /// Alias for projections as they appear in `UserTypeProjection`, where we diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 6039a03aa29..3b8def67f92 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -1371,12 +1371,7 @@ pub enum Rvalue<'tcx> { /// Creates an array where each element is the value of the operand. /// - /// This is the cause of a bug in the case where the repetition count is zero because the value - /// is not dropped, see [#74836]. - /// /// Corresponds to source code like `[x; 32]`. - /// - /// [#74836]: https://github.com/rust-lang/rust/issues/74836 Repeat(Operand<'tcx>, ty::Const<'tcx>), /// Creates a reference of the indicated kind to the place. diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 42a68b29ec7..904d78d69b6 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -531,13 +531,20 @@ macro_rules! make_mir_visitor { unwind: _, replace: _, drop: _, - async_fut: _, + async_fut, } => { self.visit_place( place, PlaceContext::MutatingUse(MutatingUseContext::Drop), location ); + if let Some(async_fut) = async_fut { + self.visit_local( + $(&$mutability)? *async_fut, + PlaceContext::MutatingUse(MutatingUseContext::Borrow), + location + ); + } } TerminatorKind::Call { diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index a8b357bf105..ea62461ebeb 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -343,6 +343,7 @@ trivial! { rustc_span::Symbol, rustc_span::Ident, rustc_target::spec::PanicStrategy, + rustc_target::spec::SanitizerSet, rustc_type_ir::Variance, u32, usize, diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index f4d0120a2e7..7bd8a0525a2 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -100,7 +100,7 @@ use rustc_session::lint::LintExpectationId; use rustc_span::def_id::LOCAL_CRATE; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Span, Symbol}; -use rustc_target::spec::PanicStrategy; +use rustc_target::spec::{PanicStrategy, SanitizerSet}; use {rustc_abi as abi, rustc_ast as ast, rustc_hir as hir}; use crate::infer::canonical::{self, Canonical}; @@ -696,6 +696,22 @@ rustc_queries! { return_result_from_ensure_ok } + /// Used in case `mir_borrowck` fails to prove an obligation. 
We generally assume that + /// all goals we prove in MIR type check hold as we've already checked them in HIR typeck. + /// + /// However, we replace each free region in the MIR body with a unique region inference + /// variable. As we may rely on structural identity when proving goals this may cause a + /// goal to no longer hold. We store obligations for which this may happen during HIR + /// typeck in the `TypeckResults`. We then uniquify and reprove them in case MIR typeck + /// encounters an unexpected error. We expect this to result in an error when used and + /// delay a bug if it does not. + query check_potentially_region_dependent_goals(key: LocalDefId) -> Result<(), ErrorGuaranteed> { + desc { + |tcx| "reproving potentially region dependent HIR typeck goals for `{}", + tcx.def_path_str(key) + } + } + /// MIR after our optimization passes have run. This is MIR that is ready /// for codegen. This is also the only query that can fetch non-local MIR, at present. query optimized_mir(key: DefId) -> &'tcx mir::Body<'tcx> { @@ -1116,6 +1132,11 @@ rustc_queries! { } /// Unsafety-check this `LocalDefId`. + query check_transmutes(key: LocalDefId) { + desc { |tcx| "check transmute calls inside `{}`", tcx.def_path_str(key) } + } + + /// Unsafety-check this `LocalDefId`. query check_unsafety(key: LocalDefId) { desc { |tcx| "unsafety-checking `{}`", tcx.def_path_str(key) } } @@ -2686,6 +2707,16 @@ rustc_queries! { desc { |tcx| "looking up anon const kind of `{}`", tcx.def_path_str(def_id) } separate_provide_extern } + + /// Checks for the nearest `#[sanitize(xyz = "off")]` or + /// `#[sanitize(xyz = "on")]` on this def and any enclosing defs, up to the + /// crate root. + /// + /// Returns the set of sanitizers that is explicitly disabled for this def. + query disabled_sanitizers_for(key: LocalDefId) -> SanitizerSet { + desc { |tcx| "checking what set of sanitizers are enabled on `{}`", tcx.def_path_str(key) } + feedable + } } rustc_with_all_queries! { define_callbacks! 
} diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index a7ac3442898..546791135ba 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -645,34 +645,29 @@ impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> { let parent = Option::<LocalDefId>::decode(self); let tag: u8 = Decodable::decode(self); - if tag == TAG_PARTIAL_SPAN { - return Span::new(BytePos(0), BytePos(0), ctxt, parent); - } else if tag == TAG_RELATIVE_SPAN { - let dlo = u32::decode(self); - let dto = u32::decode(self); - - let enclosing = self.tcx.source_span_untracked(parent.unwrap()).data_untracked(); - let span = Span::new( - enclosing.lo + BytePos::from_u32(dlo), - enclosing.lo + BytePos::from_u32(dto), - ctxt, - parent, - ); - - return span; - } else { - debug_assert_eq!(tag, TAG_FULL_SPAN); - } - - let file_lo_index = SourceFileIndex::decode(self); - let line_lo = usize::decode(self); - let col_lo = RelativeBytePos::decode(self); - let len = BytePos::decode(self); - - let file_lo = self.file_index_to_file(file_lo_index); - let lo = file_lo.lines()[line_lo - 1] + col_lo; - let lo = file_lo.absolute_position(lo); - let hi = lo + len; + let (lo, hi) = match tag { + TAG_PARTIAL_SPAN => (BytePos(0), BytePos(0)), + TAG_RELATIVE_SPAN => { + let dlo = u32::decode(self); + let dto = u32::decode(self); + + let enclosing = self.tcx.source_span_untracked(parent.unwrap()).data_untracked(); + (enclosing.lo + BytePos::from_u32(dlo), enclosing.lo + BytePos::from_u32(dto)) + } + TAG_FULL_SPAN => { + let file_lo_index = SourceFileIndex::decode(self); + let line_lo = usize::decode(self); + let col_lo = RelativeBytePos::decode(self); + let len = BytePos::decode(self); + + let file_lo = self.file_index_to_file(file_lo_index); + let lo = file_lo.lines()[line_lo - 1] + col_lo; + let lo = file_lo.absolute_position(lo); + let hi = lo + len; + (lo, hi) + } + _ => unreachable!(), + }; Span::new(lo, hi, ctxt, parent) } diff --git a/compiler/rustc_middle/src/thir.rs b/compiler/rustc_middle/src/thir.rs index 3dd6d2c8928..f1d19800a78 100644 --- a/compiler/rustc_middle/src/thir.rs +++ b/compiler/rustc_middle/src/thir.rs @@ -832,17 +832,17 @@ pub enum PatKind<'tcx> { }, /// One of the following: - /// * `&str` (represented as a valtree), which will be handled as a string pattern and thus + /// * `&str`, which will be handled as a string pattern and thus /// exhaustiveness checking will detect if you use the same string twice in different /// patterns. - /// * integer, bool, char or float (represented as a valtree), which will be handled by + /// * integer, bool, char or float, which will be handled by /// exhaustiveness to cover exactly its own value, similar to `&str`, but these values are /// much simpler. /// * raw pointers derived from integers, other raw pointers will have already resulted in an // error. /// * `String`, if `string_deref_patterns` is enabled. 
Constant { - value: mir::Const<'tcx>, + value: ty::Value<'tcx>, }, /// Pattern obtained by converting a constant (inline or named) to its pattern @@ -935,7 +935,7 @@ impl<'tcx> PatRange<'tcx> { let lo_is_min = match self.lo { PatRangeBoundary::NegInfinity => true, PatRangeBoundary::Finite(value) => { - let lo = value.try_to_bits(size).unwrap() ^ bias; + let lo = value.try_to_scalar_int().unwrap().to_bits(size) ^ bias; lo <= min } PatRangeBoundary::PosInfinity => false, @@ -944,7 +944,7 @@ impl<'tcx> PatRange<'tcx> { let hi_is_max = match self.hi { PatRangeBoundary::NegInfinity => false, PatRangeBoundary::Finite(value) => { - let hi = value.try_to_bits(size).unwrap() ^ bias; + let hi = value.try_to_scalar_int().unwrap().to_bits(size) ^ bias; hi > max || hi == max && self.end == RangeEnd::Included } PatRangeBoundary::PosInfinity => true, @@ -957,22 +957,17 @@ impl<'tcx> PatRange<'tcx> { } #[inline] - pub fn contains( - &self, - value: mir::Const<'tcx>, - tcx: TyCtxt<'tcx>, - typing_env: ty::TypingEnv<'tcx>, - ) -> Option<bool> { + pub fn contains(&self, value: ty::Value<'tcx>, tcx: TyCtxt<'tcx>) -> Option<bool> { use Ordering::*; - debug_assert_eq!(self.ty, value.ty()); + debug_assert_eq!(value.ty, self.ty); let ty = self.ty; - let value = PatRangeBoundary::Finite(value); + let value = PatRangeBoundary::Finite(value.valtree); // For performance, it's important to only do the second comparison if necessary. Some( - match self.lo.compare_with(value, ty, tcx, typing_env)? { + match self.lo.compare_with(value, ty, tcx)? { Less | Equal => true, Greater => false, - } && match value.compare_with(self.hi, ty, tcx, typing_env)? { + } && match value.compare_with(self.hi, ty, tcx)? { Less => true, Equal => self.end == RangeEnd::Included, Greater => false, @@ -981,21 +976,16 @@ impl<'tcx> PatRange<'tcx> { } #[inline] - pub fn overlaps( - &self, - other: &Self, - tcx: TyCtxt<'tcx>, - typing_env: ty::TypingEnv<'tcx>, - ) -> Option<bool> { + pub fn overlaps(&self, other: &Self, tcx: TyCtxt<'tcx>) -> Option<bool> { use Ordering::*; debug_assert_eq!(self.ty, other.ty); // For performance, it's important to only do the second comparison if necessary. Some( - match other.lo.compare_with(self.hi, self.ty, tcx, typing_env)? { + match other.lo.compare_with(self.hi, self.ty, tcx)? { Less => true, Equal => self.end == RangeEnd::Included, Greater => false, - } && match self.lo.compare_with(other.hi, self.ty, tcx, typing_env)? { + } && match self.lo.compare_with(other.hi, self.ty, tcx)? { Less => true, Equal => other.end == RangeEnd::Included, Greater => false, @@ -1006,11 +996,13 @@ impl<'tcx> PatRange<'tcx> { impl<'tcx> fmt::Display for PatRange<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let PatRangeBoundary::Finite(value) = &self.lo { + if let &PatRangeBoundary::Finite(valtree) = &self.lo { + let value = ty::Value { ty: self.ty, valtree }; write!(f, "{value}")?; } - if let PatRangeBoundary::Finite(value) = &self.hi { + if let &PatRangeBoundary::Finite(valtree) = &self.hi { write!(f, "{}", self.end)?; + let value = ty::Value { ty: self.ty, valtree }; write!(f, "{value}")?; } else { // `0..` is parsed as an inclusive range, we must display it correctly. @@ -1024,7 +1016,8 @@ impl<'tcx> fmt::Display for PatRange<'tcx> { /// If present, the const must be of a numeric type. #[derive(Copy, Clone, Debug, PartialEq, HashStable, TypeVisitable)] pub enum PatRangeBoundary<'tcx> { - Finite(mir::Const<'tcx>), + /// The type of this valtree is stored in the surrounding `PatRange`. 
+ Finite(ty::ValTree<'tcx>), NegInfinity, PosInfinity, } @@ -1035,20 +1028,15 @@ impl<'tcx> PatRangeBoundary<'tcx> { matches!(self, Self::Finite(..)) } #[inline] - pub fn as_finite(self) -> Option<mir::Const<'tcx>> { + pub fn as_finite(self) -> Option<ty::ValTree<'tcx>> { match self { Self::Finite(value) => Some(value), Self::NegInfinity | Self::PosInfinity => None, } } - pub fn eval_bits( - self, - ty: Ty<'tcx>, - tcx: TyCtxt<'tcx>, - typing_env: ty::TypingEnv<'tcx>, - ) -> u128 { + pub fn to_bits(self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> u128 { match self { - Self::Finite(value) => value.eval_bits(tcx, typing_env), + Self::Finite(value) => value.try_to_scalar_int().unwrap().to_bits_unchecked(), Self::NegInfinity => { // Unwrap is ok because the type is known to be numeric. ty.numeric_min_and_max_as_bits(tcx).unwrap().0 @@ -1060,14 +1048,8 @@ impl<'tcx> PatRangeBoundary<'tcx> { } } - #[instrument(skip(tcx, typing_env), level = "debug", ret)] - pub fn compare_with( - self, - other: Self, - ty: Ty<'tcx>, - tcx: TyCtxt<'tcx>, - typing_env: ty::TypingEnv<'tcx>, - ) -> Option<Ordering> { + #[instrument(skip(tcx), level = "debug", ret)] + pub fn compare_with(self, other: Self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option<Ordering> { use PatRangeBoundary::*; match (self, other) { // When comparing with infinities, we must remember that `0u8..` and `0u8..=255` @@ -1095,8 +1077,8 @@ impl<'tcx> PatRangeBoundary<'tcx> { _ => {} } - let a = self.eval_bits(ty, tcx, typing_env); - let b = other.eval_bits(ty, tcx, typing_env); + let a = self.to_bits(ty, tcx); + let b = other.to_bits(ty, tcx); match ty.kind() { ty::Float(ty::FloatTy::F16) => { diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs index 5bdde3a514e..32f91bfba6b 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -760,6 +760,9 @@ pub enum DynCompatibilityViolation { // Supertrait has a non-lifetime `for<T>` binder. SupertraitNonLifetimeBinder(SmallVec<[Span; 1]>), + // Trait has a `const Trait` supertrait. + SupertraitConst(SmallVec<[Span; 1]>), + /// Method has something illegal. Method(Symbol, MethodViolationCode, Span), @@ -785,6 +788,9 @@ impl DynCompatibilityViolation { DynCompatibilityViolation::SupertraitNonLifetimeBinder(_) => { "where clause cannot reference non-lifetime `for<...>` variables".into() } + DynCompatibilityViolation::SupertraitConst(_) => { + "it cannot have a `const` supertrait".into() + } DynCompatibilityViolation::Method(name, MethodViolationCode::StaticMethod(_), _) => { format!("associated function `{name}` has no `self` parameter").into() } @@ -842,7 +848,8 @@ impl DynCompatibilityViolation { match self { DynCompatibilityViolation::SizedSelf(_) | DynCompatibilityViolation::SupertraitSelf(_) - | DynCompatibilityViolation::SupertraitNonLifetimeBinder(..) => { + | DynCompatibilityViolation::SupertraitNonLifetimeBinder(..) 
+ | DynCompatibilityViolation::SupertraitConst(_) => { DynCompatibilityViolationSolution::None } DynCompatibilityViolation::Method( @@ -873,15 +880,17 @@ impl DynCompatibilityViolation { match self { DynCompatibilityViolation::SupertraitSelf(spans) | DynCompatibilityViolation::SizedSelf(spans) - | DynCompatibilityViolation::SupertraitNonLifetimeBinder(spans) => spans.clone(), + | DynCompatibilityViolation::SupertraitNonLifetimeBinder(spans) + | DynCompatibilityViolation::SupertraitConst(spans) => spans.clone(), DynCompatibilityViolation::AssocConst(_, span) | DynCompatibilityViolation::GAT(_, span) - | DynCompatibilityViolation::Method(_, _, span) - if *span != DUMMY_SP => - { - smallvec![*span] + | DynCompatibilityViolation::Method(_, _, span) => { + if *span != DUMMY_SP { + smallvec![*span] + } else { + smallvec![] + } } - _ => smallvec![], } } } diff --git a/compiler/rustc_middle/src/ty/consts/valtree.rs b/compiler/rustc_middle/src/ty/consts/valtree.rs index d95006dcf4a..a14e47d7082 100644 --- a/compiler/rustc_middle/src/ty/consts/valtree.rs +++ b/compiler/rustc_middle/src/ty/consts/valtree.rs @@ -2,10 +2,12 @@ use std::fmt; use std::ops::Deref; use rustc_data_structures::intern::Interned; +use rustc_hir::def::Namespace; use rustc_macros::{HashStable, Lift, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use super::ScalarInt; use crate::mir::interpret::{ErrorHandled, Scalar}; +use crate::ty::print::{FmtPrinter, PrettyPrinter}; use crate::ty::{self, Ty, TyCtxt}; /// This datastructure is used to represent the value of constants used in the type system. @@ -133,6 +135,8 @@ pub type ConstToValTreeResult<'tcx> = Result<Result<ValTree<'tcx>, Ty<'tcx>>, Er /// A type-level constant value. /// /// Represents a typed, fully evaluated constant. +/// Note that this is also used by pattern elaboration to represent values which cannot occur in types, +/// such as raw pointers and floats. 
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)] #[derive(HashStable, TyEncodable, TyDecodable, TypeFoldable, TypeVisitable, Lift)] pub struct Value<'tcx> { @@ -203,3 +207,14 @@ impl<'tcx> rustc_type_ir::inherent::ValueConst<TyCtxt<'tcx>> for Value<'tcx> { self.valtree } } + +impl<'tcx> fmt::Display for Value<'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ty::tls::with(move |tcx| { + let cv = tcx.lift(*self).unwrap(); + let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); + p.pretty_print_const_valtree(cv, /*print_ty*/ true)?; + f.write_str(&p.into_buffer()) + }) + } +} diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 49a4733de3b..4990a85466e 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -33,7 +33,7 @@ use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, LintDiagnostic, LintEmitter, MultiSpan, }; use rustc_hir::attrs::AttributeKind; -use rustc_hir::def::{CtorKind, DefKind}; +use rustc_hir::def::{CtorKind, CtorOf, DefKind}; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId}; use rustc_hir::definitions::{DefPathData, Definitions, DisambiguatorState}; use rustc_hir::intravisit::VisitorExt; @@ -445,7 +445,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> { } fn fn_is_const(self, def_id: DefId) -> bool { - debug_assert_matches!(self.def_kind(def_id), DefKind::Fn | DefKind::AssocFn); + debug_assert_matches!( + self.def_kind(def_id), + DefKind::Fn | DefKind::AssocFn | DefKind::Ctor(CtorOf::Struct, CtorKind::Fn) + ); self.is_conditionally_const(def_id) } @@ -1418,6 +1421,8 @@ pub struct TyCtxt<'tcx> { } impl<'tcx> LintEmitter for TyCtxt<'tcx> { + type Id = HirId; + fn emit_node_span_lint( self, lint: &'static Lint, diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs index b3042904a29..b9a6f67ab0d 100644 --- a/compiler/rustc_middle/src/ty/diagnostics.rs +++ b/compiler/rustc_middle/src/ty/diagnostics.rs @@ -23,7 +23,7 @@ impl IntoDiagArg for Ty<'_> { fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> rustc_errors::DiagArgValue { ty::tls::with(|tcx| { let ty = tcx.short_string(self, path); - rustc_errors::DiagArgValue::Str(std::borrow::Cow::Owned(ty)) + DiagArgValue::Str(std::borrow::Cow::Owned(ty)) }) } } @@ -32,7 +32,7 @@ impl IntoDiagArg for Instance<'_> { fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> rustc_errors::DiagArgValue { ty::tls::with(|tcx| { let instance = tcx.short_string_namespace(self, path, Namespace::ValueNS); - rustc_errors::DiagArgValue::Str(std::borrow::Cow::Owned(instance)) + DiagArgValue::Str(std::borrow::Cow::Owned(instance)) }) } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 73e1661106e..e567ba05f61 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -32,7 +32,7 @@ use rustc_data_structures::intern::Interned; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::steal::Steal; use rustc_data_structures::unord::{UnordMap, UnordSet}; -use rustc_errors::{Diag, ErrorGuaranteed}; +use rustc_errors::{Diag, ErrorGuaranteed, LintBuffer}; use rustc_hir::attrs::{AttributeKind, StrippedCfgItem}; use rustc_hir::def::{CtorKind, CtorOf, DefKind, DocLinkResMap, LifetimeRes, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap}; @@ -46,7 +46,6 @@ use rustc_macros::{ }; use 
rustc_query_system::ich::StableHashingContext; use rustc_serialize::{Decodable, Encodable}; -use rustc_session::lint::LintBuffer; pub use rustc_session::lint::RegisteredTools; use rustc_span::hygiene::MacroKind; use rustc_span::{DUMMY_SP, ExpnId, ExpnKind, Ident, Span, Symbol, sym}; @@ -830,14 +829,15 @@ impl<'tcx> OpaqueHiddenType<'tcx> { // Convert the type from the function into a type valid outside by mapping generic // parameters to into the context of the opaque. // - // We erase regions when doing this during HIR typeck. + // We erase regions when doing this during HIR typeck. We manually use `fold_regions` + // here as we do not want to anonymize bound variables. let this = match defining_scope_kind { - DefiningScopeKind::HirTypeck => tcx.erase_regions(self), + DefiningScopeKind::HirTypeck => fold_regions(tcx, self, |_, _| tcx.lifetimes.re_erased), DefiningScopeKind::MirBorrowck => self, }; let result = this.fold_with(&mut opaque_types::ReverseMapper::new(tcx, map, self.span)); if cfg!(debug_assertions) && matches!(defining_scope_kind, DefiningScopeKind::HirTypeck) { - assert_eq!(result.ty, tcx.erase_regions(result.ty)); + assert_eq!(result.ty, fold_regions(tcx, result.ty, |_, _| tcx.lifetimes.re_erased)); } result } @@ -1925,21 +1925,50 @@ impl<'tcx> TyCtxt<'tcx> { self.impl_trait_ref(def_id).map(|tr| tr.skip_binder().def_id) } - /// If the given `DefId` is an associated item, returns the `DefId` of the parent trait or impl. - pub fn assoc_parent(self, def_id: DefId) -> Option<DefId> { - self.def_kind(def_id).is_assoc().then(|| self.parent(def_id)) + /// If the given `DefId` is an associated item, returns the `DefId` and `DefKind` of the parent trait or impl. + pub fn assoc_parent(self, def_id: DefId) -> Option<(DefId, DefKind)> { + if !self.def_kind(def_id).is_assoc() { + return None; + } + let parent = self.parent(def_id); + let def_kind = self.def_kind(parent); + Some((parent, def_kind)) } /// If the given `DefId` is an associated item of a trait, /// returns the `DefId` of the trait; otherwise, returns `None`. pub fn trait_of_assoc(self, def_id: DefId) -> Option<DefId> { - self.assoc_parent(def_id).filter(|id| self.def_kind(id) == DefKind::Trait) + match self.assoc_parent(def_id) { + Some((id, DefKind::Trait)) => Some(id), + _ => None, + } } /// If the given `DefId` is an associated item of an impl, /// returns the `DefId` of the impl; otherwise returns `None`. pub fn impl_of_assoc(self, def_id: DefId) -> Option<DefId> { - self.assoc_parent(def_id).filter(|id| matches!(self.def_kind(id), DefKind::Impl { .. })) + match self.assoc_parent(def_id) { + Some((id, DefKind::Impl { .. })) => Some(id), + _ => None, + } + } + + /// If the given `DefId` is an associated item of an inherent impl, + /// returns the `DefId` of the impl; otherwise, returns `None`. + pub fn inherent_impl_of_assoc(self, def_id: DefId) -> Option<DefId> { + match self.assoc_parent(def_id) { + Some((id, DefKind::Impl { of_trait: false })) => Some(id), + _ => None, + } + } + + /// If the given `DefId` is an associated item of a trait impl, + /// returns the `DefId` of the impl; otherwise, returns `None`. 
+ pub fn trait_impl_of_assoc(self, def_id: DefId) -> Option<DefId> { + match self.assoc_parent(def_id) { + Some((id, DefKind::Impl { of_trait: true })) => Some(id), + _ => None, + } } pub fn is_exportable(self, def_id: DefId) -> bool { diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index efa017074db..9e6f277ef77 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -19,18 +19,16 @@ pub trait Print<'tcx, P> { fn print(&self, p: &mut P) -> Result<(), PrintError>; } -/// Interface for outputting user-facing "type-system entities" -/// (paths, types, lifetimes, constants, etc.) as a side-effect -/// (e.g. formatting, like `PrettyPrinter` implementors do) or by -/// constructing some alternative representation (e.g. an AST), -/// which the associated types allow passing through the methods. -/// -/// For pretty-printing/formatting in particular, see `PrettyPrinter`. -// -// FIXME(eddyb) find a better name; this is more general than "printing". +/// A trait that "prints" user-facing type system entities: paths, types, lifetimes, constants, +/// etc. "Printing" here means building up a representation of the entity's path, usually as a +/// `String` (e.g. "std::io::Read") or a `Vec<Symbol>` (e.g. `[sym::std, sym::io, sym::Read]`). The +/// representation is built up by appending one or more pieces. The specific details included in +/// the built-up representation depend on the purpose of the printer. The more advanced printers +/// also rely on the `PrettyPrinter` sub-trait. pub trait Printer<'tcx>: Sized { fn tcx<'a>(&'a self) -> TyCtxt<'tcx>; + /// Appends a representation of an entity with a normal path, e.g. "std::io::Read". fn print_def_path( &mut self, def_id: DefId, @@ -39,6 +37,7 @@ pub trait Printer<'tcx>: Sized { self.default_print_def_path(def_id, args) } + /// Like `print_def_path`, but for `DefPathData::Impl`. fn print_impl_path( &mut self, impl_def_id: DefId, @@ -64,46 +63,74 @@ pub trait Printer<'tcx>: Sized { self.default_print_impl_path(impl_def_id, self_ty, impl_trait_ref) } + /// Appends a representation of a region. fn print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), PrintError>; + /// Appends a representation of a type. fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError>; + /// Appends a representation of a list of `PolyExistentialPredicate`s. fn print_dyn_existential( &mut self, predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>, ) -> Result<(), PrintError>; + /// Appends a representation of a const. fn print_const(&mut self, ct: ty::Const<'tcx>) -> Result<(), PrintError>; - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError>; + /// Appends a representation of a crate name, e.g. `std`, or even ``. + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError>; - fn path_qualified( + /// Appends a representation of a (full or partial) simple path, in two parts. `print_prefix`, + /// when called, appends the representation of the leading segments. The rest of the method + /// appends the representation of the final segment, the details of which are in + /// `disambiguated_data`. + /// + /// E.g. `std::io` + `Read` -> `std::io::Read`. 
+ fn print_path_with_simple( &mut self, - self_ty: Ty<'tcx>, - trait_ref: Option<ty::TraitRef<'tcx>>, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, + disambiguated_data: &DisambiguatedDefPathData, ) -> Result<(), PrintError>; - fn path_append_impl( + /// Similar to `print_path_with_simple`, but the final segment is an `impl` segment. + /// + /// E.g. `slice` + `<impl [T]>` -> `slice::<impl [T]>`, which may then be further appended to, + /// giving a longer path representation such as `slice::<impl [T]>::to_vec_in::ConvertVec`. + fn print_path_with_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError>; - fn path_append( + /// Appends a representation of a path ending in generic args, in two parts. `print_prefix`, + /// when called, appends the leading segments. The rest of the method appends the + /// representation of the generic args. (Some printers choose to skip appending the generic + /// args.) + /// + /// E.g. `ImplementsTraitForUsize` + `<usize>` -> `ImplementsTraitForUsize<usize>`. + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - disambiguated_data: &DisambiguatedDefPathData, + args: &[GenericArg<'tcx>], ) -> Result<(), PrintError>; - fn path_generic_args( + /// Appends a representation of a qualified path segment, e.g. `<OsString as From<&T>>`. + /// If `trait_ref` is `None`, it may fall back to simpler forms, e.g. `<Vec<T>>` or just `Foo`. + fn print_path_with_qualified( &mut self, - print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - args: &[GenericArg<'tcx>], + self_ty: Ty<'tcx>, + trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError>; - fn should_truncate(&mut self) -> bool { - false + fn print_coroutine_with_kind( + &mut self, + def_id: DefId, + parent_args: &'tcx [GenericArg<'tcx>], + kind: Ty<'tcx>, + ) -> Result<(), PrintError> { + self.print_path_with_generic_args(|p| p.print_def_path(def_id, parent_args), &[kind.into()]) } // Defaults (should not be overridden): @@ -120,7 +147,7 @@ pub trait Printer<'tcx>: Sized { match key.disambiguated_data.data { DefPathData::CrateRoot => { assert!(key.parent.is_none()); - self.path_crate(def_id.krate) + self.print_crate_name(def_id.krate) } DefPathData::Impl => self.print_impl_path(def_id, args), @@ -144,9 +171,10 @@ pub trait Printer<'tcx>: Sized { )) = self.tcx().coroutine_kind(def_id) && args.len() > parent_args.len() { - return self.path_generic_args( - |p| p.print_def_path(def_id, parent_args), - &args[..parent_args.len() + 1][..1], + return self.print_coroutine_with_kind( + def_id, + parent_args, + args[parent_args.len()].expect_ty(), ); } else { // Closures' own generics are only captures, don't print them. 
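The renamed `print_path_with_*` methods above all share the same prefix-callback shape: the caller passes a closure that prints the leading segments, and the method then appends the final segment (e.g. `std::io` + `Read` -> `std::io::Read`). A rough toy illustration of that shape, using a hypothetical `PathPrinter` that is not the rustc trait:

struct PathPrinter {
    out: String,
}

impl PathPrinter {
    // Same shape as `print_path_with_simple` documented above: first run the
    // prefix callback, then append `::` and the final segment.
    fn print_path_with_simple(&mut self, print_prefix: impl FnOnce(&mut Self), segment: &str) {
        print_prefix(self);
        if !self.out.is_empty() {
            self.out.push_str("::");
        }
        self.out.push_str(segment);
    }
}

fn main() {
    let mut p = PathPrinter { out: String::new() };
    p.print_path_with_simple(
        |p| p.print_path_with_simple(|p| p.out.push_str("std"), "io"),
        "Read",
    );
    assert_eq!(p.out, "std::io::Read");
}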
@@ -166,7 +194,7 @@ pub trait Printer<'tcx>: Sized { _ => { if !generics.is_own_empty() && args.len() >= generics.count() { let args = generics.own_args_no_defaults(self.tcx(), args); - return self.path_generic_args( + return self.print_path_with_generic_args( |p| p.print_def_path(def_id, parent_args), args, ); @@ -182,7 +210,7 @@ pub trait Printer<'tcx>: Sized { && self.tcx().generics_of(parent_def_id).parent_count == 0; } - self.path_append( + self.print_path_with_simple( |p: &mut Self| { if trait_qualify_parent { let trait_ref = ty::TraitRef::new( @@ -190,7 +218,7 @@ pub trait Printer<'tcx>: Sized { parent_def_id, parent_args.iter().copied(), ); - p.path_qualified(trait_ref.self_ty(), Some(trait_ref)) + p.print_path_with_qualified(trait_ref.self_ty(), Some(trait_ref)) } else { p.print_def_path(parent_def_id, parent_args) } @@ -233,11 +261,15 @@ pub trait Printer<'tcx>: Sized { // If the impl is not co-located with either self-type or // trait-type, then fallback to a format that identifies // the module more clearly. - self.path_append_impl(|p| p.print_def_path(parent_def_id, &[]), self_ty, impl_trait_ref) + self.print_path_with_impl( + |p| p.print_def_path(parent_def_id, &[]), + self_ty, + impl_trait_ref, + ) } else { // Otherwise, try to give a good form that would be valid language // syntax. Preferably using associated item notation. - self.path_qualified(self_ty, impl_trait_ref) + self.print_path_with_qualified(self_ty, impl_trait_ref) } } } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 67244e767cb..74caee7336a 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -245,7 +245,7 @@ impl<'tcx> RegionHighlightMode<'tcx> { /// Trait for printers that pretty-print using `fmt::Write` to the printer. pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { /// Like `print_def_path` but for value paths. - fn print_value_path( + fn pretty_print_value_path( &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], @@ -253,7 +253,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { self.print_def_path(def_id, args) } - fn print_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError> + fn pretty_print_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError> where T: Print<'tcx, Self> + TypeFoldable<TyCtxt<'tcx>>, { @@ -333,10 +333,14 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { f: impl FnOnce(&mut Self) -> Result<(), PrintError>, ) -> Result<(), PrintError>; - /// Returns `true` if the region should be printed in - /// optional positions, e.g., `&'a T` or `dyn Tr + 'b`. - /// This is typically the case for all non-`'_` regions. - fn should_print_region(&self, region: ty::Region<'tcx>) -> bool; + fn should_truncate(&mut self) -> bool { + false + } + + /// Returns `true` if the region should be printed in optional positions, + /// e.g., `&'a T` or `dyn Tr + 'b`. (Regions like the one in `Cow<'static, T>` + /// will always be printed.) + fn should_print_optional_region(&self, region: ty::Region<'tcx>) -> bool; fn reset_type_limit(&mut self) {} @@ -470,7 +474,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // path to the crate followed by the path to the item within the crate. 
if let Some(cnum) = def_id.as_crate_root() { if cnum == LOCAL_CRATE { - self.path_crate(cnum)?; + self.print_crate_name(cnum)?; return Ok(true); } @@ -494,7 +498,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // or avoid ending up with `ExternCrateSource::Extern`, // for the injected `std`/`core`. if span.is_dummy() { - self.path_crate(cnum)?; + self.print_crate_name(cnum)?; return Ok(true); } @@ -508,13 +512,13 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { return Ok(true); } (ExternCrateSource::Path, LOCAL_CRATE) => { - self.path_crate(cnum)?; + self.print_crate_name(cnum)?; return Ok(true); } _ => {} }, None => { - self.path_crate(cnum)?; + self.print_crate_name(cnum)?; return Ok(true); } } @@ -624,7 +628,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { return Ok(false); } callers.push(visible_parent); - // HACK(eddyb) this bypasses `path_append`'s prefix printing to avoid + // HACK(eddyb) this bypasses `print_path_with_simple`'s prefix printing to avoid // knowing ahead of time whether the entire path will succeed or not. // To support printers that do not implement `PrettyPrinter`, a `Vec` or // linked list on the stack would need to be built, before any printing. @@ -633,11 +637,14 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { true => {} } callers.pop(); - self.path_append(|_| Ok(()), &DisambiguatedDefPathData { data, disambiguator: 0 })?; + self.print_path_with_simple( + |_| Ok(()), + &DisambiguatedDefPathData { data, disambiguator: 0 }, + )?; Ok(true) } - fn pretty_path_qualified( + fn pretty_print_path_with_qualified( &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, @@ -672,7 +679,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { }) } - fn pretty_path_append_impl( + fn pretty_print_path_with_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, @@ -710,7 +717,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } ty::Ref(r, ty, mutbl) => { write!(self, "&")?; - if self.should_print_region(r) { + if self.should_print_optional_region(r) { r.print(self)?; write!(self, " ")?; } @@ -739,7 +746,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } sig.print(self)?; write!(self, " {{")?; - self.print_value_path(def_id, args)?; + self.pretty_print_value_path(def_id, args)?; write!(self, "}}")?; } } @@ -778,7 +785,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { }, ty::Adt(def, args) => self.print_def_path(def.did(), args)?, ty::Dynamic(data, r, repr) => { - let print_r = self.should_print_region(r); + let print_r = self.should_print_optional_region(r); if print_r { write!(self, "(")?; } @@ -1308,10 +1315,10 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { alias_ty: ty::AliasTerm<'tcx>, ) -> Result<(), PrintError> { let def_key = self.tcx().def_key(alias_ty.def_id); - self.path_generic_args( + self.print_path_with_generic_args( |p| { - p.path_append( - |p| p.path_qualified(alias_ty.self_ty(), None), + p.print_path_with_simple( + |p| p.print_path_with_qualified(alias_ty.self_ty(), None), &def_key.disambiguated_data, ) }, @@ -1386,7 +1393,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if let ty::Tuple(tys) = principal.args.type_at(0).kind() { let mut projections = predicates.projection_bounds(); if let (Some(proj), None) = (projections.next(), projections.next()) { - p.pretty_fn_sig( + p.pretty_print_fn_sig( tys, false, proj.skip_binder().term.as_type().expect("Return type 
was a const"), @@ -1396,7 +1403,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } - // HACK(eddyb) this duplicates `FmtPrinter`'s `path_generic_args`, + // HACK(eddyb) this duplicates `FmtPrinter`'s `print_path_with_generic_args`, // in order to place the projections inside the `<...>`. if !resugared { let principal_with_self = @@ -1488,7 +1495,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { Ok(()) } - fn pretty_fn_sig( + fn pretty_print_fn_sig( &mut self, inputs: &[Ty<'tcx>], c_variadic: bool, @@ -1525,7 +1532,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, args }) => { match self.tcx().def_kind(def) { DefKind::Const | DefKind::AssocConst => { - self.print_value_path(def, args)?; + self.pretty_print_value_path(def, args)?; } DefKind::AnonConst => { if def.is_local() @@ -1534,13 +1541,13 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { { write!(self, "{snip}")?; } else { - // Do not call `print_value_path` as if a parent of this anon const is - // an impl it will attempt to print out the impl trait ref i.e. `<T as - // Trait>::{constant#0}`. This would cause printing to enter an - // infinite recursion if the anon const is in the self type i.e. - // `impl<T: Default> Default for [T; 32 - 1 - 1 - 1] {` where we would - // try to print - // `<[T; /* print constant#0 again */] as // Default>::{constant#0}`. + // Do not call `pretty_print_value_path` as if a parent of this anon + // const is an impl it will attempt to print out the impl trait ref + // i.e. `<T as Trait>::{constant#0}`. This would cause printing to + // enter an infinite recursion if the anon const is in the self type + // i.e. `impl<T: Default> Default for [T; 32 - 1 - 1 - 1] {` where we + // would try to print `<[T; /* print constant#0 again */] as // + // Default>::{constant#0}`. write!( self, "{}::{}", @@ -1742,7 +1749,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { self.tcx().try_get_global_alloc(prov.alloc_id()) { self.typed_value( - |this| this.print_value_path(instance.def_id(), instance.args), + |this| this.pretty_print_value_path(instance.def_id(), instance.args), |this| this.print_type(ty), " as ", )?; @@ -1936,7 +1943,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let variant_idx = contents.variant.expect("destructed const of adt without variant idx"); let variant_def = &def.variant(variant_idx); - self.print_value_path(variant_def.def_id, args)?; + self.pretty_print_value_path(variant_def.def_id, args)?; match variant_def.ctor_kind() { Some(CtorKind::Const) => {} Some(CtorKind::Fn) => { @@ -1972,7 +1979,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } (_, ty::FnDef(def_id, args)) => { // Never allowed today, but we still encounter them in invalid const args. - self.print_value_path(def_id, args)?; + self.pretty_print_value_path(def_id, args)?; return Ok(()); } // FIXME(oli-obk): also pretty print arrays and other aggregate constants by reading @@ -1993,7 +2000,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { Ok(()) } - fn pretty_closure_as_impl( + fn pretty_print_closure_as_impl( &mut self, closure: ty::ClosureArgs<TyCtxt<'tcx>>, ) -> Result<(), PrintError> { @@ -2131,8 +2138,6 @@ impl<'a, 'tcx> FmtPrinter<'a, 'tcx> { } } -// HACK(eddyb) get rid of `def_path_str` and/or pass `Namespace` explicitly always -// (but also some things just print a `DefId` generally so maybe we need this?) 
fn guess_def_namespace(tcx: TyCtxt<'_>, def_id: DefId) -> Namespace { match tcx.def_key(def_id).disambiguated_data.data { DefPathData::TypeNs(..) | DefPathData::CrateRoot | DefPathData::OpaqueTy => { @@ -2157,6 +2162,7 @@ impl<'t> TyCtxt<'t> { self.def_path_str_with_args(def_id, &[]) } + /// For this one we determine the appropriate namespace for the `def_id`. pub fn def_path_str_with_args( self, def_id: impl IntoQueryParam<DefId>, @@ -2169,16 +2175,17 @@ impl<'t> TyCtxt<'t> { FmtPrinter::print_string(self, ns, |p| p.print_def_path(def_id, args)).unwrap() } + /// For this one we always use value namespace. pub fn value_path_str_with_args( self, def_id: impl IntoQueryParam<DefId>, args: &'t [GenericArg<'t>], ) -> String { let def_id = def_id.into_query_param(); - let ns = guess_def_namespace(self, def_id); + let ns = Namespace::ValueNS; debug!("value_path_str: def_id={:?}, ns={:?}", def_id, ns); - FmtPrinter::print_string(self, ns, |p| p.print_value_path(def_id, args)).unwrap() + FmtPrinter::print_string(self, ns, |p| p.print_def_path(def_id, args)).unwrap() } } @@ -2230,7 +2237,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { self.print_def_path(parent_def_id, &[])?; - // HACK(eddyb) copy of `path_append` to avoid + // HACK(eddyb) copy of `print_path_with_simple` to avoid // constructing a `DisambiguatedDefPathData`. if !self.empty_path { write!(self, "::")?; @@ -2295,10 +2302,6 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { } } - fn should_truncate(&mut self) -> bool { - !self.type_length_limit.value_within_limit(self.printed_type_count) - } - fn print_dyn_existential( &mut self, predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>, @@ -2310,7 +2313,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { self.pretty_print_const(ct, false) } - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.empty_path = true; if cnum == LOCAL_CRATE { if self.tcx.sess.at_least_rust_2018() { @@ -2327,23 +2330,23 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { Ok(()) } - fn path_qualified( + fn print_path_with_qualified( &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError> { - self.pretty_path_qualified(self_ty, trait_ref)?; + self.pretty_print_path_with_qualified(self_ty, trait_ref)?; self.empty_path = false; Ok(()) } - fn path_append_impl( + fn print_path_with_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError> { - self.pretty_path_append_impl( + self.pretty_print_path_with_impl( |p| { print_prefix(p)?; if !p.empty_path { @@ -2359,7 +2362,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { Ok(()) } - fn path_append( + fn print_path_with_simple( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, @@ -2390,7 +2393,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { Ok(()) } - fn path_generic_args( + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], @@ -2421,7 +2424,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { self.0.const_infer_name_resolver.as_ref().and_then(|func| func(id)) } - fn print_value_path( + fn pretty_print_value_path( &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], @@ -2433,7 +2436,7 @@ impl<'tcx> PrettyPrinter<'tcx> 
for FmtPrinter<'_, 'tcx> { Ok(()) } - fn print_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError> + fn pretty_print_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError> where T: Print<'tcx, Self> + TypeFoldable<TyCtxt<'tcx>>, { @@ -2487,7 +2490,11 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { Ok(()) } - fn should_print_region(&self, region: ty::Region<'tcx>) -> bool { + fn should_truncate(&mut self) -> bool { + !self.type_length_limit.value_within_limit(self.printed_type_count) + } + + fn should_print_optional_region(&self, region: ty::Region<'tcx>) -> bool { let highlight = self.region_highlight_mode; if highlight.region_highlighted(region).is_some() { return true; @@ -2892,7 +2899,7 @@ where T: Print<'tcx, P> + TypeFoldable<TyCtxt<'tcx>>, { fn print(&self, p: &mut P) -> Result<(), PrintError> { - p.print_in_binder(self) + p.pretty_print_in_binder(self) } } @@ -3090,7 +3097,7 @@ define_print! { } write!(p, "fn")?; - p.pretty_fn_sig(self.inputs(), self.c_variadic, self.output())?; + p.pretty_print_fn_sig(self.inputs(), self.c_variadic, self.output())?; } ty::TraitRef<'tcx> { @@ -3225,7 +3232,7 @@ define_print! { // The args don't contain the self ty (as it has been erased) but the corresp. // generics do as the trait always has a self ty param. We need to offset. let args = &self.args[p.tcx().generics_of(self.def_id).parent_count - 1..]; - p.path_generic_args(|p| write!(p, "{name}"), args)?; + p.print_path_with_generic_args(|p| write!(p, "{name}"), args)?; write!(p, " = ")?; self.term.print(p)?; } @@ -3314,7 +3321,7 @@ define_print_and_forward_display! { } PrintClosureAsImpl<'tcx> { - p.pretty_closure_as_impl(self.closure)?; + p.pretty_print_closure_as_impl(self.closure)?; } ty::ParamTy { diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 0e2aff6f9bd..89ef46b1ae5 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -12,7 +12,6 @@ use rustc_hir::def_id::LocalDefId; use rustc_span::source_map::Spanned; use rustc_type_ir::{ConstKind, TypeFolder, VisitorResult, try_visit}; -use super::print::PrettyPrinter; use super::{GenericArg, GenericArgKind, Pattern, Region}; use crate::mir::PlaceElem; use crate::ty::print::{FmtPrinter, Printer, with_no_trimmed_paths}; @@ -168,15 +167,11 @@ impl<'tcx> fmt::Debug for ty::Const<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // If this is a value, we spend some effort to make it look nice. if let ConstKind::Value(cv) = self.kind() { - return ty::tls::with(move |tcx| { - let cv = tcx.lift(cv).unwrap(); - let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); - p.pretty_print_const_valtree(cv, /*print_ty*/ true)?; - f.write_str(&p.into_buffer()) - }); + write!(f, "{}", cv) + } else { + // Fall back to something verbose. + write!(f, "{:?}", self.kind()) } - // Fall back to something verbose. 
- write!(f, "{:?}", self.kind()) } } diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 72474a60566..755fc68d86f 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -821,10 +821,38 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn new_coroutine_witness( tcx: TyCtxt<'tcx>, - id: DefId, + def_id: DefId, args: GenericArgsRef<'tcx>, ) -> Ty<'tcx> { - Ty::new(tcx, CoroutineWitness(id, args)) + if cfg!(debug_assertions) { + tcx.debug_assert_args_compatible(tcx.typeck_root_def_id(def_id), args); + } + Ty::new(tcx, CoroutineWitness(def_id, args)) + } + + pub fn new_coroutine_witness_for_coroutine( + tcx: TyCtxt<'tcx>, + def_id: DefId, + coroutine_args: GenericArgsRef<'tcx>, + ) -> Ty<'tcx> { + tcx.debug_assert_args_compatible(def_id, coroutine_args); + // HACK: Coroutine witness types are lifetime erased, so they + // never reference any lifetime args from the coroutine. We erase + // the regions here since we may get into situations where a + // coroutine is recursively contained within itself, leading to + // witness types that differ by region args. This means that + // cycle detection in fulfillment will not kick in, which leads + // to unnecessary overflows in async code. See the issue: + // <https://github.com/rust-lang/rust/issues/145151>. + let args = + ty::GenericArgs::for_item(tcx, tcx.typeck_root_def_id(def_id), |def, _| { + match def.kind { + ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), + ty::GenericParamDefKind::Type { .. } + | ty::GenericParamDefKind::Const { .. } => coroutine_args[def.index as usize], + } + }); + Ty::new_coroutine_witness(tcx, def_id, args) } // misc @@ -985,6 +1013,14 @@ impl<'tcx> rustc_type_ir::inherent::Ty<TyCtxt<'tcx>> for Ty<'tcx> { Ty::new_coroutine_witness(interner, def_id, args) } + fn new_coroutine_witness_for_coroutine( + interner: TyCtxt<'tcx>, + def_id: DefId, + coroutine_args: ty::GenericArgsRef<'tcx>, + ) -> Self { + Ty::new_coroutine_witness_for_coroutine(interner, def_id, coroutine_args) + } + fn new_ptr(interner: TyCtxt<'tcx>, ty: Self, mutbl: hir::Mutability) -> Self { Ty::new_ptr(interner, ty, mutbl) } diff --git a/compiler/rustc_middle/src/ty/trait_def.rs b/compiler/rustc_middle/src/ty/trait_def.rs index 59e2b2a034d..4e38d969192 100644 --- a/compiler/rustc_middle/src/ty/trait_def.rs +++ b/compiler/rustc_middle/src/ty/trait_def.rs @@ -6,6 +6,7 @@ use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_macros::{Decodable, Encodable, HashStable}; +use rustc_span::symbol::sym; use tracing::debug; use crate::query::LocalCrate; @@ -239,6 +240,12 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait /// Query provider for `incoherent_impls`. 
pub(super) fn incoherent_impls_provider(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] { + if let Some(def_id) = simp.def() + && !tcx.has_attr(def_id, sym::rustc_has_incoherent_inherent_impls) + { + return &[]; + } + let mut impls = Vec::new(); for cnum in iter::once(LOCAL_CRATE).chain(tcx.crates(()).iter().copied()) { for &impl_def_id in tcx.crate_incoherent_impls((cnum, simp)) { diff --git a/compiler/rustc_middle/src/ty/typeck_results.rs b/compiler/rustc_middle/src/ty/typeck_results.rs index 6b187c5325a..8dd80aab946 100644 --- a/compiler/rustc_middle/src/ty/typeck_results.rs +++ b/compiler/rustc_middle/src/ty/typeck_results.rs @@ -206,10 +206,25 @@ pub struct TypeckResults<'tcx> { /// formatting modified file tests/ui/coroutine/retain-resume-ref.rs pub coroutine_stalled_predicates: FxIndexSet<(ty::Predicate<'tcx>, ObligationCause<'tcx>)>, + /// Goals proven during HIR typeck which may be potentially region dependent. + /// + /// Borrowck *uniquifies* regions which may cause these goals to be ambiguous in MIR + /// type check. We ICE if goals fail in borrowck to detect bugs during MIR building or + /// missed checks in HIR typeck. To avoid ICE due to region dependence we store all + /// goals which may be region dependent and reprove them in case borrowck encounters + /// an error. + pub potentially_region_dependent_goals: + FxIndexSet<(ty::Predicate<'tcx>, ObligationCause<'tcx>)>, + /// Contains the data for evaluating the effect of feature `capture_disjoint_fields` /// on closure size. pub closure_size_eval: LocalDefIdMap<ClosureSizeProfileData<'tcx>>, + /// Stores the types involved in calls to the `transmute` intrinsic. These are meant to be checked + /// outside of typeck and borrowck to avoid cycles with opaque types and coroutine layout + /// computation. + pub transmutes_to_check: Vec<(Ty<'tcx>, Ty<'tcx>, HirId)>, + /// Container types and field indices of `offset_of!` expressions offset_of_data: ItemLocalMap<(Ty<'tcx>, Vec<(VariantIdx, FieldIdx)>)>, } @@ -240,7 +255,9 @@ impl<'tcx> TypeckResults<'tcx> { closure_fake_reads: Default::default(), rvalue_scopes: Default::default(), coroutine_stalled_predicates: Default::default(), + potentially_region_dependent_goals: Default::default(), closure_size_eval: Default::default(), + transmutes_to_check: Default::default(), offset_of_data: Default::default(), } } diff --git a/compiler/rustc_mir_build/Cargo.toml b/compiler/rustc_mir_build/Cargo.toml index f756f0a19ee..c3f7fdfcb00 100644 --- a/compiler/rustc_mir_build/Cargo.toml +++ b/compiler/rustc_mir_build/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_apfloat = "0.2.0" rustc_arena = { path = "../rustc_arena" } @@ -23,5 +23,5 @@ rustc_pattern_analysis = { path = "../rustc_pattern_analysis" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_trait_selection = { path = "../rustc_trait_selection" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_mir_build/src/builder/coverageinfo.rs b/compiler/rustc_mir_build/src/builder/coverageinfo.rs index 14199c20921..091b9dad5bc 100644 --- a/compiler/rustc_mir_build/src/builder/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/builder/coverageinfo.rs @@ -157,6 +157,21 @@ impl CoverageInfoBuilder { // if there's nothing interesting in it.
Box::new(CoverageInfoHi { num_block_markers, branch_spans }) } + + pub(crate) fn as_done(&self) -> Box<CoverageInfoHi> { + let &Self { nots: _, markers: BlockMarkerGen { num_block_markers }, ref branch_info } = + self; + + let branch_spans = branch_info + .as_ref() + .map(|branch_info| branch_info.branch_spans.as_slice()) + .unwrap_or_default() + .to_owned(); + + // For simplicity, always return an info struct (without Option), even + // if there's nothing interesting in it. + Box::new(CoverageInfoHi { num_block_markers, branch_spans }) + } } impl<'tcx> Builder<'_, 'tcx> { diff --git a/compiler/rustc_mir_build/src/builder/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs index 902a6e7f115..792ad6d782c 100644 --- a/compiler/rustc_mir_build/src/builder/custom/mod.rs +++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs @@ -19,10 +19,10 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; -use rustc_hir::{Attribute, HirId}; +use rustc_hir::{HirId, attrs}; use rustc_index::{IndexSlice, IndexVec}; +use rustc_middle::bug; use rustc_middle::mir::*; -use rustc_middle::span_bug; use rustc_middle::thir::*; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::Span; @@ -39,7 +39,8 @@ pub(super) fn build_custom_mir<'tcx>( return_ty: Ty<'tcx>, return_ty_span: Span, span: Span, - attr: &Attribute, + dialect: Option<attrs::MirDialect>, + phase: Option<attrs::MirPhase>, ) -> Body<'tcx> { let mut body = Body { basic_blocks: BasicBlocks::new(IndexVec::new()), @@ -72,7 +73,7 @@ pub(super) fn build_custom_mir<'tcx>( inlined_parent_scope: None, local_data: ClearCrossCrate::Set(SourceScopeLocalData { lint_root: hir_id }), }); - body.injection_phase = Some(parse_attribute(attr)); + body.injection_phase = Some(parse_attribute(dialect, phase)); let mut pctxt = ParseCtxt { tcx, @@ -98,40 +99,38 @@ pub(super) fn build_custom_mir<'tcx>( body } -fn parse_attribute(attr: &Attribute) -> MirPhase { - let meta_items = attr.meta_item_list().unwrap(); - let mut dialect: Option<String> = None; - let mut phase: Option<String> = None; - - // Not handling errors properly for this internal attribute; will just abort on errors. - for nested in meta_items { - let name = nested.name().unwrap(); - let value = nested.value_str().unwrap().as_str().to_string(); - match name.as_str() { - "dialect" => { - assert!(dialect.is_none()); - dialect = Some(value); - } - "phase" => { - assert!(phase.is_none()); - phase = Some(value); - } - other => { - span_bug!( - nested.span(), - "Unexpected key while parsing custom_mir attribute: '{}'", - other - ); - } - } - } - +/// Turns the arguments passed to `#[custom_mir(..)]` into a proper +/// [`MirPhase`]. Panics if this isn't possible for any reason. +fn parse_attribute(dialect: Option<attrs::MirDialect>, phase: Option<attrs::MirPhase>) -> MirPhase { let Some(dialect) = dialect else { + // Caught during attribute checking. assert!(phase.is_none()); return MirPhase::Built; }; - MirPhase::parse(dialect, phase) + match dialect { + attrs::MirDialect::Built => { + // Caught during attribute checking. + assert!(phase.is_none(), "Cannot specify a phase for `Built` MIR"); + MirPhase::Built + } + attrs::MirDialect::Analysis => match phase { + None | Some(attrs::MirPhase::Initial) => MirPhase::Analysis(AnalysisPhase::Initial), + + Some(attrs::MirPhase::PostCleanup) => MirPhase::Analysis(AnalysisPhase::PostCleanup), + + Some(attrs::MirPhase::Optimized) => { + // Caught during attribute checking. 
+ bug!("`optimized` dialect is not compatible with the `analysis` dialect") + } + }, + + attrs::MirDialect::Runtime => match phase { + None | Some(attrs::MirPhase::Initial) => MirPhase::Runtime(RuntimePhase::Initial), + Some(attrs::MirPhase::PostCleanup) => MirPhase::Runtime(RuntimePhase::PostCleanup), + Some(attrs::MirPhase::Optimized) => MirPhase::Runtime(RuntimePhase::Optimized), + }, + } } struct ParseCtxt<'a, 'tcx> { diff --git a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index 9825b947fe0..41d3aefcbe6 100644 --- a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -160,7 +160,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { }); } }; - values.push(value.eval_bits(self.tcx, self.typing_env)); + values.push(value.valtree.unwrap_leaf().to_bits_unchecked()); targets.push(self.parse_block(arm.body)?); } diff --git a/compiler/rustc_mir_build/src/builder/matches/mod.rs b/compiler/rustc_mir_build/src/builder/matches/mod.rs index 94ae5dadd8a..6ee9674bb08 100644 --- a/compiler/rustc_mir_build/src/builder/matches/mod.rs +++ b/compiler/rustc_mir_build/src/builder/matches/mod.rs @@ -16,7 +16,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir::{BindingMode, ByRef, LetStmt, LocalSource, Node}; use rustc_middle::bug; use rustc_middle::middle::region; -use rustc_middle::mir::{self, *}; +use rustc_middle::mir::*; use rustc_middle::thir::{self, *}; use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, ValTree, ValTreeKind}; use rustc_pattern_analysis::constructor::RangeEnd; @@ -1245,7 +1245,7 @@ struct Ascription<'tcx> { #[derive(Debug, Clone)] enum TestCase<'tcx> { Variant { adt_def: ty::AdtDef<'tcx>, variant_index: VariantIdx }, - Constant { value: mir::Const<'tcx> }, + Constant { value: ty::Value<'tcx> }, Range(Arc<PatRange<'tcx>>), Slice { len: usize, variable_length: bool }, Deref { temp: Place<'tcx>, mutability: Mutability }, @@ -1316,13 +1316,13 @@ enum TestKind<'tcx> { If, /// Test for equality with value, possibly after an unsizing coercion to - /// `ty`, + /// `cast_ty`, Eq { - value: Const<'tcx>, + value: ty::Value<'tcx>, // Integer types are handled by `SwitchInt`, and constants with ADT // types and `&[T]` types are converted back into patterns, so this can - // only be `&str`, `f32` or `f64`. - ty: Ty<'tcx>, + // only be `&str` or floats. + cast_ty: Ty<'tcx>, }, /// Test whether the value falls within an inclusive or exclusive range. @@ -1357,8 +1357,8 @@ pub(crate) struct Test<'tcx> { enum TestBranch<'tcx> { /// Success branch, used for tests with two possible outcomes. Success, - /// Branch corresponding to this constant. - Constant(Const<'tcx>, u128), + /// Branch corresponding to this constant. Must be a scalar. + Constant(ty::Value<'tcx>), /// Branch corresponding to this variant. Variant(VariantIdx), /// Failure branch for tests with two possible outcomes, and "otherwise" branch for other tests. 
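For context, the dialect/phase pair handled by `parse_attribute` originates from the `#[custom_mir(..)]` attribute on functions whose bodies are written with the `mir!` macro. A small sketch of how such a function typically looks (the values shown are one valid combination, not something introduced by this change):

```rust
#![feature(custom_mir, core_intrinsics)]
use core::intrinsics::mir::*;

// `dialect` defaults to built MIR when omitted; a phase only makes
// sense for the `analysis` and `runtime` dialects.
#[custom_mir(dialect = "runtime", phase = "optimized")]
fn identity(x: i32) -> i32 {
    mir! {
        {
            RET = x;
            Return()
        }
    }
}
```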
@@ -1366,8 +1366,8 @@ enum TestBranch<'tcx> { } impl<'tcx> TestBranch<'tcx> { - fn as_constant(&self) -> Option<&Const<'tcx>> { - if let Self::Constant(v, _) = self { Some(v) } else { None } + fn as_constant(&self) -> Option<ty::Value<'tcx>> { + if let Self::Constant(v) = self { Some(*v) } else { None } } } diff --git a/compiler/rustc_mir_build/src/builder/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs index a4609a6053e..d03794fe2d5 100644 --- a/compiler/rustc_mir_build/src/builder/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -35,7 +35,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TestCase::Constant { .. } if match_pair.pattern_ty.is_bool() => TestKind::If, TestCase::Constant { .. } if is_switch_ty(match_pair.pattern_ty) => TestKind::SwitchInt, - TestCase::Constant { value } => TestKind::Eq { value, ty: match_pair.pattern_ty }, + TestCase::Constant { value } => TestKind::Eq { value, cast_ty: match_pair.pattern_ty }, TestCase::Range(ref range) => { assert_eq!(range.ty, match_pair.pattern_ty); @@ -112,7 +112,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let otherwise_block = target_block(TestBranch::Failure); let switch_targets = SwitchTargets::new( target_blocks.iter().filter_map(|(&branch, &block)| { - if let TestBranch::Constant(_, bits) = branch { + if let TestBranch::Constant(value) = branch { + let bits = value.valtree.unwrap_leaf().to_bits_unchecked(); Some((bits, block)) } else { None @@ -135,17 +136,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.cfg.terminate(block, self.source_info(match_start_span), terminator); } - TestKind::Eq { value, mut ty } => { + TestKind::Eq { value, mut cast_ty } => { let tcx = self.tcx; let success_block = target_block(TestBranch::Success); let fail_block = target_block(TestBranch::Failure); - let mut expect_ty = value.ty(); - let mut expect = self.literal_operand(test.span, value); + let mut expect_ty = value.ty; + let mut expect = self.literal_operand(test.span, Const::from_ty_value(tcx, value)); let mut place = place; let mut block = block; - match ty.kind() { + match cast_ty.kind() { ty::Str => { // String literal patterns may have type `str` if `deref_patterns` is // enabled, in order to allow `deref!("..."): String`. In this case, `value` @@ -167,7 +168,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Rvalue::Ref(re_erased, BorrowKind::Shared, place), ); place = ref_place; - ty = ref_str_ty; + cast_ty = ref_str_ty; } ty::Adt(def, _) if tcx.is_lang_item(def.did(), LangItem::String) => { if !tcx.features().string_deref_patterns() { @@ -186,7 +187,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { eq_block, place, Mutability::Not, - ty, + cast_ty, ref_str, test.span, ); @@ -195,10 +196,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Similarly, the normal test code should be generated for the `&str`, instead of the `String`. block = eq_block; place = ref_str; - ty = ref_str_ty; + cast_ty = ref_str_ty; } &ty::Pat(base, _) => { - assert_eq!(ty, value.ty()); + assert_eq!(cast_ty, value.ty); assert!(base.is_trivially_pure_clone_copy()); let transmuted_place = self.temp(base, test.span); @@ -219,14 +220,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { place = transmuted_place; expect = Operand::Copy(transmuted_expect); - ty = base; + cast_ty = base; expect_ty = base; } _ => {} } - assert_eq!(expect_ty, ty); - if !ty.is_scalar() { + assert_eq!(expect_ty, cast_ty); + if !cast_ty.is_scalar() { // Use `PartialEq::eq` instead of `BinOp::Eq` // (the binop can only handle primitives) // Make sure that we do *not* call any user-defined code here. 
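The comments on `TestKind::Eq` above describe which constants reach the equality test: integers and other `SwitchInt`-able values are dispatched by a switch, while `&str` and float constants need a dedicated comparison. A small sketch of the two shapes of match that exercise these paths:

```rust
// Integer arms lower to a single SwitchInt over the scrutinee's bits.
fn describe(n: u32) -> &'static str {
    match n {
        0 => "zero",
        1 => "one",
        _ => "many",
    }
}

// `&str` constants cannot go through SwitchInt; each arm becomes an
// equality test, ultimately relying on the `PartialEq` impl for `str`.
fn is_greeting(s: &str) -> bool {
    matches!(s, "hi" | "hello")
}
```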
@@ -234,10 +235,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // comparison defined in `core`. // (Interestingly this means that exhaustiveness analysis relies, for soundness, // on the `PartialEq` impl for `str` to b correct!) - match *ty.kind() { + match *cast_ty.kind() { ty::Ref(_, deref_ty, _) if deref_ty == self.tcx.types.str_ => {} _ => { - span_bug!(source_info.span, "invalid type for non-scalar compare: {ty}") + span_bug!( + source_info.span, + "invalid type for non-scalar compare: {cast_ty}" + ) } }; self.string_compare( @@ -276,7 +280,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; if let Some(lo) = range.lo.as_finite() { - let lo = self.literal_operand(test.span, lo); + let lo = ty::Value { ty: range.ty, valtree: lo }; + let lo = self.literal_operand(test.span, Const::from_ty_value(self.tcx, lo)); self.compare( block, intermediate_block, @@ -289,7 +294,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; if let Some(hi) = range.hi.as_finite() { - let hi = self.literal_operand(test.span, hi); + let hi = ty::Value { ty: range.ty, valtree: hi }; + let hi = self.literal_operand(test.span, Const::from_ty_value(self.tcx, hi)); let op = match range.end { RangeEnd::Included => BinOp::Le, RangeEnd::Excluded => BinOp::Lt, @@ -555,10 +561,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // not to add such values here. let is_covering_range = |test_case: &TestCase<'tcx>| { test_case.as_range().is_some_and(|range| { - matches!( - range.contains(value, self.tcx, self.typing_env()), - None | Some(true) - ) + matches!(range.contains(value, self.tcx), None | Some(true)) }) }; let is_conflicting_candidate = |candidate: &&mut Candidate<'tcx>| { @@ -575,8 +578,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { None } else { fully_matched = true; - let bits = value.eval_bits(self.tcx, self.typing_env()); - Some(TestBranch::Constant(value, bits)) + Some(TestBranch::Constant(value)) } } (TestKind::SwitchInt, TestCase::Range(range)) => { @@ -585,12 +587,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // the values being tested. (This restricts what values can be // added to the test by subsequent candidates.) fully_matched = false; - let not_contained = - sorted_candidates.keys().filter_map(|br| br.as_constant()).copied().all( - |val| { - matches!(range.contains(val, self.tcx, self.typing_env()), Some(false)) - }, - ); + let not_contained = sorted_candidates + .keys() + .filter_map(|br| br.as_constant()) + .all(|val| matches!(range.contains(val, self.tcx), Some(false))); not_contained.then(|| { // No switch values are contained in the pattern range, @@ -601,7 +601,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { (TestKind::If, TestCase::Constant { value }) => { fully_matched = true; - let value = value.try_eval_bool(self.tcx, self.typing_env()).unwrap_or_else(|| { + let value = value.try_to_bool().unwrap_or_else(|| { span_bug!(test.span, "expected boolean value but got {value:?}") }); Some(if value { TestBranch::Success } else { TestBranch::Failure }) @@ -681,16 +681,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fully_matched = false; // If the testing range does not overlap with pattern range, // the pattern can be matched only if this test fails. - if !test.overlaps(pat, self.tcx, self.typing_env())? { - Some(TestBranch::Failure) - } else { - None - } + if !test.overlaps(pat, self.tcx)? { Some(TestBranch::Failure) } else { None } } } (TestKind::Range(range), &TestCase::Constant { value }) => { fully_matched = false; - if !range.contains(value, self.tcx, self.typing_env())? { + if !range.contains(value, self.tcx)? 
{ // `value` is not contained in the testing range, // so `value` can be matched only if this test fails. Some(TestBranch::Failure) diff --git a/compiler/rustc_mir_build/src/builder/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs index 855cd2f3bc0..6a8f0b21ee0 100644 --- a/compiler/rustc_mir_build/src/builder/mod.rs +++ b/compiler/rustc_mir_build/src/builder/mod.rs @@ -11,9 +11,10 @@ use rustc_ast::attr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sorted_map::SortedIndexMultiMap; use rustc_errors::ErrorGuaranteed; +use rustc_hir::attrs::AttributeKind; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; -use rustc_hir::{self as hir, BindingMode, ByRef, HirId, ItemLocalId, Node}; +use rustc_hir::{self as hir, BindingMode, ByRef, HirId, ItemLocalId, Node, find_attr}; use rustc_index::bit_set::GrowableBitSet; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; @@ -479,8 +480,7 @@ fn construct_fn<'tcx>( ty => span_bug!(span_with_body, "unexpected type of body: {ty:?}"), }; - if let Some(custom_mir_attr) = - tcx.hir_attrs(fn_id).iter().find(|attr| attr.has_name(sym::custom_mir)) + if let Some((dialect, phase)) = find_attr!(tcx.hir_attrs(fn_id), AttributeKind::CustomMir(dialect, phase, _) => (dialect, phase)) { return custom::build_custom_mir( tcx, @@ -492,7 +492,8 @@ fn construct_fn<'tcx>( return_ty, return_ty_span, span_with_body, - custom_mir_attr, + dialect.as_ref().map(|(d, _)| *d), + phase.as_ref().map(|(p, _)| *p), ); } @@ -789,6 +790,28 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { builder } + #[allow(dead_code)] + fn dump_for_debugging(&self) { + let mut body = Body::new( + MirSource::item(self.def_id.to_def_id()), + self.cfg.basic_blocks.clone(), + self.source_scopes.clone(), + self.local_decls.clone(), + self.canonical_user_type_annotations.clone(), + self.arg_count.clone(), + self.var_debug_info.clone(), + self.fn_span.clone(), + self.coroutine.clone(), + None, + ); + body.coverage_info_hi = self.coverage_info.as_ref().map(|b| b.as_done()); + + use rustc_middle::mir::pretty; + let options = pretty::PrettyPrintMirOptions::from_cli(self.tcx); + pretty::write_mir_fn(self.tcx, &body, &mut |_, _| Ok(()), &mut std::io::stdout(), options) + .unwrap(); + } + fn finish(self) -> Body<'tcx> { let mut body = Body::new( MirSource::item(self.def_id.to_def_id()), diff --git a/compiler/rustc_mir_build/src/check_tail_calls.rs b/compiler/rustc_mir_build/src/check_tail_calls.rs index 3ecccb422c4..e0cbe8519ed 100644 --- a/compiler/rustc_mir_build/src/check_tail_calls.rs +++ b/compiler/rustc_mir_build/src/check_tail_calls.rs @@ -3,6 +3,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::Applicability; use rustc_hir::LangItem; use rustc_hir::def::DefKind; +use rustc_hir::def_id::CRATE_DEF_ID; use rustc_middle::span_bug; use rustc_middle::thir::visit::{self, Visitor}; use rustc_middle::thir::{BodyTy, Expr, ExprId, ExprKind, Thir}; @@ -134,22 +135,23 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { self.report_abi_mismatch(expr.span, caller_sig.abi, callee_sig.abi); } + // FIXME(explicit_tail_calls): this currently fails for cases where opaques are used. + // e.g. + // ``` + // fn a() -> impl Sized { become b() } // ICE + // fn b() -> u8 { 0 } + // ``` + // we should think what is the expected behavior here. + // (we should probably just accept this by revealing opaques?) 
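The signature check reworked here compares the caller's and callee's full `fn` signatures. As a hedged sketch of the code it accepts (assuming the `explicit_tail_calls` feature in its current nightly form), a well-formed `become` requires matching inputs, output, and ABI:

```rust
#![feature(explicit_tail_calls)]
#![allow(incomplete_features)]

// Mutual recursion where both signatures are `fn(u64) -> bool`, so the
// tail-call signature check is satisfied.
fn is_even(n: u64) -> bool {
    if n == 0 { true } else { become is_odd(n - 1) }
}

fn is_odd(n: u64) -> bool {
    if n == 0 { false } else { become is_even(n - 1) }
}
```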
if caller_sig.inputs_and_output != callee_sig.inputs_and_output { - if caller_sig.inputs() != callee_sig.inputs() { - self.report_arguments_mismatch(expr.span, caller_sig, callee_sig); - } - - // FIXME(explicit_tail_calls): this currently fails for cases where opaques are used. - // e.g. - // ``` - // fn a() -> impl Sized { become b() } // ICE - // fn b() -> u8 { 0 } - // ``` - // we should think what is the expected behavior here. - // (we should probably just accept this by revealing opaques?) - if caller_sig.output() != callee_sig.output() { - span_bug!(expr.span, "hir typeck should have checked the return type already"); - } + self.report_signature_mismatch( + expr.span, + self.tcx.liberate_late_bound_regions( + CRATE_DEF_ID.to_def_id(), + self.caller_ty.fn_sig(self.tcx), + ), + self.tcx.liberate_late_bound_regions(CRATE_DEF_ID.to_def_id(), ty.fn_sig(self.tcx)), + ); } { @@ -356,7 +358,7 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { self.found_errors = Err(err); } - fn report_arguments_mismatch( + fn report_signature_mismatch( &mut self, sp: Span, caller_sig: ty::FnSig<'_>, diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 0b6b36640e9..b5e165c7517 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -5,9 +5,10 @@ use std::ops::Bound; use rustc_ast::AsmMacro; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::DiagArgValue; +use rustc_hir::attrs::AttributeKind; use rustc_hir::def::DefKind; -use rustc_hir::{self as hir, BindingMode, ByRef, HirId, Mutability}; -use rustc_middle::middle::codegen_fn_attrs::TargetFeature; +use rustc_hir::{self as hir, BindingMode, ByRef, HirId, Mutability, find_attr}; +use rustc_middle::middle::codegen_fn_attrs::{TargetFeature, TargetFeatureKind}; use rustc_middle::mir::BorrowKind; use rustc_middle::span_bug; use rustc_middle::thir::visit::Visitor; @@ -521,7 +522,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { .iter() .copied() .filter(|feature| { - !feature.implied + feature.kind == TargetFeatureKind::Enabled && !self .body_target_features .iter() @@ -1157,7 +1158,7 @@ pub(crate) fn check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) { // Closures and inline consts are handled by their owner, if it has a body assert!(!tcx.is_typeck_child(def.to_def_id())); // Also, don't safety check custom MIR - if tcx.has_attr(def, sym::custom_mir) { + if find_attr!(tcx.get_all_attrs(def), AttributeKind::CustomMir(..) 
=> ()).is_some() { return; } diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs index 24d4136c642..9657c4dc839 100644 --- a/compiler/rustc_mir_build/src/thir/cx/mod.rs +++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs @@ -4,11 +4,11 @@ use rustc_data_structures::steal::Steal; use rustc_errors::ErrorGuaranteed; -use rustc_hir as hir; -use rustc_hir::HirId; +use rustc_hir::attrs::AttributeKind; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::lang_items::LangItem; +use rustc_hir::{self as hir, HirId, find_attr}; use rustc_middle::bug; use rustc_middle::middle::region; use rustc_middle::thir::*; @@ -111,10 +111,8 @@ impl<'tcx> ThirBuildCx<'tcx> { typeck_results, rvalue_scopes: &typeck_results.rvalue_scopes, body_owner: def.to_def_id(), - apply_adjustments: tcx - .hir_attrs(hir_id) - .iter() - .all(|attr| !attr.has_name(rustc_span::sym::custom_mir)), + apply_adjustments: + !find_attr!(tcx.hir_attrs(hir_id), AttributeKind::CustomMir(..) => ()).is_some(), } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index a501cdf88c2..d46c4678bcf 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -11,11 +11,11 @@ use rustc_index::Idx; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::Obligation; use rustc_middle::mir::interpret::ErrorHandled; +use rustc_middle::span_bug; use rustc_middle::thir::{FieldPat, Pat, PatKind}; use rustc_middle::ty::{ self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitableExt, TypeVisitor, ValTree, }; -use rustc_middle::{mir, span_bug}; use rustc_span::def_id::DefId; use rustc_span::{DUMMY_SP, Span}; use rustc_trait_selection::traits::ObligationCause; @@ -288,16 +288,12 @@ impl<'tcx> ConstToPat<'tcx> { // when lowering to MIR in `Builder::perform_test`, treat the constant as a `&str`. // This works because `str` and `&str` have the same valtree representation. let ref_str_ty = Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, ty); - PatKind::Constant { - value: mir::Const::Ty(ref_str_ty, ty::Const::new_value(tcx, cv, ref_str_ty)), - } + PatKind::Constant { value: ty::Value { ty: ref_str_ty, valtree: cv } } } ty::Ref(_, pointee_ty, ..) => match *pointee_ty.kind() { // `&str` is represented as a valtree, let's keep using this // optimization for now. - ty::Str => PatKind::Constant { - value: mir::Const::Ty(ty, ty::Const::new_value(tcx, cv, ty)), - }, + ty::Str => PatKind::Constant { value: ty::Value { ty, valtree: cv } }, // All other references are converted into deref patterns and then recursively // convert the dereferenced constant to a pattern that is the sub-pattern of the // deref pattern. @@ -326,15 +322,13 @@ impl<'tcx> ConstToPat<'tcx> { // Also see <https://github.com/rust-lang/rfcs/pull/3535>. return self.mk_err(tcx.dcx().create_err(NaNPattern { span }), ty); } else { - PatKind::Constant { - value: mir::Const::Ty(ty, ty::Const::new_value(tcx, cv, ty)), - } + PatKind::Constant { value: ty::Value { ty, valtree: cv } } } } ty::Pat(..) | ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => { // The raw pointers we see here have been "vetted" by valtree construction to be // just integers, so we simply allow them. - PatKind::Constant { value: mir::Const::Ty(ty, ty::Const::new_value(tcx, cv, ty)) } + PatKind::Constant { value: ty::Value { ty, valtree: cv } } } ty::FnPtr(..) 
=> { unreachable!( diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index a44afed5492..166e64a5fcb 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -161,8 +161,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { format!("found bad range pattern endpoint `{expr:?}` outside of error recovery"); return Err(self.tcx.dcx().span_delayed_bug(expr.span, msg)); }; - - Ok(Some(PatRangeBoundary::Finite(value))) + Ok(Some(PatRangeBoundary::Finite(value.valtree))) } /// Overflowing literals are linted against in a late pass. This is mostly fine, except when we @@ -235,7 +234,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { let lo = lower_endpoint(lo_expr)?.unwrap_or(PatRangeBoundary::NegInfinity); let hi = lower_endpoint(hi_expr)?.unwrap_or(PatRangeBoundary::PosInfinity); - let cmp = lo.compare_with(hi, ty, self.tcx, self.typing_env); + let cmp = lo.compare_with(hi, ty, self.tcx); let mut kind = PatKind::Range(Arc::new(PatRange { lo, hi, end, ty })); match (end, cmp) { // `x..y` where `x < y`. @@ -244,7 +243,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { (RangeEnd::Included, Some(Ordering::Less)) => {} // `x..=y` where `x == y` and `x` and `y` are finite. (RangeEnd::Included, Some(Ordering::Equal)) if lo.is_finite() && hi.is_finite() => { - kind = PatKind::Constant { value: lo.as_finite().unwrap() }; + let value = ty::Value { ty, valtree: lo.as_finite().unwrap() }; + kind = PatKind::Constant { value }; } // `..=x` where `x == ty::MIN`. (RangeEnd::Included, Some(Ordering::Equal)) if !lo.is_finite() => {} diff --git a/compiler/rustc_mir_build/src/thir/print.rs b/compiler/rustc_mir_build/src/thir/print.rs index 5efc4be2de2..b7e8d6fea40 100644 --- a/compiler/rustc_mir_build/src/thir/print.rs +++ b/compiler/rustc_mir_build/src/thir/print.rs @@ -763,7 +763,7 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> { } PatKind::Constant { value } => { print_indented!(self, "Constant {", depth_lvl + 1); - print_indented!(self, format!("value: {:?}", value), depth_lvl + 2); + print_indented!(self, format!("value: {}", value), depth_lvl + 2); print_indented!(self, "}", depth_lvl + 1); } PatKind::ExpandedConstant { def_id, subpattern } => { diff --git a/compiler/rustc_mir_dataflow/Cargo.toml b/compiler/rustc_mir_dataflow/Cargo.toml index 293bcbef21b..e422f69c7ca 100644 --- a/compiler/rustc_mir_dataflow/Cargo.toml +++ b/compiler/rustc_mir_dataflow/Cargo.toml @@ -13,11 +13,10 @@ rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_graphviz = { path = "../rustc_graphviz" } -rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_span = { path = "../rustc_span" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs index f86a15a8f92..372a3f3a8b8 100644 --- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs +++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs @@ -8,7 +8,6 @@ use std::sync::OnceLock; use std::{io, ops, str}; use regex::Regex; -use rustc_hir::def_id::DefId; use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{ self, BasicBlock, Body, Location, create_dump_file, 
dump_enabled, graphviz_safe_def_name, @@ -16,6 +15,7 @@ use rustc_middle::mir::{ }; use rustc_middle::ty::TyCtxt; use rustc_middle::ty::print::with_no_trimmed_paths; +use rustc_span::def_id::DefId; use rustc_span::{Symbol, sym}; use tracing::debug; use {rustc_ast as ast, rustc_graphviz as dot}; diff --git a/compiler/rustc_mir_dataflow/src/move_paths/abs_domain.rs b/compiler/rustc_mir_dataflow/src/move_paths/abs_domain.rs deleted file mode 100644 index d056ad3d4b4..00000000000 --- a/compiler/rustc_mir_dataflow/src/move_paths/abs_domain.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! The move-analysis portion of borrowck needs to work in an abstract -//! domain of lifted `Place`s. Most of the `Place` variants fall into a -//! one-to-one mapping between the concrete and abstract (e.g., a -//! field-deref on a local variable, `x.field`, has the same meaning -//! in both domains). Indexed projections are the exception: `a[x]` -//! needs to be treated as mapping to the same move path as `a[y]` as -//! well as `a[13]`, etc. So we map these `x`/`y` values to `()`. -//! -//! (In theory, the analysis could be extended to work with sets of -//! paths, so that `a[0]` and `a[13]` could be kept distinct, while -//! `a[x]` would still overlap them both. But that is not this -//! representation does today.) - -use rustc_middle::mir::{PlaceElem, ProjectionElem, ProjectionKind}; - -pub(crate) trait Lift { - fn lift(&self) -> ProjectionKind; -} - -impl<'tcx> Lift for PlaceElem<'tcx> { - fn lift(&self) -> ProjectionKind { - match *self { - ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Field(f, _ty) => ProjectionElem::Field(f, ()), - ProjectionElem::OpaqueCast(_ty) => ProjectionElem::OpaqueCast(()), - ProjectionElem::Index(_i) => ProjectionElem::Index(()), - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Downcast(a, u) => ProjectionElem::Downcast(a, u), - ProjectionElem::Subtype(_ty) => ProjectionElem::Subtype(()), - ProjectionElem::UnwrapUnsafeBinder(_ty) => ProjectionElem::UnwrapUnsafeBinder(()), - } - } -} diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index 8bbc89fdcec..48718cad597 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -7,7 +7,6 @@ use rustc_middle::{bug, span_bug}; use smallvec::{SmallVec, smallvec}; use tracing::debug; -use super::abs_domain::Lift; use super::{ Init, InitIndex, InitKind, InitLocation, LocationMap, LookupResult, MoveData, MoveOut, MoveOutIndex, MovePath, MovePathIndex, MovePathLookup, @@ -241,7 +240,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { if union_path.is_none() { // inlined from add_move_path because of a borrowck conflict with the iterator base = - *data.rev_lookup.projections.entry((base, elem.lift())).or_insert_with(|| { + *data.rev_lookup.projections.entry((base, elem.kind())).or_insert_with(|| { new_move_path( &mut data.move_paths, &mut data.path_map, @@ -272,7 +271,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { tcx, .. 
} = self; - *rev_lookup.projections.entry((base, elem.lift())).or_insert_with(move || { + *rev_lookup.projections.entry((base, elem.kind())).or_insert_with(move || { new_move_path(move_paths, path_map, init_path_map, Some(base), mk_place(*tcx)) }) } diff --git a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs index 18985ba0da2..466416d63f5 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs @@ -1,3 +1,15 @@ +//! The move-analysis portion of borrowck needs to work in an abstract domain of lifted `Place`s. +//! Most of the `Place` variants fall into a one-to-one mapping between the concrete and abstract +//! (e.g., a field projection on a local variable, `x.field`, has the same meaning in both +//! domains). In other words, all field projections for the same field on the same local do not +//! have meaningfully different types if ever. Indexed projections are the exception: `a[x]` needs +//! to be treated as mapping to the same move path as `a[y]` as well as `a[13]`, etc. So we map +//! these `x`/`y` values to `()`. +//! +//! (In theory, the analysis could be extended to work with sets of paths, so that `a[0]` and +//! `a[13]` could be kept distinct, while `a[x]` would still overlap them both. But that is not +//! what this representation does today.) + use std::fmt; use std::ops::{Index, IndexMut}; @@ -8,11 +20,8 @@ use rustc_middle::ty::{Ty, TyCtxt}; use rustc_span::Span; use smallvec::SmallVec; -use self::abs_domain::Lift; use crate::un_derefer::UnDerefer; -mod abs_domain; - rustc_index::newtype_index! { #[orderable] #[debug_format = "mp{}"] @@ -324,7 +333,7 @@ impl<'tcx> MovePathLookup<'tcx> { }; for (_, elem) in self.un_derefer.iter_projections(place) { - if let Some(&subpath) = self.projections.get(&(result, elem.lift())) { + if let Some(&subpath) = self.projections.get(&(result, elem.kind())) { result = subpath; } else { return LookupResult::Parent(Some(result)); diff --git a/compiler/rustc_mir_dataflow/src/rustc_peek.rs b/compiler/rustc_mir_dataflow/src/rustc_peek.rs index 1682f332857..a899ec1fa88 100644 --- a/compiler/rustc_mir_dataflow/src/rustc_peek.rs +++ b/compiler/rustc_mir_dataflow/src/rustc_peek.rs @@ -1,7 +1,7 @@ use rustc_ast::MetaItem; -use rustc_hir::def_id::DefId; use rustc_middle::mir::{self, Body, Local, Location}; use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::def_id::DefId; use rustc_span::{Span, Symbol, sym}; use tracing::{debug, info}; diff --git a/compiler/rustc_mir_transform/Cargo.toml b/compiler/rustc_mir_transform/Cargo.toml index 08c43a4648c..16acec15e92 100644 --- a/compiler/rustc_mir_transform/Cargo.toml +++ b/compiler/rustc_mir_transform/Cargo.toml @@ -6,7 +6,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start either = "1" -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } @@ -26,5 +26,5 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_mir_transform/src/check_call_recursion.rs b/compiler/rustc_mir_transform/src/check_call_recursion.rs index 6d61ac2dd80..a9acb1da5a3 100644 --- a/compiler/rustc_mir_transform/src/check_call_recursion.rs +++ 
b/compiler/rustc_mir_transform/src/check_call_recursion.rs @@ -43,8 +43,8 @@ impl<'tcx> MirLint<'tcx> for CheckDropRecursion { // First check if `body` is an `fn drop()` of `Drop` if let DefKind::AssocFn = tcx.def_kind(def_id) - && let Some(trait_ref) = - tcx.impl_of_assoc(def_id.to_def_id()).and_then(|def_id| tcx.impl_trait_ref(def_id)) + && let Some(impl_id) = tcx.trait_impl_of_assoc(def_id.to_def_id()) + && let trait_ref = tcx.impl_trait_ref(impl_id).unwrap() && tcx.is_lang_item(trait_ref.instantiate_identity().def_id, LangItem::Drop) // avoid erroneous `Drop` impls from causing ICEs below && let sig = tcx.fn_sig(def_id).instantiate_identity() diff --git a/compiler/rustc_mir_transform/src/check_enums.rs b/compiler/rustc_mir_transform/src/check_enums.rs index 33a87cb9873..12447dc7cbb 100644 --- a/compiler/rustc_mir_transform/src/check_enums.rs +++ b/compiler/rustc_mir_transform/src/check_enums.rs @@ -48,6 +48,21 @@ impl<'tcx> crate::MirPass<'tcx> for CheckEnums { let new_block = split_block(basic_blocks, location); match check { + EnumCheckType::Direct { op_size, .. } + | EnumCheckType::WithNiche { op_size, .. } + if op_size.bytes() == 0 => + { + // It is never valid to use a ZST as a discriminant for an inhabited enum, but that will + // have been caught by the type checker. Do nothing but ensure that a bug has been signaled. + tcx.dcx().span_delayed_bug( + source_info.span, + "cannot build enum discriminant from zero-sized type", + ); + basic_blocks[block].terminator = Some(Terminator { + source_info, + kind: TerminatorKind::Goto { target: new_block }, + }); + } EnumCheckType::Direct { source_op, discr, op_size, valid_discrs } => { insert_direct_enum_check( tcx, diff --git a/compiler/rustc_mir_transform/src/check_packed_ref.rs b/compiler/rustc_mir_transform/src/check_packed_ref.rs index dcb812c7899..100104e9de0 100644 --- a/compiler/rustc_mir_transform/src/check_packed_ref.rs +++ b/compiler/rustc_mir_transform/src/check_packed_ref.rs @@ -40,7 +40,7 @@ impl<'tcx> Visitor<'tcx> for PackedRefChecker<'_, 'tcx> { if context.is_borrow() && util::is_disaligned(self.tcx, self.body, self.typing_env, *place) { let def_id = self.body.source.instance.def_id(); - if let Some(impl_def_id) = self.tcx.impl_of_assoc(def_id) + if let Some(impl_def_id) = self.tcx.trait_impl_of_assoc(def_id) && self.tcx.is_builtin_derived(impl_def_id) { // If we ever reach here it means that the generated derive diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 5568d42ab8f..879a20e771d 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -16,7 +16,6 @@ use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; mod balanced_flow; pub(crate) mod node_flow; -mod union_find; /// Struct containing the results of [`prepare_bcb_counters_data`]. pub(crate) struct BcbCountersData { diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs index 91ed54b8b59..e063f75887b 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs @@ -7,13 +7,12 @@ //! (Knuth & Stevenson, 1973). 
use rustc_data_structures::graph; +use rustc_data_structures::union_find::UnionFind; use rustc_index::bit_set::DenseBitSet; use rustc_index::{Idx, IndexSlice, IndexVec}; pub(crate) use rustc_middle::mir::coverage::NodeFlowData; use rustc_middle::mir::coverage::Op; -use crate::coverage::counters::union_find::UnionFind; - #[cfg(test)] mod tests; diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs index 46fe7c40826..8dbe564f517 100644 --- a/compiler/rustc_mir_transform/src/coverage/mappings.rs +++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs @@ -1,51 +1,36 @@ use rustc_index::IndexVec; -use rustc_middle::mir::coverage::{BlockMarkerId, BranchSpan, CoverageInfoHi, CoverageKind}; +use rustc_middle::mir::coverage::{ + BlockMarkerId, BranchSpan, CoverageInfoHi, CoverageKind, Mapping, MappingKind, +}; use rustc_middle::mir::{self, BasicBlock, StatementKind}; use rustc_middle::ty::TyCtxt; -use rustc_span::Span; -use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; +use crate::coverage::graph::CoverageGraph; use crate::coverage::hir_info::ExtractedHirInfo; use crate::coverage::spans::extract_refined_covspans; use crate::coverage::unexpand::unexpand_into_body_span; -/// Associates an ordinary executable code span with its corresponding BCB. -#[derive(Debug)] -pub(super) struct CodeMapping { - pub(super) span: Span, - pub(super) bcb: BasicCoverageBlock, -} - -#[derive(Debug)] -pub(super) struct BranchPair { - pub(super) span: Span, - pub(super) true_bcb: BasicCoverageBlock, - pub(super) false_bcb: BasicCoverageBlock, -} - #[derive(Default)] -pub(super) struct ExtractedMappings { - pub(super) code_mappings: Vec<CodeMapping>, - pub(super) branch_pairs: Vec<BranchPair>, +pub(crate) struct ExtractedMappings { + pub(crate) mappings: Vec<Mapping>, } -/// Extracts coverage-relevant spans from MIR, and associates them with -/// their corresponding BCBs. -pub(super) fn extract_all_mapping_info_from_mir<'tcx>( +/// Extracts coverage-relevant spans from MIR, and uses them to create +/// coverage mapping data for inclusion in MIR. +pub(crate) fn extract_mappings_from_mir<'tcx>( tcx: TyCtxt<'tcx>, mir_body: &mir::Body<'tcx>, hir_info: &ExtractedHirInfo, graph: &CoverageGraph, ) -> ExtractedMappings { - let mut code_mappings = vec![]; - let mut branch_pairs = vec![]; + let mut mappings = vec![]; // Extract ordinary code mappings from MIR statement/terminator spans. 
- extract_refined_covspans(tcx, mir_body, hir_info, graph, &mut code_mappings); + extract_refined_covspans(tcx, mir_body, hir_info, graph, &mut mappings); - branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, graph)); + extract_branch_mappings(mir_body, hir_info, graph, &mut mappings); - ExtractedMappings { code_mappings, branch_pairs } + ExtractedMappings { mappings } } fn resolve_block_markers( @@ -69,19 +54,18 @@ fn resolve_block_markers( block_markers } -pub(super) fn extract_branch_pairs( +pub(super) fn extract_branch_mappings( mir_body: &mir::Body<'_>, hir_info: &ExtractedHirInfo, graph: &CoverageGraph, -) -> Vec<BranchPair> { - let Some(coverage_info_hi) = mir_body.coverage_info_hi.as_deref() else { return vec![] }; + mappings: &mut Vec<Mapping>, +) { + let Some(coverage_info_hi) = mir_body.coverage_info_hi.as_deref() else { return }; let block_markers = resolve_block_markers(coverage_info_hi, mir_body); - coverage_info_hi - .branch_spans - .iter() - .filter_map(|&BranchSpan { span: raw_span, true_marker, false_marker }| { + mappings.extend(coverage_info_hi.branch_spans.iter().filter_map( + |&BranchSpan { span: raw_span, true_marker, false_marker }| try { // For now, ignore any branch span that was introduced by // expansion. This makes things like assert macros less noisy. if !raw_span.ctxt().outer_expn_data().is_root() { @@ -94,7 +78,7 @@ pub(super) fn extract_branch_pairs( let true_bcb = bcb_from_marker(true_marker)?; let false_bcb = bcb_from_marker(false_marker)?; - Some(BranchPair { span, true_bcb, false_bcb }) - }) - .collect::<Vec<_>>() + Mapping { span, kind: MappingKind::Branch { true_bcb, false_bcb } } + }, + )); } diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 47a87a2e94d..c5fef299244 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -1,4 +1,4 @@ -use rustc_middle::mir::coverage::{CoverageKind, FunctionCoverageInfo, Mapping, MappingKind}; +use rustc_middle::mir::coverage::{CoverageKind, FunctionCoverageInfo}; use rustc_middle::mir::{self, BasicBlock, Statement, StatementKind, TerminatorKind}; use rustc_middle::ty::TyCtxt; use tracing::{debug, debug_span, trace}; @@ -71,10 +71,8 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: //////////////////////////////////////////////////// // Extract coverage spans and other mapping info from MIR. - let extracted_mappings = - mappings::extract_all_mapping_info_from_mir(tcx, mir_body, &hir_info, &graph); - - let mappings = create_mappings(&extracted_mappings); + let ExtractedMappings { mappings } = + mappings::extract_mappings_from_mir(tcx, mir_body, &hir_info, &graph); if mappings.is_empty() { // No spans could be converted into valid mappings, so skip this function. debug!("no spans could be converted into valid mappings; skipping"); @@ -100,34 +98,6 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: })); } -/// For each coverage span extracted from MIR, create a corresponding mapping. -/// -/// FIXME(Zalathar): This used to be where BCBs in the extracted mappings were -/// resolved to a `CovTerm`. But that is now handled elsewhere, so this -/// function can potentially be simplified even further. -fn create_mappings(extracted_mappings: &ExtractedMappings) -> Vec<Mapping> { - // Fully destructure the mappings struct to make sure we don't miss any kinds. 
- let ExtractedMappings { code_mappings, branch_pairs } = extracted_mappings; - let mut mappings = Vec::new(); - - mappings.extend(code_mappings.iter().map( - // Ordinary code mappings are the simplest kind. - |&mappings::CodeMapping { span, bcb }| { - let kind = MappingKind::Code { bcb }; - Mapping { kind, span } - }, - )); - - mappings.extend(branch_pairs.iter().map( - |&mappings::BranchPair { span, true_bcb, false_bcb }| { - let kind = MappingKind::Branch { true_bcb, false_bcb }; - Mapping { kind, span } - }, - )); - - mappings -} - /// Inject any necessary coverage statements into MIR, so that they influence codegen. fn inject_coverage_statements<'tcx>(mir_body: &mut mir::Body<'tcx>, graph: &CoverageGraph) { for (bcb, data) in graph.iter_enumerated() { diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index ae9459dee84..d1b04c8f587 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -1,6 +1,6 @@ use rustc_data_structures::fx::FxHashSet; use rustc_middle::mir; -use rustc_middle::mir::coverage::START_BCB; +use rustc_middle::mir::coverage::{Mapping, MappingKind, START_BCB}; use rustc_middle::ty::TyCtxt; use rustc_span::source_map::SourceMap; use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span}; @@ -9,7 +9,7 @@ use tracing::instrument; use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; use crate::coverage::hir_info::ExtractedHirInfo; use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir, SpanFromMir}; -use crate::coverage::{mappings, unexpand}; +use crate::coverage::unexpand; mod from_mir; @@ -18,7 +18,7 @@ pub(super) fn extract_refined_covspans<'tcx>( mir_body: &mir::Body<'tcx>, hir_info: &ExtractedHirInfo, graph: &CoverageGraph, - code_mappings: &mut Vec<mappings::CodeMapping>, + mappings: &mut Vec<Mapping>, ) { if hir_info.is_async_fn { // An async function desugars into a function that returns a future, @@ -26,7 +26,7 @@ pub(super) fn extract_refined_covspans<'tcx>( // outer function will be unhelpful, so just keep the signature span // and ignore all of the spans in the MIR body. if let Some(span) = hir_info.fn_sig_span { - code_mappings.push(mappings::CodeMapping { span, bcb: START_BCB }); + mappings.push(Mapping { span, kind: MappingKind::Code { bcb: START_BCB } }) } return; } @@ -111,9 +111,9 @@ pub(super) fn extract_refined_covspans<'tcx>( // Merge covspans that can be merged. covspans.dedup_by(|b, a| a.merge_if_eligible(b)); - code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| { + mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| { // Each span produced by the refiner represents an ordinary code region. 
- mappings::CodeMapping { span, bcb } + Mapping { span, kind: MappingKind::Code { bcb } } })); } diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index 952da2cdf72..5a13394543b 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -447,26 +447,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { Projection(base, elem) => { let base = self.evaluated[base].as_ref()?; - let elem = match elem { - ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Downcast(name, read_variant) => { - ProjectionElem::Downcast(name, read_variant) - } - ProjectionElem::Field(f, ()) => ProjectionElem::Field(f, ty.ty), - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::OpaqueCast(()) => ProjectionElem::OpaqueCast(ty.ty), - ProjectionElem::Subtype(()) => ProjectionElem::Subtype(ty.ty), - ProjectionElem::UnwrapUnsafeBinder(()) => { - ProjectionElem::UnwrapUnsafeBinder(ty.ty) - } - // This should have been replaced by a `ConstantIndex` earlier. - ProjectionElem::Index(_) => return None, - }; + // `Index` by constants should have been replaced by `ConstantIndex` by + // `simplify_place_projection`. + let elem = elem.try_map(|_| None, |()| ty.ty)?; self.ecx.project(base, elem).discard_err()? } Address { place, kind: _, provenance: _ } => { @@ -476,13 +459,11 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { let local = self.locals[place.local]?; let pointer = self.evaluated[local].as_ref()?; let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?; - for proj in place.projection.iter().skip(1) { - // We have no call stack to associate a local with a value, so we cannot - // interpret indexing. - if matches!(proj, ProjectionElem::Index(_)) { - return None; - } - mplace = self.ecx.project(&mplace, proj).discard_err()?; + for elem in place.projection.iter().skip(1) { + // `Index` by constants should have been replaced by `ConstantIndex` by + // `simplify_place_projection`. 
+ let elem = elem.try_map(|_| None, |ty| ty)?; + mplace = self.ecx.project(&mplace, elem).discard_err()?; } let pointer = mplace.to_ref(&self.ecx); ImmTy::from_immediate(pointer, ty).into() @@ -902,27 +883,14 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { proj: ProjectionElem<VnIndex, ()>, loc: Location, ) -> Option<PlaceElem<'tcx>> { - Some(match proj { - ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Field(idx, ()) => ProjectionElem::Field(idx, ty), - ProjectionElem::Index(idx) => { - let Some(local) = self.try_as_local(idx, loc) else { - return None; - }; + proj.try_map( + |value| { + let local = self.try_as_local(value, loc)?; self.reused_locals.insert(local); - ProjectionElem::Index(local) - } - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::Downcast(symbol, idx) => ProjectionElem::Downcast(symbol, idx), - ProjectionElem::OpaqueCast(()) => ProjectionElem::OpaqueCast(ty), - ProjectionElem::Subtype(()) => ProjectionElem::Subtype(ty), - ProjectionElem::UnwrapUnsafeBinder(()) => ProjectionElem::UnwrapUnsafeBinder(ty), - }) + Some(local) + }, + |()| ty, + ) } fn simplify_aggregate_to_copy( diff --git a/compiler/rustc_mir_transform/src/pass_manager.rs b/compiler/rustc_mir_transform/src/pass_manager.rs index 7a8d3ba1ff1..5b3ddcc777b 100644 --- a/compiler/rustc_mir_transform/src/pass_manager.rs +++ b/compiler/rustc_mir_transform/src/pass_manager.rs @@ -41,19 +41,40 @@ fn to_profiler_name(type_name: &'static str) -> &'static str { }) } -// const wrapper for `if let Some((_, tail)) = name.rsplit_once(':') { tail } else { name }` -const fn c_name(name: &'static str) -> &'static str { +// A function that simplifies a pass's type_name. E.g. `Baz`, `Baz<'_>`, +// `foo::bar::Baz`, and `foo::bar::Baz<'a, 'b>` all become `Baz`. +// +// It's `const` for perf reasons: it's called a lot, and doing the string +// operations at runtime causes a non-trivial slowdown. If +// `split_once`/`rsplit_once` become `const` its body could be simplified to +// this: +// ```ignore (fragment) +// let name = if let Some((_, tail)) = name.rsplit_once(':') { tail } else { name }; +// let name = if let Some((head, _)) = name.split_once('<') { head } else { name }; +// name +// ``` +const fn simplify_pass_type_name(name: &'static str) -> &'static str { // FIXME(const-hack) Simplify the implementation once more `str` methods get const-stable. - // and inline into call site + + // Work backwards from the end. If a ':' is hit, strip it and everything before it. let bytes = name.as_bytes(); let mut i = bytes.len(); while i > 0 && bytes[i - 1] != b':' { - i = i - 1; + i -= 1; } let (_, bytes) = bytes.split_at(i); + + // Work forwards from the start of what's left. If a '<' is hit, strip it and everything after + // it. + let mut i = 0; + while i < bytes.len() && bytes[i] != b'<' { + i += 1; + } + let (bytes, _) = bytes.split_at(i); + match std::str::from_utf8(bytes) { Ok(name) => name, - Err(_) => name, + Err(_) => panic!(), } } @@ -62,12 +83,7 @@ const fn c_name(name: &'static str) -> &'static str { /// loop that goes over each available MIR and applies `run_pass`. pub(super) trait MirPass<'tcx> { fn name(&self) -> &'static str { - // FIXME(const-hack) Simplify the implementation once more `str` methods get const-stable. 
- // See copypaste in `MirLint` - const { - let name = std::any::type_name::<Self>(); - c_name(name) - } + const { simplify_pass_type_name(std::any::type_name::<Self>()) } } fn profiler_name(&self) -> &'static str { @@ -101,12 +117,7 @@ pub(super) trait MirPass<'tcx> { /// disabled (via the `Lint` adapter). pub(super) trait MirLint<'tcx> { fn name(&self) -> &'static str { - // FIXME(const-hack) Simplify the implementation once more `str` methods get const-stable. - // See copypaste in `MirPass` - const { - let name = std::any::type_name::<Self>(); - c_name(name) - } + const { simplify_pass_type_name(std::any::type_name::<Self>()) } } fn is_enabled(&self, _sess: &Session) -> bool { diff --git a/compiler/rustc_mir_transform/src/ref_prop.rs b/compiler/rustc_mir_transform/src/ref_prop.rs index d1c2d6b508f..6f61215cee2 100644 --- a/compiler/rustc_mir_transform/src/ref_prop.rs +++ b/compiler/rustc_mir_transform/src/ref_prop.rs @@ -79,6 +79,7 @@ impl<'tcx> crate::MirPass<'tcx> for ReferencePropagation { #[instrument(level = "trace", skip(self, tcx, body))] fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { debug!(def_id = ?body.source.def_id()); + move_to_copy_pointers(tcx, body); while propagate_ssa(tcx, body) {} } @@ -87,11 +88,43 @@ impl<'tcx> crate::MirPass<'tcx> for ReferencePropagation { } } +/// The SSA analysis done by [`SsaLocals`] treats [`Operand::Move`] as a read, even though in +/// general [`Operand::Move`] represents pass-by-pointer where the callee can overwrite the +/// pointee (Miri always considers the place deinitialized). CopyProp has a similar trick to +/// turn [`Operand::Move`] into [`Operand::Copy`] when required for an optimization, but in this +/// pass we just turn all moves of pointers into copies because pointers should be by-value anyway. +fn move_to_copy_pointers<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let mut visitor = MoveToCopyVisitor { tcx, local_decls: &body.local_decls }; + for (bb, data) in body.basic_blocks.as_mut_preserves_cfg().iter_enumerated_mut() { + visitor.visit_basic_block_data(bb, data); + } + + struct MoveToCopyVisitor<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + local_decls: &'a IndexVec<Local, LocalDecl<'tcx>>, + } + + impl<'a, 'tcx> MutVisitor<'tcx> for MoveToCopyVisitor<'a, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_operand(&mut self, operand: &mut Operand<'tcx>, loc: Location) { + if let Operand::Move(place) = *operand { + if place.ty(self.local_decls, self.tcx).ty.is_any_ptr() { + *operand = Operand::Copy(place); + } + } + self.super_operand(operand, loc); + } + } +} + fn propagate_ssa<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool { let typing_env = body.typing_env(tcx); let ssa = SsaLocals::new(tcx, body, typing_env); - let mut replacer = compute_replacement(tcx, body, &ssa); + let mut replacer = compute_replacement(tcx, body, ssa); debug!(?replacer.targets); debug!(?replacer.allowed_replacements); debug!(?replacer.storage_to_remove); @@ -119,7 +152,7 @@ enum Value<'tcx> { fn compute_replacement<'tcx>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, - ssa: &SsaLocals, + ssa: SsaLocals, ) -> Replacer<'tcx> { let always_live_locals = always_storage_live_locals(body); @@ -138,7 +171,7 @@ fn compute_replacement<'tcx>( // reborrowed references. let mut storage_to_remove = DenseBitSet::new_empty(body.local_decls.len()); - let fully_replaceable_locals = fully_replaceable_locals(ssa); + let fully_replaceable_locals = fully_replaceable_locals(&ssa); // Returns true iff we can use `place` as a pointee. 
// diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index c687036f544..c6760b3583f 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -75,7 +75,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body< build_call_shim(tcx, instance, Some(adjustment), CallKind::Direct(def_id)) } ty::InstanceKind::FnPtrShim(def_id, ty) => { - let trait_ = tcx.trait_of_assoc(def_id).unwrap(); + let trait_ = tcx.parent(def_id); // Supports `Fn` or `async Fn` traits. let adjustment = match tcx .fn_trait_kind_from_def_id(trait_) diff --git a/compiler/rustc_monomorphize/Cargo.toml b/compiler/rustc_monomorphize/Cargo.toml index 0ed5b4fc0d0..b11084cf169 100644 --- a/compiler/rustc_monomorphize/Cargo.toml +++ b/compiler/rustc_monomorphize/Cargo.toml @@ -6,7 +6,6 @@ edition = "2024" [dependencies] # tidy-alphabetical-start rustc_abi = { path = "../rustc_abi" } -rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } @@ -15,9 +14,8 @@ rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } rustc_target = { path = "../rustc_target" } serde = "1" serde_json = "1" -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 26ca8518434..6a836442c32 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -205,6 +205,8 @@ //! this is not implemented however: a mono item will be produced //! regardless of whether it is actually needed or not. +mod autodiff; + use std::cell::OnceCell; use rustc_data_structures::fx::FxIndexMap; @@ -235,6 +237,7 @@ use rustc_span::source_map::{Spanned, dummy_spanned, respan}; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument, trace}; +use crate::collector::autodiff::collect_autodiff_fn; use crate::errors::{ self, EncounteredErrorWhileInstantiating, EncounteredErrorWhileInstantiatingGlobalAsm, NoOptimizedMir, RecursionLimit, @@ -786,7 +789,35 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> { // *Before* monomorphizing, record that we already handled this mention. self.used_mentioned_items.insert(MentionedItem::Fn(callee_ty)); let callee_ty = self.monomorphize(callee_ty); - visit_fn_use(self.tcx, callee_ty, true, source, &mut self.used_items) + + // HACK(explicit_tail_calls): collect tail calls to `#[track_caller]` functions as indirect, + // because we later call them as such, to prevent issues with ABI incompatibility. + // Ideally we'd replace such tail calls with normal call + return, but this requires + // post-mono MIR optimizations, which we don't yet have. + let force_indirect_call = + if matches!(terminator.kind, mir::TerminatorKind::TailCall { .. 
}) + && let &ty::FnDef(def_id, args) = callee_ty.kind() + && let instance = ty::Instance::expect_resolve( + self.tcx, + ty::TypingEnv::fully_monomorphized(), + def_id, + args, + source, + ) + && instance.def.requires_caller_location(self.tcx) + { + true + } else { + false + }; + + visit_fn_use( + self.tcx, + callee_ty, + !force_indirect_call, + source, + &mut self.used_items, + ) } mir::TerminatorKind::Drop { ref place, .. } => { let ty = place.ty(self.body, self.tcx).ty; @@ -911,6 +942,8 @@ fn visit_instance_use<'tcx>( return; } if let Some(intrinsic) = tcx.intrinsic(instance.def_id()) { + collect_autodiff_fn(tcx, instance, intrinsic, output); + if let Some(_requirement) = ValidityRequirement::from_intrinsic(intrinsic.name) { // The intrinsics assert_inhabited, assert_zero_valid, and assert_mem_uninitialized_valid will // be lowered in codegen to nothing or a call to panic_nounwind. So if we encounter any diff --git a/compiler/rustc_monomorphize/src/collector/autodiff.rs b/compiler/rustc_monomorphize/src/collector/autodiff.rs new file mode 100644 index 00000000000..13868cca944 --- /dev/null +++ b/compiler/rustc_monomorphize/src/collector/autodiff.rs @@ -0,0 +1,48 @@ +use rustc_middle::bug; +use rustc_middle::ty::{self, GenericArg, IntrinsicDef, TyCtxt}; + +use crate::collector::{MonoItems, create_fn_mono_item}; + +// Here, we force both primal and diff function to be collected in +// mono so this does not interfere in `autodiff` intrinsics +// codegen process. If they are unused, LLVM will remove them when +// compiling with O3. +pub(crate) fn collect_autodiff_fn<'tcx>( + tcx: TyCtxt<'tcx>, + instance: ty::Instance<'tcx>, + intrinsic: IntrinsicDef, + output: &mut MonoItems<'tcx>, +) { + if intrinsic.name != rustc_span::sym::autodiff { + return; + }; + + collect_autodiff_fn_from_arg(instance.args[0], tcx, output); +} + +fn collect_autodiff_fn_from_arg<'tcx>( + arg: GenericArg<'tcx>, + tcx: TyCtxt<'tcx>, + output: &mut MonoItems<'tcx>, +) { + let (instance, span) = match arg.kind() { + ty::GenericArgKind::Type(ty) => match ty.kind() { + ty::FnDef(def_id, substs) => { + let span = tcx.def_span(def_id); + let instance = ty::Instance::expect_resolve( + tcx, + ty::TypingEnv::non_body_analysis(tcx, def_id), + *def_id, + substs, + span, + ); + + (instance, span) + } + _ => bug!("expected autodiff function"), + }, + _ => bug!("expected type when matching autodiff arg"), + }; + + output.push(create_fn_mono_item(tcx, instance, span)); +} diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index d76b27d9970..d784d3540c4 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -92,8 +92,6 @@ //! source-level module, functions from the same module will be available for //! inlining, even when they are not marked `#[inline]`. 
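The tail-call hunk above forces calls to `#[track_caller]` callees to be collected as indirect uses, because codegen later performs such tail calls indirectly to sidestep the ABI mismatch caused by the hidden caller-location argument. A toy sketch of just that decision, with illustrative stand-in types rather than the real MIR and query APIs:

    // Toy model of the "collect as a direct use?" decision; not the real rustc types.
    #[derive(Debug, Clone, Copy, PartialEq)]
    enum TerminatorKind { Call, TailCall }

    struct Callee {
        requires_caller_location: bool, // i.e. the callee is #[track_caller]
    }

    /// Returns `false` (collect as an indirect use) only for tail calls to
    /// `#[track_caller]` callees; everything else stays a direct use.
    fn collect_as_direct(terminator: TerminatorKind, callee: &Callee) -> bool {
        let force_indirect =
            terminator == TerminatorKind::TailCall && callee.requires_caller_location;
        !force_indirect
    }

    fn main() {
        let tracked = Callee { requires_caller_location: true };
        let plain = Callee { requires_caller_location: false };
        assert!(collect_as_direct(TerminatorKind::Call, &tracked));
        assert!(collect_as_direct(TerminatorKind::TailCall, &plain));
        assert!(!collect_as_direct(TerminatorKind::TailCall, &tracked));
    }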
-mod autodiff; - use std::cmp; use std::collections::hash_map::Entry; use std::fs::{self, File}; @@ -104,7 +102,7 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sync; use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_hir::LangItem; -use rustc_hir::attrs::InlineAttr; +use rustc_hir::attrs::{InlineAttr, Linkage}; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, DefIdSet, LOCAL_CRATE}; use rustc_hir::definitions::DefPathDataName; @@ -112,7 +110,7 @@ use rustc_middle::bug; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; use rustc_middle::mir::mono::{ - CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, Linkage, MonoItem, MonoItemData, + CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, MonoItem, MonoItemData, MonoItemPartitions, Visibility, }; use rustc_middle::ty::print::{characteristic_def_id_of_type, with_no_trimmed_paths}; @@ -251,17 +249,7 @@ where always_export_generics, ); - // We can't differentiate a function that got inlined. - let autodiff_active = cfg!(llvm_enzyme) - && matches!(mono_item, MonoItem::Fn(_)) - && cx - .tcx - .codegen_fn_attrs(mono_item.def_id()) - .autodiff_item - .as_ref() - .is_some_and(|ad| ad.is_active()); - - if !autodiff_active && visibility == Visibility::Hidden && can_be_internalized { + if visibility == Visibility::Hidden && can_be_internalized { internalization_candidates.insert(mono_item); } let size_estimate = mono_item.size_estimate(cx.tcx); @@ -650,17 +638,18 @@ fn characteristic_def_id_of_mono_item<'tcx>( // its self-type. If the self-type does not provide a characteristic // DefId, we use the location of the impl after all. - if tcx.trait_of_assoc(def_id).is_some() { + let assoc_parent = tcx.assoc_parent(def_id); + + if let Some((_, DefKind::Trait)) = assoc_parent { let self_ty = instance.args.type_at(0); // This is a default implementation of a trait method. 
return characteristic_def_id_of_type(self_ty).or(Some(def_id)); } - if let Some(impl_def_id) = tcx.impl_of_assoc(def_id) { - if tcx.sess.opts.incremental.is_some() - && tcx - .trait_id_of_impl(impl_def_id) - .is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Drop)) + if let Some((impl_def_id, DefKind::Impl { of_trait })) = assoc_parent { + if of_trait + && tcx.sess.opts.incremental.is_some() + && tcx.is_lang_item(tcx.trait_id_of_impl(impl_def_id).unwrap(), LangItem::Drop) { // Put `Drop::drop` into the same cgu as `drop_in_place` // since `drop_in_place` is the only thing that can @@ -1156,27 +1145,15 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio } } - #[cfg(not(llvm_enzyme))] - let autodiff_mono_items: Vec<_> = vec![]; - #[cfg(llvm_enzyme)] - let mut autodiff_mono_items: Vec<_> = vec![]; let mono_items: DefIdSet = items .iter() .filter_map(|mono_item| match *mono_item { - MonoItem::Fn(ref instance) => { - #[cfg(llvm_enzyme)] - autodiff_mono_items.push((mono_item, instance)); - Some(instance.def_id()) - } + MonoItem::Fn(ref instance) => Some(instance.def_id()), MonoItem::Static(def_id) => Some(def_id), _ => None, }) .collect(); - let autodiff_items = - autodiff::find_autodiff_source_functions(tcx, &usage_map, autodiff_mono_items); - let autodiff_items = tcx.arena.alloc_from_iter(autodiff_items); - // Output monomorphization stats per def_id if let SwitchWithOptPath::Enabled(ref path) = tcx.sess.opts.unstable_opts.dump_mono_stats && let Err(err) = @@ -1234,11 +1211,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio } } - MonoItemPartitions { - all_mono_items: tcx.arena.alloc(mono_items), - codegen_units, - autodiff_items, - } + MonoItemPartitions { all_mono_items: tcx.arena.alloc(mono_items), codegen_units } } /// Outputs stats about instantiation counts and estimated size, per `MonoItem`'s diff --git a/compiler/rustc_monomorphize/src/partitioning/autodiff.rs b/compiler/rustc_monomorphize/src/partitioning/autodiff.rs deleted file mode 100644 index 22d593b80b8..00000000000 --- a/compiler/rustc_monomorphize/src/partitioning/autodiff.rs +++ /dev/null @@ -1,143 +0,0 @@ -use rustc_ast::expand::autodiff_attrs::{AutoDiffItem, DiffActivity}; -use rustc_hir::def_id::LOCAL_CRATE; -use rustc_middle::bug; -use rustc_middle::mir::mono::MonoItem; -use rustc_middle::ty::{self, Instance, PseudoCanonicalInput, Ty, TyCtxt, TypingEnv}; -use rustc_symbol_mangling::symbol_name_for_instance_in_crate; -use tracing::{debug, trace}; - -use crate::partitioning::UsageMap; - -fn adjust_activity_to_abi<'tcx>(tcx: TyCtxt<'tcx>, fn_ty: Ty<'tcx>, da: &mut Vec<DiffActivity>) { - if !matches!(fn_ty.kind(), ty::FnDef(..)) { - bug!("expected fn def for autodiff, got {:?}", fn_ty); - } - - // We don't actually pass the types back into the type system. - // All we do is decide how to handle the arguments. - let sig = fn_ty.fn_sig(tcx).skip_binder(); - - let mut new_activities = vec![]; - let mut new_positions = vec![]; - for (i, ty) in sig.inputs().iter().enumerate() { - if let Some(inner_ty) = ty.builtin_deref(true) { - if inner_ty.is_slice() { - // Now we need to figure out the size of each slice element in memory to allow - // safety checks and usability improvements in the backend. 
- let sty = match inner_ty.builtin_index() { - Some(sty) => sty, - None => { - panic!("slice element type unknown"); - } - }; - let pci = PseudoCanonicalInput { - typing_env: TypingEnv::fully_monomorphized(), - value: sty, - }; - - let layout = tcx.layout_of(pci); - let elem_size = match layout { - Ok(layout) => layout.size, - Err(_) => { - bug!("autodiff failed to compute slice element size"); - } - }; - let elem_size: u32 = elem_size.bytes() as u32; - - // We know that the length will be passed as extra arg. - if !da.is_empty() { - // We are looking at a slice. The length of that slice will become an - // extra integer on llvm level. Integers are always const. - // However, if the slice get's duplicated, we want to know to later check the - // size. So we mark the new size argument as FakeActivitySize. - // There is one FakeActivitySize per slice, so for convenience we store the - // slice element size in bytes in it. We will use the size in the backend. - let activity = match da[i] { - DiffActivity::DualOnly - | DiffActivity::Dual - | DiffActivity::Dualv - | DiffActivity::DuplicatedOnly - | DiffActivity::Duplicated => { - DiffActivity::FakeActivitySize(Some(elem_size)) - } - DiffActivity::Const => DiffActivity::Const, - _ => bug!("unexpected activity for ptr/ref"), - }; - new_activities.push(activity); - new_positions.push(i + 1); - } - - continue; - } - } - } - // now add the extra activities coming from slices - // Reverse order to not invalidate the indices - for _ in 0..new_activities.len() { - let pos = new_positions.pop().unwrap(); - let activity = new_activities.pop().unwrap(); - da.insert(pos, activity); - } -} - -pub(crate) fn find_autodiff_source_functions<'tcx>( - tcx: TyCtxt<'tcx>, - usage_map: &UsageMap<'tcx>, - autodiff_mono_items: Vec<(&MonoItem<'tcx>, &Instance<'tcx>)>, -) -> Vec<AutoDiffItem> { - let mut autodiff_items: Vec<AutoDiffItem> = vec![]; - for (item, instance) in autodiff_mono_items { - let target_id = instance.def_id(); - let cg_fn_attr = &tcx.codegen_fn_attrs(target_id).autodiff_item; - let Some(target_attrs) = cg_fn_attr else { - continue; - }; - let mut input_activities: Vec<DiffActivity> = target_attrs.input_activity.clone(); - if target_attrs.is_source() { - trace!("source found: {:?}", target_id); - } - if !target_attrs.apply_autodiff() { - continue; - } - - let target_symbol = symbol_name_for_instance_in_crate(tcx, instance.clone(), LOCAL_CRATE); - - let source = - usage_map.used_map.get(&item).unwrap().into_iter().find_map(|item| match *item { - MonoItem::Fn(ref instance_s) => { - let source_id = instance_s.def_id(); - if let Some(ad) = &tcx.codegen_fn_attrs(source_id).autodiff_item - && ad.is_active() - { - return Some(instance_s); - } - None - } - _ => None, - }); - let inst = match source { - Some(source) => source, - None => continue, - }; - - debug!("source_id: {:?}", inst.def_id()); - let fn_ty = inst.ty(tcx, ty::TypingEnv::fully_monomorphized()); - assert!(fn_ty.is_fn()); - adjust_activity_to_abi(tcx, fn_ty, &mut input_activities); - let symb = symbol_name_for_instance_in_crate(tcx, inst.clone(), LOCAL_CRATE); - - let mut new_target_attrs = target_attrs.clone(); - new_target_attrs.input_activity = input_activities; - let itm = new_target_attrs.into_item(symb, target_symbol); - autodiff_items.push(itm); - } - - if !autodiff_items.is_empty() { - trace!("AUTODIFF ITEMS EXIST"); - for item in &mut *autodiff_items { - trace!("{}", &item); - } - } - - autodiff_items -} diff --git a/compiler/rustc_next_trait_solver/Cargo.toml 
b/compiler/rustc_next_trait_solver/Cargo.toml index 05bcabad02f..8b66f30cf0e 100644 --- a/compiler/rustc_next_trait_solver/Cargo.toml +++ b/compiler/rustc_next_trait_solver/Cargo.toml @@ -11,7 +11,7 @@ rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } rustc_type_ir = { path = "../rustc_type_ir", default-features = false } rustc_type_ir_macros = { path = "../rustc_type_ir_macros" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index 1bc35e599c7..da05c49756f 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -25,12 +25,8 @@ enum CanonicalizeInputKind { /// trait candidates relies on it when deciding whether a where-bound /// is trivial. ParamEnv, - /// When canonicalizing predicates, we don't keep `'static`. If we're - /// currently outside of the trait solver and canonicalize the root goal - /// during HIR typeck, we replace each occurance of a region with a - /// unique region variable. See the comment on `InferCtxt::in_hir_typeck` - /// for more details. - Predicate { is_hir_typeck_root_goal: bool }, + /// When canonicalizing predicates, we don't keep `'static`. + Predicate, } /// Whether we're canonicalizing a query input or the query response. @@ -191,7 +187,6 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> { pub fn canonicalize_input<P: TypeFoldable<I>>( delegate: &'a D, variables: &'a mut Vec<I::GenericArg>, - is_hir_typeck_root_goal: bool, input: QueryInput<I, P>, ) -> ty::Canonical<I, QueryInput<I, P>> { // First canonicalize the `param_env` while keeping `'static` @@ -201,9 +196,7 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> { // while *mostly* reusing the canonicalizer from above. let mut rest_canonicalizer = Canonicalizer { delegate, - canonicalize_mode: CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { - is_hir_typeck_root_goal, - }), + canonicalize_mode: CanonicalizeMode::Input(CanonicalizeInputKind::Predicate), variables, variable_lookup_table, @@ -481,31 +474,13 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz } }; - let var = if let CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { - is_hir_typeck_root_goal: true, - }) = self.canonicalize_mode - { - let var = ty::BoundVar::from(self.variables.len()); - self.variables.push(r.into()); - self.var_kinds.push(kind); - var - } else { - self.get_or_insert_bound_var(r, kind) - }; + let var = self.get_or_insert_bound_var(r, kind); Region::new_anon_bound(self.cx(), self.binder_index, var) } fn fold_ty(&mut self, t: I::Ty) -> I::Ty { - if let CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { - is_hir_typeck_root_goal: true, - }) = self.canonicalize_mode - { - // If we're canonicalizing a root goal during HIR typeck, we - // must not use the `cache` as we want to map each occurrence - // of a region to a unique existential variable. 
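The canonicalizer change above removes the HIR-typeck root-goal special case in which every occurrence of a region was pushed as a fresh canonical variable; all inputs now go through the same deduplicating `get_or_insert_bound_var` path. A small standalone illustration of the difference between the two strategies, using strings in place of regions:

    use std::collections::HashMap;

    // Toy canonicalizer for regions only: either deduplicate equal regions into a
    // single canonical variable (the behavior kept above), or give every
    // occurrence a fresh variable (the special case removed above).
    fn canonicalize_dedup(regions: &[&str]) -> Vec<usize> {
        let mut lookup: HashMap<String, usize> = HashMap::new();
        regions
            .iter()
            .map(|r| {
                let next = lookup.len();
                *lookup.entry((*r).to_string()).or_insert(next)
            })
            .collect()
    }

    fn canonicalize_fresh(regions: &[&str]) -> Vec<usize> {
        // Every occurrence gets its own variable, even if the region repeats.
        (0..regions.len()).collect()
    }

    fn main() {
        let regions = ["'a", "'b", "'a"];
        assert_eq!(canonicalize_dedup(&regions), vec![0, 1, 0]);
        assert_eq!(canonicalize_fresh(&regions), vec![0, 1, 2]);
    }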
- self.inner_fold_ty(t) - } else if let Some(&ty) = self.cache.get(&(self.binder_index, t)) { + if let Some(&ty) = self.cache.get(&(self.binder_index, t)) { ty } else { let res = self.inner_fold_ty(t); diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 5e08c3a03d8..a4a8317912a 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -8,6 +8,7 @@ use std::ops::ControlFlow; use derive_where::derive_where; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; +use rustc_type_ir::search_graph::CandidateHeadUsages; use rustc_type_ir::solve::SizedTraitKind; use rustc_type_ir::{ self as ty, Interner, TypeFlags, TypeFoldable, TypeSuperVisitable, TypeVisitable, @@ -33,10 +34,11 @@ enum AliasBoundKind { /// /// It consists of both the `source`, which describes how that goal would be proven, /// and the `result` when using the given `source`. -#[derive_where(Clone, Debug; I: Interner)] +#[derive_where(Debug; I: Interner)] pub(super) struct Candidate<I: Interner> { pub(super) source: CandidateSource<I>, pub(super) result: CanonicalResponse<I>, + pub(super) head_usages: CandidateHeadUsages, } /// Methods used to assemble candidates for either trait or projection goals. @@ -116,8 +118,11 @@ where ecx: &mut EvalCtxt<'_, D>, goal: Goal<I, Self>, assumption: I::Clause, - ) -> Result<Candidate<I>, NoSolution> { - Self::fast_reject_assumption(ecx, goal, assumption)?; + ) -> Result<Candidate<I>, CandidateHeadUsages> { + match Self::fast_reject_assumption(ecx, goal, assumption) { + Ok(()) => {} + Err(NoSolution) => return Err(CandidateHeadUsages::default()), + } // Dealing with `ParamEnv` candidates is a bit of a mess as we need to lazily // check whether the candidate is global while considering normalization. @@ -126,18 +131,23 @@ where // in `probe` even if the candidate does not apply before we get there. We handle this // by using a `Cell` here. We only ever write into it inside of `match_assumption`. let source = Cell::new(CandidateSource::ParamEnv(ParamEnvSource::Global)); - ecx.probe(|result: &QueryResult<I>| inspect::ProbeKind::TraitCandidate { - source: source.get(), - result: *result, - }) - .enter(|ecx| { - Self::match_assumption(ecx, goal, assumption, |ecx| { - ecx.try_evaluate_added_goals()?; - source.set(ecx.characterize_param_env_assumption(goal.param_env, assumption)?); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + let (result, head_usages) = ecx + .probe(|result: &QueryResult<I>| inspect::ProbeKind::TraitCandidate { + source: source.get(), + result: *result, }) - }) - .map(|result| Candidate { source: source.get(), result }) + .enter_single_candidate(|ecx| { + Self::match_assumption(ecx, goal, assumption, |ecx| { + ecx.try_evaluate_added_goals()?; + source.set(ecx.characterize_param_env_assumption(goal.param_env, assumption)?); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) + }); + + match result { + Ok(result) => Ok(Candidate { source: source.get(), result, head_usages }), + Err(NoSolution) => Err(head_usages), + } } /// Try equating an assumption predicate against a goal's predicate. If it @@ -355,6 +365,19 @@ pub(super) enum AssembleCandidatesFrom { EnvAndBounds, } +/// This is currently used to track the [CandidateHeadUsages] of all failed `ParamEnv` +/// candidates. 
This is then used to ignore their head usages in case there's another +/// always applicable `ParamEnv` candidate. Look at how `param_env_head_usages` is +/// used in the code for more details. +/// +/// We could easily extend this to also ignore head usages of other ignored candidates. +/// However, we currently don't have any tests where this matters and the complexity of +/// doing so does not feel worth it for now. +#[derive(Debug)] +pub(super) struct FailedCandidateInfo { + pub param_env_head_usages: CandidateHeadUsages, +} + impl<D, I> EvalCtxt<'_, D> where D: SolverDelegate<Interner = I>, @@ -364,16 +387,20 @@ where &mut self, goal: Goal<I, G>, assemble_from: AssembleCandidatesFrom, - ) -> Vec<Candidate<I>> { + ) -> (Vec<Candidate<I>>, FailedCandidateInfo) { + let mut candidates = vec![]; + let mut failed_candidate_info = + FailedCandidateInfo { param_env_head_usages: CandidateHeadUsages::default() }; let Ok(normalized_self_ty) = self.structurally_normalize_ty(goal.param_env, goal.predicate.self_ty()) else { - return vec![]; + return (candidates, failed_candidate_info); }; if normalized_self_ty.is_ty_var() { debug!("self type has been normalized to infer"); - return self.forced_ambiguity(MaybeCause::Ambiguity).into_iter().collect(); + candidates.extend(self.forced_ambiguity(MaybeCause::Ambiguity)); + return (candidates, failed_candidate_info); } let goal: Goal<I, G> = goal @@ -382,16 +409,15 @@ where // normalizing the self type as well, since type variables are not uniquified. let goal = self.resolve_vars_if_possible(goal); - let mut candidates = vec![]; - if let TypingMode::Coherence = self.typing_mode() && let Ok(candidate) = self.consider_coherence_unknowable_candidate(goal) { - return vec![candidate]; + candidates.push(candidate); + return (candidates, failed_candidate_info); } self.assemble_alias_bound_candidates(goal, &mut candidates); - self.assemble_param_env_candidates(goal, &mut candidates); + self.assemble_param_env_candidates(goal, &mut candidates, &mut failed_candidate_info); match assemble_from { AssembleCandidatesFrom::All => { @@ -423,7 +449,7 @@ where AssembleCandidatesFrom::EnvAndBounds => {} } - candidates + (candidates, failed_candidate_info) } pub(super) fn forced_ambiguity( @@ -584,9 +610,15 @@ where &mut self, goal: Goal<I, G>, candidates: &mut Vec<Candidate<I>>, + failed_candidate_info: &mut FailedCandidateInfo, ) { for assumption in goal.param_env.caller_bounds().iter() { - candidates.extend(G::probe_and_consider_param_env_candidate(self, goal, assumption)); + match G::probe_and_consider_param_env_candidate(self, goal, assumption) { + Ok(candidate) => candidates.push(candidate), + Err(head_usages) => { + failed_candidate_info.param_env_head_usages.merge_usages(head_usages) + } + } } } @@ -661,7 +693,11 @@ where if let Ok(result) = self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) { - candidates.push(Candidate { source: CandidateSource::AliasBound, result }); + candidates.push(Candidate { + source: CandidateSource::AliasBound, + result, + head_usages: CandidateHeadUsages::default(), + }); } return; } @@ -959,7 +995,7 @@ where // Even when a trait bound has been proven using a where-bound, we // still need to consider alias-bounds for normalization, see // `tests/ui/next-solver/alias-bound-shadowed-by-env.rs`. - let mut candidates: Vec<_> = self + let (mut candidates, _) = self .assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::EnvAndBounds); // We still need to prefer where-bounds over alias-bounds however. 
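`FailedCandidateInfo` above is needed because a where-bound that fails `probe_and_consider_param_env_candidate` never yields a `Candidate`, so its head usages have to be accumulated on the side and handed back alongside the successful candidates. A toy sketch of that bookkeeping, with simplified stand-in types:

    // Toy model of assembling param-env candidates while remembering the "head
    // usages" of the ones that failed (stand-in types, not the real solver).
    #[derive(Default, Debug, Clone, PartialEq)]
    struct HeadUsages(Vec<u32>);

    impl HeadUsages {
        fn merge(&mut self, other: HeadUsages) {
            self.0.extend(other.0);
        }
    }

    #[derive(Debug)]
    struct Candidate {
        result: &'static str,
        head_usages: HeadUsages,
    }

    fn assemble(
        assumptions: Vec<Result<Candidate, HeadUsages>>,
    ) -> (Vec<Candidate>, HeadUsages) {
        let mut candidates = Vec::new();
        // Usages of *failed* candidates are tracked separately so they can later
        // be ignored when an always-applicable candidate wins.
        let mut failed_param_env_usages = HeadUsages::default();
        for outcome in assumptions {
            match outcome {
                Ok(candidate) => candidates.push(candidate),
                Err(usages) => failed_param_env_usages.merge(usages),
            }
        }
        (candidates, failed_param_env_usages)
    }

    fn main() {
        let (ok, failed) = assemble(vec![
            Ok(Candidate { result: "yes", head_usages: HeadUsages(vec![1]) }),
            Err(HeadUsages(vec![2, 3])),
        ]);
        assert_eq!(ok.len(), 1);
        assert_eq!(ok[0].result, "yes");
        assert_eq!(failed, HeadUsages(vec![2, 3]));
    }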
@@ -972,23 +1008,20 @@ where return inject_normalize_to_rigid_candidate(self); } - if let Some(response) = self.try_merge_candidates(&candidates) { + if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok(response) } else { self.flounder(&candidates) } } TraitGoalProvenVia::Misc => { - let mut candidates = + let (mut candidates, _) = self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); // Prefer "orphaned" param-env normalization predicates, which are used // (for example, and ideally only) when proving item bounds for an impl. - let candidates_from_env: Vec<_> = candidates - .extract_if(.., |c| matches!(c.source, CandidateSource::ParamEnv(_))) - .collect(); - if let Some(response) = self.try_merge_candidates(&candidates_from_env) { - return Ok(response); + if candidates.iter().any(|c| matches!(c.source, CandidateSource::ParamEnv(_))) { + candidates.retain(|c| matches!(c.source, CandidateSource::ParamEnv(_))); } // We drop specialized impls to allow normalization via a final impl here. In case @@ -997,7 +1030,7 @@ where // means we can just ignore inference constraints and don't have to special-case // constraining the normalized-to `term`. self.filter_specialized_impls(AllowInferenceConstraints::Yes, &mut candidates); - if let Some(response) = self.try_merge_candidates(&candidates) { + if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok(response) } else { self.flounder(&candidates) diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index faa86734d08..d25e74e7335 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -75,17 +75,10 @@ where Ok(ty::Binder::dummy(vec![args.as_coroutine_closure().tupled_upvars_ty()])) } - ty::Coroutine(def_id, args) => { - let coroutine_args = args.as_coroutine(); - Ok(ty::Binder::dummy(vec![ - coroutine_args.tupled_upvars_ty(), - Ty::new_coroutine_witness( - ecx.cx(), - def_id, - ecx.cx().mk_args(coroutine_args.parent_args().as_slice()), - ), - ])) - } + ty::Coroutine(def_id, args) => Ok(ty::Binder::dummy(vec![ + args.as_coroutine().tupled_upvars_ty(), + Ty::new_coroutine_witness_for_coroutine(ecx.cx(), def_id, args), + ])), ty::CoroutineWitness(def_id, args) => Ok(ecx .cx() @@ -251,14 +244,9 @@ where Movability::Static => Err(NoSolution), Movability::Movable => { if ecx.cx().features().coroutine_clone() { - let coroutine = args.as_coroutine(); Ok(ty::Binder::dummy(vec![ - coroutine.tupled_upvars_ty(), - Ty::new_coroutine_witness( - ecx.cx(), - def_id, - ecx.cx().mk_args(coroutine.parent_args().as_slice()), - ), + args.as_coroutine().tupled_upvars_ty(), + Ty::new_coroutine_witness_for_coroutine(ecx.cx(), def_id, args), ])) } else { Err(NoSolution) diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs index 9a22bf58c03..b7ae9994c62 100644 --- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs @@ -6,7 +6,7 @@ use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; use rustc_type_ir::solve::SizedTraitKind; use rustc_type_ir::solve::inspect::ProbeKind; -use rustc_type_ir::{self as ty, Interner, elaborate}; +use rustc_type_ir::{self as ty, Interner, TypingMode, elaborate}; use tracing::instrument; use 
super::assembly::{Candidate, structural_traits}; @@ -135,12 +135,16 @@ where } let impl_polarity = cx.impl_polarity(impl_def_id); - match impl_polarity { + let certainty = match impl_polarity { ty::ImplPolarity::Negative => return Err(NoSolution), - ty::ImplPolarity::Reservation => { - unimplemented!("reservation impl for const trait: {:?}", goal) - } - ty::ImplPolarity::Positive => {} + ty::ImplPolarity::Reservation => match ecx.typing_mode() { + TypingMode::Coherence => Certainty::AMBIGUOUS, + TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } + | TypingMode::PostBorrowckAnalysis { .. } + | TypingMode::PostAnalysis => return Err(NoSolution), + }, + ty::ImplPolarity::Positive => Certainty::Yes, }; if !cx.impl_is_const(impl_def_id) { @@ -171,7 +175,7 @@ where }); ecx.add_goals(GoalSource::ImplWhereBound, const_conditions); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.evaluate_added_goals_and_make_canonical_response(certainty) }) } diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs index 74c5b49ea92..4644b145b18 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs @@ -57,7 +57,6 @@ where /// This expects `goal` and `opaque_types` to be eager resolved. pub(super) fn canonicalize_goal( &self, - is_hir_typeck_root_goal: bool, goal: Goal<I, I::Predicate>, opaque_types: Vec<(ty::OpaqueTypeKey<I>, I::Ty)>, ) -> (Vec<I::GenericArg>, CanonicalInput<I, I::Predicate>) { @@ -65,7 +64,6 @@ where let canonical = Canonicalizer::canonicalize_input( self.delegate, &mut orig_values, - is_hir_typeck_root_goal, QueryInput { goal, predefined_opaques_in_body: self diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index 8671cc7c3d3..4f87902e46e 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -4,11 +4,10 @@ use std::ops::ControlFlow; #[cfg(feature = "nightly")] use rustc_macros::HashStable_NoContext; use rustc_type_ir::data_structures::{HashMap, HashSet}; -use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; use rustc_type_ir::relate::Relate; use rustc_type_ir::relate::solver_relating::RelateExt; -use rustc_type_ir::search_graph::PathKind; +use rustc_type_ir::search_graph::{CandidateHeadUsages, PathKind}; use rustc_type_ir::{ self as ty, CanonicalVarValues, InferCtxtLike, Interner, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, @@ -399,6 +398,10 @@ where result } + pub(super) fn ignore_candidate_head_usages(&mut self, usages: CandidateHeadUsages) { + self.search_graph.ignore_candidate_head_usages(usages); + } + /// Recursively evaluates `goal`, returning whether any inference vars have /// been constrained and the certainty of the result. 
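The effect-goal hunk above replaces the old `unimplemented!` for reservation impls: during coherence they now make the goal ambiguous, while in every other typing mode they are simply not a usable candidate. A condensed model of that dispatch, using stand-in enums rather than the real solver types:

    // Simplified model of how a reservation impl is treated per typing mode.
    #[derive(Debug, PartialEq)]
    enum TypingMode { Coherence, Analysis, PostAnalysis }

    #[derive(Debug, PartialEq)]
    enum Certainty { Yes, Ambiguous }

    #[derive(Debug, PartialEq)]
    enum ImplPolarity { Positive, Negative, Reservation }

    fn certainty_for_impl(polarity: ImplPolarity, mode: TypingMode) -> Result<Certainty, ()> {
        match polarity {
            // A negative impl is never a usable candidate.
            ImplPolarity::Negative => Err(()),
            // Reservation impls only matter for coherence, where they keep the
            // goal ambiguous; in all other modes they are not a candidate.
            ImplPolarity::Reservation => match mode {
                TypingMode::Coherence => Ok(Certainty::Ambiguous),
                _ => Err(()),
            },
            ImplPolarity::Positive => Ok(Certainty::Yes),
        }
    }

    fn main() {
        assert_eq!(
            certainty_for_impl(ImplPolarity::Reservation, TypingMode::Coherence),
            Ok(Certainty::Ambiguous)
        );
        assert_eq!(certainty_for_impl(ImplPolarity::Reservation, TypingMode::PostAnalysis), Err(()));
        assert_eq!(certainty_for_impl(ImplPolarity::Positive, TypingMode::Analysis), Ok(Certainty::Yes));
        assert_eq!(certainty_for_impl(ImplPolarity::Negative, TypingMode::Analysis), Err(()));
    }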
fn evaluate_goal( @@ -455,10 +458,7 @@ where let opaque_types = self.delegate.clone_opaque_types_lookup_table(); let (goal, opaque_types) = eager_resolve_vars(self.delegate, (goal, opaque_types)); - let is_hir_typeck_root_goal = matches!(goal_evaluation_kind, GoalEvaluationKind::Root) - && self.delegate.in_hir_typeck(); - let (orig_values, canonical_goal) = - self.canonicalize_goal(is_hir_typeck_root_goal, goal, opaque_types); + let (orig_values, canonical_goal) = self.canonicalize_goal(goal, opaque_types); let mut goal_evaluation = self.inspect.new_goal_evaluation(goal, &orig_values, goal_evaluation_kind); let canonical_result = self.search_graph.evaluate_goal( @@ -1127,6 +1127,7 @@ where self.delegate.fetch_eligible_assoc_item(goal_trait_ref, trait_assoc_def_id, impl_def_id) } + #[instrument(level = "debug", skip(self), ret)] pub(super) fn register_hidden_type_in_storage( &mut self, opaque_type_key: ty::OpaqueTypeKey<I>, @@ -1153,29 +1154,6 @@ where self.add_goals(GoalSource::AliasWellFormed, goals); } - // Do something for each opaque/hidden pair defined with `def_id` in the - // current inference context. - pub(super) fn probe_existing_opaque_ty( - &mut self, - key: ty::OpaqueTypeKey<I>, - ) -> Option<(ty::OpaqueTypeKey<I>, I::Ty)> { - // We shouldn't have any duplicate entries when using - // this function during `TypingMode::Analysis`. - let duplicate_entries = self.delegate.clone_duplicate_opaque_types(); - assert!(duplicate_entries.is_empty(), "unexpected duplicates: {duplicate_entries:?}"); - let mut matching = self.delegate.clone_opaque_types_lookup_table().into_iter().filter( - |(candidate_key, _)| { - candidate_key.def_id == key.def_id - && DeepRejectCtxt::relate_rigid_rigid(self.cx()) - .args_may_unify(candidate_key.args, key.args) - }, - ); - let first = matching.next(); - let second = matching.next(); - assert_eq!(second, None); - first - } - // Try to evaluate a const, or return `None` if the const is too generic. // This doesn't mean the const isn't evaluatable, though, and should be treated // as an ambiguity rather than no-solution. 
diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs index ed0cedc4077..e5658ba32ff 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs @@ -1,5 +1,6 @@ use std::marker::PhantomData; +use rustc_type_ir::search_graph::CandidateHeadUsages; use rustc_type_ir::{InferCtxtLike, Interner}; use tracing::instrument; @@ -25,6 +26,20 @@ where D: SolverDelegate<Interner = I>, I: Interner, { + pub(in crate::solve) fn enter_single_candidate( + self, + f: impl FnOnce(&mut EvalCtxt<'_, D>) -> T, + ) -> (T, CandidateHeadUsages) { + self.ecx.search_graph.enter_single_candidate(); + let mut candidate_usages = CandidateHeadUsages::default(); + let result = self.enter(|ecx| { + let result = f(ecx); + candidate_usages = ecx.search_graph.finish_single_candidate(); + result + }); + (result, candidate_usages) + } + pub(in crate::solve) fn enter(self, f: impl FnOnce(&mut EvalCtxt<'_, D>) -> T) -> T { let ProbeCtxt { ecx: outer, probe_kind, _result } = self; @@ -78,7 +93,8 @@ where self, f: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult<I>, ) -> Result<Candidate<I>, NoSolution> { - self.cx.enter(|ecx| f(ecx)).map(|result| Candidate { source: self.source, result }) + let (result, head_usages) = self.cx.enter_single_candidate(f); + result.map(|result| Candidate { source: self.source, result, head_usages }) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs index 2feebe270a6..c2745c878dc 100644 --- a/compiler/rustc_next_trait_solver/src/solve/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs @@ -236,6 +236,12 @@ where } } +#[derive(Debug)] +enum MergeCandidateInfo { + AlwaysApplicable(usize), + EqualResponse, +} + impl<D, I> EvalCtxt<'_, D> where D: SolverDelegate<Interner = I>, @@ -248,23 +254,25 @@ where fn try_merge_candidates( &mut self, candidates: &[Candidate<I>], - ) -> Option<CanonicalResponse<I>> { + ) -> Option<(CanonicalResponse<I>, MergeCandidateInfo)> { if candidates.is_empty() { return None; } + let always_applicable = candidates.iter().enumerate().find(|(_, candidate)| { + candidate.result.value.certainty == Certainty::Yes + && has_no_inference_or_external_constraints(candidate.result) + }); + if let Some((i, c)) = always_applicable { + return Some((c.result, MergeCandidateInfo::AlwaysApplicable(i))); + } + let one: CanonicalResponse<I> = candidates[0].result; if candidates[1..].iter().all(|candidate| candidate.result == one) { - return Some(one); + return Some((one, MergeCandidateInfo::EqualResponse)); } - candidates - .iter() - .find(|candidate| { - candidate.result.value.certainty == Certainty::Yes - && has_no_inference_or_external_constraints(candidate.result) - }) - .map(|candidate| candidate.result) + None } fn bail_with_ambiguity(&mut self, candidates: &[Candidate<I>]) -> CanonicalResponse<I> { diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs index df3ad1e468b..a5f857a1dd8 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs @@ -1,13 +1,12 @@ //! Computes a normalizes-to (projection) goal for opaque types. This goal //! behaves differently depending on the current `TypingMode`. 
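`try_merge_candidates` above now also reports how the merge succeeded: either the index of the first always-applicable candidate (a certain result with no inference or external constraints) or the fact that all responses were identical. A self-contained sketch of that rule with stand-in response types:

    // Toy version of the candidate-merging rule: prefer the first
    // "always applicable" candidate; otherwise all responses must agree.
    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Response {
        certain: bool,
        has_constraints: bool,
        value: u32,
    }

    #[derive(Debug, PartialEq)]
    enum MergeInfo {
        AlwaysApplicable(usize),
        EqualResponse,
    }

    fn try_merge(candidates: &[Response]) -> Option<(Response, MergeInfo)> {
        if candidates.is_empty() {
            return None;
        }
        // An always-applicable candidate (certain and constraint-free) wins outright.
        if let Some((i, c)) = candidates
            .iter()
            .enumerate()
            .find(|(_, c)| c.certain && !c.has_constraints)
        {
            return Some((*c, MergeInfo::AlwaysApplicable(i)));
        }
        // Otherwise all candidates must agree on the response.
        let first = candidates[0];
        if candidates[1..].iter().all(|c| *c == first) {
            return Some((first, MergeInfo::EqualResponse));
        }
        None
    }

    fn main() {
        let ambiguous = Response { certain: false, has_constraints: false, value: 0 };
        let certain = Response { certain: true, has_constraints: false, value: 1 };
        assert_eq!(try_merge(&[ambiguous, certain]), Some((certain, MergeInfo::AlwaysApplicable(1))));
        assert_eq!(try_merge(&[ambiguous, ambiguous]), Some((ambiguous, MergeInfo::EqualResponse)));
        assert_eq!(try_merge(&[]), None);
    }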
-use rustc_index::bit_set::GrowableBitSet; use rustc_type_ir::inherent::*; use rustc_type_ir::solve::GoalSource; use rustc_type_ir::{self as ty, Interner, TypingMode, fold_regions}; use crate::delegate::SolverDelegate; -use crate::solve::{Certainty, EvalCtxt, Goal, NoSolution, QueryResult, inspect}; +use crate::solve::{Certainty, EvalCtxt, Goal, QueryResult}; impl<D, I> EvalCtxt<'_, D> where @@ -39,100 +38,68 @@ where self.add_goal(GoalSource::Misc, goal.with(cx, ty::PredicateKind::Ambiguous)); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } - TypingMode::Analysis { defining_opaque_types_and_generators } => { + TypingMode::Analysis { + defining_opaque_types_and_generators: defining_opaque_types, + } + | TypingMode::Borrowck { defining_opaque_types } => { let Some(def_id) = opaque_ty .def_id .as_local() - .filter(|&def_id| defining_opaque_types_and_generators.contains(&def_id)) + .filter(|&def_id| defining_opaque_types.contains(&def_id)) else { + // If we're not in the defining scope, treat the alias as rigid. self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes); }; - // FIXME: This may have issues when the args contain aliases... - match uses_unique_placeholders_ignoring_regions(self.cx(), opaque_ty.args) { - Err(NotUniqueParam::NotParam(param)) if param.is_non_region_infer() => { - return self.evaluate_added_goals_and_make_canonical_response( - Certainty::AMBIGUOUS, - ); - } - Err(_) => { - return Err(NoSolution); - } - Ok(()) => {} - } - // Prefer opaques registered already. - let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args }; - // FIXME: This also unifies the previous hidden type with the expected. + // We structurally normalize the args so that we're able to detect defining uses + // later on. // - // If that fails, we insert `expected` as a new hidden type instead of - // eagerly emitting an error. - let existing = self.probe_existing_opaque_ty(opaque_type_key); - if let Some((candidate_key, candidate_ty)) = existing { - return self - .probe(|result| inspect::ProbeKind::OpaqueTypeStorageLookup { - result: *result, - }) - .enter(|ecx| { - for (a, b) in std::iter::zip( - candidate_key.args.iter(), - opaque_type_key.args.iter(), - ) { - ecx.eq(goal.param_env, a, b)?; - } - ecx.eq(goal.param_env, candidate_ty, expected)?; - ecx.add_item_bounds_for_hidden_type( - def_id.into(), - candidate_key.args, - goal.param_env, - candidate_ty, - ); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }); - } + // This reduces the amount of duplicate definitions in the `opaque_type_storage` and + // strengthens inference. This causes us to subtly depend on the normalization behavior + // when inferring the hidden type of opaques. + // + // E.g. it's observable that we don't normalize nested aliases with bound vars in + // `structurally_normalize` and because we use structural lookup, we also don't + // reuse an entry for `Tait<for<'a> fn(&'a ())>` for `Tait<for<'b> fn(&'b ())>`. 
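The comment above, together with the argument-mapping code in the next hunk lines, structurally normalizes the type and const arguments of the opaque before the storage lookup, while lifetimes are passed through unchanged. A toy version of that mapping step, with a fake `normalize` standing in for structural normalization:

    // Toy model: normalize type/const arguments, keep lifetimes as-is.
    #[derive(Debug, Clone, PartialEq)]
    enum GenericArg {
        Lifetime(&'static str),
        Type(String),
        Const(String),
    }

    // Stand-in for structural normalization: here it just strips an "alias:" marker;
    // the real step resolves aliases and evaluates consts.
    fn normalize(s: &str) -> String {
        s.strip_prefix("alias:").unwrap_or(s).to_string()
    }

    fn normalize_args(args: Vec<GenericArg>) -> Vec<GenericArg> {
        args.into_iter()
            .map(|arg| match arg {
                GenericArg::Lifetime(lt) => GenericArg::Lifetime(lt),
                GenericArg::Type(ty) => GenericArg::Type(normalize(&ty)),
                GenericArg::Const(ct) => GenericArg::Const(normalize(&ct)),
            })
            .collect()
    }

    fn main() {
        let args = vec![
            GenericArg::Lifetime("'a"),
            GenericArg::Type("alias:<T as Trait>::Assoc".to_string()),
            GenericArg::Const("3".to_string()),
        ];
        let normalized = normalize_args(args);
        assert_eq!(normalized[0], GenericArg::Lifetime("'a"));
        assert_eq!(normalized[1], GenericArg::Type("<T as Trait>::Assoc".to_string()));
    }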
+ let normalized_args = + cx.mk_args_from_iter(opaque_ty.args.iter().map(|arg| match arg.kind() { + ty::GenericArgKind::Lifetime(lt) => Ok(lt.into()), + ty::GenericArgKind::Type(ty) => { + self.structurally_normalize_ty(goal.param_env, ty).map(Into::into) + } + ty::GenericArgKind::Const(ct) => { + self.structurally_normalize_const(goal.param_env, ct).map(Into::into) + } + }))?; - // Otherwise, define a new opaque type - let prev = self.register_hidden_type_in_storage(opaque_type_key, expected); - assert_eq!(prev, None); - self.add_item_bounds_for_hidden_type( - def_id.into(), - opaque_ty.args, - goal.param_env, - expected, - ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } - // Very similar to `TypingMode::Analysis` with some notably differences: - // - we accept opaque types even if they have non-universal arguments - // - we do a structural lookup instead of semantically unifying regions - // - the hidden type starts out as the type from HIR typeck with fresh region - // variables instead of a fully unconstrained inference variable - TypingMode::Borrowck { defining_opaque_types } => { - let Some(def_id) = opaque_ty - .def_id - .as_local() - .filter(|&def_id| defining_opaque_types.contains(&def_id)) - else { - self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); - return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes); - }; + let opaque_type_key = ty::OpaqueTypeKey { def_id, args: normalized_args }; + if let Some(prev) = self.register_hidden_type_in_storage(opaque_type_key, expected) + { + self.eq(goal.param_env, expected, prev)?; + } else { + // During HIR typeck, opaque types start out as unconstrained + // inference variables. In borrowck we instead use the type + // computed in HIR typeck as the initial value. + match self.typing_mode() { + TypingMode::Analysis { .. } => {} + TypingMode::Borrowck { .. } => { + let actual = cx + .type_of_opaque_hir_typeck(def_id) + .instantiate(cx, opaque_ty.args); + let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() { + ty::ReErased => self.next_region_var(), + _ => re, + }); + self.eq(goal.param_env, expected, actual)?; + } + _ => unreachable!(), + } + } - let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args }; - let actual = self - .register_hidden_type_in_storage(opaque_type_key, expected) - .unwrap_or_else(|| { - let actual = - cx.type_of_opaque_hir_typeck(def_id).instantiate(cx, opaque_ty.args); - let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() { - ty::ReErased => self.next_region_var(), - _ => re, - }); - actual - }); - self.eq(goal.param_env, expected, actual)?; self.add_item_bounds_for_hidden_type( def_id.into(), - opaque_ty.args, + normalized_args, goal.param_env, expected, ); @@ -168,44 +135,3 @@ where } } } - -/// Checks whether each generic argument is simply a unique generic placeholder. -/// -/// FIXME: Interner argument is needed to constrain the `I` parameter. -fn uses_unique_placeholders_ignoring_regions<I: Interner>( - _cx: I, - args: I::GenericArgs, -) -> Result<(), NotUniqueParam<I>> { - let mut seen = GrowableBitSet::default(); - for arg in args.iter() { - match arg.kind() { - // Ignore regions, since we can't resolve those in a canonicalized - // query in the trait solver. 
- ty::GenericArgKind::Lifetime(_) => {} - ty::GenericArgKind::Type(t) => match t.kind() { - ty::Placeholder(p) => { - if !seen.insert(p.var()) { - return Err(NotUniqueParam::DuplicateParam(t.into())); - } - } - _ => return Err(NotUniqueParam::NotParam(t.into())), - }, - ty::GenericArgKind::Const(c) => match c.kind() { - ty::ConstKind::Placeholder(p) => { - if !seen.insert(p.var()) { - return Err(NotUniqueParam::DuplicateParam(c.into())); - } - } - _ => return Err(NotUniqueParam::NotParam(c.into())), - }, - } - } - - Ok(()) -} - -// FIXME: This should check for dupes and non-params first, then infer vars. -enum NotUniqueParam<I: Interner> { - DuplicateParam(I::GenericArg), - NotParam(I::GenericArg), -} diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index 60bae738e61..891ecab041a 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -6,18 +6,20 @@ use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; use rustc_type_ir::solve::{CanonicalResponse, SizedTraitKind}; use rustc_type_ir::{ - self as ty, Interner, Movability, TraitPredicate, TraitRef, TypeVisitableExt as _, TypingMode, - Upcast as _, elaborate, + self as ty, Interner, Movability, PredicatePolarity, TraitPredicate, TraitRef, + TypeVisitableExt as _, TypingMode, Upcast as _, elaborate, }; use tracing::{debug, instrument, trace}; use crate::delegate::SolverDelegate; use crate::solve::assembly::structural_traits::{self, AsyncCallableRelevantTypes}; -use crate::solve::assembly::{self, AllowInferenceConstraints, AssembleCandidatesFrom, Candidate}; +use crate::solve::assembly::{ + self, AllowInferenceConstraints, AssembleCandidatesFrom, Candidate, FailedCandidateInfo, +}; use crate::solve::inspect::ProbeKind; use crate::solve::{ BuiltinImplSource, CandidateSource, Certainty, EvalCtxt, Goal, GoalSource, MaybeCause, - NoSolution, ParamEnvSource, QueryResult, has_only_region_constraints, + MergeCandidateInfo, NoSolution, ParamEnvSource, QueryResult, has_only_region_constraints, }; impl<D, I> assembly::GoalKind<D> for TraitPredicate<I> @@ -131,19 +133,26 @@ where cx: I, clause_def_id: I::DefId, goal_def_id: I::DefId, + polarity: PredicatePolarity, ) -> bool { clause_def_id == goal_def_id // PERF(sized-hierarchy): Sizedness supertraits aren't elaborated to improve perf, so // check for a `MetaSized` supertrait being matched against a `Sized` assumption. // // `PointeeSized` bounds are syntactic sugar for a lack of bounds so don't need this. - || (cx.is_lang_item(clause_def_id, TraitSolverLangItem::Sized) + || (polarity == PredicatePolarity::Positive + && cx.is_lang_item(clause_def_id, TraitSolverLangItem::Sized) && cx.is_lang_item(goal_def_id, TraitSolverLangItem::MetaSized)) } if let Some(trait_clause) = assumption.as_trait_clause() && trait_clause.polarity() == goal.predicate.polarity - && trait_def_id_matches(ecx.cx(), trait_clause.def_id(), goal.predicate.def_id()) + && trait_def_id_matches( + ecx.cx(), + trait_clause.def_id(), + goal.predicate.def_id(), + goal.predicate.polarity, + ) && DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( goal.predicate.trait_ref.args, trait_clause.skip_binder().trait_ref.args, @@ -166,6 +175,8 @@ where // PERF(sized-hierarchy): Sizedness supertraits aren't elaborated to improve perf, so // check for a `Sized` subtrait when looking for `MetaSized`. 
`PointeeSized` bounds // are syntactic sugar for a lack of bounds so don't need this. + // We don't need to check polarity, `fast_reject_assumption` already rejected non-`Positive` + // polarity `Sized` assumptions as matching non-`Positive` `MetaSized` goals. if ecx.cx().is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::MetaSized) && ecx.cx().is_lang_item(trait_clause.def_id(), TraitSolverLangItem::Sized) { @@ -1344,9 +1355,10 @@ where pub(super) fn merge_trait_candidates( &mut self, mut candidates: Vec<Candidate<I>>, + failed_candidate_info: FailedCandidateInfo, ) -> Result<(CanonicalResponse<I>, Option<TraitGoalProvenVia>), NoSolution> { if let TypingMode::Coherence = self.typing_mode() { - return if let Some(response) = self.try_merge_candidates(&candidates) { + return if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok((response, Some(TraitGoalProvenVia::Misc))) } else { self.flounder(&candidates).map(|r| (r, None)) @@ -1376,10 +1388,41 @@ where let where_bounds: Vec<_> = candidates .extract_if(.., |c| matches!(c.source, CandidateSource::ParamEnv(_))) .collect(); - return if let Some(response) = self.try_merge_candidates(&where_bounds) { - Ok((response, Some(TraitGoalProvenVia::ParamEnv))) + if let Some((response, info)) = self.try_merge_candidates(&where_bounds) { + match info { + // If there's an always applicable candidate, the result of all + // other candidates does not matter. This means we can ignore + // them when checking whether we've reached a fixpoint. + // + // We always prefer the first always applicable candidate, even if a + // later candidate is also always applicable and would result in fewer + // reruns. We could slightly improve this by e.g. searching for another + // always applicable candidate which doesn't depend on any cycle heads. + // + // NOTE: This optimization is observable in case there is an always + // applicable global candidate and another non-global candidate which only + // applies because of a provisional result. I can't even think of a test + // case where this would occur and even then, this would not be unsound. + // Supporting this makes the code more involved, so I am just going to + // ignore this for now. + MergeCandidateInfo::AlwaysApplicable(i) => { + for (j, c) in where_bounds.into_iter().enumerate() { + if i != j { + self.ignore_candidate_head_usages(c.head_usages) + } + } + // If a where-bound does not apply, we don't actually get a + // candidate for it. We manually track the head usages + // of all failed `ParamEnv` candidates instead.
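The where-bound merging above keeps only the winning always-applicable candidate's head usages for the fixpoint check: the usages of every other where-bound candidate, and of the where-bounds that never produced a candidate at all, are explicitly ignored. A toy sketch of which usages end up ignored, with simplified stand-in types:

    // Toy model of the fixpoint optimization: once one where-bound candidate is
    // always applicable, the cycle-head usages of every other (and every failed)
    // where-bound candidate can be ignored.
    #[derive(Default, Debug, Clone, PartialEq)]
    struct HeadUsages(Vec<u32>);

    struct Candidate {
        head_usages: HeadUsages,
    }

    fn usages_to_ignore(
        where_bounds: Vec<Candidate>,
        failed_param_env_usages: HeadUsages,
        always_applicable: usize,
    ) -> Vec<HeadUsages> {
        let mut ignored = Vec::new();
        // The winning candidate keeps its usages; every other candidate's usages
        // no longer influence the fixpoint check.
        for (j, c) in where_bounds.into_iter().enumerate() {
            if j != always_applicable {
                ignored.push(c.head_usages);
            }
        }
        // Where-bounds that failed outright never produced a candidate, so their
        // usages were tracked separately and are ignored here as well.
        ignored.push(failed_param_env_usages);
        ignored
    }

    fn main() {
        let ignored = usages_to_ignore(
            vec![
                Candidate { head_usages: HeadUsages(vec![1]) },
                Candidate { head_usages: HeadUsages(vec![2]) },
            ],
            HeadUsages(vec![3]),
            0,
        );
        assert_eq!(ignored, vec![HeadUsages(vec![2]), HeadUsages(vec![3])]);
    }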
+ self.ignore_candidate_head_usages( + failed_candidate_info.param_env_head_usages, + ); + } + MergeCandidateInfo::EqualResponse => {} + } + return Ok((response, Some(TraitGoalProvenVia::ParamEnv))); } else { - Ok((self.bail_with_ambiguity(&where_bounds), None)) + return Ok((self.bail_with_ambiguity(&where_bounds), None)); }; } @@ -1387,7 +1430,7 @@ where let alias_bounds: Vec<_> = candidates .extract_if(.., |c| matches!(c.source, CandidateSource::AliasBound)) .collect(); - return if let Some(response) = self.try_merge_candidates(&alias_bounds) { + return if let Some((response, _)) = self.try_merge_candidates(&alias_bounds) { Ok((response, Some(TraitGoalProvenVia::AliasBound))) } else { Ok((self.bail_with_ambiguity(&alias_bounds), None)) @@ -1412,7 +1455,7 @@ where TraitGoalProvenVia::Misc }; - if let Some(response) = self.try_merge_candidates(&candidates) { + if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok((response, Some(proven_via))) } else { self.flounder(&candidates).map(|r| (r, None)) @@ -1424,8 +1467,9 @@ where &mut self, goal: Goal<I, TraitPredicate<I>>, ) -> Result<(CanonicalResponse<I>, Option<TraitGoalProvenVia>), NoSolution> { - let candidates = self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); - self.merge_trait_candidates(candidates) + let (candidates, failed_candidate_info) = + self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); + self.merge_trait_candidates(candidates, failed_candidate_info) } fn try_stall_coroutine(&mut self, self_ty: I::Ty) -> Option<Result<Candidate<I>, NoSolution>> { diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml index 0ae0b613fa2..7cb4ae7ff5f 100644 --- a/compiler/rustc_parse/Cargo.toml +++ b/compiler/rustc_parse/Cargo.toml @@ -5,11 +5,10 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" -rustc-literal-escaper = "0.0.5" +bitflags.workspace = true +rustc-literal-escaper.workspace = true rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } -rustc_attr_parsing = { path = "../rustc_attr_parsing" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } @@ -19,8 +18,8 @@ rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true unicode-normalization = "0.1.11" unicode-width = "0.2.0" # tidy-alphabetical-end diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 3a21eea3d0a..4ca2f57bd87 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -359,6 +359,20 @@ parse_generics_in_path = unexpected generic arguments in path parse_help_set_edition_cargo = set `edition = "{$edition}"` in `Cargo.toml` parse_help_set_edition_standalone = pass `--edition {$edition}` to `rustc` + +parse_hidden_unicode_codepoints = unicode codepoint changing visible direction of text present in {$label} + .label = this {$label} contains {$count -> + [one] an invisible + *[other] invisible + } unicode text flow control {$count -> + [one] codepoint + *[other] codepoints + } + .note = these kind of unicode codepoints change the way text flows on applications that support them, but can cause confusion because they change the order of characters on the screen + 
.suggestion_remove = if their presence wasn't intentional, you can remove them + .suggestion_escape = if you want to keep them but make them visible in your source code, you can escape them + .no_suggestion_note_escape = if you want to keep them but make them visible in your source code, you can escape them: {$escaped} + parse_if_expression_missing_condition = missing condition for `if` expression .condition_label = expected condition here .block_label = if this block is the condition of the `if` expression, then it must be followed by another block @@ -436,11 +450,6 @@ parse_inner_doc_comment_not_permitted = expected outer doc comment .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item} .sugg_change_inner_to_outer = to annotate the {$item}, change the doc comment from inner to outer style -parse_invalid_attr_unsafe = `{$name}` is not an unsafe attribute - .label = this is not an unsafe attribute - .suggestion = remove the `unsafe(...)` - .note = extraneous unsafe is not allowed in attributes - parse_invalid_block_macro_segment = cannot use a `block` macro fragment here .label = the `block` fragment is within this context .suggestion = wrap this in another block @@ -468,9 +477,6 @@ parse_invalid_dyn_keyword = invalid `dyn` keyword parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else` parse_invalid_identifier_with_leading_number = identifiers cannot start with a number -parse_invalid_label = - invalid label name `{$name}` - parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid .label = invalid suffix `{$suffix}` .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases @@ -500,6 +506,8 @@ parse_invalid_unicode_escape = invalid unicode character escape parse_invalid_variable_declaration = invalid variable declaration +parse_keyword_label = labels cannot use keyword names + parse_keyword_lifetime = lifetimes cannot use keyword names @@ -601,7 +609,6 @@ parse_maybe_report_ambiguous_plus = ambiguous `+` in a type .suggestion = use parentheses to disambiguate -parse_meta_bad_delim = wrong meta list delimiters parse_meta_bad_delim_suggestion = the delimiters should be `(` and `)` parse_mismatched_closing_delimiter = mismatched closing delimiter: `{$delimiter}` @@ -748,9 +755,6 @@ parse_parentheses_with_struct_fields = invalid `struct` delimiters or `fn` call .suggestion_braces_for_struct = if `{$type}` is a struct, use braces as delimiters .suggestion_no_fields_for_fn = if `{$type}` is a function, use the arguments directly -parse_parenthesized_lifetime = parenthesized lifetime bounds are not supported -parse_parenthesized_lifetime_suggestion = remove the parentheses - parse_path_double_colon = path separator must be a double colon .suggestion = use a double colon instead @@ -862,8 +866,8 @@ parse_too_many_hashes = too many `#` symbols: raw strings may be delimited by up parse_too_short_hex_escape = numeric character escape is too short -parse_trailing_vert_not_allowed = a trailing `|` is not allowed in an or-pattern - .suggestion = remove the `{$token}` +parse_trailing_vert_not_allowed = a trailing `{$token}` is not allowed in an or-pattern +parse_trailing_vert_not_allowed_suggestion = remove the `{$token}` parse_trait_alias_cannot_be_auto = trait aliases cannot be `auto` parse_trait_alias_cannot_be_const = trait aliases cannot be `const` @@ -993,10 +997,6 @@ parse_unmatched_angle_brackets = 
{$num_extra_brackets -> *[other] remove extra angle brackets } -parse_unsafe_attr_outside_unsafe = unsafe attribute used without unsafe - .label = usage of unsafe attribute -parse_unsafe_attr_outside_unsafe_suggestion = wrap the attribute in `unsafe(...)` - parse_unskipped_whitespace = whitespace symbol '{$ch}' is not skipped .label = {parse_unskipped_whitespace} diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index a07d0606fd0..797d4830c2f 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -8,8 +8,9 @@ use rustc_ast::{Path, Visibility}; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic, + SuggestionStyle, }; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::edition::{Edition, LATEST_STABLE_EDITION}; use rustc_span::{Ident, Span, Symbol}; @@ -2228,11 +2229,10 @@ pub(crate) struct KeywordLifetime { } #[derive(Diagnostic)] -#[diag(parse_invalid_label)] -pub(crate) struct InvalidLabel { +#[diag(parse_keyword_label)] +pub(crate) struct KeywordLabel { #[primary_span] pub span: Span, - pub name: Symbol, } #[derive(Diagnostic)] @@ -2661,7 +2661,7 @@ pub(crate) enum TopLevelOrPatternNotAllowedSugg { parse_sugg_remove_leading_vert_in_pattern, code = "", applicability = "machine-applicable", - style = "verbose" + style = "tool-only" )] RemoveLeadingVert { #[primary_span] @@ -2694,12 +2694,25 @@ pub(crate) struct UnexpectedVertVertInPattern { pub start: Option<Span>, } +#[derive(Subdiagnostic)] +#[suggestion( + parse_trailing_vert_not_allowed, + code = "", + applicability = "machine-applicable", + style = "tool-only" +)] +pub(crate) struct TrailingVertSuggestion { + #[primary_span] + pub span: Span, +} + #[derive(Diagnostic)] #[diag(parse_trailing_vert_not_allowed)] pub(crate) struct TrailingVertNotAllowed { #[primary_span] - #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")] pub span: Span, + #[subdiagnostic] + pub suggestion: TrailingVertSuggestion, #[label(parse_label_while_parsing_or_pattern_here)] pub start: Option<Span>, pub token: Token, @@ -3150,27 +3163,6 @@ pub(crate) struct ModifierLifetime { pub modifier: &'static str, } -#[derive(Subdiagnostic)] -#[multipart_suggestion( - parse_parenthesized_lifetime_suggestion, - applicability = "machine-applicable" -)] -pub(crate) struct RemoveParens { - #[suggestion_part(code = "")] - pub lo: Span, - #[suggestion_part(code = "")] - pub hi: Span, -} - -#[derive(Diagnostic)] -#[diag(parse_parenthesized_lifetime)] -pub(crate) struct ParenthesizedLifetime { - #[primary_span] - pub span: Span, - #[subdiagnostic] - pub sugg: RemoveParens, -} - #[derive(Diagnostic)] #[diag(parse_underscore_literal_suffix)] pub(crate) struct UnderscoreLiteralSuffix { @@ -3354,15 +3346,6 @@ pub(crate) struct KwBadCase<'a> { } #[derive(Diagnostic)] -#[diag(parse_meta_bad_delim)] -pub(crate) struct MetaBadDelim { - #[primary_span] - pub span: Span, - #[subdiagnostic] - pub sugg: MetaBadDelimSugg, -} - -#[derive(Diagnostic)] #[diag(parse_cfg_attr_bad_delim)] pub(crate) struct CfgAttrBadDelim { #[primary_span] @@ -3502,38 +3485,6 @@ pub(crate) struct DotDotRangeAttribute { } #[derive(Diagnostic)] -#[diag(parse_invalid_attr_unsafe)] -#[note] -pub(crate) struct InvalidAttrUnsafe { - #[primary_span] - #[label] - pub span: Span, - pub name: Path, -} - 
-#[derive(Diagnostic)] -#[diag(parse_unsafe_attr_outside_unsafe)] -pub(crate) struct UnsafeAttrOutsideUnsafe { - #[primary_span] - #[label] - pub span: Span, - #[subdiagnostic] - pub suggestion: UnsafeAttrOutsideUnsafeSuggestion, -} - -#[derive(Subdiagnostic)] -#[multipart_suggestion( - parse_unsafe_attr_outside_unsafe_suggestion, - applicability = "machine-applicable" -)] -pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion { - #[suggestion_part(code = "unsafe(")] - pub left: Span, - #[suggestion_part(code = ")")] - pub right: Span, -} - -#[derive(Diagnostic)] #[diag(parse_binder_before_modifiers)] pub(crate) struct BinderBeforeModifiers { #[primary_span] @@ -3651,3 +3602,76 @@ pub(crate) struct ExpectedRegisterClassOrExplicitRegister { #[primary_span] pub(crate) span: Span, } + +#[derive(LintDiagnostic)] +#[diag(parse_hidden_unicode_codepoints)] +#[note] +pub(crate) struct HiddenUnicodeCodepointsDiag { + pub label: String, + pub count: usize, + #[label] + pub span_label: Span, + #[subdiagnostic] + pub labels: Option<HiddenUnicodeCodepointsDiagLabels>, + #[subdiagnostic] + pub sub: HiddenUnicodeCodepointsDiagSub, +} + +pub(crate) struct HiddenUnicodeCodepointsDiagLabels { + pub spans: Vec<(char, Span)>, +} + +impl Subdiagnostic for HiddenUnicodeCodepointsDiagLabels { + fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { + for (c, span) in self.spans { + diag.span_label(span, format!("{c:?}")); + } + } +} + +pub(crate) enum HiddenUnicodeCodepointsDiagSub { + Escape { spans: Vec<(char, Span)> }, + NoEscape { spans: Vec<(char, Span)> }, +} + +// Used because of multiple multipart_suggestion and note +impl Subdiagnostic for HiddenUnicodeCodepointsDiagSub { + fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { + match self { + HiddenUnicodeCodepointsDiagSub::Escape { spans } => { + diag.multipart_suggestion_with_style( + fluent::parse_suggestion_remove, + spans.iter().map(|(_, span)| (*span, "".to_string())).collect(), + Applicability::MachineApplicable, + SuggestionStyle::HideCodeAlways, + ); + diag.multipart_suggestion( + fluent::parse_suggestion_escape, + spans + .into_iter() + .map(|(c, span)| { + let c = format!("{c:?}"); + (span, c[1..c.len() - 1].to_string()) + }) + .collect(), + Applicability::MachineApplicable, + ); + } + HiddenUnicodeCodepointsDiagSub::NoEscape { spans } => { + // FIXME: in other suggestions we've reversed the inner spans of doc comments. We + // should do the same here to provide the same good suggestions as we do for + // literals above. + diag.arg( + "escaped", + spans + .into_iter() + .map(|(c, _)| format!("{c:?}")) + .collect::<Vec<String>>() + .join(", "), + ); + diag.note(fluent::parse_suggestion_remove); + diag.note(fluent::parse_no_suggestion_note_escape); + } + } + } +} diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 85af5a615ae..9792240a548 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -49,6 +49,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>( mut src: &'src str, mut start_pos: BytePos, override_span: Option<Span>, + frontmatter_allowed: FrontmatterAllowed, ) -> Result<TokenStream, Vec<Diag<'psess>>> { // Skip `#!`, if present. 
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) { @@ -56,7 +57,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>( start_pos = start_pos + BytePos::from_usize(shebang_len); } - let cursor = Cursor::new(src, FrontmatterAllowed::Yes); + let cursor = Cursor::new(src, frontmatter_allowed); let mut lexer = Lexer { psess, start_pos, @@ -542,21 +543,21 @@ impl<'psess, 'src> Lexer<'psess, 'src> { }) .collect(); + let label = label.to_string(); let count = spans.len(); - let labels = point_at_inner_spans.then_some(spans.clone()); + let labels = point_at_inner_spans + .then_some(errors::HiddenUnicodeCodepointsDiagLabels { spans: spans.clone() }); + let sub = if point_at_inner_spans && !spans.is_empty() { + errors::HiddenUnicodeCodepointsDiagSub::Escape { spans } + } else { + errors::HiddenUnicodeCodepointsDiagSub::NoEscape { spans } + }; self.psess.buffer_lint( TEXT_DIRECTION_CODEPOINT_IN_LITERAL, span, ast::CRATE_NODE_ID, - BuiltinLintDiag::HiddenUnicodeCodepoints { - label: label.to_string(), - count, - span_label: span, - labels, - escape: point_at_inner_spans && !spans.is_empty(), - spans, - }, + errors::HiddenUnicodeCodepointsDiag { label, count, span_label: span, labels, sub }, ); } diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 2050c5f9608..48289b2e8ab 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -16,10 +16,11 @@ use std::str::Utf8Error; use std::sync::Arc; use rustc_ast as ast; -use rustc_ast::tokenstream::TokenStream; +use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{AttrItem, Attribute, MetaItemInner, token}; use rustc_ast_pretty::pprust; use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize}; +use rustc_lexer::FrontmatterAllowed; use rustc_session::parse::ParseSess; use rustc_span::source_map::SourceMap; use rustc_span::{FileName, SourceFile, Span}; @@ -30,8 +31,9 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments"); #[macro_use] pub mod parser; use parser::Parser; +use rustc_ast::token::Delimiter; + pub mod lexer; -pub mod validate_attr; mod errors; @@ -146,7 +148,7 @@ fn new_parser_from_source_file( source_file: Arc<SourceFile>, ) -> Result<Parser<'_>, Vec<Diag<'_>>> { let end_pos = source_file.end_position(); - let stream = source_file_to_stream(psess, source_file, None)?; + let stream = source_file_to_stream(psess, source_file, None, FrontmatterAllowed::Yes)?; let mut parser = Parser::new(psess, stream, None); if parser.token == token::Eof { parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None); @@ -161,7 +163,9 @@ pub fn source_str_to_stream( override_span: Option<Span>, ) -> Result<TokenStream, Vec<Diag<'_>>> { let source_file = psess.source_map().new_source_file(name, source); - source_file_to_stream(psess, source_file, override_span) + // used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse + // frontmatters as frontmatters. + source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No) } /// Given a source file, produces a sequence of token trees. 
Returns any buffered errors from @@ -170,6 +174,7 @@ fn source_file_to_stream<'psess>( psess: &'psess ParseSess, source_file: Arc<SourceFile>, override_span: Option<Span>, + frontmatter_allowed: FrontmatterAllowed, ) -> Result<TokenStream, Vec<Diag<'psess>>> { let src = source_file.src.as_ref().unwrap_or_else(|| { psess.dcx().bug(format!( @@ -178,7 +183,13 @@ fn source_file_to_stream<'psess>( )); }); - lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span) + lexer::lex_token_trees( + psess, + src.as_str(), + source_file.start_pos, + override_span, + frontmatter_allowed, + ) } /// Runs the given subparser `f` on the tokens of the given `attr`'s item. @@ -225,7 +236,7 @@ pub fn parse_cfg_attr( ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens }) if !tokens.is_empty() => { - crate::validate_attr::check_cfg_attr_bad_delim(psess, dspan, delim); + check_cfg_attr_bad_delim(psess, dspan, delim); match parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) { Ok(r) => return Some(r), Err(e) => { @@ -244,3 +255,13 @@ pub fn parse_cfg_attr( } None } + +fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) { + if let Delimiter::Parenthesis = delim { + return; + } + psess.dcx().emit_err(errors::CfgAttrBadDelim { + span: span.entire(), + sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close }, + }); +} diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 41d3889c448..acd338156ce 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -11,6 +11,7 @@ use tracing::debug; use super::{ AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos, }; +use crate::parser::FnContext; use crate::{errors, exp, fluent_generated as fluent}; // Public for rustfmt usage @@ -200,7 +201,7 @@ impl<'a> Parser<'a> { AttrWrapper::empty(), true, false, - FnParseMode { req_name: |_| true, req_body: true }, + FnParseMode { req_name: |_| true, context: FnContext::Free, req_body: true }, ForceCollect::No, ) { Ok(Some(item)) => { @@ -398,7 +399,7 @@ impl<'a> Parser<'a> { } /// Matches `COMMASEP(meta_item_inner)`. - pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::MetaItemInner>> { + pub fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::MetaItemInner>> { // Presumably, the majority of the time there will only be one attr. let mut nmis = ThinVec::with_capacity(1); while self.token != token::Eof { diff --git a/compiler/rustc_parse/src/parser/cfg_select.rs b/compiler/rustc_parse/src/parser/cfg_select.rs index 2c6fb224d70..08a71db4de8 100644 --- a/compiler/rustc_parse/src/parser/cfg_select.rs +++ b/compiler/rustc_parse/src/parser/cfg_select.rs @@ -1,11 +1,12 @@ use rustc_ast::token::Token; use rustc_ast::tokenstream::{TokenStream, TokenTree}; +use rustc_ast::util::classify; use rustc_ast::{MetaItemInner, token}; use rustc_errors::PResult; use rustc_span::Span; use crate::exp; -use crate::parser::Parser; +use crate::parser::{AttrWrapper, ForceCollect, Parser, Restrictions, Trailing, UsePreAttrPos}; pub enum CfgSelectPredicate { Cfg(MetaItemInner), @@ -23,19 +24,26 @@ pub struct CfgSelectBranches { pub unreachable: Vec<(CfgSelectPredicate, TokenStream, Span)>, } -/// Parses a `TokenTree` that must be of the form `{ /* ... */ }`, and returns a `TokenStream` where -/// the surrounding braces are stripped. +/// Parses a `TokenTree` consisting either of `{ /* ... 
*/ }` (and strip the braces) or an +/// expression followed by a comma (and strip the comma). fn parse_token_tree<'a>(p: &mut Parser<'a>) -> PResult<'a, TokenStream> { - // Generate an error if the `=>` is not followed by `{`. - if p.token != token::OpenBrace { - p.expect(exp!(OpenBrace))?; + if p.token == token::OpenBrace { + // Strip the outer '{' and '}'. + match p.parse_token_tree() { + TokenTree::Token(..) => unreachable!("because of the expect above"), + TokenTree::Delimited(.., tts) => return Ok(tts), + } } - - // Strip the outer '{' and '}'. - match p.parse_token_tree() { - TokenTree::Token(..) => unreachable!("because of the expect above"), - TokenTree::Delimited(.., tts) => Ok(tts), + let expr = p.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |p, _| { + p.parse_expr_res(Restrictions::STMT_EXPR, AttrWrapper::empty()) + .map(|(expr, _)| (expr, Trailing::No, UsePreAttrPos::No)) + })?; + if !classify::expr_is_complete(&expr) && p.token != token::CloseBrace && p.token != token::Eof { + p.expect(exp!(Comma))?; + } else { + let _ = p.eat(exp!(Comma)); } + Ok(TokenStream::from_ast(&expr)) } pub fn parse_cfg_select<'a>(p: &mut Parser<'a>) -> PResult<'a, CfgSelectBranches> { diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index a32cd33a260..a28af7833c3 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -44,6 +44,7 @@ use crate::errors::{ UnexpectedConstParamDeclaration, UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType, }; +use crate::parser::FnContext; use crate::parser::attr::InnerAttrPolicy; use crate::{exp, fluent_generated as fluent}; @@ -462,8 +463,8 @@ impl<'a> Parser<'a> { pub(super) fn expected_one_of_not_found( &mut self, - edible: &[ExpTokenPair<'_>], - inedible: &[ExpTokenPair<'_>], + edible: &[ExpTokenPair], + inedible: &[ExpTokenPair], ) -> PResult<'a, ErrorGuaranteed> { debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible); fn tokens_to_string(tokens: &[TokenType]) -> String { @@ -1091,7 +1092,7 @@ impl<'a> Parser<'a> { /// Eats and discards tokens until one of `closes` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. - pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) { + pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair]) { if let Err(err) = self .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree())) { @@ -1112,7 +1113,7 @@ impl<'a> Parser<'a> { pub(super) fn check_trailing_angle_brackets( &mut self, segment: &PathSegment, - end: &[ExpTokenPair<'_>], + end: &[ExpTokenPair], ) -> Option<ErrorGuaranteed> { if !self.may_recover() { return None; @@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> { // second case. if self.look_ahead(position, |t| { trace!("check_trailing_angle_brackets: t={:?}", t); - end.iter().any(|exp| exp.tok == &t.kind) + end.iter().any(|exp| exp.tok == t.kind) }) { // Eat from where we started until the end token so that parsing can continue // as if we didn't have those extra angle brackets. 
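Note on the `parse_token_tree` change in `cfg_select.rs` above: a branch body may now be either a braced block (with the braces stripped) or a bare expression, and the expression only needs a trailing comma when it is not "complete" and more arms follow. A minimal standalone sketch of that comma rule, assuming invented names (`Tok`, `needs_trailing_comma`) purely for illustration; the real code operates on `Parser`, `classify::expr_is_complete`, and `TokenStream`:

    // Toy model of the new branch-body rule: after parsing an expression arm,
    // require a comma unless the expression is already complete (ends in its
    // own block) or the arm list ends right after it.
    #[derive(PartialEq)]
    enum Tok { CloseBrace, Eof, Other }

    fn needs_trailing_comma(expr_is_complete: bool, next: &Tok) -> bool {
        !expr_is_complete && *next != Tok::CloseBrace && *next != Tok::Eof
    }

    fn main() {
        // `_ => "fallback"` followed by another arm: comma required.
        assert!(needs_trailing_comma(false, &Tok::Other));
        // A block-like expression such as `if c { a } else { b }`: comma optional.
        assert!(!needs_trailing_comma(true, &Tok::Other));
        // Last arm before the closing brace or end of input: comma optional.
        assert!(!needs_trailing_comma(false, &Tok::CloseBrace));
        assert!(!needs_trailing_comma(false, &Tok::Eof));
    }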
@@ -2119,8 +2120,8 @@ impl<'a> Parser<'a> { pub(super) fn recover_seq_parse_error( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, + open: ExpTokenPair, + close: ExpTokenPair, lo: Span, err: Diag<'a>, ) -> Box<Expr> { @@ -2246,6 +2247,7 @@ impl<'a> Parser<'a> { pat: Box<ast::Pat>, require_name: bool, first_param: bool, + fn_parse_mode: &crate::parser::item::FnParseMode, ) -> Option<Ident> { // If we find a pattern followed by an identifier, it could be an (incorrect) // C-style parameter declaration. @@ -2268,7 +2270,14 @@ impl<'a> Parser<'a> { || self.token == token::Lt || self.token == token::CloseParen) { - let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)"; + let maybe_emit_anon_params_note = |this: &mut Self, err: &mut Diag<'_>| { + let ed = this.token.span.with_neighbor(this.prev_token.span).edition(); + if matches!(fn_parse_mode.context, crate::parser::item::FnContext::Trait) + && (fn_parse_mode.req_name)(ed) + { + err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); + } + }; let (ident, self_sugg, param_sugg, type_sugg, self_span, param_span, type_span) = match pat.kind { @@ -2305,7 +2314,7 @@ impl<'a> Parser<'a> { "_: ".to_string(), Applicability::MachineApplicable, ); - err.note(rfc_note); + maybe_emit_anon_params_note(self, err); } return None; @@ -2313,7 +2322,13 @@ impl<'a> Parser<'a> { }; // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}` - if first_param { + if first_param + // Only when the fn is a method, we emit this suggestion. + && matches!( + fn_parse_mode.context, + FnContext::Trait | FnContext::Impl + ) + { err.span_suggestion_verbose( self_span, "if this is a `self` type, give it a parameter name", @@ -2337,7 +2352,7 @@ impl<'a> Parser<'a> { type_sugg, Applicability::MachineApplicable, ); - err.note(rfc_note); + maybe_emit_anon_params_note(self, err); // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name. return if self.token == token::Lt { None } else { Some(ident) }; @@ -2371,8 +2386,8 @@ impl<'a> Parser<'a> { pub(super) fn consume_block( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, + open: ExpTokenPair, + close: ExpTokenPair, consume_close: ConsumeClosingDelim, ) { let mut brace_depth = 0; diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index d0604f76317..f0f58e901a9 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -1598,7 +1598,7 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr) } - fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, Box<Expr>> { + fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair) -> PResult<'a, Box<Expr>> { let lo = self.token.span; self.bump(); // `[` or other open delim @@ -2077,7 +2077,7 @@ impl<'a> Parser<'a> { (token::Lit { symbol: name, suffix: None, kind: token::Char }, span) } - fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit { + pub fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit { ast::MetaItemLit { symbol: name, suffix: None, @@ -2086,7 +2086,7 @@ impl<'a> Parser<'a> { } } - fn handle_missing_lit<L>( + pub fn handle_missing_lit<L>( &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { @@ -2156,7 +2156,7 @@ impl<'a> Parser<'a> { /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and /// `Lit::from_token` (excluding unary negation). 
- fn eat_token_lit(&mut self) -> Option<token::Lit> { + pub fn eat_token_lit(&mut self) -> Option<token::Lit> { let check_expr = |expr: Box<Expr>| { if let ast::ExprKind::Lit(token_lit) = expr.kind { Some(token_lit) @@ -2397,20 +2397,24 @@ impl<'a> Parser<'a> { let before = self.prev_token; let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; - let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; + let (bound_vars, _) = self.parse_higher_ranked_binder()?; let span = lo.to(self.prev_token.span); self.psess.gated_spans.gate(sym::closure_lifetime_binder, span); - ClosureBinder::For { span, generic_params: lifetime_defs } + ClosureBinder::For { span, generic_params: bound_vars } } else { ClosureBinder::NotPresent }; let constness = self.parse_closure_constness(); - let movability = - if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; + let movability = if self.eat_keyword(exp!(Static)) { + self.psess.gated_spans.gate(sym::coroutines, self.prev_token.span); + Movability::Static + } else { + Movability::Movable + }; let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) @@ -3090,7 +3094,7 @@ impl<'a> Parser<'a> { if let Some((ident, is_raw)) = self.token.lifetime() { // Disallow `'fn`, but with a better error message than `expect_lifetime`. if matches!(is_raw, IdentIsRaw::No) && ident.without_first_quote().is_reserved() { - self.dcx().emit_err(errors::InvalidLabel { span: ident.span, name: ident.name }); + self.dcx().emit_err(errors::KeywordLabel { span: ident.span }); } self.bump(); @@ -3657,7 +3661,7 @@ impl<'a> Parser<'a> { &mut self, pth: ast::Path, recover: bool, - close: ExpTokenPair<'_>, + close: ExpTokenPair, ) -> PResult< 'a, ( @@ -3676,8 +3680,8 @@ impl<'a> Parser<'a> { errors::HelpUseLatestEdition::new().add_to_diag(e); }; - while self.token != *close.tok { - if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) { + while self.token != close.tok { + if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(&close.tok) { let exp_span = self.prev_token.span; // We permit `.. }` on the left-hand side of a destructuring assignment. if self.check(close) { diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs index 4a8530a2b38..eb684c3a62f 100644 --- a/compiler/rustc_parse/src/parser/generics.rs +++ b/compiler/rustc_parse/src/parser/generics.rs @@ -172,8 +172,11 @@ impl<'a> Parser<'a> { }) } - /// Parses a (possibly empty) list of lifetime and type parameters, possibly including - /// a trailing comma and erroneous trailing attributes. + /// Parse a (possibly empty) list of generic (lifetime, type, const) parameters. + /// + /// ```ebnf + /// GenericParams = (GenericParam ("," GenericParam)* ","?)? + /// ``` pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> { let mut params = ThinVec::new(); let mut done = false; @@ -520,7 +523,7 @@ impl<'a> Parser<'a> { // * `for<'a> Trait1<'a>: Trait2<'a /* ok */>` // * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>` // * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>` - let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; + let (bound_vars, _) = self.parse_higher_ranked_binder()?; // Parse type with mandatory colon and (possibly empty) bounds, // or with mandatory equality sign and the second type. 
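The new grammar comment in `generics.rs` above, `GenericParams = (GenericParam ("," GenericParam)* ","?)?`, covers an empty list, mixed parameter kinds, and an optional trailing comma. A few plain Rust items matching those shapes, just to make the EBNF concrete:

    #![allow(dead_code)]

    struct Empty;                                    // GenericParams absent
    struct One<T>(T);                                // single type parameter
    struct Mixed<'a, T, const N: usize>(&'a [T; N]); // lifetime, type, and const
    struct Trailing<T,>(T);                          // trailing comma accepted

    fn main() {
        let _ = Empty;
        let _ = One(1u8);
        let arr = [1u8, 2, 3];
        let _ = Mixed(&arr); // lifetime, T, and N are all inferred
        let _ = Trailing(0i32);
    }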
@@ -528,7 +531,7 @@ impl<'a> Parser<'a> { if self.eat(exp!(Colon)) { let bounds = self.parse_generic_bounds()?; Ok(ast::WherePredicateKind::BoundPredicate(ast::WhereBoundPredicate { - bound_generic_params: lifetime_defs, + bound_generic_params: bound_vars, bounded_ty: ty, bounds, })) diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 607adaf0829..eb264f59fed 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -43,7 +43,7 @@ impl<'a> Parser<'a> { self.expect(exp!(OpenBrace))?; let (inner_attrs, items, inner_span) = self.parse_mod(exp!(CloseBrace))?; attrs.extend(inner_attrs); - ModKind::Loaded(items, Inline::Yes, inner_span, Ok(())) + ModKind::Loaded(items, Inline::Yes, inner_span) }; Ok(ItemKind::Mod(safety, ident, mod_kind)) } @@ -54,7 +54,7 @@ impl<'a> Parser<'a> { /// - `}` for mod items pub fn parse_mod( &mut self, - term: ExpTokenPair<'_>, + term: ExpTokenPair, ) -> PResult<'a, (AttrVec, ThinVec<Box<Item>>, ModSpans)> { let lo = self.token.span; let attrs = self.parse_inner_attributes()?; @@ -116,7 +116,8 @@ impl<'a> Parser<'a> { impl<'a> Parser<'a> { pub fn parse_item(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Box<Item>>> { - let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true }; + let fn_parse_mode = + FnParseMode { req_name: |_| true, context: FnContext::Free, req_body: true }; self.parse_item_(fn_parse_mode, force_collect).map(|i| i.map(Box::new)) } @@ -975,7 +976,8 @@ impl<'a> Parser<'a> { &mut self, force_collect: ForceCollect, ) -> PResult<'a, Option<Option<Box<AssocItem>>>> { - let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true }; + let fn_parse_mode = + FnParseMode { req_name: |_| true, context: FnContext::Impl, req_body: true }; self.parse_assoc_item(fn_parse_mode, force_collect) } @@ -983,8 +985,11 @@ impl<'a> Parser<'a> { &mut self, force_collect: ForceCollect, ) -> PResult<'a, Option<Option<Box<AssocItem>>>> { - let fn_parse_mode = - FnParseMode { req_name: |edition| edition >= Edition::Edition2018, req_body: false }; + let fn_parse_mode = FnParseMode { + req_name: |edition| edition >= Edition::Edition2018, + context: FnContext::Trait, + req_body: false, + }; self.parse_assoc_item(fn_parse_mode, force_collect) } @@ -1196,7 +1201,7 @@ impl<'a> Parser<'a> { }?; let dash = exp!(Minus); - if self.token != *dash.tok { + if self.token != dash.tok { return Ok(ident); } @@ -1261,7 +1266,8 @@ impl<'a> Parser<'a> { &mut self, force_collect: ForceCollect, ) -> PResult<'a, Option<Option<Box<ForeignItem>>>> { - let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: false }; + let fn_parse_mode = + FnParseMode { req_name: |_| true, context: FnContext::Free, req_body: false }; Ok(self.parse_item_(fn_parse_mode, force_collect)?.map( |Item { attrs, id, span, vis, kind, tokens }| { let kind = match ForeignItemKind::try_from(kind) { @@ -2135,7 +2141,8 @@ impl<'a> Parser<'a> { let inherited_vis = Visibility { span: DUMMY_SP, kind: VisibilityKind::Inherited, tokens: None }; // We use `parse_fn` to get a span for the function - let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true }; + let fn_parse_mode = + FnParseMode { req_name: |_| true, context: FnContext::Free, req_body: true }; match self.parse_fn( &mut AttrVec::new(), fn_parse_mode, @@ -2403,6 +2410,9 @@ pub(crate) struct FnParseMode { /// * The span is from Edition 2015. In particular, you can get a /// 2015 span inside a 2021 crate using macros. 
pub(super) req_name: ReqName, + /// The context in which this function is parsed, used for diagnostics. + /// This indicates the fn is a free function or method and so on. + pub(super) context: FnContext, /// If this flag is set to `true`, then plain, semicolon-terminated function /// prototypes are not allowed here. /// @@ -2424,6 +2434,18 @@ pub(crate) struct FnParseMode { pub(super) req_body: bool, } +/// The context in which a function is parsed. +/// FIXME(estebank, xizheyin): Use more variants. +#[derive(Clone, Copy, PartialEq, Eq)] +pub(crate) enum FnContext { + /// Free context. + Free, + /// A Trait context. + Trait, + /// An Impl block. + Impl, +} + /// Parsing of functions and methods. impl<'a> Parser<'a> { /// Parse a function starting from the front matter (`const ...`) to the body `{ ... }` or `;`. @@ -2439,11 +2461,8 @@ impl<'a> Parser<'a> { let header = self.parse_fn_front_matter(vis, case, FrontMatterParsingMode::Function)?; // `const ... fn` let ident = self.parse_ident()?; // `foo` let mut generics = self.parse_generics()?; // `<'a, T, ...>` - let decl = match self.parse_fn_decl( - fn_parse_mode.req_name, - AllowPlus::Yes, - RecoverReturnSign::Yes, - ) { + let decl = match self.parse_fn_decl(&fn_parse_mode, AllowPlus::Yes, RecoverReturnSign::Yes) + { Ok(decl) => decl, Err(old_err) => { // If we see `for Ty ...` then user probably meant `impl` item. @@ -2961,18 +2980,21 @@ impl<'a> Parser<'a> { /// Parses the parameter list and result type of a function declaration. pub(super) fn parse_fn_decl( &mut self, - req_name: ReqName, + fn_parse_mode: &FnParseMode, ret_allow_plus: AllowPlus, recover_return_sign: RecoverReturnSign, ) -> PResult<'a, Box<FnDecl>> { Ok(Box::new(FnDecl { - inputs: self.parse_fn_params(req_name)?, + inputs: self.parse_fn_params(fn_parse_mode)?, output: self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes, recover_return_sign)?, })) } /// Parses the parameter list of a function, including the `(` and `)` delimiters. - pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> { + pub(super) fn parse_fn_params( + &mut self, + fn_parse_mode: &FnParseMode, + ) -> PResult<'a, ThinVec<Param>> { let mut first_param = true; // Parse the arguments, starting out with `self` being allowed... if self.token != TokenKind::OpenParen @@ -2988,7 +3010,7 @@ impl<'a> Parser<'a> { let (mut params, _) = self.parse_paren_comma_seq(|p| { p.recover_vcs_conflict_marker(); let snapshot = p.create_snapshot_for_diagnostic(); - let param = p.parse_param_general(req_name, first_param, true).or_else(|e| { + let param = p.parse_param_general(fn_parse_mode, first_param, true).or_else(|e| { let guar = e.emit(); // When parsing a param failed, we should check to make the span of the param // not contain '(' before it. @@ -3019,7 +3041,7 @@ impl<'a> Parser<'a> { /// - `recover_arg_parse` is used to recover from a failed argument parse. 
pub(super) fn parse_param_general( &mut self, - req_name: ReqName, + fn_parse_mode: &FnParseMode, first_param: bool, recover_arg_parse: bool, ) -> PResult<'a, Param> { @@ -3035,16 +3057,22 @@ impl<'a> Parser<'a> { let is_name_required = match this.token.kind { token::DotDotDot => false, - _ => req_name(this.token.span.with_neighbor(this.prev_token.span).edition()), + _ => (fn_parse_mode.req_name)( + this.token.span.with_neighbor(this.prev_token.span).edition(), + ), }; let (pat, ty) = if is_name_required || this.is_named_param() { debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required); let (pat, colon) = this.parse_fn_param_pat_colon()?; if !colon { let mut err = this.unexpected().unwrap_err(); - return if let Some(ident) = - this.parameter_without_type(&mut err, pat, is_name_required, first_param) - { + return if let Some(ident) = this.parameter_without_type( + &mut err, + pat, + is_name_required, + first_param, + fn_parse_mode, + ) { let guar = err.emit(); Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No)) } else { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 0a8a0203013..15598f32429 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -22,9 +22,9 @@ use std::{fmt, mem, slice}; use attr_wrapper::{AttrWrapper, UsePreAttrPos}; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; -pub(crate) use item::FnParseMode; +pub(crate) use item::{FnContext, FnParseMode}; pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; -use path::PathStyle; +pub use path::PathStyle; use rustc_ast::token::{ self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind, }; @@ -261,19 +261,19 @@ struct CaptureState { /// A sequence separator. #[derive(Debug)] -struct SeqSep<'a> { +struct SeqSep { /// The separator token. - sep: Option<ExpTokenPair<'a>>, + sep: Option<ExpTokenPair>, /// `true` if a trailing separator is allowed. trailing_sep_allowed: bool, } -impl<'a> SeqSep<'a> { - fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> { +impl SeqSep { + fn trailing_allowed(sep: ExpTokenPair) -> SeqSep { SeqSep { sep: Some(sep), trailing_sep_allowed: true } } - fn none() -> SeqSep<'a> { + fn none() -> SeqSep { SeqSep { sep: None, trailing_sep_allowed: false } } } @@ -285,7 +285,7 @@ pub enum FollowedByType { } #[derive(Copy, Clone, Debug)] -enum Trailing { +pub enum Trailing { No, Yes, } @@ -425,13 +425,13 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> { + pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> { if self.expected_token_types.is_empty() { - if self.token == *exp.tok { + if self.token == exp.tok { self.bump(); Ok(Recovered::No) } else { - self.unexpected_try_recover(exp.tok) + self.unexpected_try_recover(&exp.tok) } } else { self.expect_one_of(slice::from_ref(&exp), &[]) @@ -443,13 +443,13 @@ impl<'a> Parser<'a> { /// anything. Signal a fatal error if next token is unexpected. 
fn expect_one_of( &mut self, - edible: &[ExpTokenPair<'_>], - inedible: &[ExpTokenPair<'_>], + edible: &[ExpTokenPair], + inedible: &[ExpTokenPair], ) -> PResult<'a, Recovered> { - if edible.iter().any(|exp| exp.tok == &self.token.kind) { + if edible.iter().any(|exp| exp.tok == self.token.kind) { self.bump(); Ok(Recovered::No) - } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) { + } else if inedible.iter().any(|exp| exp.tok == self.token.kind) { // leave it in the input Ok(Recovered::No) } else if self.token != token::Eof @@ -494,8 +494,8 @@ impl<'a> Parser<'a> { /// This method will automatically add `tok` to `expected_token_types` if `tok` is not /// encountered. #[inline] - fn check(&mut self, exp: ExpTokenPair<'_>) -> bool { - let is_present = self.token == *exp.tok; + pub fn check(&mut self, exp: ExpTokenPair) -> bool { + let is_present = self.token == exp.tok; if !is_present { self.expected_token_types.insert(exp.token_type); } @@ -542,7 +542,7 @@ impl<'a> Parser<'a> { /// Consumes a token 'tok' if it exists. Returns whether the given token was present. #[inline] #[must_use] - pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool { + pub fn eat(&mut self, exp: ExpTokenPair) -> bool { let is_present = self.check(exp); if is_present { self.bump() @@ -633,7 +633,7 @@ impl<'a> Parser<'a> { } /// Consume a sequence produced by a metavar expansion, if present. - fn eat_metavar_seq<T>( + pub fn eat_metavar_seq<T>( &mut self, mv_kind: MetaVarKind, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, @@ -745,13 +745,13 @@ impl<'a> Parser<'a> { /// Eats the expected token if it's present possibly breaking /// compound tokens like multi-character operators in process. /// Returns `true` if the token was eaten. - fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool { - if self.token == *exp.tok { + fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool { + if self.token == exp.tok { self.bump(); return true; } match self.token.kind.break_two_token_op(1) { - Some((first, second)) if first == *exp.tok => { + Some((first, second)) if first == exp.tok => { let first_span = self.psess.source_map().start_point(self.token.span); let second_span = self.token.span.with_lo(first_span.hi()); self.token = Token::new(first, first_span); @@ -826,7 +826,7 @@ impl<'a> Parser<'a> { /// Checks if the next token is contained within `closes`, and returns `true` if so. fn expect_any_with_type( &mut self, - closes_expected: &[ExpTokenPair<'_>], + closes_expected: &[ExpTokenPair], closes_not_expected: &[&TokenKind], ) -> bool { closes_expected.iter().any(|&close| self.check(close)) @@ -838,9 +838,9 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_seq_to_before_tokens<T>( &mut self, - closes_expected: &[ExpTokenPair<'_>], + closes_expected: &[ExpTokenPair], closes_not_expected: &[&TokenKind], - sep: SeqSep<'_>, + sep: SeqSep, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> { let mut first = true; @@ -869,7 +869,7 @@ impl<'a> Parser<'a> { } Err(mut expect_err) => { let sp = self.prev_token.span.shrink_to_hi(); - let token_str = pprust::token_kind_to_string(exp.tok); + let token_str = pprust::token_kind_to_string(&exp.tok); match self.current_closure.take() { Some(closure_spans) if self.token == TokenKind::Semi => { @@ -1039,8 +1039,8 @@ impl<'a> Parser<'a> { /// closing bracket. 
fn parse_seq_to_before_end<T>( &mut self, - close: ExpTokenPair<'_>, - sep: SeqSep<'_>, + close: ExpTokenPair, + sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> { self.parse_seq_to_before_tokens(&[close], &[], sep, f) @@ -1051,8 +1051,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_seq_to_end<T>( &mut self, - close: ExpTokenPair<'_>, - sep: SeqSep<'_>, + close: ExpTokenPair, + sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?; @@ -1070,9 +1070,9 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_unspanned_seq<T>( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, - sep: SeqSep<'_>, + open: ExpTokenPair, + close: ExpTokenPair, + sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.expect(open)?; @@ -1084,8 +1084,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_delim_comma_seq<T>( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, + open: ExpTokenPair, + close: ExpTokenPair, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f) @@ -1094,7 +1094,7 @@ impl<'a> Parser<'a> { /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`). /// The function `f` must consume tokens until reaching the next separator or /// closing bracket. - fn parse_paren_comma_seq<T>( + pub fn parse_paren_comma_seq<T>( &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { @@ -1355,7 +1355,8 @@ impl<'a> Parser<'a> { AttrArgs::Delimited(args) } else if self.eat(exp!(Eq)) { let eq_span = self.prev_token.span; - AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? } + let expr = self.parse_expr_force_collect()?; + AttrArgs::Eq { eq_span, expr } } else { AttrArgs::Empty }) @@ -1388,15 +1389,26 @@ impl<'a> Parser<'a> { // matching `CloseDelim` we are *after* the delimited sequence, // i.e. at depth `d - 1`. let target_depth = self.token_cursor.stack.len() - 1; - loop { - // Advance one token at a time, so `TokenCursor::next()` - // can capture these tokens if necessary. + + if let Capturing::No = self.capture_state.capturing { + // We are not capturing tokens, so skip to the end of the + // delimited sequence. This is a perf win when dealing with + // declarative macros that pass large `tt` fragments through + // multiple rules, as seen in the uom-0.37.0 crate. + self.token_cursor.curr.bump_to_end(); self.bump(); - if self.token_cursor.stack.len() == target_depth { - debug_assert!(self.token.kind.close_delim().is_some()); - break; + debug_assert_eq!(self.token_cursor.stack.len(), target_depth); + } else { + loop { + // Advance one token at a time, so `TokenCursor::next()` + // can capture these tokens if necessary. 
+ self.bump(); + if self.token_cursor.stack.len() == target_depth { + break; + } } } + debug_assert!(self.token.kind.close_delim().is_some()); // Consume close delimiter self.bump(); diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index a415849b915..9754691a0b9 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -24,10 +24,10 @@ use crate::errors::{ GenericArgsInPatRequireTurbofishSyntax, InclusiveRangeExtraEquals, InclusiveRangeMatchArrow, InclusiveRangeNoEnd, InvalidMutInPattern, ParenRangeSuggestion, PatternOnWrongSideOfAt, RemoveLet, RepeatedMutInPattern, SwitchRefBoxOrder, TopLevelOrPatternNotAllowed, - TopLevelOrPatternNotAllowedSugg, TrailingVertNotAllowed, UnexpectedExpressionInPattern, - UnexpectedExpressionInPatternSugg, UnexpectedLifetimeInPattern, UnexpectedParenInRangePat, - UnexpectedParenInRangePatSugg, UnexpectedVertVertBeforeFunctionParam, - UnexpectedVertVertInPattern, WrapInParens, + TopLevelOrPatternNotAllowedSugg, TrailingVertNotAllowed, TrailingVertSuggestion, + UnexpectedExpressionInPattern, UnexpectedExpressionInPatternSugg, UnexpectedLifetimeInPattern, + UnexpectedParenInRangePat, UnexpectedParenInRangePatSugg, + UnexpectedVertVertBeforeFunctionParam, UnexpectedVertVertInPattern, WrapInParens, }; use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal}; use crate::{exp, maybe_recover_from_interpolated_ty_qpath}; @@ -267,10 +267,9 @@ impl<'a> Parser<'a> { if let PatKind::Or(pats) = &pat.kind { let span = pat.span; - let sub = if pats.len() == 1 { - Some(TopLevelOrPatternNotAllowedSugg::RemoveLeadingVert { - span: span.with_hi(span.lo() + BytePos(1)), - }) + let sub = if let [_] = &pats[..] { + let span = span.with_hi(span.lo() + BytePos(1)); + Some(TopLevelOrPatternNotAllowedSugg::RemoveLeadingVert { span }) } else { Some(TopLevelOrPatternNotAllowedSugg::WrapInParens { span, @@ -362,6 +361,9 @@ impl<'a> Parser<'a> { self.dcx().emit_err(TrailingVertNotAllowed { span: self.token.span, start: lo, + suggestion: TrailingVertSuggestion { + span: self.prev_token.span.shrink_to_hi().with_hi(self.token.span.hi()), + }, token: self.token, note_double_vert: self.token.kind == token::OrOr, }); diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index a6ec3ea4245..86045648859 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -20,11 +20,13 @@ use crate::errors::{ PathFoundAttributeInParams, PathFoundCVariadicParams, PathSingleColon, PathTripleColon, }; use crate::exp; -use crate::parser::{CommaRecoveryMode, ExprKind, RecoverColon, RecoverComma}; +use crate::parser::{ + CommaRecoveryMode, ExprKind, FnContext, FnParseMode, RecoverColon, RecoverComma, +}; /// Specifies how to parse a path. #[derive(Copy, Clone, PartialEq)] -pub(super) enum PathStyle { +pub enum PathStyle { /// In some contexts, notably in expressions, paths with generic arguments are ambiguous /// with something else. For example, in expressions `segment < ....` can be interpreted /// as a comparison and `segment ( ....` can be interpreted as a function call. 
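The `PathStyle` doc comment above describes the expression-position ambiguity that this enum exists to resolve: `segment < ...` can be a comparison and `segment ( ...` a call. A small self-contained illustration of why generic arguments in expressions need the turbofish:

    fn id<T>(x: T) -> T { x }

    fn main() {
        // Without the turbofish, `id<u8>(5)` would parse as the chained
        // comparison `(id < u8) > (5)`, so expression paths use `::<...>`.
        let five = id::<u8>(5);
        assert_eq!(five, 5);
    }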
@@ -148,7 +150,7 @@ impl<'a> Parser<'a> { true } - pub(super) fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> { + pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> { self.parse_path_inner(style, None) } @@ -399,7 +401,13 @@ impl<'a> Parser<'a> { let dcx = self.dcx(); let parse_params_result = self.parse_paren_comma_seq(|p| { - let param = p.parse_param_general(|_| false, false, false); + // Inside parenthesized type arguments, we want types only, not names. + let mode = FnParseMode { + context: FnContext::Free, + req_name: |_| false, + req_body: false, + }; + let param = p.parse_param_general(&mode, false, false); param.map(move |param| { if !matches!(param.pat.kind, PatKind::Missing) { dcx.emit_err(FnPathFoundNamedParams { diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 7aacb674253..b4943ff7de6 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -19,8 +19,8 @@ use super::diagnostics::AttemptLocalParseRecovery; use super::pat::{PatternLocation, RecoverComma}; use super::path::PathStyle; use super::{ - AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode, - Trailing, UsePreAttrPos, + AttrWrapper, BlockMode, FnContext, FnParseMode, ForceCollect, Parser, Restrictions, + SemiColonMode, Trailing, UsePreAttrPos, }; use crate::errors::{self, MalformedLoopLabel}; use crate::exp; @@ -153,7 +153,7 @@ impl<'a> Parser<'a> { attrs.clone(), // FIXME: unwanted clone of attrs false, true, - FnParseMode { req_name: |_| true, req_body: true }, + FnParseMode { req_name: |_| true, context: FnContext::Free, req_body: true }, force_collect, )? { self.mk_stmt(lo.to(item.span), StmtKind::Item(Box::new(item))) diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs index b91548196a3..bd4bb368df0 100644 --- a/compiler/rustc_parse/src/parser/token_type.rs +++ b/compiler/rustc_parse/src/parser/token_type.rs @@ -416,8 +416,8 @@ impl TokenType { /// is always by used those methods. The second field is only used when the /// first field doesn't match. #[derive(Clone, Copy, Debug)] -pub struct ExpTokenPair<'a> { - pub tok: &'a TokenKind, +pub struct ExpTokenPair { + pub tok: TokenKind, pub token_type: TokenType, } @@ -444,7 +444,7 @@ macro_rules! exp { // `ExpTokenPair` helper rules. (@tok, $tok:ident) => { $crate::parser::token_type::ExpTokenPair { - tok: &rustc_ast::token::$tok, + tok: rustc_ast::token::$tok, token_type: $crate::parser::token_type::TokenType::$tok } }; diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 0d479731e73..6168647183f 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -19,6 +19,7 @@ use crate::errors::{ NestedCVariadicType, ReturnTypesUseThinArrow, }; use crate::parser::item::FrontMatterParsingMode; +use crate::parser::{FnContext, FnParseMode}; use crate::{exp, maybe_recover_from_interpolated_ty_qpath}; /// Signals whether parsing a type should allow `+`. @@ -136,14 +137,37 @@ impl<'a> Parser<'a> { /// The difference from `parse_ty` is that this version allows `...` /// (`CVarArgs`) at the top level of the type. 
pub(super) fn parse_ty_for_param(&mut self) -> PResult<'a, Box<Ty>> { - self.parse_ty_common( + let ty = self.parse_ty_common( AllowPlus::Yes, AllowCVariadic::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes, None, RecoverQuestionMark::Yes, - ) + )?; + + // Recover a trailing `= EXPR` if present. + if self.may_recover() + && self.check_noexpect(&token::Eq) + && self.look_ahead(1, |tok| tok.can_begin_expr()) + { + let snapshot = self.create_snapshot_for_diagnostic(); + self.bump(); + let eq_span = self.prev_token.span; + match self.parse_expr() { + Ok(e) => { + self.dcx() + .struct_span_err(eq_span.to(e.span), "parameter defaults are not supported") + .emit(); + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + } + } + } + + Ok(ty) } /// Parses a type in restricted contexts where `+` is not permitted. @@ -307,11 +331,11 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; + let (bound_vars, _) = self.parse_higher_ranked_binder()?; if self.check_fn_front_matter(false, Case::Sensitive) { self.parse_ty_fn_ptr( lo, - lifetime_defs, + bound_vars, Some(self.prev_token.span.shrink_to_lo()), recover_return_sign, )? @@ -325,7 +349,7 @@ impl<'a> Parser<'a> { let path = self.parse_path(PathStyle::Type)?; let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus(); let kind = self.parse_remaining_bounds_path( - lifetime_defs, + bound_vars, path, lo, parse_plus, @@ -358,7 +382,7 @@ impl<'a> Parser<'a> { let path = self.parse_path(PathStyle::Type)?; let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus(); self.parse_remaining_bounds_path( - lifetime_defs, + bound_vars, path, lo, parse_plus, @@ -442,7 +466,7 @@ impl<'a> Parser<'a> { let ty = ts.into_iter().next().unwrap(); let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus(); match ty.kind { - // `(TY_BOUND_NOPAREN) + BOUND + ...`. + // `"(" BareTraitBound ")" "+" Bound "+" ...`. TyKind::Path(None, path) if maybe_bounds => self.parse_remaining_bounds_path( ThinVec::new(), path, @@ -769,7 +793,12 @@ impl<'a> Parser<'a> { if self.may_recover() && self.token == TokenKind::Lt { self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?; } - let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?; + let mode = crate::parser::item::FnParseMode { + req_name: |_| false, + context: FnContext::Free, + req_body: false, + }; + let decl = self.parse_fn_decl(&mode, AllowPlus::No, recover_return_sign)?; let decl_span = span_start.to(self.prev_token.span); Ok(TyKind::FnPtr(Box::new(FnPtrTy { @@ -847,10 +876,13 @@ impl<'a> Parser<'a> { Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)) } - fn parse_precise_capturing_args( - &mut self, - ) -> PResult<'a, (ThinVec<PreciseCapturingArg>, Span)> { - let lo = self.token.span; + /// Parse a use-bound aka precise capturing list. + /// + /// ```ebnf + /// UseBound = "use" "<" (PreciseCapture ("," PreciseCapture)* ","?)? 
">" + /// PreciseCapture = "Self" | Ident | Lifetime + /// ``` + fn parse_use_bound(&mut self, lo: Span, parens: ast::Parens) -> PResult<'a, GenericBound> { self.expect_lt()?; let (args, _, _) = self.parse_seq_to_before_tokens( &[exp!(Gt)], @@ -876,7 +908,13 @@ impl<'a> Parser<'a> { }, )?; self.expect_gt()?; - Ok((args, lo.to(self.prev_token.span))) + + if let ast::Parens::Yes = parens { + self.expect(exp!(CloseParen))?; + self.report_parenthesized_bound(lo, self.prev_token.span, "precise capturing lists"); + } + + Ok(GenericBound::Use(args, lo.to(self.prev_token.span))) } /// Is a `dyn B0 + ... + Bn` type allowed here? @@ -934,9 +972,10 @@ impl<'a> Parser<'a> { self.parse_generic_bounds_common(AllowPlus::Yes) } - /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`. + /// Parse generic bounds. /// - /// See `parse_generic_bound` for the `BOUND` grammar. + /// Only if `allow_plus` this parses a `+`-separated list of bounds (trailing `+` is admitted). + /// Otherwise, this only parses a single bound or none. fn parse_generic_bounds_common(&mut self, allow_plus: AllowPlus) -> PResult<'a, GenericBounds> { let mut bounds = Vec::new(); @@ -982,48 +1021,56 @@ impl<'a> Parser<'a> { || self.check_keyword(exp!(Use)) } - /// Parses a bound according to the grammar: + /// Parse a bound. + /// /// ```ebnf - /// BOUND = TY_BOUND | LT_BOUND + /// Bound = LifetimeBound | UseBound | TraitBound /// ``` fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> { - let lo = self.token.span; let leading_token = self.prev_token; + let lo = self.token.span; + + // We only admit parenthesized *trait* bounds. However, we want to gracefully recover from + // other kinds of parenthesized bounds, so parse the opening parenthesis *here*. + // + // In the future we might want to lift this syntactic restriction and + // introduce "`GenericBound::Paren(Box<GenericBound>)`". let parens = if self.eat(exp!(OpenParen)) { ast::Parens::Yes } else { ast::Parens::No }; - let bound = if self.token.is_lifetime() { - self.parse_generic_lt_bound(lo, parens)? + if self.token.is_lifetime() { + self.parse_lifetime_bound(lo, parens) } else if self.eat_keyword(exp!(Use)) { - // parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of - // lifetimes and ident params (including SelfUpper). These are validated later - // for order, duplication, and whether they actually reference params. - let use_span = self.prev_token.span; - let (args, args_span) = self.parse_precise_capturing_args()?; - GenericBound::Use(args, use_span.to(args_span)) + self.parse_use_bound(lo, parens) } else { - self.parse_generic_ty_bound(lo, parens, &leading_token)? - }; - - Ok(bound) + self.parse_trait_bound(lo, parens, &leading_token) + } } - /// Parses a lifetime ("outlives") bound, e.g. `'a`, according to: + /// Parse a lifetime-bound aka outlives-bound. + /// /// ```ebnf - /// LT_BOUND = LIFETIME + /// LifetimeBound = Lifetime /// ``` - fn parse_generic_lt_bound( - &mut self, - lo: Span, - parens: ast::Parens, - ) -> PResult<'a, GenericBound> { + fn parse_lifetime_bound(&mut self, lo: Span, parens: ast::Parens) -> PResult<'a, GenericBound> { let lt = self.expect_lifetime(); - let bound = GenericBound::Outlives(lt); + if let ast::Parens::Yes = parens { - // FIXME(Centril): Consider not erroring here and accepting `('lt)` instead, - // possibly introducing `GenericBound::Paren(Box<GenericBound>)`? 
- self.recover_paren_lifetime(lo)?; + self.expect(exp!(CloseParen))?; + self.report_parenthesized_bound(lo, self.prev_token.span, "lifetime bounds"); } - Ok(bound) + + Ok(GenericBound::Outlives(lt)) + } + + fn report_parenthesized_bound(&self, lo: Span, hi: Span, kind: &str) -> ErrorGuaranteed { + let mut diag = + self.dcx().struct_span_err(lo.to(hi), format!("{kind} may not be parenthesized")); + diag.multipart_suggestion( + "remove the parentheses", + vec![(lo, String::new()), (hi, String::new())], + Applicability::MachineApplicable, + ); + diag.emit() } /// Emits an error if any trait bound modifiers were present. @@ -1068,27 +1115,17 @@ impl<'a> Parser<'a> { unreachable!("lifetime bound intercepted in `parse_generic_ty_bound` but no modifiers?") } - /// Recover on `('lifetime)` with `(` already eaten. - fn recover_paren_lifetime(&mut self, lo: Span) -> PResult<'a, ()> { - self.expect(exp!(CloseParen))?; - let span = lo.to(self.prev_token.span); - let sugg = errors::RemoveParens { lo, hi: self.prev_token.span }; - - self.dcx().emit_err(errors::ParenthesizedLifetime { span, sugg }); - Ok(()) - } - /// Parses the modifiers that may precede a trait in a bound, e.g. `?Trait` or `[const] Trait`. /// /// If no modifiers are present, this does not consume any tokens. /// /// ```ebnf - /// CONSTNESS = [["["] "const" ["]"]] - /// ASYNCNESS = ["async"] - /// POLARITY = ["?" | "!"] + /// Constness = ("const" | "[" "const" "]")? + /// Asyncness = "async"? + /// Polarity = ("?" | "!")? /// ``` /// - /// See `parse_generic_ty_bound` for the complete grammar of trait bound modifiers. + /// See `parse_trait_bound` for more context. fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> { let modifier_lo = self.token.span; let constness = self.parse_bound_constness()?; @@ -1181,20 +1218,21 @@ impl<'a> Parser<'a> { }) } - /// Parses a type bound according to: + /// Parse a trait bound. + /// /// ```ebnf - /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN) - /// TY_BOUND_NOPAREN = [for<GENERIC_PARAMS> CONSTNESS ASYNCNESS | POLARITY] SIMPLE_PATH + /// TraitBound = BareTraitBound | "(" BareTraitBound ")" + /// BareTraitBound = + /// (HigherRankedBinder Constness Asyncness | Polarity) + /// TypePath /// ``` - /// - /// For example, this grammar accepts `for<'a: 'b> [const] ?m::Trait<'a>`. - fn parse_generic_ty_bound( + fn parse_trait_bound( &mut self, lo: Span, parens: ast::Parens, leading_token: &Token, ) -> PResult<'a, GenericBound> { - let (mut lifetime_defs, binder_span) = self.parse_late_bound_lifetime_defs()?; + let (mut bound_vars, binder_span) = self.parse_higher_ranked_binder()?; let modifiers_lo = self.token.span; let modifiers = self.parse_trait_bound_modifiers()?; @@ -1217,11 +1255,11 @@ impl<'a> Parser<'a> { // e.g. `T: for<'a> 'a` or `T: [const] 'a`. if self.token.is_lifetime() { let _: ErrorGuaranteed = self.error_lt_bound_with_modifiers(modifiers, binder_span); - return self.parse_generic_lt_bound(lo, parens); + return self.parse_lifetime_bound(lo, parens); } - if let (more_lifetime_defs, Some(binder_span)) = self.parse_late_bound_lifetime_defs()? { - lifetime_defs.extend(more_lifetime_defs); + if let (more_bound_vars, Some(binder_span)) = self.parse_higher_ranked_binder()? 
{ + bound_vars.extend(more_bound_vars); self.dcx().emit_err(errors::BinderBeforeModifiers { binder_span, modifiers_span }); } @@ -1281,7 +1319,7 @@ impl<'a> Parser<'a> { }; if self.may_recover() && self.token == TokenKind::OpenParen { - self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?; + self.recover_fn_trait_with_lifetime_params(&mut path, &mut bound_vars)?; } if let ast::Parens::Yes = parens { @@ -1304,7 +1342,7 @@ impl<'a> Parser<'a> { } let poly_trait = - PolyTraitRef::new(lifetime_defs, path, modifiers, lo.to(self.prev_token.span), parens); + PolyTraitRef::new(bound_vars, path, modifiers, lo.to(self.prev_token.span), parens); Ok(GenericBound::Trait(poly_trait)) } @@ -1314,7 +1352,8 @@ impl<'a> Parser<'a> { self.bump(); let args_lo = self.token.span; let snapshot = self.create_snapshot_for_diagnostic(); - match self.parse_fn_decl(|_| false, AllowPlus::No, RecoverReturnSign::OnlyFatArrow) { + let mode = FnParseMode { req_name: |_| false, context: FnContext::Free, req_body: false }; + match self.parse_fn_decl(&mode, AllowPlus::No, RecoverReturnSign::OnlyFatArrow) { Ok(decl) => { self.dcx().emit_err(ExpectedFnPathFoundFnKeyword { fn_token_span }); Some(ast::Path { @@ -1342,8 +1381,12 @@ impl<'a> Parser<'a> { } } - /// Optionally parses `for<$generic_params>`. - pub(super) fn parse_late_bound_lifetime_defs( + /// Parse an optional higher-ranked binder. + /// + /// ```ebnf + /// HigherRankedBinder = ("for" "<" GenericParams ">")? + /// ``` + pub(super) fn parse_higher_ranked_binder( &mut self, ) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> { if self.eat_keyword(exp!(For)) { @@ -1400,8 +1443,9 @@ impl<'a> Parser<'a> { // Parse `(T, U) -> R`. let inputs_lo = self.token.span; + let mode = FnParseMode { req_name: |_| false, context: FnContext::Free, req_body: false }; let inputs: ThinVec<_> = - self.parse_fn_params(|_| false)?.into_iter().map(|input| input.ty).collect(); + self.parse_fn_params(&mode)?.into_iter().map(|input| input.ty).collect(); let inputs_span = inputs_lo.to(self.prev_token.span); let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?; let args = ast::ParenthesizedArgs { @@ -1458,8 +1502,7 @@ impl<'a> Parser<'a> { pub(super) fn expect_lifetime(&mut self) -> Lifetime { if let Some((ident, is_raw)) = self.token.lifetime() { if matches!(is_raw, IdentIsRaw::No) - && ident.without_first_quote().is_reserved() - && ![kw::UnderscoreLifetime, kw::StaticLifetime].contains(&ident.name) + && ident.without_first_quote().is_reserved_lifetime() { self.dcx().emit_err(errors::KeywordLifetime { span: ident.span }); } diff --git a/compiler/rustc_parse_format/Cargo.toml b/compiler/rustc_parse_format/Cargo.toml index d178fcda1fb..645e34b6e32 100644 --- a/compiler/rustc_parse_format/Cargo.toml +++ b/compiler/rustc_parse_format/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -rustc-literal-escaper = "0.0.5" +rustc-literal-escaper.workspace = true rustc_lexer = { path = "../rustc_lexer" } # tidy-alphabetical-end diff --git a/compiler/rustc_passes/Cargo.toml b/compiler/rustc_passes/Cargo.toml index ba81ef3103b..c74608a6146 100644 --- a/compiler/rustc_passes/Cargo.toml +++ b/compiler/rustc_passes/Cargo.toml @@ -24,5 +24,5 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git 
a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index b5031f5d02e..498d9a405fa 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -13,22 +13,6 @@ passes_abi_ne = passes_abi_of = fn_abi_of({$fn_name}) = {$fn_abi} -passes_align_attr_application = - `#[rustc_align(...)]` should be applied to a function item - .label = not a function item - -passes_align_on_fields = - attribute should be applied to a function or method - .warn = {-passes_previously_accepted} - -passes_align_should_be_repr_align = - `#[rustc_align(...)]` is not supported on {$item} items - .suggestion = use `#[repr(align(...))]` instead - -passes_allow_incoherent_impl = - `rustc_allow_incoherent_impl` attribute should be applied to impl items - .label = the only currently supported targets are inherent methods - passes_macro_only_attribute = attribute should be applied to a macro .label = not a macro @@ -78,18 +62,10 @@ passes_change_fields_to_be_of_unit_type = *[other] fields } -passes_cold = - {passes_should_be_applied_to_fn} - .warn = {-passes_previously_accepted} - .label = {passes_should_be_applied_to_fn.label} - passes_collapse_debuginfo = `collapse_debuginfo` attribute should be applied to macro definitions .label = not a macro definition -passes_confusables = attribute should be applied to an inherent method - .label = not an inherent method - passes_const_continue_attr = `#[const_continue]` should be applied to a break expression .label = not a break expression @@ -98,15 +74,15 @@ passes_const_stable_not_stable = attribute `#[rustc_const_stable]` can only be applied to functions that are declared `#[stable]` .label = attribute specified here -passes_coroutine_on_non_closure = - attribute should be applied to closures - .label = not a closure +passes_custom_mir_incompatible_dialect_and_phase = + The {$dialect} dialect is not compatible with the {$phase} phase + .dialect_span = this dialect... + .phase_span = ... 
is not compatible with this phase + +passes_custom_mir_phase_requires_dialect = + `dialect` key required + .phase_span = `phase` argument requires a `dialect` argument -passes_coverage_attribute_not_allowed = - coverage attribute not allowed here - .not_fn_impl_mod = not a function, impl block, or module - .no_body = function has no body - .help = coverage attribute can be applied to a function (with body), impl block, or module passes_dead_codes = { $multiple -> @@ -129,9 +105,6 @@ passes_debug_visualizer_placement = passes_debug_visualizer_unreadable = couldn't read {$file}: {$error} -passes_deprecated = - attribute is ignored here - passes_deprecated_annotation_has_no_effect = this `#[deprecated]` annotation has no effect .suggestion = remove the unnecessary deprecation attribute @@ -172,6 +145,10 @@ passes_doc_alias_start_end = passes_doc_attr_not_crate_level = `#![doc({$attr_name} = "...")]` isn't allowed as a crate-level attribute +passes_doc_attribute_not_attribute = + nonexistent builtin attribute `{$attribute}` used in `#[doc(attribute = "...")]` + .help = only existing builtin attributes are allowed in core/std + passes_doc_cfg_hide_takes_list = `#[doc(cfg_hide(...))]` takes a list of attributes @@ -200,16 +177,16 @@ passes_doc_inline_only_use = passes_doc_invalid = invalid `doc` attribute -passes_doc_keyword_empty_mod = - `#[doc(keyword = "...")]` should be used on empty modules +passes_doc_keyword_attribute_empty_mod = + `#[doc({$attr_name} = "...")]` should be used on empty modules + +passes_doc_keyword_attribute_not_mod = + `#[doc({$attr_name} = "...")]` should be used on modules passes_doc_keyword_not_keyword = nonexistent keyword `{$keyword}` used in `#[doc(keyword = "...")]` .help = only existing keywords are allowed in core/std -passes_doc_keyword_not_mod = - `#[doc(keyword = "...")]` should be used on modules - passes_doc_keyword_only_impl = `#[doc(keyword = "...")]` should be used on impl blocks @@ -304,10 +281,6 @@ passes_duplicate_lang_item_crate_depends = passes_enum_variant_same_name = it is impossible to refer to the {$dead_descr} `{$dead_name}` because it is shadowed by this enum variant with the same name -passes_export_name = - attribute should be applied to a free function, impl method or static - .label = not a free function, impl method or static - passes_extern_main = the `main` function cannot be declared in an `extern` block @@ -317,21 +290,10 @@ passes_feature_previously_declared = passes_feature_stable_twice = feature `{$feature}` is declared stable since {$since}, but was previously declared stable since {$prev_since} -passes_ffi_const_invalid_target = - `#[ffi_const]` may only be used on foreign functions - -passes_ffi_pure_invalid_target = - `#[ffi_pure]` may only be used on foreign functions - passes_has_incoherent_inherent_impl = `rustc_has_incoherent_inherent_impls` attribute should be applied to types or traits .label = only adts, extern types and traits are supported -passes_ignored_attr = - `#[{$sym}]` is ignored on struct fields and match arms - .warn = {-passes_previously_accepted} - .note = {-passes_see_issue(issue: "80564")} - passes_ignored_attr_with_macro = `#[{$sym}]` is ignored on struct fields, match arms and macro defs .warn = {-passes_previously_accepted} @@ -373,22 +335,10 @@ passes_incorrect_target = passes_ineffective_unstable_impl = an `#[unstable]` annotation here has no effect .note = see issue #55436 <https://github.com/rust-lang/rust/issues/55436> for more information -passes_inline_ignored_constants = - `#[inline]` is ignored 
on constants - .warn = {-passes_previously_accepted} - .note = {-passes_see_issue(issue: "65833")} - passes_inline_ignored_for_exported = `#[inline]` is ignored on externally exported functions .help = externally exported functions are functions with `#[no_mangle]`, `#[export_name]`, or `#[linkage]` -passes_inline_ignored_function_prototype = - `#[inline]` is ignored on function prototypes - -passes_inline_not_fn_or_closure = - attribute should be applied to function or closure - .label = not a function or closure - passes_inner_crate_level_attr = crate-level attribute should be in the root module @@ -438,25 +388,6 @@ passes_link = .warn = {-passes_previously_accepted} .label = not an `extern` block -passes_link_name = - attribute should be applied to a foreign function or static - .warn = {-passes_previously_accepted} - .label = not a foreign function or static - .help = try `#[link(name = "{$value}")]` instead - -passes_link_ordinal = - attribute should be applied to a foreign function or static - .label = not a foreign function or static - -passes_link_section = - attribute should be applied to a function or static - .warn = {-passes_previously_accepted} - .label = not a function or static - -passes_linkage = - attribute should be applied to a function or static - .label = not a function definition or static - passes_loop_match_attr = `#[loop_match]` should be applied to a loop .label = not a loop @@ -468,9 +399,6 @@ passes_macro_export_on_decl_macro = `#[macro_export]` has no effect on declarative macro definitions .note = declarative macros follow the same exporting rules as regular items -passes_macro_use = - `#[{$name}]` only has an effect on `extern crate` and modules - passes_may_dangle = `#[may_dangle]` must be applied to a lifetime or type generic parameter in `Drop` impl @@ -508,9 +436,6 @@ passes_must_not_suspend = `must_not_suspend` attribute should be applied to a struct, enum, union, or trait .label = is not a struct, enum, union, or trait -passes_must_use_no_effect = - `#[must_use]` has no effect when applied to {$article} {$target} - passes_no_link = attribute should be applied to an `extern crate` item .label = not an `extern crate` item @@ -529,22 +454,6 @@ passes_no_main_function = .teach_note = If you don't know the basics of Rust, you can go look to the Rust Book to get started: https://doc.rust-lang.org/book/ .non_function_main = non-function item at `crate::main` is found -passes_no_mangle = - attribute should be applied to a free function, impl method or static - .warn = {-passes_previously_accepted} - .label = not a free function, impl method or static - -passes_no_mangle_foreign = - `#[no_mangle]` has no effect on a foreign {$foreign_item_kind} - .warn = {-passes_previously_accepted} - .label = foreign {$foreign_item_kind} - .note = symbol names in extern blocks are not mangled - .suggestion = remove this attribute - -passes_no_sanitize = - `#[no_sanitize({$attr_str})]` should be applied to {$accepted_kind} - .label = not {$accepted_kind} - passes_non_exhaustive_with_default_field_values = `#[non_exhaustive]` can't be used to annotate items with default field values .label = this struct has default field values @@ -556,22 +465,11 @@ passes_non_exported_macro_invalid_attrs = passes_object_lifetime_err = {$repr} -passes_only_has_effect_on = - `#[{$attr_name}]` only has an effect on {$target_name -> - [function] functions - [module] modules - [trait_implementation_block] trait implementation blocks - [inherent_implementation_block] inherent implementation blocks - 
*[unspecified] (unspecified--this is a compiler bug) - } - -passes_optimize_invalid_target = - attribute applied to an invalid target - .label = invalid target - passes_outer_crate_level_attr = - crate-level attribute should be an inner attribute: add an exclamation mark: `#![foo]` + crate-level attribute should be an inner attribute +passes_outer_crate_level_attr_suggestion = + add a `!` passes_panic_unwind_without_std = unwinding panics are not supported without std @@ -584,10 +482,6 @@ passes_parent_info = *[other] {$descr}s } in this {$parent_descr} -passes_pass_by_value = - `pass_by_value` attribute should be applied to a struct, enum or type alias - .label = is not a struct, enum or type alias - passes_proc_macro_bad_sig = {$kind} has incorrect signature passes_remove_fields = @@ -604,7 +498,7 @@ passes_repr_align_greater_than_target_max = .note = `isize::MAX` is {$size} for the current target passes_repr_align_should_be_align = - `#[repr(align(...))]` is not supported on {$item} items + `#[repr(align(...))]` is not supported on {$item} .help = use `#[rustc_align(...)]` instead passes_repr_conflicting = @@ -619,18 +513,10 @@ passes_rustc_const_stable_indirect_pairing = passes_rustc_dirty_clean = attribute requires -Z query-dep-graph to be enabled -passes_rustc_force_inline = - attribute should be applied to a function - .label = not a function definition - passes_rustc_force_inline_coro = attribute cannot be applied to a `async`, `gen` or `async gen` function .label = `async`, `gen` or `async gen` function -passes_rustc_layout_scalar_valid_range_not_struct = - attribute should be applied to a struct - .label = not a struct - passes_rustc_legacy_const_generics_index = #[rustc_legacy_const_generics] must have one index for each generic parameter .label = generic parameters @@ -664,13 +550,11 @@ passes_rustc_pub_transparent = attribute should be applied to `#[repr(transparent)]` types .label = not a `#[repr(transparent)]` type -passes_rustc_std_internal_symbol = - attribute should be applied to functions or statics - .label = not a function or static - -passes_rustc_unstable_feature_bound = - attribute should be applied to `impl`, trait or free function - .label = not an `impl`, trait or free function +passes_sanitize_attribute_not_allowed = + sanitize attribute not allowed here + .not_fn_impl_mod = not a function, impl block, or module + .no_body = function has no body + .help = sanitize attribute can be applied to a function (with body), impl block, or module passes_should_be_applied_to_fn = attribute should be applied to a function definition @@ -683,24 +567,12 @@ passes_should_be_applied_to_static = attribute should be applied to a static .label = not a static -passes_should_be_applied_to_struct_enum = - attribute should be applied to a struct or enum - .label = not a struct or enum - passes_should_be_applied_to_trait = attribute should be applied to a trait .label = not a trait -passes_stability_promotable = - attribute cannot be applied to an expression - passes_string_interpolation_only_works = string interpolation only works in `format!` invocations -passes_target_feature_on_statement = - {passes_should_be_applied_to_fn} - .warn = {-passes_previously_accepted} - .label = {passes_should_be_applied_to_fn.label} - passes_trait_impl_const_stability_mismatch = const stability on the impl does not match the const stability on the trait passes_trait_impl_const_stability_mismatch_impl_stable = this impl is (implicitly) stable... 
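(Editor's aside on how the new Fluent slugs above are consumed: each `passes_*` entry is referenced by a `#[derive(Diagnostic)]` struct in `rustc_passes/src/errors.rs`. The sketch below shows the shape such a struct would plausibly take for the newly added `passes_doc_attribute_not_attribute` message; the struct name and fields come from the `emit_err` call site in `check_attr.rs` later in this diff, while the exact derive annotations follow the usual rustc_macros pattern and are an assumption here, not the definition from this changeset.)

// Hypothetical sketch, not part of this diff: an errors.rs definition that
// could back the `passes_doc_attribute_not_attribute` slug. The `.help`
// subdiagnostic is attached at the struct level because it carries no span.
use rustc_macros::Diagnostic;
use rustc_span::{Span, Symbol};

#[derive(Diagnostic)]
#[diag(passes_doc_attribute_not_attribute)]
#[help]
pub(crate) struct DocAttributeNotAttribute {
    #[primary_span]
    pub span: Span,
    pub attribute: Symbol,
}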
passes_trait_impl_const_stability_mismatch_impl_unstable = this impl is unstable... @@ -825,11 +697,6 @@ passes_unused_variable_try_prefix = unused variable: `{$name}` .label = unused variable .suggestion = if this is intentional, prefix it with an underscore - -passes_used_static = - attribute must be applied to a `static` variable - .label = but this is a {$target} - passes_useless_assignment = useless assignment of {$is_field_assign -> [true] field diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index 93b94a8ba14..2cd4830b5d9 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -10,7 +10,7 @@ use std::collections::hash_map::Entry; use std::slice; use rustc_abi::{Align, ExternAbi, Size}; -use rustc_ast::{AttrStyle, LitKind, MetaItemInner, MetaItemKind, ast, join_path_syms}; +use rustc_ast::{AttrStyle, LitKind, MetaItemInner, MetaItemKind, ast}; use rustc_attr_parsing::{AttributeParser, Late}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, DiagCtxtHandle, IntoDiagArg, MultiSpan, StashKey}; @@ -18,7 +18,7 @@ use rustc_feature::{ ACCEPTED_LANG_FEATURES, AttributeDuplicates, AttributeType, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute, }; -use rustc_hir::attrs::{AttributeKind, InlineAttr, ReprAttr}; +use rustc_hir::attrs::{AttributeKind, InlineAttr, MirDialect, MirPhase, ReprAttr, SanitizerSet}; use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalModDefId; use rustc_hir::intravisit::{self, Visitor}; @@ -40,7 +40,6 @@ use rustc_session::lint; use rustc_session::lint::builtin::{ CONFLICTING_REPR_HINTS, INVALID_DOC_ATTRIBUTES, INVALID_MACRO_EXPORT_ARGUMENTS, MALFORMED_DIAGNOSTIC_ATTRIBUTES, MISPLACED_DIAGNOSTIC_ATTRIBUTES, UNUSED_ATTRIBUTES, - USELESS_DEPRECATED, }; use rustc_session::parse::feature_err; use rustc_span::edition::Edition; @@ -50,7 +49,6 @@ use rustc_trait_selection::infer::{TyCtxtInferExt, ValuePairs}; use rustc_trait_selection::traits::ObligationCtxt; use tracing::debug; -use crate::errors::AlignOnFields; use crate::{errors, fluent_generated as fluent}; #[derive(LintDiagnostic)] @@ -101,6 +99,21 @@ impl IntoDiagArg for ProcMacroKind { } } +#[derive(Clone, Copy)] +enum DocFakeItemKind { + Attribute, + Keyword, +} + +impl DocFakeItemKind { + fn name(self) -> &'static str { + match self { + Self::Attribute => "attribute", + Self::Keyword => "keyword", + } + } +} + struct CheckAttrVisitor<'tcx> { tcx: TyCtxt<'tcx>, @@ -134,56 +147,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Attribute::Parsed(AttributeKind::ProcMacroAttribute(_)) => { self.check_proc_macro(hir_id, target, ProcMacroKind::Attribute); } - Attribute::Parsed(AttributeKind::ProcMacroDerive { span: attr_span, .. }) => { - self.check_generic_attr( - hir_id, - sym::proc_macro_derive, - *attr_span, - target, - Target::Fn, - ); + Attribute::Parsed(AttributeKind::ProcMacroDerive { .. }) => { self.check_proc_macro(hir_id, target, ProcMacroKind::Derive) } - Attribute::Parsed( - AttributeKind::SkipDuringMethodDispatch { span: attr_span, .. 
} - | AttributeKind::Coinductive(attr_span) - | AttributeKind::ConstTrait(attr_span) - | AttributeKind::DenyExplicitImpl(attr_span) - | AttributeKind::DoNotImplementViaObject(attr_span), - ) => { - self.check_must_be_applied_to_trait(*attr_span, span, target); - } - &Attribute::Parsed( - AttributeKind::SpecializationTrait(attr_span) - | AttributeKind::UnsafeSpecializationMarker(attr_span) - | AttributeKind::ParenSugar(attr_span), - ) => { - // FIXME: more validation is needed - self.check_must_be_applied_to_trait(attr_span, span, target); - } &Attribute::Parsed(AttributeKind::TypeConst(attr_span)) => { self.check_type_const(hir_id, attr_span, target) } - &Attribute::Parsed(AttributeKind::Marker(attr_span)) => { - self.check_marker(hir_id, attr_span, span, target) - } - Attribute::Parsed(AttributeKind::Fundamental | AttributeKind::CoherenceIsCore) => { - // FIXME: add validation - } - &Attribute::Parsed(AttributeKind::AllowIncoherentImpl(attr_span)) => { - self.check_allow_incoherent_impl(attr_span, span, target) - } - Attribute::Parsed(AttributeKind::Confusables { first_span, .. }) => { - self.check_confusables(*first_span, target); - } - Attribute::Parsed(AttributeKind::AutomaticallyDerived(attr_span)) => self - .check_generic_attr( - hir_id, - sym::automatically_derived, - *attr_span, - target, - Target::Impl { of_trait: true }, - ), Attribute::Parsed( AttributeKind::Stability { span: attr_span, @@ -193,13 +162,10 @@ impl<'tcx> CheckAttrVisitor<'tcx> { span: attr_span, stability: PartialConstStability { level, feature, .. }, }, - ) => self.check_stability(*attr_span, span, level, *feature, target), + ) => self.check_stability(*attr_span, span, level, *feature), Attribute::Parsed(AttributeKind::Inline(InlineAttr::Force { .. }, ..)) => {} // handled separately below Attribute::Parsed(AttributeKind::Inline(kind, attr_span)) => { - self.check_inline(hir_id, *attr_span, span, kind, target) - } - Attribute::Parsed(AttributeKind::Optimize(_, attr_span)) => { - self.check_optimize(hir_id, *attr_span, span, target) + self.check_inline(hir_id, *attr_span, kind, target) } Attribute::Parsed(AttributeKind::LoopMatch(attr_span)) => { self.check_loop_match(hir_id, *attr_span, target) @@ -207,11 +173,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Attribute::Parsed(AttributeKind::ConstContinue(attr_span)) => { self.check_const_continue(hir_id, *attr_span, target) } - Attribute::Parsed(AttributeKind::AllowInternalUnsafe(attr_span)) => { - self.check_allow_internal_unsafe(hir_id, *attr_span, span, target, attrs) - } - Attribute::Parsed(AttributeKind::AllowInternalUnstable(_, first_span)) => { - self.check_allow_internal_unstable(hir_id, *first_span, span, target, attrs) + Attribute::Parsed(AttributeKind::AllowInternalUnsafe(attr_span) | AttributeKind::AllowInternalUnstable(.., attr_span)) => { + self.check_macro_only_attr(*attr_span, span, target, attrs) } Attribute::Parsed(AttributeKind::AllowConstFnUnstable(_, first_span)) => { self.check_rustc_allow_const_fn_unstable(hir_id, *first_span, span, target) @@ -219,12 +182,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Attribute::Parsed(AttributeKind::Deprecation { .. }) => { self.check_deprecated(hir_id, attr, span, target) } - Attribute::Parsed(AttributeKind::TargetFeature(_, attr_span)) => { - self.check_target_feature(hir_id, *attr_span, span, target, attrs) - } - Attribute::Parsed(AttributeKind::DocComment { .. }) => { /* `#[doc]` is actually a lot more than just doc comments, so is checked below*/ - } - Attribute::Parsed(AttributeKind::Repr { .. 
}) => { /* handled below this loop and elsewhere */ + Attribute::Parsed(AttributeKind::TargetFeature{ attr_span, ..}) => { + self.check_target_feature(hir_id, *attr_span, target, attrs) } Attribute::Parsed(AttributeKind::RustcObjectLifetimeDefault) => { self.check_object_lifetime_default(hir_id); @@ -232,59 +191,32 @@ impl<'tcx> CheckAttrVisitor<'tcx> { &Attribute::Parsed(AttributeKind::PubTransparent(attr_span)) => { self.check_rustc_pub_transparent(attr_span, span, attrs) } - Attribute::Parsed(AttributeKind::Cold(attr_span)) => { - self.check_cold(hir_id, *attr_span, span, target) - } - Attribute::Parsed(AttributeKind::ExportName { span: attr_span, .. }) => { - self.check_export_name(hir_id, *attr_span, span, target) - } Attribute::Parsed(AttributeKind::Align { align, span: attr_span }) => { - self.check_align(span, hir_id, target, *align, *attr_span) - } - Attribute::Parsed(AttributeKind::LinkSection { span: attr_span, .. }) => { - self.check_link_section(hir_id, *attr_span, span, target) - } - Attribute::Parsed(AttributeKind::MacroUse { span, .. }) => { - self.check_macro_use(hir_id, sym::macro_use, *span, target) + self.check_align(*align, *attr_span) } - Attribute::Parsed(AttributeKind::MacroEscape(span)) => { - self.check_macro_use(hir_id, sym::macro_escape, *span, target) - } - Attribute::Parsed(AttributeKind::Naked(attr_span)) => { - self.check_naked(hir_id, *attr_span, span, target) - } - Attribute::Parsed(AttributeKind::NoImplicitPrelude(attr_span)) => self - .check_generic_attr( - hir_id, - sym::no_implicit_prelude, - *attr_span, - target, - Target::Mod, - ), - Attribute::Parsed(AttributeKind::Path(_, attr_span)) => { - self.check_generic_attr(hir_id, sym::path, *attr_span, target, Target::Mod) + Attribute::Parsed(AttributeKind::Naked(..)) => { + self.check_naked(hir_id, target) } Attribute::Parsed(AttributeKind::TrackCaller(attr_span)) => { - self.check_track_caller(hir_id, *attr_span, attrs, span, target) + self.check_track_caller(hir_id, *attr_span, attrs, target) } Attribute::Parsed(AttributeKind::NonExhaustive(attr_span)) => { - self.check_non_exhaustive(hir_id, *attr_span, span, target, item) + self.check_non_exhaustive(*attr_span, span, target, item) } - Attribute::Parsed( - AttributeKind::RustcLayoutScalarValidRangeStart(_num, attr_span) - | AttributeKind::RustcLayoutScalarValidRangeEnd(_num, attr_span), - ) => self.check_rustc_layout_scalar_valid_range(*attr_span, span, target), - Attribute::Parsed(AttributeKind::ExportStable) => { - // handled in `check_export` + &Attribute::Parsed(AttributeKind::FfiPure(attr_span)) => { + self.check_ffi_pure(attr_span, attrs) } - &Attribute::Parsed(AttributeKind::FfiConst(attr_span)) => { - self.check_ffi_const(attr_span, target) + Attribute::Parsed(AttributeKind::MayDangle(attr_span)) => { + self.check_may_dangle(hir_id, *attr_span) } - &Attribute::Parsed(AttributeKind::FfiPure(attr_span)) => { - self.check_ffi_pure(attr_span, attrs, target) + &Attribute::Parsed(AttributeKind::CustomMir(dialect, phase, attr_span)) => { + self.check_custom_mir(dialect, phase, attr_span) } - Attribute::Parsed(AttributeKind::UnstableFeatureBound(syms)) => { - self.check_unstable_feature_bound(syms.first().unwrap().1, span, target) + &Attribute::Parsed(AttributeKind::Sanitize { on_set, off_set, span: attr_span}) => { + self.check_sanitize(attr_span, on_set | off_set, span, target); + }, + Attribute::Parsed(AttributeKind::Link(_, attr_span)) => { + self.check_link(hir_id, *attr_span, span, target) } Attribute::Parsed( AttributeKind::BodyStability { .. 
} @@ -292,46 +224,53 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | AttributeKind::MacroTransparency(_) | AttributeKind::Pointee(..) | AttributeKind::Dummy - | AttributeKind::RustcBuiltinMacro { .. }, + | AttributeKind::RustcBuiltinMacro { .. } + | AttributeKind::Ignore { .. } + | AttributeKind::Path(..) + | AttributeKind::NoImplicitPrelude(..) + | AttributeKind::AutomaticallyDerived(..) + | AttributeKind::Marker(..) + | AttributeKind::SkipDuringMethodDispatch { .. } + | AttributeKind::Coinductive(..) + | AttributeKind::ConstTrait(..) + | AttributeKind::DenyExplicitImpl(..) + | AttributeKind::DoNotImplementViaObject(..) + | AttributeKind::SpecializationTrait(..) + | AttributeKind::UnsafeSpecializationMarker(..) + | AttributeKind::ParenSugar(..) + | AttributeKind::AllowIncoherentImpl(..) + | AttributeKind::Confusables { .. } + // `#[doc]` is actually a lot more than just doc comments, so is checked below + | AttributeKind::DocComment {..} + // handled below this loop and elsewhere + | AttributeKind::Repr { .. } + | AttributeKind::Cold(..) + | AttributeKind::ExportName { .. } + | AttributeKind::CoherenceIsCore + | AttributeKind::Fundamental + | AttributeKind::Optimize(..) + | AttributeKind::LinkSection { .. } + | AttributeKind::MacroUse { .. } + | AttributeKind::MacroEscape( .. ) + | AttributeKind::RustcLayoutScalarValidRangeStart(..) + | AttributeKind::RustcLayoutScalarValidRangeEnd(..) + | AttributeKind::ExportStable + | AttributeKind::FfiConst(..) + | AttributeKind::UnstableFeatureBound(..) + | AttributeKind::AsPtr(..) + | AttributeKind::LinkName { .. } + | AttributeKind::LinkOrdinal { .. } + | AttributeKind::NoMangle(..) + | AttributeKind::Used { .. } + | AttributeKind::PassByValue (..) + | AttributeKind::StdInternalSymbol (..) + | AttributeKind::Coverage (..) + | AttributeKind::ShouldPanic { .. } + | AttributeKind::Coroutine(..) + | AttributeKind::Linkage(..) + | AttributeKind::MustUse { .. } + | AttributeKind::CrateName { .. } ) => { /* do nothing */ } - Attribute::Parsed(AttributeKind::AsPtr(attr_span)) => { - self.check_applied_to_fn_or_method(hir_id, *attr_span, span, target) - } - Attribute::Parsed(AttributeKind::LinkName { span: attr_span, name }) => { - self.check_link_name(hir_id, *attr_span, *name, span, target) - } - Attribute::Parsed(AttributeKind::LinkOrdinal { span: attr_span, .. }) => { - self.check_link_ordinal(*attr_span, span, target) - } - Attribute::Parsed(AttributeKind::MayDangle(attr_span)) => { - self.check_may_dangle(hir_id, *attr_span) - } - Attribute::Parsed(AttributeKind::Ignore { span, .. }) => { - self.check_generic_attr(hir_id, sym::ignore, *span, target, Target::Fn) - } - Attribute::Parsed(AttributeKind::MustUse { span, .. }) => { - self.check_must_use(hir_id, *span, target) - } - Attribute::Parsed(AttributeKind::NoMangle(attr_span)) => { - self.check_no_mangle(hir_id, *attr_span, span, target) - } - Attribute::Parsed(AttributeKind::Used { span: attr_span, .. }) => { - self.check_used(*attr_span, target, span); - } - Attribute::Parsed(AttributeKind::ShouldPanic { span: attr_span, .. 
}) => self - .check_generic_attr(hir_id, sym::should_panic, *attr_span, target, Target::Fn), - &Attribute::Parsed(AttributeKind::PassByValue(attr_span)) => { - self.check_pass_by_value(attr_span, span, target) - } - &Attribute::Parsed(AttributeKind::StdInternalSymbol(attr_span)) => { - self.check_rustc_std_internal_symbol(attr_span, span, target) - } - &Attribute::Parsed(AttributeKind::Coverage(attr_span, _)) => { - self.check_coverage(attr_span, span, target) - } - &Attribute::Parsed(AttributeKind::Coroutine(attr_span)) => { - self.check_coroutine(attr_span, target) - } Attribute::Unparsed(attr_item) => { style = Some(attr_item.style); match attr.path().as_slice() { @@ -341,9 +280,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { [sym::diagnostic, sym::on_unimplemented, ..] => { self.check_diagnostic_on_unimplemented(attr.span(), hir_id, target) } - [sym::no_sanitize, ..] => { - self.check_no_sanitize(attr, span, target) - } [sym::thread_local, ..] => self.check_thread_local(attr, span, target), [sym::doc, ..] => self.check_doc_attrs( attr, @@ -387,15 +323,10 @@ impl<'tcx> CheckAttrVisitor<'tcx> { [sym::rustc_has_incoherent_inherent_impls, ..] => { self.check_has_incoherent_inherent_impls(attr, span, target) } - [sym::ffi_pure, ..] => self.check_ffi_pure(attr.span(), attrs, target), - [sym::ffi_const, ..] => self.check_ffi_const(attr.span(), target), - [sym::link, ..] => self.check_link(hir_id, attr, span, target), - [sym::path, ..] => self.check_generic_attr_unparsed(hir_id, attr, target, Target::Mod), [sym::macro_export, ..] => self.check_macro_export(hir_id, attr, target), [sym::autodiff_forward, ..] | [sym::autodiff_reverse, ..] => { self.check_autodiff(hir_id, attr, span, target) } - [sym::linkage, ..] => self.check_linkage(attr, span, target), [ // ok sym::allow @@ -418,8 +349,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | sym::panic_handler | sym::lang | sym::needs_allocator - | sym::default_lib_allocator - | sym::custom_mir, + | sym::default_lib_allocator, .. ] => {} [name, rest@..] => { @@ -455,22 +385,46 @@ impl<'tcx> CheckAttrVisitor<'tcx> { let builtin = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)); if hir_id != CRATE_HIR_ID { - if let Some(BuiltinAttribute { type_: AttributeType::CrateLevel, .. }) = - attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)) - { - match style { - Some(ast::AttrStyle::Outer) => self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr.span(), - errors::OuterCrateLevelAttr, - ), - Some(ast::AttrStyle::Inner) | None => self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr.span(), - errors::InnerCrateLevelAttr, - ), + match attr { + Attribute::Parsed(_) => { /* Already validated. */ } + Attribute::Unparsed(attr) => { + // FIXME(jdonszelmann): remove once all crate-level attrs are parsed and caught by + // the above + if let Some(BuiltinAttribute { type_: AttributeType::CrateLevel, .. 
}) = + attr.path + .segments + .first() + .and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)) + { + match attr.style { + ast::AttrStyle::Outer => { + let attr_span = attr.span; + let bang_position = self + .tcx + .sess + .source_map() + .span_until_char(attr_span, '[') + .shrink_to_hi(); + + self.tcx.emit_node_span_lint( + UNUSED_ATTRIBUTES, + hir_id, + attr.span, + errors::OuterCrateLevelAttr { + suggestion: errors::OuterCrateLevelAttrSuggestion { + bang_position, + }, + }, + ) + } + ast::AttrStyle::Inner => self.tcx.emit_node_span_lint( + UNUSED_ATTRIBUTES, + hir_id, + attr.span, + errors::InnerCrateLevelAttr, + ), + } + } } } } @@ -483,7 +437,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } self.check_repr(attrs, span, target, item, hir_id); - self.check_rustc_force_inline(hir_id, attrs, span, target); + self.check_rustc_force_inline(hir_id, attrs, target); self.check_mix_no_mangle_export(hir_id, attrs); } @@ -496,15 +450,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { ); } - fn inline_attr_str_error_without_macro_def(&self, hir_id: HirId, attr_span: Span, sym: &str) { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::IgnoredAttr { sym }, - ); - } - /// Checks if `#[diagnostic::do_not_recommend]` is applied on a trait impl and that it has no /// arguments. fn check_do_not_recommend( @@ -552,66 +497,41 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks if an `#[inline]` is applied to a function or a closure. - fn check_inline( - &self, - hir_id: HirId, - attr_span: Span, - defn_span: Span, - kind: &InlineAttr, - target: Target, - ) { + fn check_inline(&self, hir_id: HirId, attr_span: Span, kind: &InlineAttr, target: Target) { match target { Target::Fn | Target::Closure - | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => {} - Target::Method(MethodKind::Trait { body: false }) | Target::ForeignFn => { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::IgnoredInlineAttrFnProto, - ) - } - // FIXME(#65833): We permit associated consts to have an `#[inline]` attribute with - // just a lint, because we previously erroneously allowed it and some crates used it - // accidentally, to be compatible with crates depending on them, we can't throw an - // error here. - Target::AssocConst => self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::IgnoredInlineAttrConstants, - ), - // FIXME(#80564): Same for fields, arms, and macro defs - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "inline") - } - _ => { - self.dcx().emit_err(errors::InlineNotFnOrClosure { attr_span, defn_span }); - } - } - - // `#[inline]` is ignored if the symbol must be codegened upstream because it's exported. - if let Some(did) = hir_id.as_owner() - && self.tcx.def_kind(did).has_codegen_attrs() - && kind != &InlineAttr::Never - { - let attrs = self.tcx.codegen_fn_attrs(did); - // Not checking naked as `#[inline]` is forbidden for naked functions anyways. - if attrs.contains_extern_indicator() { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::InlineIgnoredForExported {}, - ); + | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => { + // `#[inline]` is ignored if the symbol must be codegened upstream because it's exported. 
+ if let Some(did) = hir_id.as_owner() + && self.tcx.def_kind(did).has_codegen_attrs() + && kind != &InlineAttr::Never + { + let attrs = self.tcx.codegen_fn_attrs(did); + // Not checking naked as `#[inline]` is forbidden for naked functions anyways. + if attrs.contains_extern_indicator() { + self.tcx.emit_node_span_lint( + UNUSED_ATTRIBUTES, + hir_id, + attr_span, + errors::InlineIgnoredForExported {}, + ); + } + } } + _ => {} } } - /// Checks that `#[coverage(..)]` is applied to a function/closure/method, - /// or to an impl block or module. - fn check_coverage(&self, attr_span: Span, target_span: Span, target: Target) { + /// Checks that the `#[sanitize(..)]` attribute is applied to a + /// function/closure/method, or to an impl block or module. + fn check_sanitize( + &self, + attr_span: Span, + set: SanitizerSet, + target_span: Span, + target: Target, + ) { let mut not_fn_impl_mod = None; let mut no_body = None; @@ -621,6 +541,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) | Target::Impl { .. } | Target::Mod => return, + Target::Static + // if we mask out the address bits, i.e. *only* address was set, + // we allow it + if set & !(SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS) + == SanitizerSet::empty() => + { + return; + } // These are "functions", but they aren't allowed because they don't // have a body, so the usual explanation would be confusing. @@ -633,7 +561,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - self.dcx().emit_err(errors::CoverageAttributeNotAllowed { + self.dcx().emit_err(errors::SanitizeAttributeNotAllowed { attr_span, not_fn_impl_mod, no_body, @@ -641,105 +569,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { }); } - /// Checks that `#[optimize(..)]` is applied to a function/closure/method, - /// or to an impl block or module. - fn check_optimize(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { - let is_valid = matches!( - target, - Target::Fn - | Target::Closure - | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) - ); - if !is_valid { - self.dcx().emit_err(errors::OptimizeInvalidTarget { - attr_span, - defn_span: span, - on_crate: hir_id == CRATE_HIR_ID, - }); - } - } - - fn check_no_sanitize(&self, attr: &Attribute, span: Span, target: Target) { - if let Some(list) = attr.meta_item_list() { - for item in list.iter() { - let sym = item.name(); - match sym { - Some(s @ sym::address | s @ sym::hwaddress) => { - let is_valid = - matches!(target, Target::Fn | Target::Method(..) 
| Target::Static); - if !is_valid { - self.dcx().emit_err(errors::NoSanitize { - attr_span: item.span(), - defn_span: span, - accepted_kind: "a function or static", - attr_str: s.as_str(), - }); - } - } - _ => { - let is_valid = matches!(target, Target::Fn | Target::Method(..)); - if !is_valid { - self.dcx().emit_err(errors::NoSanitize { - attr_span: item.span(), - defn_span: span, - accepted_kind: "a function", - attr_str: &match sym { - Some(name) => name.to_string(), - None => "...".to_string(), - }, - }); - } - } - } - } - } - } - - /// FIXME: Remove when all attributes are ported to the new parser - fn check_generic_attr_unparsed( - &self, - hir_id: HirId, - attr: &Attribute, - target: Target, - allowed_target: Target, - ) { - if target != allowed_target { - let attr_name = join_path_syms(attr.path()); - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr.span(), - errors::OnlyHasEffectOn { - attr_name, - target_name: allowed_target.name().replace(' ', "_"), - }, - ); - } - } - - fn check_generic_attr( - &self, - hir_id: HirId, - attr_name: Symbol, - attr_span: Span, - target: Target, - allowed_target: Target, - ) { - if target != allowed_target { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::OnlyHasEffectOn { - attr_name: attr_name.to_string(), - target_name: allowed_target.name().replace(' ', "_"), - }, - ); - } - } - /// Checks if `#[naked]` is applied to a function definition. - fn check_naked(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { + fn check_naked(&self, hir_id: HirId, target: Target) { match target { Target::Fn | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => { @@ -758,13 +589,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { .emit(); } } - _ => { - self.dcx().emit_err(errors::AttrShouldBeAppliedToFn { - attr_span, - defn_span: span, - on_crate: hir_id == CRATE_HIR_ID, - }); - } + _ => {} } } @@ -787,7 +612,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } } - /// Checks if `#[collapse_debuginfo]` is applied to a macro. fn check_collapse_debuginfo(&self, attr: &Attribute, span: Span, target: Target) { match target { @@ -807,7 +631,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { hir_id: HirId, attr_span: Span, attrs: &[Attribute], - span: Span, target: Target, ) { match target { @@ -827,28 +650,13 @@ impl<'tcx> CheckAttrVisitor<'tcx> { }); } } - Target::Method(..) | Target::ForeignFn | Target::Closure => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[track_caller]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "track_caller"); - } - _ => { - self.dcx().emit_err(errors::TrackedCallerWrongLocation { - attr_span, - defn_span: span, - on_crate: hir_id == CRATE_HIR_ID, - }); - } + _ => {} } } /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. 
fn check_non_exhaustive( &self, - hir_id: HirId, attr_span: Span, span: Span, target: Target, @@ -869,36 +677,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { }); } } - Target::Enum | Target::Variant => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[non_exhaustive]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "non_exhaustive"); - } - _ => { - self.dcx() - .emit_err(errors::NonExhaustiveWrongLocation { attr_span, defn_span: span }); - } - } - } - - /// Checks if the `#[marker]` attribute on an `item` is valid. - fn check_marker(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { - match target { - Target::Trait => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[marker]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "marker"); - } - _ => { - self.dcx() - .emit_err(errors::AttrShouldBeAppliedToTrait { attr_span, defn_span: span }); - } + _ => {} } } @@ -907,7 +686,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { &self, hir_id: HirId, attr_span: Span, - span: Span, target: Target, attrs: &[Attribute], ) { @@ -930,30 +708,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { }); } } - // FIXME: #[target_feature] was previously erroneously allowed on statements and some - // crates used this, so only emit a warning. - Target::Statement => { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::TargetFeatureOnStatement, - ); - } - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[target_feature]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "target_feature"); - } - _ => { - self.dcx().emit_err(errors::AttrShouldBeAppliedToFn { - attr_span, - defn_span: span, - on_crate: hir_id == CRATE_HIR_ID, - }); - } + _ => {} } } @@ -1057,7 +812,10 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | Target::GenericParam { .. } | Target::MacroDef | Target::PatField - | Target::ExprField => None, + | Target::ExprField + | Target::Crate + | Target::MacroCall + | Target::Delegation { .. 
} => None, } { tcx.dcx().emit_err(errors::DocAliasBadLocation { span, attr_str, location }); return; @@ -1110,7 +868,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_doc_keyword(&self, meta: &MetaItemInner, hir_id: HirId) { + fn check_doc_keyword_and_attribute( + &self, + meta: &MetaItemInner, + hir_id: HirId, + attr_kind: DocFakeItemKind, + ) { fn is_doc_keyword(s: Symbol) -> bool { // FIXME: Once rustdoc can handle URL conflicts on case insensitive file systems, we // can remove the `SelfTy` case here, remove `sym::SelfTy`, and update the @@ -1118,9 +881,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> { s.is_reserved(|| edition::LATEST_STABLE_EDITION) || s.is_weak() || s == sym::SelfTy } - let doc_keyword = match meta.value_str() { + // FIXME: This should support attributes with namespace like `diagnostic::do_not_recommend`. + fn is_builtin_attr(s: Symbol) -> bool { + rustc_feature::BUILTIN_ATTRIBUTE_MAP.contains_key(&s) + } + + let value = match meta.value_str() { Some(value) if value != sym::empty => value, - _ => return self.doc_attr_str_error(meta, "keyword"), + _ => return self.doc_attr_str_error(meta, attr_kind.name()), }; let item_kind = match self.tcx.hir_node(hir_id) { @@ -1130,20 +898,38 @@ impl<'tcx> CheckAttrVisitor<'tcx> { match item_kind { Some(ItemKind::Mod(_, module)) => { if !module.item_ids.is_empty() { - self.dcx().emit_err(errors::DocKeywordEmptyMod { span: meta.span() }); + self.dcx().emit_err(errors::DocKeywordAttributeEmptyMod { + span: meta.span(), + attr_name: attr_kind.name(), + }); return; } } _ => { - self.dcx().emit_err(errors::DocKeywordNotMod { span: meta.span() }); + self.dcx().emit_err(errors::DocKeywordAttributeNotMod { + span: meta.span(), + attr_name: attr_kind.name(), + }); return; } } - if !is_doc_keyword(doc_keyword) { - self.dcx().emit_err(errors::DocKeywordNotKeyword { - span: meta.name_value_literal_span().unwrap_or_else(|| meta.span()), - keyword: doc_keyword, - }); + match attr_kind { + DocFakeItemKind::Keyword => { + if !is_doc_keyword(value) { + self.dcx().emit_err(errors::DocKeywordNotKeyword { + span: meta.name_value_literal_span().unwrap_or_else(|| meta.span()), + keyword: value, + }); + } + } + DocFakeItemKind::Attribute => { + if !is_builtin_attr(value) { + self.dcx().emit_err(errors::DocAttributeNotAttribute { + span: meta.name_value_literal_span().unwrap_or_else(|| meta.span()), + attribute: value, + }); + } + } } } @@ -1403,7 +1189,21 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Some(sym::keyword) => { if self.check_attr_not_crate_level(meta, hir_id, "keyword") { - self.check_doc_keyword(meta, hir_id); + self.check_doc_keyword_and_attribute( + meta, + hir_id, + DocFakeItemKind::Keyword, + ); + } + } + + Some(sym::attribute) => { + if self.check_attr_not_crate_level(meta, hir_id, "attribute") { + self.check_doc_keyword_and_attribute( + meta, + hir_id, + DocFakeItemKind::Attribute, + ); } } @@ -1533,25 +1333,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - /// Warns against some misuses of `#[pass_by_value]` - fn check_pass_by_value(&self, attr_span: Span, span: Span, target: Target) { - match target { - Target::Struct | Target::Enum | Target::TyAlias => {} - _ => { - self.dcx().emit_err(errors::PassByValue { attr_span, span }); - } - } - } - - fn check_allow_incoherent_impl(&self, attr_span: Span, span: Span, target: Target) { - match target { - Target::Method(MethodKind::Inherent) => {} - _ => { - self.dcx().emit_err(errors::AllowIncoherentImpl { attr_span, span }); - } - } - } - fn check_has_incoherent_inherent_impls(&self, attr: &Attribute, 
span: Span, target: Target) { match target { Target::Trait | Target::Struct | Target::Enum | Target::Union | Target::ForeignTy => {} @@ -1563,69 +1344,13 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_ffi_pure(&self, attr_span: Span, attrs: &[Attribute], target: Target) { - if target != Target::ForeignFn { - self.dcx().emit_err(errors::FfiPureInvalidTarget { attr_span }); - return; - } + fn check_ffi_pure(&self, attr_span: Span, attrs: &[Attribute]) { if find_attr!(attrs, AttributeKind::FfiConst(_)) { // `#[ffi_const]` functions cannot be `#[ffi_pure]` self.dcx().emit_err(errors::BothFfiConstAndPure { attr_span }); } } - fn check_ffi_const(&self, attr_span: Span, target: Target) { - if target != Target::ForeignFn { - self.dcx().emit_err(errors::FfiConstInvalidTarget { attr_span }); - } - } - - /// Warns against some misuses of `#[must_use]` - fn check_must_use(&self, hir_id: HirId, attr_span: Span, target: Target) { - if matches!( - target, - Target::Fn - | Target::Enum - | Target::Struct - | Target::Union - | Target::Method(MethodKind::Trait { body: false } | MethodKind::Inherent) - | Target::ForeignFn - // `impl Trait` in return position can trip - // `unused_must_use` if `Trait` is marked as - // `#[must_use]` - | Target::Trait - ) { - return; - } - - // `#[must_use]` can be applied to a trait method definition with a default body - if let Target::Method(MethodKind::Trait { body: true }) = target - && let parent_def_id = self.tcx.hir_get_parent_item(hir_id).def_id - && let containing_item = self.tcx.hir_expect_item(parent_def_id) - && let hir::ItemKind::Trait(..) = containing_item.kind - { - return; - } - - let article = match target { - Target::ExternCrate - | Target::Enum - | Target::Impl { .. } - | Target::Expression - | Target::Arm - | Target::AssocConst - | Target::AssocTy => "an", - _ => "a", - }; - - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::MustUseNoEffect { article, target }, - ); - } - /// Checks if `#[must_not_suspend]` is applied to a struct, enum, union, or trait. fn check_must_not_suspend(&self, attr: &Attribute, span: Span, target: Target) { match target { @@ -1657,32 +1382,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.dcx().emit_err(errors::InvalidMayDangle { attr_span }); } - /// Checks if `#[cold]` is applied to a non-function. - fn check_cold(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { - match target { - Target::Fn | Target::Method(..) | Target::ForeignFn | Target::Closure => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[cold]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "cold"); - } - _ => { - // FIXME: #[cold] was previously allowed on non-functions and some crates used - // this, so only emit a warning. - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::Cold { span, on_crate: hir_id == CRATE_HIR_ID }, - ); - } - } - } - /// Checks if `#[link]` is applied to an item other than a foreign module. 
- fn check_link(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) { + fn check_link(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { if target == Target::ForeignMod && let hir::Node::Item(item) = self.tcx.hir_node(hir_id) && let Item { kind: ItemKind::ForeignMod { abi, .. }, .. } = item @@ -1694,43 +1395,11 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::Link { span: (target != Target::ForeignMod).then_some(span) }, ); } - /// Checks if `#[link_name]` is applied to an item other than a foreign function or static. - fn check_link_name( - &self, - hir_id: HirId, - attr_span: Span, - name: Symbol, - span: Span, - target: Target, - ) { - match target { - Target::ForeignFn | Target::ForeignStatic => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[link_name]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "link_name"); - } - _ => { - // FIXME: #[link_name] was previously allowed on non-functions/statics and some crates - // used this, so only emit a warning. - let help_span = matches!(target, Target::ForeignMod).then_some(attr_span); - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::LinkName { span, help_span, value: name.as_str() }, - ); - } - } - } - /// Checks if `#[no_link]` is applied to an `extern crate`. fn check_no_link(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) { match target { @@ -1748,35 +1417,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn is_impl_item(&self, hir_id: HirId) -> bool { - matches!(self.tcx.hir_node(hir_id), hir::Node::ImplItem(..)) - } - - /// Checks if `#[export_name]` is applied to a function or static. - fn check_export_name(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { - match target { - Target::Static | Target::Fn => {} - Target::Method(..) if self.is_impl_item(hir_id) => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[export_name]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "export_name"); - } - _ => { - self.dcx().emit_err(errors::ExportName { attr_span, span }); - } - } - } - - fn check_rustc_layout_scalar_valid_range(&self, attr_span: Span, span: Span, target: Target) { - if target != Target::Struct { - self.dcx().emit_err(errors::RustcLayoutScalarValidRangeNotStruct { attr_span, span }); - return; - } - } - /// Checks if `#[rustc_legacy_const_generics]` is applied to a function and has a valid argument. fn check_rustc_legacy_const_generics( &self, @@ -1911,106 +1551,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - /// Checks if `#[link_section]` is applied to a function or static. - fn check_link_section(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { - match target { - Target::Static | Target::Fn | Target::Method(..) 
=> {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[link_section]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "link_section"); - } - _ => { - // FIXME: #[link_section] was previously allowed on non-functions/statics and some - // crates used this, so only emit a warning. - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::LinkSection { span }, - ); - } - } - } - - /// Checks if `#[no_mangle]` is applied to a function or static. - fn check_no_mangle(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { - match target { - Target::Static | Target::Fn => {} - Target::Method(..) if self.is_impl_item(hir_id) => {} - // FIXME(#80564): We permit struct fields, match arms and macro defs to have an - // `#[no_mangle]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "no_mangle"); - } - // FIXME: #[no_mangle] was previously allowed on non-functions/statics, this should be an error - // The error should specify that the item that is wrong is specifically a *foreign* fn/static - // otherwise the error seems odd - Target::ForeignFn | Target::ForeignStatic => { - let foreign_item_kind = match target { - Target::ForeignFn => "function", - Target::ForeignStatic => "static", - _ => unreachable!(), - }; - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::NoMangleForeign { span, attr_span, foreign_item_kind }, - ); - } - _ => { - // FIXME: #[no_mangle] was previously allowed on non-functions/statics and some - // crates used this, so only emit a warning. - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::NoMangle { span }, - ); - } - } - } - - /// Checks if the `#[align]` attributes on `item` are valid. - // FIXME(#82232, #143834): temporarily renamed to mitigate `#[align]` nameres ambiguity - fn check_align( - &self, - span: Span, - hir_id: HirId, - target: Target, - align: Align, - attr_span: Span, - ) { - match target { - Target::Fn | Target::Method(_) | Target::ForeignFn => {} - Target::Field => { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - AlignOnFields { span }, - ); - } - Target::Struct | Target::Union | Target::Enum => { - self.dcx().emit_err(errors::AlignShouldBeReprAlign { - span: attr_span, - item: target.name(), - align_bytes: align.bytes(), - }); - } - _ => { - self.dcx().emit_err(errors::AlignAttrApplication { hint_span: attr_span, span }); - } - } - - self.check_align_value(align, attr_span); - } - /// Checks if the `#[repr]` attributes on `item` are valid. 
fn check_repr( &self, @@ -2065,7 +1605,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Target::Fn | Target::Method(_) => { self.dcx().emit_err(errors::ReprAlignShouldBeAlign { span: *repr_span, - item: target.name(), + item: target.plural_name(), }); } _ => { @@ -2076,7 +1616,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - self.check_align_value(*align, *repr_span); + self.check_align(*align, *repr_span); } ReprAttr::ReprPacked(_) => { if target != Target::Struct && target != Target::Union { @@ -2135,7 +1675,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Target::Fn | Target::Method(_) => { self.dcx().emit_err(errors::ReprAlignShouldBeAlign { span: first_attr_span, - item: target.name(), + item: target.plural_name(), }); } _ => { @@ -2182,7 +1722,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_align_value(&self, align: Align, span: Span) { + fn check_align(&self, align: Align, span: Span) { if align.bytes() > 2_u64.pow(29) { // for values greater than 2^29, a different error will be emitted, make sure that happens self.dcx().span_delayed_bug( @@ -2201,61 +1741,16 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_used(&self, attr_span: Span, target: Target, target_span: Span) { - if target != Target::Static { - self.dcx().emit_err(errors::UsedStatic { - attr_span, - span: target_span, - target: target.name(), - }); - } - } - - /// Outputs an error for `#[allow_internal_unstable]` which can only be applied to macros. - /// (Allows proc_macro functions) - fn check_allow_internal_unstable( - &self, - hir_id: HirId, - attr_span: Span, - span: Span, - target: Target, - attrs: &[Attribute], - ) { - self.check_macro_only_attr( - hir_id, - attr_span, - span, - target, - attrs, - "allow_internal_unstable", - ) - } - - /// Outputs an error for `#[allow_internal_unsafe]` which can only be applied to macros. - /// (Allows proc_macro functions) - fn check_allow_internal_unsafe( - &self, - hir_id: HirId, - attr_span: Span, - span: Span, - target: Target, - attrs: &[Attribute], - ) { - self.check_macro_only_attr(hir_id, attr_span, span, target, attrs, "allow_internal_unsafe") - } - /// Outputs an error for attributes that can only be applied to macros, such as /// `#[allow_internal_unsafe]` and `#[allow_internal_unstable]`. /// (Allows proc_macro functions) // FIXME(jdonszelmann): if possible, move to attr parsing fn check_macro_only_attr( &self, - hir_id: HirId, attr_span: Span, span: Span, target: Target, attrs: &[Attribute], - attr_name: &str, ) { match target { Target::Fn => { @@ -2265,23 +1760,10 @@ impl<'tcx> CheckAttrVisitor<'tcx> { return; } } - // continue out of the match - } - // return on decl macros - Target::MacroDef => return, - // FIXME(#80564): We permit struct fields and match arms to have an - // `#[allow_internal_unstable]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm => { - self.inline_attr_str_error_without_macro_def(hir_id, attr_span, attr_name); - return; + self.tcx.dcx().emit_err(errors::MacroOnlyAttribute { attr_span, span }); } - // otherwise continue out of the match _ => {} } - - self.tcx.dcx().emit_err(errors::MacroOnlyAttribute { attr_span, span }); } /// Checks if the items on the `#[debugger_visualizer]` attribute are valid. 
@@ -2308,66 +1790,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> { target: Target, ) { match target { - Target::Fn | Target::Method(_) - if self.tcx.is_const_fn(hir_id.expect_owner().to_def_id()) => {} - // FIXME(#80564): We permit struct fields and match arms to have an - // `#[allow_internal_unstable]` attribute with just a lint, because we previously - // erroneously allowed it and some crates used it accidentally, to be compatible - // with crates depending on them, we can't throw an error here. - Target::Field | Target::Arm | Target::MacroDef => self - .inline_attr_str_error_with_macro_def(hir_id, attr_span, "allow_internal_unstable"), - _ => { - self.tcx.dcx().emit_err(errors::RustcAllowConstFnUnstable { attr_span, span }); - } - } - } - - fn check_unstable_feature_bound(&self, attr_span: Span, span: Span, target: Target) { - match target { - // FIXME(staged_api): There's no reason we can't support more targets here. We're just - // being conservative to begin with. - Target::Fn | Target::Impl { .. } | Target::Trait => {} - Target::ExternCrate - | Target::Use - | Target::Static - | Target::Const - | Target::Closure - | Target::Mod - | Target::ForeignMod - | Target::GlobalAsm - | Target::TyAlias - | Target::Enum - | Target::Variant - | Target::Struct - | Target::Field - | Target::Union - | Target::TraitAlias - | Target::Expression - | Target::Statement - | Target::Arm - | Target::AssocConst - | Target::Method(_) - | Target::AssocTy - | Target::ForeignFn - | Target::ForeignStatic - | Target::ForeignTy - | Target::GenericParam { .. } - | Target::MacroDef - | Target::Param - | Target::PatField - | Target::ExprField - | Target::WherePredicate => { - self.tcx.dcx().emit_err(errors::RustcUnstableFeatureBound { attr_span, span }); - } - } - } - - fn check_rustc_std_internal_symbol(&self, attr_span: Span, span: Span, target: Target) { - match target { - Target::Fn | Target::Static | Target::ForeignFn | Target::ForeignStatic => {} - _ => { - self.tcx.dcx().emit_err(errors::RustcStdInternalSymbol { attr_span, span }); + Target::Fn | Target::Method(_) => { + if !self.tcx.is_const_fn(hir_id.expect_owner().to_def_id()) { + self.tcx.dcx().emit_err(errors::RustcAllowConstFnUnstable { attr_span, span }); + } } + _ => {} } } @@ -2377,15 +1805,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { item_span: Span, level: &StabilityLevel, feature: Symbol, - target: Target, ) { - match target { - Target::Expression => { - self.dcx().emit_err(errors::StabilityPromotable { attr_span }); - } - _ => {} - } - // Stable *language* features shouldn't be used as unstable library features. // (Not doing this for stable library features is checked by tidy.) 
if level.is_unstable() @@ -2397,40 +1817,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_link_ordinal(&self, attr_span: Span, _span: Span, target: Target) { - match target { - Target::ForeignFn | Target::ForeignStatic => {} - _ => { - self.dcx().emit_err(errors::LinkOrdinal { attr_span }); - } - } - } - - fn check_confusables(&self, span: Span, target: Target) { - if !matches!(target, Target::Method(MethodKind::Inherent)) { - self.dcx().emit_err(errors::Confusables { attr_span: span }); - } - } - fn check_deprecated(&self, hir_id: HirId, attr: &Attribute, _span: Span, target: Target) { match target { - Target::Closure | Target::Expression | Target::Statement | Target::Arm => { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr.span(), - errors::Deprecated, - ); - } - Target::Impl { of_trait: true } - | Target::GenericParam { has_default: false, kind: _ } => { - self.tcx.emit_node_span_lint( - USELESS_DEPRECATED, - hir_id, - attr.span(), - errors::DeprecatedAnnotationHasNoEffect { span: attr.span() }, - ); - } Target::AssocConst | Target::Method(..) | Target::AssocTy if matches!( self.tcx.def_kind(self.tcx.local_parent(hir_id.owner.def_id)), @@ -2438,7 +1826,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { ) => { self.tcx.emit_node_span_lint( - USELESS_DEPRECATED, + UNUSED_ATTRIBUTES, hir_id, attr.span(), errors::DeprecatedAnnotationHasNoEffect { span: attr.span() }, @@ -2448,20 +1836,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_macro_use(&self, hir_id: HirId, name: Symbol, attr_span: Span, target: Target) { - match target { - Target::ExternCrate | Target::Mod => {} - _ => { - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::MacroUse { name }, - ); - } - } - } - fn check_macro_export(&self, hir_id: HirId, attr: &Attribute, target: Target) { if target != Target::MacroDef { self.tcx.emit_node_span_lint( @@ -2533,12 +1907,24 @@ impl<'tcx> CheckAttrVisitor<'tcx> { { if hir_id != CRATE_HIR_ID { match style { - Some(ast::AttrStyle::Outer) => self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr.span(), - errors::OuterCrateLevelAttr, - ), + Some(ast::AttrStyle::Outer) => { + let attr_span = attr.span(); + let bang_position = self + .tcx + .sess + .source_map() + .span_until_char(attr_span, '[') + .shrink_to_hi(); + + self.tcx.emit_node_span_lint( + UNUSED_ATTRIBUTES, + hir_id, + attr_span, + errors::OuterCrateLevelAttr { + suggestion: errors::OuterCrateLevelAttrSuggestion { bang_position }, + }, + ) + } Some(ast::AttrStyle::Inner) | None => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, @@ -2681,15 +2067,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_coroutine(&self, attr_span: Span, target: Target) { - match target { - Target::Closure => return, - _ => { - self.dcx().emit_err(errors::CoroutineOnNonClosure { span: attr_span }); - } - } - } - fn check_type_const(&self, hir_id: HirId, attr_span: Span, target: Target) { let tcx = self.tcx; if target == Target::AssocConst @@ -2707,19 +2084,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_linkage(&self, attr: &Attribute, span: Span, target: Target) { - match target { - Target::Fn - | Target::Method(..) - | Target::Static - | Target::ForeignStatic - | Target::ForeignFn => {} - _ => { - self.dcx().emit_err(errors::Linkage { attr_span: attr.span(), span }); - } - } - } - fn check_rustc_pub_transparent(&self, attr_span: Span, span: Span, attrs: &[Attribute]) { if !find_attr!(attrs, AttributeKind::Repr { reprs, .. 
} => reprs.iter().any(|(r, _)| r == &ReprAttr::ReprTransparent)) .unwrap_or(false) @@ -2728,43 +2092,28 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_rustc_force_inline( - &self, - hir_id: HirId, - attrs: &[Attribute], - span: Span, - target: Target, - ) { - match ( + fn check_rustc_force_inline(&self, hir_id: HirId, attrs: &[Attribute], target: Target) { + if let (Target::Closure, None) = ( target, find_attr!(attrs, AttributeKind::Inline(InlineAttr::Force { attr_span, .. }, _) => *attr_span), ) { - (Target::Closure, None) => { - let is_coro = matches!( - self.tcx.hir_expect_expr(hir_id).kind, - hir::ExprKind::Closure(hir::Closure { - kind: hir::ClosureKind::Coroutine(..) - | hir::ClosureKind::CoroutineClosure(..), - .. - }) - ); - let parent_did = self.tcx.hir_get_parent_item(hir_id).to_def_id(); - let parent_span = self.tcx.def_span(parent_did); + let is_coro = matches!( + self.tcx.hir_expect_expr(hir_id).kind, + hir::ExprKind::Closure(hir::Closure { + kind: hir::ClosureKind::Coroutine(..) | hir::ClosureKind::CoroutineClosure(..), + .. + }) + ); + let parent_did = self.tcx.hir_get_parent_item(hir_id).to_def_id(); + let parent_span = self.tcx.def_span(parent_did); - if let Some(attr_span) = find_attr!( - self.tcx.get_all_attrs(parent_did), - AttributeKind::Inline(InlineAttr::Force { attr_span, .. }, _) => *attr_span - ) && is_coro - { - self.dcx() - .emit_err(errors::RustcForceInlineCoro { attr_span, span: parent_span }); - } - } - (Target::Fn, _) => (), - (_, Some(attr_span)) => { - self.dcx().emit_err(errors::RustcForceInline { attr_span, span }); + if let Some(attr_span) = find_attr!( + self.tcx.get_all_attrs(parent_did), + AttributeKind::Inline(InlineAttr::Force { attr_span, .. }, _) => *attr_span + ) && is_coro + { + self.dcx().emit_err(errors::RustcForceInlineCoro { attr_span, span: parent_span }); } - (_, None) => (), } } @@ -2814,8 +2163,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { let node_span = self.tcx.hir_span(hir_id); if !matches!(target, Target::Expression) { - self.dcx().emit_err(errors::LoopMatchAttr { attr_span, node_span }); - return; + return; // Handled in target checking during attr parse } if !matches!(self.tcx.hir_expect_expr(hir_id).kind, hir::ExprKind::Loop(..)) { @@ -2827,14 +2175,55 @@ impl<'tcx> CheckAttrVisitor<'tcx> { let node_span = self.tcx.hir_span(hir_id); if !matches!(target, Target::Expression) { - self.dcx().emit_err(errors::ConstContinueAttr { attr_span, node_span }); - return; + return; // Handled in target checking during attr parse } if !matches!(self.tcx.hir_expect_expr(hir_id).kind, hir::ExprKind::Break(..)) { self.dcx().emit_err(errors::ConstContinueAttr { attr_span, node_span }); }; } + + fn check_custom_mir( + &self, + dialect: Option<(MirDialect, Span)>, + phase: Option<(MirPhase, Span)>, + attr_span: Span, + ) { + let Some((dialect, dialect_span)) = dialect else { + if let Some((_, phase_span)) = phase { + self.dcx() + .emit_err(errors::CustomMirPhaseRequiresDialect { attr_span, phase_span }); + } + return; + }; + + match dialect { + MirDialect::Analysis => { + if let Some((MirPhase::Optimized, phase_span)) = phase { + self.dcx().emit_err(errors::CustomMirIncompatibleDialectAndPhase { + dialect, + phase: MirPhase::Optimized, + attr_span, + dialect_span, + phase_span, + }); + } + } + + MirDialect::Built => { + if let Some((phase, phase_span)) = phase { + self.dcx().emit_err(errors::CustomMirIncompatibleDialectAndPhase { + dialect, + phase, + attr_span, + dialect_span, + phase_span, + }); + } + } + MirDialect::Runtime => {} + } + } } 
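A free-standing sketch of the dialect/phase compatibility rules enforced by the newly added `check_custom_mir` above. The enums and phase names are hypothetical stand-ins rather than rustc's `MirDialect`/`MirPhase`; only the rules visible in the function are modeled: a phase needs a dialect, the built dialect takes no phase, the analysis dialect rejects the optimized phase, and the runtime dialect accepts anything.

#[derive(Clone, Copy, Debug)]
enum Dialect { Built, Analysis, Runtime }

#[derive(Clone, Copy, Debug)]
enum Phase { Initial, Optimized } // phase names are illustrative

fn validate_custom_mir(dialect: Option<Dialect>, phase: Option<Phase>) -> Result<(), String> {
    match (dialect, phase) {
        // A phase on its own is meaningless: it needs a dialect to qualify.
        (None, Some(p)) => Err(format!("phase {p:?} requires a dialect")),
        (None, None) => Ok(()),
        // The built dialect has no phases at all.
        (Some(Dialect::Built), Some(p)) => Err(format!("built dialect takes no phase, got {p:?}")),
        // The analysis dialect never reaches the optimized phase.
        (Some(Dialect::Analysis), Some(Phase::Optimized)) => {
            Err("analysis dialect is incompatible with the optimized phase".to_string())
        }
        // Everything else (runtime with any phase, or a bare dialect) is accepted.
        _ => Ok(()),
    }
}

fn main() {
    assert!(validate_custom_mir(Some(Dialect::Runtime), Some(Phase::Optimized)).is_ok());
    assert!(validate_custom_mir(Some(Dialect::Analysis), Some(Phase::Optimized)).is_err());
    assert!(validate_custom_mir(Some(Dialect::Built), Some(Phase::Initial)).is_err());
    assert!(validate_custom_mir(None, Some(Phase::Optimized)).is_err());
}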
impl<'tcx> Visitor<'tcx> for CheckAttrVisitor<'tcx> { @@ -2871,6 +2260,7 @@ impl<'tcx> Visitor<'tcx> for CheckAttrVisitor<'tcx> { .hir_attrs(where_predicate.hir_id) .iter() .filter(|attr| !ATTRS_ALLOWED.iter().any(|&sym| attr.has_name(sym))) + .filter(|attr| !attr.is_parsed_attr()) .map(|attr| attr.span()) .collect::<Vec<_>>(); if !spans.is_empty() { @@ -3001,10 +2391,6 @@ fn check_invalid_crate_level_attr(tcx: TyCtxt<'_>, attrs: &[Attribute]) { }) = attr { (*first_attr_span, sym::repr) - } else if let Attribute::Parsed(AttributeKind::Path(.., span)) = attr { - (*span, sym::path) - } else if let Attribute::Parsed(AttributeKind::AutomaticallyDerived(span)) = attr { - (*span, sym::automatically_derived) } else { continue; }; diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index d5d7cc5dc2e..fc33405d455 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -8,7 +8,6 @@ use std::mem; use hir::def_id::{LocalDefIdMap, LocalDefIdSet}; use rustc_abi::FieldIdx; use rustc_data_structures::fx::FxIndexSet; -use rustc_data_structures::unord::UnordSet; use rustc_errors::MultiSpan; use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId}; @@ -79,7 +78,7 @@ struct MarkSymbolVisitor<'tcx> { worklist: Vec<(LocalDefId, ComesFromAllowExpect)>, tcx: TyCtxt<'tcx>, maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>, - scanned: UnordSet<(LocalDefId, ComesFromAllowExpect)>, + scanned: LocalDefIdSet, live_symbols: LocalDefIdSet, repr_unconditionally_treats_fields_as_live: bool, repr_has_repr_simd: bool, @@ -321,18 +320,8 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { fn mark_live_symbols(&mut self) { while let Some(work) = self.worklist.pop() { - if !self.scanned.insert(work) { - continue; - } - let (mut id, comes_from_allow_expect) = work; - // Avoid accessing the HIR for the synthesized associated type generated for RPITITs. - if self.tcx.is_impl_trait_in_trait(id.to_def_id()) { - self.live_symbols.insert(id); - continue; - } - // in the case of tuple struct constructors we want to check the item, // not the generated tuple struct constructor function if let DefKind::Ctor(..) = self.tcx.def_kind(id) { @@ -360,9 +349,23 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { // this "duplication" is essential as otherwise a function with `#[expect]` // called from a `pub fn` may be falsely reported as not live, falsely // triggering the `unfulfilled_lint_expectations` lint. - if comes_from_allow_expect != ComesFromAllowExpect::Yes { + match comes_from_allow_expect { + ComesFromAllowExpect::Yes => {} + ComesFromAllowExpect::No => { + self.live_symbols.insert(id); + } + } + + if !self.scanned.insert(id) { + continue; + } + + // Avoid accessing the HIR for the synthesized associated type generated for RPITITs. + if self.tcx.is_impl_trait_in_trait(id.to_def_id()) { self.live_symbols.insert(id); + continue; } + self.visit_node(self.tcx.hir_node_by_def_id(id)); } } @@ -371,7 +374,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { /// will be ignored for the purposes of dead code analysis (see PR #85200 /// for discussion). 
fn should_ignore_item(&mut self, def_id: DefId) -> bool { - if let Some(impl_of) = self.tcx.impl_of_assoc(def_id) { + if let Some(impl_of) = self.tcx.trait_impl_of_assoc(def_id) { if !self.tcx.is_automatically_derived(impl_of) { return false; } diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index 10b30fbe8c9..680e2a26d84 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -7,6 +7,7 @@ use rustc_errors::{ MultiSpan, Subdiagnostic, }; use rustc_hir::Target; +use rustc_hir::attrs::{MirDialect, MirPhase}; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::{MainDefinition, Ty}; use rustc_span::{DUMMY_SP, Span, Symbol}; @@ -63,7 +64,17 @@ pub(crate) struct MixedExportNameAndNoMangle { #[derive(LintDiagnostic)] #[diag(passes_outer_crate_level_attr)] -pub(crate) struct OuterCrateLevelAttr; +pub(crate) struct OuterCrateLevelAttr { + #[subdiagnostic] + pub suggestion: OuterCrateLevelAttrSuggestion, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(passes_outer_crate_level_attr_suggestion, style = "verbose")] +pub(crate) struct OuterCrateLevelAttrSuggestion { + #[suggestion_part(code = "!")] + pub bang_position: Span, +} #[derive(LintDiagnostic)] #[diag(passes_inner_crate_level_attr)] @@ -75,58 +86,6 @@ pub(crate) struct IgnoredAttrWithMacro<'a> { pub sym: &'a str, } -#[derive(LintDiagnostic)] -#[diag(passes_ignored_attr)] -pub(crate) struct IgnoredAttr<'a> { - pub sym: &'a str, -} - -#[derive(LintDiagnostic)] -#[diag(passes_inline_ignored_function_prototype)] -pub(crate) struct IgnoredInlineAttrFnProto; - -#[derive(LintDiagnostic)] -#[diag(passes_inline_ignored_constants)] -#[warning] -#[note] -pub(crate) struct IgnoredInlineAttrConstants; - -#[derive(Diagnostic)] -#[diag(passes_inline_not_fn_or_closure, code = E0518)] -pub(crate) struct InlineNotFnOrClosure { - #[primary_span] - pub attr_span: Span, - #[label] - pub defn_span: Span, -} - -/// "coverage attribute not allowed here" -#[derive(Diagnostic)] -#[diag(passes_coverage_attribute_not_allowed, code = E0788)] -pub(crate) struct CoverageAttributeNotAllowed { - #[primary_span] - pub attr_span: Span, - /// "not a function, impl block, or module" - #[label(passes_not_fn_impl_mod)] - pub not_fn_impl_mod: Option<Span>, - /// "function has no body" - #[label(passes_no_body)] - pub no_body: Option<Span>, - /// "coverage attribute can be applied to a function (with body), impl block, or module" - #[help] - pub help: (), -} - -#[derive(Diagnostic)] -#[diag(passes_optimize_invalid_target)] -pub(crate) struct OptimizeInvalidTarget { - #[primary_span] - pub attr_span: Span, - #[label] - pub defn_span: Span, - pub on_crate: bool, -} - #[derive(Diagnostic)] #[diag(passes_should_be_applied_to_fn)] pub(crate) struct AttrShouldBeAppliedToFn { @@ -138,25 +97,6 @@ pub(crate) struct AttrShouldBeAppliedToFn { } #[derive(Diagnostic)] -#[diag(passes_should_be_applied_to_fn, code = E0739)] -pub(crate) struct TrackedCallerWrongLocation { - #[primary_span] - pub attr_span: Span, - #[label] - pub defn_span: Span, - pub on_crate: bool, -} - -#[derive(Diagnostic)] -#[diag(passes_should_be_applied_to_struct_enum, code = E0701)] -pub(crate) struct NonExhaustiveWrongLocation { - #[primary_span] - pub attr_span: Span, - #[label] - pub defn_span: Span, -} - -#[derive(Diagnostic)] #[diag(passes_non_exhaustive_with_default_field_values)] pub(crate) struct NonExhaustiveWithDefaultFieldValues { #[primary_span] @@ -174,10 +114,6 @@ pub(crate) struct 
AttrShouldBeAppliedToTrait { pub defn_span: Span, } -#[derive(LintDiagnostic)] -#[diag(passes_target_feature_on_statement)] -pub(crate) struct TargetFeatureOnStatement; - #[derive(Diagnostic)] #[diag(passes_should_be_applied_to_static)] pub(crate) struct AttrShouldBeAppliedToStatic { @@ -259,10 +195,11 @@ pub(crate) struct DocAliasMalformed { } #[derive(Diagnostic)] -#[diag(passes_doc_keyword_empty_mod)] -pub(crate) struct DocKeywordEmptyMod { +#[diag(passes_doc_keyword_attribute_empty_mod)] +pub(crate) struct DocKeywordAttributeEmptyMod { #[primary_span] pub span: Span, + pub attr_name: &'static str, } #[derive(Diagnostic)] @@ -275,10 +212,20 @@ pub(crate) struct DocKeywordNotKeyword { } #[derive(Diagnostic)] -#[diag(passes_doc_keyword_not_mod)] -pub(crate) struct DocKeywordNotMod { +#[diag(passes_doc_attribute_not_attribute)] +#[help] +pub(crate) struct DocAttributeNotAttribute { + #[primary_span] + pub span: Span, + pub attribute: Symbol, +} + +#[derive(Diagnostic)] +#[diag(passes_doc_keyword_attribute_not_mod)] +pub(crate) struct DocKeywordAttributeNotMod { #[primary_span] pub span: Span, + pub attr_name: &'static str, } #[derive(Diagnostic)] @@ -417,24 +364,6 @@ pub(crate) struct DocTestUnknownInclude { pub(crate) struct DocInvalid; #[derive(Diagnostic)] -#[diag(passes_pass_by_value)] -pub(crate) struct PassByValue { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_allow_incoherent_impl)] -pub(crate) struct AllowIncoherentImpl { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(passes_has_incoherent_inherent_impl)] pub(crate) struct HasIncoherentInherentImpl { #[primary_span] @@ -451,27 +380,6 @@ pub(crate) struct BothFfiConstAndPure { } #[derive(Diagnostic)] -#[diag(passes_ffi_pure_invalid_target, code = E0755)] -pub(crate) struct FfiPureInvalidTarget { - #[primary_span] - pub attr_span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_ffi_const_invalid_target, code = E0756)] -pub(crate) struct FfiConstInvalidTarget { - #[primary_span] - pub attr_span: Span, -} - -#[derive(LintDiagnostic)] -#[diag(passes_must_use_no_effect)] -pub(crate) struct MustUseNoEffect { - pub article: &'static str, - pub target: rustc_hir::Target, -} - -#[derive(Diagnostic)] #[diag(passes_must_not_suspend)] pub(crate) struct MustNotSuspend { #[primary_span] @@ -481,15 +389,6 @@ pub(crate) struct MustNotSuspend { } #[derive(LintDiagnostic)] -#[diag(passes_cold)] -#[warning] -pub(crate) struct Cold { - #[label] - pub span: Span, - pub on_crate: bool, -} - -#[derive(LintDiagnostic)] #[diag(passes_link)] #[warning] pub(crate) struct Link { @@ -497,17 +396,6 @@ pub(crate) struct Link { pub span: Option<Span>, } -#[derive(LintDiagnostic)] -#[diag(passes_link_name)] -#[warning] -pub(crate) struct LinkName<'a> { - #[help] - pub help_span: Option<Span>, - #[label] - pub span: Span, - pub value: &'a str, -} - #[derive(Diagnostic)] #[diag(passes_no_link)] pub(crate) struct NoLink { @@ -518,24 +406,6 @@ pub(crate) struct NoLink { } #[derive(Diagnostic)] -#[diag(passes_export_name)] -pub(crate) struct ExportName { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_rustc_layout_scalar_valid_range_not_struct)] -pub(crate) struct RustcLayoutScalarValidRangeNotStruct { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(passes_rustc_legacy_const_generics_only)] pub(crate) struct 
RustcLegacyConstGenericsOnly { #[primary_span] @@ -576,42 +446,6 @@ pub(crate) struct RustcDirtyClean { pub span: Span, } -#[derive(LintDiagnostic)] -#[diag(passes_link_section)] -#[warning] -pub(crate) struct LinkSection { - #[label] - pub span: Span, -} - -#[derive(LintDiagnostic)] -#[diag(passes_no_mangle_foreign)] -#[warning] -#[note] -pub(crate) struct NoMangleForeign { - #[label] - pub span: Span, - #[suggestion(code = "", applicability = "machine-applicable")] - pub attr_span: Span, - pub foreign_item_kind: &'static str, -} - -#[derive(LintDiagnostic)] -#[diag(passes_no_mangle)] -#[warning] -pub(crate) struct NoMangle { - #[label] - pub span: Span, -} - -#[derive(LintDiagnostic)] -#[diag(passes_align_on_fields)] -#[warning] -pub(crate) struct AlignOnFields { - #[label] - pub span: Span, -} - #[derive(Diagnostic)] #[diag(passes_repr_conflicting, code = E0566)] pub(crate) struct ReprConflicting { @@ -633,16 +467,6 @@ pub(crate) struct InvalidReprAlignForTarget { pub(crate) struct ReprConflictingLint; #[derive(Diagnostic)] -#[diag(passes_used_static)] -pub(crate) struct UsedStatic { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, - pub target: &'static str, -} - -#[derive(Diagnostic)] #[diag(passes_macro_only_attribute)] pub(crate) struct MacroOnlyAttribute { #[primary_span] @@ -687,24 +511,6 @@ pub(crate) struct RustcAllowConstFnUnstable { } #[derive(Diagnostic)] -#[diag(passes_rustc_unstable_feature_bound)] -pub(crate) struct RustcUnstableFeatureBound { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_rustc_std_internal_symbol)] -pub(crate) struct RustcStdInternalSymbol { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(passes_rustc_pub_transparent)] pub(crate) struct RustcPubTransparent { #[primary_span] @@ -714,15 +520,6 @@ pub(crate) struct RustcPubTransparent { } #[derive(Diagnostic)] -#[diag(passes_rustc_force_inline)] -pub(crate) struct RustcForceInline { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(passes_rustc_force_inline_coro)] pub(crate) struct RustcForceInlineCoro { #[primary_span] @@ -731,53 +528,6 @@ pub(crate) struct RustcForceInlineCoro { pub span: Span, } -#[derive(Diagnostic)] -#[diag(passes_link_ordinal)] -pub(crate) struct LinkOrdinal { - #[primary_span] - pub attr_span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_confusables)] -pub(crate) struct Confusables { - #[primary_span] - pub attr_span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_coroutine_on_non_closure)] -pub(crate) struct CoroutineOnNonClosure { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_linkage)] -pub(crate) struct Linkage { - #[primary_span] - pub attr_span: Span, - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(passes_stability_promotable)] -pub(crate) struct StabilityPromotable { - #[primary_span] - pub attr_span: Span, -} - -#[derive(LintDiagnostic)] -#[diag(passes_deprecated)] -pub(crate) struct Deprecated; - -#[derive(LintDiagnostic)] -#[diag(passes_macro_use)] -pub(crate) struct MacroUse { - pub name: Symbol, -} - #[derive(LintDiagnostic)] pub(crate) enum MacroExport { #[diag(passes_macro_export)] @@ -1281,13 +1031,6 @@ pub(crate) struct UselessAssignment<'a> { } #[derive(LintDiagnostic)] -#[diag(passes_only_has_effect_on)] -pub(crate) struct OnlyHasEffectOn { - pub attr_name: String, - pub target_name: String, -} - 
-#[derive(LintDiagnostic)] #[diag(passes_inline_ignored_for_exported)] #[help] pub(crate) struct InlineIgnoredForExported {} @@ -1759,15 +1502,21 @@ pub(crate) struct AttrCrateLevelOnlySugg { pub attr: Span, } +/// "sanitize attribute not allowed here" #[derive(Diagnostic)] -#[diag(passes_no_sanitize)] -pub(crate) struct NoSanitize<'a> { +#[diag(passes_sanitize_attribute_not_allowed)] +pub(crate) struct SanitizeAttributeNotAllowed { #[primary_span] pub attr_span: Span, - #[label] - pub defn_span: Span, - pub accepted_kind: &'a str, - pub attr_str: &'a str, + /// "not a function, impl block, or module" + #[label(passes_not_fn_impl_mod)] + pub not_fn_impl_mod: Option<Span>, + /// "function has no body" + #[label(passes_no_body)] + pub no_body: Option<Span>, + /// "sanitize attribute can be applied to a function (with body), impl block, or module" + #[help] + pub help: (), } // FIXME(jdonszelmann): move back to rustc_attr @@ -1843,24 +1592,23 @@ pub(crate) struct ReprAlignShouldBeAlign { } #[derive(Diagnostic)] -#[diag(passes_align_should_be_repr_align)] -pub(crate) struct AlignShouldBeReprAlign { +#[diag(passes_custom_mir_phase_requires_dialect)] +pub(crate) struct CustomMirPhaseRequiresDialect { #[primary_span] - #[suggestion( - style = "verbose", - applicability = "machine-applicable", - code = "#[repr(align({align_bytes}))]" - )] - pub span: Span, - pub item: &'static str, - pub align_bytes: u64, + pub attr_span: Span, + #[label] + pub phase_span: Span, } #[derive(Diagnostic)] -#[diag(passes_align_attr_application)] -pub(crate) struct AlignAttrApplication { +#[diag(passes_custom_mir_incompatible_dialect_and_phase)] +pub(crate) struct CustomMirIncompatibleDialectAndPhase { + pub dialect: MirDialect, + pub phase: MirPhase, #[primary_span] - pub hint_span: Span, + pub attr_span: Span, #[label] - pub span: Span, + pub dialect_span: Span, + #[label] + pub phase_span: Span, } diff --git a/compiler/rustc_passes/src/lang_items.rs b/compiler/rustc_passes/src/lang_items.rs index 6fac01827a4..141a60a8ec3 100644 --- a/compiler/rustc_passes/src/lang_items.rs +++ b/compiler/rustc_passes/src/lang_items.rs @@ -329,7 +329,7 @@ impl<'ast, 'tcx> visit::Visitor<'ast> for LanguageItemCollector<'ast, 'tcx> { match &self.parent_item.unwrap().kind { ast::ItemKind::Impl(i) => { if i.of_trait.is_some() { - Target::Method(MethodKind::Trait { body }) + Target::Method(MethodKind::TraitImpl) } else { Target::Method(MethodKind::Inherent) } diff --git a/compiler/rustc_passes/src/reachable.rs b/compiler/rustc_passes/src/reachable.rs index 2f78c22c748..d1a703fc5d8 100644 --- a/compiler/rustc_passes/src/reachable.rs +++ b/compiler/rustc_passes/src/reachable.rs @@ -423,6 +423,7 @@ fn has_custom_linkage(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { if !tcx.def_kind(def_id).has_codegen_attrs() { return false; } + let codegen_attrs = tcx.codegen_fn_attrs(def_id); codegen_attrs.contains_extern_indicator() // FIXME(nbdd0121): `#[used]` are marked as reachable here so it's picked up by diff --git a/compiler/rustc_pattern_analysis/Cargo.toml b/compiler/rustc_pattern_analysis/Cargo.toml index a59f7bbeb9e..39f660c8771 100644 --- a/compiler/rustc_pattern_analysis/Cargo.toml +++ b/compiler/rustc_pattern_analysis/Cargo.toml @@ -19,7 +19,7 @@ rustc_middle = { path = "../rustc_middle", optional = true } rustc_session = { path = "../rustc_session", optional = true } rustc_span = { path = "../rustc_span", optional = true } smallvec = { version = "1.8.1", features = ["union"] } -tracing = "0.1" +tracing.workspace = true # 
tidy-alphabetical-end [dev-dependencies] diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index 0c1b0d622f2..c9bf4fe4449 100644 --- a/compiler/rustc_pattern_analysis/src/rustc.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -8,7 +8,6 @@ use rustc_hir::HirId; use rustc_hir::def_id::DefId; use rustc_index::{Idx, IndexVec}; use rustc_middle::middle::stability::EvalResult; -use rustc_middle::mir::{self, Const}; use rustc_middle::thir::{self, Pat, PatKind, PatRange, PatRangeBoundary}; use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{ @@ -430,7 +429,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { match bdy { PatRangeBoundary::NegInfinity => MaybeInfiniteInt::NegInfinity, PatRangeBoundary::Finite(value) => { - let bits = value.eval_bits(self.tcx, self.typing_env); + let bits = value.try_to_scalar_int().unwrap().to_bits_unchecked(); match *ty.kind() { ty::Int(ity) => { let size = Integer::from_int_ty(&self.tcx, ity).size().bits(); @@ -520,75 +519,54 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { PatKind::Constant { value } => { match ty.kind() { ty::Bool => { - ctor = match value.try_eval_bool(cx.tcx, cx.typing_env) { - Some(b) => Bool(b), - None => Opaque(OpaqueId::new()), - }; + ctor = Bool(value.try_to_bool().unwrap()); fields = vec![]; arity = 0; } ty::Char | ty::Int(_) | ty::Uint(_) => { - ctor = match value.try_eval_bits(cx.tcx, cx.typing_env) { - Some(bits) => { - let x = match *ty.kind() { - ty::Int(ity) => { - let size = Integer::from_int_ty(&cx.tcx, ity).size().bits(); - MaybeInfiniteInt::new_finite_int(bits, size) - } - _ => MaybeInfiniteInt::new_finite_uint(bits), - }; - IntRange(IntRange::from_singleton(x)) - } - None => Opaque(OpaqueId::new()), + ctor = { + let bits = value.valtree.unwrap_leaf().to_bits_unchecked(); + let x = match *ty.kind() { + ty::Int(ity) => { + let size = Integer::from_int_ty(&cx.tcx, ity).size().bits(); + MaybeInfiniteInt::new_finite_int(bits, size) + } + _ => MaybeInfiniteInt::new_finite_uint(bits), + }; + IntRange(IntRange::from_singleton(x)) }; fields = vec![]; arity = 0; } ty::Float(ty::FloatTy::F16) => { - ctor = match value.try_eval_bits(cx.tcx, cx.typing_env) { - Some(bits) => { - use rustc_apfloat::Float; - let value = rustc_apfloat::ieee::Half::from_bits(bits); - F16Range(value, value, RangeEnd::Included) - } - None => Opaque(OpaqueId::new()), - }; + use rustc_apfloat::Float; + let bits = value.valtree.unwrap_leaf().to_u16(); + let value = rustc_apfloat::ieee::Half::from_bits(bits.into()); + ctor = F16Range(value, value, RangeEnd::Included); fields = vec![]; arity = 0; } ty::Float(ty::FloatTy::F32) => { - ctor = match value.try_eval_bits(cx.tcx, cx.typing_env) { - Some(bits) => { - use rustc_apfloat::Float; - let value = rustc_apfloat::ieee::Single::from_bits(bits); - F32Range(value, value, RangeEnd::Included) - } - None => Opaque(OpaqueId::new()), - }; + use rustc_apfloat::Float; + let bits = value.valtree.unwrap_leaf().to_u32(); + let value = rustc_apfloat::ieee::Single::from_bits(bits.into()); + ctor = F32Range(value, value, RangeEnd::Included); fields = vec![]; arity = 0; } ty::Float(ty::FloatTy::F64) => { - ctor = match value.try_eval_bits(cx.tcx, cx.typing_env) { - Some(bits) => { - use rustc_apfloat::Float; - let value = rustc_apfloat::ieee::Double::from_bits(bits); - F64Range(value, value, RangeEnd::Included) - } - None => Opaque(OpaqueId::new()), - }; + use rustc_apfloat::Float; + let bits = value.valtree.unwrap_leaf().to_u64(); + let value = 
rustc_apfloat::ieee::Double::from_bits(bits.into()); + ctor = F64Range(value, value, RangeEnd::Included); fields = vec![]; arity = 0; } ty::Float(ty::FloatTy::F128) => { - ctor = match value.try_eval_bits(cx.tcx, cx.typing_env) { - Some(bits) => { - use rustc_apfloat::Float; - let value = rustc_apfloat::ieee::Quad::from_bits(bits); - F128Range(value, value, RangeEnd::Included) - } - None => Opaque(OpaqueId::new()), - }; + use rustc_apfloat::Float; + let bits = value.valtree.unwrap_leaf().to_u128(); + let value = rustc_apfloat::ieee::Quad::from_bits(bits); + ctor = F128Range(value, value, RangeEnd::Included); fields = vec![]; arity = 0; } @@ -630,8 +608,12 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { } ty::Float(fty) => { use rustc_apfloat::Float; - let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.typing_env)); - let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.typing_env)); + let lo = lo + .as_finite() + .map(|c| c.try_to_scalar_int().unwrap().to_bits_unchecked()); + let hi = hi + .as_finite() + .map(|c| c.try_to_scalar_int().unwrap().to_bits_unchecked()); match fty { ty::FloatTy::F16 => { use rustc_apfloat::ieee::Half; @@ -739,8 +721,8 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { }; match ScalarInt::try_from_uint(bits, size) { Some(scalar) => { - let value = mir::Const::from_scalar(tcx, scalar.into(), ty.inner()); - PatRangeBoundary::Finite(value) + let valtree = ty::ValTree::from_scalar_int(tcx, scalar); + PatRangeBoundary::Finite(valtree) } // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value // for a type, the problem isn't that the value is too small. So it must be too @@ -760,7 +742,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { "_".to_string() } else if range.is_singleton() { let lo = cx.hoist_pat_range_bdy(range.lo, ty); - let value = lo.as_finite().unwrap(); + let value = ty::Value { ty: ty.inner(), valtree: lo.as_finite().unwrap() }; value.to_string() } else { // We convert to an inclusive range for diagnostics. @@ -772,7 +754,9 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do // this). We show this to the user as `usize::MAX..` which is slightly incorrect but // probably clear enough. - lo = PatRangeBoundary::Finite(ty.numeric_max_val(cx.tcx).unwrap()); + let max = ty.numeric_max_val(cx.tcx).unwrap(); + let max = ty::ValTree::from_scalar_int(cx.tcx, max.try_to_scalar_int().unwrap()); + lo = PatRangeBoundary::Finite(max); } let hi = if let Some(hi) = range.hi.minus_one() { hi @@ -907,7 +891,7 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> { type Ty = RevealedTy<'tcx>; type Error = ErrorGuaranteed; type VariantIdx = VariantIdx; - type StrLit = Const<'tcx>; + type StrLit = ty::Value<'tcx>; type ArmData = HirId; type PatData = &'p Pat<'tcx>; diff --git a/compiler/rustc_pattern_analysis/src/rustc/print.rs b/compiler/rustc_pattern_analysis/src/rustc/print.rs index 7649f72f868..cdf62c2553d 100644 --- a/compiler/rustc_pattern_analysis/src/rustc/print.rs +++ b/compiler/rustc_pattern_analysis/src/rustc/print.rs @@ -101,23 +101,11 @@ pub(crate) fn write_struct_like<'tcx>( let num_fields = variant_and_name.as_ref().map_or(subpatterns.len(), |(v, _)| v.fields.len()); if num_fields != 0 || variant_and_name.is_none() { write!(f, "(")?; - for i in 0..num_fields { - write!(f, "{}", start_or_comma())?; - - // Common case: the field is where we expect it. 
- if let Some(p) = subpatterns.get(i) { - if p.field.index() == i { - write!(f, "{}", p.pattern)?; - continue; - } - } - - // Otherwise, we have to go looking for it. - if let Some(p) = subpatterns.iter().find(|p| p.field.index() == i) { - write!(f, "{}", p.pattern)?; - } else { - write!(f, "_")?; - } + for FieldPat { pattern, .. } in subpatterns { + write!(f, "{}{pattern}", start_or_comma())?; + } + if matches!(ty.kind(), ty::Tuple(..)) && num_fields == 1 { + write!(f, ",")?; } write!(f, ")")?; } diff --git a/compiler/rustc_privacy/Cargo.toml b/compiler/rustc_privacy/Cargo.toml index c8bfdb91304..7de58132e13 100644 --- a/compiler/rustc_privacy/Cargo.toml +++ b/compiler/rustc_privacy/Cargo.toml @@ -15,5 +15,5 @@ rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_ty_utils = { path = "../rustc_ty_utils" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_proc_macro/Cargo.toml b/compiler/rustc_proc_macro/Cargo.toml index beb95aa3b52..99e7fc7abc6 100644 --- a/compiler/rustc_proc_macro/Cargo.toml +++ b/compiler/rustc_proc_macro/Cargo.toml @@ -16,7 +16,7 @@ doctest = false [dependencies] # tidy-alphabetical-start -rustc-literal-escaper = "0.0.5" +rustc-literal-escaper.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_public/Cargo.toml b/compiler/rustc_public/Cargo.toml index 70af30c1a5f..71d339b5792 100644 --- a/compiler/rustc_public/Cargo.toml +++ b/compiler/rustc_public/Cargo.toml @@ -14,7 +14,7 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } scoped-tls = "1.0" serde = { version = "1.0.125", features = [ "derive" ] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_query_impl/Cargo.toml b/compiler/rustc_query_impl/Cargo.toml index e5cceacf15d..2005b8b9eca 100644 --- a/compiler/rustc_query_impl/Cargo.toml +++ b/compiler/rustc_query_impl/Cargo.toml @@ -15,5 +15,5 @@ rustc_query_system = { path = "../rustc_query_system" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_query_system/Cargo.toml b/compiler/rustc_query_system/Cargo.toml index 7480ba03474..0df933bc81c 100644 --- a/compiler/rustc_query_system/Cargo.toml +++ b/compiler/rustc_query_system/Cargo.toml @@ -21,7 +21,7 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_thread_pool = { path = "../rustc_thread_pool" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [dependencies.hashbrown] diff --git a/compiler/rustc_resolve/Cargo.toml b/compiler/rustc_resolve/Cargo.toml index eb98a6e85c0..7c5332c1662 100644 --- a/compiler/rustc_resolve/Cargo.toml +++ b/compiler/rustc_resolve/Cargo.toml @@ -5,9 +5,9 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true indexmap = "2.4.0" -itertools = "0.12" +itertools.workspace = true pulldown-cmark = { version = "0.11", features = ["html"], default-features = false } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } @@ -27,6 +27,6 @@ rustc_query_system = { path = "../rustc_query_system" } rustc_session = { path = "../rustc_session" } rustc_span = { path = 
"../rustc_span" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_resolve/messages.ftl b/compiler/rustc_resolve/messages.ftl index ceef558c0cf..47280a93677 100644 --- a/compiler/rustc_resolve/messages.ftl +++ b/compiler/rustc_resolve/messages.ftl @@ -93,6 +93,9 @@ resolve_consider_adding_a_derive = resolve_consider_adding_macro_export = consider adding a `#[macro_export]` to the macro in the imported module +resolve_consider_marking_as_pub_crate = + in case you want to use the macro within this crate only, reduce the visibility to `pub(crate)` + resolve_consider_declaring_with_pub = consider declaring type or module `{$ident}` with `pub` @@ -242,11 +245,14 @@ resolve_lowercase_self = attempt to use a non-constant value in a constant .suggestion = try using `Self` +resolve_macro_cannot_use_as_fn_like = + `{$ident}` exists, but has no rules for function-like invocation + resolve_macro_cannot_use_as_attr = `{$ident}` exists, but has no `attr` rules resolve_macro_cannot_use_as_derive = - `{$ident}` exists, but a declarative macro cannot be used as a derive macro + `{$ident}` exists, but has no `derive` rules resolve_macro_defined_later = a macro with the same name exists, but it appears later diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 3fee2ab6afe..f75a625a279 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -11,7 +11,7 @@ use std::sync::Arc; use rustc_ast::visit::{self, AssocCtxt, Visitor, WalkItemKind}; use rustc_ast::{ self as ast, AssocItem, AssocItemKind, Block, ConstItem, Delegation, Fn, ForeignItem, - ForeignItemKind, Item, ItemKind, NodeId, StaticItem, StmtKind, TyAlias, + ForeignItemKind, Inline, Item, ItemKind, NodeId, StaticItem, StmtKind, TyAlias, }; use rustc_attr_parsing as attr; use rustc_attr_parsing::AttributeParser; @@ -210,6 +210,17 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } + /// Add every proc macro accessible from the current crate to the `macro_map` so diagnostics can + /// find them for suggestions. + pub(crate) fn register_macros_for_all_crates(&mut self) { + if !self.all_crate_macros_already_registered { + for def_id in self.cstore().all_proc_macro_def_ids() { + self.get_macro_by_def_id(def_id); + } + self.all_crate_macros_already_registered = true; + } + } + pub(crate) fn build_reduced_graph( &mut self, fragment: &AstFragment, @@ -474,6 +485,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { root_span, root_id, vis, + vis_span: item.vis.span, }); self.r.indeterminate_imports.push(import); @@ -493,9 +505,6 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { }); } } - // We don't add prelude imports to the globs since they only affect lexical scopes, - // which are not relevant to import resolution. - ImportKind::Glob { is_prelude: true, .. } => {} ImportKind::Glob { .. 
} => current_module.globs.borrow_mut().push(import), _ => unreachable!(), } @@ -658,13 +667,19 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { self.add_import(module_path, kind, use_tree.span, item, root_span, item.id, vis); } ast::UseTreeKind::Glob => { - let kind = ImportKind::Glob { - is_prelude: ast::attr::contains_name(&item.attrs, sym::prelude_import), - max_vis: Cell::new(None), - id, - }; - - self.add_import(prefix, kind, use_tree.span, item, root_span, item.id, vis); + if !ast::attr::contains_name(&item.attrs, sym::prelude_import) { + let kind = ImportKind::Glob { max_vis: Cell::new(None), id }; + self.add_import(prefix, kind, use_tree.span, item, root_span, item.id, vis); + } else { + // Resolve the prelude import early. + let path_res = + self.r.cm().maybe_resolve_path(&prefix, None, &self.parent_scope, None); + if let PathResult::Module(ModuleOrUniformRoot::Module(module)) = path_res { + self.r.prelude = Some(module); + } else { + self.r.dcx().span_err(use_tree.span, "cannot resolve a prelude import"); + } + } } ast::UseTreeKind::Nested { ref items, .. } => { // Ensure there is at most one `self` in the list @@ -798,7 +813,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { ItemKind::Mod(_, ident, ref mod_kind) => { self.r.define_local(parent, ident, TypeNS, res, vis, sp, expansion); - if let ast::ModKind::Loaded(_, _, _, Err(_)) = mod_kind { + if let ast::ModKind::Loaded(_, Inline::No { had_parse_error: Err(_) }, _) = mod_kind + { self.r.mods_with_parse_errors.insert(def_id); } self.parent_scope.module = self.r.new_local_module( @@ -963,6 +979,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { span: item.span, module_path: Vec::new(), vis, + vis_span: item.vis.span, }); if used { self.r.import_use_map.insert(import, Used::Other); @@ -971,40 +988,35 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { let imported_binding = self.r.import(binding, import); if ident.name != kw::Underscore && parent == self.r.graph_root { let norm_ident = Macros20NormalizedIdent::new(ident); + // FIXME: this error is technically unnecessary now when extern prelude is split into + // two scopes, remove it with lang team approval. if let Some(entry) = self.r.extern_prelude.get(&norm_ident) && expansion != LocalExpnId::ROOT && orig_name.is_some() - && !entry.is_import() + && entry.item_binding.is_none() { self.r.dcx().emit_err( errors::MacroExpandedExternCrateCannotShadowExternArguments { span: item.span }, ); - // `return` is intended to discard this binding because it's an - // unregistered ambiguity error which would result in a panic - // caused by inconsistency `path_res` - // more details: https://github.com/rust-lang/rust/pull/111761 - return; } use indexmap::map::Entry; match self.r.extern_prelude.entry(norm_ident) { Entry::Occupied(mut occupied) => { let entry = occupied.get_mut(); - if let Some(old_binding) = entry.binding.get() - && old_binding.is_import() - { + if entry.item_binding.is_some() { let msg = format!("extern crate `{ident}` already in extern prelude"); self.r.tcx.dcx().span_delayed_bug(item.span, msg); } else { - // Binding from `extern crate` item in source code can replace - // a binding from `--extern` on command line here. 
- entry.binding.set(Some(imported_binding)); + entry.item_binding = Some(imported_binding); entry.introduced_by_item = orig_name.is_some(); } entry } Entry::Vacant(vacant) => vacant.insert(ExternPreludeEntry { - binding: Cell::new(Some(imported_binding)), + item_binding: Some(imported_binding), + flag_binding: Cell::new(None), + only_item: true, introduced_by_item: true, }), }; @@ -1102,6 +1114,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { span, module_path: Vec::new(), vis: Visibility::Restricted(CRATE_DEF_ID), + vis_span: item.vis.span, }) }; @@ -1232,7 +1245,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { ItemKind::Fn(box ast::Fn { ident: fn_ident, .. }) => { match self.proc_macro_stub(item, *fn_ident) { Some((macro_kind, ident, span)) => { - let res = Res::Def(DefKind::Macro(macro_kind), def_id.to_def_id()); + let macro_kinds = macro_kind.into(); + let res = Res::Def(DefKind::Macro(macro_kinds), def_id.to_def_id()); let macro_data = MacroData::new(self.r.dummy_ext(macro_kind)); self.r.new_local_macro(def_id, macro_data); self.r.proc_macro_stubs.insert(def_id); @@ -1271,6 +1285,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { span, module_path: Vec::new(), vis, + vis_span: item.vis.span, }); self.r.import_use_map.insert(import, Used::Other); let import_binding = self.r.import(binding, import); diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index 7d51fef28d3..14538df8187 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -5,6 +5,7 @@ use rustc_ast::*; use rustc_attr_parsing::{AttributeParser, Early, OmitDoc, ShouldEmit}; use rustc_expand::expand::AstFragment; use rustc_hir as hir; +use rustc_hir::Target; use rustc_hir::def::{CtorKind, CtorOf, DefKind}; use rustc_hir::def_id::LocalDefId; use rustc_middle::span_bug; @@ -138,6 +139,7 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> { &i.attrs, i.span, i.id, + Target::MacroDef, OmitDoc::Skip, std::convert::identity, |_l| { @@ -149,9 +151,9 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> { let macro_data = self.resolver.compile_macro(def, *ident, &attrs, i.span, i.id, edition); - let macro_kind = macro_data.ext.macro_kind(); + let macro_kinds = macro_data.ext.macro_kinds(); opt_macro_data = Some(macro_data); - DefKind::Macro(macro_kind) + DefKind::Macro(macro_kinds) } ItemKind::GlobalAsm(..) => DefKind::GlobalAsm, ItemKind::Use(use_tree) => { diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index 210ab72678c..337e7d2dd86 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -13,7 +13,7 @@ use rustc_errors::{ use rustc_feature::BUILTIN_ATTRIBUTES; use rustc_hir::attrs::{AttributeKind, CfgEntry, StrippedCfgItem}; use rustc_hir::def::Namespace::{self, *}; -use rustc_hir::def::{self, CtorKind, CtorOf, DefKind, NonMacroAttrKind, PerNS}; +use rustc_hir::def::{self, CtorKind, CtorOf, DefKind, MacroKinds, NonMacroAttrKind, PerNS}; use rustc_hir::def_id::{CRATE_DEF_ID, DefId}; use rustc_hir::{PrimTy, Stability, StabilityLevel, find_attr}; use rustc_middle::bug; @@ -1016,17 +1016,15 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { .emit() } - /// Lookup typo candidate in scope for a macro or import. 
- fn early_lookup_typo_candidate( + pub(crate) fn add_scope_set_candidates( &mut self, + suggestions: &mut Vec<TypoSuggestion>, scope_set: ScopeSet<'ra>, - parent_scope: &ParentScope<'ra>, - ident: Ident, + ps: &ParentScope<'ra>, + ctxt: SyntaxContext, filter_fn: &impl Fn(Res) -> bool, - ) -> Option<TypoSuggestion> { - let mut suggestions = Vec::new(); - let ctxt = ident.span.ctxt(); - self.cm().visit_scopes(scope_set, parent_scope, ctxt, |this, scope, use_prelude, _| { + ) { + self.cm().visit_scopes(scope_set, ps, ctxt, None, |this, scope, use_prelude, _| { match scope { Scope::DeriveHelpers(expn_id) => { let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper); @@ -1041,28 +1039,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } Scope::DeriveHelpersCompat => { - let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelperCompat); - if filter_fn(res) { - for derive in parent_scope.derives { - let parent_scope = &ParentScope { derives: &[], ..*parent_scope }; - let Ok((Some(ext), _)) = this.reborrow().resolve_macro_path( - derive, - Some(MacroKind::Derive), - parent_scope, - false, - false, - None, - None, - ) else { - continue; - }; - suggestions.extend( - ext.helper_attrs - .iter() - .map(|name| TypoSuggestion::typo_from_name(*name, res)), - ); - } - } + // Never recommend deprecated helper attributes. } Scope::MacroRules(macro_rules_scope) => { if let MacroRulesScope::Binding(macro_rules_binding) = macro_rules_scope.get() { @@ -1076,7 +1053,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } Scope::Module(module, _) => { - this.add_module_candidates(module, &mut suggestions, filter_fn, None); + this.add_module_candidates(module, suggestions, filter_fn, None); } Scope::MacroUsePrelude => { suggestions.extend(this.macro_use_prelude.iter().filter_map( @@ -1096,12 +1073,14 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ); } } - Scope::ExternPrelude => { + Scope::ExternPreludeItems => { + // Add idents from both item and flag scopes. suggestions.extend(this.extern_prelude.keys().filter_map(|ident| { let res = Res::Def(DefKind::Mod, CRATE_DEF_ID.to_def_id()); filter_fn(res).then_some(TypoSuggestion::typo_from_ident(ident.0, res)) })); } + Scope::ExternPreludeFlags => {} Scope::ToolPrelude => { let res = Res::NonMacroAttr(NonMacroAttrKind::Tool); suggestions.extend( @@ -1132,6 +1111,19 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { None::<()> }); + } + + /// Lookup typo candidate in scope for a macro or import. + fn early_lookup_typo_candidate( + &mut self, + scope_set: ScopeSet<'ra>, + parent_scope: &ParentScope<'ra>, + ident: Ident, + filter_fn: &impl Fn(Res) -> bool, + ) -> Option<TypoSuggestion> { + let mut suggestions = Vec::new(); + let ctxt = ident.span.ctxt(); + self.add_scope_set_candidates(&mut suggestions, scope_set, parent_scope, ctxt, filter_fn); // Make sure error reporting is deterministic. suggestions.sort_by(|a, b| a.candidate.as_str().cmp(b.candidate.as_str())); @@ -1477,34 +1469,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { krate: &Crate, sugg_span: Option<Span>, ) { - // Bring imported but unused `derive` macros into `macro_map` so we ensure they can be used - // for suggestions. 
- self.cm().visit_scopes( - ScopeSet::Macro(MacroKind::Derive), - &parent_scope, - ident.span.ctxt(), - |this, scope, _use_prelude, _ctxt| { - let Scope::Module(m, _) = scope else { - return None; - }; - for (_, resolution) in this.resolutions(m).borrow().iter() { - let Some(binding) = resolution.borrow().best_binding() else { - continue; - }; - let Res::Def(DefKind::Macro(MacroKind::Derive | MacroKind::Attr), def_id) = - binding.res() - else { - continue; - }; - // By doing this all *imported* macros get added to the `macro_map` even if they - // are *unused*, which makes the later suggestions find them and work. - let _ = this.get_macro_by_def_id(def_id); - } - None::<()> - }, - ); + // Bring all unused `derive` macros into `macro_map` so we ensure they can be used for + // suggestions. + self.register_macros_for_all_crates(); - let is_expected = &|res: Res| res.macro_kind() == Some(macro_kind); + let is_expected = + &|res: Res| res.macro_kinds().is_some_and(|k| k.contains(macro_kind.into())); let suggestion = self.early_lookup_typo_candidate( ScopeSet::Macro(macro_kind), parent_scope, @@ -1553,11 +1523,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { if let Some((def_id, unused_ident)) = unused_macro { let scope = self.local_macro_def_scopes[&def_id]; let parent_nearest = parent_scope.module.nearest_parent_mod(); - if Some(parent_nearest) == scope.opt_def_id() { + let unused_macro_kinds = self.local_macro_map[def_id].ext.macro_kinds(); + if !unused_macro_kinds.contains(macro_kind.into()) { match macro_kind { MacroKind::Bang => { - err.subdiagnostic(MacroDefinedLater { span: unused_ident.span }); - err.subdiagnostic(MacroSuggMovePosition { span: ident.span, ident }); + err.subdiagnostic(MacroRulesNot::Func { span: unused_ident.span, ident }); } MacroKind::Attr => { err.subdiagnostic(MacroRulesNot::Attr { span: unused_ident.span, ident }); @@ -1566,14 +1536,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { err.subdiagnostic(MacroRulesNot::Derive { span: unused_ident.span, ident }); } } - return; } - } - - if self.macro_names.contains(&ident.normalize_to_macros_2_0()) { - err.subdiagnostic(AddedMacroUse); - return; + if Some(parent_nearest) == scope.opt_def_id() { + err.subdiagnostic(MacroDefinedLater { span: unused_ident.span }); + err.subdiagnostic(MacroSuggMovePosition { span: ident.span, ident }); + return; + } } if ident.name == kw::Default @@ -1588,7 +1557,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }); } for ns in [Namespace::MacroNS, Namespace::TypeNS, Namespace::ValueNS] { - let Ok(binding) = self.cm().early_resolve_ident_in_lexical_scope( + let Ok(binding) = self.cm().resolve_ident_in_scope_set( ident, ScopeSet::All(ns), parent_scope, @@ -1601,13 +1570,18 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }; let desc = match binding.res() { - Res::Def(DefKind::Macro(MacroKind::Bang), _) => "a function-like macro".to_string(), - Res::Def(DefKind::Macro(MacroKind::Attr), _) | Res::NonMacroAttr(..) => { + Res::Def(DefKind::Macro(MacroKinds::BANG), _) => { + "a function-like macro".to_string() + } + Res::Def(DefKind::Macro(MacroKinds::ATTR), _) | Res::NonMacroAttr(..) => { format!("an attribute: `#[{ident}]`") } - Res::Def(DefKind::Macro(MacroKind::Derive), _) => { + Res::Def(DefKind::Macro(MacroKinds::DERIVE), _) => { format!("a derive macro: `#[derive({ident})]`") } + Res::Def(DefKind::Macro(kinds), _) => { + format!("{} {}", kinds.article(), kinds.descr()) + } Res::ToolMod => { // Don't confuse the user with tool modules. 
continue; @@ -1644,6 +1618,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { err.subdiagnostic(note); return; } + + if self.macro_names.contains(&ident.normalize_to_macros_2_0()) { + err.subdiagnostic(AddedMacroUse); + return; + } } /// Given an attribute macro that failed to be resolved, look for `derive` macros that could @@ -1862,14 +1841,20 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } - fn ambiguity_diagnostics(&self, ambiguity_error: &AmbiguityError<'_>) -> AmbiguityErrorDiag { + fn ambiguity_diagnostics(&self, ambiguity_error: &AmbiguityError<'ra>) -> AmbiguityErrorDiag { let AmbiguityError { kind, ident, b1, b2, misc1, misc2, .. } = *ambiguity_error; + let extern_prelude_ambiguity = || { + self.extern_prelude.get(&Macros20NormalizedIdent::new(ident)).is_some_and(|entry| { + entry.item_binding == Some(b1) && entry.flag_binding.get() == Some(b2) + }) + }; let (b1, b2, misc1, misc2, swapped) = if b2.span.is_dummy() && !b1.span.is_dummy() { // We have to print the span-less alternative first, otherwise formatting looks bad. (b2, b1, misc2, misc1, true) } else { (b1, b2, misc1, misc2, false) }; + let could_refer_to = |b: NameBinding<'_>, misc: AmbiguityErrorMisc, also: &str| { let what = self.binding_description(b, ident, misc == AmbiguityErrorMisc::FromPrelude); let note_msg = format!("`{ident}` could{also} refer to {what}"); @@ -1885,7 +1870,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { "consider adding an explicit import of `{ident}` to disambiguate" )) } - if b.is_extern_crate() && ident.span.at_least_rust_2018() { + if b.is_extern_crate() && ident.span.at_least_rust_2018() && !extern_prelude_ambiguity() + { help_msgs.push(format!("use `::{ident}` to refer to this {thing} unambiguously")) } match misc { @@ -2179,9 +2165,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let ast::ExprKind::Struct(struct_expr) = &expr.kind else { return }; // We don't have to handle type-relative paths because they're forbidden in ADT // expressions, but that would change with `#[feature(more_qualified_paths)]`. - let Some(Res::Def(_, def_id)) = - self.partial_res_map[&struct_expr.path.segments.iter().last().unwrap().id].full_res() - else { + let Some(segment) = struct_expr.path.segments.last() else { return }; + let Some(partial_res) = self.partial_res_map.get(&segment.id) else { return }; + let Some(Res::Def(_, def_id)) = partial_res.full_res() else { return; }; let Some(default_fields) = self.field_defaults(def_id) else { return }; @@ -2385,7 +2371,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } else { self.cm() - .early_resolve_ident_in_lexical_scope( + .resolve_ident_in_scope_set( ident, ScopeSet::All(ns_to_try), parent_scope, @@ -2488,7 +2474,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }, ) }); - if let Ok(binding) = self.cm().early_resolve_ident_in_lexical_scope( + if let Ok(binding) = self.cm().resolve_ident_in_scope_set( ident, ScopeSet::All(ValueNS), parent_scope, @@ -2748,9 +2734,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let binding_key = BindingKey::new(ident, MacroNS); let binding = self.resolution(crate_module, binding_key)?.binding()?; - let Res::Def(DefKind::Macro(MacroKind::Bang), _) = binding.res() else { + let Res::Def(DefKind::Macro(kinds), _) = binding.res() else { return None; }; + if !kinds.contains(MacroKinds::BANG) { + return None; + } let module_name = crate_module.kind.name().unwrap_or(kw::Crate); let import_snippet = match import.kind { ImportKind::Single { source, target, .. 
} if source != target => { @@ -3421,7 +3410,7 @@ impl<'tcx> visit::Visitor<'tcx> for UsePlacementFinder { fn visit_item(&mut self, item: &'tcx ast::Item) { if self.target_module == item.id { - if let ItemKind::Mod(_, _, ModKind::Loaded(items, _inline, mod_spans, _)) = &item.kind { + if let ItemKind::Mod(_, _, ModKind::Loaded(items, _inline, mod_spans)) = &item.kind { let inject = mod_spans.inject_use_span; if is_span_suitable_for_use_injection(inject) { self.first_legal_span = Some(inject); diff --git a/compiler/rustc_resolve/src/errors.rs b/compiler/rustc_resolve/src/errors.rs index 2747ba135ed..63d6fa23a14 100644 --- a/compiler/rustc_resolve/src/errors.rs +++ b/compiler/rustc_resolve/src/errors.rs @@ -672,6 +672,12 @@ pub(crate) struct MacroSuggMovePosition { #[derive(Subdiagnostic)] pub(crate) enum MacroRulesNot { + #[label(resolve_macro_cannot_use_as_fn_like)] + Func { + #[primary_span] + span: Span, + ident: Ident, + }, #[label(resolve_macro_cannot_use_as_attr)] Attr { #[primary_span] @@ -770,6 +776,17 @@ pub(crate) struct ConsiderAddingMacroExport { } #[derive(Subdiagnostic)] +#[suggestion( + resolve_consider_marking_as_pub_crate, + code = "pub(crate)", + applicability = "maybe-incorrect" +)] +pub(crate) struct ConsiderMarkingAsPubCrate { + #[primary_span] + pub(crate) vis_span: Span, +} + +#[derive(Subdiagnostic)] #[note(resolve_consider_marking_as_pub)] pub(crate) struct ConsiderMarkingAsPub { #[primary_span] diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index 9efcef695b7..dae42645bec 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -2,7 +2,7 @@ use Determinacy::*; use Namespace::*; use rustc_ast::{self as ast, NodeId}; use rustc_errors::ErrorGuaranteed; -use rustc_hir::def::{DefKind, Namespace, NonMacroAttrKind, PartialRes, PerNS}; +use rustc_hir::def::{DefKind, MacroKinds, Namespace, NonMacroAttrKind, PartialRes, PerNS}; use rustc_middle::bug; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK; @@ -19,7 +19,7 @@ use crate::{ AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, BindingKey, CmResolver, Determinacy, Finalize, ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot, NameBinding, NameBindingKind, ParentScope, PathResult, PrivacyError, Res, ResolutionError, - Resolver, Scope, ScopeSet, Segment, Used, Weak, errors, + Resolver, Scope, ScopeSet, Segment, Stage, Used, Weak, errors, }; #[derive(Copy, Clone)] @@ -49,6 +49,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { scope_set: ScopeSet<'ra>, parent_scope: &ParentScope<'ra>, ctxt: SyntaxContext, + derive_fallback_lint_id: Option<NodeId>, mut visitor: impl FnMut( &mut CmResolver<'r, 'ra, 'tcx>, Scope<'ra>, @@ -99,20 +100,21 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let rust_2015 = ctxt.edition().is_rust_2015(); let (ns, macro_kind) = match scope_set { - ScopeSet::All(ns) - | ScopeSet::ModuleAndExternPrelude(ns, _) - | ScopeSet::Late(ns, ..) => (ns, None), + ScopeSet::All(ns) | ScopeSet::ModuleAndExternPrelude(ns, _) => (ns, None), + ScopeSet::ExternPrelude => (TypeNS, None), ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind)), }; let module = match scope_set { // Start with the specified module. - ScopeSet::Late(_, module, _) | ScopeSet::ModuleAndExternPrelude(_, module) => module, + ScopeSet::ModuleAndExternPrelude(_, module) => module, // Jump out of trait or enum modules, they do not act as scopes. 
_ => parent_scope.module.nearest_item_scope(), }; let module_and_extern_prelude = matches!(scope_set, ScopeSet::ModuleAndExternPrelude(..)); + let extern_prelude = matches!(scope_set, ScopeSet::ExternPrelude); let mut scope = match ns { _ if module_and_extern_prelude => Scope::Module(module, None), + _ if extern_prelude => Scope::ExternPreludeItems, TypeNS | ValueNS => Scope::Module(module, None), MacroNS => Scope::DeriveHelpers(parent_scope.expansion), }; @@ -143,7 +145,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { Scope::Module(..) => true, Scope::MacroUsePrelude => use_prelude || rust_2015, Scope::BuiltinAttrs => true, - Scope::ExternPrelude => use_prelude || module_and_extern_prelude, + Scope::ExternPreludeItems | Scope::ExternPreludeFlags => { + use_prelude || module_and_extern_prelude || extern_prelude + } Scope::ToolPrelude => use_prelude, Scope::StdLibPrelude => use_prelude || ns == MacroNS, Scope::BuiltinTypes => true, @@ -182,16 +186,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { Scope::Module(..) if module_and_extern_prelude => match ns { TypeNS => { ctxt.adjust(ExpnId::root()); - Scope::ExternPrelude + Scope::ExternPreludeItems } ValueNS | MacroNS => break, }, Scope::Module(module, prev_lint_id) => { use_prelude = !module.no_implicit_prelude; - let derive_fallback_lint_id = match scope_set { - ScopeSet::Late(.., lint_id) => lint_id, - _ => None, - }; match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) { Some((parent_module, lint_id)) => { Scope::Module(parent_module, lint_id.or(prev_lint_id)) @@ -199,7 +199,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { None => { ctxt.adjust(ExpnId::root()); match ns { - TypeNS => Scope::ExternPrelude, + TypeNS => Scope::ExternPreludeItems, ValueNS => Scope::StdLibPrelude, MacroNS => Scope::MacroUsePrelude, } @@ -208,8 +208,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } Scope::MacroUsePrelude => Scope::StdLibPrelude, Scope::BuiltinAttrs => break, // nowhere else to search - Scope::ExternPrelude if module_and_extern_prelude => break, - Scope::ExternPrelude => Scope::ToolPrelude, + Scope::ExternPreludeItems => Scope::ExternPreludeFlags, + Scope::ExternPreludeFlags if module_and_extern_prelude || extern_prelude => break, + Scope::ExternPreludeFlags => Scope::ToolPrelude, Scope::ToolPrelude => Scope::StdLibPrelude, Scope::StdLibPrelude => match ns { TypeNS => Scope::BuiltinTypes, @@ -259,7 +260,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { { let ext = &self.get_macro_by_def_id(def_id).ext; if ext.builtin_name.is_none() - && ext.macro_kind() == MacroKind::Derive + && ext.macro_kinds() == MacroKinds::DERIVE && parent.expansion.outer_expn_is_descendant_of(*ctxt) { return Some((parent, derive_fallback_lint_id)); @@ -312,7 +313,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let normalized_ident = Ident { span: normalized_span, ..ident }; // Walk backwards up the ribs in scope. - let mut module = self.graph_root; for (i, rib) in ribs.iter().enumerate().rev() { debug!("walk rib\n{:?}", rib.bindings); // Use the rib kind to determine whether we are resolving parameters @@ -328,60 +328,54 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { *original_rib_ident_def, ribs, ))); + } else if let RibKind::Block(Some(module)) = rib.kind + && let Ok(binding) = self.cm().resolve_ident_in_module_unadjusted( + ModuleOrUniformRoot::Module(module), + ident, + ns, + parent_scope, + Shadowing::Unrestricted, + finalize.map(|finalize| Finalize { used: Used::Scope, ..finalize }), + ignore_binding, + None, + ) + { + // The ident resolves to an item in a block. 
+ return Some(LexicalScopeBinding::Item(binding)); + } else if let RibKind::Module(module) = rib.kind { + // Encountered a module item, abandon ribs and look into that module and preludes. + let parent_scope = &ParentScope { module, ..*parent_scope }; + let finalize = finalize.map(|f| Finalize { stage: Stage::Late, ..f }); + return self + .cm() + .resolve_ident_in_scope_set( + orig_ident, + ScopeSet::All(ns), + parent_scope, + finalize, + finalize.is_some(), + ignore_binding, + None, + ) + .ok() + .map(LexicalScopeBinding::Item); } - module = match rib.kind { - RibKind::Module(module) => module, - RibKind::MacroDefinition(def) if def == self.macro_def(ident.span.ctxt()) => { - // If an invocation of this macro created `ident`, give up on `ident` - // and switch to `ident`'s source from the macro definition. - ident.span.remove_mark(); - continue; - } - _ => continue, - }; - - match module.kind { - ModuleKind::Block => {} // We can see through blocks - _ => break, - } - - let item = self.cm().resolve_ident_in_module_unadjusted( - ModuleOrUniformRoot::Module(module), - ident, - ns, - parent_scope, - Shadowing::Unrestricted, - finalize.map(|finalize| Finalize { used: Used::Scope, ..finalize }), - ignore_binding, - None, - ); - if let Ok(binding) = item { - // The ident resolves to an item. - return Some(LexicalScopeBinding::Item(binding)); + if let RibKind::MacroDefinition(def) = rib.kind + && def == self.macro_def(ident.span.ctxt()) + { + // If an invocation of this macro created `ident`, give up on `ident` + // and switch to `ident`'s source from the macro definition. + ident.span.remove_mark(); } } - self.cm() - .early_resolve_ident_in_lexical_scope( - orig_ident, - ScopeSet::Late(ns, module, finalize.map(|finalize| finalize.node_id)), - parent_scope, - finalize, - finalize.is_some(), - ignore_binding, - None, - ) - .ok() - .map(LexicalScopeBinding::Item) + + unreachable!() } - /// Resolve an identifier in lexical scope. - /// This is a variation of `fn resolve_ident_in_lexical_scope` that can be run during - /// expansion and import resolution (perhaps they can be merged in the future). - /// The function is used for resolving initial segments of macro paths (e.g., `foo` in - /// `foo::bar!();` or `foo!();`) and also for import paths on 2018 edition. + /// Resolve an identifier in the specified set of scopes. #[instrument(level = "debug", skip(self))] - pub(crate) fn early_resolve_ident_in_lexical_scope<'r>( + pub(crate) fn resolve_ident_in_scope_set<'r>( self: CmResolver<'r, 'ra, 'tcx>, orig_ident: Ident, scope_set: ScopeSet<'ra>, @@ -410,9 +404,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } let (ns, macro_kind) = match scope_set { - ScopeSet::All(ns) - | ScopeSet::ModuleAndExternPrelude(ns, _) - | ScopeSet::Late(ns, ..) => (ns, None), + ScopeSet::All(ns) | ScopeSet::ModuleAndExternPrelude(ns, _) => (ns, None), + ScopeSet::ExternPrelude => (TypeNS, None), ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind)), }; @@ -429,12 +422,21 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // to detect potential ambiguities. let mut innermost_result: Option<(NameBinding<'_>, Flags)> = None; let mut determinacy = Determinacy::Determined; + // Shadowed bindings don't need to be marked as used or non-speculatively loaded. + macro finalize_scope() { + if innermost_result.is_none() { finalize } else { None } + } // Go through all the scopes and try to resolve the name. + let derive_fallback_lint_id = match finalize { + Some(Finalize { node_id, stage: Stage::Late, .. 
}) => Some(node_id), + _ => None, + }; let break_result = self.visit_scopes( scope_set, parent_scope, orig_ident.span.ctxt(), + derive_fallback_lint_id, |this, scope, use_prelude, ctxt| { let ident = Ident::new(orig_ident.name, orig_ident.span.with_ctxt(ctxt)); let result = match scope { @@ -448,17 +450,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } Scope::DeriveHelpersCompat => { - // FIXME: Try running this logic earlier, to allocate name bindings for - // legacy derive helpers when creating an attribute invocation with - // following derives. Legacy derive helpers are not common, so it shouldn't - // affect performance. It should also allow to remove the `derives` - // component from `ParentScope`. let mut result = Err(Determinacy::Determined); for derive in parent_scope.derives { let parent_scope = &ParentScope { derives: &[], ..*parent_scope }; match this.reborrow().resolve_macro_path( derive, - Some(MacroKind::Derive), + MacroKind::Derive, parent_scope, true, force, @@ -494,7 +491,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { _ => Err(Determinacy::Determined), }, Scope::Module(module, derive_fallback_lint_id) => { - let (adjusted_parent_scope, finalize) = + // FIXME: use `finalize_scope` here. + let (adjusted_parent_scope, adjusted_finalize) = if matches!(scope_set, ScopeSet::ModuleAndExternPrelude(..)) { (parent_scope, finalize) } else { @@ -508,12 +506,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ident, ns, adjusted_parent_scope, - if matches!(scope_set, ScopeSet::Late(..)) { - Shadowing::Unrestricted - } else { - Shadowing::Restricted - }, - finalize, + Shadowing::Restricted, + adjusted_finalize, ignore_binding, ignore_import, ); @@ -526,7 +520,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { orig_ident.span, BuiltinLintDiag::ProcMacroDeriveResolutionFallback { span: orig_ident.span, - ns, + ns_descr: ns.descr(), ident, }, ); @@ -561,14 +555,21 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { Some(binding) => Ok((*binding, Flags::empty())), None => Err(Determinacy::Determined), }, - Scope::ExternPrelude => { - match this.reborrow().extern_prelude_get(ident, finalize.is_some()) { + Scope::ExternPreludeItems => { + // FIXME: use `finalize_scope` here. 
+ match this.reborrow().extern_prelude_get_item(ident, finalize.is_some()) { Some(binding) => Ok((binding, Flags::empty())), None => Err(Determinacy::determined( this.graph_root.unexpanded_invocations.borrow().is_empty(), )), } } + Scope::ExternPreludeFlags => { + match this.extern_prelude_get_flag(ident, finalize_scope!().is_some()) { + Some(binding) => Ok((binding, Flags::empty())), + None => Err(Determinacy::Determined), + } + } Scope::ToolPrelude => match this.registered_tool_bindings.get(&ident) { Some(binding) => Ok((*binding, Flags::empty())), None => Err(Determinacy::Determined), @@ -599,8 +600,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { if matches!(ident.name, sym::f16) && !this.tcx.features().f16() && !ident.span.allows_unstable(sym::f16) - && finalize.is_some() - && innermost_result.is_none() + && finalize_scope!().is_some() { feature_err( this.tcx.sess, @@ -613,8 +613,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { if matches!(ident.name, sym::f128) && !this.tcx.features().f128() && !ident.span.allows_unstable(sym::f128) - && finalize.is_some() - && innermost_result.is_none() + && finalize_scope!().is_some() { feature_err( this.tcx.sess, @@ -632,21 +631,15 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { match result { Ok((binding, flags)) => { - let binding_macro_kind = binding.macro_kind(); - // If we're looking for an attribute, that might be supported by a - // `macro_rules!` macro. - // FIXME: Replace this with tracking multiple macro kinds for one Def. - if !(sub_namespace_match(binding_macro_kind, macro_kind) - || (binding_macro_kind == Some(MacroKind::Bang) - && macro_kind == Some(MacroKind::Attr) - && this - .get_macro(binding.res()) - .is_some_and(|macro_data| macro_data.attr_ext.is_some()))) - { + if !sub_namespace_match(binding.macro_kinds(), macro_kind) { return None; } - if finalize.is_none() || matches!(scope_set, ScopeSet::Late(..)) { + // Below we report various ambiguity errors. + // We do not need to report them if we are either in speculative resolution, + // or in late resolution when everything is already imported and expanded + // and no ambiguities exist. + if matches!(finalize, None | Some(Finalize { stage: Stage::Late, .. })) { return Some(Ok(binding)); } @@ -814,7 +807,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ModuleOrUniformRoot::Module(module) => module, ModuleOrUniformRoot::ModuleAndExternPrelude(module) => { assert_eq!(shadowing, Shadowing::Unrestricted); - let binding = self.early_resolve_ident_in_lexical_scope( + let binding = self.resolve_ident_in_scope_set( ident, ScopeSet::ModuleAndExternPrelude(ns, module), parent_scope, @@ -829,15 +822,17 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { assert_eq!(shadowing, Shadowing::Unrestricted); return if ns != TypeNS { Err((Determined, Weak::No)) - } else if let Some(binding) = - self.reborrow().extern_prelude_get(ident, finalize.is_some()) - { - Ok(binding) - } else if !self.graph_root.unexpanded_invocations.borrow().is_empty() { - // Macro-expanded `extern crate` items can add names to extern prelude. 
- Err((Undetermined, Weak::No)) } else { - Err((Determined, Weak::No)) + let binding = self.resolve_ident_in_scope_set( + ident, + ScopeSet::ExternPrelude, + parent_scope, + finalize, + finalize.is_some(), + ignore_binding, + ignore_import, + ); + return binding.map_err(|determinacy| (determinacy, Weak::No)); }; } ModuleOrUniformRoot::CurrentScope => { @@ -853,7 +848,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } - let binding = self.early_resolve_ident_in_lexical_scope( + let binding = self.resolve_ident_in_scope_set( ident, ScopeSet::All(ns), parent_scope, @@ -946,7 +941,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Now we are in situation when new item/import can appear only from a glob or a macro // expansion. With restricted shadowing names from globs and macro expansions cannot // shadow names from outer scopes, so we can freely fallback from module search to search - // in outer scopes. For `early_resolve_ident_in_lexical_scope` to continue search in outer + // in outer scopes. For `resolve_ident_in_scope_set` to continue search in outer // scopes we return `Undetermined` with `Weak::Yes`. // Check if one of unexpanded macros can still define the name, @@ -1041,6 +1036,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Forbid expanded shadowing to avoid time travel. if let Some(shadowed_glob) = shadowed_glob && shadowing == Shadowing::Restricted + && finalize.stage == Stage::Early && binding.expansion != LocalExpnId::ROOT && binding.res() != shadowed_glob.res() { @@ -1167,6 +1163,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { for rib in ribs { match rib.kind { RibKind::Normal + | RibKind::Block(..) | RibKind::FnOrCoroutine | RibKind::Module(..) | RibKind::MacroDefinition(..) @@ -1259,6 +1256,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { for rib in ribs { let (has_generic_params, def_kind) = match rib.kind { RibKind::Normal + | RibKind::Block(..) | RibKind::FnOrCoroutine | RibKind::Module(..) | RibKind::MacroDefinition(..) @@ -1352,6 +1350,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { for rib in ribs { let (has_generic_params, def_kind) = match rib.kind { RibKind::Normal + | RibKind::Block(..) | RibKind::FnOrCoroutine | RibKind::Module(..) | RibKind::MacroDefinition(..) @@ -1633,7 +1632,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { _ => Err(Determinacy::determined(finalize.is_some())), } } else { - self.reborrow().early_resolve_ident_in_lexical_scope( + self.reborrow().resolve_ident_in_scope_set( ident, ScopeSet::All(ns), parent_scope, diff --git a/compiler/rustc_resolve/src/imports.rs b/compiler/rustc_resolve/src/imports.rs index 403d440bee7..d7fc028287f 100644 --- a/compiler/rustc_resolve/src/imports.rs +++ b/compiler/rustc_resolve/src/imports.rs @@ -30,7 +30,7 @@ use crate::diagnostics::{DiagMode, Suggestion, import_candidates}; use crate::errors::{ CannotBeReexportedCratePublic, CannotBeReexportedCratePublicNS, CannotBeReexportedPrivate, CannotBeReexportedPrivateNS, CannotDetermineImportResolution, CannotGlobImportAllCrates, - ConsiderAddingMacroExport, ConsiderMarkingAsPub, + ConsiderAddingMacroExport, ConsiderMarkingAsPub, ConsiderMarkingAsPubCrate, }; use crate::{ AmbiguityError, AmbiguityKind, BindingKey, CmResolver, Determinacy, Finalize, ImportSuggestion, @@ -87,7 +87,6 @@ pub(crate) enum ImportKind<'ra> { id: NodeId, }, Glob { - is_prelude: bool, // The visibility of the greatest re-export. // n.b. `max_vis` is only used in `finalize_import` to check for re-export errors. 
max_vis: Cell<Option<Visibility>>, @@ -125,12 +124,9 @@ impl<'ra> std::fmt::Debug for ImportKind<'ra> { .field("nested", nested) .field("id", id) .finish(), - Glob { is_prelude, max_vis, id } => f - .debug_struct("Glob") - .field("is_prelude", is_prelude) - .field("max_vis", max_vis) - .field("id", id) - .finish(), + Glob { max_vis, id } => { + f.debug_struct("Glob").field("max_vis", max_vis).field("id", id).finish() + } ExternCrate { source, target, id } => f .debug_struct("ExternCrate") .field("source", source) @@ -188,6 +184,9 @@ pub(crate) struct ImportData<'ra> { /// |`use foo` | `ModuleOrUniformRoot::CurrentScope` | - | pub imported_module: Cell<Option<ModuleOrUniformRoot<'ra>>>, pub vis: Visibility, + + /// Span of the visibility. + pub vis_span: Span, } /// All imports are unique and allocated on a same arena, @@ -870,7 +869,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } ImportKind::Glob { .. } => { // FIXME: Use mutable resolver directly as a hack, this should be an output of - // specualtive resolution. + // speculative resolution. self.get_mut_unchecked().resolve_glob_import(import); return 0; } @@ -907,7 +906,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // We need the `target`, `source` can be extracted. let imported_binding = this.import(binding, import); // FIXME: Use mutable resolver directly as a hack, this should be an output of - // specualtive resolution. + // speculative resolution. this.get_mut_unchecked().define_binding_local( parent, target, @@ -921,7 +920,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { if target.name != kw::Underscore { let key = BindingKey::new(target, ns); // FIXME: Use mutable resolver directly as a hack, this should be an output of - // specualtive resolution. + // speculative resolution. this.get_mut_unchecked().update_local_resolution( parent, key, @@ -1073,7 +1072,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ImportKind::Single { source, target, ref bindings, type_ns_only, id, .. } => { (source, target, bindings, type_ns_only, id) } - ImportKind::Glob { is_prelude, ref max_vis, id } => { + ImportKind::Glob { ref max_vis, id } => { if import.module_path.len() <= 1 { // HACK(eddyb) `lint_if_path_starts_with_module` needs at least // 2 segments, so the `resolve_path` above won't trigger it. @@ -1096,8 +1095,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { module: None, }); } - if !is_prelude - && let Some(max_vis) = max_vis.get() + if let Some(max_vis) = max_vis.get() && !max_vis.is_at_least(import.vis, self.tcx) { let def_id = self.local_def_id(id); @@ -1373,6 +1371,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { err.subdiagnostic( ConsiderAddingMacroExport { span: binding.span, }); + err.subdiagnostic( ConsiderMarkingAsPubCrate { + vis_span: import.vis_span, + }); } _ => { err.subdiagnostic( ConsiderMarkingAsPub { @@ -1445,7 +1446,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { return; } - match this.cm().early_resolve_ident_in_lexical_scope( + match this.cm().resolve_ident_in_scope_set( target, ScopeSet::All(ns), &import.parent_scope, @@ -1485,7 +1486,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { fn resolve_glob_import(&mut self, import: Import<'ra>) { // This function is only called for glob imports. - let ImportKind::Glob { id, is_prelude, .. } = import.kind else { unreachable!() }; + let ImportKind::Glob { id, .. 
} = import.kind else { unreachable!() }; let ModuleOrUniformRoot::Module(module) = import.imported_module.get().unwrap() else { self.dcx().emit_err(CannotGlobImportAllCrates { span: import.span }); @@ -1504,9 +1505,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { if module == import.parent_scope.module { return; - } else if is_prelude { - self.prelude = Some(module); - return; } // Add to module's glob_importers diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index e52cbeb733a..d4f7fb276a9 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -192,6 +192,13 @@ pub(crate) enum RibKind<'ra> { /// No restriction needs to be applied. Normal, + /// We passed through an `ast::Block`. + /// Behaves like `Normal`, but also partially like `Module` if the block contains items. + /// `Block(None)` must be always processed in the same way as `Block(Some(module))` + /// with empty `module`. The module can be `None` only because creation of some definitely + /// empty modules is skipped as an optimization. + Block(Option<Module<'ra>>), + /// We passed through an impl or trait and are now in one of its /// methods or associated types. Allow references to ty params that impl or trait /// binds. Disallow any other upvars (including other ty params that are @@ -210,7 +217,7 @@ pub(crate) enum RibKind<'ra> { /// All other constants aren't allowed to use generic params at all. ConstantItem(ConstantHasGenerics, Option<(Ident, ConstantItemKind)>), - /// We passed through a module. + /// We passed through a module item. Module(Module<'ra>), /// We passed through a `macro_rules!` statement @@ -242,6 +249,7 @@ impl RibKind<'_> { pub(crate) fn contains_params(&self) -> bool { match self { RibKind::Normal + | RibKind::Block(..) | RibKind::FnOrCoroutine | RibKind::ConstantItem(..) | RibKind::Module(_) @@ -258,15 +266,8 @@ impl RibKind<'_> { fn is_label_barrier(self) -> bool { match self { RibKind::Normal | RibKind::MacroDefinition(..) => false, - - RibKind::AssocItem - | RibKind::FnOrCoroutine - | RibKind::Item(..) - | RibKind::ConstantItem(..) - | RibKind::Module(..) - | RibKind::ForwardGenericParamBan(_) - | RibKind::ConstParamTy - | RibKind::InlineAsmSym => true, + RibKind::FnOrCoroutine | RibKind::ConstantItem(..) => true, + kind => bug!("unexpected rib kind: {kind:?}"), } } } @@ -1527,19 +1528,6 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { ret } - fn with_mod_rib<T>(&mut self, id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T { - let module = self.r.expect_module(self.r.local_def_id(id).to_def_id()); - // Move down in the graph. - let orig_module = replace(&mut self.parent_scope.module, module); - self.with_rib(ValueNS, RibKind::Module(module), |this| { - this.with_rib(TypeNS, RibKind::Module(module), |this| { - let ret = f(this); - this.parent_scope.module = orig_module; - ret - }) - }) - } - fn visit_generic_params(&mut self, params: &'ast [GenericParam], add_self_upper: bool) { // For type parameter defaults, we have to ban access // to following type parameters, as the GenericArgs can only @@ -2677,20 +2665,25 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { } ItemKind::Mod(..) 
=> { - self.with_mod_rib(item.id, |this| { - if mod_inner_docs { - this.resolve_doc_links(&item.attrs, MaybeExported::Ok(item.id)); - } - let old_macro_rules = this.parent_scope.macro_rules; - visit::walk_item(this, item); - // Maintain macro_rules scopes in the same way as during early resolution - // for diagnostics and doc links. - if item.attrs.iter().all(|attr| { - !attr.has_name(sym::macro_use) && !attr.has_name(sym::macro_escape) - }) { - this.parent_scope.macro_rules = old_macro_rules; - } + let module = self.r.expect_module(self.r.local_def_id(item.id).to_def_id()); + let orig_module = replace(&mut self.parent_scope.module, module); + self.with_rib(ValueNS, RibKind::Module(module), |this| { + this.with_rib(TypeNS, RibKind::Module(module), |this| { + if mod_inner_docs { + this.resolve_doc_links(&item.attrs, MaybeExported::Ok(item.id)); + } + let old_macro_rules = this.parent_scope.macro_rules; + visit::walk_item(this, item); + // Maintain macro_rules scopes in the same way as during early resolution + // for diagnostics and doc links. + if item.attrs.iter().all(|attr| { + !attr.has_name(sym::macro_use) && !attr.has_name(sym::macro_escape) + }) { + this.parent_scope.macro_rules = old_macro_rules; + } + }) }); + self.parent_scope.module = orig_module; } ItemKind::Static(box ast::StaticItem { @@ -2821,9 +2814,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { // We also can't shadow bindings from associated parent items. for ns in [ValueNS, TypeNS] { for parent_rib in self.ribs[ns].iter().rev() { - // Break at mod level, to account for nested items which are + // Break at module or block level, to account for nested items which are // allowed to shadow generic param names. - if matches!(parent_rib.kind, RibKind::Module(..)) { + if matches!(parent_rib.kind, RibKind::Module(..) | RibKind::Block(..)) { break; } @@ -4277,7 +4270,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { if path.len() == 2 && let [segment] = prefix_path { - // Delay to check whether methond name is an associated function or not + // Delay to check whether method name is an associated function or not // ``` // let foo = Foo {}; // foo::bar(); // possibly suggest to foo.bar(); @@ -4315,7 +4308,6 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { qself, path, ns, - path_span, source.defer_to_typeck(), finalize, source, @@ -4438,7 +4430,6 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { qself: &Option<Box<QSelf>>, path: &[Segment], primary_ns: Namespace, - span: Span, defer_to_typeck: bool, finalize: Finalize, source: PathSource<'_, 'ast, 'ra>, @@ -4463,21 +4454,11 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { } assert!(primary_ns != MacroNS); - - if qself.is_none() { - let path_seg = |seg: &Segment| PathSegment::from_ident(seg.ident); - let path = Path { segments: path.iter().map(path_seg).collect(), span, tokens: None }; - if let Ok((_, res)) = self.r.cm().resolve_macro_path( - &path, - None, - &self.parent_scope, - false, - false, - None, - None, - ) { - return Ok(Some(PartialRes::new(res))); - } + if qself.is_none() + && let PathResult::NonModule(res) = + self.r.cm().maybe_resolve_path(path, Some(MacroNS), &self.parent_scope, None) + { + return Ok(Some(res)); } Ok(fin_res) @@ -4664,16 +4645,16 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { debug!("(resolving block) entering block"); // Move down in the graph, if there's an anonymous module rooted here. 
let orig_module = self.parent_scope.module; - let anonymous_module = self.r.block_map.get(&block.id).cloned(); // clones a reference + let anonymous_module = self.r.block_map.get(&block.id).copied(); let mut num_macro_definition_ribs = 0; if let Some(anonymous_module) = anonymous_module { debug!("(resolving block) found anonymous module, moving down"); - self.ribs[ValueNS].push(Rib::new(RibKind::Module(anonymous_module))); - self.ribs[TypeNS].push(Rib::new(RibKind::Module(anonymous_module))); + self.ribs[ValueNS].push(Rib::new(RibKind::Block(Some(anonymous_module)))); + self.ribs[TypeNS].push(Rib::new(RibKind::Block(Some(anonymous_module)))); self.parent_scope.module = anonymous_module; } else { - self.ribs[ValueNS].push(Rib::new(RibKind::Normal)); + self.ribs[ValueNS].push(Rib::new(RibKind::Block(None))); } // Descend into the block. @@ -5035,7 +5016,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { } ResolveDocLinks::Exported if !maybe_exported.eval(self.r) - && !rustdoc::has_primitive_or_keyword_docs(attrs) => + && !rustdoc::has_primitive_or_keyword_or_attribute_docs(attrs) => { return; } diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index aca251da71d..80b2095d8cc 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -19,14 +19,13 @@ use rustc_errors::{ }; use rustc_hir as hir; use rustc_hir::def::Namespace::{self, *}; -use rustc_hir::def::{self, CtorKind, CtorOf, DefKind}; +use rustc_hir::def::{self, CtorKind, CtorOf, DefKind, MacroKinds}; use rustc_hir::def_id::{CRATE_DEF_ID, DefId}; use rustc_hir::{MissingLifetimeKind, PrimTy}; use rustc_middle::ty; use rustc_session::{Session, lint}; use rustc_span::edit_distance::{edit_distance, find_best_match_for_name}; use rustc_span::edition::Edition; -use rustc_span::hygiene::MacroKind; use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use thin_vec::ThinVec; use tracing::debug; @@ -39,8 +38,8 @@ use crate::late::{ }; use crate::ty::fast_reject::SimplifiedType; use crate::{ - Module, ModuleKind, ModuleOrUniformRoot, PathResult, PathSource, Resolver, Segment, errors, - path_names_to_string, + Module, ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult, PathSource, Resolver, + ScopeSet, Segment, errors, path_names_to_string, }; type Res = def::Res<ast::NodeId>; @@ -850,9 +849,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { } // Try to find in last block rib - if let Some(rib) = &self.last_block_rib - && let RibKind::Normal = rib.kind - { + if let Some(rib) = &self.last_block_rib { for (ident, &res) in &rib.bindings { if let Res::Local(_) = res && path.len() == 1 @@ -901,7 +898,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { if path.len() == 1 { for rib in self.ribs[ns].iter().rev() { let item = path[0].ident; - if let RibKind::Module(module) = rib.kind + if let RibKind::Module(module) | RibKind::Block(Some(module)) = rib.kind && let Some(did) = find_doc_alias_name(self.r, module, item.name) { return Some((did, item)); @@ -1850,12 +1847,12 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { match (res, source) { ( - Res::Def(DefKind::Macro(MacroKind::Bang), def_id), + Res::Def(DefKind::Macro(kinds), def_id), PathSource::Expr(Some(Expr { kind: ExprKind::Index(..) | ExprKind::Call(..), .. })) | PathSource::Struct(_), - ) => { + ) if kinds.contains(MacroKinds::BANG) => { // Don't suggest macro if it's unstable. 
let suggestable = def_id.is_local() || self.r.tcx.lookup_stability(def_id).is_none_or(|s| s.is_stable()); @@ -1880,7 +1877,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { err.note("if you want the `try` keyword, you need Rust 2018 or later"); } } - (Res::Def(DefKind::Macro(MacroKind::Bang), _), _) => { + (Res::Def(DefKind::Macro(kinds), _), _) if kinds.contains(MacroKinds::BANG) => { err.span_label(span, fallback_label.to_string()); } (Res::Def(DefKind::TyAlias, def_id), PathSource::Trait(_)) => { @@ -2459,45 +2456,29 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { } } + if let RibKind::Block(Some(module)) = rib.kind { + self.r.add_module_candidates(module, &mut names, &filter_fn, Some(ctxt)); + } else if let RibKind::Module(module) = rib.kind { + // Encountered a module item, abandon ribs and look into that module and preludes. + let parent_scope = &ParentScope { module, ..self.parent_scope }; + self.r.add_scope_set_candidates( + &mut names, + ScopeSet::All(ns), + parent_scope, + ctxt, + filter_fn, + ); + break; + } + if let RibKind::MacroDefinition(def) = rib.kind && def == self.r.macro_def(ctxt) { // If an invocation of this macro created `ident`, give up on `ident` // and switch to `ident`'s source from the macro definition. ctxt.remove_mark(); - continue; - } - - // Items in scope - if let RibKind::Module(module) = rib.kind { - // Items from this module - self.r.add_module_candidates(module, &mut names, &filter_fn, Some(ctxt)); - - if let ModuleKind::Block = module.kind { - // We can see through blocks - } else { - // Items from the prelude - if !module.no_implicit_prelude { - names.extend(self.r.extern_prelude.keys().flat_map(|ident| { - let res = Res::Def(DefKind::Mod, CRATE_DEF_ID.to_def_id()); - filter_fn(res) - .then_some(TypoSuggestion::typo_from_ident(ident.0, res)) - })); - - if let Some(prelude) = self.r.prelude { - self.r.add_module_candidates(prelude, &mut names, &filter_fn, None); - } - } - break; - } } } - // Add primitive types to the mix - if filter_fn(Res::PrimTy(PrimTy::Bool)) { - names.extend(PrimTy::ALL.iter().map(|prim_ty| { - TypoSuggestion::typo_from_name(prim_ty.name(), Res::PrimTy(*prim_ty)) - })) - } } else { // Search in module. 
let mod_path = &path[..path.len() - 1]; @@ -3114,7 +3095,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { } else { self.suggest_introducing_lifetime( &mut err, - Some(lifetime_ref.ident.name.as_str()), + Some(lifetime_ref.ident), |err, _, span, message, suggestion, span_suggs| { err.multipart_suggestion_verbose( message, @@ -3132,7 +3113,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { fn suggest_introducing_lifetime( &self, err: &mut Diag<'_>, - name: Option<&str>, + name: Option<Ident>, suggest: impl Fn( &mut Diag<'_>, bool, @@ -3179,7 +3160,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { let mut rm_inner_binders: FxIndexSet<Span> = Default::default(); let (span, sugg) = if span.is_empty() { let mut binder_idents: FxIndexSet<Ident> = Default::default(); - binder_idents.insert(Ident::from_str(name.unwrap_or("'a"))); + binder_idents.insert(name.unwrap_or(Ident::from_str("'a"))); // We need to special case binders in the following situation: // Change `T: for<'a> Trait<T> + 'b` to `for<'a, 'b> T: Trait<T> + 'b` @@ -3209,16 +3190,11 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { } } - let binders_sugg = binder_idents.into_iter().enumerate().fold( - "".to_string(), - |mut binders, (i, x)| { - if i != 0 { - binders += ", "; - } - binders += x.as_str(); - binders - }, - ); + let binders_sugg: String = binder_idents + .into_iter() + .map(|ident| ident.to_string()) + .intersperse(", ".to_owned()) + .collect(); let sugg = format!( "{}<{}>{}", if higher_ranked { "for" } else { "" }, @@ -3234,7 +3210,8 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { .source_map() .span_through_char(span, '<') .shrink_to_hi(); - let sugg = format!("{}, ", name.unwrap_or("'a")); + let sugg = + format!("{}, ", name.map(|i| i.to_string()).as_deref().unwrap_or("'a")); (span, sugg) }; @@ -3242,7 +3219,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { let message = Cow::from(format!( "consider making the {} lifetime-generic with a new `{}` lifetime", kind.descr(), - name.unwrap_or("'a"), + name.map(|i| i.to_string()).as_deref().unwrap_or("'a"), )); should_continue = suggest( err, diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index b43f71913d9..2afb52ef4d4 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -15,6 +15,8 @@ #![feature(arbitrary_self_types)] #![feature(assert_matches)] #![feature(box_patterns)] +#![feature(decl_macro)] +#![feature(default_field_values)] #![feature(if_let_guard)] #![feature(iter_intersperse)] #![feature(rustc_attrs)] @@ -47,13 +49,14 @@ use rustc_data_structures::intern::Interned; use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{FreezeReadGuard, FreezeWriteGuard}; use rustc_data_structures::unord::{UnordMap, UnordSet}; -use rustc_errors::{Applicability, Diag, ErrCode, ErrorGuaranteed}; +use rustc_errors::{Applicability, Diag, ErrCode, ErrorGuaranteed, LintBuffer}; use rustc_expand::base::{DeriveResolution, SyntaxExtension, SyntaxExtensionKind}; use rustc_feature::BUILTIN_ATTRIBUTES; use rustc_hir::attrs::StrippedCfgItem; use rustc_hir::def::Namespace::{self, *}; use rustc_hir::def::{ - self, CtorOf, DefKind, DocLinkResMap, LifetimeRes, NonMacroAttrKind, PartialRes, PerNS, + self, CtorOf, DefKind, DocLinkResMap, LifetimeRes, MacroKinds, NonMacroAttrKind, PartialRes, + PerNS, }; use rustc_hir::def_id::{CRATE_DEF_ID, CrateNum, DefId, LOCAL_CRATE, LocalDefId, 
LocalDefIdMap}; use rustc_hir::definitions::DisambiguatorState; @@ -69,8 +72,8 @@ use rustc_middle::ty::{ ResolverGlobalCtxt, TyCtxt, TyCtxtFeed, Visibility, }; use rustc_query_system::ich::StableHashingContext; +use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::PRIVATE_MACRO_USE; -use rustc_session::lint::{BuiltinLintDiag, LintBuffer}; use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind, SyntaxContext, Transparency}; use rustc_span::{DUMMY_SP, Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym}; use smallvec::{SmallVec, smallvec}; @@ -113,39 +116,48 @@ impl Determinacy { } /// A specific scope in which a name can be looked up. -/// This enum is currently used only for early resolution (imports and macros), -/// but not for late resolution yet. #[derive(Clone, Copy, Debug)] enum Scope<'ra> { + /// Inert attributes registered by derive macros. DeriveHelpers(LocalExpnId), + /// Inert attributes registered by derive macros, but used before they are actually declared. + /// This scope will exist until the compatibility lint `LEGACY_DERIVE_HELPERS` + /// is turned into a hard error. DeriveHelpersCompat, + /// Textual `let`-like scopes introduced by `macro_rules!` items. MacroRules(MacroRulesScopeRef<'ra>), - // The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK` - // lint if it should be reported. + /// Names declared in the given module. + /// The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK` + /// lint if it should be reported. Module(Module<'ra>, Option<NodeId>), + /// Names introduced by `#[macro_use]` attributes on `extern crate` items. MacroUsePrelude, + /// Built-in attributes. BuiltinAttrs, - ExternPrelude, + /// Extern prelude names introduced by `extern crate` items. + ExternPreludeItems, + /// Extern prelude names introduced by `--extern` flags. + ExternPreludeFlags, + /// Tool modules introduced with `#![register_tool]`. ToolPrelude, + /// Standard library prelude introduced with an internal `#[prelude_import]` import. StdLibPrelude, + /// Built-in types. BuiltinTypes, } /// Names from different contexts may want to visit different subsets of all specific scopes /// with different restrictions when looking up the resolution. -/// This enum is currently used only for early resolution (imports and macros), -/// but not for late resolution yet. #[derive(Clone, Copy, Debug)] enum ScopeSet<'ra> { /// All scopes with the given namespace. All(Namespace), /// A module, then extern prelude (used for mixed 2015-2018 mode in macros). ModuleAndExternPrelude(Namespace, Module<'ra>), - /// All scopes with macro namespace and the given macro kind restriction. + /// Just two extern prelude scopes. + ExternPrelude, + /// Same as `All(MacroNS)`, but with the given macro kind restriction. Macro(MacroKind), - /// All scopes with the given namespace, used for partially performing late resolution. - /// The node id enables lints and is used for reporting them. - Late(Namespace, Module<'ra>, Option<NodeId>), } /// Everything you need to know about a name's location to resolve it. @@ -766,7 +778,10 @@ impl<'ra> std::ops::Deref for Module<'ra> { impl<'ra> fmt::Debug for Module<'ra> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self.res()) + match self.kind { + ModuleKind::Block => write!(f, "block"), + ModuleKind::Def(..) 
=> write!(f, "{:?}", self.res()), + } } } @@ -969,8 +984,8 @@ impl<'ra> NameBindingData<'ra> { matches!(self.res(), Res::Def(DefKind::AssocConst | DefKind::AssocFn | DefKind::AssocTy, _)) } - fn macro_kind(&self) -> Option<MacroKind> { - self.res().macro_kind() + fn macro_kinds(&self) -> Option<MacroKinds> { + self.res().macro_kinds() } // Suppose that we resolved macro invocation with `invoc_parent_expansion` to binding `binding` @@ -1012,16 +1027,18 @@ impl<'ra> NameBindingData<'ra> { #[derive(Default, Clone)] struct ExternPreludeEntry<'ra> { - binding: Cell<Option<NameBinding<'ra>>>, + /// Binding from an `extern crate` item. + item_binding: Option<NameBinding<'ra>>, + /// Binding from an `--extern` flag, lazily populated on first use. + flag_binding: Cell<Option<NameBinding<'ra>>>, + /// There was no `--extern` flag introducing this name, + /// `flag_binding` doesn't need to be populated. + only_item: bool, + /// `item_binding` is non-redundant, happens either when `only_item` is true, + /// or when `extern crate` introducing `item_binding` used renaming. introduced_by_item: bool, } -impl ExternPreludeEntry<'_> { - fn is_import(&self) -> bool { - self.binding.get().is_some_and(|binding| binding.is_import()) - } -} - struct DeriveData { resolutions: Vec<DeriveResolution>, helper_attrs: Vec<(usize, Ident)>, @@ -1030,14 +1047,13 @@ struct DeriveData { struct MacroData { ext: Arc<SyntaxExtension>, - attr_ext: Option<Arc<SyntaxExtension>>, nrules: usize, macro_rules: bool, } impl MacroData { fn new(ext: Arc<SyntaxExtension>) -> MacroData { - MacroData { ext, attr_ext: None, nrules: 0, macro_rules: false } + MacroData { ext, nrules: 0, macro_rules: false } } } @@ -1060,7 +1076,7 @@ pub struct Resolver<'ra, 'tcx> { /// Assert that we are in speculative resolution mode. assert_speculative: bool, - prelude: Option<Module<'ra>>, + prelude: Option<Module<'ra>> = None, extern_prelude: FxIndexMap<Macros20NormalizedIdent, ExternPreludeEntry<'ra>>, /// N.B., this is used only for better diagnostics, not name resolution itself. @@ -1072,10 +1088,10 @@ pub struct Resolver<'ra, 'tcx> { field_visibility_spans: FxHashMap<DefId, Vec<Span>>, /// All imports known to succeed or fail. - determined_imports: Vec<Import<'ra>>, + determined_imports: Vec<Import<'ra>> = Vec::new(), /// All non-determined imports. - indeterminate_imports: Vec<Import<'ra>>, + indeterminate_imports: Vec<Import<'ra>> = Vec::new(), // Spans for local variables found during pattern resolution. // Used for suggestions during error reporting. @@ -1126,19 +1142,19 @@ pub struct Resolver<'ra, 'tcx> { /// Maps glob imports to the names of items actually imported. glob_map: FxIndexMap<LocalDefId, FxIndexSet<Symbol>>, - glob_error: Option<ErrorGuaranteed>, - visibilities_for_hashing: Vec<(LocalDefId, Visibility)>, + glob_error: Option<ErrorGuaranteed> = None, + visibilities_for_hashing: Vec<(LocalDefId, Visibility)> = Vec::new(), used_imports: FxHashSet<NodeId>, maybe_unused_trait_imports: FxIndexSet<LocalDefId>, /// Privacy errors are delayed until the end in order to deduplicate them. - privacy_errors: Vec<PrivacyError<'ra>>, + privacy_errors: Vec<PrivacyError<'ra>> = Vec::new(), /// Ambiguity errors are delayed for deduplication. - ambiguity_errors: Vec<AmbiguityError<'ra>>, + ambiguity_errors: Vec<AmbiguityError<'ra>> = Vec::new(), /// `use` injections are delayed for better placement and deduplication. 
- use_injections: Vec<UseError<'tcx>>, + use_injections: Vec<UseError<'tcx>> = Vec::new(), /// Crate-local macro expanded `macro_export` referred to by a module-relative path. - macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>, + macro_expanded_macro_export_errors: BTreeSet<(Span, Span)> = BTreeSet::new(), arenas: &'ra ResolverArenas<'ra>, dummy_binding: NameBinding<'ra>, @@ -1190,9 +1206,9 @@ pub struct Resolver<'ra, 'tcx> { /// Avoid duplicated errors for "name already defined". name_already_seen: FxHashMap<Symbol, Span>, - potentially_unused_imports: Vec<Import<'ra>>, + potentially_unused_imports: Vec<Import<'ra>> = Vec::new(), - potentially_unnecessary_qualifications: Vec<UnnecessaryQualification<'ra>>, + potentially_unnecessary_qualifications: Vec<UnnecessaryQualification<'ra>> = Vec::new(), /// Table for mapping struct IDs into struct constructor IDs, /// it's not used during normal resolution, only for better error reporting. @@ -1201,7 +1217,7 @@ pub struct Resolver<'ra, 'tcx> { lint_buffer: LintBuffer, - next_node_id: NodeId, + next_node_id: NodeId = CRATE_NODE_ID, node_id_to_def_id: NodeMap<Feed<'tcx, LocalDefId>>, @@ -1219,17 +1235,17 @@ pub struct Resolver<'ra, 'tcx> { item_generics_num_lifetimes: FxHashMap<LocalDefId, usize>, delegation_fn_sigs: LocalDefIdMap<DelegationFnSig>, - main_def: Option<MainDefinition>, + main_def: Option<MainDefinition> = None, trait_impls: FxIndexMap<DefId, Vec<LocalDefId>>, /// A list of proc macro LocalDefIds, written out in the order in which /// they are declared in the static array generated by proc_macro_harness. - proc_macros: Vec<LocalDefId>, + proc_macros: Vec<LocalDefId> = Vec::new(), confused_type_with_std_module: FxIndexMap<Span, Span>, /// Whether lifetime elision was successful. lifetime_elision_allowed: FxHashSet<NodeId>, /// Names of items that were stripped out via cfg with their corresponding cfg meta item. - stripped_cfg_items: Vec<StrippedCfgItem<NodeId>>, + stripped_cfg_items: Vec<StrippedCfgItem<NodeId>> = Vec::new(), effective_visibilities: EffectiveVisibilities, doc_link_resolutions: FxIndexMap<LocalDefId, DocLinkResMap>, @@ -1251,6 +1267,10 @@ pub struct Resolver<'ra, 'tcx> { mods_with_parse_errors: FxHashSet<DefId>, + /// Whether `Resolver::register_macros_for_all_crates` has been called once already, as we + /// don't need to run it more than once. + all_crate_macros_already_registered: bool = false, + // Stores pre-expansion and pre-placeholder-fragment-insertion names for `impl Trait` types // that were encountered during resolution. These names are used to generate item names // for APITs, so we don't want to leak details of resolution into these names. 
@@ -1543,9 +1563,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { field_defaults: Default::default(), field_visibility_spans: FxHashMap::default(), - determined_imports: Vec::new(), - indeterminate_imports: Vec::new(), - pat_span_map: Default::default(), partial_res_map: Default::default(), import_res_map: Default::default(), @@ -1564,16 +1581,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ast_transform_scopes: FxHashMap::default(), glob_map: Default::default(), - glob_error: None, - visibilities_for_hashing: Default::default(), used_imports: FxHashSet::default(), maybe_unused_trait_imports: Default::default(), - privacy_errors: Vec::new(), - ambiguity_errors: Vec::new(), - use_injections: Vec::new(), - macro_expanded_macro_export_errors: BTreeSet::new(), - arenas, dummy_binding: arenas.new_pub_res_binding(Res::Err, DUMMY_SP, LocalExpnId::ROOT), builtin_types_bindings: PrimTy::ALL @@ -1617,8 +1627,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { derive_data: Default::default(), local_macro_def_scopes: FxHashMap::default(), name_already_seen: FxHashMap::default(), - potentially_unused_imports: Vec::new(), - potentially_unnecessary_qualifications: Default::default(), struct_constructors: Default::default(), unused_macros: Default::default(), unused_macro_rules: Default::default(), @@ -1628,16 +1636,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { builtin_attrs: Default::default(), containers_deriving_copy: Default::default(), lint_buffer: LintBuffer::default(), - next_node_id: CRATE_NODE_ID, node_id_to_def_id, disambiguator: DisambiguatorState::new(), placeholder_field_indices: Default::default(), invocation_parents, legacy_const_generic_args: Default::default(), item_generics_num_lifetimes: Default::default(), - main_def: Default::default(), trait_impls: Default::default(), - proc_macros: Default::default(), confused_type_with_std_module: Default::default(), lifetime_elision_allowed: Default::default(), stripped_cfg_items: Default::default(), @@ -1652,6 +1657,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { current_crate_outer_attr_insert_span, mods_with_parse_errors: Default::default(), impl_trait_names: Default::default(), + .. }; let root_parent_scope = ParentScope::module(graph_root, resolver.arenas); @@ -1879,7 +1885,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } - self.cm().visit_scopes(ScopeSet::All(TypeNS), parent_scope, ctxt, |this, scope, _, _| { + let scope_set = ScopeSet::All(TypeNS); + self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |this, scope, _, _| { match scope { Scope::Module(module, _) => { this.get_mut().traits_in_module(module, assoc_item, &mut found_traits); @@ -1889,7 +1896,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { this.get_mut().traits_in_module(module, assoc_item, &mut found_traits); } } - Scope::ExternPrelude | Scope::ToolPrelude | Scope::BuiltinTypes => {} + Scope::ExternPreludeItems + | Scope::ExternPreludeFlags + | Scope::ToolPrelude + | Scope::BuiltinTypes => {} _ => unreachable!(), } None::<()> @@ -2054,7 +2064,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // but not introduce it, as used if they are accessed from lexical scope. 
if used == Used::Scope { if let Some(entry) = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident)) { - if !entry.introduced_by_item && entry.binding.get() == Some(used_binding) { + if !entry.introduced_by_item && entry.item_binding == Some(used_binding) { return; } } @@ -2210,26 +2220,30 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } - fn extern_prelude_get<'r>( + fn extern_prelude_get_item<'r>( mut self: CmResolver<'r, 'ra, 'tcx>, ident: Ident, finalize: bool, ) -> Option<NameBinding<'ra>> { - let mut record_use = None; let entry = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident)); - let binding = entry.and_then(|entry| match entry.binding.get() { - Some(binding) if binding.is_import() => { - if finalize { - record_use = Some(binding); - } - Some(binding) + entry.and_then(|entry| entry.item_binding).map(|binding| { + if finalize { + self.get_mut().record_use(ident, binding, Used::Scope); } + binding + }) + } + + fn extern_prelude_get_flag(&self, ident: Ident, finalize: bool) -> Option<NameBinding<'ra>> { + let entry = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident)); + entry.and_then(|entry| match entry.flag_binding.get() { Some(binding) => { if finalize { self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span); } Some(binding) } + None if entry.only_item => None, None => { let crate_id = if finalize { self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span) @@ -2241,19 +2255,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let res = Res::Def(DefKind::Mod, crate_id.as_def_id()); let binding = self.arenas.new_pub_res_binding(res, DUMMY_SP, LocalExpnId::ROOT); - entry.binding.set(Some(binding)); + entry.flag_binding.set(Some(binding)); Some(binding) } None => finalize.then_some(self.dummy_binding), } } - }); - - if let Some(binding) = record_use { - self.get_mut().record_use(ident, binding, Used::Scope); - } - - binding + }) } /// Rustdoc uses this to resolve doc link paths in a recoverable way. `PathResult<'a>` @@ -2445,6 +2453,17 @@ fn module_to_string(mut module: Module<'_>) -> Option<String> { Some(names_to_string(names.iter().rev().copied())) } +#[derive(Copy, Clone, PartialEq, Debug)] +enum Stage { + /// Resolving an import or a macro. + /// Used when macro expansion is either not yet finished, or we are finalizing its results. + /// Used by default as a more restrictive variant that can produce additional errors. + Early, + /// Resolving something in late resolution when all imports are resolved + /// and all macros are expanded. + Late, +} + #[derive(Copy, Clone, Debug)] struct Finalize { /// Node ID for linting. @@ -2457,9 +2476,11 @@ struct Finalize { root_span: Span, /// Whether to report privacy errors or silently return "no resolution" for them, /// similarly to speculative resolution. - report_private: bool, + report_private: bool = true, /// Tracks whether an item is used in scope or used relatively to a module. - used: Used, + used: Used = Used::Other, + /// Finalizing early or late resolution. + stage: Stage = Stage::Early, } impl Finalize { @@ -2468,7 +2489,7 @@ impl Finalize { } fn with_root_span(node_id: NodeId, path_span: Span, root_span: Span) -> Finalize { - Finalize { node_id, path_span, root_span, report_private: true, used: Used::Other } + Finalize { node_id, path_span, root_span, .. 
} } } diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index 9173d0d3ea5..5fa87b4cd9b 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -1,7 +1,6 @@ //! A bunch of methods and structures more or less related to resolving macros and //! interface provided by `Resolver` to macro expander. -use std::any::Any; use std::cell::Cell; use std::mem; use std::sync::Arc; @@ -13,13 +12,13 @@ use rustc_expand::base::{ Annotatable, DeriveResolution, Indeterminate, ResolverExpand, SyntaxExtension, SyntaxExtensionKind, }; +use rustc_expand::compile_declarative_macro; use rustc_expand::expand::{ AstFragment, AstFragmentKind, Invocation, InvocationKind, SupportsMacroExpansion, }; -use rustc_expand::{MacroRulesMacroExpander, compile_declarative_macro}; use rustc_hir::StabilityLevel; use rustc_hir::attrs::{CfgEntry, StrippedCfgItem}; -use rustc_hir::def::{self, DefKind, Namespace, NonMacroAttrKind}; +use rustc_hir::def::{self, DefKind, MacroKinds, Namespace, NonMacroAttrKind}; use rustc_hir::def_id::{CrateNum, DefId, LocalDefId}; use rustc_middle::middle::stability; use rustc_middle::ty::{RegisteredTools, TyCtxt}; @@ -86,22 +85,19 @@ pub(crate) type MacroRulesScopeRef<'ra> = &'ra Cell<MacroRulesScope<'ra>>; /// one for attribute-like macros (attributes, derives). /// We ignore resolutions from one sub-namespace when searching names in scope for another. pub(crate) fn sub_namespace_match( - candidate: Option<MacroKind>, + candidate: Option<MacroKinds>, requirement: Option<MacroKind>, ) -> bool { - #[derive(PartialEq)] - enum SubNS { - Bang, - AttrLike, - } - let sub_ns = |kind| match kind { - MacroKind::Bang => SubNS::Bang, - MacroKind::Attr | MacroKind::Derive => SubNS::AttrLike, - }; - let candidate = candidate.map(sub_ns); - let requirement = requirement.map(sub_ns); // "No specific sub-namespace" means "matches anything" for both requirements and candidates. 
- candidate.is_none() || requirement.is_none() || candidate == requirement + let (Some(candidate), Some(requirement)) = (candidate, requirement) else { + return true; + }; + match requirement { + MacroKind::Bang => candidate.contains(MacroKinds::BANG), + MacroKind::Attr | MacroKind::Derive => { + candidate.intersects(MacroKinds::ATTR | MacroKinds::DERIVE) + } + } } // We don't want to format a path using pretty-printing, @@ -323,6 +319,7 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> { parent_scope.expansion, span, fast_print_path(path), + kind, def_id, def_id.map(|def_id| self.macro_def_scope(def_id).nearest_parent_mod()), ), @@ -356,11 +353,7 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> { } let def_id = self.local_def_id(node_id); let m = &self.local_macro_map[&def_id]; - let SyntaxExtensionKind::LegacyBang(ref ext) = m.ext.kind else { - continue; - }; - let ext: &dyn Any = ext.as_ref(); - let Some(m) = ext.downcast_ref::<MacroRulesMacroExpander>() else { + let SyntaxExtensionKind::MacroRules(ref m) = m.ext.kind else { continue; }; for arm_i in unused_arms.iter() { @@ -405,7 +398,7 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> { resolution.exts = Some( match self.cm().resolve_macro_path( &resolution.path, - Some(MacroKind::Derive), + MacroKind::Derive, &parent_scope, true, force, @@ -570,7 +563,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ) -> Result<(Arc<SyntaxExtension>, Res), Indeterminate> { let (ext, res) = match self.cm().resolve_macro_or_delegation_path( path, - Some(kind), + kind, parent_scope, true, force, @@ -633,7 +626,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { self.check_stability_and_deprecation(&ext, path, node_id); - let unexpected_res = if ext.macro_kind() != kind { + let unexpected_res = if !ext.macro_kinds().contains(kind.into()) { Some((kind.article(), kind.descr_expected())) } else if matches!(res, Res::Def(..)) { match supports_macro_expansion { @@ -665,7 +658,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Suggest moving the macro out of the derive() if the macro isn't Derive if !path.span.from_expansion() && kind == MacroKind::Derive - && ext.macro_kind() != MacroKind::Derive + && !ext.macro_kinds().contains(MacroKinds::DERIVE) + && ext.macro_kinds().contains(MacroKinds::ATTR) { err.remove_surrounding_derive = Some(RemoveSurroundingDerive { span: path.span }); err.add_as_non_derive = Some(AddAsNonDerive { macro_path: &path_str }); @@ -716,7 +710,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { pub(crate) fn resolve_macro_path<'r>( self: CmResolver<'r, 'ra, 'tcx>, path: &ast::Path, - kind: Option<MacroKind>, + kind: MacroKind, parent_scope: &ParentScope<'ra>, trace: bool, force: bool, @@ -739,7 +733,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { fn resolve_macro_or_delegation_path<'r>( mut self: CmResolver<'r, 'ra, 'tcx>, ast_path: &ast::Path, - kind: Option<MacroKind>, + kind: MacroKind, parent_scope: &ParentScope<'ra>, trace: bool, force: bool, @@ -753,7 +747,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Possibly apply the macro helper hack if deleg_impl.is_none() - && kind == Some(MacroKind::Bang) + && kind == MacroKind::Bang && let [segment] = path.as_slice() && segment.ident.span.ctxt().outer_expn_data().local_inner_macros { @@ -781,7 +775,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }; if trace { - let kind = kind.expect("macro kind must be specified if tracing is enabled"); // FIXME: Should be an output of Speculative Resolution. 
self.multi_segment_macro_resolutions.borrow_mut().push(( path, @@ -796,10 +789,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { self.prohibit_imported_non_macro_attrs(None, res.ok(), path_span); res } else { - let scope_set = kind.map_or(ScopeSet::All(MacroNS), ScopeSet::Macro); - let binding = self.reborrow().early_resolve_ident_in_lexical_scope( + let binding = self.reborrow().resolve_ident_in_scope_set( path[0].ident, - scope_set, + ScopeSet::Macro(kind), parent_scope, None, force, @@ -811,7 +803,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } if trace { - let kind = kind.expect("macro kind must be specified if tracing is enabled"); // FIXME: Should be an output of Speculative Resolution. self.single_segment_macro_resolutions.borrow_mut().push(( path[0].ident, @@ -842,10 +833,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } _ => None, }, - None => self.get_macro(res).map(|macro_data| match kind { - Some(MacroKind::Attr) if let Some(ref ext) = macro_data.attr_ext => Arc::clone(ext), - _ => Arc::clone(¯o_data.ext), - }), + None => self.get_macro(res).map(|macro_data| Arc::clone(¯o_data.ext)), }; Ok((ext, res)) } @@ -963,7 +951,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // FIXME: Should be an output of Speculative Resolution. let macro_resolutions = self.single_segment_macro_resolutions.take(); for (ident, kind, parent_scope, initial_binding, sugg_span) in macro_resolutions { - match self.cm().early_resolve_ident_in_lexical_scope( + match self.cm().resolve_ident_in_scope_set( ident, ScopeSet::Macro(kind), &parent_scope, @@ -1018,7 +1006,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let builtin_attrs = mem::take(&mut self.builtin_attrs); for (ident, parent_scope) in builtin_attrs { - let _ = self.cm().early_resolve_ident_in_lexical_scope( + let _ = self.cm().resolve_ident_in_scope_set( ident, ScopeSet::Macro(MacroKind::Attr), &parent_scope, @@ -1114,7 +1102,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { && let Some(binding) = binding // This is a `macro_rules` itself, not some import. && let NameBindingKind::Res(res) = binding.kind - && let Res::Def(DefKind::Macro(MacroKind::Bang), def_id) = res + && let Res::Def(DefKind::Macro(kinds), def_id) = res + && kinds.contains(MacroKinds::BANG) // And the `macro_rules` is defined inside the attribute's module, // so it cannot be in scope unless imported. && self.tcx.is_descendant_of(def_id, mod_def_id.to_def_id()) @@ -1123,7 +1112,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // If such resolution is successful and gives the same result // (e.g. if the macro is re-imported), then silence the lint. let no_macro_rules = self.arenas.alloc_macro_rules_scope(MacroRulesScope::Empty); - let fallback_binding = self.reborrow().early_resolve_ident_in_lexical_scope( + let fallback_binding = self.reborrow().resolve_ident_in_scope_set( path.segments[0].ident, ScopeSet::Macro(MacroKind::Bang), &ParentScope { macro_rules: no_macro_rules, ..*parent_scope }, @@ -1161,8 +1150,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Reserve some names that are not quite covered by the general check // performed on `Resolver::builtin_attrs`. 
if ident.name == sym::cfg || ident.name == sym::cfg_attr { - let macro_kind = self.get_macro(res).map(|macro_data| macro_data.ext.macro_kind()); - if macro_kind.is_some() && sub_namespace_match(macro_kind, Some(MacroKind::Attr)) { + let macro_kinds = self.get_macro(res).map(|macro_data| macro_data.ext.macro_kinds()); + if macro_kinds.is_some() && sub_namespace_match(macro_kinds, Some(MacroKind::Attr)) { self.dcx() .emit_err(errors::NameReservedInAttributeNamespace { span: ident.span, ident }); } @@ -1181,7 +1170,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { node_id: NodeId, edition: Edition, ) -> MacroData { - let (mut ext, mut attr_ext, mut nrules) = compile_declarative_macro( + let (mut ext, mut nrules) = compile_declarative_macro( self.tcx.sess, self.tcx.features(), macro_def, @@ -1198,14 +1187,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // The macro is a built-in, replace its expander function // while still taking everything else from the source code. ext.kind = builtin_ext_kind.clone(); - attr_ext = None; nrules = 0; } else { self.dcx().emit_err(errors::CannotFindBuiltinMacroWithName { span, ident }); } } - MacroData { ext: Arc::new(ext), attr_ext, nrules, macro_rules: macro_def.macro_rules } + MacroData { ext: Arc::new(ext), nrules, macro_rules: macro_def.macro_rules } } fn path_accessible( diff --git a/compiler/rustc_resolve/src/rustdoc.rs b/compiler/rustc_resolve/src/rustdoc.rs index 6450f63472c..804792c6f28 100644 --- a/compiler/rustc_resolve/src/rustdoc.rs +++ b/compiler/rustc_resolve/src/rustdoc.rs @@ -207,8 +207,10 @@ pub fn attrs_to_doc_fragments<'a, A: AttributeExt + Clone + 'a>( attrs: impl Iterator<Item = (&'a A, Option<DefId>)>, doc_only: bool, ) -> (Vec<DocFragment>, ThinVec<A>) { - let mut doc_fragments = Vec::new(); - let mut other_attrs = ThinVec::<A>::new(); + let (min_size, max_size) = attrs.size_hint(); + let size_hint = max_size.unwrap_or(min_size); + let mut doc_fragments = Vec::with_capacity(size_hint); + let mut other_attrs = ThinVec::<A>::with_capacity(if doc_only { 0 } else { size_hint }); for (attr, item_id) in attrs { if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() { let doc = beautify_doc_string(doc_str, comment_kind); @@ -230,6 +232,9 @@ pub fn attrs_to_doc_fragments<'a, A: AttributeExt + Clone + 'a>( } } + doc_fragments.shrink_to_fit(); + other_attrs.shrink_to_fit(); + unindent_doc_fragments(&mut doc_fragments); (doc_fragments, other_attrs) @@ -368,8 +373,8 @@ pub fn inner_docs(attrs: &[impl AttributeExt]) -> bool { true } -/// Has `#[rustc_doc_primitive]` or `#[doc(keyword)]`. -pub fn has_primitive_or_keyword_docs(attrs: &[impl AttributeExt]) -> bool { +/// Has `#[rustc_doc_primitive]` or `#[doc(keyword)]` or `#[doc(attribute)]`. 
+pub fn has_primitive_or_keyword_or_attribute_docs(attrs: &[impl AttributeExt]) -> bool { for attr in attrs { if attr.has_name(sym::rustc_doc_primitive) { return true; @@ -377,7 +382,7 @@ pub fn has_primitive_or_keyword_docs(attrs: &[impl AttributeExt]) -> bool { && let Some(items) = attr.meta_item_list() { for item in items { - if item.has_name(sym::keyword) { + if item.has_name(sym::keyword) || item.has_name(sym::attribute) { return true; } } diff --git a/compiler/rustc_sanitizers/Cargo.toml b/compiler/rustc_sanitizers/Cargo.toml index 9069d2c233d..3dbeaa435f3 100644 --- a/compiler/rustc_sanitizers/Cargo.toml +++ b/compiler/rustc_sanitizers/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.5.0" +bitflags.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_hir = { path = "../rustc_hir" } @@ -13,6 +13,6 @@ rustc_middle = { path = "../rustc_middle" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -tracing = "0.1" +tracing.workspace = true twox-hash = "1.6.3" # tidy-alphabetical-end diff --git a/compiler/rustc_serialize/Cargo.toml b/compiler/rustc_serialize/Cargo.toml index 948242352e7..e9959735f9f 100644 --- a/compiler/rustc_serialize/Cargo.toml +++ b/compiler/rustc_serialize/Cargo.toml @@ -8,7 +8,7 @@ edition = "2024" indexmap = "2.0.0" rustc_hashes = { path = "../rustc_hashes" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2.12" +thin-vec.workspace = true # tidy-alphabetical-end [dev-dependencies] diff --git a/compiler/rustc_session/Cargo.toml b/compiler/rustc_session/Cargo.toml index 0516982aeab..97789b198a4 100644 --- a/compiler/rustc_session/Cargo.toml +++ b/compiler/rustc_session/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true getopts = "0.2" rand = "0.9.0" rustc_abi = { path = "../rustc_abi" } @@ -23,12 +23,14 @@ rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } termize = "0.2" -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [target.'cfg(unix)'.dependencies] +# FIXME: Remove this pin once this rustix issue is resolved +# https://github.com/bytecodealliance/rustix/issues/1496 # tidy-alphabetical-start -libc = "0.2" +libc = "=0.2.174" # tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] diff --git a/compiler/rustc_session/messages.ftl b/compiler/rustc_session/messages.ftl index 528c52eace7..b46e8ab4fdc 100644 --- a/compiler/rustc_session/messages.ftl +++ b/compiler/rustc_session/messages.ftl @@ -49,6 +49,8 @@ session_hexadecimal_float_literal_not_supported = hexadecimal float literal is n session_incompatible_linker_flavor = linker flavor `{$flavor}` is incompatible with the current target .note = compatible flavors are: {$compatible_list} +session_indirect_branch_cs_prefix_requires_x86_or_x86_64 = `-Zindirect-branch-cs-prefix` is only supported on x86 and x86_64 + session_instrumentation_not_supported = {$us} instrumentation is not supported for this target session_int_literal_too_large = integer literal is too large @@ -129,6 +131,10 @@ session_target_small_data_threshold_not_supported = `-Z small-data-threshold` is session_target_stack_protector_not_supported = `-Z stack-protector={$stack_protector}` is not supported for target 
{$target_triple} and will be ignored +session_unexpected_builtin_cfg = unexpected `--cfg {$cfg}` flag + .controlled_by = config `{$cfg_name}` is only supposed to be controlled by `{$controlled_by}` + .incoherent = manually setting a built-in cfg can and does create incoherent behaviors + session_unleashed_feature_help_named = skipping check for `{$gate}` feature session_unleashed_feature_help_unnamed = skipping check that does not even have a feature gate diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index c665c85d1fe..9793d8091e2 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -16,10 +16,11 @@ use std::{cmp, fmt, fs, iter}; use externs::{ExternOpt, split_extern_opt}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; -use rustc_data_structures::stable_hasher::{StableOrd, ToStableHashKey}; +use rustc_data_structures::stable_hasher::{StableHasher, StableOrd, ToStableHashKey}; use rustc_errors::emitter::HumanReadableErrorType; use rustc_errors::{ColorConfig, DiagArgValue, DiagCtxtFlags, IntoDiagArg}; use rustc_feature::UnstableFeatures; +use rustc_hashes::Hash64; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::edition::{DEFAULT_EDITION, EDITION_NAME_LIST, Edition, LATEST_STABLE_EDITION}; use rustc_span::source_map::FilePathMapping; @@ -1195,7 +1196,25 @@ pub struct OutputFilenames { pub const RLINK_EXT: &str = "rlink"; pub const RUST_CGU_EXT: &str = "rcgu"; pub const DWARF_OBJECT_EXT: &str = "dwo"; +pub const MAX_FILENAME_LENGTH: usize = 143; // ecryptfs limits filenames to 143 bytes see #49914 +/// Ensure the filename is not too long, as some filesystems have a limit. +/// If the filename is too long, hash part of it and append the hash to the filename. +/// This is a workaround for long crate names generating overly long filenames. +fn maybe_strip_file_name(mut path: PathBuf) -> PathBuf { + if path.file_name().map_or(0, |name| name.len()) > MAX_FILENAME_LENGTH { + let filename = path.file_name().unwrap().to_string_lossy(); + let hash_len = 64 / 4; // Hash64 is 64 bits encoded in hex + let stripped_len = filename.len() - MAX_FILENAME_LENGTH + hash_len; + + let mut hasher = StableHasher::new(); + filename[..stripped_len].hash(&mut hasher); + let hash = hasher.finish::<Hash64>(); + + path.set_file_name(format!("{:x}-{}", hash, &filename[stripped_len..])); + } + path +} impl OutputFilenames { pub fn new( out_directory: PathBuf, @@ -1288,7 +1307,7 @@ impl OutputFilenames { } let temps_directory = self.temps_directory.as_ref().unwrap_or(&self.out_directory); - self.with_directory_and_extension(temps_directory, &extension) + maybe_strip_file_name(self.with_directory_and_extension(temps_directory, &extension)) } pub fn temp_path_for_diagnostic(&self, ext: &str) -> PathBuf { @@ -2828,16 +2847,27 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M // This is the location used by the `rustc-dev` `rustup` component. real_source_base_dir("lib/rustlib/rustc-src/rust", "compiler/rustc/src/main.rs"); - let mut search_paths = vec![]; - for s in &matches.opt_strs("L") { - search_paths.push(SearchPath::from_cli_opt( - sysroot.path(), - &target_triple, - early_dcx, - s, - unstable_opts.unstable_options, - )); - } + // We eagerly scan all files in each passed -L path. If the same directory is passed multiple + // times, and the directory contains a lot of files, this can take a lot of time. 
+ // So we remove -L paths that were passed multiple times, and keep only the first occurrence. + // We still have to keep the original order of the -L arguments. + let search_paths: Vec<SearchPath> = { + let mut seen_search_paths = FxHashSet::default(); + let search_path_matches: Vec<String> = matches.opt_strs("L"); + search_path_matches + .iter() + .filter(|p| seen_search_paths.insert(*p)) + .map(|path| { + SearchPath::from_cli_opt( + sysroot.path(), + &target_triple, + early_dcx, + &path, + unstable_opts.unstable_options, + ) + }) + .collect() + }; let working_dir = std::env::current_dir().unwrap_or_else(|e| { early_dcx.early_fatal(format!("Current directory is invalid: {e}")); diff --git a/compiler/rustc_session/src/config/cfg.rs b/compiler/rustc_session/src/config/cfg.rs index 62891eb4f26..8f63ce6f0ae 100644 --- a/compiler/rustc_session/src/config/cfg.rs +++ b/compiler/rustc_session/src/config/cfg.rs @@ -26,13 +26,12 @@ use std::iter; use rustc_abi::Align; use rustc_ast::ast; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; -use rustc_lint_defs::BuiltinLintDiag; use rustc_lint_defs::builtin::EXPLICIT_BUILTIN_CFGS_IN_FLAGS; use rustc_span::{Symbol, sym}; use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, Target}; -use crate::Session; use crate::config::{CrateType, FmtDebug}; +use crate::{Session, errors}; /// The parsed `--cfg` options that define the compilation environment of the /// crate, used to drive conditional compilation. @@ -99,7 +98,7 @@ pub(crate) fn disallow_cfgs(sess: &Session, user_cfgs: &Cfg) { EXPLICIT_BUILTIN_CFGS_IN_FLAGS, None, ast::CRATE_NODE_ID, - BuiltinLintDiag::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by }, + errors::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by }.into(), ) }; diff --git a/compiler/rustc_session/src/config/native_libs.rs b/compiler/rustc_session/src/config/native_libs.rs index f1f0aeb5e59..50a0593f887 100644 --- a/compiler/rustc_session/src/config/native_libs.rs +++ b/compiler/rustc_session/src/config/native_libs.rs @@ -5,10 +5,11 @@ //! which have their own parser in `rustc_metadata`.) use rustc_feature::UnstableFeatures; +use rustc_hir::attrs::NativeLibKind; use crate::EarlyDiagCtxt; use crate::config::UnstableOptions; -use crate::utils::{NativeLib, NativeLibKind}; +use crate::utils::NativeLib; #[cfg(test)] mod tests; diff --git a/compiler/rustc_session/src/cstore.rs b/compiler/rustc_session/src/cstore.rs index 4cfc745dec2..30f6256a75e 100644 --- a/compiler/rustc_session/src/cstore.rs +++ b/compiler/rustc_session/src/cstore.rs @@ -6,8 +6,8 @@ use std::any::Any; use std::path::PathBuf; use rustc_abi::ExternAbi; -use rustc_ast as ast; use rustc_data_structures::sync::{self, AppendOnlyIndexVec, FreezeLock}; +use rustc_hir::attrs::{CfgEntry, NativeLibKind, PeImportNameType}; use rustc_hir::def_id::{ CrateNum, DefId, LOCAL_CRATE, LocalDefId, StableCrateId, StableCrateIdMap, }; @@ -16,7 +16,6 @@ use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::{Span, Symbol}; use crate::search_paths::PathKind; -use crate::utils::NativeLibKind; // lonely orphan structs and enums looking for a better home @@ -72,7 +71,7 @@ pub struct NativeLib { pub name: Symbol, /// If packed_bundled_libs enabled, actual filename of library is stored. 
pub filename: Option<Symbol>, - pub cfg: Option<ast::MetaItemInner>, + pub cfg: Option<CfgEntry>, pub foreign_module: Option<DefId>, pub verbatim: Option<bool>, pub dll_imports: Vec<DllImport>, @@ -88,25 +87,6 @@ impl NativeLib { } } -/// Different ways that the PE Format can decorate a symbol name. -/// From <https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-name-type> -#[derive(Copy, Clone, Debug, Encodable, Decodable, HashStable_Generic, PartialEq, Eq)] -pub enum PeImportNameType { - /// IMPORT_ORDINAL - /// Uses the ordinal (i.e., a number) rather than the name. - Ordinal(u16), - /// Same as IMPORT_NAME - /// Name is decorated with all prefixes and suffixes. - Decorated, - /// Same as IMPORT_NAME_NOPREFIX - /// Prefix (e.g., the leading `_` or `@`) is skipped, but suffix is kept. - NoPrefix, - /// Same as IMPORT_NAME_UNDECORATE - /// Prefix (e.g., the leading `_` or `@`) and suffix (the first `@` and all - /// trailing characters) are skipped. - Undecorated, -} - #[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)] pub struct DllImport { pub name: Symbol, diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs index bf95014843d..34da54a20bf 100644 --- a/compiler/rustc_session/src/errors.rs +++ b/compiler/rustc_session/src/errors.rs @@ -7,7 +7,7 @@ use rustc_errors::{ Diag, DiagCtxtHandle, DiagMessage, Diagnostic, EmissionGuarantee, ErrorGuaranteed, Level, MultiSpan, }; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_span::{Span, Symbol}; use rustc_target::spec::{SplitDebuginfo, StackProtector, TargetTuple}; @@ -472,6 +472,10 @@ pub(crate) struct FunctionReturnRequiresX86OrX8664; pub(crate) struct FunctionReturnThunkExternRequiresNonLargeCodeModel; #[derive(Diagnostic)] +#[diag(session_indirect_branch_cs_prefix_requires_x86_or_x86_64)] +pub(crate) struct IndirectBranchCsPrefixRequiresX86OrX8664; + +#[derive(Diagnostic)] #[diag(session_unsupported_regparm)] pub(crate) struct UnsupportedRegparm { pub(crate) regparm: u32, @@ -501,3 +505,13 @@ pub(crate) struct SoftFloatIgnored; #[note] #[note(session_soft_float_deprecated_issue)] pub(crate) struct SoftFloatDeprecated; + +#[derive(LintDiagnostic)] +#[diag(session_unexpected_builtin_cfg)] +#[note(session_controlled_by)] +#[note(session_incoherent)] +pub(crate) struct UnexpectedBuiltinCfg { + pub(crate) cfg: String, + pub(crate) cfg_name: Symbol, + pub(crate) controlled_by: &'static str, +} diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 7c18fd89098..6d5be2d92cd 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -2165,6 +2165,8 @@ options! { "hash algorithm of source files used to check freshness in cargo (`blake3` or `sha256`)"), codegen_backend: Option<String> = (None, parse_opt_string, [TRACKED], "the backend to use"), + codegen_source_order: bool = (false, parse_bool, [UNTRACKED], + "emit mono items in the order of spans in source files (default: no)"), contract_checks: Option<bool> = (None, parse_opt_bool, [TRACKED], "emit runtime checks for contract pre- and post-conditions (default: no)"), coverage_options: CoverageOptions = (CoverageOptions::default(), parse_coverage_options, [TRACKED], @@ -2293,6 +2295,8 @@ options! 
{ - hashes of green query instances - hash collisions of query keys - hash collisions when creating dep-nodes"), + indirect_branch_cs_prefix: bool = (false, parse_bool, [TRACKED TARGET_MODIFIER], + "add `cs` prefix to `call` and `jmp` to indirect thunks (default: no)"), inline_llvm: bool = (true, parse_bool, [TRACKED], "enable LLVM inlining (default: yes)"), inline_mir: Option<bool> = (None, parse_opt_bool, [TRACKED], diff --git a/compiler/rustc_session/src/parse.rs b/compiler/rustc_session/src/parse.rs index 426480f0dba..9048c51d5b4 100644 --- a/compiler/rustc_session/src/parse.rs +++ b/compiler/rustc_session/src/parse.rs @@ -11,8 +11,8 @@ use rustc_data_structures::sync::{AppendOnlyVec, Lock}; use rustc_errors::emitter::{FatalOnlyEmitter, HumanEmitter, stderr_destination}; use rustc_errors::translation::Translator; use rustc_errors::{ - ColorConfig, Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, EmissionGuarantee, MultiSpan, - StashKey, + BufferedEarlyLint, ColorConfig, DecorateDiagCompat, Diag, DiagCtxt, DiagCtxtHandle, + DiagMessage, EmissionGuarantee, MultiSpan, StashKey, }; use rustc_feature::{GateIssue, UnstableFeatures, find_feature_issue}; use rustc_span::edition::Edition; @@ -27,7 +27,7 @@ use crate::errors::{ FeatureDiagnosticSuggestion, FeatureGateError, SuggestUpgradeCompiler, }; use crate::lint::builtin::UNSTABLE_SYNTAX_PRE_EXPANSION; -use crate::lint::{BufferedEarlyLint, BuiltinLintDiag, Lint, LintId}; +use crate::lint::{Lint, LintId}; /// Collected spans during parsing for places where a certain feature was /// used and should be feature gated accordingly in `check_crate`. @@ -342,17 +342,17 @@ impl ParseSess { lint: &'static Lint, span: impl Into<MultiSpan>, node_id: NodeId, - diagnostic: BuiltinLintDiag, + diagnostic: impl Into<DecorateDiagCompat>, ) { - self.opt_span_buffer_lint(lint, Some(span.into()), node_id, diagnostic) + self.opt_span_buffer_lint(lint, Some(span.into()), node_id, diagnostic.into()) } - pub fn opt_span_buffer_lint( + pub(crate) fn opt_span_buffer_lint( &self, lint: &'static Lint, span: Option<MultiSpan>, node_id: NodeId, - diagnostic: BuiltinLintDiag, + diagnostic: DecorateDiagCompat, ) { self.buffered_lints.with_lock(|buffered_lints| { buffered_lints.push(BufferedEarlyLint { diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index c6956cf5f23..96767d05439 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -7,6 +7,7 @@ use std::sync::atomic::AtomicBool; use std::{env, fmt, io}; use rand::{RngCore, rng}; +use rustc_ast::NodeId; use rustc_data_structures::base_n::{CASE_INSENSITIVE, ToBaseN}; use rustc_data_structures::flock; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; @@ -22,7 +23,7 @@ use rustc_errors::timings::TimingSectionHandler; use rustc_errors::translation::Translator; use rustc_errors::{ Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, Diagnostic, ErrorGuaranteed, FatalAbort, - TerminalUrl, fallback_fluent_bundle, + LintEmitter, TerminalUrl, fallback_fluent_bundle, }; use rustc_macros::HashStable_Generic; pub use rustc_span::def_id::StableCrateId; @@ -44,6 +45,7 @@ use crate::config::{ SwitchWithOptPath, }; use crate::filesearch::FileSearch; +use crate::lint::LintId; use crate::parse::{ParseSess, add_feature_diagnostics}; use crate::search_paths::SearchPath; use crate::{errors, filesearch, lint}; @@ -139,7 +141,10 @@ pub struct CompilerIO { pub temps_dir: Option<PathBuf>, } -pub trait LintStoreMarker: Any + DynSync + DynSend {} +pub trait DynLintStore: 
Any + DynSync + DynSend { + /// Provides a way to access lint groups without depending on `rustc_lint` + fn lint_groups_iter(&self) -> Box<dyn Iterator<Item = LintGroup> + '_>; +} /// Represents the data associated with a compilation /// session for a single crate. @@ -164,7 +169,7 @@ pub struct Session { pub code_stats: CodeStats, /// This only ever stores a `LintStore` but we don't want a dependency on that type here. - pub lint_store: Option<Arc<dyn LintStoreMarker>>, + pub lint_store: Option<Arc<dyn DynLintStore>>, /// Cap lint level specified by a driver specifically. pub driver_lint_caps: FxHashMap<lint::LintId, lint::Level>, @@ -219,6 +224,20 @@ pub struct Session { pub invocation_temp: Option<String>, } +impl LintEmitter for &'_ Session { + type Id = NodeId; + + fn emit_node_span_lint( + self, + lint: &'static rustc_lint_defs::Lint, + node_id: Self::Id, + span: impl Into<rustc_errors::MultiSpan>, + decorator: impl for<'a> rustc_errors::LintDiagnostic<'a, ()> + DynSend + 'static, + ) { + self.psess.buffer_lint(lint, span, node_id, decorator); + } +} + #[derive(Clone, Copy)] pub enum CodegenUnits { /// Specified by the user. In this case we try fairly hard to produce the @@ -240,6 +259,12 @@ impl CodegenUnits { } } +pub struct LintGroup { + pub name: &'static str, + pub lints: Vec<LintId>, + pub is_externally_loaded: bool, +} + impl Session { pub fn miri_unleashed_feature(&self, span: Span, feature_gate: Option<Symbol>) { self.miri_unleashed_features.lock().push((span, feature_gate)); @@ -596,6 +621,13 @@ impl Session { (&*self.target.staticlib_prefix, &*self.target.staticlib_suffix) } } + + pub fn lint_groups_iter(&self) -> Box<dyn Iterator<Item = LintGroup> + '_> { + match self.lint_store { + Some(ref lint_store) => lint_store.lint_groups_iter(), + None => Box::new(std::iter::empty()), + } + } } // JUSTIFICATION: defn of the suggested wrapper fns @@ -1368,6 +1400,12 @@ fn validate_commandline_args_with_session_available(sess: &Session) { } } + if sess.opts.unstable_opts.indirect_branch_cs_prefix { + if sess.target.arch != "x86" && sess.target.arch != "x86_64" { + sess.dcx().emit_err(errors::IndirectBranchCsPrefixRequiresX86OrX8664); + } + } + if let Some(regparm) = sess.opts.unstable_opts.regparm { if regparm > 3 { sess.dcx().emit_err(errors::UnsupportedRegparm { regparm }); diff --git a/compiler/rustc_session/src/utils.rs b/compiler/rustc_session/src/utils.rs index e9ddd66b5e8..c64d9bc1efe 100644 --- a/compiler/rustc_session/src/utils.rs +++ b/compiler/rustc_session/src/utils.rs @@ -3,6 +3,7 @@ use std::sync::OnceLock; use rustc_data_structures::profiling::VerboseTimingGuard; use rustc_fs_util::try_canonicalize; +use rustc_hir::attrs::NativeLibKind; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use crate::session::Session; @@ -17,69 +18,6 @@ impl Session { } } -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] -#[derive(HashStable_Generic)] -pub enum NativeLibKind { - /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC) - Static { - /// Whether to bundle objects from static library into produced rlib - bundle: Option<bool>, - /// Whether to link static library without throwing any object files away - whole_archive: Option<bool>, - }, - /// Dynamic library (e.g. `libfoo.so` on Linux) - /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC). 
- Dylib { - /// Whether the dynamic library will be linked only if it satisfies some undefined symbols - as_needed: Option<bool>, - }, - /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library. - /// On Linux, it refers to a generated shared library stub. - RawDylib, - /// A macOS-specific kind of dynamic libraries. - Framework { - /// Whether the framework will be linked only if it satisfies some undefined symbols - as_needed: Option<bool>, - }, - /// Argument which is passed to linker, relative order with libraries and other arguments - /// is preserved - LinkArg, - - /// Module imported from WebAssembly - WasmImportModule, - - /// The library kind wasn't specified, `Dylib` is currently used as a default. - Unspecified, -} - -impl NativeLibKind { - pub fn has_modifiers(&self) -> bool { - match self { - NativeLibKind::Static { bundle, whole_archive } => { - bundle.is_some() || whole_archive.is_some() - } - NativeLibKind::Dylib { as_needed } | NativeLibKind::Framework { as_needed } => { - as_needed.is_some() - } - NativeLibKind::RawDylib - | NativeLibKind::Unspecified - | NativeLibKind::LinkArg - | NativeLibKind::WasmImportModule => false, - } - } - - pub fn is_statically_included(&self) -> bool { - matches!(self, NativeLibKind::Static { .. }) - } - - pub fn is_dllimport(&self) -> bool { - matches!( - self, - NativeLibKind::Dylib { .. } | NativeLibKind::RawDylib | NativeLibKind::Unspecified - ) - } -} - #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] #[derive(HashStable_Generic)] pub struct NativeLib { diff --git a/compiler/rustc_span/Cargo.toml b/compiler/rustc_span/Cargo.toml index 43a2d692577..2e45177e3b7 100644 --- a/compiler/rustc_span/Cargo.toml +++ b/compiler/rustc_span/Cargo.toml @@ -19,6 +19,6 @@ rustc_serialize = { path = "../rustc_serialize" } scoped-tls = "1.0" sha1 = "0.10.0" sha2 = "0.10.1" -tracing = "0.1" +tracing.workspace = true unicode-width = "0.2.0" # tidy-alphabetical-end diff --git a/compiler/rustc_span/src/caching_source_map_view.rs b/compiler/rustc_span/src/caching_source_map_view.rs index a887b50ec1e..41cfaa3ee34 100644 --- a/compiler/rustc_span/src/caching_source_map_view.rs +++ b/compiler/rustc_span/src/caching_source_map_view.rs @@ -123,27 +123,25 @@ impl<'sm> CachingSourceMapView<'sm> { if lo_cache_idx != -1 && hi_cache_idx != -1 { // Cache hit for span lo and hi. Check if they belong to the same file. - let result = { - let lo = &self.line_cache[lo_cache_idx as usize]; - let hi = &self.line_cache[hi_cache_idx as usize]; + let lo_file_index = self.line_cache[lo_cache_idx as usize].file_index; + let hi_file_index = self.line_cache[hi_cache_idx as usize].file_index; - if lo.file_index != hi.file_index { - return None; - } - - ( - lo.file.stable_id, - lo.line_number, - span_data.lo - lo.line.start, - hi.line_number, - span_data.hi - hi.line.start, - ) - }; + if lo_file_index != hi_file_index { + return None; + } self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); - return Some(result); + let lo = &self.line_cache[lo_cache_idx as usize]; + let hi = &self.line_cache[hi_cache_idx as usize]; + return Some(( + lo.file.stable_id, + lo.line_number, + span_data.lo - lo.line.start, + hi.line_number, + span_data.hi - hi.line.start, + )); } // No cache hit or cache hit for only one of span lo and hi. 
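The caching_source_map_view.rs hunk above reorders the cache-hit path so the plain file-index values are read and compared before the two mutable `touch` calls, and the shared borrows used to build the result tuple are taken only afterwards. A minimal standalone sketch of that borrow-ordering pattern, using hypothetical `CacheEntry`/`span_lines` names rather than code from this patch:

// Sketch only: read copyable fields first, do mutable updates, then re-borrow to build the result.
struct CacheEntry {
    file_index: usize,
    line_number: usize,
    time_stamp: usize,
}

impl CacheEntry {
    fn touch(&mut self, time_stamp: usize) {
        self.time_stamp = time_stamp;
    }
}

fn span_lines(
    cache: &mut [CacheEntry],
    lo_idx: usize,
    hi_idx: usize,
    time_stamp: usize,
) -> Option<(usize, usize)> {
    // Copy out the plain data; no borrow of `cache` is held across the check.
    let lo_file = cache[lo_idx].file_index;
    let hi_file = cache[hi_idx].file_index;
    if lo_file != hi_file {
        return None;
    }

    // Mutable accesses happen while no shared borrow is outstanding.
    cache[lo_idx].touch(time_stamp);
    cache[hi_idx].touch(time_stamp);

    // Re-borrow immutably only to assemble the result.
    let lo = &cache[lo_idx];
    let hi = &cache[hi_idx];
    Some((lo.line_number, hi.line_number))
}

fn main() {
    let mut cache = vec![
        CacheEntry { file_index: 0, line_number: 3, time_stamp: 0 },
        CacheEntry { file_index: 0, line_number: 7, time_stamp: 0 },
    ];
    assert_eq!(span_lines(&mut cache, 0, 1, 42), Some((3, 7)));
    assert_eq!(cache[0].time_stamp, 42);
}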
diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs index d9315149798..166842e374b 100644 --- a/compiler/rustc_span/src/source_map.rs +++ b/compiler/rustc_span/src/source_map.rs @@ -9,6 +9,7 @@ //! within the `SourceMap`, which upon request can be converted to line and column //! information, source code snippets, etc. +use std::fs::File; use std::io::{self, BorrowedBuf, Read}; use std::{fs, path}; @@ -115,13 +116,18 @@ impl FileLoader for RealFileLoader { } fn read_file(&self, path: &Path) -> io::Result<String> { - if path.metadata().is_ok_and(|metadata| metadata.len() > SourceFile::MAX_FILE_SIZE.into()) { + let mut file = File::open(path)?; + let size = file.metadata().map(|metadata| metadata.len()).ok().unwrap_or(0); + + if size > SourceFile::MAX_FILE_SIZE.into() { return Err(io::Error::other(format!( "text files larger than {} bytes are unsupported", SourceFile::MAX_FILE_SIZE ))); } - fs::read_to_string(path) + let mut contents = String::new(); + file.read_to_string(&mut contents)?; + Ok(contents) } fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>> { @@ -911,12 +917,13 @@ impl SourceMap { /// Returns a new span representing just the last character of this span. pub fn end_point(&self, sp: Span) -> Span { - let pos = sp.hi().0; + let sp = sp.data(); + let pos = sp.hi.0; let width = self.find_width_of_character_at_span(sp, false); let corrected_end_position = pos.checked_sub(width).unwrap_or(pos); - let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0)); + let end_point = BytePos(cmp::max(corrected_end_position, sp.lo.0)); sp.with_lo(end_point) } @@ -930,8 +937,9 @@ impl SourceMap { if sp.is_dummy() { return sp; } - let start_of_next_point = sp.hi().0; + let sp = sp.data(); + let start_of_next_point = sp.hi.0; let width = self.find_width_of_character_at_span(sp, true); // If the width is 1, then the next span should only contain the next char besides current ending. // However, in the case of a multibyte character, where the width != 1, the next span should @@ -940,7 +948,7 @@ impl SourceMap { start_of_next_point.checked_add(width).unwrap_or(start_of_next_point); let end_of_next_point = BytePos(cmp::max(start_of_next_point + 1, end_of_next_point)); - Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt(), None) + Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt, None) } /// Check whether span is followed by some specified expected string in limit scope @@ -963,9 +971,7 @@ impl SourceMap { /// Finds the width of the character, either before or after the end of provided span, /// depending on the `forwards` parameter. #[instrument(skip(self, sp))] - fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { - let sp = sp.data(); - + fn find_width_of_character_at_span(&self, sp: SpanData, forwards: bool) -> u32 { if sp.lo == sp.hi && !forwards { debug!("early return empty span"); return 1; diff --git a/compiler/rustc_span/src/span_encoding.rs b/compiler/rustc_span/src/span_encoding.rs index a4a47dc99b0..e34efa784de 100644 --- a/compiler/rustc_span/src/span_encoding.rs +++ b/compiler/rustc_span/src/span_encoding.rs @@ -74,9 +74,8 @@ use crate::{BytePos, SPAN_TRACK, SpanData}; /// because `parent` isn't currently used by default. /// /// In order to reliably use parented spans in incremental compilation, -/// the dependency to the parent definition's span. This is performed -/// using the callback `SPAN_TRACK` to access the query engine. 
-/// +/// accesses to `lo` and `hi` must introduce a dependency to the parent definition's span. +/// This is performed using the callback `SPAN_TRACK` to access the query engine. #[derive(Clone, Copy, Eq, PartialEq, Hash)] #[rustc_pass_by_value] pub struct Span { diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index acbed7a9eed..77260d07c99 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -392,6 +392,7 @@ symbols! { __D, __H, __S, + __T, __awaitee, __try_var, _t, @@ -446,6 +447,7 @@ symbols! { altivec, alu32, always, + analysis, and, and_then, anon, @@ -464,6 +466,7 @@ symbols! { arm, arm_target_feature, array, + as_dash_needed: "as-needed", as_ptr, as_ref, as_str, @@ -538,13 +541,16 @@ symbols! { att_syntax, attr, attr_literals, + attribute, attributes, audit_that, augmented_assignments, auto_traits, + autodiff, autodiff_forward, autodiff_reverse, automatically_derived, + available_externally, avx, avx10_target_feature, avx512_target_feature, @@ -586,7 +592,9 @@ symbols! { btreemap_contains_key, btreemap_insert, btreeset_iter, + built, builtin_syntax, + bundle, c, c_dash_variadic, c_str, @@ -676,6 +684,7 @@ symbols! { cold_path, collapse_debuginfo, column, + common, compare_bytes, compare_exchange, compare_exchange_weak, @@ -743,6 +752,7 @@ symbols! { contracts_ensures, contracts_internals, contracts_requires, + convert, convert_identity, copy, copy_closures, @@ -810,6 +820,7 @@ symbols! { decl_macro, declare_lint_pass, decode, + decorated, default_alloc_error_handler, default_field_values, default_fn, @@ -844,11 +855,13 @@ symbols! { derive_const, derive_const_issue: "118304", derive_default_enum, + derive_from, derive_smart_pointer, destruct, destructuring_assignment, diagnostic, diagnostic_namespace, + dialect, direct, discriminant_kind, discriminant_type, @@ -892,6 +905,7 @@ symbols! { dynamic_no_pic: "dynamic-no-pic", e, edition_panic, + effective_target_features, effects, eh_catch_typeinfo, eh_personality, @@ -956,6 +970,7 @@ symbols! { extern_prelude, extern_system_varargs, extern_types, + extern_weak, external, external_doc, f, @@ -1051,6 +1066,7 @@ symbols! { fn_ptr_addr, fn_ptr_trait, forbid, + force_target_feature, forget, format, format_args, @@ -1063,6 +1079,7 @@ symbols! { format_macro, format_placeholder, format_unsafe_arg, + framework, freeze, freeze_impls, freg, @@ -1204,6 +1221,7 @@ symbols! { infer_static_outlives_requirements, inherent_associated_types, inherit, + initial, inlateout, inline, inline_const, @@ -1213,6 +1231,7 @@ symbols! { instruction_set, integer_: "integer", // underscore to avoid clashing with the function `sym::integer` below integral, + internal, internal_features, into_async_iter_into_iter, into_future, @@ -1249,6 +1268,7 @@ symbols! { iterator, iterator_collect_fn, kcfi, + kernel_address, keylocker_x86, keyword, kind, @@ -1280,6 +1300,7 @@ symbols! { link_arg_attribute, link_args, link_cfg, + link_dash_arg: "link-arg", link_llvm_intrinsics, link_name, link_ordinal, @@ -1287,6 +1308,8 @@ symbols! { linkage, linker, linker_messages, + linkonce, + linkonce_odr, lint_reasons, literal, load, @@ -1315,6 +1338,7 @@ symbols! { macro_attr, macro_attributes_in_derive_output, macro_concat, + macro_derive, macro_escape, macro_export, macro_lifetime_matcher, @@ -1510,6 +1534,7 @@ symbols! { noop_method_borrow, noop_method_clone, noop_method_deref, + noprefix, noreturn, nostack, not, @@ -1536,6 +1561,7 @@ symbols! 
{ opt_out_copy, optimize, optimize_attribute, + optimized, optin_builtin_traits, option, option_env, @@ -1583,6 +1609,7 @@ symbols! { panic_const_shl_overflow, panic_const_shr_overflow, panic_const_sub_overflow, + panic_display, panic_fmt, panic_handler, panic_impl, @@ -1617,6 +1644,7 @@ symbols! { pattern_types, permissions_from_mode, phantom_data, + phase, pic, pie, pin, @@ -1633,6 +1661,7 @@ symbols! { poll, poll_next, position, + post_cleanup: "post-cleanup", post_dash_lto: "post-lto", postfix_match, powerpc_target_feature, @@ -1719,6 +1748,7 @@ symbols! { quote, range_inclusive_new, range_step, + raw_dash_dylib: "raw-dylib", raw_dylib, raw_dylib_elf, raw_eq, @@ -1734,6 +1764,7 @@ symbols! { readonly, realloc, reason, + reborrow, receiver, receiver_target, recursion_limit, @@ -1796,6 +1827,7 @@ symbols! { roundf128, rt, rtm_target_feature, + runtime, rust, rust_2015, rust_2018, @@ -1816,6 +1848,7 @@ symbols! { rustc_align, rustc_allocator, rustc_allocator_zeroed, + rustc_allocator_zeroed_variant, rustc_allow_const_fn_unstable, rustc_allow_incoherent_impl, rustc_allowed_through_unstable_modules, @@ -1828,7 +1861,6 @@ symbols! { rustc_coherence_is_core, rustc_coinductive, rustc_confusables, - rustc_const_panic_str, rustc_const_stable, rustc_const_stable_indirect, rustc_const_unstable, @@ -2253,6 +2285,7 @@ symbols! { unchecked_shl, unchecked_shr, unchecked_sub, + undecorated, underscore_const_names, underscore_imports, underscore_lifetimes, @@ -2324,6 +2357,7 @@ symbols! { va_start, val, validity, + value, values, var, variant_count, @@ -2340,6 +2374,7 @@ symbols! { vecdeque_iter, vecdeque_reserve, vector, + verbatim, version, vfp2, vis, @@ -2360,8 +2395,11 @@ symbols! { wasm_abi, wasm_import_module, wasm_target_feature, + weak, + weak_odr, where_clause_attrs, while_let, + whole_dash_archive: "whole-archive", width, windows, windows_subsystem, @@ -2392,9 +2430,11 @@ symbols! { yield_expr, ymm_reg, yreg, + zca, zfh, zfhmin, zmm_reg, + ztso, // tidy-alphabetical-end } } @@ -2509,10 +2549,16 @@ impl fmt::Debug for Ident { /// except that AST identifiers don't keep the rawness flag, so we have to guess it. impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&IdentPrinter::new(self.name, self.is_raw_guess(), None), f) + fmt::Display::fmt(&IdentPrinter::new(self.name, self.guess_print_mode(), None), f) } } +pub enum IdentPrintMode { + Normal, + RawIdent, + RawLifetime, +} + /// The most general type to print identifiers. /// /// AST pretty-printer is used as a fallback for turning AST structures into token streams for @@ -2528,7 +2574,7 @@ impl fmt::Display for Ident { /// done for a token stream or a single token. pub struct IdentPrinter { symbol: Symbol, - is_raw: bool, + mode: IdentPrintMode, /// Span used for retrieving the crate name to which `$crate` refers to, /// if this field is `None` then the `$crate` conversion doesn't happen. convert_dollar_crate: Option<Span>, @@ -2536,32 +2582,51 @@ pub struct IdentPrinter { impl IdentPrinter { /// The most general `IdentPrinter` constructor. Do not use this. 
- pub fn new(symbol: Symbol, is_raw: bool, convert_dollar_crate: Option<Span>) -> IdentPrinter { - IdentPrinter { symbol, is_raw, convert_dollar_crate } + pub fn new( + symbol: Symbol, + mode: IdentPrintMode, + convert_dollar_crate: Option<Span>, + ) -> IdentPrinter { + IdentPrinter { symbol, mode, convert_dollar_crate } } /// This implementation is supposed to be used when printing identifiers /// as a part of pretty-printing for larger AST pieces. /// Do not use this either. - pub fn for_ast_ident(ident: Ident, is_raw: bool) -> IdentPrinter { - IdentPrinter::new(ident.name, is_raw, Some(ident.span)) + pub fn for_ast_ident(ident: Ident, mode: IdentPrintMode) -> IdentPrinter { + IdentPrinter::new(ident.name, mode, Some(ident.span)) } } impl fmt::Display for IdentPrinter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.is_raw { - f.write_str("r#")?; - } else if self.symbol == kw::DollarCrate { - if let Some(span) = self.convert_dollar_crate { + let s = match self.mode { + IdentPrintMode::Normal + if self.symbol == kw::DollarCrate + && let Some(span) = self.convert_dollar_crate => + { let converted = span.ctxt().dollar_crate_name(); if !converted.is_path_segment_keyword() { f.write_str("::")?; } - return fmt::Display::fmt(&converted, f); + converted } - } - fmt::Display::fmt(&self.symbol, f) + IdentPrintMode::Normal => self.symbol, + IdentPrintMode::RawIdent => { + f.write_str("r#")?; + self.symbol + } + IdentPrintMode::RawLifetime => { + f.write_str("'r#")?; + let s = self + .symbol + .as_str() + .strip_prefix("'") + .expect("only lifetime idents should be passed with RawLifetime mode"); + Symbol::intern(s) + } + }; + s.fmt(f) } } @@ -2996,6 +3061,29 @@ impl Ident { self.name.can_be_raw() && self.is_reserved() } + /// Given the name of a lifetime without the first quote (`'`), + /// returns whether the lifetime name is reserved (therefore invalid) + pub fn is_reserved_lifetime(self) -> bool { + self.is_reserved() && ![kw::Underscore, kw::Static].contains(&self.name) + } + + pub fn is_raw_lifetime_guess(self) -> bool { + let name_without_apostrophe = self.without_first_quote(); + name_without_apostrophe.name != self.name + && name_without_apostrophe.name.can_be_raw() + && name_without_apostrophe.is_reserved_lifetime() + } + + pub fn guess_print_mode(self) -> IdentPrintMode { + if self.is_raw_lifetime_guess() { + IdentPrintMode::RawLifetime + } else if self.is_raw_guess() { + IdentPrintMode::RawIdent + } else { + IdentPrintMode::Normal + } + } + /// Whether this would be the identifier for a tuple field like `self.0`, as /// opposed to a named field like `self.thing`. 
pub fn is_numeric(self) -> bool { diff --git a/compiler/rustc_symbol_mangling/Cargo.toml b/compiler/rustc_symbol_mangling/Cargo.toml index 0df9c7682bf..48e02dabdde 100644 --- a/compiler/rustc_symbol_mangling/Cargo.toml +++ b/compiler/rustc_symbol_mangling/Cargo.toml @@ -15,5 +15,5 @@ rustc_hir = { path = "../rustc_hir" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index a7f64085bd9..4d9741f1d11 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -58,7 +58,7 @@ pub(super) fn mangle<'tcx>( let hash = get_symbol_hash(tcx, instance, instance_ty, instantiating_crate); - let mut p = SymbolPrinter { tcx, path: SymbolPath::new(), keep_within_component: false }; + let mut p = LegacySymbolMangler { tcx, path: SymbolPath::new(), keep_within_component: false }; p.print_def_path( def_id, if let ty::InstanceKind::DropGlue(_, _) @@ -213,13 +213,13 @@ impl SymbolPath { } } -struct SymbolPrinter<'tcx> { +struct LegacySymbolMangler<'tcx> { tcx: TyCtxt<'tcx>, path: SymbolPath, // When `true`, `finalize_pending_component` isn't used. - // This is needed when recursing into `path_qualified`, - // or `path_generic_args`, as any nested paths are + // This is needed when recursing into `print_path_with_qualified`, + // or `print_path_with_generic_args`, as any nested paths are // logically within one component. keep_within_component: bool, } @@ -228,14 +228,15 @@ struct SymbolPrinter<'tcx> { // `PrettyPrinter` aka pretty printing of e.g. types in paths, // symbol names should have their own printing machinery. -impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { +impl<'tcx> Printer<'tcx> for LegacySymbolMangler<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { // This might be reachable (via `pretty_print_dyn_existential`) even though - // `<Self As PrettyPrinter>::should_print_region` returns false. See #144994. + // `<Self As PrettyPrinter>::should_print_optional_region` returns false and + // `print_path_with_generic_args` filters out lifetimes. See #144994. Ok(()) } @@ -305,16 +306,17 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { Ok(()) } - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.write_str(self.tcx.crate_name(cnum).as_str())?; Ok(()) } - fn path_qualified( + + fn print_path_with_qualified( &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError> { - // Similar to `pretty_path_qualified`, but for the other + // Similar to `pretty_print_path_with_qualified`, but for the other // types that are printed as paths (see `print_type` above). match self_ty.kind() { ty::FnDef(..) 
@@ -327,17 +329,17 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { self.print_type(self_ty) } - _ => self.pretty_path_qualified(self_ty, trait_ref), + _ => self.pretty_print_path_with_qualified(self_ty, trait_ref), } } - fn path_append_impl( + fn print_path_with_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError> { - self.pretty_path_append_impl( + self.pretty_print_path_with_impl( |cx| { print_prefix(cx)?; @@ -354,7 +356,8 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { trait_ref, ) } - fn path_append( + + fn print_path_with_simple( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, @@ -377,7 +380,8 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { Ok(()) } - fn path_generic_args( + + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], @@ -386,7 +390,6 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { let args = args.iter().cloned().filter(|arg| !matches!(arg.kind(), GenericArgKind::Lifetime(_))); - if args.clone().next().is_some() { self.generic_delimiters(|cx| cx.comma_sep(args)) } else { @@ -455,8 +458,8 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { } } -impl<'tcx> PrettyPrinter<'tcx> for SymbolPrinter<'tcx> { - fn should_print_region(&self, _region: ty::Region<'_>) -> bool { +impl<'tcx> PrettyPrinter<'tcx> for LegacySymbolMangler<'tcx> { + fn should_print_optional_region(&self, _region: ty::Region<'_>) -> bool { false } @@ -491,7 +494,7 @@ impl<'tcx> PrettyPrinter<'tcx> for SymbolPrinter<'tcx> { } } -impl fmt::Write for SymbolPrinter<'_> { +impl fmt::Write for LegacySymbolMangler<'_> { fn write_str(&mut self, s: &str) -> fmt::Result { // Name sanitation. LLVM will happily accept identifiers with weird names, but // gas doesn't! diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs index 6bcb7f6e093..d97ee956525 100644 --- a/compiler/rustc_symbol_mangling/src/lib.rs +++ b/compiler/rustc_symbol_mangling/src/lib.rs @@ -193,67 +193,54 @@ fn compute_symbol_name<'tcx>( // defining crate. // Weak lang items automatically get #[rustc_std_internal_symbol] // applied by the code computing the CodegenFnAttrs. - // We are mangling all #[rustc_std_internal_symbol] items that don't - // also have #[no_mangle] as a combination of the rustc version and the - // unmangled linkage name. This is to ensure that if we link against a - // staticlib compiled by a different rustc version, we don't get symbol - // conflicts or even UB due to a different implementation/ABI. Rust - // staticlibs currently export all symbols, including those that are - // hidden in cdylibs. + // We are mangling all #[rustc_std_internal_symbol] items as a + // combination of the rustc version and the unmangled linkage name. + // This is to ensure that if we link against a staticlib compiled by a + // different rustc version, we don't get symbol conflicts or even UB + // due to a different implementation/ABI. Rust staticlibs currently + // export all symbols, including those that are hidden in cdylibs. // We are using the v0 symbol mangling scheme here as we need to be // consistent across all crates and in some contexts the legacy symbol // mangling scheme can't be used. 
For example both the GCC backend and // Rust-for-Linux don't support some of the characters used by the // legacy symbol mangling scheme. - let name = if tcx.is_foreign_item(def_id) { - if let Some(name) = attrs.link_name { name } else { tcx.item_name(def_id) } - } else { - if let Some(name) = attrs.export_name { name } else { tcx.item_name(def_id) } - }; + let name = if let Some(name) = attrs.symbol_name { name } else { tcx.item_name(def_id) }; - if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { - return name.to_string(); - } else { - return v0::mangle_internal_symbol(tcx, name.as_str()); - } + return v0::mangle_internal_symbol(tcx, name.as_str()); } - // Foreign items by default use no mangling for their symbol name. There's a - // few exceptions to this rule though: - // - // * This can be overridden with the `#[link_name]` attribute - // - // * On the wasm32 targets there is a bug (or feature) in LLD [1] where the - // same-named symbol when imported from different wasm modules will get - // hooked up incorrectly. As a result foreign symbols, on the wasm target, - // with a wasm import module, get mangled. Additionally our codegen will - // deduplicate symbols based purely on the symbol name, but for wasm this - // isn't quite right because the same-named symbol on wasm can come from - // different modules. For these reasons if `#[link(wasm_import_module)]` - // is present we mangle everything on wasm because the demangled form will - // show up in the `wasm-import-name` custom attribute in LLVM IR. - // - // * `#[rustc_std_internal_symbol]` mangles the symbol name in a special way - // both for exports and imports through foreign items. This is handled above. - // [1]: https://bugs.llvm.org/show_bug.cgi?id=44316 - if tcx.is_foreign_item(def_id) - && (!tcx.sess.target.is_like_wasm - || !tcx.wasm_import_module_map(def_id.krate).contains_key(&def_id)) - { - if let Some(name) = attrs.link_name { + let wasm_import_module_exception_force_mangling = { + // * On the wasm32 targets there is a bug (or feature) in LLD [1] where the + // same-named symbol when imported from different wasm modules will get + // hooked up incorrectly. As a result foreign symbols, on the wasm target, + // with a wasm import module, get mangled. Additionally our codegen will + // deduplicate symbols based purely on the symbol name, but for wasm this + // isn't quite right because the same-named symbol on wasm can come from + // different modules. For these reasons if `#[link(wasm_import_module)]` + // is present we mangle everything on wasm because the demangled form will + // show up in the `wasm-import-name` custom attribute in LLVM IR. + // + // [1]: https://bugs.llvm.org/show_bug.cgi?id=44316 + // + // So, on wasm if a foreign item loses its `#[no_mangle]`, it might *still* + // be mangled if we're forced to. Note: I don't like this. + // These kinds of exceptions should be added during the `codegen_attrs` query. + // However, we don't have the wasm import module map there yet. 
+ tcx.is_foreign_item(def_id) + && tcx.sess.target.is_like_wasm + && tcx.wasm_import_module_map(LOCAL_CRATE).contains_key(&def_id.into()) + }; + + if !wasm_import_module_exception_force_mangling { + if let Some(name) = attrs.symbol_name { + // Use provided name return name.to_string(); } - return tcx.item_name(def_id).to_string(); - } - - if let Some(name) = attrs.export_name { - // Use provided name - return name.to_string(); - } - if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { - // Don't mangle - return tcx.item_name(def_id).to_string(); + if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { + // Don't mangle + return tcx.item_name(def_id).to_string(); + } } // If we're dealing with an instance of a function that's inlined from diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index c2458ae814b..0cbd48ba08c 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -33,7 +33,7 @@ pub(super) fn mangle<'tcx>( let args = tcx.normalize_erasing_regions(ty::TypingEnv::fully_monomorphized(), instance.args); let prefix = "_R"; - let mut p: SymbolMangler<'_> = SymbolMangler { + let mut p: V0SymbolMangler<'_> = V0SymbolMangler { tcx, start_offset: prefix.len(), is_exportable, @@ -88,7 +88,7 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin } let prefix = "_R"; - let mut p: SymbolMangler<'_> = SymbolMangler { + let mut p: V0SymbolMangler<'_> = V0SymbolMangler { tcx, start_offset: prefix.len(), is_exportable: false, @@ -131,7 +131,7 @@ pub(super) fn mangle_typeid_for_trait_ref<'tcx>( trait_ref: ty::ExistentialTraitRef<'tcx>, ) -> String { // FIXME(flip1995): See comment in `mangle_typeid_for_fnabi`. - let mut p = SymbolMangler { + let mut p = V0SymbolMangler { tcx, start_offset: 0, is_exportable: false, @@ -159,7 +159,7 @@ struct BinderLevel { lifetime_depths: Range<u32>, } -struct SymbolMangler<'tcx> { +struct V0SymbolMangler<'tcx> { tcx: TyCtxt<'tcx>, binders: Vec<BinderLevel>, out: String, @@ -173,7 +173,7 @@ struct SymbolMangler<'tcx> { consts: FxHashMap<ty::Const<'tcx>, usize>, } -impl<'tcx> SymbolMangler<'tcx> { +impl<'tcx> V0SymbolMangler<'tcx> { fn push(&mut self, s: &str) { self.out.push_str(s); } @@ -272,7 +272,7 @@ impl<'tcx> SymbolMangler<'tcx> { } } -impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { +impl<'tcx> Printer<'tcx> for V0SymbolMangler<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } @@ -365,7 +365,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { // Encode impl generic params if the generic parameters contain non-region parameters // and this isn't an inherent impl. if impl_trait_ref.is_some() && args.iter().any(|a| a.has_non_region_param()) { - self.path_generic_args( + self.print_path_with_generic_args( |this| { this.path_append_ns( |p| p.print_def_path(parent_def_id, &[]), @@ -786,7 +786,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { None => { self.push("S"); for (field_def, field) in iter::zip(&variant_def.fields, fields) { - // HACK(eddyb) this mimics `path_append`, + // HACK(eddyb) this mimics `print_path_with_simple`, // instead of simply using `field_def.ident`, // just to be able to handle disambiguators. 
let disambiguated_field = @@ -819,7 +819,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { Ok(()) } - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.push("C"); if !self.is_exportable { let stable_crate_id = self.tcx.def_path_hash(cnum.as_def_id()).stable_crate_id(); @@ -830,7 +830,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { Ok(()) } - fn path_qualified( + fn print_path_with_qualified( &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, @@ -843,7 +843,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { self.print_def_path(trait_ref.def_id, trait_ref.args) } - fn path_append_impl( + fn print_path_with_impl( &mut self, _: impl FnOnce(&mut Self) -> Result<(), PrintError>, _: Ty<'tcx>, @@ -853,7 +853,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { unreachable!() } - fn path_append( + fn print_path_with_simple( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, @@ -873,7 +873,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { DefPathData::SyntheticCoroutineBody => 's', DefPathData::NestedStatic => 'n', - // These should never show up as `path_append` arguments. + // These should never show up as `print_path_with_simple` arguments. DefPathData::CrateRoot | DefPathData::Use | DefPathData::GlobalAsm @@ -896,7 +896,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { ) } - fn path_generic_args( + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], diff --git a/compiler/rustc_target/Cargo.toml b/compiler/rustc_target/Cargo.toml index 56932c24922..3c257bf38a5 100644 --- a/compiler/rustc_target/Cargo.toml +++ b/compiler/rustc_target/Cargo.toml @@ -5,9 +5,11 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true +object = { version = "0.37.0", default-features = false, features = ["elf", "macho"] } rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_error_messages = { path = "../rustc_error_messages" } rustc_fs_util = { path = "../rustc_fs_util" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } @@ -16,12 +18,6 @@ serde = "1.0.219" serde_derive = "1.0.219" serde_json = "1.0.59" serde_path_to_error = "0.1.17" -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end -[dependencies.object] -# tidy-alphabetical-start -default-features = false -features = ["elf", "macho"] -version = "0.37.0" -# tidy-alphabetical-end diff --git a/compiler/rustc_target/src/callconv/mod.rs b/compiler/rustc_target/src/callconv/mod.rs index 63e56744aec..7a7c63c475b 100644 --- a/compiler/rustc_target/src/callconv/mod.rs +++ b/compiler/rustc_target/src/callconv/mod.rs @@ -114,11 +114,12 @@ mod attr_impl { bitflags::bitflags! { impl ArgAttribute: u8 { const NoAlias = 1 << 1; - const NoCapture = 1 << 2; + const CapturesAddress = 1 << 2; const NonNull = 1 << 3; const ReadOnly = 1 << 4; const InReg = 1 << 5; const NoUndef = 1 << 6; + const CapturesReadOnly = 1 << 7; } } rustc_data_structures::external_bitflags_debug! { ArgAttribute } @@ -399,11 +400,11 @@ impl<'a, Ty> ArgAbi<'a, Ty> { let mut attrs = ArgAttributes::new(); // For non-immediate arguments the callee gets its own copy of - // the value on the stack, so there are no aliases. 
It's also - // program-invisible so can't possibly capture + // the value on the stack, so there are no aliases. The function + // can capture the address of the argument, but not the provenance. attrs .set(ArgAttribute::NoAlias) - .set(ArgAttribute::NoCapture) + .set(ArgAttribute::CapturesAddress) .set(ArgAttribute::NonNull) .set(ArgAttribute::NoUndef); attrs.pointee_size = layout.size; diff --git a/compiler/rustc_target/src/spec/json.rs b/compiler/rustc_target/src/spec/json.rs index 6c44b7ff52f..f56a65d9c0c 100644 --- a/compiler/rustc_target/src/spec/json.rs +++ b/compiler/rustc_target/src/spec/json.rs @@ -168,7 +168,6 @@ impl Target { forward!(main_needs_argc_argv); forward!(has_thread_local); forward!(obj_is_bitcode); - forward!(bitcode_llvm_cmdline); forward_opt!(max_atomic_width); forward_opt!(min_atomic_width); forward!(atomic_cas); @@ -361,7 +360,6 @@ impl ToJson for Target { target_option_val!(main_needs_argc_argv); target_option_val!(has_thread_local); target_option_val!(obj_is_bitcode); - target_option_val!(bitcode_llvm_cmdline); target_option_val!(min_atomic_width); target_option_val!(max_atomic_width); target_option_val!(atomic_cas); @@ -555,7 +553,6 @@ struct TargetSpecJson { main_needs_argc_argv: Option<bool>, has_thread_local: Option<bool>, obj_is_bitcode: Option<bool>, - bitcode_llvm_cmdline: Option<StaticCow<str>>, max_atomic_width: Option<u64>, min_atomic_width: Option<u64>, atomic_cas: Option<bool>, diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index b9fbff8db05..c53d92bee9d 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -50,6 +50,7 @@ use rustc_abi::{ Align, CanonAbi, Endian, ExternAbi, Integer, Size, TargetDataLayout, TargetDataLayoutErrors, }; use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; +use rustc_error_messages::{DiagArgValue, IntoDiagArg, into_diag_arg_using_display}; use rustc_fs_util::try_canonicalize; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; @@ -875,6 +876,12 @@ impl FromStr for PanicStrategy { crate::json::serde_deserialize_from_str!(PanicStrategy); +impl IntoDiagArg for PanicStrategy { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.desc().to_string())) + } +} + impl ToJson for PanicStrategy { fn to_json(&self) -> Json { match *self { @@ -1518,6 +1525,8 @@ impl fmt::Display for SplitDebuginfo { } } +into_diag_arg_using_display!(SplitDebuginfo); + #[derive(Clone, Debug, PartialEq, Eq, serde_derive::Deserialize)] #[serde(tag = "kind")] #[serde(rename_all = "kebab-case")] @@ -1795,6 +1804,8 @@ impl fmt::Display for StackProtector { } } +into_diag_arg_using_display!(StackProtector); + #[derive(PartialEq, Clone, Debug)] pub enum BinaryFormat { Coff, @@ -1943,6 +1954,7 @@ supported_targets! { ("armv7-unknown-linux-musleabihf", armv7_unknown_linux_musleabihf), ("aarch64-unknown-linux-gnu", aarch64_unknown_linux_gnu), ("aarch64-unknown-linux-musl", aarch64_unknown_linux_musl), + ("aarch64_be-unknown-linux-musl", aarch64_be_unknown_linux_musl), ("x86_64-unknown-linux-musl", x86_64_unknown_linux_musl), ("i686-unknown-linux-musl", i686_unknown_linux_musl), ("i586-unknown-linux-musl", i586_unknown_linux_musl), @@ -2105,6 +2117,7 @@ supported_targets! 
{ ("msp430-none-elf", msp430_none_elf), + ("aarch64_be-unknown-hermit", aarch64_be_unknown_hermit), ("aarch64-unknown-hermit", aarch64_unknown_hermit), ("riscv64gc-unknown-hermit", riscv64gc_unknown_hermit), ("x86_64-unknown-hermit", x86_64_unknown_hermit), @@ -2137,6 +2150,7 @@ supported_targets! { ("riscv64gc-unknown-none-elf", riscv64gc_unknown_none_elf), ("riscv64gc-unknown-linux-gnu", riscv64gc_unknown_linux_gnu), ("riscv64gc-unknown-linux-musl", riscv64gc_unknown_linux_musl), + ("riscv64a23-unknown-linux-gnu", riscv64a23_unknown_linux_gnu), ("sparc-unknown-none-elf", sparc_unknown_none_elf), @@ -2147,6 +2161,7 @@ supported_targets! { ("aarch64-unknown-none", aarch64_unknown_none), ("aarch64-unknown-none-softfloat", aarch64_unknown_none_softfloat), + ("aarch64_be-unknown-none-softfloat", aarch64_be_unknown_none_softfloat), ("aarch64-unknown-nuttx", aarch64_unknown_nuttx), ("x86_64-fortanix-unknown-sgx", x86_64_fortanix_unknown_sgx), @@ -2623,8 +2638,6 @@ pub struct TargetOptions { /// If we give emcc .o files that are actually .bc files it /// will 'just work'. pub obj_is_bitcode: bool, - /// Content of the LLVM cmdline section associated with embedded bitcode. - pub bitcode_llvm_cmdline: StaticCow<str>, /// Don't use this field; instead use the `.min_atomic_width()` method. pub min_atomic_width: Option<u64>, @@ -2988,7 +3001,6 @@ impl Default for TargetOptions { allow_asm: true, has_thread_local: false, obj_is_bitcode: false, - bitcode_llvm_cmdline: "".into(), min_atomic_width: None, max_atomic_width: None, atomic_cas: true, @@ -3808,3 +3820,5 @@ impl fmt::Display for TargetTuple { write!(f, "{}", self.debug_tuple()) } } + +into_diag_arg_using_display!(&TargetTuple); diff --git a/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_hermit.rs b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_hermit.rs new file mode 100644 index 00000000000..cad57abc2b1 --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_hermit.rs @@ -0,0 +1,25 @@ +use rustc_abi::Endian; + +use crate::spec::{StackProbeType, Target, TargetMetadata, TargetOptions, base}; + +pub(crate) fn target() -> Target { + Target { + llvm_target: "aarch64_be-unknown-hermit".into(), + metadata: TargetMetadata { + description: Some("ARM64 Hermit (big-endian)".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(true), + }, + pointer_width: 64, + arch: "aarch64".into(), + data_layout: "E-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(), + options: TargetOptions { + features: "+v8a,+strict-align,+neon,+fp-armv8".into(), + max_atomic_width: Some(128), + stack_probes: StackProbeType::Inline, + endian: Endian::Big, + ..base::hermit::opts() + }, + } +} diff --git a/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs new file mode 100644 index 00000000000..be5ac4a843b --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs @@ -0,0 +1,40 @@ +use rustc_abi::Endian; + +use crate::spec::{ + FramePointer, SanitizerSet, StackProbeType, Target, TargetMetadata, TargetOptions, base, +}; + +pub(crate) fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.max_atomic_width = Some(128); + base.supports_xray = true; + base.features = "+v8a,+outline-atomics".into(); + base.stack_probes = StackProbeType::Inline; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | 
SanitizerSet::MEMORY + | SanitizerSet::THREAD; + + Target { + llvm_target: "aarch64_be-unknown-linux-musl".into(), + metadata: TargetMetadata { + description: Some("ARM64 Linux (big-endian) with musl-libc 1.2.5".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(true), + }, + pointer_width: 64, + data_layout: "E-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(), + arch: "aarch64".into(), + options: TargetOptions { + // the AAPCS64 expects use of non-leaf frame pointers per + // https://github.com/ARM-software/abi-aa/blob/4492d1570eb70c8fd146623e0db65b2d241f12e7/aapcs64/aapcs64.rst#the-frame-pointer + // and we tend to encounter interesting bugs in AArch64 unwinding code if we do not + frame_pointer: FramePointer::NonLeaf, + mcount: "\u{1}_mcount".into(), + endian: Endian::Big, + ..base + }, + } +} diff --git a/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_none_softfloat.rs b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_none_softfloat.rs new file mode 100644 index 00000000000..7f918e85080 --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_none_softfloat.rs @@ -0,0 +1,43 @@ +// Generic big-endian AArch64 target for bare-metal code - Floating point disabled +// +// Can be used in conjunction with the `target-feature` and +// `target-cpu` compiler flags to opt-in more hardware-specific +// features. +// +// For example, `-C target-cpu=cortex-a53`. +use rustc_abi::Endian; + +use crate::spec::{ + Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, SanitizerSet, StackProbeType, Target, + TargetMetadata, TargetOptions, +}; + +pub(crate) fn target() -> Target { + let opts = TargetOptions { + abi: "softfloat".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + features: "+v8a,+strict-align,-neon,-fp-armv8".into(), + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(128), + supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS, + stack_probes: StackProbeType::Inline, + panic_strategy: PanicStrategy::Abort, + endian: Endian::Big, + ..Default::default() + }; + Target { + llvm_target: "aarch64_be-unknown-none".into(), + metadata: TargetMetadata { + description: Some("Bare ARM64 (big-endian), softfloat".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(false), + }, + pointer_width: 64, + data_layout: "E-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(), + arch: "aarch64".into(), + options: opts, + } +} diff --git a/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs b/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs index 9b81362b27d..61e4cad3fa2 100644 --- a/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs +++ b/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs @@ -19,7 +19,7 @@ pub(crate) fn target() -> Target { data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(), arch: "aarch64".into(), options: TargetOptions { - features: "+v8a".into(), + features: "+v8a,+neon,+crypto,+crc".into(), linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), linker: Some("rust-lld".into()), link_script: Some(LINKER_SCRIPT.into()), diff --git a/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld b/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld 
index a4783de0183..0f3e6eeee19 100644 --- a/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld +++ b/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld @@ -17,15 +17,25 @@ PROVIDE(__vcodesig_type = 0); /* V5_SIG_TYPE_USER */ PROVIDE(__vcodesig_owner = 2); /* V5_SIG_OWNER_PARTNER */ PROVIDE(__vcodesig_options = 0); /* none (0) */ -PROVIDE(__user_ram_start = 0x03800000); -PROVIDE(__user_ram_length = 48M); -PROVIDE(__user_ram_end = __user_ram_start + __user_ram_length); /* 0x8000000 */ +__user_ram_start = 0x03800000; +__user_ram_end = 0x08000000; +/* (0x48 =) 72 MiB length */ +__user_ram_length = __user_ram_end - __user_ram_start; -PROVIDE(__code_signature_length = 0x20); +/* + * VEXos provides a method for pre-loading a "linked file" at a specified + * address in User RAM, conventionally near the end, after the primary + * program binary. We need to be sure not to place any data in that location, + * so we allow the user of this linker script to specify the start address of + * this blob. + */ +PROVIDE(__linked_file_length = 0); +PROVIDE(__linked_file_end = __user_ram_end); +PROVIDE(__linked_file_start = __linked_file_end - __linked_file_length); PROVIDE(__stack_length = 4M); -PROVIDE(__heap_end = __user_ram_end - __stack_length); -PROVIDE(__user_length = __heap_start - __user_ram_start); +PROVIDE(__stack_top = __linked_file_start); +PROVIDE(__stack_bottom = __linked_file_start - __stack_length); MEMORY { USER_RAM (RWX) : ORIGIN = __user_ram_start, LENGTH = __user_ram_length @@ -44,7 +54,7 @@ SECTIONS { LONG(__vcodesig_options) FILL(0) - . = __user_ram_start + __code_signature_length; + . = __user_ram_start + 0x20; } > USER_RAM /* @@ -125,7 +135,8 @@ SECTIONS { */ .heap (NOLOAD) : { __heap_start = .; - . = __heap_end; + .
= __stack_bottom; + __heap_end = .; } > USER_RAM .stack (NOLOAD) : ALIGN(8) { diff --git a/compiler/rustc_target/src/spec/targets/avr_none.rs b/compiler/rustc_target/src/spec/targets/avr_none.rs index 07ed2a37803..ad056d02326 100644 --- a/compiler/rustc_target/src/spec/targets/avr_none.rs +++ b/compiler/rustc_target/src/spec/targets/avr_none.rs @@ -9,7 +9,7 @@ pub(crate) fn target() -> Target { host_tools: None, std: None, }, - data_layout: "e-P1-p:16:8-i8:8-i16:8-i32:8-i64:8-f32:8-f64:8-n8-a:8".into(), + data_layout: "e-P1-p:16:8-i8:8-i16:8-i32:8-i64:8-f32:8-f64:8-n8:16-a:8".into(), llvm_target: "avr-unknown-unknown".into(), pointer_width: 16, options: TargetOptions { diff --git a/compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs new file mode 100644 index 00000000000..60f2e7da042 --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs @@ -0,0 +1,27 @@ +use std::borrow::Cow; + +use crate::spec::{CodeModel, SplitDebuginfo, Target, TargetMetadata, TargetOptions, base}; + +pub(crate) fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-linux-gnu".into(), + metadata: TargetMetadata { + description: Some("RISC-V Linux (kernel 6.8.0, glibc 2.39)".into()), + tier: Some(3), + host_tools: Some(true), + std: Some(true), + }, + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+rva23u64".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), + ..base::linux_gnu::opts() + }, + } +} diff --git a/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs b/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs index 53e2cb469ee..8892c50d844 100644 --- a/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs +++ b/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs @@ -7,7 +7,7 @@ pub(crate) fn target() -> Target { llvm_target, metadata: TargetMetadata { description: Some("x86_64 Apple macOS (10.12+, Sierra+)".into()), - tier: Some(1), + tier: Some(2), host_tools: Some(true), std: Some(true), }, diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs index 297d9ed84c5..4c1b8c99426 100644 --- a/compiler/rustc_target/src/target_features.rs +++ b/compiler/rustc_target/src/target_features.rs @@ -248,6 +248,10 @@ static AARCH64_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("mte", Stable, &[]), // FEAT_AdvSimd & FEAT_FP ("neon", Stable, &[]), + // Backend option to turn atomic operations into an intrinsic call when `lse` is not known to be + // available, so the intrinsic can do runtime LSE feature detection rather than unconditionally + // using slower non-LSE operations. Unstable since it doesn't need to be user-togglable.
+ ("outline-atomics", Unstable(sym::aarch64_unstable_target_feature), &[]), // FEAT_PAUTH (address authentication) ("paca", Stable, &[]), // FEAT_PAUTH (generic authentication) @@ -471,9 +475,9 @@ static X86_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("sse3", Stable, &["sse2"]), ("sse4.1", Stable, &["ssse3"]), ("sse4.2", Stable, &["sse4.1"]), - ("sse4a", Unstable(sym::sse4a_target_feature), &["sse3"]), + ("sse4a", Stable, &["sse3"]), ("ssse3", Stable, &["sse3"]), - ("tbm", Unstable(sym::tbm_target_feature), &[]), + ("tbm", Stable, &[]), ("vaes", Stable, &["avx2", "aes"]), ("vpclmulqdq", Stable, &["avx", "pclmulqdq"]), ("widekl", Stable, &["kl"]), @@ -597,6 +601,49 @@ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ), ("m", Stable, &[]), ("relax", Unstable(sym::riscv_target_feature), &[]), + ( + "rva23u64", + Unstable(sym::riscv_target_feature), + &[ + "m", + "a", + "f", + "d", + "c", + "b", + "v", + "zicsr", + "zicntr", + "zihpm", + "ziccif", + "ziccrse", + "ziccamoa", + "zicclsm", + "zic64b", + "za64rs", + "zihintpause", + "zba", + "zbb", + "zbs", + "zicbom", + "zicbop", + "zicboz", + "zfhmin", + "zkt", + "zvfhmin", + "zvbb", + "zvkt", + "zihintntl", + "zicond", + "zimop", + "zcmop", + "zcb", + "zfa", + "zawrs", + "supm", + ], + ), + ("supm", Unstable(sym::riscv_target_feature), &[]), ("unaligned-scalar-mem", Unstable(sym::riscv_target_feature), &[]), ("unaligned-vector-mem", Unstable(sym::riscv_target_feature), &[]), ("v", Unstable(sym::riscv_target_feature), &["zvl128b", "zve64d"]), @@ -763,6 +810,7 @@ static CSKY_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ static LOONGARCH_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ // tidy-alphabetical-start + ("32s", Unstable(sym::loongarch_target_feature), &[]), ("d", Stable, &["f"]), ("div32", Unstable(sym::loongarch_target_feature), &[]), ("f", Stable, &[]), diff --git a/compiler/rustc_thread_pool/src/lib.rs b/compiler/rustc_thread_pool/src/lib.rs index 34252d919e3..7ce7fbc27ea 100644 --- a/compiler/rustc_thread_pool/src/lib.rs +++ b/compiler/rustc_thread_pool/src/lib.rs @@ -787,18 +787,7 @@ impl ThreadPoolBuildError { } } -const GLOBAL_POOL_ALREADY_INITIALIZED: &str = - "The global thread pool has already been initialized."; - impl Error for ThreadPoolBuildError { - #[allow(deprecated)] - fn description(&self) -> &str { - match self.kind { - ErrorKind::GlobalPoolAlreadyInitialized => GLOBAL_POOL_ALREADY_INITIALIZED, - ErrorKind::IOError(ref e) => e.description(), - } - } - fn source(&self) -> Option<&(dyn Error + 'static)> { match &self.kind { ErrorKind::GlobalPoolAlreadyInitialized => None, @@ -810,7 +799,9 @@ impl Error for ThreadPoolBuildError { impl fmt::Display for ThreadPoolBuildError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.kind { - ErrorKind::GlobalPoolAlreadyInitialized => GLOBAL_POOL_ALREADY_INITIALIZED.fmt(f), + ErrorKind::GlobalPoolAlreadyInitialized => { + "The global thread pool has already been initialized.".fmt(f) + } ErrorKind::IOError(e) => e.fmt(f), } } diff --git a/compiler/rustc_trait_selection/Cargo.toml b/compiler/rustc_trait_selection/Cargo.toml index 1071105522d..6b79b98d1bf 100644 --- a/compiler/rustc_trait_selection/Cargo.toml +++ b/compiler/rustc_trait_selection/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } @@ -21,6 +21,6 
@@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_transmute = { path = "../rustc_transmute", features = ["rustc"] } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -thin-vec = "0.2" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 8551780bcd5..9447612d026 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -224,41 +224,41 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { use ty::GenericArg; use ty::print::Printer; - struct AbsolutePathPrinter<'tcx> { + struct ConflictingPathPrinter<'tcx> { tcx: TyCtxt<'tcx>, segments: Vec<Symbol>, } - impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { + impl<'tcx> Printer<'tcx> for ConflictingPathPrinter<'tcx> { fn tcx<'a>(&'a self) -> TyCtxt<'tcx> { self.tcx } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } fn print_type(&mut self, _ty: Ty<'tcx>) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } fn print_dyn_existential( &mut self, _predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>, ) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } fn print_const(&mut self, _ct: ty::Const<'tcx>) -> Result<(), PrintError> { - unreachable!(); // because `path_generic_args` ignores the `GenericArgs` + unreachable!(); // because `print_path_with_generic_args` ignores the `GenericArgs` } - fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + fn print_crate_name(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.segments = vec![self.tcx.crate_name(cnum)]; Ok(()) } - fn path_qualified( + fn print_path_with_qualified( &mut self, _self_ty: Ty<'tcx>, _trait_ref: Option<ty::TraitRef<'tcx>>, @@ -266,7 +266,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { Err(fmt::Error) } - fn path_append_impl( + fn print_path_with_impl( &mut self, _print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _self_ty: Ty<'tcx>, @@ -275,7 +275,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { Err(fmt::Error) } - fn path_append( + fn print_path_with_simple( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, @@ -285,7 +285,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { Ok(()) } - fn path_generic_args( + fn print_path_with_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _args: &[GenericArg<'tcx>], @@ -300,28 +300,27 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // let _ = [{struct Foo; Foo}, {struct Foo; Foo}]; if did1.krate != did2.krate { let abs_path = |def_id| { - let mut p = AbsolutePathPrinter { tcx: self.tcx, segments: vec![] }; + let mut p = ConflictingPathPrinter { tcx: self.tcx, segments: vec![] }; p.print_def_path(def_id, &[]).map(|_| p.segments) }; - // We compare strings because DefPath can be different - // for imported and non-imported 
crates + // We compare strings because DefPath can be different for imported and + // non-imported crates. let expected_str = self.tcx.def_path_str(did1); let found_str = self.tcx.def_path_str(did2); let Ok(expected_abs) = abs_path(did1) else { return false }; let Ok(found_abs) = abs_path(did2) else { return false }; - let same_path = || -> Result<_, PrintError> { - Ok(expected_str == found_str || expected_abs == found_abs) - }; - // We want to use as unique a type path as possible. If both types are "locally - // known" by the same name, we use the "absolute path" which uses the original - // crate name instead. - let (expected, found) = if expected_str == found_str { - (join_path_syms(&expected_abs), join_path_syms(&found_abs)) - } else { - (expected_str.clone(), found_str.clone()) - }; - if same_path().unwrap_or(false) { + let same_path = expected_str == found_str || expected_abs == found_abs; + if same_path { + // We want to use as unique a type path as possible. If both types are "locally + // known" by the same name, we use the "absolute path" which uses the original + // crate name instead. + let (expected, found) = if expected_str == found_str { + (join_path_syms(&expected_abs), join_path_syms(&found_abs)) + } else { + (expected_str.clone(), found_str.clone()) + }; + // We've displayed "expected `a::b`, found `a::b`". We add context to // differentiate the different cases where that might happen. let expected_crate_name = self.tcx.crate_name(did1.krate); @@ -1620,8 +1619,18 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { { let e = self.tcx.erase_regions(e); let f = self.tcx.erase_regions(f); - let expected = with_forced_trimmed_paths!(e.sort_string(self.tcx)); - let found = with_forced_trimmed_paths!(f.sort_string(self.tcx)); + let mut expected = with_forced_trimmed_paths!(e.sort_string(self.tcx)); + let mut found = with_forced_trimmed_paths!(f.sort_string(self.tcx)); + if let ObligationCauseCode::Pattern { span, .. } = cause.code() + && let Some(span) = span + && !span.from_expansion() + && cause.span.from_expansion() + { + // When the type error comes from a macro like `assert!()`, and we are pointing at + // code the user wrote the cause and effect are reversed as the expected value is + // what the macro expanded to. + (found, expected) = (expected, found); + } if expected == found { label_or_note(span, terr.to_string(self.tcx)); } else { @@ -2144,7 +2153,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ) -> Option<(DiagStyledString, DiagStyledString)> { match values { ValuePairs::Regions(exp_found) => self.expected_found_str(exp_found), - ValuePairs::Terms(exp_found) => self.expected_found_str_term(exp_found, long_ty_path), + ValuePairs::Terms(exp_found) => { + self.expected_found_str_term(cause, exp_found, long_ty_path) + } ValuePairs::Aliases(exp_found) => self.expected_found_str(exp_found), ValuePairs::ExistentialTraitRef(exp_found) => self.expected_found_str(exp_found), ValuePairs::ExistentialProjection(exp_found) => self.expected_found_str(exp_found), @@ -2183,6 +2194,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn expected_found_str_term( &self, + cause: &ObligationCause<'tcx>, exp_found: ty::error::ExpectedFound<ty::Term<'tcx>>, long_ty_path: &mut Option<PathBuf>, ) -> Option<(DiagStyledString, DiagStyledString)> { @@ -2190,8 +2202,27 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if exp_found.references_error() { return None; } + let (mut expected, mut found) = (exp_found.expected, exp_found.found); + + if let ObligationCauseCode::Pattern { span, .. 
} = cause.code() + && let Some(span) = span + && !span.from_expansion() + && cause.span.from_expansion() + { + // When the type error comes from a macro like `assert!()`, and we are pointing at + // code the user wrote, the cause and effect are reversed as the expected value is + // what the macro expanded to. So if the user provided a `Type` when the macro is + // written in such a way that a `bool` was expected, we want to print: + // = note: expected `bool` + // found `Type`" + // but as far as the compiler is concerned, after expansion what was expected was `Type` + // = note: expected `Type` + // found `bool`" + // so we reverse them here to match user expectation. + (expected, found) = (found, expected); + } - Some(match (exp_found.expected.kind(), exp_found.found.kind()) { + Some(match (expected.kind(), found.kind()) { (ty::TermKind::Ty(expected), ty::TermKind::Ty(found)) => { let (mut exp, mut fnd) = self.cmp(expected, found); // Use the terminal width as the basis to determine when to compress the printed diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index 8f0f6d0bf26..40285e5d0e9 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -537,7 +537,7 @@ impl<T> Trait<T> for X { } } TypeError::TargetFeatureCast(def_id) => { - let target_spans = find_attr!(tcx.get_all_attrs(def_id), AttributeKind::TargetFeature(.., span) => *span); + let target_spans = find_attr!(tcx.get_all_attrs(def_id), AttributeKind::TargetFeature{attr_span: span, was_forced: false, ..} => *span); diag.note( "functions with `#[target_feature]` can only be coerced to `unsafe` function pointers" ); @@ -946,7 +946,7 @@ fn foo(&self) -> Self::T { String::new() } pub fn format_generic_args(&self, args: &[ty::GenericArg<'tcx>]) -> String { FmtPrinter::print_string(self.tcx, hir::def::Namespace::TypeNS, |p| { - p.path_generic_args(|_| Ok(()), args) + p.print_path_with_generic_args(|_| Ok(()), args) }) .expect("could not write to `String`.") } diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 62859329de3..d768e0bf63f 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -37,7 +37,6 @@ use super::on_unimplemented::{AppendConstMessage, OnUnimplementedNote}; use super::suggestions::get_explanation_based_on_obligation; use super::{ ArgKind, CandidateSimilarity, FindExprBySpan, GetSafeTransmuteErrorAndReason, ImplCandidate, - UnsatisfiedConst, }; use crate::error_reporting::TypeErrCtxt; use crate::error_reporting::infer::TyCategory; @@ -276,8 +275,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { *err.long_ty_path() = long_ty_file; let mut suggested = false; + let mut noted_missing_impl = false; if is_try_conversion || is_question_mark { - suggested = self.try_conversion_context(&obligation, main_trait_predicate, &mut err); + (suggested, noted_missing_impl) = self.try_conversion_context(&obligation, main_trait_predicate, &mut err); } if let Some(ret_span) = self.return_type_span(&obligation) { @@ -336,6 +336,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { return err.emit(); } + let ty_span = match 
leaf_trait_predicate.self_ty().skip_binder().kind() { + ty::Adt(def, _) if def.did().is_local() + && !self.can_suggest_derive(&obligation, leaf_trait_predicate) => self.tcx.def_span(def.did()), + _ => DUMMY_SP, + }; if let Some(s) = label { // If it has a custom `#[rustc_on_unimplemented]` // error message, let's display it as the label! @@ -348,15 +353,28 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // Don't say "the trait `FromResidual<Option<Infallible>>` is // not implemented for `Result<T, E>`". { - err.help(explanation); + // We do this just so that the JSON output's `help` position is the + // right one and not `file.rs:1:1`. The render is the same. + if ty_span == DUMMY_SP { + err.help(explanation); + } else { + err.span_help(ty_span, explanation); + } } } else if let Some(custom_explanation) = safe_transmute_explanation { err.span_label(span, custom_explanation); - } else if explanation.len() > self.tcx.sess.diagnostic_width() { + } else if (explanation.len() > self.tcx.sess.diagnostic_width() || ty_span != DUMMY_SP) && !noted_missing_impl { // Really long types don't look good as span labels, instead move it // to a `help`. err.span_label(span, "unsatisfied trait bound"); - err.help(explanation); + + // We do this just so that the JSON output's `help` position is the + // right one and not `file.rs:1:1`. The render is the same. + if ty_span == DUMMY_SP { + err.help(explanation); + } else { + err.span_help(ty_span, explanation); + } } else { err.span_label(span, explanation); } @@ -374,13 +392,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } - let UnsatisfiedConst(unsatisfied_const) = self - .maybe_add_note_for_unsatisfied_const( - leaf_trait_predicate, - &mut err, - span, - ); - if let Some((msg, span)) = type_def { err.span_label(span, msg); } @@ -506,7 +517,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { span, is_fn_trait, suggested, - unsatisfied_const, ); // Changing mutability doesn't make a difference to whether we have @@ -948,7 +958,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { obligation: &PredicateObligation<'tcx>, trait_pred: ty::PolyTraitPredicate<'tcx>, err: &mut Diag<'_>, - ) -> bool { + ) -> (bool, bool) { let span = obligation.cause.span; /// Look for the (direct) sub-expr of `?`, and return it if it's a `.` method call. 
struct FindMethodSubexprOfTry { @@ -968,21 +978,22 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } let hir_id = self.tcx.local_def_id_to_hir_id(obligation.cause.body_id); - let Some(body_id) = self.tcx.hir_node(hir_id).body_id() else { return false }; + let Some(body_id) = self.tcx.hir_node(hir_id).body_id() else { return (false, false) }; let ControlFlow::Break(expr) = (FindMethodSubexprOfTry { search_span: span }).visit_body(self.tcx.hir_body(body_id)) else { - return false; + return (false, false); }; let Some(typeck) = &self.typeck_results else { - return false; + return (false, false); }; let ObligationCauseCode::QuestionMark = obligation.cause.code().peel_derives() else { - return false; + return (false, false); }; let self_ty = trait_pred.skip_binder().self_ty(); let found_ty = trait_pred.skip_binder().trait_ref.args.get(1).and_then(|a| a.as_type()); - self.note_missing_impl_for_question_mark(err, self_ty, found_ty, trait_pred); + let noted_missing_impl = + self.note_missing_impl_for_question_mark(err, self_ty, found_ty, trait_pred); let mut prev_ty = self.resolve_vars_if_possible( typeck.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(self.tcx)), @@ -1146,7 +1157,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } prev = Some(err_ty); } - suggested + (suggested, noted_missing_impl) } fn note_missing_impl_for_question_mark( @@ -1155,7 +1166,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self_ty: Ty<'_>, found_ty: Option<Ty<'_>>, trait_pred: ty::PolyTraitPredicate<'tcx>, - ) { + ) -> bool { match (self_ty.kind(), found_ty) { (ty::Adt(def, _), Some(ty)) if let ty::Adt(found, _) = ty.kind() @@ -1196,8 +1207,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { format!("`{ty}` needs to implement `Into<{self_ty}>`"), ); } - _ => {} + _ => return false, } + true } fn report_const_param_not_wf( @@ -2716,7 +2728,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { span: Span, is_fn_trait: bool, suggested: bool, - unsatisfied_const: bool, ) { let body_def_id = obligation.cause.body_id; let span = if let ObligationCauseCode::BinOp { rhs_span: Some(rhs_span), .. } = @@ -2763,10 +2774,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self.tcx.def_span(trait_def_id), crate::fluent_generated::trait_selection_trait_has_no_impls, ); - } else if !suggested - && !unsatisfied_const - && trait_predicate.polarity() == ty::PredicatePolarity::Positive - { + } else if !suggested && trait_predicate.polarity() == ty::PredicatePolarity::Positive { // Can't show anything else useful, try to find similar impls. 
let impl_candidates = self.find_similar_impl_candidates(trait_predicate); if !self.report_similar_impl_candidates( @@ -2878,17 +2886,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } - fn maybe_add_note_for_unsatisfied_const( - &self, - _trait_predicate: ty::PolyTraitPredicate<'tcx>, - _err: &mut Diag<'_>, - _span: Span, - ) -> UnsatisfiedConst { - let unsatisfied_const = UnsatisfiedConst(false); - // FIXME(const_trait_impl) - unsatisfied_const - } - fn report_closure_error( &self, obligation: &PredicateObligation<'tcx>, diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs index 1d8b934cef3..c8500b2d9d4 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs @@ -51,8 +51,6 @@ enum GetSafeTransmuteErrorAndReason { Error { err_msg: String, safe_transmute_explanation: Option<String> }, } -struct UnsatisfiedConst(pub bool); - /// Crude way of getting back an `Expr` from a `Span`. pub struct FindExprBySpan<'hir> { pub span: Span, diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index bc8c8a44405..aa153d3607b 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -820,16 +820,20 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if matches!(obligation.cause.code(), ObligationCauseCode::FunctionArg { .. }) && obligation.cause.span.can_be_used_for_suggestions() { + let (span, sugg) = if let Some(snippet) = + self.tcx.sess.source_map().span_to_snippet(obligation.cause.span).ok() + && snippet.starts_with("|") + { + (obligation.cause.span, format!("({snippet})({args})")) + } else { + (obligation.cause.span.shrink_to_hi(), format!("({args})")) + }; + // When the obligation error has been ensured to have been caused by // an argument, the `obligation.cause.span` points at the expression // of the argument, so we can provide a suggestion. Otherwise, we give // a more general note. - err.span_suggestion_verbose( - obligation.cause.span.shrink_to_hi(), - msg, - format!("({args})"), - Applicability::HasPlaceholders, - ); + err.span_suggestion_verbose(span, msg, sugg, Applicability::HasPlaceholders); } else if let DefIdOrName::DefId(def_id) = def_id_or_name { let name = match self.tcx.hir_get_if_local(def_id) { Some(hir::Node::Expr(hir::Expr { @@ -2878,7 +2882,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // we check if `TraitB` can be reachable from `S` // to determine whether to note `TraitA` is sealed trait. 
if let ty::Adt(adt, _) = ty.kind() { - let visibilities = tcx.effective_visibilities(()); + let visibilities = &tcx.resolutions(()).effective_visibilities; visibilities.effective_vis(local).is_none_or(|v| { v.at_level(Level::Reexported) .is_accessible_from(adt.did(), tcx) @@ -3853,59 +3857,71 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } - pub fn suggest_derive( + pub fn can_suggest_derive( &self, obligation: &PredicateObligation<'tcx>, - err: &mut Diag<'_>, trait_pred: ty::PolyTraitPredicate<'tcx>, - ) { + ) -> bool { if trait_pred.polarity() == ty::PredicatePolarity::Negative { - return; + return false; } let Some(diagnostic_name) = self.tcx.get_diagnostic_name(trait_pred.def_id()) else { - return; + return false; }; let (adt, args) = match trait_pred.skip_binder().self_ty().kind() { ty::Adt(adt, args) if adt.did().is_local() => (adt, args), - _ => return, + _ => return false, }; - let can_derive = { - let is_derivable_trait = match diagnostic_name { - sym::Default => !adt.is_enum(), - sym::PartialEq | sym::PartialOrd => { - let rhs_ty = trait_pred.skip_binder().trait_ref.args.type_at(1); - trait_pred.skip_binder().self_ty() == rhs_ty - } - sym::Eq | sym::Ord | sym::Clone | sym::Copy | sym::Hash | sym::Debug => true, - _ => false, - }; - is_derivable_trait && - // Ensure all fields impl the trait. - adt.all_fields().all(|field| { - let field_ty = ty::GenericArg::from(field.ty(self.tcx, args)); - let trait_args = match diagnostic_name { - sym::PartialEq | sym::PartialOrd => { - Some(field_ty) - } - _ => None, - }; - let trait_pred = trait_pred.map_bound_ref(|tr| ty::TraitPredicate { - trait_ref: ty::TraitRef::new(self.tcx, - trait_pred.def_id(), - [field_ty].into_iter().chain(trait_args), - ), - ..*tr - }); - let field_obl = Obligation::new( - self.tcx, - obligation.cause.clone(), - obligation.param_env, - trait_pred, - ); - self.predicate_must_hold_modulo_regions(&field_obl) - }) + let is_derivable_trait = match diagnostic_name { + sym::Default => !adt.is_enum(), + sym::PartialEq | sym::PartialOrd => { + let rhs_ty = trait_pred.skip_binder().trait_ref.args.type_at(1); + trait_pred.skip_binder().self_ty() == rhs_ty + } + sym::Eq | sym::Ord | sym::Clone | sym::Copy | sym::Hash | sym::Debug => true, + _ => false, + }; + is_derivable_trait && + // Ensure all fields impl the trait. 
+ adt.all_fields().all(|field| { + let field_ty = ty::GenericArg::from(field.ty(self.tcx, args)); + let trait_args = match diagnostic_name { + sym::PartialEq | sym::PartialOrd => { + Some(field_ty) + } + _ => None, + }; + let trait_pred = trait_pred.map_bound_ref(|tr| ty::TraitPredicate { + trait_ref: ty::TraitRef::new(self.tcx, + trait_pred.def_id(), + [field_ty].into_iter().chain(trait_args), + ), + ..*tr + }); + let field_obl = Obligation::new( + self.tcx, + obligation.cause.clone(), + obligation.param_env, + trait_pred, + ); + self.predicate_must_hold_modulo_regions(&field_obl) + }) + } + + pub fn suggest_derive( + &self, + obligation: &PredicateObligation<'tcx>, + err: &mut Diag<'_>, + trait_pred: ty::PolyTraitPredicate<'tcx>, + ) { + let Some(diagnostic_name) = self.tcx.get_diagnostic_name(trait_pred.def_id()) else { + return; + }; + let adt = match trait_pred.skip_binder().self_ty().kind() { + ty::Adt(adt, _) if adt.did().is_local() => adt, + _ => return, }; - if can_derive { + if self.can_suggest_derive(obligation, trait_pred) { err.span_suggestion_verbose( self.tcx.def_span(adt.did()).shrink_to_lo(), format!( diff --git a/compiler/rustc_trait_selection/src/opaque_types.rs b/compiler/rustc_trait_selection/src/opaque_types.rs index d5bde9192d5..bc7bdd372ba 100644 --- a/compiler/rustc_trait_selection/src/opaque_types.rs +++ b/compiler/rustc_trait_selection/src/opaque_types.rs @@ -4,8 +4,8 @@ use rustc_hir::def_id::LocalDefId; use rustc_infer::infer::outlives::env::OutlivesEnvironment; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; use rustc_middle::ty::{ - self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, TyCtxt, TypeVisitableExt, - TypingMode, fold_regions, + self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, Ty, TyCtxt, + TypeVisitableExt, TypingMode, fold_regions, }; use rustc_span::{ErrorGuaranteed, Span}; @@ -13,22 +13,23 @@ use crate::errors::NonGenericOpaqueTypeParam; use crate::regions::OutlivesEnvironmentBuildExt; use crate::traits::ObligationCtxt; -pub enum InvalidOpaqueTypeArgs<'tcx> { - AlreadyReported(ErrorGuaranteed), +#[derive(Debug)] +pub enum NonDefiningUseReason<'tcx> { + Tainted(ErrorGuaranteed), NotAParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_index: usize, span: Span }, DuplicateParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_indices: Vec<usize>, span: Span }, } -impl From<ErrorGuaranteed> for InvalidOpaqueTypeArgs<'_> { +impl From<ErrorGuaranteed> for NonDefiningUseReason<'_> { fn from(guar: ErrorGuaranteed) -> Self { - InvalidOpaqueTypeArgs::AlreadyReported(guar) + NonDefiningUseReason::Tainted(guar) } } -impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { +impl<'tcx> NonDefiningUseReason<'tcx> { pub fn report(self, infcx: &InferCtxt<'tcx>) -> ErrorGuaranteed { let tcx = infcx.tcx; match self { - InvalidOpaqueTypeArgs::AlreadyReported(guar) => guar, - InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index, span } => { + NonDefiningUseReason::Tainted(guar) => guar, + NonDefiningUseReason::NotAParam { opaque_type_key, param_index, span } => { let opaque_generics = tcx.generics_of(opaque_type_key.def_id); let opaque_param = opaque_generics.param_at(param_index, tcx); let kind = opaque_param.kind.descr(); @@ -39,7 +40,7 @@ impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { param_span: tcx.def_span(opaque_param.def_id), }) } - InvalidOpaqueTypeArgs::DuplicateParam { opaque_type_key, param_indices, span } => { + NonDefiningUseReason::DuplicateParam { opaque_type_key, param_indices, span } => { let opaque_generics = 
tcx.generics_of(opaque_type_key.def_id); let descr = opaque_generics.param_at(param_indices[0], tcx).kind.descr(); let spans: Vec<_> = param_indices @@ -57,15 +58,17 @@ impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { } /// Opaque type parameter validity check as documented in the [rustc-dev-guide chapter]. +/// With the new solver, uses which fail this check are simply treated as non-defining +/// and we only emit an error if no defining use exists. /// /// [rustc-dev-guide chapter]: /// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html -pub fn check_opaque_type_parameter_valid<'tcx>( +pub fn opaque_type_has_defining_use_args<'tcx>( infcx: &InferCtxt<'tcx>, opaque_type_key: OpaqueTypeKey<'tcx>, span: Span, defining_scope_kind: DefiningScopeKind, -) -> Result<(), InvalidOpaqueTypeArgs<'tcx>> { +) -> Result<(), NonDefiningUseReason<'tcx>> { let tcx = infcx.tcx; let opaque_env = LazyOpaqueTyEnv::new(tcx, opaque_type_key.def_id); let mut seen_params: FxIndexMap<_, Vec<_>> = FxIndexMap::default(); @@ -104,13 +107,13 @@ pub fn check_opaque_type_parameter_valid<'tcx>( } else { // Prevent `fn foo() -> Foo<u32>` from being defining. opaque_env.param_is_error(i)?; - return Err(InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index: i, span }); + return Err(NonDefiningUseReason::NotAParam { opaque_type_key, param_index: i, span }); } } for (_, param_indices) in seen_params { if param_indices.len() > 1 { - return Err(InvalidOpaqueTypeArgs::DuplicateParam { + return Err(NonDefiningUseReason::DuplicateParam { opaque_type_key, param_indices, span, @@ -205,3 +208,27 @@ impl<'tcx> LazyOpaqueTyEnv<'tcx> { canonical_args } } + +pub fn report_item_does_not_constrain_error<'tcx>( + tcx: TyCtxt<'tcx>, + item_def_id: LocalDefId, + def_id: LocalDefId, + non_defining_use: Option<(OpaqueTypeKey<'tcx>, Span)>, +) -> ErrorGuaranteed { + let span = tcx.def_ident_span(item_def_id).unwrap_or_else(|| tcx.def_span(item_def_id)); + let opaque_type_span = tcx.def_span(def_id); + let opaque_type_name = tcx.def_path_str(def_id); + + let mut err = + tcx.dcx().struct_span_err(span, format!("item does not constrain `{opaque_type_name}`")); + err.note("consider removing `#[define_opaque]` or adding an empty `#[define_opaque()]`"); + err.span_note(opaque_type_span, "this opaque type is supposed to be constrained"); + if let Some((key, span)) = non_defining_use { + let opaque_ty = Ty::new_opaque(tcx, key.def_id.into(), key.args); + err.span_note( + span, + format!("this use of `{opaque_ty}` does not have unique universal generic arguments"), + ); + } + err.emit() +} diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index 3f628d80662..575e0472e0e 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -252,7 +252,9 @@ where // inside of an opaque type, e.g. if there's `Opaque = (?x, ?x)` in the // storage, we can also rely on structural identity of `?x` even if we // later uniquify it in MIR borrowck. 
- if infcx.in_hir_typeck && obligation.has_non_region_infer() { + if infcx.in_hir_typeck + && (obligation.has_non_region_infer() || obligation.has_free_regions()) + { infcx.push_hir_typeck_potentially_region_dependent_goal(obligation); } } diff --git a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs index ea1eed95723..4b493c95d59 100644 --- a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs +++ b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs @@ -106,6 +106,10 @@ fn dyn_compatibility_violations_for_trait( if !spans.is_empty() { violations.push(DynCompatibilityViolation::SupertraitNonLifetimeBinder(spans)); } + let spans = super_predicates_are_unconditionally_const(tcx, trait_def_id); + if !spans.is_empty() { + violations.push(DynCompatibilityViolation::SupertraitConst(spans)); + } violations } @@ -247,16 +251,31 @@ fn super_predicates_have_non_lifetime_binders( tcx: TyCtxt<'_>, trait_def_id: DefId, ) -> SmallVec<[Span; 1]> { - // If non_lifetime_binders is disabled, then exit early - if !tcx.features().non_lifetime_binders() { - return SmallVec::new(); - } tcx.explicit_super_predicates_of(trait_def_id) .iter_identity_copied() .filter_map(|(pred, span)| pred.has_non_region_bound_vars().then_some(span)) .collect() } +/// Checks for `const Trait` supertraits. We're okay with `[const] Trait`, +/// supertraits since for a non-const instantiation of that trait, the +/// conditionally-const supertrait is also not required to be const. +fn super_predicates_are_unconditionally_const( + tcx: TyCtxt<'_>, + trait_def_id: DefId, +) -> SmallVec<[Span; 1]> { + tcx.explicit_super_predicates_of(trait_def_id) + .iter_identity_copied() + .filter_map(|(pred, span)| { + if let ty::ClauseKind::HostEffect(_) = pred.kind().skip_binder() { + Some(span) + } else { + None + } + }) + .collect() +} + fn trait_has_sized_self(tcx: TyCtxt<'_>, trait_def_id: DefId) -> bool { tcx.generics_require_sized_self(trait_def_id) } diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index 581191b2036..884d53732fe 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1507,7 +1507,7 @@ fn confirm_builtin_candidate<'cx, 'tcx>( let tcx = selcx.tcx(); let self_ty = obligation.predicate.self_ty(); let item_def_id = obligation.predicate.def_id; - let trait_def_id = tcx.trait_of_assoc(item_def_id).unwrap(); + let trait_def_id = tcx.parent(item_def_id); let args = tcx.mk_args(&[self_ty.into()]); let (term, obligations) = if tcx.is_lang_item(trait_def_id, LangItem::DiscriminantKind) { let discriminant_def_id = diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs index 0ca2d216228..6ce68507d65 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs @@ -1,6 +1,7 @@ use std::fmt; use rustc_errors::ErrorGuaranteed; +use rustc_hir::def_id::LocalDefId; use rustc_infer::infer::region_constraints::RegionConstraintData; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{TyCtxt, TypeFoldable}; @@ -42,13 +43,14 @@ where fn fully_perform( self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> { if cfg!(debug_assertions) { 
info!("fully_perform({:?})", self); } - Ok(scrape_region_constraints(infcx, self.closure, self.description, span)?.0) + Ok(scrape_region_constraints(infcx, root_def_id, self.description, span, self.closure)?.0) } } @@ -62,9 +64,10 @@ impl<F> fmt::Debug for CustomTypeOp<F> { /// constraints that result, creating query-region-constraints. pub fn scrape_region_constraints<'tcx, Op, R>( infcx: &InferCtxt<'tcx>, - op: impl FnOnce(&ObligationCtxt<'_, 'tcx>) -> Result<R, NoSolution>, + root_def_id: LocalDefId, name: &'static str, span: Span, + op: impl FnOnce(&ObligationCtxt<'_, 'tcx>) -> Result<R, NoSolution>, ) -> Result<(TypeOpOutput<'tcx, Op>, RegionConstraintData<'tcx>), ErrorGuaranteed> where R: TypeFoldable<TyCtxt<'tcx>>, @@ -94,6 +97,8 @@ where let errors = ocx.select_all_or_error(); if errors.is_empty() { Ok(value) + } else if let Err(guar) = infcx.tcx.check_potentially_region_dependent_goals(root_def_id) { + Err(guar) } else { Err(infcx .dcx() diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs index 018e9748cf0..4b8bf868123 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs @@ -1,6 +1,7 @@ use std::fmt; use rustc_errors::ErrorGuaranteed; +use rustc_hir::def_id::LocalDefId; use rustc_infer::traits::PredicateObligations; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{ParamEnvAnd, TyCtxt, TypeFoldable}; @@ -37,6 +38,7 @@ pub trait TypeOp<'tcx>: Sized + fmt::Debug { fn fully_perform( self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed>; } @@ -140,6 +142,7 @@ where fn fully_perform( self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> { // In the new trait solver, query type ops are performed locally. This @@ -152,9 +155,10 @@ where if infcx.next_trait_solver() { return Ok(scrape_region_constraints( infcx, - |ocx| QueryTypeOp::perform_locally_with_next_solver(ocx, self, span), + root_def_id, "query type op", span, + |ocx| QueryTypeOp::perform_locally_with_next_solver(ocx, self, span), )? .0); } @@ -166,19 +170,15 @@ where // we sometimes end up with `Opaque<'a> = Opaque<'b>` instead of an actual hidden type. In that case we don't register a // hidden type but just equate the lifetimes. Thus we need to scrape the region constraints even though we're also manually // collecting region constraints via `region_constraints`. 
- let (mut output, _) = scrape_region_constraints( - infcx, - |ocx| { + let (mut output, _) = + scrape_region_constraints(infcx, root_def_id, "fully_perform", span, |ocx| { let (output, ei, obligations, _) = Q::fully_perform_into(self, infcx, &mut region_constraints, span)?; error_info = ei; ocx.register_obligations(obligations); Ok(output) - }, - "fully_perform", - span, - )?; + })?; output.error_info = error_info; if let Some(QueryRegionConstraints { outlives, assumptions }) = output.constraints { region_constraints.outlives.extend(outlives.iter().cloned()); diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 468c42abf48..1dd31990ab7 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -4,9 +4,9 @@ use std::assert_matches::assert_matches; use std::cell::{Cell, RefCell}; +use std::cmp; use std::fmt::{self, Display}; use std::ops::ControlFlow; -use std::{cmp, iter}; use hir::def::DefKind; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; @@ -2185,32 +2185,23 @@ impl<'tcx> SelectionContext<'_, 'tcx> { ty::Binder::dummy(vec![ty]) } - ty::Coroutine(coroutine_def_id, args) => { - match self.tcx().coroutine_movability(coroutine_def_id) { - hir::Movability::Static => { - unreachable!("tried to assemble `Sized` for static coroutine") - } - hir::Movability::Movable => { - if self.tcx().features().coroutine_clone() { - ty::Binder::dummy( - args.as_coroutine() - .upvar_tys() - .iter() - .chain([Ty::new_coroutine_witness( - self.tcx(), - coroutine_def_id, - self.tcx().mk_args(args.as_coroutine().parent_args()), - )]) - .collect::<Vec<_>>(), - ) - } else { - unreachable!( - "tried to assemble `Sized` for coroutine without enabled feature" - ) - } + ty::Coroutine(def_id, args) => match self.tcx().coroutine_movability(def_id) { + hir::Movability::Static => { + unreachable!("tried to assemble `Clone` for static coroutine") + } + hir::Movability::Movable => { + if self.tcx().features().coroutine_clone() { + ty::Binder::dummy(vec![ + args.as_coroutine().tupled_upvars_ty(), + Ty::new_coroutine_witness_for_coroutine(self.tcx(), def_id, args), + ]) + } else { + unreachable!( + "tried to assemble `Clone` for coroutine without enabled feature" + ) } } - } + }, ty::CoroutineWitness(def_id, args) => self .infcx @@ -2334,25 +2325,9 @@ impl<'tcx> SelectionContext<'_, 'tcx> { ty::Coroutine(def_id, args) => { let ty = self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); let tcx = self.tcx(); - let witness = Ty::new_coroutine_witness( - tcx, - def_id, - ty::GenericArgs::for_item(tcx, def_id, |def, _| match def.kind { - // HACK: Coroutine witnesse types are lifetime erased, so they - // never reference any lifetime args from the coroutine. We erase - // the regions here since we may get into situations where a - // coroutine is recursively contained within itself, leading to - // witness types that differ by region args. This means that - // cycle detection in fulfillment will not kick in, which leads - // to unnecessary overflows in async code. See the issue: - // <https://github.com/rust-lang/rust/issues/145151>. - ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), - ty::GenericParamDefKind::Type { .. } - | ty::GenericParamDefKind::Const { .. 
} => args[def.index as usize], - }), - ); + let witness = Ty::new_coroutine_witness_for_coroutine(tcx, def_id, args); ty::Binder::dummy(AutoImplConstituents { - types: [ty].into_iter().chain(iter::once(witness)).collect(), + types: vec![ty, witness], assumptions: vec![], }) } diff --git a/compiler/rustc_trait_selection/src/traits/util.rs b/compiler/rustc_trait_selection/src/traits/util.rs index 83c0969762f..335942d5bcc 100644 --- a/compiler/rustc_trait_selection/src/traits/util.rs +++ b/compiler/rustc_trait_selection/src/traits/util.rs @@ -9,8 +9,8 @@ pub use rustc_infer::traits::util::*; use rustc_middle::bug; use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::{ - self, PolyTraitPredicate, SizedTraitKind, TraitPredicate, TraitRef, Ty, TyCtxt, TypeFoldable, - TypeFolder, TypeSuperFoldable, TypeVisitableExt, + self, PolyTraitPredicate, PredicatePolarity, SizedTraitKind, TraitPredicate, TraitRef, Ty, + TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, }; pub use rustc_next_trait_solver::placeholder::BoundVarReplacer; use rustc_span::Span; @@ -427,7 +427,9 @@ pub(crate) fn lazily_elaborate_sizedness_candidate<'tcx>( return candidate; } - if obligation.predicate.polarity() != candidate.polarity() { + if obligation.predicate.polarity() != PredicatePolarity::Positive + || candidate.polarity() != PredicatePolarity::Positive + { return candidate; } diff --git a/compiler/rustc_traits/Cargo.toml b/compiler/rustc_traits/Cargo.toml index 04aef4e7b9e..24360a94cc7 100644 --- a/compiler/rustc_traits/Cargo.toml +++ b/compiler/rustc_traits/Cargo.toml @@ -6,10 +6,9 @@ edition = "2024" [dependencies] # tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures" } -rustc_hir = { path = "../rustc_hir" } rustc_infer = { path = "../rustc_infer" } rustc_middle = { path = "../rustc_middle" } rustc_span = { path = "../rustc_span" } rustc_trait_selection = { path = "../rustc_trait_selection" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_traits/src/coroutine_witnesses.rs b/compiler/rustc_traits/src/coroutine_witnesses.rs index 8a2a0832ddb..20f9b013724 100644 --- a/compiler/rustc_traits/src/coroutine_witnesses.rs +++ b/compiler/rustc_traits/src/coroutine_witnesses.rs @@ -1,9 +1,9 @@ -use rustc_hir::def_id::DefId; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::infer::canonical::query_response::make_query_region_constraints; use rustc_infer::infer::resolve::OpportunisticRegionResolver; use rustc_infer::traits::{Obligation, ObligationCause}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, fold_regions}; +use rustc_span::def_id::DefId; use rustc_trait_selection::traits::{ObligationCtxt, with_replaced_escaping_bound_vars}; /// Return the set of types that should be taken into account when checking diff --git a/compiler/rustc_traits/src/dropck_outlives.rs b/compiler/rustc_traits/src/dropck_outlives.rs index 5eddad39e2b..d33ade7a9e1 100644 --- a/compiler/rustc_traits/src/dropck_outlives.rs +++ b/compiler/rustc_traits/src/dropck_outlives.rs @@ -1,5 +1,4 @@ use rustc_data_structures::fx::FxHashSet; -use rustc_hir::def_id::DefId; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::infer::canonical::{Canonical, QueryResponse}; use rustc_middle::bug; @@ -7,6 +6,7 @@ use rustc_middle::query::Providers; use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult}; use rustc_middle::ty::{self, GenericArgs, TyCtxt}; use rustc_span::DUMMY_SP; +use 
rustc_span::def_id::DefId; use rustc_trait_selection::infer::InferCtxtBuilderExt; use rustc_trait_selection::traits::query::dropck_outlives::{ compute_dropck_outlives_inner, dtorck_constraint_for_ty_inner, diff --git a/compiler/rustc_transmute/Cargo.toml b/compiler/rustc_transmute/Cargo.toml index e61717e5e9c..2ac2b46cc58 100644 --- a/compiler/rustc_transmute/Cargo.toml +++ b/compiler/rustc_transmute/Cargo.toml @@ -11,12 +11,12 @@ rustc_hir = { path = "../rustc_hir", optional = true } rustc_middle = { path = "../rustc_middle", optional = true } rustc_span = { path = "../rustc_span", optional = true } smallvec = "1.8.1" -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end [dev-dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true # tidy-alphabetical-end [features] diff --git a/compiler/rustc_ty_utils/Cargo.toml b/compiler/rustc_ty_utils/Cargo.toml index ce08b300cc8..394cde5a323 100644 --- a/compiler/rustc_ty_utils/Cargo.toml +++ b/compiler/rustc_ty_utils/Cargo.toml @@ -5,7 +5,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" +itertools.workspace = true rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } @@ -20,5 +20,5 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -tracing = "0.1" +tracing.workspace = true # tidy-alphabetical-end diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index 89d1dd8cf23..e4ed084b073 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -356,6 +356,7 @@ fn arg_attrs_for_rust_scalar<'tcx>( if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return { attrs.set(ArgAttribute::ReadOnly); + attrs.set(ArgAttribute::CapturesReadOnly); } } } diff --git a/compiler/rustc_type_ir/Cargo.toml b/compiler/rustc_type_ir/Cargo.toml index e7fee574dd4..1dba7fe4269 100644 --- a/compiler/rustc_type_ir/Cargo.toml +++ b/compiler/rustc_type_ir/Cargo.toml @@ -5,21 +5,22 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -bitflags = "2.4.1" +bitflags.workspace = true derive-where = "1.2.7" ena = "0.14.3" indexmap = "2.0.0" rustc-hash = "2.0.0" rustc_ast_ir = { path = "../rustc_ast_ir", default-features = false } rustc_data_structures = { path = "../rustc_data_structures", optional = true } +rustc_error_messages = { path = "../rustc_error_messages", optional = true } rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } rustc_span = { path = "../rustc_span", optional = true } rustc_type_ir_macros = { path = "../rustc_type_ir_macros" } smallvec = { version = "1.8.1", default-features = false, features = ["const_generics"] } -thin-vec = "0.2.12" -tracing = "0.1" +thin-vec.workspace = true +tracing.workspace = true # tidy-alphabetical-end [features] @@ -27,6 +28,7 @@ tracing = "0.1" default = ["nightly"] nightly = [ "dep:rustc_data_structures", + "dep:rustc_error_messages", "dep:rustc_macros", "dep:rustc_serialize", "dep:rustc_span", diff --git a/compiler/rustc_type_ir/src/infer_ctxt.rs b/compiler/rustc_type_ir/src/infer_ctxt.rs index e39b99e992b..b4462294700 100644 --- a/compiler/rustc_type_ir/src/infer_ctxt.rs +++ 
b/compiler/rustc_type_ir/src/infer_ctxt.rs @@ -148,10 +148,6 @@ pub trait InferCtxtLike: Sized { true } - fn in_hir_typeck(&self) -> bool { - false - } - fn typing_mode(&self) -> TypingMode<Self::Interner>; fn universe(&self) -> ty::UniverseIndex; diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 1a6c99ce7de..569570b5783 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -91,6 +91,12 @@ pub trait Ty<I: Interner<Ty = Self>>: fn new_coroutine_witness(interner: I, def_id: I::DefId, args: I::GenericArgs) -> Self; + fn new_coroutine_witness_for_coroutine( + interner: I, + def_id: I::DefId, + coroutine_args: I::GenericArgs, + ) -> Self; + fn new_ptr(interner: I, ty: Self, mutbl: Mutability) -> Self; fn new_ref(interner: I, region: I::Region, ty: Self, mutbl: Mutability) -> Self; diff --git a/compiler/rustc_type_ir/src/ir_print.rs b/compiler/rustc_type_ir/src/ir_print.rs index 388ad09cb20..82bb8791b84 100644 --- a/compiler/rustc_type_ir/src/ir_print.rs +++ b/compiler/rustc_type_ir/src/ir_print.rs @@ -1,9 +1,9 @@ use std::fmt; use crate::{ - AliasTerm, AliasTy, Binder, CoercePredicate, ExistentialProjection, ExistentialTraitRef, FnSig, - HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, PatternKind, - ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef, + AliasTerm, AliasTy, Binder, ClosureKind, CoercePredicate, ExistentialProjection, + ExistentialTraitRef, FnSig, HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, + PatternKind, ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef, UnevaluatedConst, }; pub trait IrPrint<T> { @@ -70,3 +70,46 @@ where <I as IrPrint<OutlivesPredicate<I, T>>>::print(self, fmt) } } + +#[cfg(feature = "nightly")] +mod into_diag_arg_impls { + use rustc_error_messages::{DiagArgValue, IntoDiagArg}; + + use super::*; + + impl<I: Interner> IntoDiagArg for TraitRef<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } + } + + impl<I: Interner> IntoDiagArg for ExistentialTraitRef<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } + } + + impl<I: Interner> IntoDiagArg for UnevaluatedConst<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + format!("{self:?}").into_diag_arg(path) + } + } + + impl<I: Interner> IntoDiagArg for FnSig<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + format!("{self:?}").into_diag_arg(path) + } + } + + impl<I: Interner, T: IntoDiagArg> IntoDiagArg for Binder<I, T> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.skip_binder().into_diag_arg(path) + } + } + + impl IntoDiagArg for ClosureKind { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(self.as_str().into()) + } + } +} diff --git a/compiler/rustc_type_ir/src/region_kind.rs b/compiler/rustc_type_ir/src/region_kind.rs index cca81dcb4a0..06048af0436 100644 --- a/compiler/rustc_type_ir/src/region_kind.rs +++ b/compiler/rustc_type_ir/src/region_kind.rs @@ -154,7 +154,7 @@ pub enum RegionKind<I: Interner> { /// parameters via `tcx.liberate_late_bound_regions`. They are then treated /// the same way as `ReEarlyParam` while inside of the function. 
/// - /// See <https://rustc-dev-guide.rust-lang.org/early-late-bound-params/early-late-bound-summary.html> for + /// See <https://rustc-dev-guide.rust-lang.org/early_late_parameters.html> for /// more info about early and late bound lifetime parameters. ReLateParam(I::LateParamRegion), diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs index daacb4a3e44..348178a527f 100644 --- a/compiler/rustc_type_ir/src/search_graph/mod.rs +++ b/compiler/rustc_type_ir/src/search_graph/mod.rs @@ -12,10 +12,11 @@ //! The global cache has to be completely unobservable, while the per-cycle cache may impact //! behavior as long as the resulting behavior is still correct. use std::cmp::Ordering; -use std::collections::BTreeMap; use std::collections::hash_map::Entry; +use std::collections::{BTreeMap, btree_map}; use std::fmt::Debug; use std::hash::Hash; +use std::iter; use std::marker::PhantomData; use derive_where::derive_where; @@ -169,27 +170,76 @@ impl PathKind { /// This is used to avoid rerunning a cycle if there's /// just a single usage kind and the final result matches /// its provisional result. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum UsageKind { - Single(PathKind), - Mixed, +/// +/// While it tracks the amount of usages using `u32`, we only ever +/// care whether there are any. We only count them to be able to ignore +/// usages from irrelevant candidates while evaluating a goal. +/// +/// This cares about how nested goals relied on a cycle head. It does +/// not care about how frequently the nested goal relied on it. +#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)] +struct HeadUsages { + inductive: u32, + unknown: u32, + coinductive: u32, + forced_ambiguity: u32, } -impl From<PathKind> for UsageKind { - fn from(path: PathKind) -> UsageKind { - UsageKind::Single(path) + +impl HeadUsages { + fn add_usage(&mut self, path: PathKind) { + match path { + PathKind::Inductive => self.inductive += 1, + PathKind::Unknown => self.unknown += 1, + PathKind::Coinductive => self.coinductive += 1, + PathKind::ForcedAmbiguity => self.forced_ambiguity += 1, + } + } + + /// This adds the usages which occurred while computing a nested goal. + /// + /// We don't actually care about how frequently the nested goal relied + /// on its cycle heads, only whether it did. 
+ fn add_usages_from_nested(&mut self, usages: HeadUsages) { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = usages; + self.inductive += if inductive == 0 { 0 } else { 1 }; + self.unknown += if unknown == 0 { 0 } else { 1 }; + self.coinductive += if coinductive == 0 { 0 } else { 1 }; + self.forced_ambiguity += if forced_ambiguity == 0 { 0 } else { 1 }; + } + + fn ignore_usages(&mut self, usages: HeadUsages) { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = usages; + self.inductive = self.inductive.checked_sub(inductive).unwrap(); + self.unknown = self.unknown.checked_sub(unknown).unwrap(); + self.coinductive = self.coinductive.checked_sub(coinductive).unwrap(); + self.forced_ambiguity = self.forced_ambiguity.checked_sub(forced_ambiguity).unwrap(); + } + + fn is_empty(self) -> bool { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = self; + inductive == 0 && unknown == 0 && coinductive == 0 && forced_ambiguity == 0 } } -impl UsageKind { - #[must_use] - fn merge(self, other: impl Into<Self>) -> Self { - match (self, other.into()) { - (UsageKind::Mixed, _) | (_, UsageKind::Mixed) => UsageKind::Mixed, - (UsageKind::Single(lhs), UsageKind::Single(rhs)) => { - if lhs == rhs { - UsageKind::Single(lhs) - } else { - UsageKind::Mixed + +#[derive(Debug, Default)] +pub struct CandidateHeadUsages { + usages: Option<Box<HashMap<StackDepth, HeadUsages>>>, +} +impl CandidateHeadUsages { + pub fn merge_usages(&mut self, other: CandidateHeadUsages) { + if let Some(other_usages) = other.usages { + if let Some(ref mut self_usages) = self.usages { + #[allow(rustc::potential_query_instability)] + for (head_index, head) in other_usages.into_iter() { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = head; + let self_usages = self_usages.entry(head_index).or_default(); + self_usages.inductive += inductive; + self_usages.unknown += unknown; + self_usages.coinductive += coinductive; + self_usages.forced_ambiguity += forced_ambiguity; } + } else { + self.usages = Some(other_usages); } } } @@ -230,13 +280,24 @@ impl AvailableDepth { } } +#[derive(Clone, Copy, Debug)] +struct CycleHead { + paths_to_head: PathsToNested, + /// If the `usages` are empty, the result of that head does not matter + /// for the current goal. However, we still don't completely drop this + /// cycle head as whether or not it exists impacts which queries we + /// access, so ignoring it would cause incremental compilation verification + /// failures or hide query cycles. + usages: HeadUsages, +} + /// All cycle heads a given goal depends on, ordered by their stack depth. /// /// We also track all paths from this goal to that head. This is necessary /// when rebasing provisional cache results. 
#[derive(Clone, Debug, Default)] struct CycleHeads { - heads: BTreeMap<StackDepth, PathsToNested>, + heads: BTreeMap<StackDepth, CycleHead>, } impl CycleHeads { @@ -244,43 +305,51 @@ impl CycleHeads { self.heads.is_empty() } - fn highest_cycle_head(&self) -> StackDepth { - self.opt_highest_cycle_head().unwrap() + fn highest_cycle_head(&self) -> (StackDepth, CycleHead) { + self.heads.last_key_value().map(|(k, v)| (*k, *v)).unwrap() } - fn opt_highest_cycle_head(&self) -> Option<StackDepth> { + fn highest_cycle_head_index(&self) -> StackDepth { + self.opt_highest_cycle_head_index().unwrap() + } + + fn opt_highest_cycle_head_index(&self) -> Option<StackDepth> { self.heads.last_key_value().map(|(k, _)| *k) } - fn opt_lowest_cycle_head(&self) -> Option<StackDepth> { + fn opt_lowest_cycle_head_index(&self) -> Option<StackDepth> { self.heads.first_key_value().map(|(k, _)| *k) } - fn remove_highest_cycle_head(&mut self) -> PathsToNested { + fn remove_highest_cycle_head(&mut self) -> CycleHead { let last = self.heads.pop_last(); last.unwrap().1 } - fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) { - *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into(); + fn insert( + &mut self, + head_index: StackDepth, + path_from_entry: impl Into<PathsToNested> + Copy, + usages: HeadUsages, + ) { + match self.heads.entry(head_index) { + btree_map::Entry::Vacant(entry) => { + entry.insert(CycleHead { paths_to_head: path_from_entry.into(), usages }); + } + btree_map::Entry::Occupied(entry) => { + let head = entry.into_mut(); + head.paths_to_head |= path_from_entry.into(); + head.usages.add_usages_from_nested(usages); + } + } } - fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ { - self.heads.iter().map(|(k, v)| (*k, *v)) + fn ignore_usages(&mut self, head_index: StackDepth, usages: HeadUsages) { + self.heads.get_mut(&head_index).unwrap().usages.ignore_usages(usages) } - /// Update the cycle heads of a goal at depth `this` given the cycle heads - /// of a nested goal. This merges the heads after filtering the parent goal - /// itself. - fn extend_from_child(&mut self, this: StackDepth, step_kind: PathKind, child: &CycleHeads) { - for (&head, &path_from_entry) in child.heads.iter() { - match head.cmp(&this) { - Ordering::Less => {} - Ordering::Equal => continue, - Ordering::Greater => unreachable!(), - } - self.insert(head, path_from_entry.extend_with(step_kind)); - } + fn iter(&self) -> impl Iterator<Item = (StackDepth, CycleHead)> + '_ { + self.heads.iter().map(|(k, v)| (*k, *v)) } } @@ -487,9 +556,6 @@ impl<X: Cx> EvaluationResult<X> { pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> { root_depth: AvailableDepth, - /// The stack of goals currently being computed. - /// - /// An element is *deeper* in the stack if its index is *lower*. stack: Stack<X>, /// The provisional cache contains entries for already computed goals which /// still depend on goals higher-up in the stack. We don't move them to the @@ -511,6 +577,7 @@ pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> { /// cache entry. 
enum UpdateParentGoalCtxt<'a, X: Cx> { Ordinary(&'a NestedGoals<X>), + CycleOnStack(X::Input), ProvisionalCacheHit, } @@ -532,21 +599,47 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { stack: &mut Stack<X>, step_kind_from_parent: PathKind, required_depth_for_nested: usize, - heads: &CycleHeads, + heads: impl Iterator<Item = (StackDepth, CycleHead)>, encountered_overflow: bool, context: UpdateParentGoalCtxt<'_, X>, ) { - if let Some(parent_index) = stack.last_index() { - let parent = &mut stack[parent_index]; + if let Some((parent_index, parent)) = stack.last_mut_with_index() { parent.required_depth = parent.required_depth.max(required_depth_for_nested + 1); parent.encountered_overflow |= encountered_overflow; - parent.heads.extend_from_child(parent_index, step_kind_from_parent, heads); + for (head_index, head) in heads { + if let Some(candidate_usages) = &mut parent.candidate_usages { + candidate_usages + .usages + .get_or_insert_default() + .entry(head_index) + .or_default() + .add_usages_from_nested(head.usages); + } + match head_index.cmp(&parent_index) { + Ordering::Less => parent.heads.insert( + head_index, + head.paths_to_head.extend_with(step_kind_from_parent), + head.usages, + ), + Ordering::Equal => { + parent.usages.get_or_insert_default().add_usages_from_nested(head.usages); + } + Ordering::Greater => unreachable!(), + } + } let parent_depends_on_cycle = match context { UpdateParentGoalCtxt::Ordinary(nested_goals) => { parent.nested_goals.extend_from_child(step_kind_from_parent, nested_goals); !nested_goals.is_empty() } + UpdateParentGoalCtxt::CycleOnStack(head) => { + // We lookup provisional cache entries before detecting cycles. + // We therefore can't use a global cache entry if it contains a cycle + // whose head is in the provisional cache. + parent.nested_goals.insert(head, step_kind_from_parent.into()); + true + } UpdateParentGoalCtxt::ProvisionalCacheHit => true, }; // Once we've got goals which encountered overflow or a cycle, @@ -587,6 +680,29 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { stack.cycle_step_kinds(head).fold(step_kind_to_head, |curr, step| curr.extend(step)) } + pub fn enter_single_candidate(&mut self) { + let prev = self.stack.last_mut().unwrap().candidate_usages.replace(Default::default()); + debug_assert!(prev.is_none(), "existing candidate_usages: {prev:?}"); + } + + pub fn finish_single_candidate(&mut self) -> CandidateHeadUsages { + self.stack.last_mut().unwrap().candidate_usages.take().unwrap() + } + + pub fn ignore_candidate_head_usages(&mut self, usages: CandidateHeadUsages) { + if let Some(usages) = usages.usages { + let (entry_index, entry) = self.stack.last_mut_with_index().unwrap(); + #[allow(rustc::potential_query_instability)] + for (head_index, usages) in usages.into_iter() { + if head_index == entry_index { + entry.usages.unwrap().ignore_usages(usages); + } else { + entry.heads.ignore_usages(head_index, usages); + } + } + } + } + /// Probably the most involved method of the whole solver. 
/// /// While goals get computed via `D::compute_goal`, this function handles @@ -655,7 +771,8 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { required_depth: 0, heads: Default::default(), encountered_overflow: false, - has_been_used: None, + usages: None, + candidate_usages: None, nested_goals: Default::default(), }); @@ -674,7 +791,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { &mut self.stack, step_kind_from_parent, evaluation_result.required_depth, - &evaluation_result.heads, + evaluation_result.heads.iter(), evaluation_result.encountered_overflow, UpdateParentGoalCtxt::Ordinary(&evaluation_result.nested_goals), ); @@ -703,7 +820,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { let path_from_head = Self::cycle_path_kind( &self.stack, step_kind_from_parent, - heads.highest_cycle_head(), + heads.highest_cycle_head_index(), ); let provisional_cache_entry = ProvisionalCacheEntry { encountered_overflow, heads, path_from_head, result }; @@ -741,11 +858,17 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { /// When reevaluating a goal with a changed provisional result, all provisional cache entry /// which depend on this goal get invalidated. - fn clear_dependent_provisional_results(&mut self) { - let head = self.stack.next_index(); + /// + /// Note that we keep provisional cache entries which accessed this goal as a cycle head, but + /// don't depend on its value. + fn clear_dependent_provisional_results_for_rerun(&mut self) { + let rerun_index = self.stack.next_index(); #[allow(rustc::potential_query_instability)] self.provisional_cache.retain(|_, entries| { - entries.retain(|entry| entry.heads.highest_cycle_head() != head); + entries.retain(|entry| { + let (head_index, head) = entry.heads.highest_cycle_head(); + head_index != rerun_index || head.usages.is_empty() + }); !entries.is_empty() }); } @@ -772,7 +895,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { stack_entry: &StackEntry<X>, mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result, ) { - let popped_head = self.stack.next_index(); + let popped_head_index = self.stack.next_index(); #[allow(rustc::potential_query_instability)] self.provisional_cache.retain(|&input, entries| { entries.retain_mut(|entry| { @@ -782,45 +905,65 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { path_from_head, result, } = entry; - let ep = if heads.highest_cycle_head() == popped_head { + let popped_head = if heads.highest_cycle_head_index() == popped_head_index { heads.remove_highest_cycle_head() } else { return true; }; // We're rebasing an entry `e` over a head `p`. This head - // has a number of own heads `h` it depends on. We need to - // make sure that the path kind of all paths `hph` remain the - // same after rebasing. + // has a number of own heads `h` it depends on. // - // After rebasing the cycles `hph` will go through `e`. We need to make - // sure that forall possible paths `hep`, `heph` is equal to `hph.` - for (h, ph) in stack_entry.heads.iter() { - let hp = - Self::cycle_path_kind(&self.stack, stack_entry.step_kind_from_parent, h); - - // We first validate that all cycles while computing `p` would stay - // the same if we were to recompute it as a nested goal of `e`. 
- let he = hp.extend(*path_from_head); - for ph in ph.iter_paths() { - let hph = hp.extend(ph); - for ep in ep.iter_paths() { - let hep = ep.extend(he); - let heph = hep.extend(ph); - if hph != heph { - return false; + // This causes our provisional result to depend on the heads + // of `p` to avoid moving any goal which uses this cache entry to + // the global cache. + if popped_head.usages.is_empty() { + // The result of `e` does not depend on the value of `p`. This we can + // keep using the result of this provisional cache entry even if evaluating + // `p` as a nested goal of `e` would have a different result. + for (head_index, _) in stack_entry.heads.iter() { + heads.insert(head_index, PathsToNested::EMPTY, HeadUsages::default()); + } + } else { + // The entry `e` actually depends on the value of `p`. We need + // to make sure that the value of `p` wouldn't change even if we + // were to reevaluate it as a nested goal of `e` instead. For this + // we check that the path kind of all paths `hph` remain the + // same after rebasing. + // + // After rebasing the cycles `hph` will go through `e`. We need to make + // sure that forall possible paths `hep`, `heph` is equal to `hph.` + let ep = popped_head.paths_to_head; + for (head_index, head) in stack_entry.heads.iter() { + let ph = head.paths_to_head; + let hp = Self::cycle_path_kind( + &self.stack, + stack_entry.step_kind_from_parent, + head_index, + ); + // We first validate that all cycles while computing `p` would stay + // the same if we were to recompute it as a nested goal of `e`. + let he = hp.extend(*path_from_head); + for ph in ph.iter_paths() { + let hph = hp.extend(ph); + for ep in ep.iter_paths() { + let hep = ep.extend(he); + let heph = hep.extend(ph); + if hph != heph { + return false; + } } } - } - // If so, all paths reached while computing `p` have to get added - // the heads of `e` to make sure that rebasing `e` again also considers - // them. - let eph = ep.extend_with_paths(ph); - heads.insert(h, eph); + // If so, all paths reached while computing `p` have to get added + // the heads of `e` to make sure that rebasing `e` again also considers + // them. + let eph = ep.extend_with_paths(ph); + heads.insert(head_index, eph, head.usages); + } } - let Some(head) = heads.opt_highest_cycle_head() else { + let Some(head_index) = heads.opt_highest_cycle_head_index() else { return false; }; @@ -829,7 +972,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { *path_from_head = path_from_head.extend(Self::cycle_path_kind( &self.stack, stack_entry.step_kind_from_parent, - head, + head_index, )); // Mutate the result of the provisional cache entry in case we did // not reach a fixpoint. @@ -853,7 +996,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { for &ProvisionalCacheEntry { encountered_overflow, ref heads, path_from_head, result } in entries { - let head = heads.highest_cycle_head(); + let head_index = heads.highest_cycle_head_index(); if encountered_overflow { // This check is overly strict and very subtle. We need to make sure that if // a global cache entry depends on some goal without adding it to its @@ -865,24 +1008,26 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // current goal is already part of the same cycle. This check could be // improved but seems to be good enough for now. 
let last = self.stack.last().unwrap(); - if last.heads.opt_lowest_cycle_head().is_none_or(|lowest| lowest > head) { + if last.heads.opt_lowest_cycle_head_index().is_none_or(|lowest| lowest > head_index) + { continue; } } // A provisional cache entry is only valid if the current path from its // highest cycle head to the goal is the same. - if path_from_head == Self::cycle_path_kind(&self.stack, step_kind_from_parent, head) { + if path_from_head + == Self::cycle_path_kind(&self.stack, step_kind_from_parent, head_index) + { Self::update_parent_goal( &mut self.stack, step_kind_from_parent, 0, - heads, + heads.iter(), encountered_overflow, UpdateParentGoalCtxt::ProvisionalCacheHit, ); - debug_assert!(self.stack[head].has_been_used.is_some()); - debug!(?head, ?path_from_head, "provisional cache hit"); + debug!(?head_index, ?path_from_head, "provisional cache hit"); return Some(result); } } @@ -940,8 +1085,9 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // // We check if any of the paths taken while computing the global goal // would end up with an applicable provisional cache entry. - let head = heads.highest_cycle_head(); - let head_to_curr = Self::cycle_path_kind(&self.stack, step_kind_from_parent, head); + let head_index = heads.highest_cycle_head_index(); + let head_to_curr = + Self::cycle_path_kind(&self.stack, step_kind_from_parent, head_index); let full_paths = path_from_global_entry.extend_with(head_to_curr); if full_paths.contains(head_to_provisional.into()) { debug!( @@ -993,12 +1139,12 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // We don't move cycle participants to the global cache, so the // cycle heads are always empty. - let heads = Default::default(); + let heads = iter::empty(); Self::update_parent_goal( &mut self.stack, step_kind_from_parent, required_depth, - &heads, + heads, encountered_overflow, UpdateParentGoalCtxt::Ordinary(nested_goals), ); @@ -1014,34 +1160,30 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { input: X::Input, step_kind_from_parent: PathKind, ) -> Option<X::Result> { - let head = self.stack.find(input)?; + let head_index = self.stack.find(input)?; // We have a nested goal which directly relies on a goal deeper in the stack. // // We start by tagging all cycle participants, as that's necessary for caching. // // Finally we can return either the provisional response or the initial response // in case we're in the first fixpoint iteration for this goal. - let path_kind = Self::cycle_path_kind(&self.stack, step_kind_from_parent, head); - debug!(?path_kind, "encountered cycle with depth {head:?}"); - let usage_kind = UsageKind::Single(path_kind); - self.stack[head].has_been_used = - Some(self.stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind))); - - // Subtle: when encountering a cyclic goal, we still first checked for overflow, - // so we have to update the reached depth. 
- let last_index = self.stack.last_index().unwrap(); - let last = &mut self.stack[last_index]; - last.required_depth = last.required_depth.max(1); - - last.nested_goals.insert(input, step_kind_from_parent.into()); - last.nested_goals.insert(last.input, PathsToNested::EMPTY); - if last_index != head { - last.heads.insert(head, step_kind_from_parent); - } + let path_kind = Self::cycle_path_kind(&self.stack, step_kind_from_parent, head_index); + debug!(?path_kind, "encountered cycle with depth {head_index:?}"); + let mut usages = HeadUsages::default(); + usages.add_usage(path_kind); + let head = CycleHead { paths_to_head: step_kind_from_parent.into(), usages }; + Self::update_parent_goal( + &mut self.stack, + step_kind_from_parent, + 0, + iter::once((head_index, head)), + false, + UpdateParentGoalCtxt::CycleOnStack(input), + ); // Return the provisional result or, if we're in the first iteration, // start with no constraints. - if let Some(result) = self.stack[head].provisional_result { + if let Some(result) = self.stack[head_index].provisional_result { Some(result) } else { Some(D::initial_provisional_result(cx, path_kind, input)) @@ -1053,15 +1195,31 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { &mut self, cx: X, stack_entry: &StackEntry<X>, - usage_kind: UsageKind, + usages: HeadUsages, result: X::Result, ) -> bool { - if let Some(prev) = stack_entry.provisional_result { - prev == result - } else if let UsageKind::Single(kind) = usage_kind { - D::is_initial_provisional_result(cx, kind, stack_entry.input, result) + let provisional_result = stack_entry.provisional_result; + if usages.is_empty() { + true + } else if let Some(provisional_result) = provisional_result { + provisional_result == result } else { - false + let check = |k| D::is_initial_provisional_result(cx, k, stack_entry.input, result); + match usages { + HeadUsages { inductive: _, unknown: 0, coinductive: 0, forced_ambiguity: 0 } => { + check(PathKind::Inductive) + } + HeadUsages { inductive: 0, unknown: _, coinductive: 0, forced_ambiguity: 0 } => { + check(PathKind::Unknown) + } + HeadUsages { inductive: 0, unknown: 0, coinductive: _, forced_ambiguity: 0 } => { + check(PathKind::Coinductive) + } + HeadUsages { inductive: 0, unknown: 0, coinductive: 0, forced_ambiguity: _ } => { + check(PathKind::ForcedAmbiguity) + } + _ => false, + } } } @@ -1091,7 +1249,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // If the current goal is not the root of a cycle, we are done. // // There are no provisional cache entries which depend on this goal. - let Some(usage_kind) = stack_entry.has_been_used else { + let Some(usages) = stack_entry.usages else { return EvaluationResult::finalize(stack_entry, encountered_overflow, result); }; @@ -1106,7 +1264,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // is equal to the provisional result of the previous iteration, or because // this was only the root of either coinductive or inductive cycles, and the // final result is equal to the initial response for that case. - if self.reached_fixpoint(cx, &stack_entry, usage_kind, result) { + if self.reached_fixpoint(cx, &stack_entry, usages, result) { self.rebase_provisional_cache_entries(&stack_entry, |_, result| result); return EvaluationResult::finalize(stack_entry, encountered_overflow, result); } @@ -1144,7 +1302,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // Clear all provisional cache entries which depend on a previous provisional // result of this goal and rerun. 
- self.clear_dependent_provisional_results(); + self.clear_dependent_provisional_results_for_rerun(); debug!(?result, "fixpoint changed provisional results"); self.stack.push(StackEntry { @@ -1163,7 +1321,8 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // similar to the previous iterations when reevaluating, it's better // for caching if the reevaluation also starts out with `false`. encountered_overflow: false, - has_been_used: None, + usages: None, + candidate_usages: None, }); } } diff --git a/compiler/rustc_type_ir/src/search_graph/stack.rs b/compiler/rustc_type_ir/src/search_graph/stack.rs index a58cd82b023..3fd8e2bd16e 100644 --- a/compiler/rustc_type_ir/src/search_graph/stack.rs +++ b/compiler/rustc_type_ir/src/search_graph/stack.rs @@ -1,9 +1,11 @@ -use std::ops::{Index, IndexMut}; +use std::ops::Index; use derive_where::derive_where; use rustc_index::IndexVec; -use crate::search_graph::{AvailableDepth, Cx, CycleHeads, NestedGoals, PathKind, UsageKind}; +use crate::search_graph::{ + AvailableDepth, CandidateHeadUsages, Cx, CycleHeads, HeadUsages, NestedGoals, PathKind, +}; rustc_index::newtype_index! { #[orderable] @@ -26,13 +28,13 @@ pub(super) struct StackEntry<X: Cx> { /// The available depth of a given goal, immutable. pub available_depth: AvailableDepth, + /// The maximum depth required while evaluating this goal. + pub required_depth: usize, + /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. pub provisional_result: Option<X::Result>, - /// The maximum depth required while evaluating this goal. - pub required_depth: usize, - /// All cycle heads this goal depends on. Lazily updated and only /// up-to date for the top of the stack. pub heads: CycleHeads, @@ -40,14 +42,27 @@ pub(super) struct StackEntry<X: Cx> { /// Whether evaluating this goal encountered overflow. Lazily updated. pub encountered_overflow: bool, - /// Whether this goal has been used as the root of a cycle. This gets - /// eagerly updated when encountering a cycle. - pub has_been_used: Option<UsageKind>, + /// Whether and how this goal has been used as the root of a cycle. Lazily updated. + pub usages: Option<HeadUsages>, + + /// We want to be able to ignore head usages if they happen inside of candidates + /// which don't impact the result of a goal. This enables us to avoid rerunning goals + /// and is also used when rebasing provisional cache entries. + /// + /// To implement this, we track all usages while evaluating a candidate. If this candidate + /// then ends up ignored, we manually remove its usages from `usages` and `heads`. + pub candidate_usages: Option<CandidateHeadUsages>, /// The nested goals of this goal, see the doc comment of the type. pub nested_goals: NestedGoals<X>, } +/// The stack of goals currently being computed. +/// +/// An element is *deeper* in the stack if its index is *lower*. +/// +/// Only the last entry of the stack is mutable. All other entries get +/// lazily updated in `update_parent_goal`. 
#[derive_where(Default; X: Cx)] pub(super) struct Stack<X: Cx> { entries: IndexVec<StackDepth, StackEntry<X>>, @@ -62,10 +77,6 @@ impl<X: Cx> Stack<X> { self.entries.len() } - pub(super) fn last_index(&self) -> Option<StackDepth> { - self.entries.last_index() - } - pub(super) fn last(&self) -> Option<&StackEntry<X>> { self.entries.raw.last() } @@ -74,6 +85,10 @@ impl<X: Cx> Stack<X> { self.entries.raw.last_mut() } + pub(super) fn last_mut_with_index(&mut self) -> Option<(StackDepth, &mut StackEntry<X>)> { + self.entries.last_index().map(|idx| (idx, &mut self.entries[idx])) + } + pub(super) fn next_index(&self) -> StackDepth { self.entries.next_index() } @@ -108,9 +123,3 @@ impl<X: Cx> Index<StackDepth> for Stack<X> { &self.entries[index] } } - -impl<X: Cx> IndexMut<StackDepth> for Stack<X> { - fn index_mut(&mut self, index: StackDepth) -> &mut Self::Output { - &mut self.entries[index] - } -} |
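
The `HeadUsages` counters introduced in the search-graph diff above are only ever consulted for two questions: did a cycle head get used at all, and did every usage share a single `PathKind` (the shortcut taken in `reached_fixpoint`). Below is a minimal standalone sketch of that counting scheme, using hypothetical `Usages`/`PathKind` types rather than the solver's own and leaving out the stack and caching machinery:

// Standalone sketch: count cycle-head usages per path kind, but only ever
// ask "were there any usages?" and "did all usages share one kind?".
// Hypothetical simplified types, not the solver's actual implementation.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum PathKind {
    Inductive,
    Unknown,
    Coinductive,
    ForcedAmbiguity,
}

#[derive(Default, Clone, Copy, Debug)]
struct Usages {
    inductive: u32,
    unknown: u32,
    coinductive: u32,
    forced_ambiguity: u32,
}

impl Usages {
    fn add_usage(&mut self, path: PathKind) {
        match path {
            PathKind::Inductive => self.inductive += 1,
            PathKind::Unknown => self.unknown += 1,
            PathKind::Coinductive => self.coinductive += 1,
            PathKind::ForcedAmbiguity => self.forced_ambiguity += 1,
        }
    }

    fn is_empty(self) -> bool {
        self.inductive == 0
            && self.unknown == 0
            && self.coinductive == 0
            && self.forced_ambiguity == 0
    }

    /// Returns `Some(kind)` if there was at least one usage and every
    /// recorded usage used the same path kind, mirroring the per-field
    /// match in `reached_fixpoint`.
    fn single_kind(self) -> Option<PathKind> {
        if self.is_empty() {
            return None;
        }
        match self {
            Usages { inductive: _, unknown: 0, coinductive: 0, forced_ambiguity: 0 } => {
                Some(PathKind::Inductive)
            }
            Usages { inductive: 0, unknown: _, coinductive: 0, forced_ambiguity: 0 } => {
                Some(PathKind::Unknown)
            }
            Usages { inductive: 0, unknown: 0, coinductive: _, forced_ambiguity: 0 } => {
                Some(PathKind::Coinductive)
            }
            Usages { inductive: 0, unknown: 0, coinductive: 0, forced_ambiguity: _ } => {
                Some(PathKind::ForcedAmbiguity)
            }
            _ => None,
        }
    }
}

fn main() {
    let mut usages = Usages::default();
    usages.add_usage(PathKind::Coinductive);
    usages.add_usage(PathKind::Coinductive);
    assert!(!usages.is_empty());
    assert_eq!(usages.single_kind(), Some(PathKind::Coinductive));
}

Keeping per-kind counts rather than a boolean is what allows the real solver's `ignore_candidate_head_usages` to subtract the usages recorded while evaluating a discarded candidate.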

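The stack change above drops `IndexMut` in favour of `last_mut_with_index`, matching the documented invariant that only the last entry of the stack is mutable while deeper entries are updated lazily in `update_parent_goal`. A rough sketch of that accessor over a plain `Vec`-backed stack, with hypothetical `Entry`/`Stack` types:

// Sketch: a Vec-backed stack that only hands out mutable access to its top
// entry, together with that entry's index; deeper entries stay read-only.
// Hypothetical simplified types, not the search graph's actual stack.

struct Entry {
    required_depth: usize,
    encountered_overflow: bool,
}

#[derive(Default)]
struct Stack {
    entries: Vec<Entry>,
}

impl Stack {
    fn push(&mut self, entry: Entry) -> usize {
        self.entries.push(entry);
        self.entries.len() - 1
    }

    /// Mutable access is only offered for the last entry, analogous to
    /// `Stack::last_mut_with_index` in the diff above.
    fn last_mut_with_index(&mut self) -> Option<(usize, &mut Entry)> {
        let index = self.entries.len().checked_sub(1)?;
        Some((index, &mut self.entries[index]))
    }

    /// Read-only access by index remains available for deeper entries.
    fn get(&self, index: usize) -> Option<&Entry> {
        self.entries.get(index)
    }
}

fn main() {
    let mut stack = Stack::default();
    stack.push(Entry { required_depth: 0, encountered_overflow: false });
    stack.push(Entry { required_depth: 0, encountered_overflow: false });

    // Bubble nested-goal bookkeeping up to the parent, i.e. the top of the stack.
    if let Some((index, parent)) = stack.last_mut_with_index() {
        assert_eq!(index, 1);
        parent.required_depth = parent.required_depth.max(3 + 1);
        parent.encountered_overflow |= true;
    }

    let top = stack.get(1).unwrap();
    assert_eq!(top.required_depth, 4);
    assert!(top.encountered_overflow);
}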