Diffstat (limited to 'compiler')
294 files changed, 6587 insertions, 5289 deletions
diff --git a/compiler/rustc_abi/Cargo.toml b/compiler/rustc_abi/Cargo.toml index 5f9afc46a1a..83d96d8d04d 100644 --- a/compiler/rustc_abi/Cargo.toml +++ b/compiler/rustc_abi/Cargo.toml @@ -9,6 +9,7 @@ bitflags = "2.4.1" rand = { version = "0.9.0", default-features = false, optional = true } rand_xoshiro = { version = "0.7.0", optional = true } rustc_data_structures = { path = "../rustc_data_structures", optional = true } +rustc_error_messages = { path = "../rustc_error_messages", optional = true } rustc_hashes = { path = "../rustc_hashes" } rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } @@ -24,6 +25,7 @@ default = ["nightly", "randomize"] # without depending on rustc_data_structures, rustc_macros and rustc_serialize nightly = [ "dep:rustc_data_structures", + "dep:rustc_error_messages", "dep:rustc_macros", "dep:rustc_serialize", "dep:rustc_span", diff --git a/compiler/rustc_abi/src/extern_abi.rs b/compiler/rustc_abi/src/extern_abi.rs index 29a3678abf3..41d744e1946 100644 --- a/compiler/rustc_abi/src/extern_abi.rs +++ b/compiler/rustc_abi/src/extern_abi.rs @@ -223,6 +223,9 @@ impl StableOrd for ExternAbi { const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = (); } +#[cfg(feature = "nightly")] +rustc_error_messages::into_diag_arg_using_display!(ExternAbi); + impl ExternAbi { /// An ABI "like Rust" /// diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 87c9c797ea5..de3e0e0c87f 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -3137,7 +3137,7 @@ impl FnRetTy { #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, Walkable)] pub enum Inline { Yes, - No, + No { had_parse_error: Result<(), ErrorGuaranteed> }, } /// Module item kind. @@ -3147,7 +3147,7 @@ pub enum ModKind { /// or with definition outlined to a separate file `mod foo;` and already loaded from it. /// The inner span is from the first token past `{` to the last token until `}`, /// or from the first to the last token in the loaded file. - Loaded(ThinVec<Box<Item>>, Inline, ModSpans, Result<(), ErrorGuaranteed>), + Loaded(ThinVec<Box<Item>>, Inline, ModSpans), /// Module with definition outlined to a separate file `mod foo;` but not yet loaded from it. Unloaded, } diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index fc816f2cb79..ea98bebd305 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -7,6 +7,7 @@ pub use NtPatKind::*; pub use TokenKind::*; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::edition::Edition; +use rustc_span::symbol::IdentPrintMode; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym}; #[allow(clippy::useless_attribute)] // FIXME: following use of `hidden_glob_reexports` incorrectly triggers `useless_attribute` lint. 
#[allow(hidden_glob_reexports)] @@ -344,15 +345,24 @@ pub enum IdentIsRaw { Yes, } -impl From<bool> for IdentIsRaw { - fn from(b: bool) -> Self { - if b { Self::Yes } else { Self::No } +impl IdentIsRaw { + pub fn to_print_mode_ident(self) -> IdentPrintMode { + match self { + IdentIsRaw::No => IdentPrintMode::Normal, + IdentIsRaw::Yes => IdentPrintMode::RawIdent, + } + } + pub fn to_print_mode_lifetime(self) -> IdentPrintMode { + match self { + IdentIsRaw::No => IdentPrintMode::Normal, + IdentIsRaw::Yes => IdentPrintMode::RawLifetime, + } } } -impl From<IdentIsRaw> for bool { - fn from(is_raw: IdentIsRaw) -> bool { - matches!(is_raw, IdentIsRaw::Yes) +impl From<bool> for IdentIsRaw { + fn from(b: bool) -> Self { + if b { Self::Yes } else { Self::No } } } diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index e55399adfb8..f4f35a4d2ee 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -907,6 +907,12 @@ impl TokenTreeCursor { pub fn bump(&mut self) { self.index += 1; } + + // For skipping ahead in rare circumstances. + #[inline] + pub fn bump_to_end(&mut self) { + self.index = self.stream.len(); + } } /// A `TokenStream` cursor that produces `Token`s. It's a bit odd that diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 72817a0a9a0..bb559bd8921 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -251,7 +251,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ItemKind::Mod(_, ident, mod_kind) => { let ident = self.lower_ident(*ident); match mod_kind { - ModKind::Loaded(items, _, spans, _) => { + ModKind::Loaded(items, _, spans) => { hir::ItemKind::Mod(ident, self.lower_mod(items, spans)) } ModKind::Unloaded => panic!("`mod` items should have been loaded by now"), @@ -1596,7 +1596,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let safety = self.lower_safety(h.safety, default_safety); // Treat safe `#[target_feature]` functions as unsafe, but also remember that we did so. - let safety = if find_attr!(attrs, AttributeKind::TargetFeature { .. }) + let safety = if find_attr!(attrs, AttributeKind::TargetFeature { was_forced: false, .. 
}) && safety.is_safe() && !self.tcx.sess.target.is_like_wasm { diff --git a/compiler/rustc_ast_passes/Cargo.toml b/compiler/rustc_ast_passes/Cargo.toml index 1940628b44a..3e04f8b11ec 100644 --- a/compiler/rustc_ast_passes/Cargo.toml +++ b/compiler/rustc_ast_passes/Cargo.toml @@ -15,7 +15,6 @@ rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_macros = { path = "../rustc_macros" } -rustc_parse = { path = "../rustc_parse" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl index 73cbcdd30ce..a95f1443968 100644 --- a/compiler/rustc_ast_passes/messages.ftl +++ b/compiler/rustc_ast_passes/messages.ftl @@ -20,6 +20,10 @@ ast_passes_abi_must_not_have_return_type= .note = functions with the {$abi} ABI cannot have a return type .help = remove the return type +ast_passes_abi_x86_interrupt = + invalid signature for `extern "x86-interrupt"` function + .note = functions with the "x86-interrupt" ABI must be have either 1 or 2 parameters (but found {$param_count}) + ast_passes_assoc_const_without_body = associated constant in `impl` without body .suggestion = provide a definition for the constant @@ -109,6 +113,10 @@ ast_passes_extern_without_abi = `extern` declarations without an explicit ABI ar .suggestion = specify an ABI .help = prior to Rust 2024, a default ABI was inferred +ast_passes_extern_without_abi_sugg = `extern` declarations without an explicit ABI are deprecated + .label = ABI should be specified here + .suggestion = explicitly specify the {$default_abi} ABI + ast_passes_feature_on_non_nightly = `#![feature]` may not be used on the {$channel} release channel .suggestion = remove the attribute .stable_since = the feature `{$name}` has been stable since `{$since}` and no longer requires an attribute to enable diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index 0c72f319007..6133cc3548b 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -25,16 +25,16 @@ use rustc_abi::{CanonAbi, ExternAbi, InterruptKind}; use rustc_ast::visit::{AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor, walk_list}; use rustc_ast::*; use rustc_ast_pretty::pprust::{self, State}; +use rustc_attr_parsing::validate_attr; use rustc_data_structures::fx::FxIndexMap; -use rustc_errors::DiagCtxtHandle; +use rustc_errors::{DiagCtxtHandle, LintBuffer}; use rustc_feature::Features; -use rustc_parse::validate_attr; use rustc_session::Session; +use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{ DEPRECATED_WHERE_CLAUSE_LOCATION, MISSING_ABI, MISSING_UNSAFE_ON_EXTERN, PATTERNS_IN_FNS_WITHOUT_BODY, }; -use rustc_session::lint::{BuiltinLintDiag, LintBuffer}; use rustc_span::{Ident, Span, kw, sym}; use rustc_target::spec::{AbiMap, AbiMapping}; use thin_vec::thin_vec; @@ -405,6 +405,17 @@ impl<'a> AstValidator<'a> { if let InterruptKind::X86 = interrupt_kind { // "x86-interrupt" is special because it does have arguments. // FIXME(workingjubilee): properly lint on acceptable input types. 
+ let inputs = &sig.decl.inputs; + let param_count = inputs.len(); + if !matches!(param_count, 1 | 2) { + let mut spans: Vec<Span> = + inputs.iter().map(|arg| arg.span).collect(); + if spans.is_empty() { + spans = vec![sig.span]; + } + self.dcx().emit_err(errors::AbiX86Interrupt { spans, param_count }); + } + if let FnRetTy::Ty(ref ret_ty) = sig.decl.output && match &ret_ty.kind { TyKind::Never => false, @@ -865,7 +876,7 @@ impl<'a> AstValidator<'a> { MISSING_ABI, id, span, - BuiltinLintDiag::MissingAbi(span, ExternAbi::FALLBACK), + errors::MissingAbiSugg { span, default_abi: ExternAbi::FALLBACK }, ) } } @@ -1169,7 +1180,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.dcx().emit_err(errors::UnsafeItem { span, kind: "module" }); } // Ensure that `path` attributes on modules are recorded as used (cf. issue #35584). - if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _, _)) + if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _)) && !attr::contains_name(&item.attrs, sym::path) { self.check_mod_file_item_asciionly(*ident); diff --git a/compiler/rustc_ast_passes/src/errors.rs b/compiler/rustc_ast_passes/src/errors.rs index 5ecc0d21411..ae8f056cb4e 100644 --- a/compiler/rustc_ast_passes/src/errors.rs +++ b/compiler/rustc_ast_passes/src/errors.rs @@ -4,7 +4,7 @@ use rustc_abi::ExternAbi; use rustc_ast::ParamKindOrd; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, EmissionGuarantee, Subdiagnostic}; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_span::{Ident, Span, Symbol}; use crate::fluent_generated as fluent; @@ -815,6 +815,14 @@ pub(crate) struct MissingAbi { pub span: Span, } +#[derive(LintDiagnostic)] +#[diag(ast_passes_extern_without_abi_sugg)] +pub(crate) struct MissingAbiSugg { + #[suggestion(code = "extern {default_abi}", applicability = "machine-applicable")] + pub span: Span, + pub default_abi: ExternAbi, +} + #[derive(Diagnostic)] #[diag(ast_passes_abi_custom_safe_foreign_function)] pub(crate) struct AbiCustomSafeForeignFunction { @@ -891,3 +899,12 @@ pub(crate) struct AbiMustNotHaveReturnType { pub span: Span, pub abi: ExternAbi, } + +#[derive(Diagnostic)] +#[diag(ast_passes_abi_x86_interrupt)] +#[note] +pub(crate) struct AbiX86Interrupt { + #[primary_span] + pub spans: Vec<Span>, + pub param_count: usize, +} diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index c9344a76a7b..e763c9d69fc 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -524,6 +524,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { gate_all!(where_clause_attrs, "attributes in `where` clause are unstable"); gate_all!(super_let, "`super let` is experimental"); gate_all!(frontmatter, "frontmatters are experimental"); + gate_all!(coroutines, "coroutine syntax is experimental"); if !visitor.features.never_patterns() { if let Some(spans) = spans.get(&sym::never_patterns) { diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 85f76036df7..a056ce3e29d 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -10,7 +10,7 @@ use std::borrow::Cow; use std::sync::Arc; use rustc_ast::attr::AttrIdGenerator; -use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, Token, 
TokenKind}; use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree}; use rustc_ast::util::classify; use rustc_ast::util::comments::{Comment, CommentStyle}; @@ -441,7 +441,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool); fn print_ident(&mut self, ident: Ident) { - self.word(IdentPrinter::for_ast_ident(ident, ident.is_raw_guess()).to_string()); + self.word(IdentPrinter::for_ast_ident(ident, ident.guess_print_mode()).to_string()); self.ann_post(ident) } @@ -1015,17 +1015,16 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere /* Name components */ token::Ident(name, is_raw) => { - IdentPrinter::new(name, is_raw.into(), convert_dollar_crate).to_string().into() + IdentPrinter::new(name, is_raw.to_print_mode_ident(), convert_dollar_crate) + .to_string() + .into() } token::NtIdent(ident, is_raw) => { - IdentPrinter::for_ast_ident(ident, is_raw.into()).to_string().into() + IdentPrinter::for_ast_ident(ident, is_raw.to_print_mode_ident()).to_string().into() } - token::Lifetime(name, IdentIsRaw::No) - | token::NtLifetime(Ident { name, .. }, IdentIsRaw::No) => name.to_string().into(), - token::Lifetime(name, IdentIsRaw::Yes) - | token::NtLifetime(Ident { name, .. }, IdentIsRaw::Yes) => { - format!("'r#{}", &name.as_str()[1..]).into() + token::Lifetime(name, is_raw) | token::NtLifetime(Ident { name, .. }, is_raw) => { + IdentPrinter::new(name, is_raw.to_print_mode_lifetime(), None).to_string().into() } /* Other */ diff --git a/compiler/rustc_attr_parsing/Cargo.toml b/compiler/rustc_attr_parsing/Cargo.toml index bac89373b67..fd8f7ffb2ed 100644 --- a/compiler/rustc_attr_parsing/Cargo.toml +++ b/compiler/rustc_attr_parsing/Cargo.toml @@ -5,7 +5,6 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -itertools = "0.12" rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } @@ -15,6 +14,7 @@ rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hir = { path = "../rustc_hir" } rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } +rustc_parse = { path = "../rustc_parse" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } thin-vec = "0.2.12" diff --git a/compiler/rustc_attr_parsing/messages.ftl b/compiler/rustc_attr_parsing/messages.ftl index 4fb66a81652..40e9d597530 100644 --- a/compiler/rustc_attr_parsing/messages.ftl +++ b/compiler/rustc_attr_parsing/messages.ftl @@ -12,9 +12,11 @@ attr_parsing_empty_attribute = attr_parsing_invalid_target = `#[{$name}]` attribute cannot be used on {$target} .help = `#[{$name}]` can {$only}be applied to {$applied} + .suggestion = remove the attribute attr_parsing_invalid_target_lint = `#[{$name}]` attribute cannot be used on {$target} .warn = {-attr_parsing_previously_accepted} .help = `#[{$name}]` can {$only}be applied to {$applied} + .suggestion = remove the attribute attr_parsing_empty_confusables = expected at least one confusable name @@ -168,3 +170,22 @@ attr_parsing_unused_multiple = -attr_parsing_previously_accepted = this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
+ +attr_parsing_meta_bad_delim = wrong meta list delimiters +attr_parsing_meta_bad_delim_suggestion = the delimiters should be `(` and `)` + +attr_parsing_unsafe_attr_outside_unsafe = unsafe attribute used without unsafe + .label = usage of unsafe attribute +attr_parsing_unsafe_attr_outside_unsafe_suggestion = wrap the attribute in `unsafe(...)` + +attr_parsing_invalid_attr_unsafe = `{$name}` is not an unsafe attribute + .label = this is not an unsafe attribute + .suggestion = remove the `unsafe(...)` + .note = extraneous unsafe is not allowed in attributes + +attr_parsing_invalid_meta_item = expected a literal (`1u8`, `1.0f32`, `"string"`, etc.) here, found {$descr} + .remove_neg_sugg = negative numbers are not literals, try removing the `-` sign + .quote_ident_sugg = surround the identifier with quotation marks to make it into a string literal + +attr_parsing_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes + .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.) diff --git a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs index 4d995027814..088fa73d742 100644 --- a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs +++ b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs @@ -1,14 +1,6 @@ use std::iter; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Span, Symbol, sym}; - -use super::{CombineAttributeParser, ConvertFn}; -use crate::context::MaybeWarn::{Allow, Warn}; -use crate::context::{AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; use crate::session_diagnostics; pub(crate) struct AllowInternalUnstableParser; diff --git a/compiler/rustc_attr_parsing/src/attributes/body.rs b/compiler/rustc_attr_parsing/src/attributes/body.rs index 88540384621..a1492d76194 100644 --- a/compiler/rustc_attr_parsing/src/attributes/body.rs +++ b/compiler/rustc_attr_parsing/src/attributes/body.rs @@ -1,12 +1,6 @@ //! Attributes that can be found in function body. 
-use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; - -use super::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::MaybeWarn::Allow; -use crate::context::{AllowedTargets, Stage}; +use super::prelude::*; pub(crate) struct CoroutineParser; diff --git a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs index 6ea073896c2..ffdacff7152 100644 --- a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs @@ -1,16 +1,7 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, CoverageAttrKind, OptimizeAttr, UsedBy}; -use rustc_hir::{MethodKind, Target}; +use rustc_hir::attrs::{CoverageAttrKind, OptimizeAttr, SanitizerSet, UsedBy}; use rustc_session::parse::feature_err; -use rustc_span::{Span, Symbol, sym}; - -use super::{ - AcceptMapping, AttributeOrder, AttributeParser, CombineAttributeParser, ConvertFn, - NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, -}; -use crate::context::MaybeWarn::{Allow, Warn}; -use crate::context::{AcceptContext, AllowedTargets, FinalizeContext, Stage}; -use crate::parser::ArgParser; + +use super::prelude::*; use crate::session_diagnostics::{NakedFunctionIncompatibleAttribute, NullOnExport}; pub(crate) struct OptimizeParser; @@ -44,7 +35,7 @@ impl<S: Stage> SingleAttributeParser<S> for OptimizeParser { Some(sym::speed) => OptimizeAttr::Speed, Some(sym::none) => OptimizeAttr::DoNotOptimize, _ => { - cx.expected_specific_argument(single.span(), vec!["size", "speed", "none"]); + cx.expected_specific_argument(single.span(), &[sym::size, sym::speed, sym::none]); OptimizeAttr::Default } }; @@ -91,7 +82,7 @@ impl<S: Stage> SingleAttributeParser<S> for CoverageParser { fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { let Some(args) = args.list() else { - cx.expected_specific_argument_and_list(cx.attr_span, vec!["on", "off"]); + cx.expected_specific_argument_and_list(cx.attr_span, &[sym::on, sym::off]); return None; }; @@ -100,7 +91,8 @@ impl<S: Stage> SingleAttributeParser<S> for CoverageParser { return None; }; - let fail_incorrect_argument = |span| cx.expected_specific_argument(span, vec!["on", "off"]); + let fail_incorrect_argument = + |span| cx.expected_specific_argument(span, &[sym::on, sym::off]); let Some(arg) = arg.meta_item() else { fail_incorrect_argument(args.span); @@ -135,6 +127,7 @@ impl<S: Stage> SingleAttributeParser<S> for ExportNameParser { Warn(Target::Field), Warn(Target::Arm), Warn(Target::MacroDef), + Warn(Target::MacroCall), ]); const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name"); @@ -182,6 +175,7 @@ impl<S: Stage> AttributeParser<S> for NakedParser { Allow(Target::Method(MethodKind::Inherent)), Allow(Target::Method(MethodKind::Trait { body: true })), Allow(Target::Method(MethodKind::TraitImpl)), + Warn(Target::MacroCall), ]); fn finalize(self, cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { @@ -286,6 +280,7 @@ impl<S: Stage> NoArgsAttributeParser<S> for TrackCallerParser { Warn(Target::MacroDef), Warn(Target::Arm), Warn(Target::Field), + Warn(Target::MacroCall), ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::TrackCaller; } @@ -352,7 +347,7 @@ impl<S: Stage> AttributeParser<S> for UsedParser { UsedBy::Linker } _ => { - cx.expected_specific_argument(l.span(), vec!["compiler", "linker"]); + cx.expected_specific_argument(l.span(), 
&[sym::compiler, sym::linker]); return; } } @@ -373,7 +368,8 @@ impl<S: Stage> AttributeParser<S> for UsedParser { } }, )]; - const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Static)]); + const ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Static), Warn(Target::MacroCall)]); fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { // Ratcheting behaviour, if both `linker` and `compiler` are specified, use `linker` @@ -385,57 +381,68 @@ impl<S: Stage> AttributeParser<S> for UsedParser { } } +fn parse_tf_attribute<'c, S: Stage>( + cx: &'c mut AcceptContext<'_, '_, S>, + args: &'c ArgParser<'_>, +) -> impl IntoIterator<Item = (Symbol, Span)> + 'c { + let mut features = Vec::new(); + let ArgParser::List(list) = args else { + cx.expected_list(cx.attr_span); + return features; + }; + if list.is_empty() { + cx.warn_empty_attribute(cx.attr_span); + return features; + } + for item in list.mixed() { + let Some(name_value) = item.meta_item() else { + cx.expected_name_value(item.span(), Some(sym::enable)); + return features; + }; + + // Validate name + let Some(name) = name_value.path().word_sym() else { + cx.expected_name_value(name_value.path().span(), Some(sym::enable)); + return features; + }; + if name != sym::enable { + cx.expected_name_value(name_value.path().span(), Some(sym::enable)); + return features; + } + + // Use value + let Some(name_value) = name_value.args().name_value() else { + cx.expected_name_value(item.span(), Some(sym::enable)); + return features; + }; + let Some(value_str) = name_value.value_as_str() else { + cx.expected_string_literal(name_value.value_span, Some(name_value.value_as_lit())); + return features; + }; + for feature in value_str.as_str().split(",") { + features.push((Symbol::intern(feature), item.span())); + } + } + features +} + pub(crate) struct TargetFeatureParser; impl<S: Stage> CombineAttributeParser<S> for TargetFeatureParser { type Item = (Symbol, Span); const PATH: &[Symbol] = &[sym::target_feature]; - const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::TargetFeature(items, span); + const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::TargetFeature { + features: items, + attr_span: span, + was_forced: false, + }; const TEMPLATE: AttributeTemplate = template!(List: &["enable = \"feat1, feat2\""]); fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, args: &'c ArgParser<'_>, ) -> impl IntoIterator<Item = Self::Item> + 'c { - let mut features = Vec::new(); - let ArgParser::List(list) = args else { - cx.expected_list(cx.attr_span); - return features; - }; - if list.is_empty() { - cx.warn_empty_attribute(cx.attr_span); - return features; - } - for item in list.mixed() { - let Some(name_value) = item.meta_item() else { - cx.expected_name_value(item.span(), Some(sym::enable)); - return features; - }; - - // Validate name - let Some(name) = name_value.path().word_sym() else { - cx.expected_name_value(name_value.path().span(), Some(sym::enable)); - return features; - }; - if name != sym::enable { - cx.expected_name_value(name_value.path().span(), Some(sym::enable)); - return features; - } - - // Use value - let Some(name_value) = name_value.args().name_value() else { - cx.expected_name_value(item.span(), Some(sym::enable)); - return features; - }; - let Some(value_str) = name_value.value_as_str() else { - cx.expected_string_literal(name_value.value_span, Some(name_value.value_as_lit())); - return features; - }; - for feature in 
value_str.as_str().split(",") { - features.push((Symbol::intern(feature), item.span())); - } - } - features + parse_tf_attribute(cx, args) } const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ @@ -447,5 +454,134 @@ impl<S: Stage> CombineAttributeParser<S> for TargetFeatureParser { Warn(Target::Field), Warn(Target::Arm), Warn(Target::MacroDef), + Warn(Target::MacroCall), + ]); +} + +pub(crate) struct ForceTargetFeatureParser; + +impl<S: Stage> CombineAttributeParser<S> for ForceTargetFeatureParser { + type Item = (Symbol, Span); + const PATH: &[Symbol] = &[sym::force_target_feature]; + const CONVERT: ConvertFn<Self::Item> = |items, span| AttributeKind::TargetFeature { + features: items, + attr_span: span, + was_forced: true, + }; + const TEMPLATE: AttributeTemplate = template!(List: &["enable = \"feat1, feat2\""]); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::Fn), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + ]); + + fn extend<'c>( + cx: &'c mut AcceptContext<'_, '_, S>, + args: &'c ArgParser<'_>, + ) -> impl IntoIterator<Item = Self::Item> + 'c { + parse_tf_attribute(cx, args) + } +} + +pub(crate) struct SanitizeParser; + +impl<S: Stage> SingleAttributeParser<S> for SanitizeParser { + const PATH: &[Symbol] = &[sym::sanitize]; + + // FIXME: still checked in check_attrs.rs + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); + + const TEMPLATE: AttributeTemplate = template!(List: &[ + r#"address = "on|off""#, + r#"kernel_address = "on|off""#, + r#"cfi = "on|off""#, + r#"hwaddress = "on|off""#, + r#"kcfi = "on|off""#, + r#"memory = "on|off""#, + r#"memtag = "on|off""#, + r#"shadow_call_stack = "on|off""#, + r#"thread = "on|off""# ]); + + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); + return None; + }; + + let mut on_set = SanitizerSet::empty(); + let mut off_set = SanitizerSet::empty(); + + for item in list.mixed() { + let Some(item) = item.meta_item() else { + cx.expected_name_value(item.span(), None); + continue; + }; + + let path = item.path().word_sym(); + let Some(value) = item.args().name_value() else { + cx.expected_name_value(item.span(), path); + continue; + }; + + let mut apply = |s: SanitizerSet| { + let is_on = match value.value_as_str() { + Some(sym::on) => true, + Some(sym::off) => false, + Some(_) => { + cx.expected_specific_argument_strings( + value.value_span, + &[sym::on, sym::off], + ); + return; + } + None => { + cx.expected_string_literal(value.value_span, Some(value.value_as_lit())); + return; + } + }; + + if is_on { + on_set |= s; + } else { + off_set |= s; + } + }; + + match path { + Some(sym::address) | Some(sym::kernel_address) => { + apply(SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS) + } + Some(sym::cfi) => apply(SanitizerSet::CFI), + Some(sym::kcfi) => apply(SanitizerSet::KCFI), + Some(sym::memory) => apply(SanitizerSet::MEMORY), + Some(sym::memtag) => apply(SanitizerSet::MEMTAG), + Some(sym::shadow_call_stack) => apply(SanitizerSet::SHADOWCALLSTACK), + Some(sym::thread) => apply(SanitizerSet::THREAD), + Some(sym::hwaddress) => apply(SanitizerSet::HWADDRESS), + _ => { + cx.expected_specific_argument_strings( + 
item.path().span(), + &[ + sym::address, + sym::cfi, + sym::kcfi, + sym::memory, + sym::memtag, + sym::shadow_call_stack, + sym::thread, + sym::hwaddress, + ], + ); + continue; + } + } + } + + Some(AttributeKind::Sanitize { on_set, off_set, span: cx.attr_span }) + } } diff --git a/compiler/rustc_attr_parsing/src/attributes/confusables.rs b/compiler/rustc_attr_parsing/src/attributes/confusables.rs index 00f949c82c5..97e78dfb136 100644 --- a/compiler/rustc_attr_parsing/src/attributes/confusables.rs +++ b/compiler/rustc_attr_parsing/src/attributes/confusables.rs @@ -1,13 +1,6 @@ -use rustc_feature::template; -use rustc_hir::attrs::AttributeKind; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Span, Symbol, sym}; -use thin_vec::ThinVec; - -use super::{AcceptMapping, AttributeParser}; -use crate::context::MaybeWarn::Allow; -use crate::context::{AllowedTargets, FinalizeContext, Stage}; -use crate::session_diagnostics; +use super::prelude::*; +use crate::session_diagnostics::EmptyConfusables; + #[derive(Default)] pub(crate) struct ConfusablesParser { confusables: ThinVec<Symbol>, @@ -25,7 +18,7 @@ impl<S: Stage> AttributeParser<S> for ConfusablesParser { }; if list.is_empty() { - cx.emit_err(session_diagnostics::EmptyConfusables { span: cx.attr_span }); + cx.emit_err(EmptyConfusables { span: cx.attr_span }); } for param in list.mixed() { diff --git a/compiler/rustc_attr_parsing/src/attributes/crate_level.rs b/compiler/rustc_attr_parsing/src/attributes/crate_level.rs new file mode 100644 index 00000000000..9175d7479e1 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/crate_level.rs @@ -0,0 +1,33 @@ +use super::prelude::*; + +pub(crate) struct CrateNameParser; + +impl<S: Stage> SingleAttributeParser<S> for CrateNameParser { + const PATH: &[Symbol] = &[sym::crate_name]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name"); + + // FIXME: crate name is allowed on all targets and ignored, + // even though it should only be valid on crates of course + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let ArgParser::NameValue(n) = args else { + cx.expected_name_value(cx.attr_span, None); + return None; + }; + + let Some(name) = n.value_as_str() else { + cx.expected_string_literal(n.value_span, Some(n.value_as_lit())); + return None; + }; + + Some(AttributeKind::CrateName { + name, + name_span: n.value_span, + attr_span: cx.attr_span, + style: cx.attr_style, + }) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs index d3a61f3a653..31c698228ef 100644 --- a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs +++ b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs @@ -1,14 +1,11 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, DeprecatedSince, Deprecation}; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Span, Symbol, sym}; +use rustc_hir::attrs::{DeprecatedSince, Deprecation}; +use super::prelude::*; use super::util::parse_version; -use super::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::MaybeWarn::{Allow, Error}; -use crate::context::{AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; -use 
crate::session_diagnostics; +use crate::session_diagnostics::{ + DeprecatedItemSuggestion, InvalidSince, MissingNote, MissingSince, +}; + pub(crate) struct DeprecationParser; fn get<S: Stage>( @@ -102,7 +99,7 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { } Some(name @ sym::suggestion) => { if !features.deprecated_suggestion() { - cx.emit_err(session_diagnostics::DeprecatedItemSuggestion { + cx.emit_err(DeprecatedItemSuggestion { span: param.span(), is_nightly: cx.sess().is_nightly_build(), details: (), @@ -144,18 +141,18 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { } else if let Some(version) = parse_version(since) { DeprecatedSince::RustcVersion(version) } else { - cx.emit_err(session_diagnostics::InvalidSince { span: cx.attr_span }); + cx.emit_err(InvalidSince { span: cx.attr_span }); DeprecatedSince::Err } } else if is_rustc { - cx.emit_err(session_diagnostics::MissingSince { span: cx.attr_span }); + cx.emit_err(MissingSince { span: cx.attr_span }); DeprecatedSince::Err } else { DeprecatedSince::Unspecified }; if is_rustc && note.is_none() { - cx.emit_err(session_diagnostics::MissingNote { span: cx.attr_span }); + cx.emit_err(MissingNote { span: cx.attr_span }); return None; } diff --git a/compiler/rustc_attr_parsing/src/attributes/dummy.rs b/compiler/rustc_attr_parsing/src/attributes/dummy.rs index 85842b1b5c5..7293cee842c 100644 --- a/compiler/rustc_attr_parsing/src/attributes/dummy.rs +++ b/compiler/rustc_attr_parsing/src/attributes/dummy.rs @@ -3,8 +3,10 @@ use rustc_hir::attrs::AttributeKind; use rustc_span::{Symbol, sym}; use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{ALL_TARGETS, AcceptContext, AllowedTargets, Stage}; +use crate::context::{AcceptContext, Stage}; use crate::parser::ArgParser; +use crate::target_checking::{ALL_TARGETS, AllowedTargets}; + pub(crate) struct DummyParser; impl<S: Stage> SingleAttributeParser<S> for DummyParser { const PATH: &[Symbol] = &[sym::rustc_dummy]; diff --git a/compiler/rustc_attr_parsing/src/attributes/inline.rs b/compiler/rustc_attr_parsing/src/attributes/inline.rs index 33c21bad240..a73430c9d00 100644 --- a/compiler/rustc_attr_parsing/src/attributes/inline.rs +++ b/compiler/rustc_attr_parsing/src/attributes/inline.rs @@ -2,17 +2,10 @@ // note: need to model better how duplicate attr errors work when not using // SingleAttributeParser which is what we have two of here. 
-use rustc_feature::{AttributeTemplate, template}; use rustc_hir::attrs::{AttributeKind, InlineAttr}; -use rustc_hir::lints::AttributeLintKind; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Symbol, sym}; -use super::{AcceptContext, AttributeOrder, OnDuplicate}; -use crate::attributes::SingleAttributeParser; -use crate::context::MaybeWarn::{Allow, Warn}; -use crate::context::{AllowedTargets, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; + pub(crate) struct InlineParser; impl<S: Stage> SingleAttributeParser<S> for InlineParser { @@ -32,6 +25,7 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser { Warn(Target::MacroDef), Warn(Target::Arm), Warn(Target::AssocConst), + Warn(Target::MacroCall), ]); const TEMPLATE: AttributeTemplate = template!( Word, @@ -56,7 +50,7 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser { Some(AttributeKind::Inline(InlineAttr::Never, cx.attr_span)) } _ => { - cx.expected_specific_argument(l.span(), vec!["always", "never"]); + cx.expected_specific_argument(l.span(), &[sym::always, sym::never]); return None; } } diff --git a/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs index 552b9dfabc2..5e4551ccd79 100644 --- a/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs @@ -1,16 +1,10 @@ -use rustc_feature::{AttributeTemplate, template}; use rustc_hir::attrs::AttributeKind::{LinkName, LinkOrdinal, LinkSection}; -use rustc_hir::attrs::{AttributeKind, Linkage}; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Span, Symbol, sym}; - -use crate::attributes::{ - AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, -}; -use crate::context::MaybeWarn::Allow; -use crate::context::{ALL_TARGETS, AcceptContext, AllowedTargets, Stage, parse_single_integer}; -use crate::parser::ArgParser; +use rustc_hir::attrs::Linkage; + +use super::prelude::*; +use super::util::parse_single_integer; use crate::session_diagnostics::{LinkOrdinalOutOfRange, NullOnLinkSection}; + pub(crate) struct LinkNameParser; impl<S: Stage> SingleAttributeParser<S> for LinkNameParser { @@ -116,8 +110,11 @@ impl<S: Stage> SingleAttributeParser<S> for LinkOrdinalParser { const PATH: &[Symbol] = &[sym::link_ordinal]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; - const ALLOWED_TARGETS: AllowedTargets = - AllowedTargets::AllowList(&[Allow(Target::ForeignFn), Allow(Target::ForeignStatic)]); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + Warn(Target::MacroCall), + ]); const TEMPLATE: AttributeTemplate = template!( List: &["ordinal"], "https://doc.rust-lang.org/reference/items/external-blocks.html#the-link_ordinal-attribute" @@ -212,16 +209,16 @@ impl<S: Stage> SingleAttributeParser<S> for LinkageParser { _ => { cx.expected_specific_argument( name_value.value_span, - vec![ - "available_externally", - "common", - "extern_weak", - "external", - "internal", - "linkonce", - "linkonce_odr", - "weak", - "weak_odr", + &[ + sym::available_externally, + sym::common, + sym::extern_weak, + sym::external, + sym::internal, + sym::linkonce, + sym::linkonce_odr, + sym::weak, + sym::weak_odr, ], ); return None; diff --git a/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs b/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs index 
2b586d4003c..63b0809d0d8 100644 --- a/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs +++ b/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs @@ -1,10 +1,5 @@ -use rustc_hir::attrs::AttributeKind; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Span, Symbol, sym}; +use super::prelude::*; -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::MaybeWarn::{Allow, Error}; -use crate::context::{AllowedTargets, Stage}; pub(crate) struct AsPtrParser; impl<S: Stage> NoArgsAttributeParser<S> for AsPtrParser { const PATH: &[Symbol] = &[sym::rustc_as_ptr]; diff --git a/compiler/rustc_attr_parsing/src/attributes/loop_match.rs b/compiler/rustc_attr_parsing/src/attributes/loop_match.rs index 242e2f2c1bc..528090b8673 100644 --- a/compiler/rustc_attr_parsing/src/attributes/loop_match.rs +++ b/compiler/rustc_attr_parsing/src/attributes/loop_match.rs @@ -1,10 +1,5 @@ -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; +use super::prelude::*; -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::MaybeWarn::Allow; -use crate::context::{AllowedTargets, Stage}; pub(crate) struct LoopMatchParser; impl<S: Stage> NoArgsAttributeParser<S> for LoopMatchParser { const PATH: &[Symbol] = &[sym::loop_match]; diff --git a/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs index 8928129c201..180130c7be4 100644 --- a/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/macro_attrs.rs @@ -1,15 +1,9 @@ use rustc_errors::DiagArgValue; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::Target; -use rustc_hir::attrs::{AttributeKind, MacroUseArgs}; -use rustc_span::{Span, Symbol, sym}; -use thin_vec::ThinVec; +use rustc_hir::attrs::MacroUseArgs; + +use super::prelude::*; +use crate::session_diagnostics::IllFormedAttributeInputLint; -use crate::attributes::{AcceptMapping, AttributeParser, NoArgsAttributeParser, OnDuplicate}; -use crate::context::MaybeWarn::{Allow, Error, Warn}; -use crate::context::{AcceptContext, AllowedTargets, FinalizeContext, Stage}; -use crate::parser::ArgParser; -use crate::session_diagnostics; pub(crate) struct MacroEscapeParser; impl<S: Stage> NoArgsAttributeParser<S> for MacroEscapeParser { const PATH: &[Symbol] = &[sym::macro_escape]; @@ -108,7 +102,7 @@ impl<S: Stage> AttributeParser<S> for MacroUseParser { } ArgParser::NameValue(_) => { let suggestions = MACRO_USE_TEMPLATE.suggestions(cx.attr_style, sym::macro_use); - cx.emit_err(session_diagnostics::IllFormedAttributeInputLint { + cx.emit_err(IllFormedAttributeInputLint { num_suggestions: suggestions.len(), suggestions: DiagArgValue::StrListSepByAnd( suggestions.into_iter().map(|s| format!("`{s}`").into()).collect(), diff --git a/compiler/rustc_attr_parsing/src/attributes/mod.rs b/compiler/rustc_attr_parsing/src/attributes/mod.rs index 3d6e26a24b8..9dad9c893f0 100644 --- a/compiler/rustc_attr_parsing/src/attributes/mod.rs +++ b/compiler/rustc_attr_parsing/src/attributes/mod.rs @@ -7,9 +7,9 @@ //! Specifically, you might not care about managing the state of your [`AttributeParser`] //! state machine yourself. In this case you can choose to implement: //! -//! - [`SingleAttributeParser`]: makes it easy to implement an attribute which should error if it +//! 
- [`SingleAttributeParser`](crate::attributes::SingleAttributeParser): makes it easy to implement an attribute which should error if it //! appears more than once in a list of attributes -//! - [`CombineAttributeParser`]: makes it easy to implement an attribute which should combine the +//! - [`CombineAttributeParser`](crate::attributes::CombineAttributeParser): makes it easy to implement an attribute which should combine the //! contents of attributes, if an attribute appear multiple times in a list //! //! Attributes should be added to `crate::context::ATTRIBUTE_PARSERS` to be parsed. @@ -21,9 +21,13 @@ use rustc_hir::attrs::AttributeKind; use rustc_span::{Span, Symbol}; use thin_vec::ThinVec; -use crate::context::{AcceptContext, AllowedTargets, FinalizeContext, Stage}; +use crate::context::{AcceptContext, FinalizeContext, Stage}; use crate::parser::ArgParser; use crate::session_diagnostics::UnusedMultiple; +use crate::target_checking::AllowedTargets; + +/// All the parsers require roughly the same imports, so this prelude has most of the often-needed ones. +mod prelude; pub(crate) mod allow_unstable; pub(crate) mod body; @@ -31,6 +35,7 @@ pub(crate) mod cfg; pub(crate) mod cfg_old; pub(crate) mod codegen_attrs; pub(crate) mod confusables; +pub(crate) mod crate_level; pub(crate) mod deprecation; pub(crate) mod dummy; pub(crate) mod inline; diff --git a/compiler/rustc_attr_parsing/src/attributes/must_use.rs b/compiler/rustc_attr_parsing/src/attributes/must_use.rs index b0ee3d1ba6e..e6a5141d783 100644 --- a/compiler/rustc_attr_parsing/src/attributes/must_use.rs +++ b/compiler/rustc_attr_parsing/src/attributes/must_use.rs @@ -1,19 +1,29 @@ use rustc_errors::DiagArgValue; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::{ALL_TARGETS, AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; -use crate::session_diagnostics; +use super::prelude::*; +use crate::session_diagnostics::IllFormedAttributeInputLint; + pub(crate) struct MustUseParser; impl<S: Stage> SingleAttributeParser<S> for MustUseParser { const PATH: &[Symbol] = &[sym::must_use]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; - const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); //FIXME Still checked fully in `check_attr.rs` + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowListWarnRest(&[ + Allow(Target::Fn), + Allow(Target::Enum), + Allow(Target::Struct), + Allow(Target::Union), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::ForeignFn), + // `impl Trait` in return position can trip + // `unused_must_use` if `Trait` is marked as + // `#[must_use]` + Allow(Target::Trait), + Error(Target::WherePredicate), + ]); const TEMPLATE: AttributeTemplate = template!( Word, NameValueStr: "reason", "https://doc.rust-lang.org/reference/attributes/diagnostics.html#the-must_use-attribute" @@ -37,7 +47,7 @@ impl<S: Stage> SingleAttributeParser<S> for MustUseParser { ArgParser::List(_) => { let suggestions = <Self as SingleAttributeParser<S>>::TEMPLATE .suggestions(cx.attr_style, "must_use"); - cx.emit_err(session_diagnostics::IllFormedAttributeInputLint { + cx.emit_err(IllFormedAttributeInputLint { 
num_suggestions: suggestions.len(), suggestions: DiagArgValue::StrListSepByAnd( suggestions.into_iter().map(|s| format!("`{s}`").into()).collect(), diff --git a/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs b/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs index 589faf38f73..40073ea0f46 100644 --- a/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs +++ b/compiler/rustc_attr_parsing/src/attributes/no_implicit_prelude.rs @@ -1,10 +1,5 @@ -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, sym}; +use super::prelude::*; -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::MaybeWarn::Allow; -use crate::context::{AllowedTargets, Stage}; pub(crate) struct NoImplicitPreludeParser; impl<S: Stage> NoArgsAttributeParser<S> for NoImplicitPreludeParser { diff --git a/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs b/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs index 41e9ca4de41..fc41c073fd2 100644 --- a/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs +++ b/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs @@ -3,8 +3,10 @@ use rustc_hir::attrs::AttributeKind; use rustc_span::{Span, Symbol, sym}; use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::MaybeWarn::{Allow, Warn}; -use crate::context::{AllowedTargets, Stage}; +use crate::context::Stage; +use crate::target_checking::AllowedTargets; +use crate::target_checking::Policy::{Allow, Warn}; + pub(crate) struct NonExhaustiveParser; impl<S: Stage> NoArgsAttributeParser<S> for NonExhaustiveParser { @@ -17,6 +19,7 @@ impl<S: Stage> NoArgsAttributeParser<S> for NonExhaustiveParser { Warn(Target::Field), Warn(Target::Arm), Warn(Target::MacroDef), + Warn(Target::MacroCall), ]); const CREATE: fn(Span) -> AttributeKind = AttributeKind::NonExhaustive; } diff --git a/compiler/rustc_attr_parsing/src/attributes/path.rs b/compiler/rustc_attr_parsing/src/attributes/path.rs index f9191d1abed..e4cb806bb42 100644 --- a/compiler/rustc_attr_parsing/src/attributes/path.rs +++ b/compiler/rustc_attr_parsing/src/attributes/path.rs @@ -1,12 +1,5 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; +use super::prelude::*; -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::MaybeWarn::{Allow, Error}; -use crate::context::{AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; pub(crate) struct PathParser; impl<S: Stage> SingleAttributeParser<S> for PathParser { diff --git a/compiler/rustc_attr_parsing/src/attributes/prelude.rs b/compiler/rustc_attr_parsing/src/attributes/prelude.rs new file mode 100644 index 00000000000..2bcdee55c75 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/prelude.rs @@ -0,0 +1,20 @@ +// parsing +// templates +pub(super) use rustc_feature::{AttributeTemplate, template}; +// data structures +pub(super) use rustc_hir::attrs::AttributeKind; +pub(super) use rustc_hir::lints::AttributeLintKind; +pub(super) use rustc_hir::{MethodKind, Target}; +pub(super) use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym}; +pub(super) use thin_vec::ThinVec; + +pub(super) use crate::attributes::{ + AcceptMapping, AttributeOrder, AttributeParser, CombineAttributeParser, ConvertFn, + NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, +}; +// contexts +pub(super) use crate::context::{AcceptContext, 
FinalizeContext, Stage}; +pub(super) use crate::parser::*; +// target checking +pub(super) use crate::target_checking::Policy::{Allow, Error, Warn}; +pub(super) use crate::target_checking::{ALL_TARGETS, AllowedTargets}; diff --git a/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs index 4624fa36287..b9929d6f1f8 100644 --- a/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs @@ -1,21 +1,13 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; -use thin_vec::ThinVec; +use super::prelude::*; + +const PROC_MACRO_ALLOWED_TARGETS: AllowedTargets = + AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate), Warn(Target::MacroCall)]); -use crate::attributes::{ - AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, -}; -use crate::context::MaybeWarn::{Allow, Warn}; -use crate::context::{AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; pub(crate) struct ProcMacroParser; impl<S: Stage> NoArgsAttributeParser<S> for ProcMacroParser { const PATH: &[Symbol] = &[sym::proc_macro]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; - const ALLOWED_TARGETS: AllowedTargets = - AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate)]); + const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS; const CREATE: fn(Span) -> AttributeKind = AttributeKind::ProcMacro; } @@ -23,8 +15,7 @@ pub(crate) struct ProcMacroAttributeParser; impl<S: Stage> NoArgsAttributeParser<S> for ProcMacroAttributeParser { const PATH: &[Symbol] = &[sym::proc_macro_attribute]; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; - const ALLOWED_TARGETS: AllowedTargets = - AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate)]); + const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS; const CREATE: fn(Span) -> AttributeKind = AttributeKind::ProcMacroAttribute; } @@ -33,8 +24,7 @@ impl<S: Stage> SingleAttributeParser<S> for ProcMacroDeriveParser { const PATH: &[Symbol] = &[sym::proc_macro_derive]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; - const ALLOWED_TARGETS: AllowedTargets = - AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate)]); + const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS; const TEMPLATE: AttributeTemplate = template!( List: &["TraitName", "TraitName, attributes(name1, name2, ...)"], "https://doc.rust-lang.org/reference/procedural-macros.html#derive-macros" @@ -110,7 +100,7 @@ fn parse_derive_like<S: Stage>( return None; }; if !attr_list.path().word_is(sym::attributes) { - cx.expected_specific_argument(attrs.span(), vec!["attributes"]); + cx.expected_specific_argument(attrs.span(), &[sym::attributes]); return None; } let Some(attr_list) = attr_list.args().list() else { diff --git a/compiler/rustc_attr_parsing/src/attributes/prototype.rs b/compiler/rustc_attr_parsing/src/attributes/prototype.rs index fb1e47298b4..80fe82bf542 100644 --- a/compiler/rustc_attr_parsing/src/attributes/prototype.rs +++ b/compiler/rustc_attr_parsing/src/attributes/prototype.rs @@ -7,8 +7,10 @@ use rustc_span::{Span, Symbol, sym}; use super::{AttributeOrder, OnDuplicate}; use crate::attributes::SingleAttributeParser; -use crate::context::{AcceptContext, AllowedTargets, 
MaybeWarn, Stage}; +use crate::context::{AcceptContext, Stage}; use crate::parser::ArgParser; +use crate::target_checking::AllowedTargets; +use crate::target_checking::Policy::Allow; pub(crate) struct CustomMirParser; @@ -19,8 +21,7 @@ impl<S: Stage> SingleAttributeParser<S> for CustomMirParser { const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; - const ALLOWED_TARGETS: AllowedTargets = - AllowedTargets::AllowList(&[MaybeWarn::Allow(Target::Fn)]); + const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Fn)]); const TEMPLATE: AttributeTemplate = template!(List: &[r#"dialect = "...", phase = "...""#]); @@ -108,7 +109,7 @@ fn parse_dialect<S: Stage>( sym::runtime => MirDialect::Runtime, _ => { - cx.expected_specific_argument(span, vec!["analysis", "built", "runtime"]); + cx.expected_specific_argument(span, &[sym::analysis, sym::built, sym::runtime]); *failed = true; return None; } @@ -130,7 +131,7 @@ fn parse_phase<S: Stage>( sym::optimized => MirPhase::Optimized, _ => { - cx.expected_specific_argument(span, vec!["initial", "post-cleanup", "optimized"]); + cx.expected_specific_argument(span, &[sym::initial, sym::post_cleanup, sym::optimized]); *failed = true; return None; } diff --git a/compiler/rustc_attr_parsing/src/attributes/repr.rs b/compiler/rustc_attr_parsing/src/attributes/repr.rs index 7ab58ed9347..23aabd15597 100644 --- a/compiler/rustc_attr_parsing/src/attributes/repr.rs +++ b/compiler/rustc_attr_parsing/src/attributes/repr.rs @@ -1,16 +1,10 @@ use rustc_abi::Align; use rustc_ast::{IntTy, LitIntType, LitKind, UintTy}; -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::{AttributeKind, IntType, ReprAttr}; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{DUMMY_SP, Span, Symbol, sym}; - -use super::{AcceptMapping, AttributeParser, CombineAttributeParser, ConvertFn, FinalizeContext}; -use crate::context::MaybeWarn::Allow; -use crate::context::{ALL_TARGETS, AcceptContext, AllowedTargets, Stage}; -use crate::parser::{ArgParser, MetaItemListParser, MetaItemParser}; -use crate::session_diagnostics; -use crate::session_diagnostics::IncorrectReprFormatGenericCause; +use rustc_hir::attrs::{IntType, ReprAttr}; + +use super::prelude::*; +use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause}; + /// Parse #[repr(...)] forms. 
/// /// Valid repr contents: any of the primitive integral type names (see diff --git a/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs b/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs index efd7b650e44..a995549fc7c 100644 --- a/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs +++ b/compiler/rustc_attr_parsing/src/attributes/rustc_internal.rs @@ -1,12 +1,6 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Symbol, sym}; - -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::MaybeWarn::Allow; -use crate::context::{AcceptContext, AllowedTargets, Stage, parse_single_integer}; -use crate::parser::ArgParser; +use super::prelude::*; +use super::util::parse_single_integer; + pub(crate) struct RustcLayoutScalarValidRangeStart; impl<S: Stage> SingleAttributeParser<S> for RustcLayoutScalarValidRangeStart { diff --git a/compiler/rustc_attr_parsing/src/attributes/semantics.rs b/compiler/rustc_attr_parsing/src/attributes/semantics.rs index d4ad861a3a2..d7f62483297 100644 --- a/compiler/rustc_attr_parsing/src/attributes/semantics.rs +++ b/compiler/rustc_attr_parsing/src/attributes/semantics.rs @@ -1,8 +1,5 @@ -use rustc_hir::attrs::AttributeKind; -use rustc_span::{Span, Symbol, sym}; +use super::prelude::*; -use crate::attributes::{NoArgsAttributeParser, OnDuplicate}; -use crate::context::{ALL_TARGETS, AllowedTargets, Stage}; pub(crate) struct MayDangleParser; impl<S: Stage> NoArgsAttributeParser<S> for MayDangleParser { const PATH: &[Symbol] = &[sym::may_dangle]; diff --git a/compiler/rustc_attr_parsing/src/attributes/stability.rs b/compiler/rustc_attr_parsing/src/attributes/stability.rs index 5a26178f84b..b94e23477ff 100644 --- a/compiler/rustc_attr_parsing/src/attributes/stability.rs +++ b/compiler/rustc_attr_parsing/src/attributes/stability.rs @@ -1,20 +1,13 @@ use std::num::NonZero; use rustc_errors::ErrorGuaranteed; -use rustc_feature::template; -use rustc_hir::attrs::AttributeKind; use rustc_hir::{ DefaultBodyStability, MethodKind, PartialConstStability, Stability, StabilityLevel, StableSince, Target, UnstableReason, VERSION_PLACEHOLDER, }; -use rustc_span::{Ident, Span, Symbol, sym}; +use super::prelude::*; use super::util::parse_version; -use super::{AcceptMapping, AttributeParser, OnDuplicate}; -use crate::attributes::NoArgsAttributeParser; -use crate::context::MaybeWarn::Allow; -use crate::context::{AcceptContext, AllowedTargets, FinalizeContext, Stage}; -use crate::parser::{ArgParser, MetaItemParser}; use crate::session_diagnostics::{self, UnsupportedLiteralReason}; macro_rules! 
reject_outside_std { @@ -54,6 +47,7 @@ const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[ Allow(Target::Static), Allow(Target::ForeignFn), Allow(Target::ForeignStatic), + Allow(Target::ExternCrate), ]); #[derive(Default)] diff --git a/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs index 164c680b8a8..510ff1ded49 100644 --- a/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs +++ b/compiler/rustc_attr_parsing/src/attributes/test_attrs.rs @@ -1,13 +1,5 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; -use rustc_hir::lints::AttributeLintKind; -use rustc_span::{Symbol, sym}; +use super::prelude::*; -use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::MaybeWarn::{Allow, Error}; -use crate::context::{AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; pub(crate) struct IgnoreParser; impl<S: Stage> SingleAttributeParser<S> for IgnoreParser { @@ -89,7 +81,7 @@ impl<S: Stage> SingleAttributeParser<S> for ShouldPanicParser { return None; }; if !single.path().word_is(sym::expected) { - cx.expected_specific_argument_strings(list.span, vec!["expected"]); + cx.expected_specific_argument_strings(list.span, &[sym::expected]); return None; } let Some(nv) = single.args().name_value() else { diff --git a/compiler/rustc_attr_parsing/src/attributes/traits.rs b/compiler/rustc_attr_parsing/src/attributes/traits.rs index ee9d7ba99cd..89ac1b07d16 100644 --- a/compiler/rustc_attr_parsing/src/attributes/traits.rs +++ b/compiler/rustc_attr_parsing/src/attributes/traits.rs @@ -1,16 +1,14 @@ -use core::mem; - -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::attrs::AttributeKind; -use rustc_hir::{MethodKind, Target}; -use rustc_span::{Span, Symbol, sym}; +use std::mem; +use super::prelude::*; use crate::attributes::{ AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser, }; -use crate::context::MaybeWarn::{Allow, Warn}; -use crate::context::{ALL_TARGETS, AcceptContext, AllowedTargets, Stage}; +use crate::context::{AcceptContext, Stage}; use crate::parser::ArgParser; +use crate::target_checking::Policy::{Allow, Warn}; +use crate::target_checking::{ALL_TARGETS, AllowedTargets}; + pub(crate) struct SkipDuringMethodDispatchParser; impl<S: Stage> SingleAttributeParser<S> for SkipDuringMethodDispatchParser { const PATH: &[Symbol] = &[sym::rustc_skip_during_method_dispatch]; @@ -44,7 +42,7 @@ impl<S: Stage> SingleAttributeParser<S> for SkipDuringMethodDispatchParser { Some(key @ sym::array) => (key, &mut array), Some(key @ sym::boxed_slice) => (key, &mut boxed_slice), _ => { - cx.expected_specific_argument(path.span(), vec!["array", "boxed_slice"]); + cx.expected_specific_argument(path.span(), &[sym::array, sym::boxed_slice]); continue; } }; diff --git a/compiler/rustc_attr_parsing/src/attributes/transparency.rs b/compiler/rustc_attr_parsing/src/attributes/transparency.rs index 0ffcf434b52..ea1f5549c4e 100644 --- a/compiler/rustc_attr_parsing/src/attributes/transparency.rs +++ b/compiler/rustc_attr_parsing/src/attributes/transparency.rs @@ -1,13 +1,7 @@ -use rustc_feature::{AttributeTemplate, template}; -use rustc_hir::Target; -use rustc_hir::attrs::AttributeKind; use rustc_span::hygiene::Transparency; -use rustc_span::{Symbol, sym}; -use super::{AttributeOrder, OnDuplicate, SingleAttributeParser}; -use crate::context::MaybeWarn::Allow; -use 
crate::context::{AcceptContext, AllowedTargets, Stage}; -use crate::parser::ArgParser; +use super::prelude::*; + pub(crate) struct TransparencyParser; // FIXME(jdonszelmann): make these proper diagnostics @@ -35,7 +29,7 @@ impl<S: Stage> SingleAttributeParser<S> for TransparencyParser { Some(_) => { cx.expected_specific_argument_strings( nv.value_span, - vec!["transparent", "semitransparent", "opaque"], + &[sym::transparent, sym::semitransparent, sym::opaque], ); None } diff --git a/compiler/rustc_attr_parsing/src/attributes/util.rs b/compiler/rustc_attr_parsing/src/attributes/util.rs index 10134915b27..77e8c32e59d 100644 --- a/compiler/rustc_attr_parsing/src/attributes/util.rs +++ b/compiler/rustc_attr_parsing/src/attributes/util.rs @@ -1,8 +1,12 @@ -use rustc_ast::attr::{AttributeExt, first_attr_value_str_by_name}; +use rustc_ast::LitKind; +use rustc_ast::attr::AttributeExt; use rustc_feature::is_builtin_attr_name; use rustc_hir::RustcVersion; use rustc_span::{Symbol, sym}; +use crate::context::{AcceptContext, Stage}; +use crate::parser::ArgParser; + /// Parse a rustc version number written inside string literal in an attribute, /// like appears in `since = "1.0.0"`. Suffixes like "-dev" and "-nightly" are /// not accepted in this position, unlike when parsing CFG_RELEASE. @@ -23,10 +27,6 @@ pub fn is_builtin_attr(attr: &impl AttributeExt) -> bool { attr.is_doc_comment() || attr.ident().is_some_and(|ident| is_builtin_attr_name(ident.name)) } -pub fn find_crate_name(attrs: &[impl AttributeExt]) -> Option<Symbol> { - first_attr_value_str_by_name(attrs, sym::crate_name) -} - pub fn is_doc_alias_attrs_contain_symbol<'tcx, T: AttributeExt + 'tcx>( attrs: impl Iterator<Item = &'tcx T>, symbol: Symbol, @@ -56,3 +56,32 @@ pub fn is_doc_alias_attrs_contain_symbol<'tcx, T: AttributeExt + 'tcx>( } false } + +/// Parse a single integer. +/// +/// Used by attributes that take a single integer as argument, such as +/// `#[link_ordinal]` and `#[rustc_layout_scalar_valid_range_start]`. +/// `cx` is the context given to the attribute. +/// `args` is the parser for the attribute arguments. 
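+///
+/// An illustrative sketch of the accepted shapes (the attribute name `my_attr`
+/// is hypothetical, not one rustc defines):
+///
+/// ```text
+/// #[my_attr(42)]     // -> Some(42)
+/// #[my_attr]         // -> `expected_list` error, returns None
+/// #[my_attr(1, 2)]   // -> `expected_single_argument` error, returns None
+/// #[my_attr("42")]   // -> `expected_integer_literal` error, returns None
+/// ```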
+pub(crate) fn parse_single_integer<S: Stage>( + cx: &mut AcceptContext<'_, '_, S>, + args: &ArgParser<'_>, +) -> Option<u128> { + let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); + return None; + }; + let Some(single) = list.single() else { + cx.expected_single_argument(list.span); + return None; + }; + let Some(lit) = single.lit() else { + cx.expected_integer_literal(single.span()); + return None; + }; + let LitKind::Int(num, _ty) = lit.kind else { + cx.expected_integer_literal(single.span()); + return None; + }; + Some(num.0) +} diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index c0d3bc99ba9..d4b9cfe00ad 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -3,28 +3,28 @@ use std::collections::BTreeMap; use std::ops::{Deref, DerefMut}; use std::sync::LazyLock; -use itertools::Itertools; use private::Sealed; -use rustc_ast::{self as ast, AttrStyle, LitKind, MetaItemLit, NodeId}; -use rustc_errors::{DiagCtxtHandle, Diagnostic}; -use rustc_feature::{AttributeTemplate, Features}; +use rustc_ast::{AttrStyle, MetaItemLit, NodeId}; +use rustc_errors::{Diag, Diagnostic, Level}; +use rustc_feature::AttributeTemplate; use rustc_hir::attrs::AttributeKind; use rustc_hir::lints::{AttributeLint, AttributeLintKind}; -use rustc_hir::{ - AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId, HirId, MethodKind, Target, -}; +use rustc_hir::{AttrPath, HirId}; use rustc_session::Session; -use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; +use rustc_span::{ErrorGuaranteed, Span, Symbol}; +use crate::AttributeParser; use crate::attributes::allow_unstable::{ AllowConstFnUnstableParser, AllowInternalUnstableParser, UnstableFeatureBoundParser, }; use crate::attributes::body::CoroutineParser; use crate::attributes::codegen_attrs::{ - ColdParser, CoverageParser, ExportNameParser, NakedParser, NoMangleParser, OptimizeParser, - TargetFeatureParser, TrackCallerParser, UsedParser, + ColdParser, CoverageParser, ExportNameParser, ForceTargetFeatureParser, NakedParser, + NoMangleParser, OptimizeParser, SanitizeParser, TargetFeatureParser, TrackCallerParser, + UsedParser, }; use crate::attributes::confusables::ConfusablesParser; +use crate::attributes::crate_level::CrateNameParser; use crate::attributes::deprecation::DeprecationParser; use crate::attributes::dummy::DummyParser; use crate::attributes::inline::{InlineParser, RustcForceInlineParser}; @@ -65,23 +65,21 @@ use crate::attributes::traits::{ }; use crate::attributes::transparency::TransparencyParser; use crate::attributes::{AttributeParser as _, Combine, Single, WithoutArgs}; -use crate::context::MaybeWarn::{Allow, Error, Warn}; -use crate::parser::{ArgParser, MetaItemParser, PathParser}; -use crate::session_diagnostics::{ - AttributeParseError, AttributeParseErrorReason, InvalidTarget, UnknownMetaItem, -}; +use crate::parser::{ArgParser, PathParser}; +use crate::session_diagnostics::{AttributeParseError, AttributeParseErrorReason, UnknownMetaItem}; +use crate::target_checking::AllowedTargets; type GroupType<S> = LazyLock<GroupTypeInner<S>>; -struct GroupTypeInner<S: Stage> { - accepters: BTreeMap<&'static [Symbol], Vec<GroupTypeInnerAccept<S>>>, - finalizers: Vec<FinalizeFn<S>>, +pub(super) struct GroupTypeInner<S: Stage> { + pub(super) accepters: BTreeMap<&'static [Symbol], Vec<GroupTypeInnerAccept<S>>>, + pub(super) finalizers: Vec<FinalizeFn<S>>, } -struct GroupTypeInnerAccept<S: Stage> { - template: 
AttributeTemplate, - accept_fn: AcceptFn<S>, - allowed_targets: AllowedTargets, +pub(super) struct GroupTypeInnerAccept<S: Stage> { + pub(super) template: AttributeTemplate, + pub(super) accept_fn: AcceptFn<S>, + pub(super) allowed_targets: AllowedTargets, } type AcceptFn<S> = @@ -161,6 +159,7 @@ attribute_parsers!( // tidy-alphabetical-start Combine<AllowConstFnUnstableParser>, Combine<AllowInternalUnstableParser>, + Combine<ForceTargetFeatureParser>, Combine<ReprParser>, Combine<TargetFeatureParser>, Combine<UnstableFeatureBoundParser>, @@ -168,6 +167,7 @@ attribute_parsers!( // tidy-alphabetical-start Single<CoverageParser>, + Single<CrateNameParser>, Single<CustomMirParser>, Single<DeprecationParser>, Single<DummyParser>, @@ -187,6 +187,7 @@ attribute_parsers!( Single<RustcLayoutScalarValidRangeEnd>, Single<RustcLayoutScalarValidRangeStart>, Single<RustcObjectLifetimeDefaultParser>, + Single<SanitizeParser>, Single<ShouldPanicParser>, Single<SkipDuringMethodDispatchParser>, Single<TransparencyParser>, @@ -264,11 +265,7 @@ impl Stage for Early { sess: &'sess Session, diag: impl for<'x> Diagnostic<'x>, ) -> ErrorGuaranteed { - if self.emit_errors.should_emit() { - sess.dcx().emit_err(diag) - } else { - sess.dcx().create_err(diag).delay_as_bug() - } + self.should_emit().emit_err(sess.dcx().create_err(diag)) } fn should_emit(&self) -> ShouldEmit { @@ -315,7 +312,9 @@ pub struct AcceptContext<'f, 'sess, S: Stage> { /// The span of the attribute currently being parsed pub(crate) attr_span: Span, + /// Whether it is an inner or outer attribute pub(crate) attr_style: AttrStyle, + /// The expected structure of the attribute. /// /// Used in reporting errors to give a hint to users what the attribute *should* look like. @@ -334,7 +333,7 @@ impl<'f, 'sess: 'f, S: Stage> SharedContext<'f, 'sess, S> { /// must be delayed until after HIR is built. This method will take care of the details of /// that. pub(crate) fn emit_lint(&mut self, lint: AttributeLintKind, span: Span) { - if !self.stage.should_emit().should_emit() { + if matches!(self.stage.should_emit(), ShouldEmit::Nothing) { return; } let id = self.target_id; @@ -506,10 +505,11 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { }) } + /// produces an error along the lines of `expected one of [foo, meow]` pub(crate) fn expected_specific_argument( &self, span: Span, - possibilities: Vec<&'static str>, + possibilities: &[Symbol], ) -> ErrorGuaranteed { self.emit_err(AttributeParseError { span, @@ -525,10 +525,12 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { }) } + /// produces an error along the lines of `expected one of [foo, meow] as an argument`. + /// i.e. slightly different wording to [`expected_specific_argument`](Self::expected_specific_argument). 
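+ ///
+ /// A hypothetical call site under the new `&[Symbol]` signature (the symbols are
+ /// placeholders): `cx.expected_specific_argument_and_list(span, &[sym::foo, sym::meow])`.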
pub(crate) fn expected_specific_argument_and_list( &self, span: Span, - possibilities: Vec<&'static str>, + possibilities: &[Symbol], ) -> ErrorGuaranteed { self.emit_err(AttributeParseError { span, @@ -544,10 +546,11 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { }) } + /// produces an error along the lines of `expected one of ["foo", "meow"]` pub(crate) fn expected_specific_argument_strings( &self, span: Span, - possibilities: Vec<&'static str>, + possibilities: &[Symbol], ) -> ErrorGuaranteed { self.emit_err(AttributeParseError { span, @@ -595,7 +598,7 @@ pub struct SharedContext<'p, 'sess, S: Stage> { /// The id ([`NodeId`] if `S` is `Early`, [`HirId`] if `S` is `Late`) of the syntactical component this attribute was applied to pub(crate) target_id: S::Id, - emit_lint: &'p mut dyn FnMut(AttributeLint<S::Id>), + pub(crate) emit_lint: &'p mut dyn FnMut(AttributeLint<S::Id>), } /// Context given to every attribute parser during finalization. @@ -648,8 +651,13 @@ pub enum OmitDoc { Skip, } -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] pub enum ShouldEmit { + /// The operations will emit errors, and lints, and errors are fatal. + /// + /// Only relevant when early parsing, in late parsing equivalent to `ErrorsAndLints`. + /// Late parsing is never fatal, and instead tries to emit as many diagnostics as possible. + EarlyFatal, /// The operation will emit errors and lints. /// This is usually what you need. ErrorsAndLints, @@ -659,564 +667,12 @@ pub enum ShouldEmit { } impl ShouldEmit { - pub fn should_emit(&self) -> bool { - match self { - ShouldEmit::ErrorsAndLints => true, - ShouldEmit::Nothing => false, - } - } -} - -#[derive(Debug)] -pub(crate) enum AllowedTargets { - AllowList(&'static [MaybeWarn]), - AllowListWarnRest(&'static [MaybeWarn]), -} - -pub(crate) enum AllowedResult { - Allowed, - Warn, - Error, -} - -impl AllowedTargets { - pub(crate) fn is_allowed(&self, target: Target) -> AllowedResult { - match self { - AllowedTargets::AllowList(list) => { - if list.contains(&Allow(target)) { - AllowedResult::Allowed - } else if list.contains(&Warn(target)) { - AllowedResult::Warn - } else { - AllowedResult::Error - } - } - AllowedTargets::AllowListWarnRest(list) => { - if list.contains(&Allow(target)) { - AllowedResult::Allowed - } else if list.contains(&Error(target)) { - AllowedResult::Error - } else { - AllowedResult::Warn - } - } - } - } - - pub(crate) fn allowed_targets(&self) -> Vec<Target> { + pub(crate) fn emit_err(&self, diag: Diag<'_>) -> ErrorGuaranteed { match self { - AllowedTargets::AllowList(list) => list, - AllowedTargets::AllowListWarnRest(list) => list, + ShouldEmit::EarlyFatal if diag.level() == Level::DelayedBug => diag.emit(), + ShouldEmit::EarlyFatal => diag.upgrade_to_fatal().emit(), + ShouldEmit::ErrorsAndLints => diag.emit(), + ShouldEmit::Nothing => diag.delay_as_bug(), } - .iter() - .filter_map(|target| match target { - Allow(target) => Some(*target), - Warn(_) => None, - Error(_) => None, - }) - .collect() - } -} - -#[derive(Debug, Eq, PartialEq)] -pub(crate) enum MaybeWarn { - Allow(Target), - Warn(Target), - Error(Target), -} - -/// Context created once, for example as part of the ast lowering -/// context, through which all attributes can be lowered. -pub struct AttributeParser<'sess, S: Stage = Late> { - pub(crate) tools: Vec<Symbol>, - features: Option<&'sess Features>, - sess: &'sess Session, - stage: S, - - /// *Only* parse attributes with this symbol. 
- /// - /// Used in cases where we want the lowering infrastructure for parse just a single attribute. - parse_only: Option<Symbol>, -} - -impl<'sess> AttributeParser<'sess, Early> { - /// This method allows you to parse attributes *before* you have access to features or tools. - /// One example where this is necessary, is to parse `feature` attributes themselves for - /// example. - /// - /// Try to use this as little as possible. Attributes *should* be lowered during - /// `rustc_ast_lowering`. Some attributes require access to features to parse, which would - /// crash if you tried to do so through [`parse_limited`](Self::parse_limited). - /// - /// To make sure use is limited, supply a `Symbol` you'd like to parse. Only attributes with - /// that symbol are picked out of the list of instructions and parsed. Those are returned. - /// - /// No diagnostics will be emitted when parsing limited. Lints are not emitted at all, while - /// errors will be emitted as a delayed bugs. in other words, we *expect* attributes parsed - /// with `parse_limited` to be reparsed later during ast lowering where we *do* emit the errors - pub fn parse_limited( - sess: &'sess Session, - attrs: &[ast::Attribute], - sym: Symbol, - target_span: Span, - target_node_id: NodeId, - features: Option<&'sess Features>, - ) -> Option<Attribute> { - let mut p = Self { - features, - tools: Vec::new(), - parse_only: Some(sym), - sess, - stage: Early { emit_errors: ShouldEmit::Nothing }, - }; - let mut parsed = p.parse_attribute_list( - attrs, - target_span, - target_node_id, - Target::Crate, // Does not matter, we're not going to emit errors anyways - OmitDoc::Skip, - std::convert::identity, - |_lint| { - panic!("can't emit lints here for now (nothing uses this atm)"); - }, - ); - assert!(parsed.len() <= 1); - - parsed.pop() - } - - pub fn parse_single<T>( - sess: &'sess Session, - attr: &ast::Attribute, - target_span: Span, - target_node_id: NodeId, - features: Option<&'sess Features>, - emit_errors: ShouldEmit, - parse_fn: fn(cx: &mut AcceptContext<'_, '_, Early>, item: &ArgParser<'_>) -> T, - template: &AttributeTemplate, - ) -> T { - let mut parser = Self { - features, - tools: Vec::new(), - parse_only: None, - sess, - stage: Early { emit_errors }, - }; - let ast::AttrKind::Normal(normal_attr) = &attr.kind else { - panic!("parse_single called on a doc attr") - }; - let meta_parser = MetaItemParser::from_attr(normal_attr, parser.dcx()); - let path = meta_parser.path(); - let args = meta_parser.args(); - let mut cx: AcceptContext<'_, 'sess, Early> = AcceptContext { - shared: SharedContext { - cx: &mut parser, - target_span, - target_id: target_node_id, - emit_lint: &mut |_lint| { - panic!("can't emit lints here for now (nothing uses this atm)"); - }, - }, - attr_span: attr.span, - attr_style: attr.style, - template, - attr_path: path.get_attribute_path(), - }; - parse_fn(&mut cx, args) - } -} - -impl<'sess, S: Stage> AttributeParser<'sess, S> { - pub fn new( - sess: &'sess Session, - features: &'sess Features, - tools: Vec<Symbol>, - stage: S, - ) -> Self { - Self { features: Some(features), tools, parse_only: None, sess, stage } - } - - pub(crate) fn sess(&self) -> &'sess Session { - &self.sess - } - - pub(crate) fn features(&self) -> &'sess Features { - self.features.expect("features not available at this point in the compiler") } - - pub(crate) fn features_option(&self) -> Option<&'sess Features> { - self.features - } - - pub(crate) fn dcx(&self) -> DiagCtxtHandle<'sess> { - self.sess().dcx() - } - - /// Parse a list 
of attributes. - /// - /// `target_span` is the span of the thing this list of attributes is applied to, - /// and when `omit_doc` is set, doc attributes are filtered out. - pub fn parse_attribute_list( - &mut self, - attrs: &[ast::Attribute], - target_span: Span, - target_id: S::Id, - target: Target, - omit_doc: OmitDoc, - - lower_span: impl Copy + Fn(Span) -> Span, - mut emit_lint: impl FnMut(AttributeLint<S::Id>), - ) -> Vec<Attribute> { - let mut attributes = Vec::new(); - let mut attr_paths = Vec::new(); - - for attr in attrs { - // If we're only looking for a single attribute, skip all the ones we don't care about. - if let Some(expected) = self.parse_only { - if !attr.has_name(expected) { - continue; - } - } - - // Sometimes, for example for `#![doc = include_str!("readme.md")]`, - // doc still contains a non-literal. You might say, when we're lowering attributes - // that's expanded right? But no, sometimes, when parsing attributes on macros, - // we already use the lowering logic and these are still there. So, when `omit_doc` - // is set we *also* want to ignore these. - if omit_doc == OmitDoc::Skip && attr.has_name(sym::doc) { - continue; - } - - match &attr.kind { - ast::AttrKind::DocComment(comment_kind, symbol) => { - if omit_doc == OmitDoc::Skip { - continue; - } - - attributes.push(Attribute::Parsed(AttributeKind::DocComment { - style: attr.style, - kind: *comment_kind, - span: lower_span(attr.span), - comment: *symbol, - })) - } - // // FIXME: make doc attributes go through a proper attribute parser - // ast::AttrKind::Normal(n) if n.has_name(sym::doc) => { - // let p = GenericMetaItemParser::from_attr(&n, self.dcx()); - // - // attributes.push(Attribute::Parsed(AttributeKind::DocComment { - // style: attr.style, - // kind: CommentKind::Line, - // span: attr.span, - // comment: p.args().name_value(), - // })) - // } - ast::AttrKind::Normal(n) => { - attr_paths.push(PathParser::Ast(&n.item.path)); - - let parser = MetaItemParser::from_attr(n, self.dcx()); - let path = parser.path(); - let args = parser.args(); - let parts = path.segments().map(|i| i.name).collect::<Vec<_>>(); - - if let Some(accepts) = S::parsers().accepters.get(parts.as_slice()) { - for accept in accepts { - let mut cx: AcceptContext<'_, 'sess, S> = AcceptContext { - shared: SharedContext { - cx: self, - target_span, - target_id, - emit_lint: &mut emit_lint, - }, - attr_span: lower_span(attr.span), - attr_style: attr.style, - template: &accept.template, - attr_path: path.get_attribute_path(), - }; - - (accept.accept_fn)(&mut cx, args); - - if self.stage.should_emit().should_emit() { - match accept.allowed_targets.is_allowed(target) { - AllowedResult::Allowed => {} - AllowedResult::Warn => { - let allowed_targets = - accept.allowed_targets.allowed_targets(); - let (applied, only) = allowed_targets_applied( - allowed_targets, - target, - self.features, - ); - emit_lint(AttributeLint { - id: target_id, - span: attr.span, - kind: AttributeLintKind::InvalidTarget { - name: parts[0], - target, - only: if only { "only " } else { "" }, - applied, - }, - }); - } - AllowedResult::Error => { - let allowed_targets = - accept.allowed_targets.allowed_targets(); - let (applied, only) = allowed_targets_applied( - allowed_targets, - target, - self.features, - ); - self.dcx().emit_err(InvalidTarget { - span: attr.span, - name: parts[0], - target: target.plural_name(), - only: if only { "only " } else { "" }, - applied, - }); - } - } - } - } - } else { - // If we're here, we must be compiling a tool attribute... 
Or someone - // forgot to parse their fancy new attribute. Let's warn them in any case. - // If you are that person, and you really think your attribute should - // remain unparsed, carefully read the documentation in this module and if - // you still think so you can add an exception to this assertion. - - // FIXME(jdonszelmann): convert other attributes, and check with this that - // we caught em all - // const FIXME_TEMPORARY_ATTR_ALLOWLIST: &[Symbol] = &[sym::cfg]; - // assert!( - // self.tools.contains(&parts[0]) || true, - // // || FIXME_TEMPORARY_ATTR_ALLOWLIST.contains(&parts[0]), - // "attribute {path} wasn't parsed and isn't a know tool attribute", - // ); - - attributes.push(Attribute::Unparsed(Box::new(AttrItem { - path: AttrPath::from_ast(&n.item.path), - args: self.lower_attr_args(&n.item.args, lower_span), - id: HashIgnoredAttrId { attr_id: attr.id }, - style: attr.style, - span: lower_span(attr.span), - }))); - } - } - } - } - - let mut parsed_attributes = Vec::new(); - for f in &S::parsers().finalizers { - if let Some(attr) = f(&mut FinalizeContext { - shared: SharedContext { - cx: self, - target_span, - target_id, - emit_lint: &mut emit_lint, - }, - all_attrs: &attr_paths, - }) { - parsed_attributes.push(Attribute::Parsed(attr)); - } - } - - attributes.extend(parsed_attributes); - - attributes - } - - /// Returns whether there is a parser for an attribute with this name - pub fn is_parsed_attribute(path: &[Symbol]) -> bool { - Late::parsers().accepters.contains_key(path) - } - - fn lower_attr_args(&self, args: &ast::AttrArgs, lower_span: impl Fn(Span) -> Span) -> AttrArgs { - match args { - ast::AttrArgs::Empty => AttrArgs::Empty, - ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(args.clone()), - // This is an inert key-value attribute - it will never be visible to macros - // after it gets lowered to HIR. Therefore, we can extract literals to handle - // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). - ast::AttrArgs::Eq { eq_span, expr } => { - // In valid code the value always ends up as a single literal. Otherwise, a dummy - // literal suffices because the error is handled elsewhere. - let lit = if let ast::ExprKind::Lit(token_lit) = expr.kind - && let Ok(lit) = - ast::MetaItemLit::from_token_lit(token_lit, lower_span(expr.span)) - { - lit - } else { - let guar = self.dcx().span_delayed_bug( - args.span().unwrap_or(DUMMY_SP), - "expr in place where literal is expected (builtin attr parsing)", - ); - ast::MetaItemLit { - symbol: sym::dummy, - suffix: None, - kind: ast::LitKind::Err(guar), - span: DUMMY_SP, - } - }; - AttrArgs::Eq { eq_span: lower_span(*eq_span), expr: lit } - } - } - } -} - -/// Takes a list of `allowed_targets` for an attribute, and the `target` the attribute was applied to. -/// Does some heuristic-based filtering to remove uninteresting targets, and formats the targets into a string -pub(crate) fn allowed_targets_applied( - mut allowed_targets: Vec<Target>, - target: Target, - features: Option<&Features>, -) -> (String, bool) { - // Remove unstable targets from `allowed_targets` if their features are not enabled - if let Some(features) = features { - if !features.fn_delegation() { - allowed_targets.retain(|t| !matches!(t, Target::Delegation { .. })); - } - if !features.stmt_expr_attributes() { - allowed_targets.retain(|t| !matches!(t, Target::Expression | Target::Statement)); - } - if !features.extern_types() { - allowed_targets.retain(|t| !matches!(t, Target::ForeignTy)); - } - } - - // We define groups of "similar" targets. 
- // If at least two of the targets are allowed, and the `target` is not in the group, - // we collapse the entire group to a single entry to simplify the target list - const FUNCTION_LIKE: &[Target] = &[ - Target::Fn, - Target::Closure, - Target::ForeignFn, - Target::Method(MethodKind::Inherent), - Target::Method(MethodKind::Trait { body: false }), - Target::Method(MethodKind::Trait { body: true }), - Target::Method(MethodKind::TraitImpl), - ]; - const METHOD_LIKE: &[Target] = &[ - Target::Method(MethodKind::Inherent), - Target::Method(MethodKind::Trait { body: false }), - Target::Method(MethodKind::Trait { body: true }), - Target::Method(MethodKind::TraitImpl), - ]; - const IMPL_LIKE: &[Target] = - &[Target::Impl { of_trait: false }, Target::Impl { of_trait: true }]; - const ADT_LIKE: &[Target] = &[Target::Struct, Target::Enum]; - - let mut added_fake_targets = Vec::new(); - filter_targets( - &mut allowed_targets, - FUNCTION_LIKE, - "functions", - target, - &mut added_fake_targets, - ); - filter_targets(&mut allowed_targets, METHOD_LIKE, "methods", target, &mut added_fake_targets); - filter_targets(&mut allowed_targets, IMPL_LIKE, "impl blocks", target, &mut added_fake_targets); - filter_targets(&mut allowed_targets, ADT_LIKE, "data types", target, &mut added_fake_targets); - - // If there is now only 1 target left, show that as the only possible target - ( - added_fake_targets - .iter() - .copied() - .chain(allowed_targets.iter().map(|t| t.plural_name())) - .join(", "), - allowed_targets.len() + added_fake_targets.len() == 1, - ) -} - -fn filter_targets( - allowed_targets: &mut Vec<Target>, - target_group: &'static [Target], - target_group_name: &'static str, - target: Target, - added_fake_targets: &mut Vec<&'static str>, -) { - if target_group.contains(&target) { - return; - } - if allowed_targets.iter().filter(|at| target_group.contains(at)).count() < 2 { - return; - } - allowed_targets.retain(|t| !target_group.contains(t)); - added_fake_targets.push(target_group_name); -} - -/// This is the list of all targets to which a attribute can be applied -/// This is used for: -/// - `rustc_dummy`, which can be applied to all targets -/// - Attributes that are not parted to the new target system yet can use this list as a placeholder -pub(crate) const ALL_TARGETS: &'static [MaybeWarn] = &[ - Allow(Target::ExternCrate), - Allow(Target::Use), - Allow(Target::Static), - Allow(Target::Const), - Allow(Target::Fn), - Allow(Target::Closure), - Allow(Target::Mod), - Allow(Target::ForeignMod), - Allow(Target::GlobalAsm), - Allow(Target::TyAlias), - Allow(Target::Enum), - Allow(Target::Variant), - Allow(Target::Struct), - Allow(Target::Field), - Allow(Target::Union), - Allow(Target::Trait), - Allow(Target::TraitAlias), - Allow(Target::Impl { of_trait: false }), - Allow(Target::Impl { of_trait: true }), - Allow(Target::Expression), - Allow(Target::Statement), - Allow(Target::Arm), - Allow(Target::AssocConst), - Allow(Target::Method(MethodKind::Inherent)), - Allow(Target::Method(MethodKind::Trait { body: false })), - Allow(Target::Method(MethodKind::Trait { body: true })), - Allow(Target::Method(MethodKind::TraitImpl)), - Allow(Target::AssocTy), - Allow(Target::ForeignFn), - Allow(Target::ForeignStatic), - Allow(Target::ForeignTy), - Allow(Target::MacroDef), - Allow(Target::Param), - Allow(Target::PatField), - Allow(Target::ExprField), - Allow(Target::WherePredicate), - Allow(Target::MacroCall), - Allow(Target::Crate), - Allow(Target::Delegation { mac: false }), - Allow(Target::Delegation { mac: true }), 
-]; - -/// Parse a single integer. -/// -/// Used by attributes that take a single integer as argument, such as -/// `#[link_ordinal]` and `#[rustc_layout_scalar_valid_range_start]`. -/// `cx` is the context given to the attribute. -/// `args` is the parser for the attribute arguments. -pub(crate) fn parse_single_integer<S: Stage>( - cx: &mut AcceptContext<'_, '_, S>, - args: &ArgParser<'_>, -) -> Option<u128> { - let Some(list) = args.list() else { - cx.expected_list(cx.attr_span); - return None; - }; - let Some(single) = list.single() else { - cx.expected_single_argument(list.span); - return None; - }; - let Some(lit) = single.lit() else { - cx.expected_integer_literal(single.span()); - return None; - }; - let LitKind::Int(num, _ty) = lit.kind else { - cx.expected_integer_literal(single.span()); - return None; - }; - Some(num.0) } diff --git a/compiler/rustc_attr_parsing/src/interface.rs b/compiler/rustc_attr_parsing/src/interface.rs new file mode 100644 index 00000000000..60523c2877c --- /dev/null +++ b/compiler/rustc_attr_parsing/src/interface.rs @@ -0,0 +1,370 @@ +use std::borrow::Cow; + +use rustc_ast as ast; +use rustc_ast::NodeId; +use rustc_errors::DiagCtxtHandle; +use rustc_feature::{AttributeTemplate, Features}; +use rustc_hir::attrs::AttributeKind; +use rustc_hir::lints::AttributeLint; +use rustc_hir::{AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId, Target}; +use rustc_session::Session; +use rustc_span::{DUMMY_SP, Span, Symbol, sym}; + +use crate::context::{AcceptContext, FinalizeContext, SharedContext, Stage}; +use crate::parser::{ArgParser, MetaItemParser, PathParser}; +use crate::{Early, Late, OmitDoc, ShouldEmit}; + +/// Context created once, for example as part of the ast lowering +/// context, through which all attributes can be lowered. +pub struct AttributeParser<'sess, S: Stage = Late> { + pub(crate) tools: Vec<Symbol>, + pub(crate) features: Option<&'sess Features>, + pub(crate) sess: &'sess Session, + pub(crate) stage: S, + + /// *Only* parse attributes with this symbol. + /// + /// Used in cases where we want the lowering infrastructure for parse just a single attribute. + parse_only: Option<Symbol>, +} + +impl<'sess> AttributeParser<'sess, Early> { + /// This method allows you to parse attributes *before* you have access to features or tools. + /// One example where this is necessary, is to parse `feature` attributes themselves for + /// example. + /// + /// Try to use this as little as possible. Attributes *should* be lowered during + /// `rustc_ast_lowering`. Some attributes require access to features to parse, which would + /// crash if you tried to do so through [`parse_limited`](Self::parse_limited). + /// + /// To make sure use is limited, supply a `Symbol` you'd like to parse. Only attributes with + /// that symbol are picked out of the list of instructions and parsed. Those are returned. + /// + /// No diagnostics will be emitted when parsing limited. Lints are not emitted at all, while + /// errors will be emitted as a delayed bugs. 
in other words, we *expect* attributes parsed + /// with `parse_limited` to be reparsed later during ast lowering where we *do* emit the errors + pub fn parse_limited( + sess: &'sess Session, + attrs: &[ast::Attribute], + sym: Symbol, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + ) -> Option<Attribute> { + Self::parse_limited_should_emit( + sess, + attrs, + sym, + target_span, + target_node_id, + features, + ShouldEmit::Nothing, + ) + } + + /// Usually you want `parse_limited`, which defaults to no errors. + pub fn parse_limited_should_emit( + sess: &'sess Session, + attrs: &[ast::Attribute], + sym: Symbol, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + should_emit: ShouldEmit, + ) -> Option<Attribute> { + let mut parsed = Self::parse_limited_all( + sess, + attrs, + Some(sym), + Target::Crate, // Does not matter, we're not going to emit errors anyways + target_span, + target_node_id, + features, + should_emit, + ); + assert!(parsed.len() <= 1); + parsed.pop() + } + + pub fn parse_limited_all( + sess: &'sess Session, + attrs: &[ast::Attribute], + parse_only: Option<Symbol>, + target: Target, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + emit_errors: ShouldEmit, + ) -> Vec<Attribute> { + let mut p = + Self { features, tools: Vec::new(), parse_only, sess, stage: Early { emit_errors } }; + p.parse_attribute_list( + attrs, + target_span, + target_node_id, + target, + OmitDoc::Skip, + std::convert::identity, + |lint| { + crate::lints::emit_attribute_lint(&lint, sess); + }, + ) + } + + pub fn parse_single<T>( + sess: &'sess Session, + attr: &ast::Attribute, + target_span: Span, + target_node_id: NodeId, + features: Option<&'sess Features>, + emit_errors: ShouldEmit, + parse_fn: fn(cx: &mut AcceptContext<'_, '_, Early>, item: &ArgParser<'_>) -> Option<T>, + template: &AttributeTemplate, + ) -> Option<T> { + let mut parser = Self { + features, + tools: Vec::new(), + parse_only: None, + sess, + stage: Early { emit_errors }, + }; + let ast::AttrKind::Normal(normal_attr) = &attr.kind else { + panic!("parse_single called on a doc attr") + }; + let parts = + normal_attr.item.path.segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>(); + let meta_parser = MetaItemParser::from_attr(normal_attr, &parts, &sess.psess, emit_errors)?; + let path = meta_parser.path(); + let args = meta_parser.args(); + let mut cx: AcceptContext<'_, 'sess, Early> = AcceptContext { + shared: SharedContext { + cx: &mut parser, + target_span, + target_id: target_node_id, + emit_lint: &mut |lint| { + crate::lints::emit_attribute_lint(&lint, sess); + }, + }, + attr_span: attr.span, + attr_style: attr.style, + template, + attr_path: path.get_attribute_path(), + }; + parse_fn(&mut cx, args) + } +} + +impl<'sess, S: Stage> AttributeParser<'sess, S> { + pub fn new( + sess: &'sess Session, + features: &'sess Features, + tools: Vec<Symbol>, + stage: S, + ) -> Self { + Self { features: Some(features), tools, parse_only: None, sess, stage } + } + + pub(crate) fn sess(&self) -> &'sess Session { + &self.sess + } + + pub(crate) fn features(&self) -> &'sess Features { + self.features.expect("features not available at this point in the compiler") + } + + pub(crate) fn features_option(&self) -> Option<&'sess Features> { + self.features + } + + pub(crate) fn dcx(&self) -> DiagCtxtHandle<'sess> { + self.sess().dcx() + } + + /// Parse a list of attributes. 
+ /// + /// `target_span` is the span of the thing this list of attributes is applied to, + /// and when `omit_doc` is set, doc attributes are filtered out. + pub fn parse_attribute_list( + &mut self, + attrs: &[ast::Attribute], + target_span: Span, + target_id: S::Id, + target: Target, + omit_doc: OmitDoc, + + lower_span: impl Copy + Fn(Span) -> Span, + mut emit_lint: impl FnMut(AttributeLint<S::Id>), + ) -> Vec<Attribute> { + let mut attributes = Vec::new(); + let mut attr_paths = Vec::new(); + + for attr in attrs { + // If we're only looking for a single attribute, skip all the ones we don't care about. + if let Some(expected) = self.parse_only { + if !attr.has_name(expected) { + continue; + } + } + + // Sometimes, for example for `#![doc = include_str!("readme.md")]`, + // doc still contains a non-literal. You might say, when we're lowering attributes + // that's expanded right? But no, sometimes, when parsing attributes on macros, + // we already use the lowering logic and these are still there. So, when `omit_doc` + // is set we *also* want to ignore these. + if omit_doc == OmitDoc::Skip && attr.has_name(sym::doc) { + continue; + } + + match &attr.kind { + ast::AttrKind::DocComment(comment_kind, symbol) => { + if omit_doc == OmitDoc::Skip { + continue; + } + + attributes.push(Attribute::Parsed(AttributeKind::DocComment { + style: attr.style, + kind: *comment_kind, + span: lower_span(attr.span), + comment: *symbol, + })) + } + // // FIXME: make doc attributes go through a proper attribute parser + // ast::AttrKind::Normal(n) if n.has_name(sym::doc) => { + // let p = GenericMetaItemParser::from_attr(&n, self.dcx()); + // + // attributes.push(Attribute::Parsed(AttributeKind::DocComment { + // style: attr.style, + // kind: CommentKind::Line, + // span: attr.span, + // comment: p.args().name_value(), + // })) + // } + ast::AttrKind::Normal(n) => { + attr_paths.push(PathParser(Cow::Borrowed(&n.item.path))); + + let parts = + n.item.path.segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>(); + + if let Some(accepts) = S::parsers().accepters.get(parts.as_slice()) { + let Some(parser) = MetaItemParser::from_attr( + n, + &parts, + &self.sess.psess, + self.stage.should_emit(), + ) else { + continue; + }; + let path = parser.path(); + let args = parser.args(); + for accept in accepts { + let mut cx: AcceptContext<'_, 'sess, S> = AcceptContext { + shared: SharedContext { + cx: self, + target_span, + target_id, + emit_lint: &mut emit_lint, + }, + attr_span: lower_span(attr.span), + attr_style: attr.style, + template: &accept.template, + attr_path: path.get_attribute_path(), + }; + + (accept.accept_fn)(&mut cx, args); + + if !matches!(self.stage.should_emit(), ShouldEmit::Nothing) { + self.check_target( + path.get_attribute_path(), + attr.span, + &accept.allowed_targets, + target, + target_id, + &mut emit_lint, + ); + } + } + } else { + // If we're here, we must be compiling a tool attribute... Or someone + // forgot to parse their fancy new attribute. Let's warn them in any case. + // If you are that person, and you really think your attribute should + // remain unparsed, carefully read the documentation in this module and if + // you still think so you can add an exception to this assertion. 
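+ // For example (illustrative): a tool attribute such as `#[rustfmt::skip]` has no
+ // registered parser, so it falls through to this branch and is stored as
+ // `Attribute::Unparsed` below.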
+ + // FIXME(jdonszelmann): convert other attributes, and check with this that + // we caught em all + // const FIXME_TEMPORARY_ATTR_ALLOWLIST: &[Symbol] = &[sym::cfg]; + // assert!( + // self.tools.contains(&parts[0]) || true, + // // || FIXME_TEMPORARY_ATTR_ALLOWLIST.contains(&parts[0]), + // "attribute {path} wasn't parsed and isn't a know tool attribute", + // ); + + attributes.push(Attribute::Unparsed(Box::new(AttrItem { + path: AttrPath::from_ast(&n.item.path), + args: self.lower_attr_args(&n.item.args, lower_span), + id: HashIgnoredAttrId { attr_id: attr.id }, + style: attr.style, + span: lower_span(attr.span), + }))); + } + } + } + } + + let mut parsed_attributes = Vec::new(); + for f in &S::parsers().finalizers { + if let Some(attr) = f(&mut FinalizeContext { + shared: SharedContext { + cx: self, + target_span, + target_id, + emit_lint: &mut emit_lint, + }, + all_attrs: &attr_paths, + }) { + parsed_attributes.push(Attribute::Parsed(attr)); + } + } + + attributes.extend(parsed_attributes); + + attributes + } + + /// Returns whether there is a parser for an attribute with this name + pub fn is_parsed_attribute(path: &[Symbol]) -> bool { + Late::parsers().accepters.contains_key(path) + } + + fn lower_attr_args(&self, args: &ast::AttrArgs, lower_span: impl Fn(Span) -> Span) -> AttrArgs { + match args { + ast::AttrArgs::Empty => AttrArgs::Empty, + ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(args.clone()), + // This is an inert key-value attribute - it will never be visible to macros + // after it gets lowered to HIR. Therefore, we can extract literals to handle + // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). + ast::AttrArgs::Eq { eq_span, expr } => { + // In valid code the value always ends up as a single literal. Otherwise, a dummy + // literal suffices because the error is handled elsewhere. + let lit = if let ast::ExprKind::Lit(token_lit) = expr.kind + && let Ok(lit) = + ast::MetaItemLit::from_token_lit(token_lit, lower_span(expr.span)) + { + lit + } else { + let guar = self.dcx().span_delayed_bug( + args.span().unwrap_or(DUMMY_SP), + "expr in place where literal is expected (builtin attr parsing)", + ); + ast::MetaItemLit { + symbol: sym::dummy, + suffix: None, + kind: ast::LitKind::Err(guar), + span: DUMMY_SP, + } + }; + AttrArgs::Eq { eq_span: lower_span(*eq_span), expr: lit } + } + } + } +} diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs index fc1377e5314..4dd908cdc40 100644 --- a/compiler/rustc_attr_parsing/src/lib.rs +++ b/compiler/rustc_attr_parsing/src/lib.rs @@ -84,18 +84,31 @@ // tidy-alphabetical-end #[macro_use] +/// All the individual attribute parsers for each of rustc's built-in attributes. mod attributes; + +/// All the important types given to attribute parsers when parsing pub(crate) mod context; -mod lints; + +/// Code that other crates interact with, to actually parse a list (or sometimes single) +/// attribute. +mod interface; + +/// Despite this entire module called attribute parsing and the term being a little overloaded, +/// in this module the code lives that actually breaks up tokenstreams into semantic pieces of attributes, +/// like lists or name-value pairs. 
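+///
+/// For example, `#[deprecated(since = "1.0.0", note = "some text")]` breaks up into
+/// a list whose elements are the two name-value pairs `since = "1.0.0"` and
+/// `note = "some text"`.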
pub mod parser; + +mod lints; mod session_diagnostics; +mod target_checking; +pub mod validate_attr; pub use attributes::cfg::{CFG_TEMPLATE, EvalConfigResult, eval_config_entry, parse_cfg_attr}; pub use attributes::cfg_old::*; -pub use attributes::util::{ - find_crate_name, is_builtin_attr, is_doc_alias_attrs_contain_symbol, parse_version, -}; -pub use context::{AttributeParser, Early, Late, OmitDoc, ShouldEmit}; +pub use attributes::util::{is_builtin_attr, is_doc_alias_attrs_contain_symbol, parse_version}; +pub use context::{Early, Late, OmitDoc, ShouldEmit}; +pub use interface::AttributeParser; pub use lints::emit_attribute_lint; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } diff --git a/compiler/rustc_attr_parsing/src/lints.rs b/compiler/rustc_attr_parsing/src/lints.rs index 733225bab59..84ae19c4fc6 100644 --- a/compiler/rustc_attr_parsing/src/lints.rs +++ b/compiler/rustc_attr_parsing/src/lints.rs @@ -1,11 +1,13 @@ +use std::borrow::Cow; + use rustc_errors::{DiagArgValue, LintEmitter}; +use rustc_hir::Target; use rustc_hir::lints::{AttributeLint, AttributeLintKind}; -use rustc_hir::{HirId, Target}; use rustc_span::sym; use crate::session_diagnostics; -pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<HirId>, lint_emitter: L) { +pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<L::Id>, lint_emitter: L) { let AttributeLint { id, span, kind } = lint; match kind { @@ -35,12 +37,18 @@ pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<HirId>, lint_emi *first_span, session_diagnostics::EmptyAttributeList { attr_span: *first_span }, ), - &AttributeLintKind::InvalidTarget { name, target, ref applied, only } => lint_emitter + AttributeLintKind::InvalidTarget { name, target, applied, only } => lint_emitter .emit_node_span_lint( // This check is here because `deprecated` had its own lint group and removing this would be a breaking change - if name == sym::deprecated - && ![Target::Closure, Target::Expression, Target::Statement, Target::Arm] - .contains(&target) + if name.segments[0].name == sym::deprecated + && ![ + Target::Closure, + Target::Expression, + Target::Statement, + Target::Arm, + Target::MacroCall, + ] + .contains(target) { rustc_session::lint::builtin::USELESS_DEPRECATED } else { @@ -49,10 +57,13 @@ pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<HirId>, lint_emi *id, *span, session_diagnostics::InvalidTargetLint { - name, + name: name.clone(), target: target.plural_name(), - applied: applied.clone(), + applied: DiagArgValue::StrListSepByAnd( + applied.into_iter().map(|i| Cow::Owned(i.to_string())).collect(), + ), only, + attr_span: *span, }, ), } diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs index aecaae947c9..6d3cf684296 100644 --- a/compiler/rustc_attr_parsing/src/parser.rs +++ b/compiler/rustc_attr_parsing/src/parser.rs @@ -3,45 +3,30 @@ //! //! 
FIXME(jdonszelmann): delete `rustc_ast/attr/mod.rs` +use std::borrow::Cow; use std::fmt::{Debug, Display}; -use std::iter::Peekable; -use rustc_ast::token::{self, Delimiter, Token}; -use rustc_ast::tokenstream::{TokenStreamIter, TokenTree}; +use rustc_ast::token::{self, Delimiter, MetaVarKind}; +use rustc_ast::tokenstream::TokenStream; use rustc_ast::{AttrArgs, DelimArgs, Expr, ExprKind, LitKind, MetaItemLit, NormalAttr, Path}; use rustc_ast_pretty::pprust; -use rustc_errors::DiagCtxtHandle; +use rustc_errors::PResult; use rustc_hir::{self as hir, AttrPath}; -use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; - -pub struct SegmentIterator<'a> { - offset: usize, - path: &'a PathParser<'a>, -} - -impl<'a> Iterator for SegmentIterator<'a> { - type Item = &'a Ident; - - fn next(&mut self) -> Option<Self::Item> { - if self.offset >= self.path.len() { - return None; - } - - let res = match self.path { - PathParser::Ast(ast_path) => &ast_path.segments[self.offset].ident, - PathParser::Attr(attr_path) => &attr_path.segments[self.offset], - }; - - self.offset += 1; - Some(res) - } -} +use rustc_parse::exp; +use rustc_parse::parser::{Parser, PathStyle, token_descr}; +use rustc_session::errors::report_lit_error; +use rustc_session::parse::ParseSess; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, sym}; +use thin_vec::ThinVec; + +use crate::ShouldEmit; +use crate::session_diagnostics::{ + InvalidMetaItem, InvalidMetaItemQuoteIdentSugg, InvalidMetaItemRemoveNegSugg, MetaBadDelim, + MetaBadDelimSugg, SuffixedLiteralInAttribute, +}; #[derive(Clone, Debug)] -pub enum PathParser<'a> { - Ast(&'a Path), - Attr(AttrPath), -} +pub struct PathParser<'a>(pub Cow<'a, Path>); impl<'a> PathParser<'a> { pub fn get_attribute_path(&self) -> hir::AttrPath { @@ -52,21 +37,15 @@ impl<'a> PathParser<'a> { } pub fn segments(&'a self) -> impl Iterator<Item = &'a Ident> { - SegmentIterator { offset: 0, path: self } + self.0.segments.iter().map(|seg| &seg.ident) } pub fn span(&self) -> Span { - match self { - PathParser::Ast(path) => path.span, - PathParser::Attr(attr_path) => attr_path.span, - } + self.0.span } pub fn len(&self) -> usize { - match self { - PathParser::Ast(path) => path.segments.len(), - PathParser::Attr(attr_path) => attr_path.segments.len(), - } + self.0.segments.len() } pub fn segments_is(&self, segments: &[Symbol]) -> bool { @@ -99,10 +78,7 @@ impl<'a> PathParser<'a> { impl Display for PathParser<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - PathParser::Ast(path) => write!(f, "{}", pprust::path_to_string(path)), - PathParser::Attr(attr_path) => write!(f, "{attr_path}"), - } + write!(f, "{}", pprust::path_to_string(&self.0)) } } @@ -123,21 +99,39 @@ impl<'a> ArgParser<'a> { } } - pub fn from_attr_args<'sess>(value: &'a AttrArgs, dcx: DiagCtxtHandle<'sess>) -> Self { - match value { + pub fn from_attr_args<'sess>( + value: &'a AttrArgs, + parts: &[Symbol], + psess: &'sess ParseSess, + should_emit: ShouldEmit, + ) -> Option<Self> { + Some(match value { AttrArgs::Empty => Self::NoArgs, - AttrArgs::Delimited(args) if args.delim == Delimiter::Parenthesis => { - Self::List(MetaItemListParser::new(args, dcx)) - } AttrArgs::Delimited(args) => { - Self::List(MetaItemListParser { sub_parsers: vec![], span: args.dspan.entire() }) + // The arguments of rustc_dummy are not validated if the arguments are delimited + if parts == &[sym::rustc_dummy] { + return Some(ArgParser::List(MetaItemListParser { + sub_parsers: ThinVec::new(), + span: 
args.dspan.entire(), + })); + } + + if args.delim != Delimiter::Parenthesis { + psess.dcx().emit_err(MetaBadDelim { + span: args.dspan.entire(), + sugg: MetaBadDelimSugg { open: args.dspan.open, close: args.dspan.close }, + }); + return None; + } + + Self::List(MetaItemListParser::new(args, psess, should_emit)?) } AttrArgs::Eq { eq_span, expr } => Self::NameValue(NameValueParser { eq_span: *eq_span, - value: expr_to_lit(dcx, &expr, *eq_span), + value: expr_to_lit(psess, &expr, expr.span, should_emit)?, value_span: expr.span, }), - } + }) } /// Asserts that this MetaItem is a list @@ -249,11 +243,16 @@ impl<'a> Debug for MetaItemParser<'a> { impl<'a> MetaItemParser<'a> { /// Create a new parser from a [`NormalAttr`], which is stored inside of any /// [`ast::Attribute`](rustc_ast::Attribute) - pub fn from_attr<'sess>(attr: &'a NormalAttr, dcx: DiagCtxtHandle<'sess>) -> Self { - Self { - path: PathParser::Ast(&attr.item.path), - args: ArgParser::from_attr_args(&attr.item.args, dcx), - } + pub fn from_attr<'sess>( + attr: &'a NormalAttr, + parts: &[Symbol], + psess: &'sess ParseSess, + should_emit: ShouldEmit, + ) -> Option<Self> { + Some(Self { + path: PathParser(Cow::Borrowed(&attr.item.path)), + args: ArgParser::from_attr_args(&attr.item.args, parts, psess, should_emit)?, + }) } } @@ -318,215 +317,235 @@ impl NameValueParser { } } -fn expr_to_lit(dcx: DiagCtxtHandle<'_>, expr: &Expr, span: Span) -> MetaItemLit { - // In valid code the value always ends up as a single literal. Otherwise, a dummy - // literal suffices because the error is handled elsewhere. - if let ExprKind::Lit(token_lit) = expr.kind - && let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span) - { - lit +fn expr_to_lit( + psess: &ParseSess, + expr: &Expr, + span: Span, + should_emit: ShouldEmit, +) -> Option<MetaItemLit> { + if let ExprKind::Lit(token_lit) = expr.kind { + let res = MetaItemLit::from_token_lit(token_lit, expr.span); + match res { + Ok(lit) => { + if token_lit.suffix.is_some() { + should_emit.emit_err( + psess.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span }), + ); + None + } else { + if !lit.kind.is_unsuffixed() { + // Emit error and continue, we can still parse the attribute as if the suffix isn't there + should_emit.emit_err( + psess.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span }), + ); + } + + Some(lit) + } + } + Err(err) => { + let guar = report_lit_error(psess, err, token_lit, expr.span); + let lit = MetaItemLit { + symbol: token_lit.symbol, + suffix: token_lit.suffix, + kind: LitKind::Err(guar), + span: expr.span, + }; + Some(lit) + } + } } else { - let guar = dcx.span_delayed_bug( - span, - "expr in place where literal is expected (builtin attr parsing)", - ); - MetaItemLit { symbol: sym::dummy, suffix: None, kind: LitKind::Err(guar), span } + if matches!(should_emit, ShouldEmit::Nothing) { + return None; + } + + // Example cases: + // - `#[foo = 1+1]`: results in `ast::ExprKind::BinOp`. + // - `#[foo = include_str!("nonexistent-file.rs")]`: + // results in `ast::ExprKind::Err`. In that case we delay + // the error because an earlier error will have already + // been reported. 
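+ // - `#[foo = bar]` (a bare path, so not a literal): also ends up here and gets
+ //   the "attribute value must be a literal" error below.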
+ let msg = "attribute value must be a literal"; + let err = psess.dcx().struct_span_err(span, msg); + should_emit.emit_err(err); + None } } struct MetaItemListParserContext<'a, 'sess> { - // the tokens inside the delimiters, so `#[some::attr(a b c)]` would have `a b c` inside - inside_delimiters: Peekable<TokenStreamIter<'a>>, - dcx: DiagCtxtHandle<'sess>, + parser: &'a mut Parser<'sess>, + should_emit: ShouldEmit, } impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> { - fn done(&mut self) -> bool { - self.inside_delimiters.peek().is_none() - } - - fn next_path(&mut self) -> Option<AttrPath> { - // FIXME: Share code with `parse_path`. - let tt = self.inside_delimiters.next().map(|tt| TokenTree::uninterpolate(tt)); - - match tt.as_deref()? { - &TokenTree::Token( - Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span }, - _, - ) => { - // here we have either an ident or pathsep `::`. - - let mut segments = if let &token::Ident(name, _) = kind { - // when we lookahead another pathsep, more path's coming - if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = - self.inside_delimiters.peek() - { - self.inside_delimiters.next(); - vec![Ident::new(name, span)] - } else { - // else we have a single identifier path, that's all - return Some(AttrPath { - segments: vec![Ident::new(name, span)].into_boxed_slice(), - span, - }); - } - } else { - // if `::` is all we get, we just got a path root - vec![Ident::new(kw::PathRoot, span)] - }; + fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'sess, MetaItemLit> { + let uninterpolated_span = self.parser.token_uninterpolated_span(); + let Some(token_lit) = self.parser.eat_token_lit() else { + return self.parser.handle_missing_lit(Parser::mk_meta_item_lit_char); + }; - // one segment accepted. accept n more - loop { - // another ident? - if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = - self.inside_delimiters - .next() - .map(|tt| TokenTree::uninterpolate(tt)) - .as_deref() - { - segments.push(Ident::new(name, span)); - } else { - return None; - } - // stop unless we see another `::` - if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) = - self.inside_delimiters.peek() - { - self.inside_delimiters.next(); - } else { - break; - } - } - let span = span.with_hi(segments.last().unwrap().span.hi()); - Some(AttrPath { segments: segments.into_boxed_slice(), span }) - } - TokenTree::Token(Token { kind, .. }, _) if kind.is_delim() => None, - _ => { - // malformed attributes can get here. We can't crash, but somewhere else should've - // already warned for this. - None + let lit = match MetaItemLit::from_token_lit(token_lit, self.parser.prev_token.span) { + Ok(lit) => lit, + Err(err) => { + let guar = + report_lit_error(&self.parser.psess, err, token_lit, uninterpolated_span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. 
+ let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix); + MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap() } + }; + + if !lit.kind.is_unsuffixed() { + // Emit error and continue, we can still parse the attribute as if the suffix isn't there + self.should_emit.emit_err( + self.parser.dcx().create_err(SuffixedLiteralInAttribute { span: lit.span }), + ); } - } - fn value(&mut self) -> Option<MetaItemLit> { - match self.inside_delimiters.next() { - Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => { - MetaItemListParserContext { - inside_delimiters: inner_tokens.iter().peekable(), - dcx: self.dcx, - } - .value() - } - Some(TokenTree::Token(token, _)) => MetaItemLit::from_token(token), - _ => None, + Ok(lit) + } + + fn parse_attr_item(&mut self) -> PResult<'sess, MetaItemParser<'static>> { + if let Some(MetaVarKind::Meta { has_meta_form }) = self.parser.token.is_metavar_seq() { + return if has_meta_form { + let attr_item = self + .parser + .eat_metavar_seq(MetaVarKind::Meta { has_meta_form: true }, |this| { + MetaItemListParserContext { parser: this, should_emit: self.should_emit } + .parse_attr_item() + }) + .unwrap(); + Ok(attr_item) + } else { + self.parser.unexpected_any() + }; } + + let path = self.parser.parse_path(PathStyle::Mod)?; + + // Check style of arguments that this meta item has + let args = if self.parser.check(exp!(OpenParen)) { + let start = self.parser.token.span; + let (sub_parsers, _) = self.parser.parse_paren_comma_seq(|parser| { + MetaItemListParserContext { parser, should_emit: self.should_emit } + .parse_meta_item_inner() + })?; + let end = self.parser.prev_token.span; + ArgParser::List(MetaItemListParser { sub_parsers, span: start.with_hi(end.hi()) }) + } else if self.parser.eat(exp!(Eq)) { + let eq_span = self.parser.prev_token.span; + let value = self.parse_unsuffixed_meta_item_lit()?; + + ArgParser::NameValue(NameValueParser { eq_span, value, value_span: value.span }) + } else { + ArgParser::NoArgs + }; + + Ok(MetaItemParser { path: PathParser(Cow::Owned(path)), args }) } - /// parses one element on the inside of a list attribute like `#[my_attr( <insides> )]` - /// - /// parses a path followed be either: - /// 1. nothing (a word attr) - /// 2. a parenthesized list - /// 3. an equals sign and a literal (name-value) - /// - /// Can also parse *just* a literal. 
This is for cases like as `#[my_attr("literal")]` - /// where no path is given before the literal - /// - /// Some exceptions too for interpolated attributes which are already pre-processed - fn next(&mut self) -> Option<MetaItemOrLitParser<'a>> { - // a list element is either a literal - if let Some(TokenTree::Token(token, _)) = self.inside_delimiters.peek() - && let Some(lit) = MetaItemLit::from_token(token) - { - self.inside_delimiters.next(); - return Some(MetaItemOrLitParser::Lit(lit)); - } else if let Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) = - self.inside_delimiters.peek() + fn parse_meta_item_inner(&mut self) -> PResult<'sess, MetaItemOrLitParser<'static>> { + match self.parse_unsuffixed_meta_item_lit() { + Ok(lit) => return Ok(MetaItemOrLitParser::Lit(lit)), + Err(err) => err.cancel(), // we provide a better error below + } + + match self.parse_attr_item() { + Ok(mi) => return Ok(MetaItemOrLitParser::MetaItemParser(mi)), + Err(err) => err.cancel(), // we provide a better error below + } + + let mut err = InvalidMetaItem { + span: self.parser.token.span, + descr: token_descr(&self.parser.token), + quote_ident_sugg: None, + remove_neg_sugg: None, + }; + + // Suggest quoting idents, e.g. in `#[cfg(key = value)]`. We don't use `Token::ident` and + // don't `uninterpolate` the token to avoid suggesting anything butchered or questionable + // when macro metavariables are involved. + if self.parser.prev_token == token::Eq + && let token::Ident(..) = self.parser.token.kind { - self.inside_delimiters.next(); - return MetaItemListParserContext { - inside_delimiters: inner_tokens.iter().peekable(), - dcx: self.dcx, + let before = self.parser.token.span.shrink_to_lo(); + while let token::Ident(..) = self.parser.token.kind { + self.parser.bump(); } - .next(); + err.quote_ident_sugg = Some(InvalidMetaItemQuoteIdentSugg { + before, + after: self.parser.prev_token.span.shrink_to_hi(), + }); } - // or a path. - let path = self.next_path()?; - - // Paths can be followed by: - // - `(more meta items)` (another list) - // - `= lit` (a name-value) - // - nothing - Some(MetaItemOrLitParser::MetaItemParser(match self.inside_delimiters.peek() { - Some(TokenTree::Delimited(dspan, _, Delimiter::Parenthesis, inner_tokens)) => { - self.inside_delimiters.next(); - - MetaItemParser { - path: PathParser::Attr(path), - args: ArgParser::List(MetaItemListParser::new_tts( - inner_tokens.iter(), - dspan.entire(), - self.dcx, - )), - } - } - Some(TokenTree::Delimited(_, ..)) => { - self.inside_delimiters.next(); - // self.dcx.span_delayed_bug(span.entire(), "wrong delimiters"); - return None; - } - Some(TokenTree::Token(Token { kind: token::Eq, span }, _)) => { - self.inside_delimiters.next(); - let value = self.value()?; - MetaItemParser { - path: PathParser::Attr(path), - args: ArgParser::NameValue(NameValueParser { - eq_span: *span, - value_span: value.span, - value, - }), - } - } - _ => MetaItemParser { path: PathParser::Attr(path), args: ArgParser::NoArgs }, - })) + if self.parser.token == token::Minus + && self + .parser + .look_ahead(1, |t| matches!(t.kind, rustc_ast::token::TokenKind::Literal { .. 
})) + { + err.remove_neg_sugg = + Some(InvalidMetaItemRemoveNegSugg { negative_sign: self.parser.token.span }); + self.parser.bump(); + self.parser.bump(); + } + + Err(self.parser.dcx().create_err(err)) } - fn parse(mut self, span: Span) -> MetaItemListParser<'a> { - let mut sub_parsers = Vec::new(); + fn parse( + tokens: TokenStream, + psess: &'sess ParseSess, + span: Span, + should_emit: ShouldEmit, + ) -> PResult<'sess, MetaItemListParser<'static>> { + let mut parser = Parser::new(psess, tokens, None); + let mut this = MetaItemListParserContext { parser: &mut parser, should_emit }; - while !self.done() { - let Some(n) = self.next() else { - continue; - }; - sub_parsers.push(n); + // Presumably, the majority of the time there will only be one attr. + let mut sub_parsers = ThinVec::with_capacity(1); + while this.parser.token != token::Eof { + sub_parsers.push(this.parse_meta_item_inner()?); - match self.inside_delimiters.peek() { - None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => { - self.inside_delimiters.next(); - } - Some(_) => {} + if !this.parser.eat(exp!(Comma)) { + break; } } - MetaItemListParser { sub_parsers, span } + if parser.token != token::Eof { + parser.unexpected()?; + } + + Ok(MetaItemListParser { sub_parsers, span }) } } #[derive(Debug, Clone)] pub struct MetaItemListParser<'a> { - sub_parsers: Vec<MetaItemOrLitParser<'a>>, + sub_parsers: ThinVec<MetaItemOrLitParser<'a>>, pub span: Span, } impl<'a> MetaItemListParser<'a> { - fn new<'sess>(delim: &'a DelimArgs, dcx: DiagCtxtHandle<'sess>) -> Self { - MetaItemListParser::new_tts(delim.tokens.iter(), delim.dspan.entire(), dcx) - } - - fn new_tts<'sess>(tts: TokenStreamIter<'a>, span: Span, dcx: DiagCtxtHandle<'sess>) -> Self { - MetaItemListParserContext { inside_delimiters: tts.peekable(), dcx }.parse(span) + fn new<'sess>( + delim: &'a DelimArgs, + psess: &'sess ParseSess, + should_emit: ShouldEmit, + ) -> Option<Self> { + match MetaItemListParserContext::parse( + delim.tokens.clone(), + psess, + delim.dspan.entire(), + should_emit, + ) { + Ok(s) => Some(s), + Err(e) => { + should_emit.emit_err(e); + None + } + } } /// Lets you pick and choose as what you want to parse each element in the list diff --git a/compiler/rustc_attr_parsing/src/session_diagnostics.rs b/compiler/rustc_attr_parsing/src/session_diagnostics.rs index c65937b35b3..72bee0ddfbf 100644 --- a/compiler/rustc_attr_parsing/src/session_diagnostics.rs +++ b/compiler/rustc_attr_parsing/src/session_diagnostics.rs @@ -1,6 +1,6 @@ use std::num::IntErrorKind; -use rustc_ast::{self as ast, AttrStyle}; +use rustc_ast::{self as ast, AttrStyle, Path}; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, @@ -485,10 +485,12 @@ pub(crate) struct EmptyAttributeList { #[warning] #[help] pub(crate) struct InvalidTargetLint { - pub name: Symbol, + pub name: AttrPath, pub target: &'static str, - pub applied: String, + pub applied: DiagArgValue, pub only: &'static str, + #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")] + pub attr_span: Span, } #[derive(Diagnostic)] @@ -496,10 +498,11 @@ pub(crate) struct InvalidTargetLint { #[diag(attr_parsing_invalid_target)] pub(crate) struct InvalidTarget { #[primary_span] + #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")] pub span: Span, - pub name: Symbol, + pub name: AttrPath, pub target: &'static str, - pub applied: String, + pub applied: DiagArgValue, pub only: 
&'static str, } @@ -555,7 +558,7 @@ pub(crate) struct LinkOrdinalOutOfRange { pub ordinal: u128, } -pub(crate) enum AttributeParseErrorReason { +pub(crate) enum AttributeParseErrorReason<'a> { ExpectedNoArgs, ExpectedStringLiteral { byte_string: Option<Span>, @@ -568,7 +571,7 @@ pub(crate) enum AttributeParseErrorReason { ExpectedNameValue(Option<Symbol>), DuplicateKey(Symbol), ExpectedSpecificArgument { - possibilities: Vec<&'static str>, + possibilities: &'a [Symbol], strings: bool, /// Should we tell the user to write a list when they didn't? list: bool, @@ -576,16 +579,16 @@ pub(crate) enum AttributeParseErrorReason { ExpectedIdentifier, } -pub(crate) struct AttributeParseError { +pub(crate) struct AttributeParseError<'a> { pub(crate) span: Span, pub(crate) attr_span: Span, pub(crate) attr_style: AttrStyle, pub(crate) template: AttributeTemplate, pub(crate) attribute: AttrPath, - pub(crate) reason: AttributeParseErrorReason, + pub(crate) reason: AttributeParseErrorReason<'a>, } -impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { +impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError<'_> { fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> { let name = self.attribute.to_string(); @@ -654,7 +657,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { list: false, } => { let quote = if strings { '"' } else { '`' }; - match possibilities.as_slice() { + match possibilities { &[] => {} &[x] => { diag.span_label( @@ -684,7 +687,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { list: true, } => { let quote = if strings { '"' } else { '`' }; - match possibilities.as_slice() { + match possibilities { &[] => {} &[x] => { diag.span_label( @@ -734,3 +737,92 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { diag } } + +#[derive(Diagnostic)] +#[diag(attr_parsing_invalid_attr_unsafe)] +#[note] +pub(crate) struct InvalidAttrUnsafe { + #[primary_span] + #[label] + pub span: Span, + pub name: Path, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_unsafe_attr_outside_unsafe)] +pub(crate) struct UnsafeAttrOutsideUnsafe { + #[primary_span] + #[label] + pub span: Span, + #[subdiagnostic] + pub suggestion: UnsafeAttrOutsideUnsafeSuggestion, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + attr_parsing_unsafe_attr_outside_unsafe_suggestion, + applicability = "machine-applicable" +)] +pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion { + #[suggestion_part(code = "unsafe(")] + pub left: Span, + #[suggestion_part(code = ")")] + pub right: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_meta_bad_delim)] +pub(crate) struct MetaBadDelim { + #[primary_span] + pub span: Span, + #[subdiagnostic] + pub sugg: MetaBadDelimSugg, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + attr_parsing_meta_bad_delim_suggestion, + applicability = "machine-applicable" +)] +pub(crate) struct MetaBadDelimSugg { + #[suggestion_part(code = "(")] + pub open: Span, + #[suggestion_part(code = ")")] + pub close: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_invalid_meta_item)] +pub(crate) struct InvalidMetaItem { + #[primary_span] + pub span: Span, + pub descr: String, + #[subdiagnostic] + pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>, + #[subdiagnostic] + pub remove_neg_sugg: Option<InvalidMetaItemRemoveNegSugg>, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(attr_parsing_quote_ident_sugg, applicability = "machine-applicable")] +pub(crate) 
struct InvalidMetaItemQuoteIdentSugg { + #[suggestion_part(code = "\"")] + pub before: Span, + #[suggestion_part(code = "\"")] + pub after: Span, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(attr_parsing_remove_neg_sugg, applicability = "machine-applicable")] +pub(crate) struct InvalidMetaItemRemoveNegSugg { + #[suggestion_part(code = "")] + pub negative_sign: Span, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_suffixed_literal_in_attribute)] +#[help] +pub(crate) struct SuffixedLiteralInAttribute { + #[primary_span] + pub span: Span, +} diff --git a/compiler/rustc_attr_parsing/src/target_checking.rs b/compiler/rustc_attr_parsing/src/target_checking.rs new file mode 100644 index 00000000000..9568b791b3f --- /dev/null +++ b/compiler/rustc_attr_parsing/src/target_checking.rs @@ -0,0 +1,247 @@ +use std::borrow::Cow; + +use rustc_errors::DiagArgValue; +use rustc_feature::Features; +use rustc_hir::lints::{AttributeLint, AttributeLintKind}; +use rustc_hir::{AttrPath, MethodKind, Target}; +use rustc_span::Span; + +use crate::AttributeParser; +use crate::context::Stage; +use crate::session_diagnostics::InvalidTarget; + +#[derive(Debug)] +pub(crate) enum AllowedTargets { + AllowList(&'static [Policy]), + AllowListWarnRest(&'static [Policy]), +} + +pub(crate) enum AllowedResult { + Allowed, + Warn, + Error, +} + +impl AllowedTargets { + pub(crate) fn is_allowed(&self, target: Target) -> AllowedResult { + match self { + AllowedTargets::AllowList(list) => { + if list.contains(&Policy::Allow(target)) { + AllowedResult::Allowed + } else if list.contains(&Policy::Warn(target)) { + AllowedResult::Warn + } else { + AllowedResult::Error + } + } + AllowedTargets::AllowListWarnRest(list) => { + if list.contains(&Policy::Allow(target)) { + AllowedResult::Allowed + } else if list.contains(&Policy::Error(target)) { + AllowedResult::Error + } else { + AllowedResult::Warn + } + } + } + } + + pub(crate) fn allowed_targets(&self) -> Vec<Target> { + match self { + AllowedTargets::AllowList(list) => list, + AllowedTargets::AllowListWarnRest(list) => list, + } + .iter() + .filter_map(|target| match target { + Policy::Allow(target) => Some(*target), + Policy::Warn(_) => None, + Policy::Error(_) => None, + }) + .collect() + } +} + +#[derive(Debug, Eq, PartialEq)] +pub(crate) enum Policy { + Allow(Target), + Warn(Target), + Error(Target), +} + +impl<S: Stage> AttributeParser<'_, S> { + pub(crate) fn check_target( + &self, + attr_name: AttrPath, + attr_span: Span, + allowed_targets: &AllowedTargets, + target: Target, + target_id: S::Id, + mut emit_lint: impl FnMut(AttributeLint<S::Id>), + ) { + match allowed_targets.is_allowed(target) { + AllowedResult::Allowed => {} + AllowedResult::Warn => { + let allowed_targets = allowed_targets.allowed_targets(); + let (applied, only) = + allowed_targets_applied(allowed_targets, target, self.features); + emit_lint(AttributeLint { + id: target_id, + span: attr_span, + kind: AttributeLintKind::InvalidTarget { + name: attr_name, + target, + only: if only { "only " } else { "" }, + applied, + }, + }); + } + AllowedResult::Error => { + let allowed_targets = allowed_targets.allowed_targets(); + let (applied, only) = + allowed_targets_applied(allowed_targets, target, self.features); + self.dcx().emit_err(InvalidTarget { + span: attr_span, + name: attr_name, + target: target.plural_name(), + only: if only { "only " } else { "" }, + applied: DiagArgValue::StrListSepByAnd( + applied.into_iter().map(Cow::Owned).collect(), + ), + }); + } + } + } +} + +/// Takes a list of 
`allowed_targets` for an attribute, and the `target` the attribute was applied to. +/// Does some heuristic-based filtering to remove uninteresting targets, and formats the targets into a string +pub(crate) fn allowed_targets_applied( + mut allowed_targets: Vec<Target>, + target: Target, + features: Option<&Features>, +) -> (Vec<String>, bool) { + // Remove unstable targets from `allowed_targets` if their features are not enabled + if let Some(features) = features { + if !features.fn_delegation() { + allowed_targets.retain(|t| !matches!(t, Target::Delegation { .. })); + } + if !features.stmt_expr_attributes() { + allowed_targets.retain(|t| !matches!(t, Target::Expression | Target::Statement)); + } + if !features.extern_types() { + allowed_targets.retain(|t| !matches!(t, Target::ForeignTy)); + } + } + + // We define groups of "similar" targets. + // If at least two of the targets are allowed, and the `target` is not in the group, + // we collapse the entire group to a single entry to simplify the target list + const FUNCTION_LIKE: &[Target] = &[ + Target::Fn, + Target::Closure, + Target::ForeignFn, + Target::Method(MethodKind::Inherent), + Target::Method(MethodKind::Trait { body: false }), + Target::Method(MethodKind::Trait { body: true }), + Target::Method(MethodKind::TraitImpl), + ]; + const METHOD_LIKE: &[Target] = &[ + Target::Method(MethodKind::Inherent), + Target::Method(MethodKind::Trait { body: false }), + Target::Method(MethodKind::Trait { body: true }), + Target::Method(MethodKind::TraitImpl), + ]; + const IMPL_LIKE: &[Target] = + &[Target::Impl { of_trait: false }, Target::Impl { of_trait: true }]; + const ADT_LIKE: &[Target] = &[Target::Struct, Target::Enum]; + + let mut added_fake_targets = Vec::new(); + filter_targets( + &mut allowed_targets, + FUNCTION_LIKE, + "functions", + target, + &mut added_fake_targets, + ); + filter_targets(&mut allowed_targets, METHOD_LIKE, "methods", target, &mut added_fake_targets); + filter_targets(&mut allowed_targets, IMPL_LIKE, "impl blocks", target, &mut added_fake_targets); + filter_targets(&mut allowed_targets, ADT_LIKE, "data types", target, &mut added_fake_targets); + + // If there is now only 1 target left, show that as the only possible target + ( + added_fake_targets + .iter() + .copied() + .chain(allowed_targets.iter().map(|t| t.plural_name())) + .map(|i| i.to_string()) + .collect(), + allowed_targets.len() + added_fake_targets.len() == 1, + ) +} + +fn filter_targets( + allowed_targets: &mut Vec<Target>, + target_group: &'static [Target], + target_group_name: &'static str, + target: Target, + added_fake_targets: &mut Vec<&'static str>, +) { + if target_group.contains(&target) { + return; + } + if allowed_targets.iter().filter(|at| target_group.contains(at)).count() < 2 { + return; + } + allowed_targets.retain(|t| !target_group.contains(t)); + added_fake_targets.push(target_group_name); +} + +/// This is the list of all targets to which an attribute can be applied +/// This is used for: +/// - `rustc_dummy`, which can be applied to all targets +/// - Attributes that are not ported to the new target system yet can use this list as a placeholder +pub(crate) const ALL_TARGETS: &'static [Policy] = { + use Policy::Allow; + &[ + Allow(Target::ExternCrate), + Allow(Target::Use), + Allow(Target::Static), + Allow(Target::Const), + Allow(Target::Fn), + Allow(Target::Closure), + Allow(Target::Mod), + Allow(Target::ForeignMod), + Allow(Target::GlobalAsm), + Allow(Target::TyAlias), + Allow(Target::Enum), + Allow(Target::Variant), + 
Allow(Target::Struct), + Allow(Target::Field), + Allow(Target::Union), + Allow(Target::Trait), + Allow(Target::TraitAlias), + Allow(Target::Impl { of_trait: false }), + Allow(Target::Impl { of_trait: true }), + Allow(Target::Expression), + Allow(Target::Statement), + Allow(Target::Arm), + Allow(Target::AssocConst), + Allow(Target::Method(MethodKind::Inherent)), + Allow(Target::Method(MethodKind::Trait { body: false })), + Allow(Target::Method(MethodKind::Trait { body: true })), + Allow(Target::Method(MethodKind::TraitImpl)), + Allow(Target::AssocTy), + Allow(Target::ForeignFn), + Allow(Target::ForeignStatic), + Allow(Target::ForeignTy), + Allow(Target::MacroDef), + Allow(Target::Param), + Allow(Target::PatField), + Allow(Target::ExprField), + Allow(Target::WherePredicate), + Allow(Target::MacroCall), + Allow(Target::Crate), + Allow(Target::Delegation { mac: false }), + Allow(Target::Delegation { mac: true }), + ] +}; diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_attr_parsing/src/validate_attr.rs index 68ef6d6f32c..7a7624893bd 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_attr_parsing/src/validate_attr.rs @@ -8,16 +8,16 @@ use rustc_ast::{ self as ast, AttrArgs, Attribute, DelimArgs, MetaItem, MetaItemInner, MetaItemKind, NodeId, Path, Safety, }; -use rustc_attr_parsing::{AttributeParser, Late}; use rustc_errors::{Applicability, DiagCtxtHandle, FatalError, PResult}; use rustc_feature::{AttributeSafety, AttributeTemplate, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute}; +use rustc_parse::parse_in; use rustc_session::errors::report_lit_error; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{ILL_FORMED_ATTRIBUTE_INPUT, UNSAFE_ATTR_OUTSIDE_UNSAFE}; use rustc_session::parse::ParseSess; use rustc_span::{Span, Symbol, sym}; -use crate::{errors, parse_in}; +use crate::{AttributeParser, Late, session_diagnostics as errors}; pub fn check_attr(psess: &ParseSess, attr: &Attribute, id: NodeId) { if attr.is_doc_comment() || attr.has_name(sym::cfg_trace) || attr.has_name(sym::cfg_attr_trace) @@ -33,7 +33,10 @@ pub fn check_attr(psess: &ParseSess, attr: &Attribute, id: NodeId) { // Check input tokens for built-in and key-value attributes. match builtin_attr_info { // `rustc_dummy` doesn't have any restrictions specific to built-in attributes. - Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => { + Some(BuiltinAttribute { name, template, .. }) => { + if AttributeParser::<Late>::is_parsed_attribute(slice::from_ref(&name)) { + return; + } match parse_meta(psess, attr) { // Don't check safety again, we just did that Ok(meta) => { @@ -133,16 +136,6 @@ fn check_meta_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) { }); } -pub(super) fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) { - if let Delimiter::Parenthesis = delim { - return; - } - psess.dcx().emit_err(errors::CfgAttrBadDelim { - span: span.entire(), - sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close }, - }); -} - /// Checks that the given meta-item is compatible with this `AttributeTemplate`. 
fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaItemKind) -> bool { let is_one_allowed_subword = |items: &[MetaItemInner]| match items { @@ -269,9 +262,6 @@ pub fn check_builtin_meta_item( ) { if !is_attr_template_compatible(&template, &meta.kind) { // attrs with new parsers are locally validated so excluded here - if AttributeParser::<Late>::is_parsed_attribute(slice::from_ref(&name)) { - return; - } emit_malformed_attribute(psess, style, meta.span, name, template); } diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs index 57db2e9fb57..d3f6c01ab8c 100644 --- a/compiler/rustc_borrowck/src/dataflow.rs +++ b/compiler/rustc_borrowck/src/dataflow.rs @@ -1,7 +1,6 @@ use std::fmt; use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::graph; use rustc_index::bit_set::{DenseBitSet, MixedBitSet}; use rustc_middle::mir::{ self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges, @@ -317,9 +316,8 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { loans_out_of_scope_at_location: FxIndexMap::default(), }; for (loan_idx, loan_data) in borrow_set.iter_enumerated() { - let issuing_region = loan_data.region; let loan_issued_at = loan_data.reserve_location; - prec.precompute_loans_out_of_scope(loan_idx, issuing_region, loan_issued_at); + prec.precompute_loans_out_of_scope(loan_idx, loan_issued_at); } prec.loans_out_of_scope_at_location @@ -328,45 +326,7 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { /// Loans are in scope while they are live: whether they are contained within any live region. /// In the location-insensitive analysis, a loan will be contained in a region if the issuing /// region can reach it in the subset graph. So this is a reachability problem. - fn precompute_loans_out_of_scope( - &mut self, - loan_idx: BorrowIndex, - issuing_region: RegionVid, - loan_issued_at: Location, - ) { - let sccs = self.regioncx.constraint_sccs(); - let universal_regions = self.regioncx.universal_regions(); - - // The loop below was useful for the location-insensitive analysis but shouldn't be - // impactful in the location-sensitive case. It seems that it does, however, as without it a - // handful of tests fail. That likely means some liveness or outlives data related to choice - // regions is missing - // FIXME: investigate the impact of loans traversing applied member constraints and why some - // tests fail otherwise. - // - // We first handle the cases where the loan doesn't go out of scope, depending on the - // issuing region's successors. - for successor in graph::depth_first_search(&self.regioncx.region_graph(), issuing_region) { - // Via applied member constraints - // - // The issuing region can flow into the choice regions, and they are either: - // - placeholders or free regions themselves, - // - or also transitively outlive a free region. - // - // That is to say, if there are applied member constraints here, the loan escapes the - // function and cannot go out of scope. We could early return here. - // - // For additional insurance via fuzzing and crater, we verify that the constraint's min - // choice indeed escapes the function. In the future, we could e.g. turn this check into - // a debug assert and early return as an optimization. 
- let scc = sccs.scc(successor); - for constraint in self.regioncx.applied_member_constraints(scc) { - if universal_regions.is_universal_region(constraint.min_choice) { - return; - } - } - } - + fn precompute_loans_out_of_scope(&mut self, loan_idx: BorrowIndex, loan_issued_at: Location) { let first_block = loan_issued_at.block; let first_bb_data = &self.body.basic_blocks[first_block]; diff --git a/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs b/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs index 83fe4a9f98f..f77f759035b 100644 --- a/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs +++ b/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs @@ -13,6 +13,8 @@ use rustc_middle::mir::{self, ConstraintCategory, Location}; use rustc_middle::ty::{ self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, }; +use rustc_span::Span; +use rustc_trait_selection::error_reporting::infer::region::unexpected_hidden_region_diagnostic; use rustc_trait_selection::errors::impl_trait_overcapture_suggestion; use crate::MirBorrowckCtxt; @@ -26,13 +28,61 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { if errors.is_empty() { return; } + + let infcx = self.infcx; let mut guar = None; + let mut last_unexpected_hidden_region: Option<(Span, Ty<'_>, ty::OpaqueTypeKey<'tcx>)> = + None; for error in errors { guar = Some(match error { - DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err) => err.report(self.infcx), + DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err) => err.report(infcx), DeferredOpaqueTypeError::LifetimeMismatchOpaqueParam(err) => { - self.infcx.dcx().emit_err(err) + infcx.dcx().emit_err(err) + } + DeferredOpaqueTypeError::UnexpectedHiddenRegion { + opaque_type_key, + hidden_type, + member_region, + } => { + let named_ty = + self.regioncx.name_regions_for_member_constraint(infcx.tcx, hidden_type.ty); + let named_key = self + .regioncx + .name_regions_for_member_constraint(infcx.tcx, opaque_type_key); + let named_region = + self.regioncx.name_regions_for_member_constraint(infcx.tcx, member_region); + let diag = unexpected_hidden_region_diagnostic( + infcx, + self.mir_def_id(), + hidden_type.span, + named_ty, + named_region, + named_key, + ); + if last_unexpected_hidden_region + != Some((hidden_type.span, named_ty, named_key)) + { + last_unexpected_hidden_region = + Some((hidden_type.span, named_ty, named_key)); + diag.emit() + } else { + diag.delay_as_bug() + } } + DeferredOpaqueTypeError::NonDefiningUseInDefiningScope { + span, + opaque_type_key, + } => infcx.dcx().span_err( + span, + format!( + "non-defining use of `{}` in the defining scope", + Ty::new_opaque( + infcx.tcx, + opaque_type_key.def_id.to_def_id(), + opaque_type_key.args + ) + ), + ), }); } let guar = guar.unwrap(); @@ -194,7 +244,7 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for FindOpaqueRegion<'_, 'tcx> { // Find a path between the borrow region and our opaque capture. 
if let Some((path, _)) = - self.regioncx.find_constraint_paths_between_regions(self.borrow_region, |r| { + self.regioncx.find_constraint_path_between_regions(self.borrow_region, |r| { r == opaque_region_vid }) { diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index fe4e1b6011e..c7d2267e5f7 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -23,7 +23,6 @@ use rustc_trait_selection::error_reporting::infer::nice_region_error::{ self, HirTraitObjectVisitor, NiceRegionError, TraitObjectVisitor, find_anon_type, find_param_with_region, suggest_adding_lifetime_params, }; -use rustc_trait_selection::error_reporting::infer::region::unexpected_hidden_region_diagnostic; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{Obligation, ObligationCtxt}; use tracing::{debug, instrument, trace}; @@ -105,18 +104,6 @@ pub(crate) enum RegionErrorKind<'tcx> { /// A generic bound failure for a type test (`T: 'a`). TypeTestError { type_test: TypeTest<'tcx> }, - /// An unexpected hidden region for an opaque type. - UnexpectedHiddenRegion { - /// The span for the member constraint. - span: Span, - /// The hidden type. - hidden_ty: Ty<'tcx>, - /// The opaque type. - key: ty::OpaqueTypeKey<'tcx>, - /// The unexpected region. - member_region: ty::Region<'tcx>, - }, - /// Higher-ranked subtyping error. BoundUniversalRegionError { /// The placeholder free region. @@ -323,11 +310,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { pub(crate) fn report_region_errors(&mut self, nll_errors: RegionErrors<'tcx>) { // Iterate through all the errors, producing a diagnostic for each one. The diagnostics are // buffered in the `MirBorrowckCtxt`. 
- let mut outlives_suggestion = OutlivesSuggestionBuilder::default(); - let mut last_unexpected_hidden_region: Option<(Span, Ty<'_>, ty::OpaqueTypeKey<'tcx>)> = - None; - for (nll_error, _) in nll_errors.into_iter() { match nll_error { RegionErrorKind::TypeTestError { type_test } => { @@ -378,30 +361,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - RegionErrorKind::UnexpectedHiddenRegion { span, hidden_ty, key, member_region } => { - let named_ty = - self.regioncx.name_regions_for_member_constraint(self.infcx.tcx, hidden_ty); - let named_key = - self.regioncx.name_regions_for_member_constraint(self.infcx.tcx, key); - let named_region = self - .regioncx - .name_regions_for_member_constraint(self.infcx.tcx, member_region); - let diag = unexpected_hidden_region_diagnostic( - self.infcx, - self.mir_def_id(), - span, - named_ty, - named_region, - named_key, - ); - if last_unexpected_hidden_region != Some((span, named_ty, named_key)) { - self.buffer_error(diag); - last_unexpected_hidden_region = Some((span, named_ty, named_key)); - } else { - diag.delay_as_bug(); - } - } - RegionErrorKind::BoundUniversalRegionError { longer_fr, placeholder, diff --git a/compiler/rustc_borrowck/src/handle_placeholders.rs b/compiler/rustc_borrowck/src/handle_placeholders.rs index 1614c112ab5..4661906fbeb 100644 --- a/compiler/rustc_borrowck/src/handle_placeholders.rs +++ b/compiler/rustc_borrowck/src/handle_placeholders.rs @@ -15,7 +15,6 @@ use tracing::debug; use crate::constraints::{ConstraintSccIndex, OutlivesConstraintSet}; use crate::consumers::OutlivesConstraint; use crate::diagnostics::UniverseInfo; -use crate::member_constraints::MemberConstraintSet; use crate::region_infer::values::{LivenessValues, PlaceholderIndices}; use crate::region_infer::{ConstraintSccs, RegionDefinition, Representative, TypeTest}; use crate::ty::VarianceDiagInfo; @@ -30,7 +29,6 @@ pub(crate) struct LoweredConstraints<'tcx> { pub(crate) constraint_sccs: Sccs<RegionVid, ConstraintSccIndex>, pub(crate) definitions: Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>, pub(crate) scc_annotations: IndexVec<ConstraintSccIndex, RegionTracker>, - pub(crate) member_constraints: MemberConstraintSet<'tcx, RegionVid>, pub(crate) outlives_constraints: Frozen<OutlivesConstraintSet<'tcx>>, pub(crate) type_tests: Vec<TypeTest<'tcx>>, pub(crate) liveness_constraints: LivenessValues, @@ -147,9 +145,10 @@ impl scc::Annotation for RegionTracker { /// Determines if the region variable definitions contain /// placeholders, and compute them for later use. -fn region_definitions<'tcx>( - universal_regions: &UniversalRegions<'tcx>, +// FIXME: This is also used by opaque type handling. Move it to a separate file. +pub(super) fn region_definitions<'tcx>( infcx: &BorrowckInferCtxt<'tcx>, + universal_regions: &UniversalRegions<'tcx>, ) -> (Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>, bool) { let var_infos = infcx.get_region_var_infos(); // Create a RegionDefinition for each inference variable. 
This happens here because @@ -213,14 +212,13 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>( infcx: &BorrowckInferCtxt<'tcx>, ) -> LoweredConstraints<'tcx> { let universal_regions = &universal_region_relations.universal_regions; - let (definitions, has_placeholders) = region_definitions(universal_regions, infcx); + let (definitions, has_placeholders) = region_definitions(infcx, universal_regions); let MirTypeckRegionConstraints { placeholder_indices, placeholder_index_to_region: _, liveness_constraints, mut outlives_constraints, - member_constraints, universe_causes, type_tests, } = constraints; @@ -246,7 +244,6 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>( return LoweredConstraints { type_tests, - member_constraints, constraint_sccs, scc_annotations: scc_annotations.scc_to_annotation, definitions, @@ -283,7 +280,6 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>( constraint_sccs, definitions, scc_annotations, - member_constraints, outlives_constraints: Frozen::freeze(outlives_constraints), type_tests, liveness_constraints, diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index d76d6a04e6e..ce78ae203a4 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -78,7 +78,6 @@ mod dataflow; mod def_use; mod diagnostics; mod handle_placeholders; -mod member_constraints; mod nll; mod path_utils; mod place_ext; @@ -301,7 +300,7 @@ fn do_mir_borrowck<'tcx>( def: LocalDefId, ) -> PropagatedBorrowCheckResults<'tcx> { let tcx = root_cx.tcx; - let infcx = BorrowckInferCtxt::new(tcx, def); + let infcx = BorrowckInferCtxt::new(tcx, def, root_cx.root_def_id()); let (input_body, promoted) = tcx.mir_promoted(def); let input_body: &Body<'_> = &input_body.borrow(); let input_promoted: &IndexSlice<_, _> = &promoted.borrow(); @@ -335,9 +334,10 @@ fn do_mir_borrowck<'tcx>( // Run the MIR type-checker. let MirTypeckResults { - constraints, + mut constraints, universal_region_relations, - opaque_type_values, + region_bound_pairs, + known_type_outlives_obligations, polonius_context, } = type_check::type_check( root_cx, @@ -352,6 +352,17 @@ fn do_mir_borrowck<'tcx>( Rc::clone(&location_map), ); + let opaque_type_errors = region_infer::opaque_types::handle_opaque_type_uses( + root_cx, + &infcx, + &body, + &universal_region_relations, + ®ion_bound_pairs, + &known_type_outlives_obligations, + &location_map, + &mut constraints, + ); + // Compute non-lexical lifetimes using the constraints computed // by typechecking the MIR body. let nll::NllOutput { @@ -375,8 +386,6 @@ fn do_mir_borrowck<'tcx>( polonius_context, ); - let opaque_type_errors = regioncx.infer_opaque_types(root_cx, &infcx, opaque_type_values); - // Dump MIR results into a file, if that is enabled. This lets us // write unit-tests, as well as helping with debugging. 
nll::dump_nll_mir(&infcx, body, ®ioncx, &opt_closure_req, &borrow_set); @@ -581,12 +590,13 @@ fn get_flow_results<'a, 'tcx>( pub(crate) struct BorrowckInferCtxt<'tcx> { pub(crate) infcx: InferCtxt<'tcx>, - pub(crate) reg_var_to_origin: RefCell<FxIndexMap<ty::RegionVid, RegionCtxt>>, + pub(crate) root_def_id: LocalDefId, pub(crate) param_env: ParamEnv<'tcx>, + pub(crate) reg_var_to_origin: RefCell<FxIndexMap<ty::RegionVid, RegionCtxt>>, } impl<'tcx> BorrowckInferCtxt<'tcx> { - pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Self { + pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId, root_def_id: LocalDefId) -> Self { let typing_mode = if tcx.use_typing_mode_borrowck() { TypingMode::borrowck(tcx, def_id) } else { @@ -594,7 +604,12 @@ impl<'tcx> BorrowckInferCtxt<'tcx> { }; let infcx = tcx.infer_ctxt().build(typing_mode); let param_env = tcx.param_env(def_id); - BorrowckInferCtxt { infcx, reg_var_to_origin: RefCell::new(Default::default()), param_env } + BorrowckInferCtxt { + infcx, + root_def_id, + reg_var_to_origin: RefCell::new(Default::default()), + param_env, + } } pub(crate) fn next_region_var<F>( diff --git a/compiler/rustc_borrowck/src/member_constraints.rs b/compiler/rustc_borrowck/src/member_constraints.rs deleted file mode 100644 index bdd0f6fe11e..00000000000 --- a/compiler/rustc_borrowck/src/member_constraints.rs +++ /dev/null @@ -1,226 +0,0 @@ -use std::hash::Hash; -use std::ops::Index; - -use rustc_data_structures::fx::FxIndexMap; -use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::ty::{self, Ty}; -use rustc_span::Span; -use tracing::instrument; - -/// Compactly stores a set of `R0 member of [R1...Rn]` constraints, -/// indexed by the region `R0`. -#[derive(Debug)] -pub(crate) struct MemberConstraintSet<'tcx, R> -where - R: Copy + Eq, -{ - /// Stores the first "member" constraint for a given `R0`. This is an - /// index into the `constraints` vector below. - first_constraints: FxIndexMap<R, NllMemberConstraintIndex>, - - /// Stores the data about each `R0 member of [R1..Rn]` constraint. - /// These are organized into a linked list, so each constraint - /// contains the index of the next constraint with the same `R0`. - constraints: IndexVec<NllMemberConstraintIndex, MemberConstraint<'tcx>>, - - /// Stores the `R1..Rn` regions for *all* sets. For any given - /// constraint, we keep two indices so that we can pull out a - /// slice. - choice_regions: Vec<ty::RegionVid>, -} - -/// Represents a `R0 member of [R1..Rn]` constraint -#[derive(Debug)] -pub(crate) struct MemberConstraint<'tcx> { - next_constraint: Option<NllMemberConstraintIndex>, - - /// The span where the hidden type was instantiated. - pub(crate) definition_span: Span, - - /// The hidden type in which `R0` appears. (Used in error reporting.) - pub(crate) hidden_ty: Ty<'tcx>, - - pub(crate) key: ty::OpaqueTypeKey<'tcx>, - - /// The region `R0`. - pub(crate) member_region_vid: ty::RegionVid, - - /// Index of `R1` in `choice_regions` vector from `MemberConstraintSet`. - start_index: usize, - - /// Index of `Rn` in `choice_regions` vector from `MemberConstraintSet`. - end_index: usize, -} - -rustc_index::newtype_index! 
{ - #[debug_format = "MemberConstraintIndex({})"] - pub(crate) struct NllMemberConstraintIndex {} -} - -impl Default for MemberConstraintSet<'_, ty::RegionVid> { - fn default() -> Self { - Self { - first_constraints: Default::default(), - constraints: Default::default(), - choice_regions: Default::default(), - } - } -} - -impl<'tcx> MemberConstraintSet<'tcx, ty::RegionVid> { - pub(crate) fn is_empty(&self) -> bool { - self.constraints.is_empty() - } - - /// Pushes a member constraint into the set. - #[instrument(level = "debug", skip(self))] - pub(crate) fn add_member_constraint( - &mut self, - key: ty::OpaqueTypeKey<'tcx>, - hidden_ty: Ty<'tcx>, - definition_span: Span, - member_region_vid: ty::RegionVid, - choice_regions: &[ty::RegionVid], - ) { - let next_constraint = self.first_constraints.get(&member_region_vid).cloned(); - let start_index = self.choice_regions.len(); - self.choice_regions.extend(choice_regions); - let end_index = self.choice_regions.len(); - let constraint_index = self.constraints.push(MemberConstraint { - next_constraint, - member_region_vid, - definition_span, - hidden_ty, - key, - start_index, - end_index, - }); - self.first_constraints.insert(member_region_vid, constraint_index); - } -} - -impl<'tcx, R1> MemberConstraintSet<'tcx, R1> -where - R1: Copy + Hash + Eq, -{ - /// Remap the "member region" key using `map_fn`, producing a new - /// member constraint set. This is used in the NLL code to map from - /// the original `RegionVid` to an scc index. In some cases, we - /// may have multiple `R1` values mapping to the same `R2` key -- that - /// is ok, the two sets will be merged. - pub(crate) fn into_mapped<R2>( - self, - mut map_fn: impl FnMut(R1) -> R2, - ) -> MemberConstraintSet<'tcx, R2> - where - R2: Copy + Hash + Eq, - { - // We can re-use most of the original data, just tweaking the - // linked list links a bit. - // - // For example if we had two keys `Ra` and `Rb` that both now - // wind up mapped to the same key `S`, we would append the - // linked list for `Ra` onto the end of the linked list for - // `Rb` (or vice versa) -- this basically just requires - // rewriting the final link from one list to point at the other - // other (see `append_list`). - - let MemberConstraintSet { first_constraints, mut constraints, choice_regions } = self; - - let mut first_constraints2 = FxIndexMap::default(); - first_constraints2.reserve(first_constraints.len()); - - for (r1, start1) in first_constraints { - let r2 = map_fn(r1); - if let Some(&start2) = first_constraints2.get(&r2) { - append_list(&mut constraints, start1, start2); - } - first_constraints2.insert(r2, start1); - } - - MemberConstraintSet { first_constraints: first_constraints2, constraints, choice_regions } - } -} - -impl<'tcx, R> MemberConstraintSet<'tcx, R> -where - R: Copy + Hash + Eq, -{ - pub(crate) fn all_indices(&self) -> impl Iterator<Item = NllMemberConstraintIndex> { - self.constraints.indices() - } - - /// Iterate down the constraint indices associated with a given - /// peek-region. You can then use `choice_regions` and other - /// methods to access data. 
- pub(crate) fn indices( - &self, - member_region_vid: R, - ) -> impl Iterator<Item = NllMemberConstraintIndex> { - let mut next = self.first_constraints.get(&member_region_vid).cloned(); - std::iter::from_fn(move || -> Option<NllMemberConstraintIndex> { - if let Some(current) = next { - next = self.constraints[current].next_constraint; - Some(current) - } else { - None - } - }) - } - - /// Returns the "choice regions" for a given member - /// constraint. This is the `R1..Rn` from a constraint like: - /// - /// ```text - /// R0 member of [R1..Rn] - /// ``` - pub(crate) fn choice_regions(&self, pci: NllMemberConstraintIndex) -> &[ty::RegionVid] { - let MemberConstraint { start_index, end_index, .. } = &self.constraints[pci]; - &self.choice_regions[*start_index..*end_index] - } -} - -impl<'tcx, R> Index<NllMemberConstraintIndex> for MemberConstraintSet<'tcx, R> -where - R: Copy + Eq, -{ - type Output = MemberConstraint<'tcx>; - - fn index(&self, i: NllMemberConstraintIndex) -> &MemberConstraint<'tcx> { - &self.constraints[i] - } -} - -/// Given a linked list starting at `source_list` and another linked -/// list starting at `target_list`, modify `target_list` so that it is -/// followed by `source_list`. -/// -/// Before: -/// -/// ```text -/// target_list: A -> B -> C -> (None) -/// source_list: D -> E -> F -> (None) -/// ``` -/// -/// After: -/// -/// ```text -/// target_list: A -> B -> C -> D -> E -> F -> (None) -/// ``` -fn append_list( - constraints: &mut IndexSlice<NllMemberConstraintIndex, MemberConstraint<'_>>, - target_list: NllMemberConstraintIndex, - source_list: NllMemberConstraintIndex, -) { - let mut p = target_list; - loop { - let r = &mut constraints[p]; - match r.next_constraint { - Some(q) => p = q, - None => { - r.next_constraint = Some(source_list); - return; - } - } - } -} diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index c76c5c17431..3d95eb4663a 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -1,8 +1,6 @@ -use std::cell::OnceCell; use std::collections::VecDeque; use std::rc::Rc; -use rustc_data_structures::binary_search_util; use rustc_data_structures::frozen::Frozen; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::graph::scc::{self, Sccs}; @@ -24,15 +22,13 @@ use rustc_span::hygiene::DesugaringKind; use rustc_span::{DUMMY_SP, Span}; use tracing::{Level, debug, enabled, instrument, trace}; -use crate::constraints::graph::{self, NormalConstraintGraph, RegionGraph}; +use crate::constraints::graph::NormalConstraintGraph; use crate::constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet}; use crate::dataflow::BorrowIndex; use crate::diagnostics::{RegionErrorKind, RegionErrors, UniverseInfo}; use crate::handle_placeholders::{LoweredConstraints, RegionTracker}; -use crate::member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}; use crate::polonius::LiveLoans; use crate::polonius::legacy::PoloniusOutput; -use crate::region_infer::reverse_sccs::ReverseSccGraph; use crate::region_infer::values::{LivenessValues, RegionElement, RegionValues, ToElementIndex}; use crate::type_check::Locations; use crate::type_check::free_region_relations::UniversalRegionRelations; @@ -120,20 +116,6 @@ pub struct RegionInferenceContext<'tcx> { scc_annotations: IndexVec<ConstraintSccIndex, RegionTracker>, - /// Reverse of the SCC constraint graph -- i.e., an edge `A -> B` exists if - /// `B: A`. 
This is used to compute the universal regions that are required - /// to outlive a given SCC. - rev_scc_graph: OnceCell<ReverseSccGraph>, - - /// The "R0 member of [R1..Rn]" constraints, indexed by SCC. - member_constraints: Rc<MemberConstraintSet<'tcx, ConstraintSccIndex>>, - - /// Records the member constraints that we applied to each scc. - /// This is useful for error reporting. Once constraint - /// propagation is done, this vector is sorted according to - /// `member_region_scc`. - member_constraints_applied: Vec<AppliedMemberConstraint>, - /// Map universe indexes to information on why we created it. universe_causes: FxIndexMap<ty::UniverseIndex, UniverseInfo<'tcx>>, @@ -150,32 +132,6 @@ pub struct RegionInferenceContext<'tcx> { universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>, } -/// Each time that `apply_member_constraint` is successful, it appends -/// one of these structs to the `member_constraints_applied` field. -/// This is used in error reporting to trace out what happened. -/// -/// The way that `apply_member_constraint` works is that it effectively -/// adds a new lower bound to the SCC it is analyzing: so you wind up -/// with `'R: 'O` where `'R` is the pick-region and `'O` is the -/// minimal viable option. -#[derive(Debug)] -pub(crate) struct AppliedMemberConstraint { - /// The SCC that was affected. (The "member region".) - /// - /// The vector if `AppliedMemberConstraint` elements is kept sorted - /// by this field. - pub(crate) member_region_scc: ConstraintSccIndex, - - /// The "best option" that `apply_member_constraint` found -- this was - /// added as an "ad-hoc" lower-bound to `member_region_scc`. - pub(crate) min_choice: ty::RegionVid, - - /// The "member constraint index" -- we can find out details about - /// the constraint from - /// `set.member_constraints[member_constraint_index]`. - pub(crate) member_constraint_index: NllMemberConstraintIndex, -} - #[derive(Debug)] pub(crate) struct RegionDefinition<'tcx> { /// What kind of variable is this -- a free region? existential @@ -268,7 +224,6 @@ enum Trace<'a, 'tcx> { StartRegion, FromGraph(&'a OutlivesConstraint<'tcx>), FromStatic(RegionVid), - FromMember(RegionVid, RegionVid, Span), NotVisited, } @@ -363,7 +318,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { liveness_constraints, universe_causes, placeholder_indices, - member_constraints, } = lowered_constraints; debug!("universal_regions: {:#?}", universal_region_relations.universal_regions); @@ -385,9 +339,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { scc_values.merge_liveness(scc, region, &liveness_constraints); } - let member_constraints = - Rc::new(member_constraints.into_mapped(|r| constraint_sccs.scc(r))); - let mut result = Self { definitions, liveness_constraints, @@ -395,9 +346,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { constraint_graph, constraint_sccs, scc_annotations, - rev_scc_graph: OnceCell::new(), - member_constraints, - member_constraints_applied: Vec::new(), universe_causes, scc_values, type_tests, @@ -550,19 +498,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.scc_values.placeholders_contained_in(scc) } - /// Once region solving has completed, this function will return the member constraints that - /// were applied to the value of a given SCC `scc`. See `AppliedMemberConstraint`. 
- pub(crate) fn applied_member_constraints( - &self, - scc: ConstraintSccIndex, - ) -> &[AppliedMemberConstraint] { - binary_search_util::binary_search_slice( - &self.member_constraints_applied, - |applied| applied.member_region_scc, - &scc, - ) - } - /// Performs region inference and report errors if we see any /// unsatisfiable constraints. If this is a closure, returns the /// region requirements to propagate to our creator, if any. @@ -607,12 +542,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { debug!(?errors_buffer); - if errors_buffer.is_empty() { - self.check_member_constraints(infcx, &mut errors_buffer); - } - - debug!(?errors_buffer); - let outlives_requirements = outlives_requirements.unwrap_or_default(); if outlives_requirements.is_empty() { @@ -642,146 +571,15 @@ impl<'tcx> RegionInferenceContext<'tcx> { }); // To propagate constraints, we walk the DAG induced by the - // SCC. For each SCC, we visit its successors and compute + // SCC. For each SCC `A`, we visit its successors and compute // their values, then we union all those values to get our // own. - for scc in self.constraint_sccs.all_sccs() { - self.compute_value_for_scc(scc); - } - - // Sort the applied member constraints so we can binary search - // through them later. - self.member_constraints_applied.sort_by_key(|applied| applied.member_region_scc); - } - - /// Computes the value of the SCC `scc_a`, which has not yet been - /// computed, by unioning the values of its successors. - /// Assumes that all successors have been computed already - /// (which is assured by iterating over SCCs in dependency order). - #[instrument(skip(self), level = "debug")] - fn compute_value_for_scc(&mut self, scc_a: ConstraintSccIndex) { - // Walk each SCC `B` such that `A: B`... - for &scc_b in self.constraint_sccs.successors(scc_a) { - debug!(?scc_b); - self.scc_values.add_region(scc_a, scc_b); - } - - // Now take member constraints into account. - let member_constraints = Rc::clone(&self.member_constraints); - for m_c_i in member_constraints.indices(scc_a) { - self.apply_member_constraint(scc_a, m_c_i, member_constraints.choice_regions(m_c_i)); - } - - debug!(value = ?self.scc_values.region_value_str(scc_a)); - } - - /// Invoked for each `R0 member of [R1..Rn]` constraint. - /// - /// `scc` is the SCC containing R0, and `choice_regions` are the - /// `R1..Rn` regions -- they are always known to be universal - /// regions (and if that's not true, we just don't attempt to - /// enforce the constraint). - /// - /// The current value of `scc` at the time the method is invoked - /// is considered a *lower bound*. If possible, we will modify - /// the constraint to set it equal to one of the option regions. - /// If we make any changes, returns true, else false. - /// - /// This function only adds the member constraints to the region graph, - /// it does not check them. They are later checked in - /// `check_member_constraints` after the region graph has been computed. - #[instrument(skip(self, member_constraint_index), level = "debug")] - fn apply_member_constraint( - &mut self, - scc: ConstraintSccIndex, - member_constraint_index: NllMemberConstraintIndex, - choice_regions: &[ty::RegionVid], - ) { - // Create a mutable vector of the options. We'll try to winnow - // them down. - let mut choice_regions: Vec<ty::RegionVid> = choice_regions.to_vec(); - - // Convert to the SCC representative: sometimes we have inference - // variables in the member constraint that wind up equated with - // universal regions. 
The scc representative is the minimal numbered - // one from the corresponding scc so it will be the universal region - // if one exists. - for c_r in &mut choice_regions { - let scc = self.constraint_sccs.scc(*c_r); - *c_r = self.scc_representative(scc); - } - - // If the member region lives in a higher universe, we currently choose - // the most conservative option by leaving it unchanged. - if !self.max_placeholder_universe_reached(scc).is_root() { - return; - } - - // The existing value for `scc` is a lower-bound. This will - // consist of some set `{P} + {LB}` of points `{P}` and - // lower-bound free regions `{LB}`. As each choice region `O` - // is a free region, it will outlive the points. But we can - // only consider the option `O` if `O: LB`. - choice_regions.retain(|&o_r| { - self.scc_values - .universal_regions_outlived_by(scc) - .all(|lb| self.universal_region_relations.outlives(o_r, lb)) - }); - debug!(?choice_regions, "after lb"); - - // Now find all the *upper bounds* -- that is, each UB is a - // free region that must outlive the member region `R0` (`UB: - // R0`). Therefore, we need only keep an option `O` if `UB: O` - // for all UB. - let universal_region_relations = &self.universal_region_relations; - for ub in self.reverse_scc_graph().upper_bounds(scc) { - debug!(?ub); - choice_regions.retain(|&o_r| universal_region_relations.outlives(ub, o_r)); - } - debug!(?choice_regions, "after ub"); - - // At this point we can pick any member of `choice_regions` and would like to choose - // it to be a small as possible. To avoid potential non-determinism we will pick the - // smallest such choice. - // - // Because universal regions are only partially ordered (i.e, not every two regions are - // comparable), we will ignore any region that doesn't compare to all others when picking - // the minimum choice. - // - // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where - // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`. - // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`. - let totally_ordered_subset = choice_regions.iter().copied().filter(|&r1| { - choice_regions.iter().all(|&r2| { - self.universal_region_relations.outlives(r1, r2) - || self.universal_region_relations.outlives(r2, r1) - }) - }); - // Now we're left with `['static, 'c]`. Pick `'c` as the minimum! - let Some(min_choice) = totally_ordered_subset.reduce(|r1, r2| { - let r1_outlives_r2 = self.universal_region_relations.outlives(r1, r2); - let r2_outlives_r1 = self.universal_region_relations.outlives(r2, r1); - match (r1_outlives_r2, r2_outlives_r1) { - (true, true) => r1.min(r2), - (true, false) => r2, - (false, true) => r1, - (false, false) => bug!("incomparable regions in total order"), + for scc_a in self.constraint_sccs.all_sccs() { + // Walk each SCC `B` such that `A: B`... + for &scc_b in self.constraint_sccs.successors(scc_a) { + debug!(?scc_b); + self.scc_values.add_region(scc_a, scc_b); } - }) else { - debug!("no unique minimum choice"); - return; - }; - - // As we require `'scc: 'min_choice`, we have definitely already computed - // its `scc_values` at this point. 
- let min_choice_scc = self.constraint_sccs.scc(min_choice); - debug!(?min_choice, ?min_choice_scc); - if self.scc_values.add_region(scc, min_choice_scc) { - self.member_constraints_applied.push(AppliedMemberConstraint { - member_region_scc: scc, - min_choice, - member_constraint_index, - }); } } @@ -1376,13 +1174,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.scc_annotations[scc].max_nameable_universe() } - pub(crate) fn max_placeholder_universe_reached( - &self, - scc: ConstraintSccIndex, - ) -> UniverseIndex { - self.scc_annotations[scc].max_placeholder_universe_reached() - } - /// Checks the final value for the free region `fr` to see if it /// grew too large. In particular, examine what `end(X)` points /// wound up in `fr`'s final value; for each `end(X)` where `X != @@ -1551,43 +1342,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - #[instrument(level = "debug", skip(self, infcx, errors_buffer))] - fn check_member_constraints( - &self, - infcx: &InferCtxt<'tcx>, - errors_buffer: &mut RegionErrors<'tcx>, - ) { - let member_constraints = Rc::clone(&self.member_constraints); - for m_c_i in member_constraints.all_indices() { - debug!(?m_c_i); - let m_c = &member_constraints[m_c_i]; - let member_region_vid = m_c.member_region_vid; - debug!( - ?member_region_vid, - value = ?self.region_value_str(member_region_vid), - ); - let choice_regions = member_constraints.choice_regions(m_c_i); - debug!(?choice_regions); - - // Did the member region wind up equal to any of the option regions? - if let Some(o) = - choice_regions.iter().find(|&&o_r| self.eval_equal(o_r, m_c.member_region_vid)) - { - debug!("evaluated as equal to {:?}", o); - continue; - } - - // If not, report an error. - let member_region = ty::Region::new_var(infcx.tcx, member_region_vid); - errors_buffer.push(RegionErrorKind::UnexpectedHiddenRegion { - span: m_c.definition_span, - hidden_ty: m_c.hidden_ty, - key: m_c.key, - member_region, - }); - } - } - /// We have a constraint `fr1: fr2` that is not satisfied, where /// `fr2` represents some universal region. Here, `r` is some /// region where we know that `fr1: r` and this function has the @@ -1651,19 +1405,40 @@ impl<'tcx> RegionInferenceContext<'tcx> { } /// Walks the graph of constraints (where `'a: 'b` is considered - /// an edge `'a -> 'b`) to find all paths from `from_region` to - /// `to_region`. The paths are accumulated into the vector - /// `results`. The paths are stored as a series of - /// `ConstraintIndex` values -- in other words, a list of *edges*. + /// an edge `'a -> 'b`) to find a path from `from_region` to + /// `to_region`. /// /// Returns: a series of constraints as well as the region `R` /// that passed the target test. #[instrument(skip(self, target_test), ret)] - pub(crate) fn find_constraint_paths_between_regions( + pub(crate) fn find_constraint_path_between_regions( &self, from_region: RegionVid, target_test: impl Fn(RegionVid) -> bool, ) -> Option<(Vec<OutlivesConstraint<'tcx>>, RegionVid)> { + self.find_constraint_path_between_regions_inner(true, from_region, &target_test).or_else( + || self.find_constraint_path_between_regions_inner(false, from_region, &target_test), + ) + } + + /// The constraints we get from equating the hidden type of each use of an opaque + /// with its final concrete type may end up getting preferred over other, potentially + /// longer constraint paths. 
+ /// + /// Given that we compute the final concrete type by relying on this existing constraint + /// path, this can easily end up hiding the actual reason for why we require these regions + /// to be equal. + /// + /// To handle this, we first look at the path while ignoring these constraints and then + /// retry while considering them. This is not perfect, as the `from_region` may have already + /// been partially related to its argument region, so while we rely on a member constraint + /// to get a complete path, the most relevant step of that path already existed before then. + fn find_constraint_path_between_regions_inner( + &self, + ignore_opaque_type_constraints: bool, + from_region: RegionVid, + target_test: impl Fn(RegionVid) -> bool, + ) -> Option<(Vec<OutlivesConstraint<'tcx>>, RegionVid)> { let mut context = IndexVec::from_elem(Trace::NotVisited, &self.definitions); context[from_region] = Trace::StartRegion; @@ -1677,7 +1452,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { while let Some(r) = deque.pop_front() { debug!( - "find_constraint_paths_between_regions: from_region={:?} r={:?} value={}", + "find_constraint_path_between_regions: from_region={:?} r={:?} value={}", from_region, r, self.region_value_str(r), @@ -1711,20 +1486,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { result.push(c); } - Trace::FromMember(sup, sub, span) => { - let c = OutlivesConstraint { - sup, - sub, - locations: Locations::All(span), - span, - category: ConstraintCategory::OpaqueType, - variance_info: ty::VarianceDiagInfo::default(), - from_closure: false, - }; - p = c.sup; - result.push(c); - } - Trace::StartRegion => { result.reverse(); return Some((result, r)); @@ -1763,23 +1524,21 @@ impl<'tcx> RegionInferenceContext<'tcx> { let edges = self.constraint_graph.outgoing_edges_from_graph(r, &self.constraints); // This loop can be hot. for constraint in edges { - if matches!(constraint.category, ConstraintCategory::IllegalUniverse) { - debug!("Ignoring illegal universe constraint: {constraint:?}"); - continue; + match constraint.category { + ConstraintCategory::IllegalUniverse => { + debug!("Ignoring illegal universe constraint: {constraint:?}"); + continue; + } + ConstraintCategory::OpaqueType if ignore_opaque_type_constraints => { + debug!("Ignoring member constraint: {constraint:?}"); + continue; + } + _ => {} } debug_assert_eq!(constraint.sup, r); handle_trace(constraint.sub, Trace::FromGraph(constraint)); } } - - // Member constraints can also give rise to `'r: 'x` edges that - // were not part of the graph initially, so watch out for those. - // (But they are extremely rare; this loop is very cold.) 
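The retry strategy described above can be made concrete with a minimal, self-contained sketch. It is illustrative only: `find_path`, its parameters, and the boolean edge tag stand in for the real constraint graph, `OutlivesConstraint`, and `ConstraintCategory::OpaqueType`.

use std::collections::{HashMap, VecDeque};
use std::hash::Hash;

// Breadth-first search that prefers a path avoiding opaque-type constraint
// edges and only falls back to using them if no other path exists.
fn find_path<N: Copy + Eq + Hash>(
    start: N,
    is_target: impl Fn(N) -> bool,
    // Edges are `(successor, is_opaque_type_constraint)`.
    edges_from: impl Fn(N) -> Vec<(N, bool)>,
) -> Option<Vec<N>> {
    let bfs = |ignore_opaque: bool| -> Option<Vec<N>> {
        let mut prev: HashMap<N, N> = HashMap::new();
        let mut queue = VecDeque::from([start]);
        while let Some(node) = queue.pop_front() {
            if is_target(node) {
                // Walk the `prev` links backwards to reconstruct the path.
                let mut path = vec![node];
                let mut cur = node;
                while let Some(&p) = prev.get(&cur) {
                    path.push(p);
                    cur = p;
                }
                path.reverse();
                return Some(path);
            }
            for (succ, is_opaque) in edges_from(node) {
                if is_opaque && ignore_opaque {
                    continue;
                }
                if succ != start && !prev.contains_key(&succ) {
                    prev.insert(succ, node);
                    queue.push_back(succ);
                }
            }
        }
        None
    };
    // First pass ignores opaque-type constraints; the fallback pass uses them.
    bfs(true).or_else(|| bfs(false))
}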
- for constraint in self.applied_member_constraints(self.constraint_sccs.scc(r)) { - let sub = constraint.min_choice; - let p_c = &self.member_constraints[constraint.member_constraint_index]; - handle_trace(sub, Trace::FromMember(r, sub, p_c.definition_span)); - } } None @@ -1790,7 +1549,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { pub(crate) fn find_sub_region_live_at(&self, fr1: RegionVid, location: Location) -> RegionVid { trace!(scc = ?self.constraint_sccs.scc(fr1)); trace!(universe = ?self.max_nameable_universe(self.constraint_sccs.scc(fr1))); - self.find_constraint_paths_between_regions(fr1, |r| { + self.find_constraint_path_between_regions(fr1, |r| { // First look for some `r` such that `fr1: r` and `r` is live at `location` trace!(?r, liveness_constraints=?self.liveness_constraints.pretty_print_live_points(r)); self.liveness_constraints.is_live_at(r, location) @@ -1800,9 +1559,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // `fr1: r` and `r` is a placeholder from some universe // `fr1` cannot name. This would force `fr1` to be // `'static`. - self.find_constraint_paths_between_regions(fr1, |r| { - self.cannot_name_placeholder(fr1, r) - }) + self.find_constraint_path_between_regions(fr1, |r| self.cannot_name_placeholder(fr1, r)) }) .or_else(|| { // If we fail to find THAT, it may be that `fr1` is a @@ -1815,9 +1572,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // must be able to name the universe of R2, because R2 will // be at least `'empty(Universe(R2))`, and `R1` must be at // larger than that. - self.find_constraint_paths_between_regions(fr1, |r| { - self.cannot_name_placeholder(r, fr1) - }) + self.find_constraint_path_between_regions(fr1, |r| self.cannot_name_placeholder(r, fr1)) }) .map(|(_path, r)| r) .unwrap() @@ -1873,9 +1628,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { ) -> (BlameConstraint<'tcx>, Vec<OutlivesConstraint<'tcx>>) { // Find all paths let (path, target_region) = self - .find_constraint_paths_between_regions(from_region, target_test) + .find_constraint_path_between_regions(from_region, target_test) .or_else(|| { - self.find_constraint_paths_between_regions(from_region, |r| { + self.find_constraint_path_between_regions(from_region, |r| { self.cannot_name_placeholder(from_region, r) }) }) @@ -2111,11 +1866,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { &self.constraint_sccs } - /// Access to the region graph, built from the outlives constraints. - pub(crate) fn region_graph(&self) -> RegionGraph<'_, 'tcx, graph::Normal> { - self.constraint_graph.region_graph(&self.constraints, self.universal_regions().fr_static) - } - /// Returns the representative `RegionVid` for a given SCC. /// See `RegionTracker` for how a region variable ID is chosen. 
/// diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs deleted file mode 100644 index 23c4554aa15..00000000000 --- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs +++ /dev/null @@ -1,299 +0,0 @@ -use rustc_data_structures::fx::FxIndexMap; -use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin}; -use rustc_macros::extension; -use rustc_middle::ty::{ - self, DefiningScopeKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, - TypeVisitableExt, fold_regions, -}; -use rustc_span::Span; -use rustc_trait_selection::opaque_types::{ - InvalidOpaqueTypeArgs, check_opaque_type_parameter_valid, -}; -use tracing::{debug, instrument}; - -use super::RegionInferenceContext; -use crate::BorrowCheckRootCtxt; -use crate::session_diagnostics::LifetimeMismatchOpaqueParam; -use crate::universal_regions::RegionClassification; - -pub(crate) enum DeferredOpaqueTypeError<'tcx> { - InvalidOpaqueTypeArgs(InvalidOpaqueTypeArgs<'tcx>), - LifetimeMismatchOpaqueParam(LifetimeMismatchOpaqueParam<'tcx>), -} - -impl<'tcx> RegionInferenceContext<'tcx> { - /// Resolve any opaque types that were encountered while borrow checking - /// this item. This is then used to get the type in the `type_of` query. - /// - /// For example consider `fn f<'a>(x: &'a i32) -> impl Sized + 'a { x }`. - /// This is lowered to give HIR something like - /// - /// type f<'a>::_Return<'_x> = impl Sized + '_x; - /// fn f<'a>(x: &'a i32) -> f<'a>::_Return<'a> { x } - /// - /// When checking the return type record the type from the return and the - /// type used in the return value. In this case they might be `_Return<'1>` - /// and `&'2 i32` respectively. - /// - /// Once we to this method, we have completed region inference and want to - /// call `infer_opaque_definition_from_instantiation` to get the inferred - /// type of `_Return<'_x>`. `infer_opaque_definition_from_instantiation` - /// compares lifetimes directly, so we need to map the inference variables - /// back to concrete lifetimes: `'static`, `ReEarlyParam` or `ReLateParam`. - /// - /// First we map the regions in the generic parameters `_Return<'1>` to - /// their `external_name` giving `_Return<'a>`. This step is a bit involved. - /// See the [rustc-dev-guide chapter] for more info. - /// - /// Then we map all the lifetimes in the concrete type to an equal - /// universal region that occurs in the opaque type's args, in this case - /// this would result in `&'a i32`. We only consider regions in the args - /// in case there is an equal region that does not. For example, this should - /// be allowed: - /// `fn f<'a: 'b, 'b: 'a>(x: *mut &'b i32) -> impl Sized + 'a { x }` - /// - /// This will then allow `infer_opaque_definition_from_instantiation` to - /// determine that `_Return<'_x> = &'_x i32`. - /// - /// There's a slight complication around closures. Given - /// `fn f<'a: 'a>() { || {} }` the closure's type is something like - /// `f::<'a>::{{closure}}`. The region parameter from f is essentially - /// ignored by type checking so ends up being inferred to an empty region. - /// Calling `universal_upper_bound` for such a region gives `fr_fn_body`, - /// which has no `external_name` in which case we use `'{erased}` as the - /// region to pass to `infer_opaque_definition_from_instantiation`. 
- /// - /// [rustc-dev-guide chapter]: - /// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html - #[instrument(level = "debug", skip(self, root_cx, infcx))] - pub(crate) fn infer_opaque_types( - &self, - root_cx: &mut BorrowCheckRootCtxt<'tcx>, - infcx: &InferCtxt<'tcx>, - opaque_ty_decls: FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>>, - ) -> Vec<DeferredOpaqueTypeError<'tcx>> { - let mut errors = Vec::new(); - let mut decls_modulo_regions: FxIndexMap<OpaqueTypeKey<'tcx>, (OpaqueTypeKey<'tcx>, Span)> = - FxIndexMap::default(); - - for (opaque_type_key, concrete_type) in opaque_ty_decls { - debug!(?opaque_type_key, ?concrete_type); - - let mut arg_regions: Vec<(ty::RegionVid, ty::Region<'_>)> = - vec![(self.universal_regions().fr_static, infcx.tcx.lifetimes.re_static)]; - - let opaque_type_key = - opaque_type_key.fold_captured_lifetime_args(infcx.tcx, |region| { - // Use the SCC representative instead of directly using `region`. - // See [rustc-dev-guide chapter] § "Strict lifetime equality". - let scc = self.constraint_sccs.scc(region.as_var()); - let vid = self.scc_representative(scc); - let named = match self.definitions[vid].origin { - // Iterate over all universal regions in a consistent order and find the - // *first* equal region. This makes sure that equal lifetimes will have - // the same name and simplifies subsequent handling. - // See [rustc-dev-guide chapter] § "Semantic lifetime equality". - NllRegionVariableOrigin::FreeRegion => self - .universal_regions() - .universal_regions_iter() - .filter(|&ur| { - // See [rustc-dev-guide chapter] § "Closure restrictions". - !matches!( - self.universal_regions().region_classification(ur), - Some(RegionClassification::External) - ) - }) - .find(|&ur| self.universal_region_relations.equal(vid, ur)) - .map(|ur| self.definitions[ur].external_name.unwrap()), - NllRegionVariableOrigin::Placeholder(placeholder) => { - Some(ty::Region::new_placeholder(infcx.tcx, placeholder)) - } - NllRegionVariableOrigin::Existential { .. } => None, - } - .unwrap_or_else(|| { - ty::Region::new_error_with_message( - infcx.tcx, - concrete_type.span, - "opaque type with non-universal region args", - ) - }); - - arg_regions.push((vid, named)); - named - }); - debug!(?opaque_type_key, ?arg_regions); - - let concrete_type = fold_regions(infcx.tcx, concrete_type, |region, _| { - arg_regions - .iter() - .find(|&&(arg_vid, _)| self.eval_equal(region.as_var(), arg_vid)) - .map(|&(_, arg_named)| arg_named) - .unwrap_or(infcx.tcx.lifetimes.re_erased) - }); - debug!(?concrete_type); - - let ty = match infcx - .infer_opaque_definition_from_instantiation(opaque_type_key, concrete_type) - { - Ok(ty) => ty, - Err(err) => { - errors.push(DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err)); - continue; - } - }; - - // Sometimes, when the hidden type is an inference variable, it can happen that - // the hidden type becomes the opaque type itself. In this case, this was an opaque - // usage of the opaque type and we can ignore it. This check is mirrored in typeck's - // writeback. - if !infcx.next_trait_solver() { - if let ty::Alias(ty::Opaque, alias_ty) = ty.kind() - && alias_ty.def_id == opaque_type_key.def_id.to_def_id() - && alias_ty.args == opaque_type_key.args - { - continue; - } - } - - root_cx.add_concrete_opaque_type( - opaque_type_key.def_id, - OpaqueHiddenType { span: concrete_type.span, ty }, - ); - - // Check that all opaque types have the same region parameters if they have the same - // non-region parameters. 
This is necessary because within the new solver we perform - // various query operations modulo regions, and thus could unsoundly select some impls - // that don't hold. - if let Some((prev_decl_key, prev_span)) = decls_modulo_regions.insert( - infcx.tcx.erase_regions(opaque_type_key), - (opaque_type_key, concrete_type.span), - ) && let Some((arg1, arg2)) = std::iter::zip( - prev_decl_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg), - opaque_type_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg), - ) - .find(|(arg1, arg2)| arg1 != arg2) - { - errors.push(DeferredOpaqueTypeError::LifetimeMismatchOpaqueParam( - LifetimeMismatchOpaqueParam { - arg: arg1, - prev: arg2, - span: prev_span, - prev_span: concrete_type.span, - }, - )); - } - } - - errors - } - - /// Map the regions in the type to named regions. This is similar to what - /// `infer_opaque_types` does, but can infer any universal region, not only - /// ones from the args for the opaque type. It also doesn't double check - /// that the regions produced are in fact equal to the named region they are - /// replaced with. This is fine because this function is only to improve the - /// region names in error messages. - /// - /// This differs from `MirBorrowckCtxt::name_regions` since it is particularly - /// lax with mapping region vids that are *shorter* than a universal region to - /// that universal region. This is useful for member region constraints since - /// we want to suggest a universal region name to capture even if it's technically - /// not equal to the error region. - pub(crate) fn name_regions_for_member_constraint<T>(&self, tcx: TyCtxt<'tcx>, ty: T) -> T - where - T: TypeFoldable<TyCtxt<'tcx>>, - { - fold_regions(tcx, ty, |region, _| match region.kind() { - ty::ReVar(vid) => { - let scc = self.constraint_sccs.scc(vid); - - // Special handling of higher-ranked regions. - if !self.max_nameable_universe(scc).is_root() { - match self.scc_values.placeholders_contained_in(scc).enumerate().last() { - // If the region contains a single placeholder then they're equal. - Some((0, placeholder)) => { - return ty::Region::new_placeholder(tcx, placeholder); - } - - // Fallback: this will produce a cryptic error message. - _ => return region, - } - } - - // Find something that we can name - let upper_bound = self.approx_universal_upper_bound(vid); - if let Some(universal_region) = self.definitions[upper_bound].external_name { - return universal_region; - } - - // Nothing exact found, so we pick a named upper bound, if there's only one. - // If there's >1 universal region, then we probably are dealing w/ an intersection - // region which cannot be mapped back to a universal. - // FIXME: We could probably compute the LUB if there is one. - let scc = self.constraint_sccs.scc(vid); - let upper_bounds: Vec<_> = self - .reverse_scc_graph() - .upper_bounds(scc) - .filter_map(|vid| self.definitions[vid].external_name) - .filter(|r| !r.is_static()) - .collect(); - match &upper_bounds[..] 
{ - [universal_region] => *universal_region, - _ => region, - } - } - _ => region, - }) - } -} - -#[extension(pub trait InferCtxtExt<'tcx>)] -impl<'tcx> InferCtxt<'tcx> { - /// Given the fully resolved, instantiated type for an opaque - /// type, i.e., the value of an inference variable like C1 or C2 - /// (*), computes the "definition type" for an opaque type - /// definition -- that is, the inferred value of `Foo1<'x>` or - /// `Foo2<'x>` that we would conceptually use in its definition: - /// ```ignore (illustrative) - /// type Foo1<'x> = impl Bar<'x> = AAA; // <-- this type AAA - /// type Foo2<'x> = impl Bar<'x> = BBB; // <-- or this type BBB - /// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } - /// ``` - /// Note that these values are defined in terms of a distinct set of - /// generic parameters (`'x` instead of `'a`) from C1 or C2. The main - /// purpose of this function is to do that translation. - /// - /// (*) C1 and C2 were introduced in the comments on - /// `register_member_constraints`. Read that comment for more context. - /// - /// # Parameters - /// - /// - `def_id`, the `impl Trait` type - /// - `args`, the args used to instantiate this opaque type - /// - `instantiated_ty`, the inferred type C1 -- fully resolved, lifted version of - /// `opaque_defn.concrete_ty` - #[instrument(level = "debug", skip(self))] - fn infer_opaque_definition_from_instantiation( - &self, - opaque_type_key: OpaqueTypeKey<'tcx>, - instantiated_ty: OpaqueHiddenType<'tcx>, - ) -> Result<Ty<'tcx>, InvalidOpaqueTypeArgs<'tcx>> { - check_opaque_type_parameter_valid( - self, - opaque_type_key, - instantiated_ty.span, - DefiningScopeKind::MirBorrowck, - )?; - - let definition_ty = instantiated_ty - .remap_generic_params_to_declaration_params( - opaque_type_key, - self.tcx, - DefiningScopeKind::MirBorrowck, - ) - .ty; - - definition_ty.error_reported()?; - Ok(definition_ty) - } -} diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/member_constraints.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/member_constraints.rs new file mode 100644 index 00000000000..667fc440ac0 --- /dev/null +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/member_constraints.rs @@ -0,0 +1,194 @@ +use rustc_data_structures::fx::FxHashMap; +use rustc_hir::def_id::DefId; +use rustc_middle::bug; +use rustc_middle::ty::{ + self, GenericArgsRef, Region, RegionVid, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, + TypeVisitor, +}; +use tracing::{debug, instrument}; + +use super::DefiningUse; +use super::region_ctxt::RegionCtxt; +use crate::constraints::ConstraintSccIndex; + +pub(super) fn apply_member_constraints<'tcx>( + rcx: &mut RegionCtxt<'_, 'tcx>, + defining_uses: &[DefiningUse<'tcx>], +) { + // Start by collecting the member constraints of all defining uses. + // + // Applying member constraints can influence other member constraints, + // so we first collect and then apply them. + let mut member_constraints = Default::default(); + for defining_use in defining_uses { + let mut visitor = CollectMemberConstraintsVisitor { + rcx, + defining_use, + member_constraints: &mut member_constraints, + }; + defining_use.hidden_type.ty.visit_with(&mut visitor); + } + + // Now walk over the region graph, visiting the smallest regions first and then all + // regions which have to outlive that one. + // + // Whenever we encounter a member region, we mutate the value of this SCC. This is + // as if we'd introduce new outlives constraints. 
However, we discard these region + // values after we've inferred the hidden types of opaques and apply the region + // constraints by simply equating the actual hidden type with the inferred one. + debug!(?member_constraints); + for scc_a in rcx.constraint_sccs.all_sccs() { + debug!(?scc_a); + // Start by adding the region values required by outlives constraints. This + // matches how we compute the final region values in `fn compute_regions`. + // + // We need to do this here to get a lower bound when applying member constraints. + // This propagates the region values added by previous member constraints. + for &scc_b in rcx.constraint_sccs.successors(scc_a) { + debug!(?scc_b); + rcx.scc_values.add_region(scc_a, scc_b); + } + + for defining_use in member_constraints.get(&scc_a).into_iter().flatten() { + apply_member_constraint(rcx, scc_a, &defining_use.arg_regions); + } + } +} + +#[instrument(level = "debug", skip(rcx))] +fn apply_member_constraint<'tcx>( + rcx: &mut RegionCtxt<'_, 'tcx>, + member: ConstraintSccIndex, + arg_regions: &[RegionVid], +) { + // If the member region lives in a higher universe, we currently choose + // the most conservative option by leaving it unchanged. + if !rcx.max_placeholder_universe_reached(member).is_root() { + return; + } + + // The existing value of `'member` is a lower-bound. If its is already larger than + // some universal region, we cannot equate it with that region. Said differently, we + // ignore choice regions which are smaller than this member region. + let mut choice_regions = arg_regions + .iter() + .copied() + .map(|r| rcx.representative(r).rvid()) + .filter(|&choice_region| { + rcx.scc_values.universal_regions_outlived_by(member).all(|lower_bound| { + rcx.universal_region_relations.outlives(choice_region, lower_bound) + }) + }) + .collect::<Vec<_>>(); + debug!(?choice_regions, "after enforcing lower-bound"); + + // Now find all the *upper bounds* -- that is, each UB is a + // free region that must outlive the member region `R0` (`UB: + // R0`). Therefore, we need only keep an option `O` if `UB: O` + // for all UB. + // + // If we have a requirement `'upper_bound: 'member`, equating `'member` + // with some region `'choice` means we now also require `'upper_bound: 'choice`. + // Avoid choice regions for which this does not hold. + for ub in rcx.rev_scc_graph.upper_bounds(member) { + choice_regions + .retain(|&choice_region| rcx.universal_region_relations.outlives(ub, choice_region)); + } + debug!(?choice_regions, "after enforcing upper-bound"); + + // At this point we can pick any member of `choice_regions` and would like to choose + // it to be a small as possible. To avoid potential non-determinism we will pick the + // smallest such choice. + // + // Because universal regions are only partially ordered (i.e, not every two regions are + // comparable), we will ignore any region that doesn't compare to all others when picking + // the minimum choice. + // + // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where + // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`. + // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`. + let totally_ordered_subset = choice_regions.iter().copied().filter(|&r1| { + choice_regions.iter().all(|&r2| { + rcx.universal_region_relations.outlives(r1, r2) + || rcx.universal_region_relations.outlives(r2, r1) + }) + }); + // Now we're left with `['static, 'c]`. Pick `'c` as the minimum! 
+ let Some(min_choice) = totally_ordered_subset.reduce(|r1, r2| { + let r1_outlives_r2 = rcx.universal_region_relations.outlives(r1, r2); + let r2_outlives_r1 = rcx.universal_region_relations.outlives(r2, r1); + match (r1_outlives_r2, r2_outlives_r1) { + (true, true) => r1.min(r2), + (true, false) => r2, + (false, true) => r1, + (false, false) => bug!("incomparable regions in total order"), + } + }) else { + debug!("no unique minimum choice"); + return; + }; + + debug!(?min_choice); + // Lift the member region to be at least as large as this `min_choice` by directly + // mutating the `scc_values` as we compute it. This acts as if we've added a + // `'member: 'min_choice` while not recomputing sccs. This means different sccs + // may now actually be equal. + let min_choice_scc = rcx.constraint_sccs.scc(min_choice); + rcx.scc_values.add_region(member, min_choice_scc); +} + +struct CollectMemberConstraintsVisitor<'a, 'b, 'tcx> { + rcx: &'a RegionCtxt<'a, 'tcx>, + defining_use: &'b DefiningUse<'tcx>, + member_constraints: &'a mut FxHashMap<ConstraintSccIndex, Vec<&'b DefiningUse<'tcx>>>, +} +impl<'tcx> CollectMemberConstraintsVisitor<'_, '_, 'tcx> { + fn cx(&self) -> TyCtxt<'tcx> { + self.rcx.infcx.tcx + } + fn visit_closure_args(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) { + let generics = self.cx().generics_of(def_id); + for arg in args.iter().skip(generics.parent_count) { + arg.visit_with(self); + } + } +} +impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for CollectMemberConstraintsVisitor<'_, '_, 'tcx> { + fn visit_region(&mut self, r: Region<'tcx>) { + match r.kind() { + ty::ReBound(..) => return, + ty::ReVar(vid) => { + let scc = self.rcx.constraint_sccs.scc(vid); + self.member_constraints.entry(scc).or_default().push(self.defining_use); + } + _ => unreachable!(), + } + } + + fn visit_ty(&mut self, ty: Ty<'tcx>) { + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return; + } + + match *ty.kind() { + ty::Closure(def_id, args) + | ty::CoroutineClosure(def_id, args) + | ty::Coroutine(def_id, args) => self.visit_closure_args(def_id, args), + + ty::Alias(kind, ty::AliasTy { def_id, args, .. }) + if let Some(variances) = self.cx().opt_alias_variances(kind, def_id) => + { + // Skip lifetime parameters that are not captured, since they do + // not need member constraints registered for them; we'll erase + // them (and hopefully in the future replace them with placeholders). 
+ for (&v, arg) in std::iter::zip(variances, args.iter()) { + if v != ty::Bivariant { + arg.visit_with(self) + } + } + } + + _ => ty.super_visit_with(self), + } + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs new file mode 100644 index 00000000000..bee82e17835 --- /dev/null +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs @@ -0,0 +1,697 @@ +use std::iter; +use std::rc::Rc; + +use rustc_data_structures::frozen::Frozen; +use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::def_id::DefId; +use rustc_infer::infer::outlives::env::RegionBoundPairs; +use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, OpaqueTypeStorageEntries}; +use rustc_infer::traits::ObligationCause; +use rustc_macros::extension; +use rustc_middle::mir::{Body, ConstraintCategory}; +use rustc_middle::ty::{ + self, DefiningScopeKind, FallibleTypeFolder, GenericArg, GenericArgsRef, OpaqueHiddenType, + OpaqueTypeKey, Region, RegionVid, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable, + TypeVisitableExt, fold_regions, +}; +use rustc_mir_dataflow::points::DenseLocationMap; +use rustc_span::Span; +use rustc_trait_selection::opaque_types::{ + NonDefiningUseReason, opaque_type_has_defining_use_args, +}; +use rustc_trait_selection::solve::NoSolution; +use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp; +use tracing::{debug, instrument}; + +use super::reverse_sccs::ReverseSccGraph; +use crate::consumers::RegionInferenceContext; +use crate::session_diagnostics::LifetimeMismatchOpaqueParam; +use crate::type_check::canonical::fully_perform_op_raw; +use crate::type_check::free_region_relations::UniversalRegionRelations; +use crate::type_check::{Locations, MirTypeckRegionConstraints}; +use crate::universal_regions::{RegionClassification, UniversalRegions}; +use crate::{BorrowCheckRootCtxt, BorrowckInferCtxt}; + +mod member_constraints; +mod region_ctxt; + +use member_constraints::apply_member_constraints; +use region_ctxt::RegionCtxt; + +/// We defer errors from [fn handle_opaque_type_uses] and only report them +/// if there are no `RegionErrors`. If there are region errors, it's likely +/// that errors here are caused by them and don't need to be handled separately. +pub(crate) enum DeferredOpaqueTypeError<'tcx> { + InvalidOpaqueTypeArgs(NonDefiningUseReason<'tcx>), + LifetimeMismatchOpaqueParam(LifetimeMismatchOpaqueParam<'tcx>), + UnexpectedHiddenRegion { + /// The opaque type. + opaque_type_key: OpaqueTypeKey<'tcx>, + /// The hidden type containing the member region. + hidden_type: OpaqueHiddenType<'tcx>, + /// The unexpected region. + member_region: Region<'tcx>, + }, + NonDefiningUseInDefiningScope { + span: Span, + opaque_type_key: OpaqueTypeKey<'tcx>, + }, +} + +/// This looks at all uses of opaque types in their defining scope inside +/// of this function. +/// +/// It first uses all defining uses to compute the actual concrete type of each +/// opaque type definition. +/// +/// We then apply this inferred type to actually check all uses of the opaque. 
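As a source-level illustration of what a defining use and its member constraint look like (this example previously appeared in the module docs of the removed `opaque_types.rs`):

// The hidden type is `&'?x i32` for some region variable `'?x`. The member
// constraint restricts `'?x` to the regions the opaque can name, `{ 'static, 'a }`,
// and the smallest valid choice, `'a`, is picked.
fn f<'a>(x: &'a i32) -> impl Sized + 'a {
    x
}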
+pub(crate) fn handle_opaque_type_uses<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>, + region_bound_pairs: &RegionBoundPairs<'tcx>, + known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>], + location_map: &Rc<DenseLocationMap>, + constraints: &mut MirTypeckRegionConstraints<'tcx>, +) -> Vec<DeferredOpaqueTypeError<'tcx>> { + let tcx = infcx.tcx; + let opaque_types = infcx.clone_opaque_types(); + if opaque_types.is_empty() { + return Vec::new(); + } + + // We need to eagerly map all regions to NLL vars here, as we need to make sure we've + // introduced nll vars for all used placeholders. + // + // We need to resolve inference vars as even though we're in MIR typeck, we may still + // encounter inference variables, e.g. when checking user types. + let opaque_types_storage_num_entries = infcx.inner.borrow_mut().opaque_types().num_entries(); + let opaque_types = opaque_types + .into_iter() + .map(|entry| { + fold_regions(tcx, infcx.resolve_vars_if_possible(entry), |r, _| { + let vid = if let ty::RePlaceholder(placeholder) = r.kind() { + constraints.placeholder_region(infcx, placeholder).as_var() + } else { + universal_region_relations.universal_regions.to_region_vid(r) + }; + Region::new_var(tcx, vid) + }) + }) + .collect::<Vec<_>>(); + + debug!(?opaque_types); + + let errors = compute_concrete_opaque_types( + root_cx, + infcx, + constraints, + universal_region_relations, + Rc::clone(location_map), + &opaque_types, + ); + + if !errors.is_empty() { + return errors; + } + + let errors = apply_computed_concrete_opaque_types( + root_cx, + infcx, + body, + &universal_region_relations.universal_regions, + region_bound_pairs, + known_type_outlives_obligations, + constraints, + &opaque_types, + ); + + detect_opaque_types_added_while_handling_opaque_types(infcx, opaque_types_storage_num_entries); + + errors +} + +/// Maps an NLL var to a deterministically chosen equal universal region. +/// +/// See the corresponding [rustc-dev-guide chapter] for more details. This +/// ignores changes to the region values due to member constraints. Applying +/// member constraints does not impact the result of this function. +/// +/// [rustc-dev-guide chapter]: https://rustc-dev-guide.rust-lang.org/borrow_check/opaque-types-region-inference-restrictions.html +fn nll_var_to_universal_region<'tcx>( + rcx: &RegionCtxt<'_, 'tcx>, + r: RegionVid, +) -> Option<Region<'tcx>> { + // Use the SCC representative instead of directly using `region`. + // See [rustc-dev-guide chapter] § "Strict lifetime equality". + let vid = rcx.representative(r).rvid(); + match rcx.definitions[vid].origin { + // Iterate over all universal regions in a consistent order and find the + // *first* equal region. This makes sure that equal lifetimes will have + // the same name and simplifies subsequent handling. + // See [rustc-dev-guide chapter] § "Semantic lifetime equality". + NllRegionVariableOrigin::FreeRegion => rcx + .universal_regions() + .universal_regions_iter() + .filter(|&ur| { + // See [rustc-dev-guide chapter] § "Closure restrictions". 
+ !matches!( + rcx.universal_regions().region_classification(ur), + Some(RegionClassification::External) + ) + }) + .find(|&ur| rcx.universal_region_relations.equal(vid, ur)) + .map(|ur| rcx.definitions[ur].external_name.unwrap()), + NllRegionVariableOrigin::Placeholder(placeholder) => { + Some(ty::Region::new_placeholder(rcx.infcx.tcx, placeholder)) + } + // If `r` were equal to any universal region, its SCC representative + // would have been set to a free region. + NllRegionVariableOrigin::Existential { .. } => None, + } +} + +#[derive(Debug)] +struct DefiningUse<'tcx> { + /// The opaque type using non NLL vars. This uses the actual + /// free regions and placeholders. This is necessary + /// to interact with code outside of `rustc_borrowck`. + opaque_type_key: OpaqueTypeKey<'tcx>, + arg_regions: Vec<RegionVid>, + hidden_type: OpaqueHiddenType<'tcx>, +} + +/// This computes the actual hidden types of the opaque types and maps them to their +/// definition sites. Outside of registering the computed concrete types this function +/// does not mutate the current borrowck state. +/// +/// While it may fail to infer the hidden type and return errors, we always apply +/// the computed concrete hidden type to all opaque type uses to check whether they +/// are correct. This is necessary to support non-defining uses of opaques in their +/// defining scope. +/// +/// It also means that this whole function is not really soundness critical as we +/// recheck all uses of the opaques regardless. +fn compute_concrete_opaque_types<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, + constraints: &MirTypeckRegionConstraints<'tcx>, + universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>, + location_map: Rc<DenseLocationMap>, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], +) -> Vec<DeferredOpaqueTypeError<'tcx>> { + let mut errors = Vec::new(); + // When computing the hidden type we need to track member constraints. + // We don't mutate the region graph used by `fn compute_regions` but instead + // manually track region information via a `RegionCtxt`. We discard this + // information at the end of this function. + let mut rcx = RegionCtxt::new(infcx, universal_region_relations, location_map, constraints); + + // We start by checking each use of an opaque type during type check and + // check whether the generic arguments of the opaque type are fully + // universal, if so, it's a defining use. + let defining_uses = collect_defining_uses(root_cx, &mut rcx, opaque_types, &mut errors); + + // We now compute and apply member constraints for all regions in the hidden + // types of each defining use. This mutates the region values of the `rcx` which + // is used when mapping the defining uses to the definition site. + apply_member_constraints(&mut rcx, &defining_uses); + + // After applying member constraints, we now check whether all member regions ended + // up equal to one of their choice regions and compute the actual concrete type of + // the opaque type definition. This is stored in the `root_cx`. 
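A second source-level example, also carried over from the removed module docs, shows why captured lifetime arguments are folded to one consistently chosen equal universal region in `nll_var_to_universal_region`:

// `'a` and `'b` are required to be equal, so both are mapped to the same
// (first equal) universal region; the hidden type `*mut &'b i32` can then be
// expressed in terms of the opaque's captured argument `'a`.
fn f<'a: 'b, 'b: 'a>(x: *mut &'b i32) -> impl Sized + 'a {
    x
}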
+ compute_concrete_types_from_defining_uses(root_cx, &rcx, &defining_uses, &mut errors); + errors +} + +#[instrument(level = "debug", skip_all, ret)] +fn collect_defining_uses<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + rcx: &mut RegionCtxt<'_, 'tcx>, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], + errors: &mut Vec<DeferredOpaqueTypeError<'tcx>>, +) -> Vec<DefiningUse<'tcx>> { + let infcx = rcx.infcx; + let mut defining_uses = vec![]; + for &(opaque_type_key, hidden_type) in opaque_types { + let non_nll_opaque_type_key = opaque_type_key.fold_captured_lifetime_args(infcx.tcx, |r| { + nll_var_to_universal_region(&rcx, r.as_var()).unwrap_or(r) + }); + if let Err(err) = opaque_type_has_defining_use_args( + infcx, + non_nll_opaque_type_key, + hidden_type.span, + DefiningScopeKind::MirBorrowck, + ) { + // A non-defining use. This is a hard error on stable and gets ignored + // with `TypingMode::Borrowck`. + if infcx.tcx.use_typing_mode_borrowck() { + match err { + NonDefiningUseReason::Tainted(guar) => root_cx.add_concrete_opaque_type( + opaque_type_key.def_id, + OpaqueHiddenType::new_error(infcx.tcx, guar), + ), + _ => debug!(?non_nll_opaque_type_key, ?err, "ignoring non-defining use"), + } + } else { + errors.push(DeferredOpaqueTypeError::InvalidOpaqueTypeArgs(err)); + } + continue; + } + + // We use the original `opaque_type_key` to compute the `arg_regions`. + let arg_regions = iter::once(rcx.universal_regions().fr_static) + .chain( + opaque_type_key + .iter_captured_args(infcx.tcx) + .filter_map(|(_, arg)| arg.as_region()) + .map(Region::as_var), + ) + .collect(); + defining_uses.push(DefiningUse { + opaque_type_key: non_nll_opaque_type_key, + arg_regions, + hidden_type, + }); + } + + defining_uses +} + +fn compute_concrete_types_from_defining_uses<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + rcx: &RegionCtxt<'_, 'tcx>, + defining_uses: &[DefiningUse<'tcx>], + errors: &mut Vec<DeferredOpaqueTypeError<'tcx>>, +) { + let infcx = rcx.infcx; + let tcx = infcx.tcx; + let mut decls_modulo_regions: FxIndexMap<OpaqueTypeKey<'tcx>, (OpaqueTypeKey<'tcx>, Span)> = + FxIndexMap::default(); + for &DefiningUse { opaque_type_key, ref arg_regions, hidden_type } in defining_uses { + // After applying member constraints, we now map all regions in the hidden type + // to the `arg_regions` of this defining use. In case a region in the hidden type + // ended up not being equal to any such region, we error. + let hidden_type = + match hidden_type.try_fold_with(&mut ToArgRegionsFolder::new(rcx, arg_regions)) { + Ok(hidden_type) => hidden_type, + Err(r) => { + errors.push(DeferredOpaqueTypeError::UnexpectedHiddenRegion { + hidden_type, + opaque_type_key, + member_region: ty::Region::new_var(tcx, r), + }); + let guar = tcx.dcx().span_delayed_bug( + hidden_type.span, + "opaque type with non-universal region args", + ); + ty::OpaqueHiddenType::new_error(tcx, guar) + } + }; + + // Now that we mapped the member regions to their final value, + // map the arguments of the opaque type key back to the parameters + // of the opaque type definition. + let ty = infcx + .infer_opaque_definition_from_instantiation(opaque_type_key, hidden_type) + .unwrap_or_else(|_| { + Ty::new_error_with_message( + rcx.infcx.tcx, + hidden_type.span, + "deferred invalid opaque type args", + ) + }); + + // Sometimes, when the hidden type is an inference variable, it can happen that + // the hidden type becomes the opaque type itself. 
In this case, this was an opaque + // usage of the opaque type and we can ignore it. This check is mirrored in typeck's + // writeback. + if !rcx.infcx.tcx.use_typing_mode_borrowck() { + if let ty::Alias(ty::Opaque, alias_ty) = ty.kind() + && alias_ty.def_id == opaque_type_key.def_id.to_def_id() + && alias_ty.args == opaque_type_key.args + { + continue; + } + } + + // Check that all opaque types have the same region parameters if they have the same + // non-region parameters. This is necessary because within the new solver we perform + // various query operations modulo regions, and thus could unsoundly select some impls + // that don't hold. + // + // FIXME(-Znext-solver): This isn't necessary after all. We can remove this check again. + if let Some((prev_decl_key, prev_span)) = decls_modulo_regions.insert( + rcx.infcx.tcx.erase_regions(opaque_type_key), + (opaque_type_key, hidden_type.span), + ) && let Some((arg1, arg2)) = std::iter::zip( + prev_decl_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg), + opaque_type_key.iter_captured_args(infcx.tcx).map(|(_, arg)| arg), + ) + .find(|(arg1, arg2)| arg1 != arg2) + { + errors.push(DeferredOpaqueTypeError::LifetimeMismatchOpaqueParam( + LifetimeMismatchOpaqueParam { + arg: arg1, + prev: arg2, + span: prev_span, + prev_span: hidden_type.span, + }, + )); + } + root_cx.add_concrete_opaque_type( + opaque_type_key.def_id, + OpaqueHiddenType { span: hidden_type.span, ty }, + ); + } +} + +/// A folder to map the regions in the hidden type to their corresponding `arg_regions`. +/// +/// This folder has to differentiate between member regions and other regions in the hidden +/// type. Member regions have to be equal to one of the `arg_regions` while other regions simply +/// get treated as an existential region in the opaque if they are not. Existential +/// regions are currently represented using `'erased`. +struct ToArgRegionsFolder<'a, 'tcx> { + rcx: &'a RegionCtxt<'a, 'tcx>, + // When folding closure args or bivariant alias arguments, we simply + // ignore non-member regions. However, we still need to map member + // regions to their arg region even if its in a closure argument. + // + // See tests/ui/type-alias-impl-trait/closure_wf_outlives.rs for an example. 
+ erase_unknown_regions: bool, + arg_regions: &'a [RegionVid], +} + +impl<'a, 'tcx> ToArgRegionsFolder<'a, 'tcx> { + fn new( + rcx: &'a RegionCtxt<'a, 'tcx>, + arg_regions: &'a [RegionVid], + ) -> ToArgRegionsFolder<'a, 'tcx> { + ToArgRegionsFolder { rcx, erase_unknown_regions: false, arg_regions } + } + + fn fold_non_member_arg(&mut self, arg: GenericArg<'tcx>) -> GenericArg<'tcx> { + let prev = self.erase_unknown_regions; + self.erase_unknown_regions = true; + let res = arg.try_fold_with(self).unwrap(); + self.erase_unknown_regions = prev; + res + } + + fn fold_closure_args( + &mut self, + def_id: DefId, + args: GenericArgsRef<'tcx>, + ) -> Result<GenericArgsRef<'tcx>, RegionVid> { + let generics = self.cx().generics_of(def_id); + self.cx().mk_args_from_iter(args.iter().enumerate().map(|(index, arg)| { + if index < generics.parent_count { + Ok(self.fold_non_member_arg(arg)) + } else { + arg.try_fold_with(self) + } + })) + } +} +impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for ToArgRegionsFolder<'_, 'tcx> { + type Error = RegionVid; + fn cx(&self) -> TyCtxt<'tcx> { + self.rcx.infcx.tcx + } + + fn try_fold_region(&mut self, r: Region<'tcx>) -> Result<Region<'tcx>, RegionVid> { + match r.kind() { + // ignore bound regions, keep visiting + ty::ReBound(_, _) => Ok(r), + _ => { + let r = r.as_var(); + if let Some(arg_region) = self + .arg_regions + .iter() + .copied() + .find(|&arg_vid| self.rcx.eval_equal(r, arg_vid)) + .and_then(|r| nll_var_to_universal_region(self.rcx, r)) + { + Ok(arg_region) + } else if self.erase_unknown_regions { + Ok(self.cx().lifetimes.re_erased) + } else { + Err(r) + } + } + } + } + + fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, RegionVid> { + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return Ok(ty); + } + + let tcx = self.cx(); + Ok(match *ty.kind() { + ty::Closure(def_id, args) => { + Ty::new_closure(tcx, def_id, self.fold_closure_args(def_id, args)?) + } + + ty::CoroutineClosure(def_id, args) => { + Ty::new_coroutine_closure(tcx, def_id, self.fold_closure_args(def_id, args)?) + } + + ty::Coroutine(def_id, args) => { + Ty::new_coroutine(tcx, def_id, self.fold_closure_args(def_id, args)?) + } + + ty::Alias(kind, ty::AliasTy { def_id, args, .. }) + if let Some(variances) = tcx.opt_alias_variances(kind, def_id) => + { + let args = tcx.mk_args_from_iter(std::iter::zip(variances, args.iter()).map( + |(&v, s)| { + if v == ty::Bivariant { + Ok(self.fold_non_member_arg(s)) + } else { + s.try_fold_with(self) + } + }, + ))?; + ty::AliasTy::new_from_args(tcx, def_id, args).to_ty(tcx) + } + + _ => ty.try_super_fold_with(self)?, + }) + } +} + +/// This function is what actually applies member constraints to the borrowck +/// state. It is also responsible to check all uses of the opaques in their +/// defining scope. +/// +/// It does this by equating the hidden type of each use with the instantiated final +/// hidden type of the opaque. 
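The "instantiated final hidden type" mentioned above involves re-opening regions that the computed concrete type does not constrain (stored as erased) as fresh existentials before equating. A minimal sketch with stand-in types; `Region`, `instantiate`, and the counter are illustrative only, not rustc APIs:

// Regions that the computed concrete type does not constrain were stored as
// erased; before equating with a use's hidden type, each one is replaced by a
// fresh, unconstrained existential region variable.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Region {
    Erased,
    Named(u32),
    Var(u32),
}

fn instantiate(expected: &[Region], next_var: &mut u32) -> Vec<Region> {
    expected
        .iter()
        .map(|&r| match r {
            Region::Erased => {
                let fresh = Region::Var(*next_var);
                *next_var += 1;
                fresh
            }
            other => other,
        })
        .collect()
}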
+fn apply_computed_concrete_opaque_types<'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + universal_regions: &UniversalRegions<'tcx>, + region_bound_pairs: &RegionBoundPairs<'tcx>, + known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>], + constraints: &mut MirTypeckRegionConstraints<'tcx>, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], +) -> Vec<DeferredOpaqueTypeError<'tcx>> { + let tcx = infcx.tcx; + let mut errors = Vec::new(); + for &(key, hidden_type) in opaque_types { + let Some(expected) = root_cx.get_concrete_opaque_type(key.def_id) else { + assert!(tcx.use_typing_mode_borrowck(), "non-defining use in defining scope"); + errors.push(DeferredOpaqueTypeError::NonDefiningUseInDefiningScope { + span: hidden_type.span, + opaque_type_key: key, + }); + let guar = tcx.dcx().span_delayed_bug( + hidden_type.span, + "non-defining use in the defining scope with no defining uses", + ); + root_cx.add_concrete_opaque_type(key.def_id, OpaqueHiddenType::new_error(tcx, guar)); + continue; + }; + + // We erase all non-member region of the opaque and need to treat these as existentials. + let expected = ty::fold_regions(tcx, expected.instantiate(tcx, key.args), |re, _dbi| { + match re.kind() { + ty::ReErased => infcx.next_nll_region_var( + NllRegionVariableOrigin::Existential { name: None }, + || crate::RegionCtxt::Existential(None), + ), + _ => re, + } + }); + + // We now simply equate the expected with the actual hidden type. + let locations = Locations::All(hidden_type.span); + if let Err(guar) = fully_perform_op_raw( + infcx, + body, + universal_regions, + region_bound_pairs, + known_type_outlives_obligations, + constraints, + locations, + ConstraintCategory::OpaqueType, + CustomTypeOp::new( + |ocx| { + let cause = ObligationCause::misc( + hidden_type.span, + body.source.def_id().expect_local(), + ); + // We need to normalize both types in the old solver before equatingt them. + let actual_ty = ocx.normalize(&cause, infcx.param_env, hidden_type.ty); + let expected_ty = ocx.normalize(&cause, infcx.param_env, expected.ty); + ocx.eq(&cause, infcx.param_env, actual_ty, expected_ty).map_err(|_| NoSolution) + }, + "equating opaque types", + ), + ) { + root_cx.add_concrete_opaque_type(key.def_id, OpaqueHiddenType::new_error(tcx, guar)); + } + } + errors +} + +/// In theory `apply_concrete_opaque_types` could introduce new uses of opaque types. +/// We do not check these new uses so this could be unsound. +/// +/// We detect any new uses and simply delay a bug if they occur. If this results in +/// an ICE we can properly handle this, but we haven't encountered any such test yet. +/// +/// See the related comment in `FnCtxt::detect_opaque_types_added_during_writeback`. +fn detect_opaque_types_added_while_handling_opaque_types<'tcx>( + infcx: &InferCtxt<'tcx>, + opaque_types_storage_num_entries: OpaqueTypeStorageEntries, +) { + for (key, hidden_type) in infcx + .inner + .borrow_mut() + .opaque_types() + .opaque_types_added_since(opaque_types_storage_num_entries) + { + let opaque_type_string = infcx.tcx.def_path_str(key.def_id); + let msg = format!("unexpected cyclic definition of `{opaque_type_string}`"); + infcx.dcx().span_delayed_bug(hidden_type.span, msg); + } + + let _ = infcx.take_opaque_types(); +} + +impl<'tcx> RegionInferenceContext<'tcx> { + /// Map the regions in the type to named regions. 
This is similar to what + /// `infer_opaque_types` does, but can infer any universal region, not only + /// ones from the args for the opaque type. It also doesn't double check + /// that the regions produced are in fact equal to the named region they are + /// replaced with. This is fine because this function is only to improve the + /// region names in error messages. + /// + /// This differs from `MirBorrowckCtxt::name_regions` since it is particularly + /// lax with mapping region vids that are *shorter* than a universal region to + /// that universal region. This is useful for member region constraints since + /// we want to suggest a universal region name to capture even if it's technically + /// not equal to the error region. + pub(crate) fn name_regions_for_member_constraint<T>(&self, tcx: TyCtxt<'tcx>, ty: T) -> T + where + T: TypeFoldable<TyCtxt<'tcx>>, + { + fold_regions(tcx, ty, |region, _| match region.kind() { + ty::ReVar(vid) => { + let scc = self.constraint_sccs.scc(vid); + + // Special handling of higher-ranked regions. + if !self.max_nameable_universe(scc).is_root() { + match self.scc_values.placeholders_contained_in(scc).enumerate().last() { + // If the region contains a single placeholder then they're equal. + Some((0, placeholder)) => { + return ty::Region::new_placeholder(tcx, placeholder); + } + + // Fallback: this will produce a cryptic error message. + _ => return region, + } + } + + // Find something that we can name + let upper_bound = self.approx_universal_upper_bound(vid); + if let Some(universal_region) = self.definitions[upper_bound].external_name { + return universal_region; + } + + // Nothing exact found, so we pick a named upper bound, if there's only one. + // If there's >1 universal region, then we probably are dealing w/ an intersection + // region which cannot be mapped back to a universal. + // FIXME: We could probably compute the LUB if there is one. + let scc = self.constraint_sccs.scc(vid); + let rev_scc_graph = + ReverseSccGraph::compute(&self.constraint_sccs, self.universal_regions()); + let upper_bounds: Vec<_> = rev_scc_graph + .upper_bounds(scc) + .filter_map(|vid| self.definitions[vid].external_name) + .filter(|r| !r.is_static()) + .collect(); + match &upper_bounds[..] { + [universal_region] => *universal_region, + _ => region, + } + } + _ => region, + }) + } +} + +#[extension(pub trait InferCtxtExt<'tcx>)] +impl<'tcx> InferCtxt<'tcx> { + /// Given the fully resolved, instantiated type for an opaque + /// type, i.e., the value of an inference variable like C1 or C2 + /// (*), computes the "definition type" for an opaque type + /// definition -- that is, the inferred value of `Foo1<'x>` or + /// `Foo2<'x>` that we would conceptually use in its definition: + /// ```ignore (illustrative) + /// type Foo1<'x> = impl Bar<'x> = AAA; // <-- this type AAA + /// type Foo2<'x> = impl Bar<'x> = BBB; // <-- or this type BBB + /// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } + /// ``` + /// Note that these values are defined in terms of a distinct set of + /// generic parameters (`'x` instead of `'a`) from C1 or C2. The main + /// purpose of this function is to do that translation. + /// + /// (*) C1 and C2 were introduced in the comments on + /// `register_member_constraints`. Read that comment for more context. 
+ /// + /// # Parameters + /// + /// - `def_id`, the `impl Trait` type + /// - `args`, the args used to instantiate this opaque type + /// - `instantiated_ty`, the inferred type C1 -- fully resolved, lifted version of + /// `opaque_defn.concrete_ty` + #[instrument(level = "debug", skip(self))] + fn infer_opaque_definition_from_instantiation( + &self, + opaque_type_key: OpaqueTypeKey<'tcx>, + instantiated_ty: OpaqueHiddenType<'tcx>, + ) -> Result<Ty<'tcx>, NonDefiningUseReason<'tcx>> { + opaque_type_has_defining_use_args( + self, + opaque_type_key, + instantiated_ty.span, + DefiningScopeKind::MirBorrowck, + )?; + + let definition_ty = instantiated_ty + .remap_generic_params_to_declaration_params( + opaque_type_key, + self.tcx, + DefiningScopeKind::MirBorrowck, + ) + .ty; + + definition_ty.error_reported()?; + Ok(definition_ty) + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/region_ctxt.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/region_ctxt.rs new file mode 100644 index 00000000000..88326e4eebf --- /dev/null +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/region_ctxt.rs @@ -0,0 +1,114 @@ +use std::rc::Rc; + +use rustc_data_structures::frozen::Frozen; +use rustc_index::IndexVec; +use rustc_infer::infer::NllRegionVariableOrigin; +use rustc_middle::ty::{RegionVid, UniverseIndex}; +use rustc_mir_dataflow::points::DenseLocationMap; + +use crate::BorrowckInferCtxt; +use crate::constraints::ConstraintSccIndex; +use crate::handle_placeholders::{SccAnnotations, region_definitions}; +use crate::region_infer::reverse_sccs::ReverseSccGraph; +use crate::region_infer::values::RegionValues; +use crate::region_infer::{ConstraintSccs, RegionDefinition, RegionTracker, Representative}; +use crate::type_check::MirTypeckRegionConstraints; +use crate::type_check::free_region_relations::UniversalRegionRelations; +use crate::universal_regions::UniversalRegions; + +/// A slimmed down version of [crate::region_infer::RegionInferenceContext] used +/// only by opaque type handling. +pub(super) struct RegionCtxt<'a, 'tcx> { + pub(super) infcx: &'a BorrowckInferCtxt<'tcx>, + pub(super) definitions: Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>, + pub(super) universal_region_relations: &'a UniversalRegionRelations<'tcx>, + pub(super) constraint_sccs: ConstraintSccs, + pub(super) scc_annotations: IndexVec<ConstraintSccIndex, RegionTracker>, + pub(super) rev_scc_graph: ReverseSccGraph, + pub(super) scc_values: RegionValues<ConstraintSccIndex>, +} + +impl<'a, 'tcx> RegionCtxt<'a, 'tcx> { + /// Creates a new `RegionCtxt` used to compute defining opaque type uses. + /// + /// This does not yet propagate region values. This is instead done lazily + /// when applying member constraints. 
+ pub(super) fn new( + infcx: &'a BorrowckInferCtxt<'tcx>, + universal_region_relations: &'a Frozen<UniversalRegionRelations<'tcx>>, + location_map: Rc<DenseLocationMap>, + constraints: &MirTypeckRegionConstraints<'tcx>, + ) -> RegionCtxt<'a, 'tcx> { + let universal_regions = &universal_region_relations.universal_regions; + let (definitions, _has_placeholders) = region_definitions(infcx, universal_regions); + let mut scc_annotations = SccAnnotations::init(&definitions); + let constraint_sccs = ConstraintSccs::new_with_annotation( + &constraints + .outlives_constraints + .graph(definitions.len()) + .region_graph(&constraints.outlives_constraints, universal_regions.fr_static), + &mut scc_annotations, + ); + let scc_annotations = scc_annotations.scc_to_annotation; + + // Unlike the `RegionInferenceContext`, we only care about free regions + // and fully ignore liveness and placeholders. + let placeholder_indices = Default::default(); + let mut scc_values = + RegionValues::new(location_map, universal_regions.len(), placeholder_indices); + for variable in definitions.indices() { + let scc = constraint_sccs.scc(variable); + match definitions[variable].origin { + NllRegionVariableOrigin::FreeRegion => { + scc_values.add_element(scc, variable); + } + _ => {} + } + } + + let rev_scc_graph = ReverseSccGraph::compute(&constraint_sccs, universal_regions); + RegionCtxt { + infcx, + definitions, + universal_region_relations, + constraint_sccs, + scc_annotations, + rev_scc_graph, + scc_values, + } + } + + pub(super) fn representative(&self, vid: RegionVid) -> Representative { + let scc = self.constraint_sccs.scc(vid); + self.scc_annotations[scc].representative + } + + pub(crate) fn max_placeholder_universe_reached( + &self, + scc: ConstraintSccIndex, + ) -> UniverseIndex { + self.scc_annotations[scc].max_placeholder_universe_reached() + } + + pub(super) fn universal_regions(&self) -> &UniversalRegions<'tcx> { + &self.universal_region_relations.universal_regions + } + + pub(super) fn eval_equal(&self, r1_vid: RegionVid, r2_vid: RegionVid) -> bool { + let r1 = self.constraint_sccs.scc(r1_vid); + let r2 = self.constraint_sccs.scc(r2_vid); + + if r1 == r2 { + return true; + } + + let universal_outlives = |sub, sup| { + self.scc_values.universal_regions_outlived_by(sub).all(|r1| { + self.scc_values + .universal_regions_outlived_by(sup) + .any(|r2| self.universal_region_relations.outlives(r2, r1)) + }) + }; + universal_outlives(r1, r2) && universal_outlives(r2, r1) + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs index 604265f8940..e8da85eccef 100644 --- a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs +++ b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs @@ -5,7 +5,6 @@ use rustc_data_structures::graph; use rustc_data_structures::graph::vec_graph::VecGraph; use rustc_middle::ty::RegionVid; -use crate::RegionInferenceContext; use crate::constraints::ConstraintSccIndex; use crate::region_infer::ConstraintSccs; use crate::universal_regions::UniversalRegions; @@ -57,12 +56,3 @@ impl ReverseSccGraph { .filter(move |r| duplicates.insert(*r)) } } - -impl RegionInferenceContext<'_> { - /// Return the reverse graph of the region SCCs, initialising it if needed. 
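The `RegionCtxt::eval_equal` check added above in `region_ctxt.rs` reduces SCC equality to a two-way covering test over universal regions. A hedged standalone sketch, where `outlives` and the two slices stand in for `universal_region_relations` and `scc_values.universal_regions_outlived_by`:

// Two SCCs are treated as equal when every universal region outlived by one is
// outlived by some universal region outlived by the other, in both directions.
fn sccs_equal(
    outlived_by_a: &[u32],
    outlived_by_b: &[u32],
    outlives: impl Fn(u32, u32) -> bool,
) -> bool {
    let covers = |covering: &[u32], covered: &[u32]| {
        covered.iter().all(|&r1| covering.iter().any(|&r2| outlives(r2, r1)))
    };
    covers(outlived_by_a, outlived_by_b) && covers(outlived_by_b, outlived_by_a)
}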
- pub(super) fn reverse_scc_graph(&self) -> &ReverseSccGraph { - self.rev_scc_graph.get_or_init(|| { - ReverseSccGraph::compute(&self.constraint_sccs, self.universal_regions()) - }) - } -} diff --git a/compiler/rustc_borrowck/src/root_cx.rs b/compiler/rustc_borrowck/src/root_cx.rs index 9b1d12aede5..4e90ae391bb 100644 --- a/compiler/rustc_borrowck/src/root_cx.rs +++ b/compiler/rustc_borrowck/src/root_cx.rs @@ -2,7 +2,7 @@ use rustc_abi::FieldIdx; use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::LocalDefId; use rustc_middle::bug; -use rustc_middle::ty::{OpaqueHiddenType, Ty, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{EarlyBinder, OpaqueHiddenType, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::ErrorGuaranteed; use smallvec::SmallVec; @@ -19,7 +19,7 @@ pub(super) struct BorrowCheckRootCtxt<'tcx> { tainted_by_errors: Option<ErrorGuaranteed>, /// This should be `None` during normal compilation. See [`crate::consumers`] for more /// information on how this is used. - pub(crate) consumer: Option<BorrowckConsumer<'tcx>>, + pub consumer: Option<BorrowckConsumer<'tcx>>, } impl<'tcx> BorrowCheckRootCtxt<'tcx> { @@ -38,6 +38,10 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> { } } + pub(super) fn root_def_id(&self) -> LocalDefId { + self.root_def_id + } + /// Collect all defining uses of opaque types inside of this typeck root. This /// expects the hidden type to be mapped to the definition parameters of the opaque /// and errors if we end up with distinct hidden types. @@ -67,6 +71,13 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> { } } + pub(super) fn get_concrete_opaque_type( + &mut self, + def_id: LocalDefId, + ) -> Option<EarlyBinder<'tcx, OpaqueHiddenType<'tcx>>> { + self.concrete_opaque_types.0.get(&def_id).map(|ty| EarlyBinder::bind(*ty)) + } + pub(super) fn set_tainted_by_errors(&mut self, guar: ErrorGuaranteed) { self.tainted_by_errors = Some(guar); } diff --git a/compiler/rustc_borrowck/src/type_check/canonical.rs b/compiler/rustc_borrowck/src/type_check/canonical.rs index b3fa786a517..2627ed899a9 100644 --- a/compiler/rustc_borrowck/src/type_check/canonical.rs +++ b/compiler/rustc_borrowck/src/type_check/canonical.rs @@ -2,8 +2,9 @@ use std::fmt; use rustc_errors::ErrorGuaranteed; use rustc_infer::infer::canonical::Canonical; +use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_middle::bug; -use rustc_middle::mir::ConstraintCategory; +use rustc_middle::mir::{Body, ConstraintCategory}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, Upcast}; use rustc_span::Span; use rustc_span::def_id::DefId; @@ -14,7 +15,68 @@ use rustc_trait_selection::traits::query::type_op::{self, TypeOpOutput}; use tracing::{debug, instrument}; use super::{Locations, NormalizeLocation, TypeChecker}; +use crate::BorrowckInferCtxt; use crate::diagnostics::ToUniverseInfo; +use crate::type_check::{MirTypeckRegionConstraints, constraint_conversion}; +use crate::universal_regions::UniversalRegions; + +#[instrument(skip(infcx, constraints, op), level = "trace")] +pub(crate) fn fully_perform_op_raw<'tcx, R: fmt::Debug, Op>( + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + universal_regions: &UniversalRegions<'tcx>, + region_bound_pairs: &RegionBoundPairs<'tcx>, + known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>], + constraints: &mut MirTypeckRegionConstraints<'tcx>, + locations: Locations, + category: ConstraintCategory<'tcx>, + op: Op, +) -> Result<R, ErrorGuaranteed> +where + Op: type_op::TypeOp<'tcx, Output = R>, + Op::ErrorInfo: ToUniverseInfo<'tcx>, +{ + let 
old_universe = infcx.universe(); + + let TypeOpOutput { output, constraints: query_constraints, error_info } = + op.fully_perform(infcx, infcx.root_def_id, locations.span(body))?; + if cfg!(debug_assertions) { + let data = infcx.take_and_reset_region_constraints(); + if !data.is_empty() { + panic!("leftover region constraints: {data:#?}"); + } + } + + debug!(?output, ?query_constraints); + + if let Some(data) = query_constraints { + constraint_conversion::ConstraintConversion::new( + infcx, + universal_regions, + region_bound_pairs, + known_type_outlives_obligations, + locations, + locations.span(body), + category, + constraints, + ) + .convert_all(data); + } + + // If the query has created new universes and errors are going to be emitted, register the + // cause of these new universes for improved diagnostics. + let universe = infcx.universe(); + if old_universe != universe + && let Some(error_info) = error_info + { + let universe_info = error_info.to_universe_info(old_universe); + for u in (old_universe + 1)..=universe { + constraints.universe_causes.insert(u, universe_info.clone()); + } + } + + Ok(output) +} impl<'a, 'tcx> TypeChecker<'a, 'tcx> { /// Given some operation `op` that manipulates types, proves @@ -38,36 +100,17 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Op: type_op::TypeOp<'tcx, Output = R>, Op::ErrorInfo: ToUniverseInfo<'tcx>, { - let old_universe = self.infcx.universe(); - - let TypeOpOutput { output, constraints, error_info } = - op.fully_perform(self.infcx, locations.span(self.body))?; - if cfg!(debug_assertions) { - let data = self.infcx.take_and_reset_region_constraints(); - if !data.is_empty() { - panic!("leftover region constraints: {data:#?}"); - } - } - - debug!(?output, ?constraints); - - if let Some(data) = constraints { - self.push_region_constraints(locations, category, data); - } - - // If the query has created new universes and errors are going to be emitted, register the - // cause of these new universes for improved diagnostics. 
- let universe = self.infcx.universe(); - if old_universe != universe - && let Some(error_info) = error_info - { - let universe_info = error_info.to_universe_info(old_universe); - for u in (old_universe + 1)..=universe { - self.constraints.universe_causes.insert(u, universe_info.clone()); - } - } - - Ok(output) + fully_perform_op_raw( + self.infcx, + self.body, + self.universal_regions, + self.region_bound_pairs, + self.known_type_outlives_obligations, + self.constraints, + locations, + category, + op, + ) } pub(super) fn instantiate_canonical<T>( diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index 9bb96b94506..703223e2e54 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -1,10 +1,10 @@ use rustc_data_structures::fx::FxHashSet; use rustc_hir::def_id::LocalDefId; +use rustc_infer::infer::SubregionOrigin; use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate}; use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound}; -use rustc_infer::infer::{InferCtxt, SubregionOrigin}; use rustc_infer::traits::query::type_op::DeeplyNormalize; use rustc_middle::bug; use rustc_middle::ty::{ @@ -18,10 +18,12 @@ use crate::constraints::OutlivesConstraint; use crate::region_infer::TypeTest; use crate::type_check::{Locations, MirTypeckRegionConstraints}; use crate::universal_regions::UniversalRegions; -use crate::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory}; +use crate::{ + BorrowckInferCtxt, ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory, +}; pub(crate) struct ConstraintConversion<'a, 'tcx> { - infcx: &'a InferCtxt<'tcx>, + infcx: &'a BorrowckInferCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, /// Each RBP `GK: 'a` is assumed to be true. These encode /// relationships like `T: 'a` that are added via implicit bounds @@ -34,7 +36,6 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> { /// logic expecting to see (e.g.) `ReStatic`, and if we supplied /// our special inference variable there, we would mess that up. region_bound_pairs: &'a RegionBoundPairs<'tcx>, - param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, span: Span, @@ -45,10 +46,9 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> { impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { pub(crate) fn new( - infcx: &'a InferCtxt<'tcx>, + infcx: &'a BorrowckInferCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, region_bound_pairs: &'a RegionBoundPairs<'tcx>, - param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, span: Span, @@ -59,7 +59,6 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { infcx, universal_regions, region_bound_pairs, - param_env, known_type_outlives_obligations, locations, span, @@ -286,8 +285,11 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ConstraintCategory<'tcx>, )>, ) -> Ty<'tcx> { - match self.param_env.and(DeeplyNormalize { value: ty }).fully_perform(self.infcx, self.span) - { + match self.infcx.param_env.and(DeeplyNormalize { value: ty }).fully_perform( + self.infcx, + self.infcx.root_def_id, + self.span, + ) { Ok(TypeOpOutput { output: ty, constraints, .. 
}) => { // FIXME(higher_ranked_auto): What should we do with the assumptions here? if let Some(QueryRegionConstraints { outlives, assumptions: _ }) = constraints { diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index f642d34ea67..7bf2df91470 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -2,9 +2,9 @@ use rustc_data_structures::frozen::Frozen; use rustc_data_structures::transitive_relation::{TransitiveRelation, TransitiveRelationBuilder}; use rustc_hir::def::DefKind; use rustc_infer::infer::canonical::QueryRegionConstraints; +use rustc_infer::infer::outlives; use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::region_constraints::GenericKind; -use rustc_infer::infer::{InferCtxt, outlives}; use rustc_infer::traits::query::type_op::DeeplyNormalize; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::query::OutlivesBound; @@ -14,6 +14,7 @@ use rustc_trait_selection::traits::query::type_op::{self, TypeOp}; use tracing::{debug, instrument}; use type_op::TypeOpOutput; +use crate::BorrowckInferCtxt; use crate::type_check::{Locations, MirTypeckRegionConstraints, constraint_conversion}; use crate::universal_regions::UniversalRegions; @@ -47,14 +48,12 @@ pub(crate) struct CreateResult<'tcx> { } pub(crate) fn create<'tcx>( - infcx: &InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, + infcx: &BorrowckInferCtxt<'tcx>, universal_regions: UniversalRegions<'tcx>, constraints: &mut MirTypeckRegionConstraints<'tcx>, ) -> CreateResult<'tcx> { UniversalRegionRelationsBuilder { infcx, - param_env, constraints, universal_regions, region_bound_pairs: Default::default(), @@ -177,8 +176,7 @@ impl UniversalRegionRelations<'_> { } struct UniversalRegionRelationsBuilder<'a, 'tcx> { - infcx: &'a InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, + infcx: &'a BorrowckInferCtxt<'tcx>, universal_regions: UniversalRegions<'tcx>, constraints: &'a mut MirTypeckRegionConstraints<'tcx>, @@ -205,7 +203,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // Insert the `'a: 'b` we know from the predicates. // This does not consider the type-outlives. - let param_env = self.param_env; + let param_env = self.infcx.param_env; self.add_outlives_bounds(outlives::explicit_outlives_bounds(param_env)); // - outlives is reflexive, so `'r: 'r` for every region `'r` @@ -263,7 +261,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { let TypeOpOutput { output: norm_ty, constraints: constraints_normalize, .. } = param_env .and(DeeplyNormalize { value: ty }) - .fully_perform(self.infcx, span) + .fully_perform(self.infcx, self.infcx.root_def_id, span) .unwrap_or_else(|guar| TypeOpOutput { output: Ty::new_error(self.infcx.tcx, guar), constraints: None, @@ -298,8 +296,9 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // Add implied bounds from impl header. if matches!(tcx.def_kind(defining_ty_def_id), DefKind::AssocFn | DefKind::AssocConst) { for &(ty, _) in tcx.assumed_wf_types(tcx.local_parent(defining_ty_def_id)) { - let result: Result<_, ErrorGuaranteed> = - param_env.and(DeeplyNormalize { value: ty }).fully_perform(self.infcx, span); + let result: Result<_, ErrorGuaranteed> = param_env + .and(DeeplyNormalize { value: ty }) + .fully_perform(self.infcx, self.infcx.root_def_id, span); let Ok(TypeOpOutput { output: norm_ty, constraints: c, .. 
}) = result else { continue; }; @@ -318,7 +317,6 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { self.infcx, &self.universal_regions, &self.region_bound_pairs, - param_env, &known_type_outlives_obligations, Locations::All(span), span, @@ -353,10 +351,11 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { output: normalized_outlives, constraints: constraints_normalize, error_info: _, - }) = self - .param_env - .and(DeeplyNormalize { value: outlives }) - .fully_perform(self.infcx, span) + }) = self.infcx.param_env.and(DeeplyNormalize { value: outlives }).fully_perform( + self.infcx, + self.infcx.root_def_id, + span, + ) else { self.infcx.dcx().delayed_bug(format!("could not normalize {outlives:?}")); return; @@ -381,9 +380,10 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { span: Span, ) -> Option<&'tcx QueryRegionConstraints<'tcx>> { let TypeOpOutput { output: bounds, constraints, .. } = self + .infcx .param_env .and(type_op::ImpliedOutlivesBounds { ty }) - .fully_perform(self.infcx, span) + .fully_perform(self.infcx, self.infcx.root_def_id, span) .map_err(|_: ErrorGuaranteed| debug!("failed to compute implied bounds {:?}", ty)) .ok()?; debug!(?bounds, ?constraints); diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index 5d30fa71e92..b704d8f0a76 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -640,7 +640,7 @@ impl<'tcx> LivenessContext<'_, '_, 'tcx> { let op = typeck.infcx.param_env.and(DropckOutlives { dropped_ty }); - match op.fully_perform(typeck.infcx, DUMMY_SP) { + match op.fully_perform(typeck.infcx, typeck.root_cx.root_def_id(), DUMMY_SP) { Ok(TypeOpOutput { output, constraints, .. }) => { DropData { dropck_result: output, region_constraint_data: constraints } } diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index a960b96b91c..0d363935f14 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -26,8 +26,7 @@ use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::cast::CastTy; use rustc_middle::ty::{ self, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, CoroutineArgsExt, - GenericArgsRef, OpaqueHiddenType, OpaqueTypeKey, RegionVid, Ty, TyCtxt, TypeVisitableExt, - UserArgs, UserTypeAnnotationIndex, fold_regions, + GenericArgsRef, Ty, TyCtxt, TypeVisitableExt, UserArgs, UserTypeAnnotationIndex, fold_regions, }; use rustc_middle::{bug, span_bug}; use rustc_mir_dataflow::move_paths::MoveData; @@ -35,6 +34,7 @@ use rustc_mir_dataflow::points::DenseLocationMap; use rustc_span::def_id::CRATE_DEF_ID; use rustc_span::source_map::Spanned; use rustc_span::{Span, sym}; +use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::query::type_op::custom::scrape_region_constraints; use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput}; use tracing::{debug, instrument, trace}; @@ -42,7 +42,6 @@ use tracing::{debug, instrument, trace}; use crate::borrow_set::BorrowSet; use crate::constraints::{OutlivesConstraint, OutlivesConstraintSet}; use crate::diagnostics::UniverseInfo; -use crate::member_constraints::MemberConstraintSet; use crate::polonius::legacy::{PoloniusFacts, PoloniusLocationTable}; use crate::polonius::{PoloniusContext, PoloniusLivenessContext}; use crate::region_infer::TypeTest; @@ -67,12 +66,11 @@ macro_rules! 
span_mirbug { }) } -mod canonical; +pub(crate) mod canonical; mod constraint_conversion; pub(crate) mod free_region_relations; mod input_output; pub(crate) mod liveness; -mod opaque_types; mod relate_tys; /// Type checks the given `mir` in the context of the inference @@ -114,7 +112,6 @@ pub(crate) fn type_check<'tcx>( placeholder_index_to_region: IndexVec::default(), liveness_constraints: LivenessValues::with_specific_points(Rc::clone(&location_map)), outlives_constraints: OutlivesConstraintSet::default(), - member_constraints: MemberConstraintSet::default(), type_tests: Vec::default(), universe_causes: FxIndexMap::default(), }; @@ -124,7 +121,7 @@ pub(crate) fn type_check<'tcx>( region_bound_pairs, normalized_inputs_and_output, known_type_outlives_obligations, - } = free_region_relations::create(infcx, infcx.param_env, universal_regions, &mut constraints); + } = free_region_relations::create(infcx, universal_regions, &mut constraints); let pre_obligations = infcx.take_registered_region_obligations(); assert!( @@ -170,9 +167,6 @@ pub(crate) fn type_check<'tcx>( liveness::generate(&mut typeck, &location_map, move_data); - let opaque_type_values = - opaque_types::take_opaques_and_register_member_constraints(&mut typeck); - // We're done with typeck, we can finalize the polonius liveness context for region inference. let polonius_context = typeck.polonius_liveness.take().map(|liveness_context| { PoloniusContext::create_from_liveness( @@ -187,7 +181,6 @@ pub(crate) fn type_check<'tcx>( if let Some(guar) = universal_region_relations.universal_regions.encountered_re_error() { debug!("encountered an error region; removing constraints!"); constraints.outlives_constraints = Default::default(); - constraints.member_constraints = Default::default(); constraints.type_tests = Default::default(); root_cx.set_tainted_by_errors(guar); infcx.set_tainted_by_errors(guar); @@ -196,7 +189,8 @@ pub(crate) fn type_check<'tcx>( MirTypeckResults { constraints, universal_region_relations, - opaque_type_values, + region_bound_pairs, + known_type_outlives_obligations, polonius_context, } } @@ -245,7 +239,8 @@ struct TypeChecker<'a, 'tcx> { pub(crate) struct MirTypeckResults<'tcx> { pub(crate) constraints: MirTypeckRegionConstraints<'tcx>, pub(crate) universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>, - pub(crate) opaque_type_values: FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>>, + pub(crate) region_bound_pairs: Frozen<RegionBoundPairs<'tcx>>, + pub(crate) known_type_outlives_obligations: Frozen<Vec<ty::PolyTypeOutlivesPredicate<'tcx>>>, pub(crate) polonius_context: Option<PoloniusContext>, } @@ -277,8 +272,6 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> { pub(crate) outlives_constraints: OutlivesConstraintSet<'tcx>, - pub(crate) member_constraints: MemberConstraintSet<'tcx, RegionVid>, - pub(crate) universe_causes: FxIndexMap<ty::UniverseIndex, UniverseInfo<'tcx>>, pub(crate) type_tests: Vec<TypeTest<'tcx>>, @@ -287,7 +280,7 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> { impl<'tcx> MirTypeckRegionConstraints<'tcx> { /// Creates a `Region` for a given `PlaceholderRegion`, or returns the /// region that corresponds to a previously created one. 
- fn placeholder_region( + pub(crate) fn placeholder_region( &mut self, infcx: &InferCtxt<'tcx>, placeholder: ty::PlaceholderRegion, @@ -380,14 +373,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.body } - fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> RegionVid { - if let ty::RePlaceholder(placeholder) = r.kind() { - self.constraints.placeholder_region(self.infcx, placeholder).as_var() - } else { - self.universal_regions.to_region_vid(r) - } - } - fn unsized_feature_enabled(&self) -> bool { self.tcx().features().unsized_fn_params() } @@ -424,7 +409,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.infcx, self.universal_regions, self.region_bound_pairs, - self.infcx.param_env, self.known_type_outlives_obligations, locations, locations.span(self.body), @@ -1471,68 +1455,79 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } CastKind::PtrToPtr => { let ty_from = op.ty(self.body, tcx); - let cast_ty_from = CastTy::from_ty(ty_from); - let cast_ty_to = CastTy::from_ty(*ty); - match (cast_ty_from, cast_ty_to) { - (Some(CastTy::Ptr(src)), Some(CastTy::Ptr(dst))) => { - let src_tail = self.struct_tail(src.ty, location); - let dst_tail = self.struct_tail(dst.ty, location); - - // This checks (lifetime part of) vtable validity for pointer casts, - // which is irrelevant when there are aren't principal traits on - // both sides (aka only auto traits). - // - // Note that other checks (such as denying `dyn Send` -> `dyn - // Debug`) are in `rustc_hir_typeck`. - if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) = *src_tail.kind() - && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) = *dst_tail.kind() - && src_tty.principal().is_some() - && dst_tty.principal().is_some() - { - // Remove auto traits. - // Auto trait checks are handled in `rustc_hir_typeck` as FCW. - let src_obj = Ty::new_dynamic( - tcx, - tcx.mk_poly_existential_predicates( - &src_tty.without_auto_traits().collect::<Vec<_>>(), - ), - // FIXME: Once we disallow casting `*const dyn Trait + 'short` - // to `*const dyn Trait + 'long`, then this can just be `src_lt`. - dst_lt, - ty::Dyn, - ); - let dst_obj = Ty::new_dynamic( - tcx, - tcx.mk_poly_existential_predicates( - &dst_tty.without_auto_traits().collect::<Vec<_>>(), - ), - dst_lt, - ty::Dyn, - ); - - debug!(?src_tty, ?dst_tty, ?src_obj, ?dst_obj); - - self.sub_types( - src_obj, - dst_obj, - location.to_locations(), - ConstraintCategory::Cast { - is_implicit_coercion: false, - unsize_to: None, - }, - ) - .unwrap(); - } - } - _ => { - span_mirbug!( - self, - rvalue, - "Invalid PtrToPtr cast {:?} -> {:?}", - ty_from, - ty - ) - } + let Some(CastTy::Ptr(src)) = CastTy::from_ty(ty_from) else { + unreachable!(); + }; + let Some(CastTy::Ptr(dst)) = CastTy::from_ty(*ty) else { + unreachable!(); + }; + + if self.infcx.type_is_sized_modulo_regions(self.infcx.param_env, dst.ty) { + // Wide to thin ptr cast. This may even occur in an env with + // impossible predicates, such as `where dyn Trait: Sized`. + // In this case, we don't want to fall into the case below, + // since the types may not actually be equatable, but it's + // fine to perform this operation in an impossible env. 
+ let trait_ref = ty::TraitRef::new( + tcx, + tcx.require_lang_item(LangItem::Sized, self.last_span), + [dst.ty], + ); + self.prove_trait_ref( + trait_ref, + location.to_locations(), + ConstraintCategory::Cast { + is_implicit_coercion: true, + unsize_to: None, + }, + ); + } else if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) = + *self.struct_tail(src.ty, location).kind() + && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) = + *self.struct_tail(dst.ty, location).kind() + && src_tty.principal().is_some() + && dst_tty.principal().is_some() + { + // This checks (lifetime part of) vtable validity for pointer casts, + // which is irrelevant when there are aren't principal traits on + // both sides (aka only auto traits). + // + // Note that other checks (such as denying `dyn Send` -> `dyn + // Debug`) are in `rustc_hir_typeck`. + + // Remove auto traits. + // Auto trait checks are handled in `rustc_hir_typeck` as FCW. + let src_obj = Ty::new_dynamic( + tcx, + tcx.mk_poly_existential_predicates( + &src_tty.without_auto_traits().collect::<Vec<_>>(), + ), + // FIXME: Once we disallow casting `*const dyn Trait + 'short` + // to `*const dyn Trait + 'long`, then this can just be `src_lt`. + dst_lt, + ty::Dyn, + ); + let dst_obj = Ty::new_dynamic( + tcx, + tcx.mk_poly_existential_predicates( + &dst_tty.without_auto_traits().collect::<Vec<_>>(), + ), + dst_lt, + ty::Dyn, + ); + + debug!(?src_tty, ?dst_tty, ?src_obj, ?dst_obj); + + self.sub_types( + src_obj, + dst_obj, + location.to_locations(), + ConstraintCategory::Cast { + is_implicit_coercion: false, + unsize_to: None, + }, + ) + .unwrap(); } } CastKind::Transmute => { @@ -1895,7 +1890,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { if !output_ty .is_privately_uninhabited(self.tcx(), self.infcx.typing_env(self.infcx.param_env)) { - span_mirbug!(self, term, "call to converging function {:?} w/o dest", sig); + span_mirbug!(self, term, "call to non-diverging function {:?} w/o dest", sig); } } else { let dest_ty = destination.ty(self.body, tcx).ty; @@ -2474,12 +2469,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { args: GenericArgsRef<'tcx>, locations: Locations, ) -> ty::InstantiatedPredicates<'tcx> { + let root_def_id = self.root_cx.root_def_id(); if let Some(closure_requirements) = &self.root_cx.closure_requirements(def_id) { constraint_conversion::ConstraintConversion::new( self.infcx, self.universal_regions, self.region_bound_pairs, - self.infcx.param_env, self.known_type_outlives_obligations, locations, self.body.span, // irrelevant; will be overridden. @@ -2489,9 +2484,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { .apply_closure_requirements(closure_requirements, def_id, args); } - // Now equate closure args to regions inherited from `typeck_root_def_id`. Fixes #98589. - let typeck_root_def_id = tcx.typeck_root_def_id(self.body.source.def_id()); - let typeck_root_args = ty::GenericArgs::identity_for_item(tcx, typeck_root_def_id); + // Now equate closure args to regions inherited from `root_def_id`. Fixes #98589. 
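The deleted doc comment above explains that regions appearing in an opaque type's hidden type must be constrained to regions the opaque itself can name. A minimal hand-written illustration of that property (editorial sketch, not taken from this patch):

```rust
// The hidden type behind `impl Bar<'a>` may only mention regions that the
// opaque can name: `'a` or `'static`.
trait Bar<'a> {}
impl<'a> Bar<'a> for &'a i32 {}

fn ok<'a, 'b>(x: &'a i32, _y: &'b i32) -> impl Bar<'a> {
    // Hidden type is `&'a i32`; its region is `'a`, which is allowed.
    x
}

// A hidden type that needed a region the opaque cannot name (say, an
// unrelated `'b`) is exactly what these member constraints reject.
```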
+ let typeck_root_args = ty::GenericArgs::identity_for_item(tcx, root_def_id); let parent_args = match tcx.def_kind(def_id) { // We don't want to dispatch on 3 different kind of closures here, so take @@ -2566,17 +2560,14 @@ impl<'tcx> TypeOp<'tcx> for InstantiateOpaqueType<'tcx> { fn fully_perform( mut self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> { - let (mut output, region_constraints) = scrape_region_constraints( - infcx, - |ocx| { + let (mut output, region_constraints) = + scrape_region_constraints(infcx, root_def_id, "InstantiateOpaqueType", span, |ocx| { ocx.register_obligations(self.obligations.clone()); Ok(()) - }, - "InstantiateOpaqueType", - span, - )?; + })?; self.region_constraints = Some(region_constraints); output.error_info = Some(self); Ok(output) diff --git a/compiler/rustc_borrowck/src/type_check/opaque_types.rs b/compiler/rustc_borrowck/src/type_check/opaque_types.rs deleted file mode 100644 index 5a422483eef..00000000000 --- a/compiler/rustc_borrowck/src/type_check/opaque_types.rs +++ /dev/null @@ -1,333 +0,0 @@ -use std::iter; - -use rustc_data_structures::fx::FxIndexMap; -use rustc_middle::span_bug; -use rustc_middle::ty::{ - self, GenericArgKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeSuperVisitable, - TypeVisitable, TypeVisitableExt, TypeVisitor, fold_regions, -}; -use tracing::{debug, trace}; - -use super::{MemberConstraintSet, TypeChecker}; - -/// Once we're done with typechecking the body, we take all the opaque types -/// defined by this function and add their 'member constraints'. -pub(super) fn take_opaques_and_register_member_constraints<'tcx>( - typeck: &mut TypeChecker<'_, 'tcx>, -) -> FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>> { - let infcx = typeck.infcx; - // Annoying: to invoke `typeck.to_region_vid`, we need access to - // `typeck.constraints`, but we also want to be mutating - // `typeck.member_constraints`. For now, just swap out the value - // we want and replace at the end. - let mut member_constraints = std::mem::take(&mut typeck.constraints.member_constraints); - let opaque_types = infcx - .take_opaque_types() - .into_iter() - .map(|(opaque_type_key, hidden_type)| { - let hidden_type = infcx.resolve_vars_if_possible(hidden_type); - register_member_constraints( - typeck, - &mut member_constraints, - opaque_type_key, - hidden_type, - ); - trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind()); - if hidden_type.has_non_region_infer() { - span_bug!(hidden_type.span, "could not resolve {:?}", hidden_type.ty); - } - - // Convert all regions to nll vars. - let (opaque_type_key, hidden_type) = - fold_regions(infcx.tcx, (opaque_type_key, hidden_type), |r, _| { - ty::Region::new_var(infcx.tcx, typeck.to_region_vid(r)) - }); - - (opaque_type_key, hidden_type) - }) - .collect(); - assert!(typeck.constraints.member_constraints.is_empty()); - typeck.constraints.member_constraints = member_constraints; - opaque_types -} - -/// Given the map `opaque_types` containing the opaque -/// `impl Trait` types whose underlying, hidden types are being -/// inferred, this method adds constraints to the regions -/// appearing in those underlying hidden types to ensure that they -/// at least do not refer to random scopes within the current -/// function. These constraints are not (quite) sufficient to -/// guarantee that the regions are actually legal values; that -/// final condition is imposed after region inference is done. 
-/// -/// # The Problem -/// -/// Let's work through an example to explain how it works. Assume -/// the current function is as follows: -/// -/// ```text -/// fn foo<'a, 'b>(..) -> (impl Bar<'a>, impl Bar<'b>) -/// ``` -/// -/// Here, we have two `impl Trait` types whose values are being -/// inferred (the `impl Bar<'a>` and the `impl -/// Bar<'b>`). Conceptually, this is sugar for a setup where we -/// define underlying opaque types (`Foo1`, `Foo2`) and then, in -/// the return type of `foo`, we *reference* those definitions: -/// -/// ```text -/// type Foo1<'x> = impl Bar<'x>; -/// type Foo2<'x> = impl Bar<'x>; -/// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } -/// // ^^^^ ^^ -/// // | | -/// // | args -/// // def_id -/// ``` -/// -/// As indicating in the comments above, each of those references -/// is (in the compiler) basically generic parameters (`args`) -/// applied to the type of a suitable `def_id` (which identifies -/// `Foo1` or `Foo2`). -/// -/// Now, at this point in compilation, what we have done is to -/// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with -/// fresh inference variables C1 and C2. We wish to use the values -/// of these variables to infer the underlying types of `Foo1` and -/// `Foo2`. That is, this gives rise to higher-order (pattern) unification -/// constraints like: -/// -/// ```text -/// for<'a> (Foo1<'a> = C1) -/// for<'b> (Foo1<'b> = C2) -/// ``` -/// -/// For these equation to be satisfiable, the types `C1` and `C2` -/// can only refer to a limited set of regions. For example, `C1` -/// can only refer to `'static` and `'a`, and `C2` can only refer -/// to `'static` and `'b`. The job of this function is to impose that -/// constraint. -/// -/// Up to this point, C1 and C2 are basically just random type -/// inference variables, and hence they may contain arbitrary -/// regions. In fact, it is fairly likely that they do! Consider -/// this possible definition of `foo`: -/// -/// ```text -/// fn foo<'a, 'b>(x: &'a i32, y: &'b i32) -> (impl Bar<'a>, impl Bar<'b>) { -/// (&*x, &*y) -/// } -/// ``` -/// -/// Here, the values for the concrete types of the two impl -/// traits will include inference variables: -/// -/// ```text -/// &'0 i32 -/// &'1 i32 -/// ``` -/// -/// Ordinarily, the subtyping rules would ensure that these are -/// sufficiently large. But since `impl Bar<'a>` isn't a specific -/// type per se, we don't get such constraints by default. This -/// is where this function comes into play. It adds extra -/// constraints to ensure that all the regions which appear in the -/// inferred type are regions that could validly appear. -/// -/// This is actually a bit of a tricky constraint in general. We -/// want to say that each variable (e.g., `'0`) can only take on -/// values that were supplied as arguments to the opaque type -/// (e.g., `'a` for `Foo1<'a>`) or `'static`, which is always in -/// scope. We don't have a constraint quite of this kind in the current -/// region checker. -/// -/// # The Solution -/// -/// We generally prefer to make `<=` constraints, since they -/// integrate best into the region solver. To do that, we find the -/// "minimum" of all the arguments that appear in the args: that -/// is, some region which is less than all the others. In the case -/// of `Foo1<'a>`, that would be `'a` (it's the only choice, after -/// all). Then we apply that as a least bound to the variables -/// (e.g., `'a <= '0`). -/// -/// In some cases, there is no minimum. 
Consider this example: -/// -/// ```text -/// fn baz<'a, 'b>() -> impl Trait<'a, 'b> { ... } -/// ``` -/// -/// Here we would report a more complex "in constraint", like `'r -/// in ['a, 'b, 'static]` (where `'r` is some region appearing in -/// the hidden type). -/// -/// # Constrain regions, not the hidden concrete type -/// -/// Note that generating constraints on each region `Rc` is *not* -/// the same as generating an outlives constraint on `Tc` itself. -/// For example, if we had a function like this: -/// -/// ``` -/// # #![feature(type_alias_impl_trait)] -/// # fn main() {} -/// # trait Foo<'a> {} -/// # impl<'a, T> Foo<'a> for (&'a u32, T) {} -/// fn foo<'a, T>(x: &'a u32, y: T) -> impl Foo<'a> { -/// (x, y) -/// } -/// -/// // Equivalent to: -/// # mod dummy { use super::*; -/// type FooReturn<'a, T> = impl Foo<'a>; -/// #[define_opaque(FooReturn)] -/// fn foo<'a, T>(x: &'a u32, y: T) -> FooReturn<'a, T> { -/// (x, y) -/// } -/// # } -/// ``` -/// -/// then the hidden type `Tc` would be `(&'0 u32, T)` (where `'0` -/// is an inference variable). If we generated a constraint that -/// `Tc: 'a`, then this would incorrectly require that `T: 'a` -- -/// but this is not necessary, because the opaque type we -/// create will be allowed to reference `T`. So we only generate a -/// constraint that `'0: 'a`. -fn register_member_constraints<'tcx>( - typeck: &mut TypeChecker<'_, 'tcx>, - member_constraints: &mut MemberConstraintSet<'tcx, ty::RegionVid>, - opaque_type_key: OpaqueTypeKey<'tcx>, - OpaqueHiddenType { span, ty: hidden_ty }: OpaqueHiddenType<'tcx>, -) { - let tcx = typeck.tcx(); - let hidden_ty = typeck.infcx.resolve_vars_if_possible(hidden_ty); - debug!(?hidden_ty); - - let variances = tcx.variances_of(opaque_type_key.def_id); - debug!(?variances); - - // For a case like `impl Foo<'a, 'b>`, we would generate a constraint - // `'r in ['a, 'b, 'static]` for each region `'r` that appears in the - // hidden type (i.e., it must be equal to `'a`, `'b`, or `'static`). - // - // `conflict1` and `conflict2` are the two region bounds that we - // detected which were unrelated. They are used for diagnostics. - - // Create the set of choice regions: each region in the hidden - // type can be equal to any of the region parameters of the - // opaque type definition. - let fr_static = typeck.universal_regions.fr_static; - let choice_regions: Vec<_> = opaque_type_key - .args - .iter() - .enumerate() - .filter(|(i, _)| variances[*i] == ty::Invariant) - .filter_map(|(_, arg)| match arg.kind() { - GenericArgKind::Lifetime(r) => Some(typeck.to_region_vid(r)), - GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, - }) - .chain(iter::once(fr_static)) - .collect(); - - // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's - // not currently sound until we have existential regions. - hidden_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { - tcx, - op: |r| { - member_constraints.add_member_constraint( - opaque_type_key, - hidden_ty, - span, - typeck.to_region_vid(r), - &choice_regions, - ) - }, - }); -} - -/// Visitor that requires that (almost) all regions in the type visited outlive -/// `least_region`. We cannot use `push_outlives_components` because regions in -/// closure signatures are not included in their outlives components. 
We need to -/// ensure all regions outlive the given bound so that we don't end up with, -/// say, `ReVar` appearing in a return type and causing ICEs when other -/// functions end up with region constraints involving regions from other -/// functions. -/// -/// We also cannot use `for_each_free_region` because for closures it includes -/// the regions parameters from the enclosing item. -/// -/// We ignore any type parameters because impl trait values are assumed to -/// capture all the in-scope type parameters. -struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { - tcx: TyCtxt<'tcx>, - op: OP, -} - -impl<'tcx, OP> TypeVisitor<TyCtxt<'tcx>> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP> -where - OP: FnMut(ty::Region<'tcx>), -{ - fn visit_region(&mut self, r: ty::Region<'tcx>) { - match r.kind() { - // ignore bound regions, keep visiting - ty::ReBound(_, _) => {} - _ => (self.op)(r), - } - } - - fn visit_ty(&mut self, ty: Ty<'tcx>) { - // We're only interested in types involving regions - if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { - return; - } - - match *ty.kind() { - ty::Closure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_closure().upvar_tys() { - upvar.visit_with(self); - } - args.as_closure().sig_as_fn_ptr_ty().visit_with(self); - } - - ty::CoroutineClosure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_coroutine_closure().upvar_tys() { - upvar.visit_with(self); - } - - args.as_coroutine_closure().signature_parts_ty().visit_with(self); - } - - ty::Coroutine(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - // Also skip the witness type, because that has no free regions. - - for upvar in args.as_coroutine().upvar_tys() { - upvar.visit_with(self); - } - args.as_coroutine().return_ty().visit_with(self); - args.as_coroutine().yield_ty().visit_with(self); - args.as_coroutine().resume_ty().visit_with(self); - } - - ty::Alias(kind, ty::AliasTy { def_id, args, .. }) - if let Some(variances) = self.tcx.opt_alias_variances(kind, def_id) => - { - // Skip lifetime parameters that are not captured, since they do - // not need member constraints registered for them; we'll erase - // them (and hopefully in the future replace them with placeholders). - for (v, s) in std::iter::zip(variances, args.iter()) { - if *v != ty::Bivariant { - s.visit_with(self); - } - } - } - - _ => { - ty.super_visit_with(self); - } - } - } -} diff --git a/compiler/rustc_borrowck/src/type_check/relate_tys.rs b/compiler/rustc_borrowck/src/type_check/relate_tys.rs index 84ca9bad2c1..7ac2dff12f7 100644 --- a/compiler/rustc_borrowck/src/type_check/relate_tys.rs +++ b/compiler/rustc_borrowck/src/type_check/relate_tys.rs @@ -124,8 +124,13 @@ impl<'a, 'b, 'tcx> NllTypeRelating<'a, 'b, 'tcx> { // by using `ty_vid rel B` and then finally and end by equating `ty_vid` to // the opaque. let mut enable_subtyping = |ty, opaque_is_expected| { - let ty_vid = infcx.next_ty_var_id_in_universe(self.span(), ty::UniverseIndex::ROOT); - + // We create the fresh inference variable in the highest universe. + // In theory we could limit it to the highest universe in the args of + // the opaque but that isn't really worth the effort. + // + // We'll make sure that the opaque type can actually name everything + // in its hidden type later on. 
+ let ty_vid = infcx.next_ty_vid(self.span()); let variance = if opaque_is_expected { self.ambient_variance } else { diff --git a/compiler/rustc_builtin_macros/src/cfg_accessible.rs b/compiler/rustc_builtin_macros/src/cfg_accessible.rs index f7d8f4aa783..48d80004cdd 100644 --- a/compiler/rustc_builtin_macros/src/cfg_accessible.rs +++ b/compiler/rustc_builtin_macros/src/cfg_accessible.rs @@ -1,9 +1,9 @@ //! Implementation of the `#[cfg_accessible(path)]` attribute macro. use rustc_ast as ast; +use rustc_attr_parsing::validate_attr; use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier}; use rustc_feature::AttributeTemplate; -use rustc_parse::validate_attr; use rustc_span::{Span, sym}; use crate::errors; diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs index a33eca43de5..09d827b0635 100644 --- a/compiler/rustc_builtin_macros/src/derive.rs +++ b/compiler/rustc_builtin_macros/src/derive.rs @@ -1,10 +1,10 @@ use rustc_ast as ast; use rustc_ast::{GenericParamKind, ItemKind, MetaItemInner, MetaItemKind, StmtKind}; +use rustc_attr_parsing::validate_attr; use rustc_expand::base::{ Annotatable, DeriveResolution, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier, }; use rustc_feature::AttributeTemplate; -use rustc_parse::validate_attr; use rustc_session::Session; use rustc_span::{ErrorGuaranteed, Ident, Span, sym}; diff --git a/compiler/rustc_builtin_macros/src/deriving/from.rs b/compiler/rustc_builtin_macros/src/deriving/from.rs index ef0e6ca324a..ab25de7c917 100644 --- a/compiler/rustc_builtin_macros/src/deriving/from.rs +++ b/compiler/rustc_builtin_macros/src/deriving/from.rs @@ -27,21 +27,39 @@ pub(crate) fn expand_deriving_from( cx.dcx().bug("derive(From) used on something else than an item"); }; - // #[derive(From)] is currently usable only on structs with exactly one field. - let field = if let ItemKind::Struct(_, _, data) = &item.kind - && let [field] = data.fields() - { - Some(field.clone()) - } else { - None + let err_span = || { + let item_span = item.kind.ident().map(|ident| ident.span).unwrap_or(item.span); + MultiSpan::from_spans(vec![span, item_span]) }; - let from_type = match &field { - Some(field) => Ty::AstTy(field.ty.clone()), - // We don't have a type to put into From<...> if we don't have a single field, so just put - // unit there. - None => Ty::Unit, + // `#[derive(From)]` is currently usable only on structs with exactly one field. 
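As a usage sketch for the comment just above (editorial, not part of this patch; it assumes the unstable built-in `derive(From)` is available and only approximates the generated code):

```rust
// Accepted shape: a struct with exactly one field. Enums, unions, and
// structs with any other number of fields hit the dedicated errors
// emitted by this hunk.
#[derive(From)]
struct Wrapper {
    inner: u32,
}

// Roughly the impl the derive is expected to produce (hand-written sketch):
//
// impl From<u32> for Wrapper {
//     fn from(inner: u32) -> Self {
//         Self { inner }
//     }
// }
```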
+ let field = match &item.kind { + ItemKind::Struct(_, _, data) => { + if let [field] = data.fields() { + Ok(field.clone()) + } else { + let guar = cx.dcx().emit_err(errors::DeriveFromWrongFieldCount { + span: err_span(), + multiple_fields: data.fields().len() > 1, + }); + Err(guar) + } + } + ItemKind::Enum(_, _, _) | ItemKind::Union(_, _, _) => { + let guar = cx.dcx().emit_err(errors::DeriveFromWrongTarget { + span: err_span(), + kind: &format!("{} {}", item.kind.article(), item.kind.descr()), + }); + Err(guar) + } + _ => cx.dcx().bug("Invalid derive(From) ADT input"), }; + + let from_type = Ty::AstTy(match field { + Ok(ref field) => field.ty.clone(), + Err(guar) => cx.ty(span, ast::TyKind::Err(guar)), + }); + let path = Path::new_(pathvec_std!(convert::From), vec![Box::new(from_type.clone())], PathKind::Std); @@ -71,34 +89,17 @@ pub(crate) fn expand_deriving_from( attributes: thin_vec![cx.attr_word(sym::inline, span)], fieldless_variants_strategy: FieldlessVariantsStrategy::Default, combine_substructure: combine_substructure(Box::new(|cx, span, substructure| { - let Some(field) = &field else { - let item_span = item.kind.ident().map(|ident| ident.span).unwrap_or(item.span); - let err_span = MultiSpan::from_spans(vec![span, item_span]); - let error = match &item.kind { - ItemKind::Struct(_, _, data) => { - cx.dcx().emit_err(errors::DeriveFromWrongFieldCount { - span: err_span, - multiple_fields: data.fields().len() > 1, - }) - } - ItemKind::Enum(_, _, _) | ItemKind::Union(_, _, _) => { - cx.dcx().emit_err(errors::DeriveFromWrongTarget { - span: err_span, - kind: &format!("{} {}", item.kind.article(), item.kind.descr()), - }) - } - _ => cx.dcx().bug("Invalid derive(From) ADT input"), - }; - - return BlockOrExpr::new_expr(DummyResult::raw_expr(span, Some(error))); + let field = match field { + Ok(ref field) => field, + Err(guar) => { + return BlockOrExpr::new_expr(DummyResult::raw_expr(span, Some(guar))); + } }; let self_kw = Ident::new(kw::SelfUpper, span); let expr: Box<ast::Expr> = match substructure.fields { SubstructureFields::StaticStruct(variant, _) => match variant { - // Self { - // field: value - // } + // Self { field: value } VariantData::Struct { .. 
} => cx.expr_struct_ident( span, self_kw, diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index ec613b7b710..6415e55e0b0 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -10,11 +10,12 @@ use rustc_ast::{ }; use rustc_data_structures::fx::FxHashSet; use rustc_errors::{ - Applicability, Diag, MultiSpan, PResult, SingleLabelManySpans, listify, pluralize, + Applicability, BufferedEarlyLint, Diag, MultiSpan, PResult, SingleLabelManySpans, listify, + pluralize, }; use rustc_expand::base::*; use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY; -use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, LintId}; +use rustc_lint_defs::{BuiltinLintDiag, LintId}; use rustc_parse::exp; use rustc_parse_format as parse; use rustc_span::{BytePos, ErrorGuaranteed, Ident, InnerSpan, Span, Symbol}; @@ -595,7 +596,8 @@ fn make_format_args( named_arg_sp: arg_name.span, named_arg_name: arg_name.name.to_string(), is_formatting_arg: matches!(used_as, Width | Precision), - }, + } + .into(), }); } } diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs index 86a4927f390..1bcea95fbb7 100644 --- a/compiler/rustc_builtin_macros/src/lib.rs +++ b/compiler/rustc_builtin_macros/src/lib.rs @@ -8,7 +8,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] -#![feature(autodiff)] #![feature(box_patterns)] #![feature(decl_macro)] #![feature(if_let_guard)] diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index e803f3be82b..a9d91f77560 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -141,7 +141,7 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { if let ast::ItemKind::Mod( _, _, - ModKind::Loaded(.., ast::ModSpans { inner_span: span, .. }, _), + ModKind::Loaded(.., ast::ModSpans { inner_span: span, .. 
}), ) = item.kind { let prev_tests = mem::take(&mut self.tests); diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs index f00c170e485..3a4585d5be9 100644 --- a/compiler/rustc_builtin_macros/src/util.rs +++ b/compiler/rustc_builtin_macros/src/util.rs @@ -1,12 +1,13 @@ use rustc_ast::tokenstream::TokenStream; use rustc_ast::{self as ast, AttrStyle, Attribute, MetaItem, attr, token}; +use rustc_attr_parsing::validate_attr; use rustc_errors::{Applicability, Diag, ErrorGuaranteed}; use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt}; use rustc_expand::expand::AstFragment; use rustc_feature::AttributeTemplate; use rustc_lint_defs::BuiltinLintDiag; use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES; -use rustc_parse::{exp, parser, validate_attr}; +use rustc_parse::{exp, parser}; use rustc_session::errors::report_lit_error; use rustc_span::{BytePos, Span, Symbol}; diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 043123fcab2..399f8b6e762 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -42,12 +42,13 @@ trait ArgAttributesExt { const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] = [(ArgAttribute::InReg, llvm::AttributeKind::InReg)]; -const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 5] = [ +const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 6] = [ (ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias), (ArgAttribute::NoCapture, llvm::AttributeKind::NoCapture), (ArgAttribute::NonNull, llvm::AttributeKind::NonNull), (ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly), (ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef), + (ArgAttribute::CapturesReadOnly, llvm::AttributeKind::CapturesReadOnly), ]; fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> { @@ -83,6 +84,10 @@ fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&' } for (attr, llattr) in OPTIMIZATION_ATTRIBUTES { if regular.contains(attr) { + // captures(address, read_provenance) is only available since LLVM 21. 
+ if attr == ArgAttribute::CapturesReadOnly && llvm_util::get_version() < (21, 0, 0) { + continue; + } attrs.push(llattr.create_attr(cx.llcx)); } } diff --git a/compiler/rustc_codegen_llvm/src/allocator.rs b/compiler/rustc_codegen_llvm/src/allocator.rs index 23610aa856c..df3e49279d9 100644 --- a/compiler/rustc_codegen_llvm/src/allocator.rs +++ b/compiler/rustc_codegen_llvm/src/allocator.rs @@ -12,7 +12,7 @@ use smallvec::SmallVec; use crate::builder::SBuilder; use crate::declare::declare_simple_fn; -use crate::llvm::{self, False, True, Type, Value}; +use crate::llvm::{self, FALSE, TRUE, Type, Value}; use crate::{SimpleCx, attributes, debuginfo, llvm_util}; pub(crate) unsafe fn codegen( @@ -80,7 +80,7 @@ pub(crate) unsafe fn codegen( &cx, &mangle_internal_symbol(tcx, OomStrategy::SYMBOL), &i8, - &llvm::LLVMConstInt(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as u64, False), + &llvm::LLVMConstInt(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as u64, FALSE), ); // __rust_no_alloc_shim_is_unstable_v2 @@ -201,7 +201,7 @@ fn create_wrapper_function( .map(|(i, _)| llvm::get_param(llfn, i as c_uint)) .collect::<Vec<_>>(); let ret = bx.call(ty, callee, &args, None); - llvm::LLVMSetTailCall(ret, True); + llvm::LLVMSetTailCall(ret, TRUE); if output.is_some() { bx.ret(ret); } else { diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index a643a91141e..38c1d3b53e8 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -16,6 +16,7 @@ use tracing::debug; use crate::builder::Builder; use crate::common::Funclet; use crate::context::CodegenCx; +use crate::llvm::ToLlvmBool; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; @@ -470,10 +471,6 @@ pub(crate) fn inline_asm_call<'ll>( dest: Option<&'ll llvm::BasicBlock>, catch_funclet: Option<(&'ll llvm::BasicBlock, Option<&Funclet<'ll>>)>, ) -> Option<&'ll Value> { - let volatile = if volatile { llvm::True } else { llvm::False }; - let alignstack = if alignstack { llvm::True } else { llvm::False }; - let can_throw = if unwind { llvm::True } else { llvm::False }; - let argtys = inputs .iter() .map(|v| { @@ -500,10 +497,10 @@ pub(crate) fn inline_asm_call<'ll>( asm.len(), cons.as_ptr(), cons.len(), - volatile, - alignstack, + volatile.to_llvm_bool(), + alignstack.to_llvm_bool(), dia, - can_throw, + unwind.to_llvm_bool(), ) }; diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index a6daacd95ef..5affb26483a 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -420,6 +420,16 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::ALLOCATOR_ZEROED) { to_add.push(create_alloc_family_attr(cx.llcx)); + if let Some(zv) = + cx.tcx.get_attr(instance.def_id(), rustc_span::sym::rustc_allocator_zeroed_variant) + && let Some(name) = zv.value_str() + { + to_add.push(llvm::CreateAttrStringValue( + cx.llcx, + "alloc-variant-zeroed", + &mangle_internal_symbol(cx.tcx, name.as_str()), + )); + } // apply to argument place instead of function let alloc_align = AttributeKind::AllocAlign.create_attr(cx.llcx); attributes::apply_to_llfn(llfn, AttributePlace::Argument(1), &[alloc_align]); @@ -497,7 +507,7 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( to_add.push(llvm::CreateAttrStringValue(cx.llcx, "wasm-import-module", module)); let name = - 
codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id())); + codegen_fn_attrs.symbol_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id())); let name = name.as_str(); to_add.push(llvm::CreateAttrStringValue(cx.llcx, "wasm-import-name", name)); } diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs index 62998003ca1..7c65d667bda 100644 --- a/compiler/rustc_codegen_llvm/src/back/write.rs +++ b/compiler/rustc_codegen_llvm/src/back/write.rs @@ -1110,7 +1110,7 @@ fn embed_bitcode( llvm::set_section(llglobal, bitcode_section_name(cgcx)); llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage); - llvm::LLVMSetGlobalConstant(llglobal, llvm::True); + llvm::LLVMSetGlobalConstant(llglobal, llvm::TRUE); let llconst = common::bytes_in_context(llcx, &[]); let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline"); diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 427c75d40e9..37379586d58 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -35,7 +35,7 @@ use crate::attributes; use crate::common::Funclet; use crate::context::{CodegenCx, FullCx, GenericCx, SCx}; use crate::llvm::{ - self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, GEPNoWrapFlags, Metadata, True, + self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, GEPNoWrapFlags, Metadata, TRUE, ToLlvmBool, }; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; @@ -493,8 +493,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED); if llvm::LLVMIsAInstruction(add).is_some() { - llvm::LLVMSetNUW(add, True); - llvm::LLVMSetNSW(add, True); + llvm::LLVMSetNUW(add, TRUE); + llvm::LLVMSetNSW(add, TRUE); } add } @@ -503,8 +503,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED); if llvm::LLVMIsAInstruction(sub).is_some() { - llvm::LLVMSetNUW(sub, True); - llvm::LLVMSetNSW(sub, True); + llvm::LLVMSetNUW(sub, TRUE); + llvm::LLVMSetNSW(sub, TRUE); } sub } @@ -513,8 +513,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED); if llvm::LLVMIsAInstruction(mul).is_some() { - llvm::LLVMSetNUW(mul, True); - llvm::LLVMSetNSW(mul, True); + llvm::LLVMSetNUW(mul, TRUE); + llvm::LLVMSetNSW(mul, TRUE); } mul } @@ -528,7 +528,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { // an instruction, so we need to check before setting the flag. // (See also `LLVMBuildNUWNeg` which also needs a check.) 
if llvm::LLVMIsAInstruction(or).is_some() { - llvm::LLVMSetIsDisjoint(or, True); + llvm::LLVMSetIsDisjoint(or, TRUE); } or } @@ -629,7 +629,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value { unsafe { let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED); - llvm::LLVMSetVolatile(load, llvm::True); + llvm::LLVMSetVolatile(load, llvm::TRUE); load } } @@ -717,7 +717,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let mut const_llval = None; let llty = place.layout.llvm_type(self); if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) { - if llvm::LLVMIsGlobalConstant(global) == llvm::True { + if llvm::LLVMIsGlobalConstant(global).is_true() { if let Some(init) = llvm::LLVMGetInitializer(global) { if self.val_ty(init) == llty { const_llval = Some(init); @@ -838,7 +838,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint }; llvm::LLVMSetAlignment(store, align); if flags.contains(MemFlags::VOLATILE) { - llvm::LLVMSetVolatile(store, llvm::True); + llvm::LLVMSetVolatile(store, llvm::TRUE); } if flags.contains(MemFlags::NONTEMPORAL) { // Make sure that the current target architectures supports "sane" non-temporal @@ -956,7 +956,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let trunc = self.trunc(val, dest_ty); unsafe { if llvm::LLVMIsAInstruction(trunc).is_some() { - llvm::LLVMSetNUW(trunc, True); + llvm::LLVMSetNUW(trunc, TRUE); } } trunc @@ -968,7 +968,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let trunc = self.trunc(val, dest_ty); unsafe { if llvm::LLVMIsAInstruction(trunc).is_some() { - llvm::LLVMSetNSW(trunc, True); + llvm::LLVMSetNSW(trunc, TRUE); } } trunc @@ -1067,13 +1067,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value { unsafe { - llvm::LLVMBuildIntCast2( - self.llbuilder, - val, - dest_ty, - if is_signed { True } else { False }, - UNNAMED, - ) + llvm::LLVMBuildIntCast2(self.llbuilder, val, dest_ty, is_signed.to_llvm_bool(), UNNAMED) } } @@ -1229,7 +1223,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false); let landing_pad = self.landing_pad(ty, pers_fn, 0); unsafe { - llvm::LLVMSetCleanup(landing_pad, llvm::True); + llvm::LLVMSetCleanup(landing_pad, llvm::TRUE); } (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1)) } @@ -1317,7 +1311,6 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { failure_order: rustc_middle::ty::AtomicOrdering, weak: bool, ) -> (&'ll Value, &'ll Value) { - let weak = if weak { llvm::True } else { llvm::False }; unsafe { let value = llvm::LLVMBuildAtomicCmpXchg( self.llbuilder, @@ -1326,9 +1319,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { src, AtomicOrdering::from_generic(order), AtomicOrdering::from_generic(failure_order), - llvm::False, // SingleThreaded + llvm::FALSE, // SingleThreaded ); - llvm::LLVMSetWeak(value, weak); + llvm::LLVMSetWeak(value, weak.to_llvm_bool()); let val = self.extract_value(value, 0); let success = self.extract_value(value, 1); (val, success) @@ -1353,7 +1346,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 
'll, 'tcx> { dst, src, AtomicOrdering::from_generic(order), - llvm::False, // SingleThreaded + llvm::FALSE, // SingleThreaded ) }; if ret_ptr && self.val_ty(res) != self.type_ptr() { @@ -1368,14 +1361,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { scope: SynchronizationScope, ) { let single_threaded = match scope { - SynchronizationScope::SingleThread => llvm::True, - SynchronizationScope::CrossThread => llvm::False, + SynchronizationScope::SingleThread => true, + SynchronizationScope::CrossThread => false, }; unsafe { llvm::LLVMBuildFence( self.llbuilder, AtomicOrdering::from_generic(order), - single_threaded, + single_threaded.to_llvm_bool(), UNNAMED, ); } diff --git a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs index e2df3265f6f..6ddf53cdc87 100644 --- a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs +++ b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs @@ -11,7 +11,7 @@ use crate::builder::{Builder, PlaceRef, UNNAMED}; use crate::context::SimpleCx; use crate::declare::declare_simple_fn; use crate::llvm; -use crate::llvm::{Metadata, True, Type}; +use crate::llvm::{Metadata, TRUE, Type}; use crate::value::Value; pub(crate) fn adjust_activity_to_abi<'tcx>( @@ -293,7 +293,7 @@ pub(crate) fn generate_enzyme_call<'ll, 'tcx>( // ret double %0 // } // ``` - let enzyme_ty = unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, True) }; + let enzyme_ty = unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, TRUE) }; // FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and // think a bit more about what should go here. diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index f29fefb66f0..11b79a7fe68 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -20,7 +20,7 @@ use tracing::debug; use crate::consts::const_alloc_to_llvm; pub(crate) use crate::context::CodegenCx; use crate::context::{GenericCx, SCx}; -use crate::llvm::{self, BasicBlock, Bool, ConstantInt, False, Metadata, True}; +use crate::llvm::{self, BasicBlock, ConstantInt, FALSE, Metadata, TRUE, ToLlvmBool}; use crate::type_::Type; use crate::value::Value; @@ -158,7 +158,7 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { self.type_kind(t) == TypeKind::Integer, "only allows integer types in const_int" ); - unsafe { llvm::LLVMConstInt(t, i as u64, True) } + unsafe { llvm::LLVMConstInt(t, i as u64, TRUE) } } fn const_u8(&self, i: u8) -> &'ll Value { @@ -192,7 +192,7 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { self.type_kind(t) == TypeKind::Integer, "only allows integer types in const_uint" ); - unsafe { llvm::LLVMConstInt(t, i, False) } + unsafe { llvm::LLVMConstInt(t, i, FALSE) } } fn const_uint_big(&self, t: &'ll Type, u: u128) -> &'ll Value { @@ -377,7 +377,7 @@ pub(crate) fn val_ty(v: &Value) -> &Type { pub(crate) fn bytes_in_context<'ll>(llcx: &'ll llvm::Context, bytes: &[u8]) -> &'ll Value { unsafe { let ptr = bytes.as_ptr() as *const c_char; - llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), True) + llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), TRUE) } } @@ -392,7 +392,7 @@ fn struct_in_context<'ll>( packed: bool, ) -> &'ll Value { let len = c_uint::try_from(elts.len()).expect("LLVMConstStructInContext elements len overflow"); - unsafe { llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed as Bool) } + unsafe { 
llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed.to_llvm_bool()) } } #[inline] diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index 2d91ae64e2d..4fd6110ac4a 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -471,6 +471,15 @@ pub(crate) unsafe fn create_module<'ll>( } } + if sess.opts.unstable_opts.indirect_branch_cs_prefix { + llvm::add_module_flag_u32( + llmod, + llvm::ModuleFlagMergeBehavior::Override, + "indirect_branch_cs_prefix", + 1, + ); + } + match (sess.opts.unstable_opts.small_data_threshold, sess.target.small_data_threshold_support()) { // Set up the small-data optimization limit for architectures that use @@ -692,7 +701,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { } pub(crate) fn get_const_int(&self, ty: &'ll Type, val: u64) -> &'ll Value { - unsafe { llvm::LLVMConstInt(ty, val, llvm::False) } + unsafe { llvm::LLVMConstInt(ty, val, llvm::FALSE) } } pub(crate) fn get_const_i64(&self, n: u64) -> &'ll Value { diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs index 6eb7042da61..7a6dc008c7b 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs @@ -72,7 +72,7 @@ pub(crate) fn get_or_insert_gdb_debug_scripts_section_global<'ll>( .unwrap_or_else(|| bug!("symbol `{}` is already defined", section_var_name)); llvm::set_section(section_var, c".debug_gdb_scripts"); llvm::set_initializer(section_var, cx.const_bytes(section_contents)); - llvm::LLVMSetGlobalConstant(section_var, llvm::True); + llvm::LLVMSetGlobalConstant(section_var, llvm::TRUE); llvm::set_unnamed_address(section_var, llvm::UnnamedAddr::Global); llvm::set_linkage(section_var, llvm::Linkage::LinkOnceODRLinkage); // This should make sure that the whole section is not larger than diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs index d1502d2b1e6..18a783a348a 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs @@ -276,7 +276,7 @@ pub(super) fn build_type_with_children<'ll, 'tcx>( && let ty::Adt(adt_def, args) = ty.kind() { let def_id = adt_def.did(); - // If any sub type reference the original type definition and the sub type has a type + // If any child type references the original type definition and the child type has a type // parameter that strictly contains the original parameter, the original type is a recursive // type that can expanding indefinitely. 
Example, // ``` @@ -285,21 +285,43 @@ pub(super) fn build_type_with_children<'ll, 'tcx>( // Item(T), // } // ``` - let is_expanding_recursive = adt_def.is_enum() - && debug_context(cx).adt_stack.borrow().iter().any(|(parent_def_id, parent_args)| { - if def_id == *parent_def_id { - args.iter().zip(parent_args.iter()).any(|(arg, parent_arg)| { - if let (Some(arg), Some(parent_arg)) = (arg.as_type(), parent_arg.as_type()) - { - arg != parent_arg && arg.contains(parent_arg) - } else { - false - } - }) - } else { - false - } - }); + let is_expanding_recursive = { + let stack = debug_context(cx).adt_stack.borrow(); + stack + .iter() + .enumerate() + .rev() + .skip(1) + .filter(|(_, (ancestor_def_id, _))| def_id == *ancestor_def_id) + .any(|(ancestor_index, (_, ancestor_args))| { + args.iter() + .zip(ancestor_args.iter()) + .filter_map(|(arg, ancestor_arg)| arg.as_type().zip(ancestor_arg.as_type())) + .any(|(arg, ancestor_arg)| + // Strictly contains. + (arg != ancestor_arg && arg.contains(ancestor_arg)) + // Check all types between current and ancestor use the + // ancestor_arg. + // Otherwise, duplicate wrappers in normal recursive type may be + // regarded as expanding. + // ``` + // struct Recursive { + // a: Box<Box<Recursive>>, + // } + // ``` + // It can produce an ADT stack like this, + // - Box<Recursive> + // - Recursive + // - Box<Box<Recursive>> + && stack[ancestor_index + 1..stack.len()].iter().all( + |(_, intermediate_args)| + intermediate_args + .iter() + .filter_map(|arg| arg.as_type()) + .any(|mid_arg| mid_arg.contains(ancestor_arg)) + )) + }) + }; if is_expanding_recursive { // FIXME: indicate that this is an expanding recursive type in stub metadata? return DINodeCreationResult::new(stub_info.metadata, false); diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs b/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs index b4d639368b0..1dcf4ff3062 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs @@ -38,7 +38,7 @@ pub(crate) fn item_namespace<'ll>(cx: &CodegenCx<'ll, '_>, def_id: DefId) -> &'l parent_scope, namespace_name_string.as_ptr(), namespace_name_string.len(), - llvm::False, // ExportSymbols (only relevant for C++ anonymous namespaces) + llvm::FALSE, // ExportSymbols (only relevant for C++ anonymous namespaces) ) }; diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 4935f8d7dff..49d3dedbeab 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -330,10 +330,16 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { _ => bug!(), }; let ptr = args[0].immediate(); + let locality = fn_args.const_at(1).to_value().valtree.unwrap_leaf().to_i32(); self.call_intrinsic( "llvm.prefetch", &[self.val_ty(ptr)], - &[ptr, self.const_i32(rw), args[1].immediate(), self.const_i32(cache_type)], + &[ + ptr, + self.const_i32(rw), + self.const_i32(locality), + self.const_i32(cache_type), + ], ) } sym::carrying_mul_add => { diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 55d26a97e2d..fa2802a891f 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -11,9 +11,8 @@ //! the need for an extra cast from `*const u8` on the Rust side. 
#![allow(non_camel_case_types)] -#![allow(non_upper_case_globals)] -use std::fmt::Debug; +use std::fmt::{self, Debug}; use std::marker::PhantomData; use std::num::NonZero; use std::ptr; @@ -33,10 +32,59 @@ use crate::llvm; /// In the LLVM-C API, boolean values are passed as `typedef int LLVMBool`, /// which has a different ABI from Rust or C++ `bool`. -pub(crate) type Bool = c_int; +/// +/// This wrapper does not implement `PartialEq`. +/// To test the underlying boolean value, use [`Self::is_true`]. +#[derive(Clone, Copy)] +#[repr(transparent)] +pub(crate) struct Bool { + value: c_int, +} + +pub(crate) const TRUE: Bool = Bool::TRUE; +pub(crate) const FALSE: Bool = Bool::FALSE; + +impl Bool { + pub(crate) const TRUE: Self = Self { value: 1 }; + pub(crate) const FALSE: Self = Self { value: 0 }; + + pub(crate) const fn from_bool(rust_bool: bool) -> Self { + if rust_bool { Self::TRUE } else { Self::FALSE } + } + + /// Converts this LLVM-C boolean to a Rust `bool` + pub(crate) fn is_true(self) -> bool { + // Since we're interacting with a C API, follow the C convention of + // treating any nonzero value as true. + self.value != Self::FALSE.value + } +} -pub(crate) const True: Bool = 1 as Bool; -pub(crate) const False: Bool = 0 as Bool; +impl Debug for Bool { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.value { + 0 => f.write_str("FALSE"), + 1 => f.write_str("TRUE"), + // As with `Self::is_true`, treat any nonzero value as true. + v => write!(f, "TRUE ({v})"), + } + } +} + +/// Convenience trait to convert `bool` to `llvm::Bool` with an explicit method call. +/// +/// Being able to write `b.to_llvm_bool()` is less noisy than `llvm::Bool::from(b)`, +/// while being more explicit and less mistake-prone than something like `b.into()`. +pub(crate) trait ToLlvmBool: Copy { + fn to_llvm_bool(self) -> llvm::Bool; +} + +impl ToLlvmBool for bool { + #[inline(always)] + fn to_llvm_bool(self) -> llvm::Bool { + llvm::Bool::from_bool(self) + } +} /// Wrapper for a raw enum value returned from LLVM's C APIs. /// @@ -251,6 +299,7 @@ pub(crate) enum AttributeKind { Writable = 42, DeadOnUnwind = 43, DeadOnReturn = 44, + CapturesReadOnly = 45, } /// LLVMIntPredicate diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs index 7fea7b79a8c..6adabe53129 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs @@ -215,7 +215,7 @@ pub(crate) fn set_initializer(llglobal: &Value, constant_val: &Value) { } pub(crate) fn set_global_constant(llglobal: &Value, is_constant: bool) { - LLVMSetGlobalConstant(llglobal, if is_constant { ffi::True } else { ffi::False }); + LLVMSetGlobalConstant(llglobal, is_constant.to_llvm_bool()); } pub(crate) fn get_linkage(llglobal: &Value) -> Linkage { @@ -229,7 +229,7 @@ pub(crate) fn set_linkage(llglobal: &Value, linkage: Linkage) { } pub(crate) fn is_declaration(llglobal: &Value) -> bool { - unsafe { LLVMIsDeclaration(llglobal) == ffi::True } + unsafe { LLVMIsDeclaration(llglobal) }.is_true() } pub(crate) fn get_visibility(llglobal: &Value) -> Visibility { diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 28d2100f478..d5025bb405c 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -26,7 +26,7 @@ static INIT: Once = Once::new(); pub(crate) fn init(sess: &Session) { unsafe { // Before we touch LLVM, make sure that multithreading is enabled. 
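The `llvm::Bool` rework above replaces the `True`/`False` constants with a `#[repr(transparent)]` wrapper and a `ToLlvmBool` conversion trait. A minimal, LLVM-free sketch of the same pattern (the `set_volatile` function here is a made-up stand-in for an FFI call and is not part of the patch):

```rust
use std::os::raw::c_int;

/// ABI-compatible stand-in for LLVM's `typedef int LLVMBool`.
#[derive(Clone, Copy)]
#[repr(transparent)]
struct Bool {
    value: c_int,
}

const TRUE: Bool = Bool { value: 1 };
const FALSE: Bool = Bool { value: 0 };

impl Bool {
    /// Follow the C convention: any nonzero value counts as true.
    fn is_true(self) -> bool {
        self.value != 0
    }
}

/// Explicit, low-noise conversion from a Rust `bool` at FFI call sites.
trait ToLlvmBool: Copy {
    fn to_llvm_bool(self) -> Bool;
}

impl ToLlvmBool for bool {
    #[inline(always)]
    fn to_llvm_bool(self) -> Bool {
        if self { TRUE } else { FALSE }
    }
}

/// Made-up stand-in for an FFI function taking an `LLVMBool` parameter.
fn set_volatile(flag: Bool) -> Bool {
    flag
}

fn main() {
    let volatile = true;
    // The call site now says what it means instead of casting `volatile as Bool`.
    assert!(set_volatile(volatile.to_llvm_bool()).is_true());
    assert!(!FALSE.is_true());
}
```

Keeping the conversion explicit at call sites avoids the silent integer casts the old `type Bool = c_int` alias allowed.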
- if llvm::LLVMIsMultithreaded() != 1 { + if !llvm::LLVMIsMultithreaded().is_true() { bug!("LLVM compiled without support for threads"); } INIT.call_once(|| { @@ -277,6 +277,7 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option<LLVMFea { None } + ("loongarch32" | "loongarch64", "32s") if get_version().0 < 21 => None, // Filter out features that are not supported by the current LLVM version ("riscv32" | "riscv64", "zacas") if get_version().0 < 20 => None, ( diff --git a/compiler/rustc_codegen_llvm/src/mono_item.rs b/compiler/rustc_codegen_llvm/src/mono_item.rs index 5075befae8a..52eefe2d4d2 100644 --- a/compiler/rustc_codegen_llvm/src/mono_item.rs +++ b/compiler/rustc_codegen_llvm/src/mono_item.rs @@ -133,7 +133,7 @@ impl CodegenCx<'_, '_> { // Thread-local variables generally don't support copy relocations. let is_thread_local_var = llvm::LLVMIsAGlobalVariable(llval) - .is_some_and(|v| llvm::LLVMIsThreadLocal(v) == llvm::True); + .is_some_and(|v| llvm::LLVMIsThreadLocal(v).is_true()); if is_thread_local_var { return false; } diff --git a/compiler/rustc_codegen_llvm/src/type_.rs b/compiler/rustc_codegen_llvm/src/type_.rs index f02d16baf94..9ecaf5f24fe 100644 --- a/compiler/rustc_codegen_llvm/src/type_.rs +++ b/compiler/rustc_codegen_llvm/src/type_.rs @@ -15,7 +15,7 @@ use rustc_target::callconv::{CastTarget, FnAbi}; use crate::abi::{FnAbiLlvmExt, LlvmType}; use crate::context::{CodegenCx, GenericCx, SCx}; pub(crate) use crate::llvm::Type; -use crate::llvm::{Bool, False, Metadata, True}; +use crate::llvm::{FALSE, Metadata, TRUE, ToLlvmBool}; use crate::type_of::LayoutLlvmExt; use crate::value::Value; use crate::{common, llvm}; @@ -53,7 +53,9 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { } pub(crate) fn set_struct_body(&self, ty: &'ll Type, els: &[&'ll Type], packed: bool) { - unsafe { llvm::LLVMStructSetBody(ty, els.as_ptr(), els.len() as c_uint, packed as Bool) } + unsafe { + llvm::LLVMStructSetBody(ty, els.as_ptr(), els.len() as c_uint, packed.to_llvm_bool()) + } } pub(crate) fn type_void(&self) -> &'ll Type { unsafe { llvm::LLVMVoidTypeInContext(self.llcx()) } @@ -139,7 +141,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { } pub(crate) fn type_variadic_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type { - unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, True) } + unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, TRUE) } } pub(crate) fn type_i1(&self) -> &'ll Type { @@ -152,7 +154,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> { self.llcx(), els.as_ptr(), els.len() as c_uint, - packed as Bool, + packed.to_llvm_bool(), ) } } @@ -200,7 +202,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> BaseTypeCodegenMethods for GenericCx<'ll, CX> { } fn type_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type { - unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, False) } + unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, FALSE) } } fn type_kind(&self, ty: &'ll Type) -> TypeKind { diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index 42ba0154192..44b9941691a 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -171,9 +171,6 @@ codegen_ssa_invalid_monomorphization_unsupported_symbol = invalid monomorphizati codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size = invalid monomorphization of `{$name}` intrinsic: unsupported {$symbol} from `{$in_ty}` 
with element `{$in_elem}` of size `{$size}` to `{$ret_ty}` -codegen_ssa_invalid_sanitize = invalid argument for `sanitize` - .note = expected one of: `address`, `kernel_address`, `cfi`, `hwaddress`, `kcfi`, `memory`, `memtag`, `shadow_call_stack`, or `thread` - codegen_ssa_invalid_windows_subsystem = invalid windows subsystem `{$subsystem}`, only `windows` and `console` are allowed codegen_ssa_ld64_unimplemented_modifier = `as-needed` modifier not implemented yet for ld64 diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index 4ebe59dc2a7..c3777f64e9e 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -2435,6 +2435,13 @@ fn linker_with_args( // Passed after compiler-generated options to support manual overriding when necessary. add_user_defined_link_args(cmd, sess); + // ------------ Builtin configurable linker scripts ------------ + // The user's link args should be able to overwrite symbols in the compiler's + // linker script that were weakly defined (i.e. defined with `PROVIDE()`). For this + // to work correctly, the user needs to be able to specify linker arguments like + // `--defsym` and `--script` *before* any builtin linker scripts are evaluated. + add_link_script(cmd, sess, tmpdir, crate_type); + // ------------ Object code and libraries, order-dependent ------------ // Post-link CRT objects. @@ -2469,8 +2476,6 @@ fn add_order_independent_options( let apple_sdk_root = add_apple_sdk(cmd, sess, flavor); - add_link_script(cmd, sess, tmpdir, crate_type); - if sess.target.os == "fuchsia" && crate_type == CrateType::Executable && !matches!(flavor, LinkerFlavor::Gnu(Cc::Yes, _)) diff --git a/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs b/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs index b9e0c957363..509168b2cd2 100644 --- a/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs +++ b/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs @@ -307,11 +307,14 @@ fn create_elf_raw_dylib_stub(sess: &Session, soname: &str, symbols: &[DllImport] stub.reserve_section_headers(); stub.reserve_dynsym(); stub.reserve_dynstr(); + let verdef_count = 1 + vers.len(); + let mut dynamic_entries = 2; // DT_SONAME, DT_NULL if !vers.is_empty() { stub.reserve_gnu_versym(); - stub.reserve_gnu_verdef(1 + vers.len(), 1 + vers.len()); + stub.reserve_gnu_verdef(verdef_count, verdef_count); + dynamic_entries += 1; // DT_VERDEFNUM } - stub.reserve_dynamic(2); // DT_SONAME, DT_NULL + stub.reserve_dynamic(dynamic_entries); // First write the ELF header with the arch information. let e_machine = match (arch, sub_arch) { @@ -443,9 +446,13 @@ fn create_elf_raw_dylib_stub(sess: &Session, soname: &str, symbols: &[DllImport] // .dynamic // the DT_SONAME will be used by the linker to populate DT_NEEDED // which the loader uses to find the library. - // DT_NULL terminates the .dynamic table. stub.write_align_dynamic(); stub.write_dynamic_string(elf::DT_SONAME, soname); + // LSB section "2.7. Symbol Versioning" requires `DT_VERDEFNUM` to be reliable. + if verdef_count > 1 { + stub.write_dynamic(elf::DT_VERDEFNUM, verdef_count as u64); + } + // DT_NULL terminates the .dynamic table. 
stub.write_dynamic(elf::DT_NULL, 0); stub_buf diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index 67cd1f4cd41..8abaf201aba 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -858,7 +858,7 @@ pub fn is_call_from_compiler_builtins_to_upstream_monomorphization<'tcx>( instance: Instance<'tcx>, ) -> bool { fn is_llvm_intrinsic(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - if let Some(name) = tcx.codegen_fn_attrs(def_id).link_name { + if let Some(name) = tcx.codegen_fn_attrs(def_id).symbol_name { name.as_str().starts_with("llvm.") } else { false diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index bf14c02a09c..6b0bd64102c 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -6,7 +6,6 @@ use rustc_ast::{LitKind, MetaItem, MetaItemInner, attr}; use rustc_hir::attrs::{AttributeKind, InlineAttr, InstructionSetAttr, UsedBy}; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; -use rustc_hir::weak_lang_items::WEAK_LANG_ITEMS; use rustc_hir::{self as hir, Attribute, LangItem, find_attr, lang_items}; use rustc_middle::middle::codegen_fn_attrs::{ CodegenFnAttrFlags, CodegenFnAttrs, PatchableFunctionEntry, @@ -156,7 +155,7 @@ fn process_builtin_attrs( match p { AttributeKind::Cold(_) => codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD, AttributeKind::ExportName { name, .. } => { - codegen_fn_attrs.export_name = Some(*name) + codegen_fn_attrs.symbol_name = Some(*name) } AttributeKind::Inline(inline, span) => { codegen_fn_attrs.inline = *inline; @@ -164,7 +163,13 @@ fn process_builtin_attrs( } AttributeKind::Naked(_) => codegen_fn_attrs.flags |= CodegenFnAttrFlags::NAKED, AttributeKind::Align { align, .. } => codegen_fn_attrs.alignment = Some(*align), - AttributeKind::LinkName { name, .. } => codegen_fn_attrs.link_name = Some(*name), + AttributeKind::LinkName { name, .. } => { + // FIXME Remove check for foreign functions once #[link_name] on non-foreign + // functions is a hard error + if tcx.is_foreign_item(did) { + codegen_fn_attrs.symbol_name = Some(*name); + } + } AttributeKind::LinkOrdinal { ordinal, span } => { codegen_fn_attrs.link_ordinal = Some(*ordinal); interesting_spans.link_ordinal = Some(*span); @@ -188,7 +193,7 @@ fn process_builtin_attrs( } } AttributeKind::Optimize(optimize, _) => codegen_fn_attrs.optimize = *optimize, - AttributeKind::TargetFeature(features, attr_span) => { + AttributeKind::TargetFeature { features, attr_span, was_forced } => { let Some(sig) = tcx.hir_node_by_def_id(did).fn_sig() else { tcx.dcx().span_delayed_bug(*attr_span, "target_feature applied to non-fn"); continue; @@ -196,7 +201,7 @@ fn process_builtin_attrs( let safe_target_features = matches!(sig.header.safety, hir::HeaderSafety::SafeTargetFeatures); codegen_fn_attrs.safe_target_features = safe_target_features; - if safe_target_features { + if safe_target_features && !was_forced { if tcx.sess.target.is_like_wasm || tcx.sess.opts.actually_rustdoc { // The `#[target_feature]` attribute is allowed on // WebAssembly targets on all functions. Prior to stabilizing @@ -227,6 +232,7 @@ fn process_builtin_attrs( tcx, did, features, + *was_forced, rust_target_features, &mut codegen_fn_attrs.target_features, ); @@ -287,6 +293,9 @@ fn process_builtin_attrs( codegen_fn_attrs.linkage = linkage; } } + AttributeKind::Sanitize { span, .. 
} => { + interesting_spans.sanitize = Some(*span); + } _ => {} } } @@ -304,7 +313,6 @@ fn process_builtin_attrs( codegen_fn_attrs.flags |= CodegenFnAttrFlags::ALLOCATOR_ZEROED } sym::thread_local => codegen_fn_attrs.flags |= CodegenFnAttrFlags::THREAD_LOCAL, - sym::sanitize => interesting_spans.sanitize = Some(attr.span()), sym::instruction_set => { codegen_fn_attrs.instruction_set = parse_instruction_set_attr(tcx, attr) } @@ -382,7 +390,7 @@ fn apply_overrides(tcx: TyCtxt<'_>, did: LocalDefId, codegen_fn_attrs: &mut Code // * `#[rustc_std_internal_symbol]` mangles the symbol name in a special way // both for exports and imports through foreign items. This is handled further, // during symbol mangling logic. - } else if codegen_fn_attrs.link_name.is_some() { + } else if codegen_fn_attrs.symbol_name.is_some() { // * This can be overridden with the `#[link_name]` attribute } else { // NOTE: there's one more exception that we cannot apply here. On wasm, @@ -437,7 +445,7 @@ fn check_result( } // error when specifying link_name together with link_ordinal - if let Some(_) = codegen_fn_attrs.link_name + if let Some(_) = codegen_fn_attrs.symbol_name && let Some(_) = codegen_fn_attrs.link_ordinal { let msg = "cannot use `#[link_name]` with `#[link_ordinal]`"; @@ -457,7 +465,7 @@ fn check_result( .collect(), ) { let span = - find_attr!(tcx.get_all_attrs(did), AttributeKind::TargetFeature(_, span) => *span) + find_attr!(tcx.get_all_attrs(did), AttributeKind::TargetFeature{attr_span: span, ..} => *span) .unwrap_or_else(|| tcx.def_span(did)); tcx.dcx() @@ -484,14 +492,11 @@ fn handle_lang_items( // strippable by the linker. // // Additionally weak lang items have predetermined symbol names. - if let Some(lang_item) = lang_item { - if WEAK_LANG_ITEMS.contains(&lang_item) { - codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL; - } - if let Some(link_name) = lang_item.link_name() { - codegen_fn_attrs.export_name = Some(link_name); - codegen_fn_attrs.link_name = Some(link_name); - } + if let Some(lang_item) = lang_item + && let Some(link_name) = lang_item.link_name() + { + codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL; + codegen_fn_attrs.symbol_name = Some(link_name); } // error when using no_mangle on a lang item item @@ -557,79 +562,9 @@ fn opt_trait_item(tcx: TyCtxt<'_>, def_id: DefId) -> Option<DefId> { } } -/// For an attr that has the `sanitize` attribute, read the list of -/// disabled sanitizers. `current_attr` holds the information about -/// previously parsed attributes. -fn parse_sanitize_attr( - tcx: TyCtxt<'_>, - attr: &Attribute, - current_attr: SanitizerSet, -) -> SanitizerSet { - let mut result = current_attr; - if let Some(list) = attr.meta_item_list() { - for item in list.iter() { - let MetaItemInner::MetaItem(set) = item else { - tcx.dcx().emit_err(errors::InvalidSanitize { span: attr.span() }); - break; - }; - let segments = set.path.segments.iter().map(|x| x.ident.name).collect::<Vec<_>>(); - match segments.as_slice() { - // Similar to clang, sanitize(address = ..) and - // sanitize(kernel_address = ..) control both ASan and KASan - // Source: https://reviews.llvm.org/D44981. 
- [sym::address] | [sym::kernel_address] if set.value_str() == Some(sym::off) => { - result |= SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS - } - [sym::address] | [sym::kernel_address] if set.value_str() == Some(sym::on) => { - result &= !SanitizerSet::ADDRESS; - result &= !SanitizerSet::KERNELADDRESS; - } - [sym::cfi] if set.value_str() == Some(sym::off) => result |= SanitizerSet::CFI, - [sym::cfi] if set.value_str() == Some(sym::on) => result &= !SanitizerSet::CFI, - [sym::kcfi] if set.value_str() == Some(sym::off) => result |= SanitizerSet::KCFI, - [sym::kcfi] if set.value_str() == Some(sym::on) => result &= !SanitizerSet::KCFI, - [sym::memory] if set.value_str() == Some(sym::off) => { - result |= SanitizerSet::MEMORY - } - [sym::memory] if set.value_str() == Some(sym::on) => { - result &= !SanitizerSet::MEMORY - } - [sym::memtag] if set.value_str() == Some(sym::off) => { - result |= SanitizerSet::MEMTAG - } - [sym::memtag] if set.value_str() == Some(sym::on) => { - result &= !SanitizerSet::MEMTAG - } - [sym::shadow_call_stack] if set.value_str() == Some(sym::off) => { - result |= SanitizerSet::SHADOWCALLSTACK - } - [sym::shadow_call_stack] if set.value_str() == Some(sym::on) => { - result &= !SanitizerSet::SHADOWCALLSTACK - } - [sym::thread] if set.value_str() == Some(sym::off) => { - result |= SanitizerSet::THREAD - } - [sym::thread] if set.value_str() == Some(sym::on) => { - result &= !SanitizerSet::THREAD - } - [sym::hwaddress] if set.value_str() == Some(sym::off) => { - result |= SanitizerSet::HWADDRESS - } - [sym::hwaddress] if set.value_str() == Some(sym::on) => { - result &= !SanitizerSet::HWADDRESS - } - _ => { - tcx.dcx().emit_err(errors::InvalidSanitize { span: attr.span() }); - } - } - } - result -} - fn disabled_sanitizers_for(tcx: TyCtxt<'_>, did: LocalDefId) -> SanitizerSet { // Backtrack to the crate root. - let disabled = match tcx.opt_local_parent(did) { + let mut disabled = match tcx.opt_local_parent(did) { // Check the parent (recursively). Some(parent) => tcx.disabled_sanitizers_for(parent), // We reached the crate root without seeing an attribute, so @@ -638,8 +573,17 @@ fn disabled_sanitizers_for(tcx: TyCtxt<'_>, did: LocalDefId) -> SanitizerSet { }; // Check for a sanitize annotation directly on this def. - if let Some(attr) = tcx.get_attr(did, sym::sanitize) { - return parse_sanitize_attr(tcx, attr, disabled); + if let Some((on_set, off_set)) = find_attr!(tcx.get_all_attrs(did), AttributeKind::Sanitize {on_set, off_set, ..} => (on_set, off_set)) + { + // the on set is the set of sanitizers explicitly enabled. + // we mask those out since we want the set of disabled sanitizers here + disabled &= !*on_set; + // the off set is the set of sanitizers explicitly disabled. + // we or those in here. + disabled |= *off_set; + // the on set and off set are disjoint since there's a third option: unset. + // a node may not set the sanitizer setting in which case it inherits from parents.
+ // the code above in this function does this backtracking } disabled } diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index 209c78ddeda..fb5a8205140 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -1121,14 +1121,6 @@ impl IntoDiagArg for ExpectedPointerMutability { } #[derive(Diagnostic)] -#[diag(codegen_ssa_invalid_sanitize)] -#[note] -pub(crate) struct InvalidSanitize { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(codegen_ssa_target_feature_safe_trait)] pub(crate) struct TargetFeatureSafeTrait { #[primary_span] diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs index 7e4341a8236..54584999d61 100644 --- a/compiler/rustc_codegen_ssa/src/target_features.rs +++ b/compiler/rustc_codegen_ssa/src/target_features.rs @@ -3,7 +3,7 @@ use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_hir::attrs::InstructionSetAttr; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; -use rustc_middle::middle::codegen_fn_attrs::TargetFeature; +use rustc_middle::middle::codegen_fn_attrs::{TargetFeature, TargetFeatureKind}; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; use rustc_session::Session; @@ -22,6 +22,7 @@ pub(crate) fn from_target_feature_attr( tcx: TyCtxt<'_>, did: LocalDefId, features: &[(Symbol, Span)], + was_forced: bool, rust_target_features: &UnordMap<String, target_features::Stability>, target_features: &mut Vec<TargetFeature>, ) { @@ -88,7 +89,14 @@ pub(crate) fn from_target_feature_attr( } } } - target_features.push(TargetFeature { name, implied: name != feature }) + let kind = if name != feature { + TargetFeatureKind::Implied + } else if was_forced { + TargetFeatureKind::Forced + } else { + TargetFeatureKind::Enabled + }; + target_features.push(TargetFeature { name, kind }) } } } @@ -180,6 +188,7 @@ fn parse_rust_feature_flag<'a>( while let Some(new_feature) = new_features.pop() { if features.insert(new_feature) { if let Some(implied_features) = inverse_implied_features.get(&new_feature) { + #[allow(rustc::potential_query_instability)] new_features.extend(implied_features) } } diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index da954cf4ed7..fccb6b171b1 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -280,22 +280,110 @@ impl<'tcx> CompileTimeInterpCx<'tcx> { interp_ok(match (a, b) { // Comparisons between integers are always known. (Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8, - // Comparisons of null with an arbitrary scalar can be known if `scalar_may_be_null` - // indicates that the scalar can definitely *not* be null. - (Scalar::Int(int), ptr) | (ptr, Scalar::Int(int)) - if int.is_null() && !self.scalar_may_be_null(ptr)? => - { - 0 + // Comparing a pointer `ptr` with an integer `int` is equivalent to comparing + // `ptr-int` with null, so we can reduce this case to a `scalar_may_be_null` test. + (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => { + let int = int.to_target_usize(*self.tcx); + // The `wrapping_neg` here may produce a value that is not + // a valid target usize any more... but `wrapping_offset` handles that correctly. 
+ let offset_ptr = ptr.wrapping_offset(Size::from_bytes(int.wrapping_neg()), self); + if !self.scalar_may_be_null(Scalar::from_pointer(offset_ptr, self))? { + // `ptr.wrapping_sub(int)` is definitely not equal to `0`, so `ptr != int` + 0 + } else { + // `ptr.wrapping_sub(int)` could be equal to `0`, but might not be, + // so we cannot know for sure if `ptr == int` or not + 2 + } + } + (Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => { + let (a_prov, a_offset) = a.prov_and_relative_offset(); + let (b_prov, b_offset) = b.prov_and_relative_offset(); + let a_allocid = a_prov.alloc_id(); + let b_allocid = b_prov.alloc_id(); + let a_info = self.get_alloc_info(a_allocid); + let b_info = self.get_alloc_info(b_allocid); + + // Check if the pointers cannot be equal due to alignment + if a_info.align > Align::ONE && b_info.align > Align::ONE { + let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes()); + let a_residue = a_offset.bytes() % min_align; + let b_residue = b_offset.bytes() % min_align; + if a_residue != b_residue { + // If the two pointers have a different residue modulo their + // common alignment, they cannot be equal. + return interp_ok(0); + } + // The pointers have the same residue modulo their common alignment, + // so they could be equal. Try the other checks. + } + + if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = ( + self.tcx.try_get_global_alloc(a_allocid), + self.tcx.try_get_global_alloc(b_allocid), + ) { + if a_allocid == b_allocid { + debug_assert_eq!( + a_did, b_did, + "different static item DefIds had same AllocId? {a_allocid:?} == {b_allocid:?}, {a_did:?} != {b_did:?}" + ); + // Comparing two pointers into the same static. As per + // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro + // a static cannot be duplicated, so if two pointers are into the same + // static, they are equal if and only if their offsets are equal. + (a_offset == b_offset) as u8 + } else { + debug_assert_ne!( + a_did, b_did, + "same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}" + ); + // Comparing two pointers into the different statics. + // We can never determine for sure that two pointers into different statics + // are *equal*, but we can know that they are *inequal* if they are both + // strictly in-bounds (i.e. in-bounds and not one-past-the-end) of + // their respective static, as different non-zero-sized statics cannot + // overlap or be deduplicated as per + // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro + // (non-deduplication), and + // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness + // (non-overlapping). + if a_offset < a_info.size && b_offset < b_info.size { + 0 + } else { + // Otherwise, conservatively say we don't know. + // There are some cases we could still return `0` for, e.g. + // if the pointers being equal would require their statics to overlap + // one or more bytes, but for simplicity we currently only check + // strictly in-bounds pointers. + 2 + } + } + } else { + // All other cases we conservatively say we don't know. + // + // For comparing statics to non-statics, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness + // immutable statics can overlap with other kinds of allocations sometimes. 
+ // + // FIXME: We could be more decisive for (non-zero-sized) mutable statics, + // which cannot overlap with other kinds of allocations. + // + // Functions and vtables can be duplicated and deduplicated, so we + // cannot be sure of runtime equality of pointers to the same one, or the + // runtime inequality of pointers to different ones (see e.g. #73722), + // so comparing those should return 2, whether they are the same allocation + // or not. + // + // `GlobalAlloc::TypeId` exists mostly to prevent consteval from comparing + // `TypeId`s, so comparing those should always return 2, whether they are the + // same allocation or not. + // + // FIXME: We could revisit comparing pointers into the same + // `GlobalAlloc::Memory` once https://github.com/rust-lang/rust/issues/128775 + // is fixed (but they can be deduplicated, so comparing pointers into different + // ones should return 2). + 2 + } } - // Other ways of comparing integers and pointers can never be known for sure. - (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2, - // FIXME: return a `1` for when both sides are the same pointer, *except* that - // some things (like functions and vtables) do not have stable addresses - // so we need to be careful around them (see e.g. #73722). - // FIXME: return `0` for at least some comparisons where we can reliably - // determine the result of runtime inequality tests at compile-time. - // Examples include comparison of addresses in different static items. - (Scalar::Ptr(..), Scalar::Ptr(..)) => 2, }) } } diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index 64cb934ac8d..4cb88d44e1b 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -27,8 +27,9 @@ use crate::{enter_trace_span, fluent_generated as fluent}; pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> { /// Pass a copy of the given operand. Copy(OpTy<'tcx, Prov>), - /// Allow for the argument to be passed in-place: destroy the value originally stored at that place and - /// make the place inaccessible for the duration of the function call. + /// Allow for the argument to be passed in-place: destroy the value originally stored at that + /// place and make the place inaccessible for the duration of the function call. This *must* be + /// an in-memory place so that we can do the proper alias checks. InPlace(MPlaceTy<'tcx, Prov>), } @@ -379,6 +380,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } + // *Before* pushing the new frame, determine whether the return destination is in memory. + // Need to use `place_to_op` to be *sure* we get the mplace if there is one. + let destination_mplace = self.place_to_op(destination)?.as_mplace_or_imm().left(); + + // Push the "raw" frame -- this leaves locals uninitialized. self.push_stack_frame_raw(instance, body, destination, cont)?; // If an error is raised here, pop the frame again to get an accurate backtrace. @@ -496,7 +502,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Protect return place for in-place return value passing. // We only need to protect anything if this is actually an in-memory place. 
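Stepping back to the `guaranteed_cmp` rewrite shown above: the first new pointer-pointer check rules out equality purely from alignment, because two pointers whose offsets have different residues modulo their common minimum alignment can never address the same byte. A standalone sketch of that residue test (illustrative only; plain integers stand in for interpreter pointers, and the names are not from the patch):

```rust
/// Two pointers whose offsets have different residues modulo the smaller of
/// their allocations' alignments can never be equal.
fn cannot_be_equal_by_alignment(a_offset: u64, a_align: u64, b_offset: u64, b_align: u64) -> bool {
    if a_align > 1 && b_align > 1 {
        let min_align = a_align.min(b_align);
        a_offset % min_align != b_offset % min_align
    } else {
        // With alignment 1 on either side this argument gives no information.
        false
    }
}

fn main() {
    // Both allocations are 8-aligned; offsets 0 and 4 differ mod 8, so the
    // pointers cannot alias and the comparison can report "definitely unequal".
    assert!(cannot_be_equal_by_alignment(0, 8, 4, 8));
    // Same residue mod 8: this check alone is inconclusive.
    assert!(!cannot_be_equal_by_alignment(8, 8, 16, 8));
}
```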
- if let Left(mplace) = destination.as_mplace_or_local() { + if let Some(mplace) = destination_mplace { M::protect_in_place_function_argument(self, &mplace)?; } diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs index a8a1ac1c980..9681d89ce35 100644 --- a/compiler/rustc_const_eval/src/interpret/eval_context.rs +++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs @@ -325,8 +325,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let _trace = enter_trace_span!( M, "instantiate_from_frame_and_normalize_erasing_regions", - "{}", - frame.instance + %frame.instance ); frame .instance @@ -583,6 +582,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { span: Span, layout: Option<TyAndLayout<'tcx>>, ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> { + let _trace = enter_trace_span!(M, const_eval::eval_mir_constant, ?val); let const_val = val.eval(*self.tcx, self.typing_env, span).map_err(|err| { if M::ALL_CONSTS_ARE_PRECHECKED { match err { diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index e7fe4c0b34f..a86fdf80f60 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -234,6 +234,12 @@ impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> { } /// A place is either an mplace or some local. + /// + /// Note that the return value can be different even for logically identical places! + /// Specifically, if a local is stored in-memory, this may return `Local` or `MPlaceTy` + /// depending on how the place was constructed. In other words, seeing `Local` here does *not* + /// imply that this place does not point to memory. Every caller must therefore always handle + /// both cases. #[inline(always)] pub fn as_mplace_or_local( &self, diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 73cc87508ef..7cabfd96121 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -20,7 +20,7 @@ use super::{ MemoryKind, Operand, PlaceTy, Pointer, Provenance, ReturnAction, Scalar, from_known_layout, interp_ok, throw_ub, throw_unsup, }; -use crate::errors; +use crate::{enter_trace_span, errors}; // The Phantomdata exists to prevent this type from being `Send`. If it were sent across a thread // boundary and dropped in the other thread, it would exit the span in the other thread. @@ -386,6 +386,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check). for &const_ in body.required_consts() { + // We can't use `eval_mir_constant` here as that assumes that all required consts have + // already been checked, so we need a separate tracing call. + let _trace = enter_trace_span!(M, const_eval::required_consts, ?const_.const_); let c = self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?; c.eval(*self.tcx, self.typing_env, const_.span).map_err(|err| { diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index f1995b3f132..23d362de308 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -2,8 +2,11 @@ //! //! The main entry point is the `step` method. 
+use std::iter; + use either::Either; use rustc_abi::{FIRST_VARIANT, FieldIdx}; +use rustc_data_structures::fx::FxHashSet; use rustc_index::IndexSlice; use rustc_middle::ty::{self, Instance, Ty}; use rustc_middle::{bug, mir, span_bug}; @@ -389,8 +392,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Evaluate the arguments of a function call fn eval_fn_call_argument( - &self, + &mut self, op: &mir::Operand<'tcx>, + move_definitely_disjoint: bool, ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> { interp_ok(match op { mir::Operand::Copy(_) | mir::Operand::Constant(_) => { @@ -399,24 +403,19 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { FnArg::Copy(op) } mir::Operand::Move(place) => { - // If this place lives in memory, preserve its location. - // We call `place_to_op` which will be an `MPlaceTy` whenever there exists - // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local` - // which can return a local even if that has an mplace.) let place = self.eval_place(*place)?; - let op = self.place_to_op(&place)?; - - match op.as_mplace_or_imm() { - Either::Left(mplace) => FnArg::InPlace(mplace), - Either::Right(_imm) => { - // This argument doesn't live in memory, so there's no place - // to make inaccessible during the call. - // We rely on there not being any stray `PlaceTy` that would let the - // caller directly access this local! - // This is also crucial for tail calls, where we want the `FnArg` to - // stay valid when the old stack frame gets popped. - FnArg::Copy(op) + if move_definitely_disjoint { + // We still have to ensure that no *other* pointers are used to access this place, + // so *if* it is in memory then we have to treat it as `InPlace`. + // Use `place_to_op` to guarantee that we notice it being in memory. + let op = self.place_to_op(&place)?; + match op.as_mplace_or_imm() { + Either::Left(mplace) => FnArg::InPlace(mplace), + Either::Right(_imm) => FnArg::Copy(op), } + } else { + // We have to force this into memory to detect aliasing among `Move` arguments. + FnArg::InPlace(self.force_allocation(&place)?) } } }) @@ -425,18 +424,53 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the /// necessary information about callee and arguments to make a call. fn eval_callee_and_args( - &self, + &mut self, terminator: &mir::Terminator<'tcx>, func: &mir::Operand<'tcx>, args: &[Spanned<mir::Operand<'tcx>>], + dest: &mir::Place<'tcx>, ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> { let func = self.eval_operand(func, None)?; + + // Evaluating function call arguments. The tricky part here is dealing with `Move` + // arguments: we have to ensure no two such arguments alias. This would be most easily done + // by just forcing them all into memory and then doing the usual in-place argument + // protection, but then we'd force *a lot* of arguments into memory. So we do some syntactic + // pre-processing here where if all `move` arguments are syntactically distinct local + // variables (and none is indirect), we can skip the in-memory forcing. + // We have to include `dest` in that list so that we can detect aliasing of an in-place + // argument with the return place. + let move_definitely_disjoint = 'move_definitely_disjoint: { + let mut previous_locals = FxHashSet::<mir::Local>::default(); + for place in args + .iter() + .filter_map(|a| { + // We only have to care about `Move` arguments. 
+ if let mir::Operand::Move(place) = &a.node { Some(place) } else { None } + }) + .chain(iter::once(dest)) + { + if place.is_indirect_first_projection() { + // An indirect in-place argument could alias with anything else... + break 'move_definitely_disjoint false; + } + if !previous_locals.insert(place.local) { + // This local is the base for two arguments! They might overlap. + break 'move_definitely_disjoint false; + } + } + // We found no violation so they are all definitely disjoint. + true + }; let args = args .iter() - .map(|arg| self.eval_fn_call_argument(&arg.node)) + .map(|arg| self.eval_fn_call_argument(&arg.node, move_definitely_disjoint)) .collect::<InterpResult<'tcx, Vec<_>>>()?; - let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx); + let fn_sig_binder = { + let _trace = enter_trace_span!(M, "fn_sig", ty = ?func.layout.ty.kind()); + func.layout.ty.fn_sig(*self.tcx) + }; let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder); let extra_args = &args[fn_sig.inputs().len()..]; let extra_args = @@ -519,7 +553,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let old_loc = self.frame().loc; let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = - self.eval_callee_and_args(terminator, func, args)?; + self.eval_callee_and_args(terminator, func, args, &destination)?; let destination = self.eval_place(destination)?; self.init_fn_call( @@ -542,7 +576,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let old_frame_idx = self.frame_idx(); let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = - self.eval_callee_and_args(terminator, func, args)?; + self.eval_callee_and_args(terminator, func, args, &mir::Place::return_place())?; self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?; diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index 5e8bee65706..02e3d90f4af 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -1418,7 +1418,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let _trace = enter_trace_span!( M, "validate_operand", - "recursive={recursive}, reset_provenance_and_padding={reset_provenance_and_padding}, val={val:?}" + recursive, + reset_provenance_and_padding, + ?val, ); // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 13cc607135a..2ae6655901b 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -1,7 +1,7 @@ use std::fmt::Write; use rustc_data_structures::intern::Interned; -use rustc_hir::def_id::CrateNum; +use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::DisambiguatedDefPathData; use rustc_middle::bug; use rustc_middle::ty::print::{PrettyPrinter, PrintError, Printer}; @@ -132,15 +132,44 @@ impl<'tcx> Printer<'tcx> for TypeNamePrinter<'tcx> { Ok(()) } } + + fn print_coroutine_with_kind( + &mut self, + def_id: DefId, + parent_args: &'tcx [GenericArg<'tcx>], + kind: Ty<'tcx>, + ) -> Result<(), PrintError> { + self.print_def_path(def_id, parent_args)?; + + let ty::Coroutine(_, args) = self.tcx.type_of(def_id).instantiate_identity().kind() else { + // Could be `ty::Error`. 
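The `move_definitely_disjoint` pre-check above is purely syntactic: the `Move` arguments (plus the return place) are definitely disjoint when none of them is indirect and no two share a base local. A simplified stand-in using plain indices (hypothetical helper, not the interpreter's types):

```rust
use std::collections::HashSet;
use std::iter;

/// Each place is reduced to (base local, is-indirect). `Move` arguments plus
/// the return place are definitely disjoint if none is indirect and no base
/// local repeats; otherwise the real code forces the arguments into memory
/// and relies on full in-place protection.
fn definitely_disjoint(move_args: &[(usize, bool)], dest: (usize, bool)) -> bool {
    let mut seen = HashSet::new();
    for &(local, is_indirect) in move_args.iter().chain(iter::once(&dest)) {
        if is_indirect {
            // An indirect place could alias with anything else.
            return false;
        }
        if !seen.insert(local) {
            // Two places share a base local, so they might overlap.
            return false;
        }
    }
    true
}

fn main() {
    // f(move _1, move _2) returning into _0: all distinct, non-indirect locals.
    assert!(definitely_disjoint(&[(1, false), (2, false)], (0, false)));
    // f(move _1, move _1): the same base local appears twice.
    assert!(!definitely_disjoint(&[(1, false), (1, false)], (0, false)));
}
```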
+ return Ok(()); + }; + + let default_kind = args.as_coroutine().kind_ty(); + + match kind.to_opt_closure_kind() { + _ if kind == default_kind => { + // No need to mark the closure if it's the deduced coroutine kind. + } + Some(ty::ClosureKind::Fn) | None => { + // Should never happen. Just don't mark anything rather than panicking. + } + Some(ty::ClosureKind::FnMut) => self.path.push_str("::{{call_mut}}"), + Some(ty::ClosureKind::FnOnce) => self.path.push_str("::{{call_once}}"), + } + + Ok(()) + } } impl<'tcx> PrettyPrinter<'tcx> for TypeNamePrinter<'tcx> { - fn should_print_optional_region(&self, _region: ty::Region<'_>) -> bool { + fn should_print_optional_region(&self, region: ty::Region<'_>) -> bool { // Bound regions are always printed (as `'_`), which gives some idea that they are special, // even though the `for` is omitted by the pretty printer. // E.g. `for<'a, 'b> fn(&'a u32, &'b u32)` is printed as "fn(&'_ u32, &'_ u32)". - match _region.kind() { - ty::ReErased => false, + match region.kind() { + ty::ReErased | ty::ReEarlyParam(_) => false, ty::ReBound(..) => true, _ => unreachable!(), } diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 53178d09348..17da3ea83c8 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -77,6 +77,7 @@ pub mod thinvec; pub mod thousands; pub mod transitive_relation; pub mod unhash; +pub mod union_find; pub mod unord; pub mod vec_cache; pub mod work_queue; diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs b/compiler/rustc_data_structures/src/union_find.rs index a826a953fa6..ef73cd7ab40 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs +++ b/compiler/rustc_data_structures/src/union_find.rs @@ -9,7 +9,7 @@ mod tests; /// Simple implementation of a union-find data structure, i.e. a disjoint-set /// forest. #[derive(Debug)] -pub(crate) struct UnionFind<Key: Idx> { +pub struct UnionFind<Key: Idx> { table: IndexVec<Key, UnionFindEntry<Key>>, } @@ -28,7 +28,7 @@ struct UnionFindEntry<Key> { impl<Key: Idx> UnionFind<Key> { /// Creates a new disjoint-set forest containing the keys `0..num_keys`. /// Initially, every key is part of its own one-element set. - pub(crate) fn new(num_keys: usize) -> Self { + pub fn new(num_keys: usize) -> Self { // Initially, every key is the root of its own set, so its parent is itself. Self { table: IndexVec::from_fn_n(|key| UnionFindEntry { parent: key, rank: 0 }, num_keys) } } @@ -38,7 +38,7 @@ impl<Key: Idx> UnionFind<Key> { /// /// Also updates internal data structures to make subsequent `find` /// operations faster. - pub(crate) fn find(&mut self, key: Key) -> Key { + pub fn find(&mut self, key: Key) -> Key { // Loop until we find a key that is its own parent. let mut curr = key; while let parent = self.table[curr].parent @@ -60,7 +60,7 @@ impl<Key: Idx> UnionFind<Key> { /// Merges the set containing `a` and the set containing `b` into one set. /// /// Returns the common root of both keys, after the merge. - pub(crate) fn unify(&mut self, a: Key, b: Key) -> Key { + pub fn unify(&mut self, a: Key, b: Key) -> Key { let mut a = self.find(a); let mut b = self.find(b); @@ -90,7 +90,7 @@ impl<Key: Idx> UnionFind<Key> { /// Takes a "snapshot" of the current state of this disjoint-set forest, in /// the form of a vector that directly maps each key to its current root. 
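Since `UnionFind` is now exported from `rustc_data_structures`, here is a simplified, `usize`-keyed sketch of its `new`/`find`/`unify`/`snapshot` API. The real type is generic over `Idx`, backed by an `IndexVec`, and unions by rank; this version omits ranks and picks roots arbitrarily:

```rust
/// `usize`-keyed sketch of the union-find API.
struct UnionFind {
    parent: Vec<usize>,
}

impl UnionFind {
    /// Every key starts as the root of its own one-element set.
    fn new(num_keys: usize) -> Self {
        Self { parent: (0..num_keys).collect() }
    }

    /// Find the root of `key`, compressing the path along the way.
    fn find(&mut self, key: usize) -> usize {
        let mut root = key;
        while self.parent[root] != root {
            root = self.parent[root];
        }
        let mut curr = key;
        while self.parent[curr] != root {
            curr = std::mem::replace(&mut self.parent[curr], root);
        }
        root
    }

    /// Merge the sets containing `a` and `b`; returns the common root.
    fn unify(&mut self, a: usize, b: usize) -> usize {
        let (a, b) = (self.find(a), self.find(b));
        self.parent[b] = a;
        a
    }

    /// Snapshot mapping every key directly to its current root.
    fn snapshot(&mut self) -> Vec<usize> {
        (0..self.parent.len()).map(|k| self.find(k)).collect()
    }
}

fn main() {
    let mut uf = UnionFind::new(4);
    uf.unify(0, 1);
    uf.unify(1, 2);
    assert_eq!(uf.find(2), uf.find(0));
    assert_ne!(uf.find(3), uf.find(0));
    // In this sketch the first argument's root wins every merge.
    assert_eq!(uf.snapshot(), vec![0usize, 0, 0, 3]);
}
```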
- pub(crate) fn snapshot(&mut self) -> IndexVec<Key, Key> { + pub fn snapshot(&mut self) -> IndexVec<Key, Key> { self.table.indices().map(|key| self.find(key)).collect() } } diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs b/compiler/rustc_data_structures/src/union_find/tests.rs index 34a4e4f8e6e..34a4e4f8e6e 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs +++ b/compiler/rustc_data_structures/src/union_find/tests.rs diff --git a/compiler/rustc_error_messages/Cargo.toml b/compiler/rustc_error_messages/Cargo.toml index 0951859fa53..552ad672752 100644 --- a/compiler/rustc_error_messages/Cargo.toml +++ b/compiler/rustc_error_messages/Cargo.toml @@ -11,6 +11,8 @@ icu_list = "1.2" icu_locid = "1.2" icu_provider_adapters = "1.2" intl-memoizer = "0.5.1" +rustc_ast = { path = "../rustc_ast" } +rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_baked_icu_data = { path = "../rustc_baked_icu_data" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_macros = { path = "../rustc_macros" } diff --git a/compiler/rustc_error_messages/src/diagnostic_impls.rs b/compiler/rustc_error_messages/src/diagnostic_impls.rs new file mode 100644 index 00000000000..3b664cce577 --- /dev/null +++ b/compiler/rustc_error_messages/src/diagnostic_impls.rs @@ -0,0 +1,205 @@ +use std::backtrace::Backtrace; +use std::borrow::Cow; +use std::fmt; +use std::num::ParseIntError; +use std::path::{Path, PathBuf}; +use std::process::ExitStatus; + +use rustc_ast as ast; +use rustc_ast_pretty::pprust; +use rustc_span::edition::Edition; + +use crate::{DiagArgValue, IntoDiagArg}; + +pub struct DiagArgFromDisplay<'a>(pub &'a dyn fmt::Display); + +impl IntoDiagArg for DiagArgFromDisplay<'_> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.0.to_string().into_diag_arg(path) + } +} + +impl<'a> From<&'a dyn fmt::Display> for DiagArgFromDisplay<'a> { + fn from(t: &'a dyn fmt::Display) -> Self { + DiagArgFromDisplay(t) + } +} + +impl<'a, T: fmt::Display> From<&'a T> for DiagArgFromDisplay<'a> { + fn from(t: &'a T) -> Self { + DiagArgFromDisplay(t) + } +} + +impl<'a, T: Clone + IntoDiagArg> IntoDiagArg for &'a T { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.clone().into_diag_arg(path) + } +} + +#[macro_export] +macro_rules! into_diag_arg_using_display { + ($( $ty:ty ),+ $(,)?) => { + $( + impl $crate::IntoDiagArg for $ty { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> $crate::DiagArgValue { + self.to_string().into_diag_arg(path) + } + } + )+ + } +} + +macro_rules! into_diag_arg_for_number { + ($( $ty:ty ),+ $(,)?) => { + $( + impl $crate::IntoDiagArg for $ty { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> $crate::DiagArgValue { + // Convert to a string if it won't fit into `Number`. 
+ #[allow(irrefutable_let_patterns)] + if let Ok(n) = TryInto::<i32>::try_into(self) { + $crate::DiagArgValue::Number(n) + } else { + self.to_string().into_diag_arg(path) + } + } + } + )+ + } +} + +into_diag_arg_using_display!( + ast::ParamKindOrd, + std::io::Error, + Box<dyn std::error::Error>, + std::num::NonZero<u32>, + Edition, + rustc_span::Ident, + rustc_span::MacroRulesNormalizedIdent, + ParseIntError, + ExitStatus, +); + +into_diag_arg_for_number!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize); + +impl IntoDiagArg for bool { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + if self { + DiagArgValue::Str(Cow::Borrowed("true")) + } else { + DiagArgValue::Str(Cow::Borrowed("false")) + } + } +} + +impl IntoDiagArg for char { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(format!("{self:?}"))) + } +} + +impl IntoDiagArg for Vec<char> { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::StrListSepByAnd( + self.into_iter().map(|c| Cow::Owned(format!("{c:?}"))).collect(), + ) + } +} + +impl IntoDiagArg for rustc_span::Symbol { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_ident_string().into_diag_arg(path) + } +} + +impl<'a> IntoDiagArg for &'a str { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } +} + +impl IntoDiagArg for String { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self)) + } +} + +impl<'a> IntoDiagArg for Cow<'a, str> { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.into_owned())) + } +} + +impl<'a> IntoDiagArg for &'a Path { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.display().to_string())) + } +} + +impl IntoDiagArg for PathBuf { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.display().to_string())) + } +} + +impl IntoDiagArg for ast::Expr { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(pprust::expr_to_string(&self))) + } +} + +impl IntoDiagArg for ast::Path { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(pprust::path_to_string(&self))) + } +} + +impl IntoDiagArg for ast::token::Token { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(pprust::token_to_string(&self)) + } +} + +impl IntoDiagArg for ast::token::TokenKind { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(pprust::token_kind_to_string(&self)) + } +} + +impl IntoDiagArg for std::ffi::CString { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) + } +} + +impl IntoDiagArg for rustc_data_structures::small_c_str::SmallCStr { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) + } +} + +impl IntoDiagArg for ast::Visibility { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + let s = pprust::vis_to_string(&self); + let s = 
s.trim_end().to_string(); + DiagArgValue::Str(Cow::Owned(s)) + } +} + +impl IntoDiagArg for Backtrace { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::from(self.to_string())) + } +} + +impl IntoDiagArg for ast::util::parser::ExprPrecedence { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Number(self as i32) + } +} + +impl IntoDiagArg for ast::FloatTy { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.name_str())) + } +} diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs index 4b3ecad307f..d8bacbe762b 100644 --- a/compiler/rustc_error_messages/src/lib.rs +++ b/compiler/rustc_error_messages/src/lib.rs @@ -23,6 +23,9 @@ use rustc_span::Span; use tracing::{instrument, trace}; pub use unic_langid::{LanguageIdentifier, langid}; +mod diagnostic_impls; +pub use diagnostic_impls::DiagArgFromDisplay; + pub type FluentBundle = IntoDynSyncSend<fluent_bundle::bundle::FluentBundle<FluentResource, IntlLangMemoizer>>; @@ -589,3 +592,53 @@ pub fn fluent_value_from_str_list_sep_by_and(l: Vec<Cow<'_, str>>) -> FluentValu FluentValue::Custom(Box::new(FluentStrListSepByAnd(l))) } + +/// Simplified version of `FluentArg` that can implement `Encodable` and `Decodable`. Collection of +/// `DiagArg` are converted to `FluentArgs` (consuming the collection) at the start of diagnostic +/// emission. +pub type DiagArg<'iter> = (&'iter DiagArgName, &'iter DiagArgValue); + +/// Name of a diagnostic argument. +pub type DiagArgName = Cow<'static, str>; + +/// Simplified version of `FluentValue` that can implement `Encodable` and `Decodable`. Converted +/// to a `FluentValue` by the emitter to be used in diagnostic translation. +#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)] +pub enum DiagArgValue { + Str(Cow<'static, str>), + // This gets converted to a `FluentNumber`, which is an `f64`. An `i32` + // safely fits in an `f64`. Any integers bigger than that will be converted + // to strings in `into_diag_arg` and stored using the `Str` variant. + Number(i32), + StrListSepByAnd(Vec<Cow<'static, str>>), +} + +/// Converts a value of a type into a `DiagArg` (typically a field of an `Diag` struct). +/// Implemented as a custom trait rather than `From` so that it is implemented on the type being +/// converted rather than on `DiagArgValue`, which enables types from other `rustc_*` crates to +/// implement this. +pub trait IntoDiagArg { + /// Convert `Self` into a `DiagArgValue` suitable for rendering in a diagnostic. + /// + /// It takes a `path` where "long values" could be written to, if the `DiagArgValue` is too big + /// for displaying on the terminal. This path comes from the `Diag` itself. When rendering + /// values that come from `TyCtxt`, like `Ty<'_>`, they can use `TyCtxt::short_string`. If a + /// value has no shortening logic that could be used, the argument can be safely ignored. 
+ fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue; +} + +impl IntoDiagArg for DiagArgValue { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self + } +} + +impl From<DiagArgValue> for FluentValue<'static> { + fn from(val: DiagArgValue) -> Self { + match val { + DiagArgValue::Str(s) => From::from(s), + DiagArgValue::Number(n) => From::from(n), + DiagArgValue::StrListSepByAnd(l) => fluent_value_from_str_list_sep_by_and(l), + } + } +} diff --git a/compiler/rustc_errors/Cargo.toml b/compiler/rustc_errors/Cargo.toml index 3e8cf6207ae..7912b8e6098 100644 --- a/compiler/rustc_errors/Cargo.toml +++ b/compiler/rustc_errors/Cargo.toml @@ -9,21 +9,17 @@ annotate-snippets = "0.11" derive_setters = "0.1.6" rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } -rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_codes = { path = "../rustc_error_codes" } rustc_error_messages = { path = "../rustc_error_messages" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hashes = { path = "../rustc_hashes" } -rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } -rustc_target = { path = "../rustc_target" } -rustc_type_ir = { path = "../rustc_type_ir" } serde = { version = "1.0.125", features = ["derive"] } serde_json = "1.0.59" termcolor = "1.2.0" diff --git a/compiler/rustc_errors/src/codes.rs b/compiler/rustc_errors/src/codes.rs index 947cf27ca79..787a8af99b1 100644 --- a/compiler/rustc_errors/src/codes.rs +++ b/compiler/rustc_errors/src/codes.rs @@ -20,6 +20,8 @@ impl fmt::Display for ErrCode { } } +rustc_error_messages::into_diag_arg_using_display!(ErrCode); + macro_rules! define_error_code_constants_and_diagnostics_table { ($($name:ident: $num:literal,)*) => ( $( diff --git a/compiler/rustc_errors/src/decorate_diag.rs b/compiler/rustc_errors/src/decorate_diag.rs new file mode 100644 index 00000000000..5aef26ccf97 --- /dev/null +++ b/compiler/rustc_errors/src/decorate_diag.rs @@ -0,0 +1,85 @@ +/// This module provides types and traits for buffering lints until later in compilation. +use rustc_ast::node_id::NodeId; +use rustc_data_structures::fx::FxIndexMap; +use rustc_error_messages::MultiSpan; +use rustc_lint_defs::{BuiltinLintDiag, Lint, LintId}; + +use crate::{DynSend, LintDiagnostic, LintDiagnosticBox}; + +/// We can't implement `LintDiagnostic` for `BuiltinLintDiag`, because decorating some of its +/// variants requires types we don't have yet. So, handle that case separately. 
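// Illustrative sketch, not part of the patch: how the `DecorateDiagCompat` enum and
// `LintBuffer` defined just below are meant to be used by callers (for example during
// expansion). Both a `BuiltinLintDiag` variant and any `#[derive(LintDiagnostic)]` type
// convert into `DecorateDiagCompat` via `From`, so `buffer_lint` accepts either. The helper
// function name here is hypothetical.

use rustc_ast::NodeId;
use rustc_errors::LintBuffer;
use rustc_lint_defs::BuiltinLintDiag;
use rustc_session::lint::builtin::UNUSED_DOC_COMMENTS;
use rustc_span::Span;

fn buffer_unused_doc_comment(buffer: &mut LintBuffer, node_id: NodeId, span: Span) {
    // `BuiltinLintDiag::UnusedDocComment` goes through `From<BuiltinLintDiag>`;
    // a derived `LintDiagnostic` type would go through the blanket `From<D>` impl instead.
    buffer.buffer_lint(UNUSED_DOC_COMMENTS, node_id, span, BuiltinLintDiag::UnusedDocComment(span));
}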
+pub enum DecorateDiagCompat { + Dynamic(Box<dyn for<'a> LintDiagnosticBox<'a, ()> + DynSend + 'static>), + Builtin(BuiltinLintDiag), +} + +impl std::fmt::Debug for DecorateDiagCompat { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("DecorateDiagCompat").finish() + } +} + +impl !LintDiagnostic<'_, ()> for BuiltinLintDiag {} + +impl<D: for<'a> LintDiagnostic<'a, ()> + DynSend + 'static> From<D> for DecorateDiagCompat { + #[inline] + fn from(d: D) -> Self { + Self::Dynamic(Box::new(d)) + } +} + +impl From<BuiltinLintDiag> for DecorateDiagCompat { + #[inline] + fn from(b: BuiltinLintDiag) -> Self { + Self::Builtin(b) + } +} + +/// Lints that are buffered up early on in the `Session` before the +/// `LintLevels` is calculated. +#[derive(Debug)] +pub struct BufferedEarlyLint { + /// The span of code that we are linting on. + pub span: Option<MultiSpan>, + + /// The `NodeId` of the AST node that generated the lint. + pub node_id: NodeId, + + /// A lint Id that can be passed to + /// `rustc_lint::early::EarlyContextAndPass::check_id`. + pub lint_id: LintId, + + /// Customization of the `Diag<'_>` for the lint. + pub diagnostic: DecorateDiagCompat, +} + +#[derive(Default, Debug)] +pub struct LintBuffer { + pub map: FxIndexMap<NodeId, Vec<BufferedEarlyLint>>, +} + +impl LintBuffer { + pub fn add_early_lint(&mut self, early_lint: BufferedEarlyLint) { + self.map.entry(early_lint.node_id).or_default().push(early_lint); + } + + pub fn take(&mut self, id: NodeId) -> Vec<BufferedEarlyLint> { + // FIXME(#120456) - is `swap_remove` correct? + self.map.swap_remove(&id).unwrap_or_default() + } + + pub fn buffer_lint( + &mut self, + lint: &'static Lint, + node_id: NodeId, + span: impl Into<MultiSpan>, + decorate: impl Into<DecorateDiagCompat>, + ) { + self.add_early_lint(BufferedEarlyLint { + lint_id: LintId::of(lint), + node_id, + span: Some(span.into()), + diagnostic: decorate.into(), + }); + } +} diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index e579370ce4e..96a4ed3218f 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -8,7 +8,7 @@ use std::path::PathBuf; use std::thread::panicking; use rustc_data_structures::fx::FxIndexMap; -use rustc_error_messages::{FluentValue, fluent_value_from_str_list_sep_by_and}; +use rustc_error_messages::{DiagArgName, DiagArgValue, IntoDiagArg}; use rustc_lint_defs::{Applicability, LintExpectationId}; use rustc_macros::{Decodable, Encodable}; use rustc_span::source_map::Spanned; @@ -22,26 +22,6 @@ use crate::{ Suggestions, }; -/// Simplified version of `FluentArg` that can implement `Encodable` and `Decodable`. Collection of -/// `DiagArg` are converted to `FluentArgs` (consuming the collection) at the start of diagnostic -/// emission. -pub type DiagArg<'iter> = (&'iter DiagArgName, &'iter DiagArgValue); - -/// Name of a diagnostic argument. -pub type DiagArgName = Cow<'static, str>; - -/// Simplified version of `FluentValue` that can implement `Encodable` and `Decodable`. Converted -/// to a `FluentValue` by the emitter to be used in diagnostic translation. -#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)] -pub enum DiagArgValue { - Str(Cow<'static, str>), - // This gets converted to a `FluentNumber`, which is an `f64`. An `i32` - // safely fits in an `f64`. Any integers bigger than that will be converted - // to strings in `into_diag_arg` and stored using the `Str` variant. 
- Number(i32), - StrListSepByAnd(Vec<Cow<'static, str>>), -} - pub type DiagArgMap = FxIndexMap<DiagArgName, DiagArgValue>; /// Trait for types that `Diag::emit` can return as a "guarantee" (or "proof") @@ -143,36 +123,6 @@ where } } -/// Converts a value of a type into a `DiagArg` (typically a field of an `Diag` struct). -/// Implemented as a custom trait rather than `From` so that it is implemented on the type being -/// converted rather than on `DiagArgValue`, which enables types from other `rustc_*` crates to -/// implement this. -pub trait IntoDiagArg { - /// Convert `Self` into a `DiagArgValue` suitable for rendering in a diagnostic. - /// - /// It takes a `path` where "long values" could be written to, if the `DiagArgValue` is too big - /// for displaying on the terminal. This path comes from the `Diag` itself. When rendering - /// values that come from `TyCtxt`, like `Ty<'_>`, they can use `TyCtxt::short_string`. If a - /// value has no shortening logic that could be used, the argument can be safely ignored. - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue; -} - -impl IntoDiagArg for DiagArgValue { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self - } -} - -impl From<DiagArgValue> for FluentValue<'static> { - fn from(val: DiagArgValue) -> Self { - match val { - DiagArgValue::Str(s) => From::from(s), - DiagArgValue::Number(n) => From::from(n), - DiagArgValue::StrListSepByAnd(l) => fluent_value_from_str_list_sep_by_and(l), - } - } -} - /// Trait implemented by error types. This should not be implemented manually. Instead, use /// `#[derive(Subdiagnostic)]` -- see [rustc_macros::Subdiagnostic]. #[rustc_diagnostic_item = "Subdiagnostic"] @@ -188,10 +138,20 @@ where /// `#[derive(LintDiagnostic)]` -- see [rustc_macros::LintDiagnostic]. #[rustc_diagnostic_item = "LintDiagnostic"] pub trait LintDiagnostic<'a, G: EmissionGuarantee> { - /// Decorate and emit a lint. + /// Decorate a lint with the information from this type. fn decorate_lint<'b>(self, diag: &'b mut Diag<'a, G>); } +pub trait LintDiagnosticBox<'a, G: EmissionGuarantee> { + fn decorate_lint_box<'b>(self: Box<Self>, diag: &'b mut Diag<'a, G>); +} + +impl<'a, G: EmissionGuarantee, D: LintDiagnostic<'a, G>> LintDiagnosticBox<'a, G> for D { + fn decorate_lint_box<'b>(self: Box<Self>, diag: &'b mut Diag<'a, G>) { + self.decorate_lint(diag); + } +} + #[derive(Clone, Debug, Encodable, Decodable)] pub(crate) struct DiagLocation { file: Cow<'static, str>, @@ -617,6 +577,29 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { self.level = Level::DelayedBug; } + /// Make emitting this diagnostic fatal + /// + /// Changes the level of this diagnostic to Fatal, and importantly also changes the emission guarantee. + /// This is sound for errors that would otherwise be printed, but now simply exit the process instead. + /// This function still gives an emission guarantee, the guarantee is now just that it exits fatally. + /// For delayed bugs this is different, since those are buffered. If we upgrade one to fatal, another + /// might now be ignored. + #[rustc_lint_diagnostics] + #[track_caller] + pub fn upgrade_to_fatal(mut self) -> Diag<'a, FatalAbort> { + assert!( + matches!(self.level, Level::Error), + "upgrade_to_fatal: cannot upgrade {:?} to Fatal: not an error", + self.level + ); + self.level = Level::Fatal; + + // Take is okay since we immediately rewrap it in another diagnostic. + // i.e. we do emit it despite defusing the original diagnostic's drop bomb. 
+ let diag = self.diag.take(); + Diag { dcx: self.dcx, diag, _marker: PhantomData } + } + with_fn! { with_span_label, /// Appends a labeled span to the diagnostic. /// diff --git a/compiler/rustc_errors/src/diagnostic_impls.rs b/compiler/rustc_errors/src/diagnostic_impls.rs index 698b6b8d504..435d16a8380 100644 --- a/compiler/rustc_errors/src/diagnostic_impls.rs +++ b/compiler/rustc_errors/src/diagnostic_impls.rs @@ -1,340 +1,22 @@ -use std::backtrace::Backtrace; use std::borrow::Cow; -use std::fmt; -use std::num::ParseIntError; -use std::path::{Path, PathBuf}; -use std::process::ExitStatus; use rustc_abi::TargetDataLayoutErrors; -use rustc_ast::util::parser::ExprPrecedence; -use rustc_ast_pretty::pprust; -use rustc_hir::RustcVersion; -use rustc_hir::attrs::{MirDialect, MirPhase}; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::Subdiagnostic; -use rustc_span::edition::Edition; -use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol}; -use rustc_target::spec::{PanicStrategy, SplitDebuginfo, StackProtector, TargetTuple}; -use rustc_type_ir::{ClosureKind, FloatTy}; -use {rustc_ast as ast, rustc_hir as hir}; +use rustc_span::{Span, Symbol}; use crate::diagnostic::DiagLocation; use crate::{ - Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, ErrCode, IntoDiagArg, Level, - Subdiagnostic, fluent_generated as fluent, + Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic, + fluent_generated as fluent, }; -pub struct DiagArgFromDisplay<'a>(pub &'a dyn fmt::Display); - -impl IntoDiagArg for DiagArgFromDisplay<'_> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.0.to_string().into_diag_arg(path) - } -} - -impl<'a> From<&'a dyn fmt::Display> for DiagArgFromDisplay<'a> { - fn from(t: &'a dyn fmt::Display) -> Self { - DiagArgFromDisplay(t) - } -} - -impl<'a, T: fmt::Display> From<&'a T> for DiagArgFromDisplay<'a> { - fn from(t: &'a T) -> Self { - DiagArgFromDisplay(t) - } -} - -impl<'a, T: Clone + IntoDiagArg> IntoDiagArg for &'a T { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.clone().into_diag_arg(path) - } -} - -#[macro_export] -macro_rules! into_diag_arg_using_display { - ($( $ty:ty ),+ $(,)?) => { - $( - impl IntoDiagArg for $ty { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } - } - )+ - } -} - -macro_rules! into_diag_arg_for_number { - ($( $ty:ty ),+ $(,)?) => { - $( - impl IntoDiagArg for $ty { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - // Convert to a string if it won't fit into `Number`. 
- #[allow(irrefutable_let_patterns)] - if let Ok(n) = TryInto::<i32>::try_into(self) { - DiagArgValue::Number(n) - } else { - self.to_string().into_diag_arg(path) - } - } - } - )+ - } -} - -into_diag_arg_using_display!( - ast::ParamKindOrd, - std::io::Error, - Box<dyn std::error::Error>, - std::num::NonZero<u32>, - hir::Target, - Edition, - Ident, - MacroRulesNormalizedIdent, - ParseIntError, - StackProtector, - &TargetTuple, - SplitDebuginfo, - ExitStatus, - ErrCode, - rustc_abi::ExternAbi, -); - -impl IntoDiagArg for RustcVersion { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.to_string())) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::TraitRef<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::ExistentialTraitRef<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::UnevaluatedConst<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - format!("{self:?}").into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner> IntoDiagArg for rustc_type_ir::FnSig<I> { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - format!("{self:?}").into_diag_arg(path) - } -} - -impl<I: rustc_type_ir::Interner, T> IntoDiagArg for rustc_type_ir::Binder<I, T> -where - T: IntoDiagArg, -{ - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.skip_binder().into_diag_arg(path) - } -} - -into_diag_arg_for_number!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize); - -impl IntoDiagArg for bool { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - if self { - DiagArgValue::Str(Cow::Borrowed("true")) - } else { - DiagArgValue::Str(Cow::Borrowed("false")) - } - } -} - -impl IntoDiagArg for char { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(format!("{self:?}"))) - } -} - -impl IntoDiagArg for Vec<char> { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::StrListSepByAnd( - self.into_iter().map(|c| Cow::Owned(format!("{c:?}"))).collect(), - ) - } -} - -impl IntoDiagArg for Symbol { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_ident_string().into_diag_arg(path) - } -} - -impl<'a> IntoDiagArg for &'a str { - fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { - self.to_string().into_diag_arg(path) - } -} - -impl IntoDiagArg for String { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self)) - } -} - -impl<'a> IntoDiagArg for Cow<'a, str> { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.into_owned())) - } -} - -impl<'a> IntoDiagArg for &'a Path { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.display().to_string())) - } -} - -impl IntoDiagArg for PathBuf { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.display().to_string())) - } -} - -impl IntoDiagArg 
for PanicStrategy { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.desc().to_string())) - } -} - -impl IntoDiagArg for hir::ConstContext { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(match self { - hir::ConstContext::ConstFn => "const_fn", - hir::ConstContext::Static(_) => "static", - hir::ConstContext::Const { .. } => "const", - })) - } -} - -impl IntoDiagArg for ast::Expr { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(pprust::expr_to_string(&self))) - } -} - -impl IntoDiagArg for ast::Path { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(pprust::path_to_string(&self))) - } -} - -impl IntoDiagArg for ast::token::Token { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(pprust::token_to_string(&self)) - } -} - -impl IntoDiagArg for ast::token::TokenKind { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(pprust::token_kind_to_string(&self)) - } -} - -impl IntoDiagArg for FloatTy { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.name_str())) - } -} - -impl IntoDiagArg for std::ffi::CString { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) - } -} - -impl IntoDiagArg for rustc_data_structures::small_c_str::SmallCStr { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Owned(self.to_string_lossy().into_owned())) - } -} - -impl IntoDiagArg for ast::Visibility { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - let s = pprust::vis_to_string(&self); - let s = s.trim_end().to_string(); - DiagArgValue::Str(Cow::Owned(s)) - } -} - -impl IntoDiagArg for rustc_lint_defs::Level { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.to_cmd_flag())) - } -} - -impl<Id> IntoDiagArg for hir::def::Res<Id> { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.descr())) - } -} - impl IntoDiagArg for DiagLocation { fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { DiagArgValue::Str(Cow::from(self.to_string())) } } -impl IntoDiagArg for Backtrace { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::from(self.to_string())) - } -} - -impl IntoDiagArg for Level { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::from(self.to_string())) - } -} - -impl IntoDiagArg for ClosureKind { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(self.as_str().into()) - } -} - -impl IntoDiagArg for hir::def::Namespace { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Str(Cow::Borrowed(self.descr())) - } -} - -impl IntoDiagArg for ExprPrecedence { - fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { - DiagArgValue::Number(self as i32) - } -} - -impl IntoDiagArg for MirDialect { - fn into_diag_arg(self, _path: &mut Option<PathBuf>) -> DiagArgValue { 
- let arg = match self { - MirDialect::Analysis => "analysis", - MirDialect::Built => "built", - MirDialect::Runtime => "runtime", - }; - DiagArgValue::Str(Cow::Borrowed(arg)) - } -} - -impl IntoDiagArg for MirPhase { - fn into_diag_arg(self, _path: &mut Option<PathBuf>) -> DiagArgValue { - let arg = match self { - MirPhase::Initial => "initial", - MirPhase::PostCleanup => "post-cleanup", - MirPhase::Optimized => "optimized", - }; - DiagArgValue::Str(Cow::Borrowed(arg)) - } -} - #[derive(Clone)] pub struct DiagSymbolList<S = Symbol>(Vec<S>); diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index 97c47fa9b9a..749bba5de12 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -17,7 +17,7 @@ use std::path::Path; use std::sync::Arc; use derive_setters::Setters; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::{DynSend, IntoDynSyncSend}; use rustc_error_messages::{FluentArgs, SpanLabel}; use rustc_lexer; @@ -1853,7 +1853,7 @@ impl HumanEmitter { && line_idx + 1 == annotated_file.lines.len(), ); - let mut to_add = FxHashMap::default(); + let mut to_add = FxIndexMap::default(); for (depth, style) in depths { // FIXME(#120456) - is `swap_remove` correct? diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index 2534cddf105..71fc54f0d33 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -40,13 +40,13 @@ use std::{fmt, panic}; use Level::*; pub use codes::*; +pub use decorate_diag::{BufferedEarlyLint, DecorateDiagCompat, LintBuffer}; pub use diagnostic::{ - BugAbort, Diag, DiagArg, DiagArgMap, DiagArgName, DiagArgValue, DiagInner, DiagStyledString, - Diagnostic, EmissionGuarantee, FatalAbort, IntoDiagArg, LintDiagnostic, StringPart, Subdiag, - Subdiagnostic, + BugAbort, Diag, DiagArgMap, DiagInner, DiagStyledString, Diagnostic, EmissionGuarantee, + FatalAbort, LintDiagnostic, LintDiagnosticBox, StringPart, Subdiag, Subdiagnostic, }; pub use diagnostic_impls::{ - DiagArgFromDisplay, DiagSymbolList, ElidedLifetimeInPathSubdiag, ExpectedLifetimeParameter, + DiagSymbolList, ElidedLifetimeInPathSubdiag, ExpectedLifetimeParameter, IndicateAnonymousLifetime, SingleLabelManySpans, }; pub use emitter::ColorConfig; @@ -56,11 +56,11 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::sync::{DynSend, Lock}; pub use rustc_error_messages::{ - DiagMessage, FluentBundle, LanguageIdentifier, LazyFallbackBundle, MultiSpan, SpanLabel, - SubdiagMessage, fallback_fluent_bundle, fluent_bundle, + DiagArg, DiagArgFromDisplay, DiagArgName, DiagArgValue, DiagMessage, FluentBundle, IntoDiagArg, + LanguageIdentifier, LazyFallbackBundle, MultiSpan, SpanLabel, SubdiagMessage, + fallback_fluent_bundle, fluent_bundle, into_diag_arg_using_display, }; use rustc_hashes::Hash128; -use rustc_hir::HirId; pub use rustc_lint_defs::{Applicability, listify, pluralize}; use rustc_lint_defs::{Lint, LintExpectationId}; use rustc_macros::{Decodable, Encodable}; @@ -80,6 +80,7 @@ use crate::timings::TimingRecord; pub mod annotate_snippet_emitter_writer; pub mod codes; +mod decorate_diag; mod diagnostic; mod diagnostic_impls; pub mod emitter; @@ -108,13 +109,14 @@ rustc_data_structures::static_assert_size!(PResult<'_, bool>, 24); /// Used to avoid depending on `rustc_middle` in 
`rustc_attr_parsing`. /// Always the `TyCtxt`. pub trait LintEmitter: Copy { + type Id: Copy; #[track_caller] fn emit_node_span_lint( self, lint: &'static Lint, - hir_id: HirId, + hir_id: Self::Id, span: impl Into<MultiSpan>, - decorator: impl for<'a> LintDiagnostic<'a, ()>, + decorator: impl for<'a> LintDiagnostic<'a, ()> + DynSend + 'static, ); } @@ -1999,6 +2001,12 @@ impl Level { } } +impl IntoDiagArg for Level { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::from(self.to_string())) + } +} + // FIXME(eddyb) this doesn't belong here AFAICT, should be moved to callsite. pub fn elided_lifetime_in_path_suggestion( source_map: &SourceMap, diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index f2c15071532..8ff21509f4a 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -13,13 +13,13 @@ use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_data_structures::sync; -use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, PResult}; +use rustc_errors::{BufferedEarlyLint, DiagCtxtHandle, ErrorGuaranteed, PResult}; use rustc_feature::Features; use rustc_hir as hir; use rustc_hir::attrs::{AttributeKind, CfgEntry, Deprecation}; use rustc_hir::def::MacroKinds; use rustc_hir::{Stability, find_attr}; -use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools}; +use rustc_lint_defs::RegisteredTools; use rustc_parse::MACRO_ARGUMENTS; use rustc_parse::parser::{ForceCollect, Parser}; use rustc_session::config::CollapseMacroDebuginfo; diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 83a8d601afe..15419ab7423 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -11,8 +11,10 @@ use rustc_ast::{ NodeId, NormalAttr, }; use rustc_attr_parsing as attr; +use rustc_attr_parsing::validate_attr::deny_builtin_meta_unsafety; use rustc_attr_parsing::{ AttributeParser, CFG_TEMPLATE, EvalConfigResult, ShouldEmit, eval_config_entry, parse_cfg_attr, + validate_attr, }; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_feature::{ @@ -20,8 +22,6 @@ use rustc_feature::{ REMOVED_LANG_FEATURES, UNSTABLE_LANG_FEATURES, }; use rustc_lint_defs::BuiltinLintDiag; -use rustc_parse::validate_attr; -use rustc_parse::validate_attr::deny_builtin_meta_unsafety; use rustc_session::Session; use rustc_session::parse::feature_err; use rustc_span::{STDLIB_STABLE_CRATES, Span, Symbol, sym}; @@ -415,16 +415,6 @@ impl<'a> StripUnconfigured<'a> { node: NodeId, emit_errors: ShouldEmit, ) -> EvalConfigResult { - // We need to run this to do basic validation of the attribute, such as that lits are valid, etc - // FIXME(jdonszelmann) this should not be necessary in the future - match validate_attr::parse_meta(&self.sess.psess, attr) { - Ok(_) => {} - Err(err) => { - err.emit(); - return EvalConfigResult::True; - } - } - // Unsafety check needs to be done explicitly here because this attribute will be removed before the normal check deny_builtin_meta_unsafety( self.sess.dcx(), diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 1f7f4c7d856..dbb4a9de9e0 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; -use std::{iter, mem}; +use 
std::{iter, mem, slice}; use rustc_ast::mut_visit::*; use rustc_ast::tokenstream::TokenStream; @@ -12,16 +12,17 @@ use rustc_ast::{ MetaItemKind, ModKind, NodeId, PatKind, StmtKind, TyKind, token, }; use rustc_ast_pretty::pprust; -use rustc_attr_parsing::{EvalConfigResult, ShouldEmit}; +use rustc_attr_parsing::{AttributeParser, Early, EvalConfigResult, ShouldEmit, validate_attr}; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::PResult; use rustc_feature::Features; +use rustc_hir::Target; use rustc_hir::def::MacroKinds; use rustc_parse::parser::{ AttemptLocalParseRecovery, CommaRecoveryMode, ForceCollect, Parser, RecoverColon, RecoverComma, token_descr, }; -use rustc_parse::validate_attr; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{UNUSED_ATTRIBUTES, UNUSED_DOC_COMMENTS}; use rustc_session::parse::feature_err; @@ -801,7 +802,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ItemKind::Mod( _, _, - ModKind::Unloaded | ModKind::Loaded(_, Inline::No, _, _), + ModKind::Unloaded + | ModKind::Loaded(_, Inline::No { .. }, _), ) ) => { @@ -1035,7 +1037,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { fn visit_item(&mut self, item: &'ast ast::Item) { match &item.kind { ItemKind::Mod(_, _, mod_kind) - if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _, _)) => + if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _)) => { feature_err( self.sess, @@ -1346,7 +1348,7 @@ impl InvocationCollectorNode for Box<ast::Item> { let ItemKind::Mod(_, ident, ref mut mod_kind) = node.kind else { unreachable!() }; let ecx = &mut collector.cx; let (file_path, dir_path, dir_ownership) = match mod_kind { - ModKind::Loaded(_, inline, _, _) => { + ModKind::Loaded(_, inline, _) => { // Inline `mod foo { ... }`, but we still need to push directories. let (dir_path, dir_ownership) = mod_dir_path( ecx.sess, @@ -1360,7 +1362,7 @@ impl InvocationCollectorNode for Box<ast::Item> { // This lets `parse_external_mod` catch cycles if it's self-referential. let file_path = match inline { Inline::Yes => None, - Inline::No => mod_file_path_from_attr(ecx.sess, &attrs, &dir_path), + Inline::No { .. 
} => mod_file_path_from_attr(ecx.sess, &attrs, &dir_path), }; node.attrs = attrs; (file_path, dir_path, dir_ownership) @@ -1396,7 +1398,7 @@ impl InvocationCollectorNode for Box<ast::Item> { ); } - *mod_kind = ModKind::Loaded(items, Inline::No, spans, had_parse_error); + *mod_kind = ModKind::Loaded(items, Inline::No { had_parse_error }, spans); node.attrs = attrs; if node.attrs.len() > old_attrs_len { // If we loaded an out-of-line module and added some inner attributes, @@ -2157,6 +2159,16 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { attr, self.cx.current_expansion.lint_node_id, ); + AttributeParser::parse_limited_all( + self.cx.sess, + slice::from_ref(attr), + None, + Target::MacroCall, + call.span(), + self.cx.current_expansion.lint_node_id, + Some(self.cx.ecfg.features), + ShouldEmit::ErrorsAndLints, + ); let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span }; span = Some(current_span); @@ -2172,7 +2184,9 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { self.cx.current_expansion.lint_node_id, BuiltinLintDiag::UnusedDocComment(attr.span), ); - } else if rustc_attr_parsing::is_builtin_attr(attr) { + } else if rustc_attr_parsing::is_builtin_attr(attr) + && !AttributeParser::<Early>::is_parsed_attribute(&attr.path()) + { let attr_name = attr.ident().unwrap().name; // `#[cfg]` and `#[cfg_attr]` are special - they are // eagerly evaluated. @@ -2468,7 +2482,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { if let Some(attr) = node.attrs.first() { self.cfg().maybe_emit_expr_attr_err(attr); } - self.visit_node(node) + ensure_sufficient_stack(|| self.visit_node(node)) } fn visit_method_receiver_expr(&mut self, node: &mut ast::Expr) { diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index bced02ae4da..8b43f852b26 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -596,6 +596,7 @@ pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>( match result { Success(body_named_matches) => { psess.gated_spans.merge(gated_spans_snapshot); + #[allow(rustc::potential_query_instability)] named_matches.extend(body_named_matches); return Ok((i, rule, named_matches)); } diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs index 662c67f2d3f..19f3cdbc549 100644 --- a/compiler/rustc_expand/src/module.rs +++ b/compiler/rustc_expand/src/module.rs @@ -2,8 +2,9 @@ use std::iter::once; use std::path::{self, Path, PathBuf}; use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans}; +use rustc_attr_parsing::validate_attr; use rustc_errors::{Diag, ErrorGuaranteed}; -use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal, validate_attr}; +use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal}; use rustc_session::Session; use rustc_session::parse::ParseSess; use rustc_span::{Ident, Span, sym}; @@ -120,7 +121,7 @@ pub(crate) fn mod_dir_path( (dir_path, dir_ownership) } - Inline::No => { + Inline::No { .. } => { // FIXME: This is a subset of `parse_external_mod` without actual parsing, // check whether the logic for unloaded, loaded and inline modules can be unified. 
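// Illustrative sketch, not part of the patch: with this change the out-of-line parse status
// lives on `Inline::No { had_parse_error }` instead of a fourth field on `ModKind::Loaded`,
// so callers that previously read that field now match like this (hypothetical helper):

use rustc_ast::{Inline, ModKind};
use rustc_errors::ErrorGuaranteed;

fn mod_parse_status(kind: &ModKind) -> Result<(), ErrorGuaranteed> {
    match kind {
        ModKind::Loaded(_, Inline::No { had_parse_error }, _) => *had_parse_error,
        // Inline modules and not-yet-loaded modules have no parse status to report.
        ModKind::Loaded(_, Inline::Yes, _) | ModKind::Unloaded => Ok(()),
    }
}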
let file_path = mod_file_path(sess, ident, attrs, &module.dir_path, dir_ownership) diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index fd71f2ce948..5b1d3d6d35b 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -250,12 +250,14 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre Question => op("?"), SingleQuote => op("'"), - Ident(sym, is_raw) => { - trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span })) - } + Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { + sym, + is_raw: matches!(is_raw, IdentIsRaw::Yes), + span, + })), NtIdent(ident, is_raw) => trees.push(TokenTree::Ident(Ident { sym: ident.name, - is_raw: is_raw.into(), + is_raw: matches!(is_raw, IdentIsRaw::Yes), span: ident.span, })), @@ -263,7 +265,11 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre let ident = rustc_span::Ident::new(name, span).without_first_quote(); trees.extend([ TokenTree::Punct(Punct { ch: b'\'', joint: true, span }), - TokenTree::Ident(Ident { sym: ident.name, is_raw: is_raw.into(), span }), + TokenTree::Ident(Ident { + sym: ident.name, + is_raw: matches!(is_raw, IdentIsRaw::Yes), + span, + }), ]); } NtLifetime(ident, is_raw) => { diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 3b11a2bc7df..e81003b1897 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -745,6 +745,10 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ErrorPreceding, EncodeCrossCrate::No ), gated!( + unsafe force_target_feature, Normal, template!(List: &[r#"enable = "name""#]), + DuplicatesOk, EncodeCrossCrate::No, effective_target_features, experimental!(force_target_feature) + ), + gated!( sanitize, Normal, template!(List: &[r#"address = "on|off""#, r#"kernel_address = "on|off""#, r#"cfi = "on|off""#, r#"hwaddress = "on|off""#, r#"kcfi = "on|off""#, r#"memory = "on|off""#, r#"memtag = "on|off""#, r#"shadow_call_stack = "on|off""#, r#"thread = "on|off""#]), ErrorPreceding, EncodeCrossCrate::No, sanitize, experimental!(sanitize), ), @@ -996,6 +1000,10 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_allocator_zeroed, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No, ), + rustc_attr!( + rustc_allocator_zeroed_variant, Normal, template!(NameValueStr: "function"), ErrorPreceding, + EncodeCrossCrate::Yes, + ), gated!( default_lib_allocator, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No, allocator_internals, experimental!(default_lib_allocator), diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 746871982ce..92b435b4b01 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -480,6 +480,8 @@ declare_features! ( (unstable, doc_cfg_hide, "1.57.0", Some(43781)), /// Allows `#[doc(masked)]`. (unstable, doc_masked, "1.21.0", Some(44027)), + /// Allows features to allow target_feature to better interact with traits. + (incomplete, effective_target_features, "CURRENT_RUSTC_VERSION", Some(143352)), /// Allows the .use postfix syntax `x.use` and use closures `use |x| { ... }` (incomplete, ergonomic_clones, "1.87.0", Some(132290)), /// Allows exhaustive pattern matching on types that contain uninhabited types. @@ -614,6 +616,7 @@ declare_features! 
( (unstable, proc_macro_hygiene, "1.30.0", Some(54727)), /// Allows the use of raw-dylibs on ELF platforms (incomplete, raw_dylib_elf, "1.87.0", Some(135694)), + (unstable, reborrow, "CURRENT_RUSTC_VERSION", Some(145612)), /// Makes `&` and `&mut` patterns eat only one layer of references in Rust 2024. (incomplete, ref_pat_eat_one_layer_2024, "1.79.0", Some(123076)), /// Makes `&` and `&mut` patterns eat only one layer of references in Rust 2024—structural variant diff --git a/compiler/rustc_hir/Cargo.toml b/compiler/rustc_hir/Cargo.toml index 71496b7ec32..1008a3e787d 100644 --- a/compiler/rustc_hir/Cargo.toml +++ b/compiler/rustc_hir/Cargo.toml @@ -12,7 +12,9 @@ rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_error_messages = { path = "../rustc_error_messages" } rustc_hashes = { path = "../rustc_hashes" } +rustc_hir_id = { path = "../rustc_hir_id" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } diff --git a/compiler/rustc_hir/src/attrs/data_structures.rs b/compiler/rustc_hir/src/attrs/data_structures.rs index 31715955ed3..09da5772d23 100644 --- a/compiler/rustc_hir/src/attrs/data_structures.rs +++ b/compiler/rustc_hir/src/attrs/data_structures.rs @@ -1,11 +1,16 @@ +use std::borrow::Cow; +use std::path::PathBuf; + pub use ReprAttr::*; use rustc_abi::Align; use rustc_ast::token::CommentKind; use rustc_ast::{AttrStyle, ast}; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::{Decodable, Encodable, HashStable_Generic, PrintAttribute}; use rustc_span::def_id::DefId; use rustc_span::hygiene::Transparency; use rustc_span::{Ident, Span, Symbol}; +pub use rustc_target::spec::SanitizerSet; use thin_vec::ThinVec; use crate::attrs::pretty_printing::PrintAttribute; @@ -213,6 +218,17 @@ pub enum MirDialect { Runtime, } +impl IntoDiagArg for MirDialect { + fn into_diag_arg(self, _path: &mut Option<PathBuf>) -> DiagArgValue { + let arg = match self { + MirDialect::Analysis => "analysis", + MirDialect::Built => "built", + MirDialect::Runtime => "runtime", + }; + DiagArgValue::Str(Cow::Borrowed(arg)) + } +} + #[derive(Clone, Copy, Decodable, Debug, Encodable, PartialEq)] #[derive(HashStable_Generic, PrintAttribute)] pub enum MirPhase { @@ -221,6 +237,17 @@ pub enum MirPhase { Optimized, } +impl IntoDiagArg for MirPhase { + fn into_diag_arg(self, _path: &mut Option<PathBuf>) -> DiagArgValue { + let arg = match self { + MirPhase::Initial => "initial", + MirPhase::PostCleanup => "post-cleanup", + MirPhase::Optimized => "optimized", + }; + DiagArgValue::Str(Cow::Borrowed(arg)) + } +} + /// Represents parsed *built-in* inert attributes. /// /// ## Overview @@ -340,6 +367,9 @@ pub enum AttributeKind { /// Represents `#[coverage(..)]`. Coverage(Span, CoverageAttrKind), + /// Represents `#[crate_name = ...]` + CrateName { name: Symbol, name_span: Span, attr_span: Span, style: AttrStyle }, + /// Represents `#[custom_mir]`. CustomMir(Option<(MirDialect, Span)>, Option<(MirPhase, Span)>, Span), @@ -479,6 +509,12 @@ pub enum AttributeKind { /// Represents `#[rustc_object_lifetime_default]`. RustcObjectLifetimeDefault, + /// Represents `#[sanitize]` + /// + /// The on set and off set are disjoint, since there's a third option: unset. + /// A node may not set the sanitizer setting, in which case it inherits from its parents.
+ Sanitize { on_set: SanitizerSet, off_set: SanitizerSet, span: Span }, + /// Represents `#[should_panic]` ShouldPanic { reason: Option<Symbol>, span: Span }, @@ -498,8 +534,9 @@ pub enum AttributeKind { /// Represents `#[rustc_std_internal_symbol]`. StdInternalSymbol(Span), - /// Represents `#[target_feature(enable = "...")]` - TargetFeature(ThinVec<(Symbol, Span)>, Span), + /// Represents `#[target_feature(enable = "...")]` and + /// `#[unsafe(force_target_feature(enable = "...")]`. + TargetFeature { features: ThinVec<(Symbol, Span)>, attr_span: Span, was_forced: bool }, /// Represents `#[track_caller]` TrackCaller(Span), diff --git a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs index defabdccc02..e5329c104bb 100644 --- a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs +++ b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs @@ -7,6 +7,11 @@ pub enum EncodeCrossCrate { } impl AttributeKind { + /// Whether this attribute should be encoded in metadata files. + /// + /// If this is "Yes", then another crate can do `tcx.get_all_attrs(did)` for a did in this crate, and get the attribute. + /// When this is No, the attribute is filtered out while encoding and other crate won't be able to observe it. + /// This can be unexpectedly good for performance, so unless necessary for cross-crate compilation, prefer No. pub fn encode_cross_crate(&self) -> EncodeCrossCrate { use AttributeKind::*; use EncodeCrossCrate::*; @@ -31,6 +36,7 @@ impl AttributeKind { ConstTrait(..) => No, Coroutine(..) => No, Coverage(..) => No, + CrateName { .. } => No, CustomMir(_, _, _) => Yes, DenyExplicitImpl(..) => No, Deprecation { .. } => Yes, @@ -73,12 +79,13 @@ impl AttributeKind { RustcLayoutScalarValidRangeEnd(..) => Yes, RustcLayoutScalarValidRangeStart(..) => Yes, RustcObjectLifetimeDefault => No, + Sanitize { .. } => No, ShouldPanic { .. } => No, SkipDuringMethodDispatch { .. } => No, SpecializationTrait(..) => No, Stability { .. } => Yes, StdInternalSymbol(..) => No, - TargetFeature(..) => No, + TargetFeature { .. } => No, TrackCaller(..) => Yes, TypeConst(..) => Yes, UnsafeSpecializationMarker(..) => No, diff --git a/compiler/rustc_hir/src/attrs/pretty_printing.rs b/compiler/rustc_hir/src/attrs/pretty_printing.rs index e44b29141da..e65de25b451 100644 --- a/compiler/rustc_hir/src/attrs/pretty_printing.rs +++ b/compiler/rustc_hir/src/attrs/pretty_printing.rs @@ -6,6 +6,7 @@ use rustc_ast::{AttrStyle, IntTy, UintTy}; use rustc_ast_pretty::pp::Printer; use rustc_span::hygiene::Transparency; use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol}; +use rustc_target::spec::SanitizerSet; use thin_vec::ThinVec; /// This trait is used to print attributes in `rustc_hir_pretty`. @@ -146,4 +147,14 @@ macro_rules! 
print_tup { print_tup!(A B C D E F G H); print_skip!(Span, (), ErrorGuaranteed); print_disp!(u16, bool, NonZero<u32>); -print_debug!(Symbol, Ident, UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency); +print_debug!( + Symbol, + Ident, + UintTy, + IntTy, + Align, + AttrStyle, + CommentKind, + Transparency, + SanitizerSet, +); diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs index 79319e24266..8af4740f376 100644 --- a/compiler/rustc_hir/src/def.rs +++ b/compiler/rustc_hir/src/def.rs @@ -1,10 +1,12 @@ use std::array::IntoIter; +use std::borrow::Cow; use std::fmt::Debug; use rustc_ast as ast; use rustc_ast::NodeId; use rustc_data_structures::stable_hasher::ToStableHashKey; use rustc_data_structures::unord::UnordMap; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::Symbol; use rustc_span::def_id::{DefId, LocalDefId}; @@ -586,6 +588,12 @@ pub enum Res<Id = hir::HirId> { Err, } +impl<Id> IntoDiagArg for Res<Id> { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.descr())) + } +} + /// The result of resolving a path before lowering to HIR, /// with "module" segments resolved and associated item /// segments deferred to type checking. @@ -673,6 +681,12 @@ impl Namespace { } } +impl IntoDiagArg for Namespace { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.descr())) + } +} + impl<CTX: crate::HashStableContext> ToStableHashKey<CTX> for Namespace { type KeyType = Namespace; diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 2c8986b7c7d..e397c286de2 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -1,4 +1,5 @@ // ignore-tidy-filelength +use std::borrow::Cow; use std::fmt; use rustc_abi::ExternAbi; @@ -17,6 +18,7 @@ pub use rustc_ast::{ use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::tagged_ptr::TaggedRef; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_index::IndexVec; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::def_id::LocalDefId; @@ -1159,6 +1161,12 @@ pub struct AttrPath { pub span: Span, } +impl IntoDiagArg for AttrPath { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } +} + impl AttrPath { pub fn from_ast(path: &ast::Path) -> Self { AttrPath { @@ -2259,8 +2267,15 @@ impl fmt::Display for ConstContext { } } -// NOTE: `IntoDiagArg` impl for `ConstContext` lives in `rustc_errors` -// due to a cyclical dependency between hir and that crate. +impl IntoDiagArg for ConstContext { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(match self { + ConstContext::ConstFn => "const_fn", + ConstContext::Static(_) => "static", + ConstContext::Const { .. } => "const", + })) + } +} /// A literal. pub type Lit = Spanned<LitKind>; diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index 905b84a8cbe..0464665b41f 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -437,6 +437,9 @@ language_item_table! 
{ DefaultTrait1, sym::default_trait1, default_trait1_trait, Target::Trait, GenericRequirement::None; ContractCheckEnsures, sym::contract_check_ensures, contract_check_ensures_fn, Target::Fn, GenericRequirement::None; + + // Reborrowing related lang-items + Reborrow, sym::reborrow, reborrow, Target::Trait, GenericRequirement::Exact(0); } /// The requirement imposed on the generics of a lang item diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs index f1212d07ff6..78fc63753a2 100644 --- a/compiler/rustc_hir/src/lib.rs +++ b/compiler/rustc_hir/src/lib.rs @@ -3,14 +3,11 @@ //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/hir.html // tidy-alphabetical-start -#![allow(internal_features)] #![feature(associated_type_defaults)] #![feature(closure_track_caller)] #![feature(debug_closure_helpers)] #![feature(exhaustive_patterns)] -#![feature(negative_impls)] #![feature(never_type)] -#![feature(rustc_attrs)] #![feature(variant_count)] #![recursion_limit = "256"] // tidy-alphabetical-end @@ -25,7 +22,7 @@ pub mod definitions; pub mod diagnostic_items; pub use rustc_span::def_id; mod hir; -pub mod hir_id; +pub use rustc_hir_id::{self as hir_id, *}; pub mod intravisit; pub mod lang_items; pub mod lints; @@ -41,7 +38,6 @@ mod tests; #[doc(no_inline)] pub use hir::*; -pub use hir_id::*; pub use lang_items::{LangItem, LanguageItems}; pub use stability::*; pub use stable_hash_impls::HashStableContext; diff --git a/compiler/rustc_hir/src/lints.rs b/compiler/rustc_hir/src/lints.rs index e3cde2d3bb6..061ec786dc8 100644 --- a/compiler/rustc_hir/src/lints.rs +++ b/compiler/rustc_hir/src/lints.rs @@ -1,8 +1,8 @@ use rustc_data_structures::fingerprint::Fingerprint; use rustc_macros::HashStable_Generic; -use rustc_span::{Span, Symbol}; +use rustc_span::Span; -use crate::{HirId, Target}; +use crate::{AttrPath, HirId, Target}; #[derive(Debug)] pub struct DelayedLints { @@ -34,5 +34,5 @@ pub enum AttributeLintKind { UnusedDuplicate { this: Span, other: Span, warning: bool }, IllFormedAttributeInput { suggestions: Vec<String> }, EmptyAttribute { first_span: Span }, - InvalidTarget { name: Symbol, target: Target, applied: String, only: &'static str }, + InvalidTarget { name: AttrPath, target: Target, applied: Vec<String>, only: &'static str }, } diff --git a/compiler/rustc_hir/src/stable_hash_impls.rs b/compiler/rustc_hir/src/stable_hash_impls.rs index ecc608d437b..16e8bac3d8a 100644 --- a/compiler/rustc_hir/src/stable_hash_impls.rs +++ b/compiler/rustc_hir/src/stable_hash_impls.rs @@ -5,7 +5,7 @@ use crate::HashIgnoredAttrId; use crate::hir::{ AttributeMap, BodyId, Crate, ForeignItemId, ImplItemId, ItemId, OwnerNodes, TraitItemId, }; -use crate::hir_id::{HirId, ItemLocalId}; +use crate::hir_id::ItemLocalId; use crate::lints::DelayedLints; /// Requirements for a `StableHashingContext` to be used in this crate. 
@@ -15,25 +15,6 @@ pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStabl fn hash_attr_id(&mut self, id: &HashIgnoredAttrId, hasher: &mut StableHasher); } -impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for HirId { - type KeyType = (DefPathHash, ItemLocalId); - - #[inline] - fn to_stable_hash_key(&self, hcx: &HirCtx) -> (DefPathHash, ItemLocalId) { - let def_path_hash = self.owner.def_id.to_stable_hash_key(hcx); - (def_path_hash, self.local_id) - } -} - -impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for ItemLocalId { - type KeyType = ItemLocalId; - - #[inline] - fn to_stable_hash_key(&self, _: &HirCtx) -> ItemLocalId { - *self - } -} - impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for BodyId { type KeyType = (DefPathHash, ItemLocalId); diff --git a/compiler/rustc_hir/src/target.rs b/compiler/rustc_hir/src/target.rs index f68dad3a5e8..dcac51b10b4 100644 --- a/compiler/rustc_hir/src/target.rs +++ b/compiler/rustc_hir/src/target.rs @@ -79,6 +79,8 @@ impl Display for Target { } } +rustc_error_messages::into_diag_arg_using_display!(Target); + impl Target { pub fn is_associated_item(self) -> bool { match self { diff --git a/compiler/rustc_hir/src/version.rs b/compiler/rustc_hir/src/version.rs index ab5ab026b4c..bc2c38a4935 100644 --- a/compiler/rustc_hir/src/version.rs +++ b/compiler/rustc_hir/src/version.rs @@ -1,6 +1,8 @@ +use std::borrow::Cow; use std::fmt::{self, Display}; use std::sync::OnceLock; +use rustc_error_messages::{DiagArgValue, IntoDiagArg}; use rustc_macros::{ Decodable, Encodable, HashStable_Generic, PrintAttribute, current_rustc_version, }; @@ -45,3 +47,9 @@ impl Display for RustcVersion { write!(formatter, "{}.{}.{}", self.major, self.minor, self.patch) } } + +impl IntoDiagArg for RustcVersion { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.to_string())) + } +} diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index 2428c1aa29f..06f2ec512ab 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -509,10 +509,6 @@ hir_analysis_supertrait_item_shadowee = item from `{$supertrait}` is shadowed by hir_analysis_supertrait_item_shadowing = trait item `{$item}` from `{$subtrait}` shadows identically named item from supertrait -hir_analysis_tait_forward_compat2 = item does not constrain `{$opaque_type}` - .note = consider removing `#[define_opaque]` or adding an empty `#[define_opaque()]` - .opaque = this opaque type is supposed to be constrained - hir_analysis_target_feature_on_main = `main` function is not allowed to have `#[target_feature]` hir_analysis_too_large_static = extern static is too large for the target architecture diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 161a8566b04..eccb88a938f 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -2053,3 +2053,29 @@ pub(super) fn check_coroutine_obligations( Ok(()) } + +pub(super) fn check_potentially_region_dependent_goals<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: LocalDefId, +) -> Result<(), ErrorGuaranteed> { + if !tcx.next_trait_solver_globally() { + return Ok(()); + } + let typeck_results = tcx.typeck(def_id); + let param_env = tcx.param_env(def_id); + + // We use `TypingMode::Borrowck` as we want to use the opaque types computed by HIR typeck. 
+ let typing_mode = TypingMode::borrowck(tcx, def_id); + let infcx = tcx.infer_ctxt().ignoring_regions().build(typing_mode); + let ocx = ObligationCtxt::new_with_diagnostics(&infcx); + for (predicate, cause) in &typeck_results.potentially_region_dependent_goals { + let predicate = fold_regions(tcx, *predicate, |_, _| { + infcx.next_region_var(RegionVariableOrigin::Misc(cause.span)) + }); + ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate)); + } + + let errors = ocx.select_all_or_error(); + debug!(?errors); + if errors.is_empty() { Ok(()) } else { Err(infcx.err_ctxt().report_fulfillment_errors(errors)) } +} diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index f50aed0b3c2..cfc6bc2f3a0 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -136,6 +136,10 @@ fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hi | sym::round_ties_even_f64 | sym::round_ties_even_f128 | sym::autodiff + | sym::prefetch_read_data + | sym::prefetch_write_data + | sym::prefetch_read_instruction + | sym::prefetch_write_instruction | sym::const_eval_select => hir::Safety::Safe, _ => hir::Safety::Unsafe, }; @@ -218,7 +222,7 @@ pub(crate) fn check_intrinsic_type( | sym::prefetch_write_data | sym::prefetch_read_instruction | sym::prefetch_write_instruction => { - (1, 0, vec![Ty::new_imm_ptr(tcx, param(0)), tcx.types.i32], tcx.types.unit) + (1, 1, vec![Ty::new_imm_ptr(tcx, param(0))], tcx.types.unit) } sym::needs_drop => (1, 0, vec![], tcx.types.bool), diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index 85445cb3c00..2e4b151d4dc 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -109,6 +109,7 @@ pub(super) fn provide(providers: &mut Providers) { collect_return_position_impl_trait_in_trait_tys, compare_impl_item: compare_impl_item::compare_impl_item, check_coroutine_obligations: check::check_coroutine_obligations, + check_potentially_region_dependent_goals: check::check_potentially_region_dependent_goals, check_type_wf: wfcheck::check_type_wf, check_well_formed: wfcheck::check_well_formed, ..*providers diff --git a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs index 50e20a19eda..b6d898886ac 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs @@ -4,9 +4,10 @@ use rustc_hir::{self as hir, Expr, ImplItem, Item, Node, TraitItem, def, intravi use rustc_middle::bug; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{self, DefiningScopeKind, Ty, TyCtxt, TypeVisitableExt}; +use rustc_trait_selection::opaque_types::report_item_does_not_constrain_error; use tracing::{debug, instrument, trace}; -use crate::errors::{TaitForwardCompat2, UnconstrainedOpaqueType}; +use crate::errors::UnconstrainedOpaqueType; /// Checks "defining uses" of opaque `impl Trait` in associated types. /// These can only be defined by associated items of the same trait. 
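// Illustrative sketch, not part of the patch: the user-facing shape of the situation reported
// by `report_item_does_not_constrain_error`, which this hunk now uses in place of the removed
// `TaitForwardCompat2` diagnostic. The exact nightly syntax (`type_alias_impl_trait`,
// `#[define_opaque]`) is unstable and may change:

#![feature(type_alias_impl_trait)]

type Tait = impl Sized;

#[define_opaque(Tait)]
fn defines() -> Tait {
    // Defining use: constrains the hidden type of `Tait` to `u8`.
    0u8
}

// A `#[define_opaque(Tait)]` item whose body never constrains `Tait` would instead be rejected
// with "item does not constrain `Tait`", which is the error emitted here.

fn main() {
    let _ = defines();
}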
@@ -127,14 +128,11 @@ impl<'tcx> TaitConstraintLocator<'tcx> { } fn non_defining_use_in_defining_scope(&mut self, item_def_id: LocalDefId) { - let guar = self.tcx.dcx().emit_err(TaitForwardCompat2 { - span: self - .tcx - .def_ident_span(item_def_id) - .unwrap_or_else(|| self.tcx.def_span(item_def_id)), - opaque_type_span: self.tcx.def_span(self.def_id), - opaque_type: self.tcx.def_path_str(self.def_id), - }); + // We make sure that all opaque types get defined while + // type checking the defining scope, so this error is unreachable + // with the new solver. + assert!(!self.tcx.next_trait_solver_globally()); + let guar = report_item_does_not_constrain_error(self.tcx, item_def_id, self.def_id, None); self.insert_found(ty::OpaqueHiddenType::new_error(self.tcx, guar)); } @@ -252,9 +250,7 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>( } else if let Some(hidden_ty) = tables.concrete_opaque_types.get(&def_id) { hidden_ty.ty } else { - // FIXME(-Znext-solver): This should not be necessary and we should - // instead rely on inference variable fallback inside of typeck itself. - + assert!(!tcx.next_trait_solver_globally()); // We failed to resolve the opaque type or it // resolves to itself. We interpret this as the // no values of the hidden type ever being constructed, @@ -273,6 +269,7 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>( if let Err(guar) = hir_ty.error_reported() { Ty::new_error(tcx, guar) } else { + assert!(!tcx.next_trait_solver_globally()); hir_ty } } diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 26a98722b34..6565ad7cf53 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -410,17 +410,6 @@ pub(crate) struct UnconstrainedOpaqueType { pub what: &'static str, } -#[derive(Diagnostic)] -#[diag(hir_analysis_tait_forward_compat2)] -#[note] -pub(crate) struct TaitForwardCompat2 { - #[primary_span] - pub span: Span, - #[note(hir_analysis_opaque)] - pub opaque_type_span: Span, - pub opaque_type: String, -} - pub(crate) struct MissingTypeParams { pub span: Span, pub def_span: Span, diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 3a153ab089a..44a5ceed469 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -82,7 +82,7 @@ mod coherence; mod collect; mod constrained_generic_params; mod delegation; -mod errors; +pub mod errors; pub mod hir_ty_lowering; pub mod hir_wf_check; mod impl_wf_check; diff --git a/compiler/rustc_hir_id/Cargo.toml b/compiler/rustc_hir_id/Cargo.toml new file mode 100644 index 00000000000..c357a4f62d9 --- /dev/null +++ b/compiler/rustc_hir_id/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "rustc_hir_id" +version = "0.0.0" +edition = "2024" + +[dependencies] +# tidy-alphabetical-start +rustc_data_structures = { path = "../rustc_data_structures" } +rustc_index = { path = "../rustc_index" } +rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } +rustc_span = { path = "../rustc_span" } +# tidy-alphabetical-end diff --git a/compiler/rustc_hir/src/hir_id.rs b/compiler/rustc_hir_id/src/lib.rs index b48a081d371..d07bc88e66a 100644 --- a/compiler/rustc_hir/src/hir_id.rs +++ b/compiler/rustc_hir_id/src/lib.rs @@ -1,11 +1,15 @@ +//! Library containing Id types from `rustc_hir`, split out so crates can use it without depending +//! 
on all of `rustc_hir` (which is large and depends on other large things like `rustc_target`). +#![allow(internal_features)] +#![feature(negative_impls)] +#![feature(rustc_attrs)] + use std::fmt::{self, Debug}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; -use rustc_span::HashStableContext; -use rustc_span::def_id::DefPathHash; - -use crate::def_id::{CRATE_DEF_ID, DefId, DefIndex, LocalDefId}; +pub use rustc_span::HashStableContext; +use rustc_span::def_id::{CRATE_DEF_ID, DefId, DefIndex, DefPathHash, LocalDefId}; #[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable)] pub struct OwnerId { @@ -171,3 +175,22 @@ pub const CRATE_HIR_ID: HirId = HirId { owner: OwnerId { def_id: CRATE_DEF_ID }, local_id: ItemLocalId::ZERO }; pub const CRATE_OWNER_ID: OwnerId = OwnerId { def_id: CRATE_DEF_ID }; + +impl<CTX: rustc_span::HashStableContext> ToStableHashKey<CTX> for HirId { + type KeyType = (DefPathHash, ItemLocalId); + + #[inline] + fn to_stable_hash_key(&self, hcx: &CTX) -> (DefPathHash, ItemLocalId) { + let def_path_hash = self.owner.def_id.to_stable_hash_key(hcx); + (def_path_hash, self.local_id) + } +} + +impl<CTX: HashStableContext> ToStableHashKey<CTX> for ItemLocalId { + type KeyType = ItemLocalId; + + #[inline] + fn to_stable_hash_key(&self, _: &CTX) -> ItemLocalId { + *self + } +} diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index b80a2af3100..5aec50c8b53 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -83,14 +83,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { *self.deferred_cast_checks.borrow_mut() = deferred_cast_checks; } - pub(in super::super) fn check_transmutes(&self) { - let mut deferred_transmute_checks = self.deferred_transmute_checks.borrow_mut(); - debug!("FnCtxt::check_transmutes: {} deferred checks", deferred_transmute_checks.len()); - for (from, to, hir_id) in deferred_transmute_checks.drain(..) { - self.check_transmute(from, to, hir_id); - } - } - pub(in super::super) fn check_asms(&self) { let mut deferred_asm_checks = self.deferred_asm_checks.borrow_mut(); debug!("FnCtxt::check_asm: {} deferred checks", deferred_asm_checks.len()); diff --git a/compiler/rustc_hir_typeck/src/intrinsicck.rs b/compiler/rustc_hir_typeck/src/intrinsicck.rs index 194e420b606..7567f8ba348 100644 --- a/compiler/rustc_hir_typeck/src/intrinsicck.rs +++ b/compiler/rustc_hir_typeck/src/intrinsicck.rs @@ -7,11 +7,10 @@ use rustc_hir as hir; use rustc_index::Idx; use rustc_middle::bug; use rustc_middle::ty::layout::{LayoutError, SizeSkeleton}; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::def_id::LocalDefId; use tracing::trace; -use super::FnCtxt; - /// If the type is `Option<T>`, it will return `T`, otherwise /// the type itself. Works on most `Option`-like types. fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { @@ -39,119 +38,117 @@ fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { ty } -impl<'a, 'tcx> FnCtxt<'a, 'tcx> { - /// FIXME: Move this check out of typeck, since it'll easily cycle when revealing opaques, - /// and we shouldn't need to check anything here if the typeck results are tainted. 
- pub(crate) fn check_transmute(&self, from: Ty<'tcx>, to: Ty<'tcx>, hir_id: HirId) { - let tcx = self.tcx; - let dl = &tcx.data_layout; - let span = tcx.hir_span(hir_id); - let normalize = |ty| { - let ty = self.resolve_vars_if_possible(ty); - if let Ok(ty) = - self.tcx.try_normalize_erasing_regions(self.typing_env(self.param_env), ty) - { - ty +/// Try to display a sensible error with as much information as possible. +fn skeleton_string<'tcx>( + ty: Ty<'tcx>, + sk: Result<SizeSkeleton<'tcx>, &'tcx LayoutError<'tcx>>, +) -> String { + match sk { + Ok(SizeSkeleton::Pointer { tail, .. }) => format!("pointer to `{tail}`"), + Ok(SizeSkeleton::Known(size, _)) => { + if let Some(v) = u128::from(size.bytes()).checked_mul(8) { + format!("{v} bits") } else { - Ty::new_error_with_message( - tcx, - span, - "tried to normalize non-wf type in check_transmute", - ) + // `u128` should definitely be able to hold the size of different architectures + // larger sizes should be reported as error `are too big for the target architecture` + // otherwise we have a bug somewhere + bug!("{:?} overflow for u128", size) } - }; - let from = normalize(from); - let to = normalize(to); - trace!(?from, ?to); - if from.has_non_region_infer() || to.has_non_region_infer() { - // Note: this path is currently not reached in any test, so any - // example that triggers this would be worth minimizing and - // converting into a test. - self.dcx().span_bug(span, "argument to transmute has inference variables"); } - // Transmutes that are only changing lifetimes are always ok. - if from == to { - return; + Ok(SizeSkeleton::Generic(size)) => { + format!("generic size {size}") + } + Err(LayoutError::TooGeneric(bad)) => { + if *bad == ty { + "this type does not have a fixed size".to_owned() + } else { + format!("size can vary because of {bad}") + } + } + Err(err) => err.to_string(), + } +} + +fn check_transmute<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, + from: Ty<'tcx>, + to: Ty<'tcx>, + hir_id: HirId, +) { + let span = || tcx.hir_span(hir_id); + let normalize = |ty| { + if let Ok(ty) = tcx.try_normalize_erasing_regions(typing_env, ty) { + ty + } else { + Ty::new_error_with_message( + tcx, + span(), + format!("tried to normalize non-wf type {ty:#?} in check_transmute"), + ) } + }; - let skel = |ty| SizeSkeleton::compute(ty, tcx, self.typing_env(self.param_env)); - let sk_from = skel(from); - let sk_to = skel(to); - trace!(?sk_from, ?sk_to); + let from = normalize(from); + let to = normalize(to); + trace!(?from, ?to); - // Check for same size using the skeletons. - if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) { - if sk_from.same_size(sk_to) { - return; - } + // Transmutes that are only changing lifetimes are always ok. + if from == to { + return; + } - // Special-case transmuting from `typeof(function)` and - // `Option<typeof(function)>` to present a clearer error. - let from = unpack_option_like(tcx, from); - if let (&ty::FnDef(..), SizeSkeleton::Known(size_to, _)) = (from.kind(), sk_to) - && size_to == Pointer(dl.instruction_address_space).size(&tcx) - { - struct_span_code_err!(self.dcx(), span, E0591, "can't transmute zero-sized type") - .with_note(format!("source type: {from}")) - .with_note(format!("target type: {to}")) - .with_help("cast with `as` to a pointer instead") - .emit(); - return; - } + let sk_from = SizeSkeleton::compute(from, tcx, typing_env); + let sk_to = SizeSkeleton::compute(to, tcx, typing_env); + trace!(?sk_from, ?sk_to); + + // Check for same size using the skeletons. 
+ if let Ok(sk_from) = sk_from + && let Ok(sk_to) = sk_to + { + if sk_from.same_size(sk_to) { + return; } - // Try to display a sensible error with as much information as possible. - let skeleton_string = |ty: Ty<'tcx>, sk: Result<_, &_>| match sk { - Ok(SizeSkeleton::Pointer { tail, .. }) => format!("pointer to `{tail}`"), - Ok(SizeSkeleton::Known(size, _)) => { - if let Some(v) = u128::from(size.bytes()).checked_mul(8) { - format!("{v} bits") - } else { - // `u128` should definitely be able to hold the size of different architectures - // larger sizes should be reported as error `are too big for the target architecture` - // otherwise we have a bug somewhere - bug!("{:?} overflow for u128", size) - } - } - Ok(SizeSkeleton::Generic(size)) => { - if let Some(size) = - self.try_structurally_resolve_const(span, size).try_to_target_usize(tcx) - { - format!("{size} bytes") - } else { - format!("generic size {size}") - } - } - Err(LayoutError::TooGeneric(bad)) => { - if *bad == ty { - "this type does not have a fixed size".to_owned() - } else { - format!("size can vary because of {bad}") - } - } - Err(err) => err.to_string(), - }; - - let mut err = struct_span_code_err!( - self.dcx(), - span, - E0512, - "cannot transmute between types of different sizes, \ - or dependently-sized types" - ); - if from == to { - err.note(format!("`{from}` does not have a fixed size")); - err.emit(); - } else { - err.note(format!("source type: `{}` ({})", from, skeleton_string(from, sk_from))) - .note(format!("target type: `{}` ({})", to, skeleton_string(to, sk_to))); - if let Err(LayoutError::ReferencesError(_)) = sk_from { - err.delay_as_bug(); - } else if let Err(LayoutError::ReferencesError(_)) = sk_to { - err.delay_as_bug(); - } else { - err.emit(); - } + // Special-case transmuting from `typeof(function)` and + // `Option<typeof(function)>` to present a clearer error. + let from = unpack_option_like(tcx, from); + if let ty::FnDef(..) 
= from.kind() + && let SizeSkeleton::Known(size_to, _) = sk_to + && size_to == Pointer(tcx.data_layout.instruction_address_space).size(&tcx) + { + struct_span_code_err!(tcx.sess.dcx(), span(), E0591, "can't transmute zero-sized type") + .with_note(format!("source type: {from}")) + .with_note(format!("target type: {to}")) + .with_help("cast with `as` to a pointer instead") + .emit(); + return; } } + + let mut err = struct_span_code_err!( + tcx.sess.dcx(), + span(), + E0512, + "cannot transmute between types of different sizes, or dependently-sized types" + ); + if from == to { + err.note(format!("`{from}` does not have a fixed size")); + err.emit(); + } else { + err.note(format!("source type: `{}` ({})", from, skeleton_string(from, sk_from))); + err.note(format!("target type: `{}` ({})", to, skeleton_string(to, sk_to))); + err.emit(); + } +} + +pub(crate) fn check_transmutes(tcx: TyCtxt<'_>, owner: LocalDefId) { + assert!(!tcx.is_typeck_child(owner.to_def_id())); + let typeck_results = tcx.typeck(owner); + let None = typeck_results.tainted_by_errors else { return }; + + let typing_env = ty::TypingEnv::post_analysis(tcx, owner); + for &(from, to, hir_id) in &typeck_results.transmutes_to_check { + check_transmute(tcx, typing_env, from, to, hir_id); + } } diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index aae870f7ee3..879b7472736 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -53,7 +53,7 @@ use rustc_hir_analysis::check::{check_abi, check_custom_abi}; use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer; use rustc_infer::traits::{ObligationCauseCode, ObligationInspector, WellFormedLoc}; use rustc_middle::query::Providers; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_session::config; use rustc_span::Span; @@ -247,33 +247,21 @@ fn typeck_with_inspect<'tcx>( debug!(pending_obligations = ?fcx.fulfillment_cx.borrow().pending_obligations()); - if let None = fcx.infcx.tainted_by_errors() { - fcx.report_ambiguity_errors(); + // We need to handle opaque types before emitting ambiguity errors as applying + // defining uses may guide type inference. + if fcx.next_trait_solver() { + fcx.handle_opaque_type_uses_next(); } + fcx.select_obligations_where_possible(|_| {}); if let None = fcx.infcx.tainted_by_errors() { - fcx.check_transmutes(); + fcx.report_ambiguity_errors(); } fcx.check_asms(); let typeck_results = fcx.resolve_type_vars_in_body(body); - // Handle potentially region dependent goals, see `InferCtxt::in_hir_typeck`. 
- if let None = fcx.infcx.tainted_by_errors() { - for obligation in fcx.take_hir_typeck_potentially_region_dependent_goals() { - let obligation = fcx.resolve_vars_if_possible(obligation); - if obligation.has_non_region_infer() { - bug!("unexpected inference variable after writeback: {obligation:?}"); - } - fcx.register_predicate(obligation); - } - fcx.select_obligations_where_possible(|_| {}); - if let None = fcx.infcx.tainted_by_errors() { - fcx.report_ambiguity_errors(); - } - } - fcx.detect_opaque_types_added_during_writeback(); // Consistency check our TypeckResults instance can hold all ItemLocalIds @@ -555,6 +543,7 @@ pub fn provide(providers: &mut Providers) { method_autoderef_steps: method::probe::method_autoderef_steps, typeck, used_trait_imports, + check_transmutes: intrinsicck::check_transmutes, ..*providers }; } diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index 3ca3b56b87e..f5c3ebd8375 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -909,6 +909,10 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { debug_assert_matches!(param_ty.kind(), ty::Param(_)); let tcx = self.tcx; + + // We use `DeepRejectCtxt` here which may return false positive on where clauses + // with alias self types. We need to later on reject these as inherent candidates + // in `consider_probe`. let bounds = self.param_env.caller_bounds().iter().filter_map(|predicate| { let bound_predicate = predicate.kind(); match bound_predicate.skip_binder() { @@ -1945,6 +1949,29 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { ); (xform_self_ty, xform_ret_ty) = self.xform_self_ty(probe.item, trait_ref.self_ty(), trait_ref.args); + + if matches!(probe.kind, WhereClauseCandidate(_)) { + // `WhereClauseCandidate` requires that the self type is a param, + // because it has special behavior with candidate preference as an + // inherent pick. 
+ match ocx.structurally_normalize_ty( + cause, + self.param_env, + trait_ref.self_ty(), + ) { + Ok(ty) => { + if !matches!(ty.kind(), ty::Param(_)) { + debug!("--> not a param ty: {xform_self_ty:?}"); + return ProbeResult::NoMatch; + } + } + Err(errors) => { + debug!("--> cannot relate self-types {:?}", errors); + return ProbeResult::NoMatch; + } + } + } + xform_self_ty = ocx.normalize(cause, self.param_env, xform_self_ty); match ocx.relate(cause, self.param_env, self.variance(), self_ty, xform_self_ty) { diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index 824d592fa6c..c8f6c06b720 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -13,9 +13,7 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::unord::UnordSet; use rustc_errors::codes::*; -use rustc_errors::{ - Applicability, Diag, DiagStyledString, MultiSpan, StashKey, pluralize, struct_span_code_err, -}; +use rustc_errors::{Applicability, Diag, MultiSpan, StashKey, pluralize, struct_span_code_err}; use rustc_hir::attrs::AttributeKind; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::DefId; @@ -1560,11 +1558,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } - if rcvr_ty.is_numeric() && rcvr_ty.is_fresh() - || restrict_type_params - || suggested_derive - || self.lookup_alternative_tuple_impls(&mut err, &unsatisfied_predicates) - { + if rcvr_ty.is_numeric() && rcvr_ty.is_fresh() || restrict_type_params || suggested_derive { } else { self.suggest_traits_to_import( &mut err, @@ -1741,119 +1735,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.emit() } - /// If the predicate failure is caused by an unmet bound on a tuple, recheck if the bound would - /// succeed if all the types on the tuple had no borrows. This is a common problem for libraries - /// like Bevy and ORMs, which rely heavily on traits being implemented on tuples. - fn lookup_alternative_tuple_impls( - &self, - err: &mut Diag<'_>, - unsatisfied_predicates: &[( - ty::Predicate<'tcx>, - Option<ty::Predicate<'tcx>>, - Option<ObligationCause<'tcx>>, - )], - ) -> bool { - let mut found_tuple = false; - for (pred, root, _ob) in unsatisfied_predicates { - let mut preds = vec![pred]; - if let Some(root) = root { - // We will look at both the current predicate and the root predicate that caused it - // to be needed. If calling something like `<(A, &B)>::default()`, then `pred` is - // `&B: Default` and `root` is `(A, &B): Default`, which is the one we are checking - // for further down, so we check both. 
- preds.push(root); - } - for pred in preds { - if let Some(clause) = pred.as_clause() - && let Some(clause) = clause.as_trait_clause() - && let ty = clause.self_ty().skip_binder() - && let ty::Tuple(types) = ty.kind() - { - let path = clause.skip_binder().trait_ref.print_only_trait_path(); - let def_id = clause.def_id(); - let ty = Ty::new_tup( - self.tcx, - self.tcx.mk_type_list_from_iter(types.iter().map(|ty| ty.peel_refs())), - ); - let args = ty::GenericArgs::for_item(self.tcx, def_id, |param, _| { - if param.index == 0 { - ty.into() - } else { - self.infcx.var_for_def(DUMMY_SP, param) - } - }); - if self - .infcx - .type_implements_trait(def_id, args, self.param_env) - .must_apply_modulo_regions() - { - // "`Trait` is implemented for `(A, B)` but not for `(A, &B)`" - let mut msg = DiagStyledString::normal(format!("`{path}` ")); - msg.push_highlighted("is"); - msg.push_normal(" implemented for `("); - let len = types.len(); - for (i, t) in types.iter().enumerate() { - msg.push( - format!("{}", with_forced_trimmed_paths!(t.peel_refs())), - t.peel_refs() != t, - ); - if i < len - 1 { - msg.push_normal(", "); - } - } - msg.push_normal(")` but "); - msg.push_highlighted("not"); - msg.push_normal(" for `("); - for (i, t) in types.iter().enumerate() { - msg.push( - format!("{}", with_forced_trimmed_paths!(t)), - t.peel_refs() != t, - ); - if i < len - 1 { - msg.push_normal(", "); - } - } - msg.push_normal(")`"); - - // Find the span corresponding to the impl that was found to point at it. - if let Some(impl_span) = self - .tcx - .all_impls(def_id) - .filter(|&impl_def_id| { - let header = self.tcx.impl_trait_header(impl_def_id).unwrap(); - let trait_ref = header.trait_ref.instantiate( - self.tcx, - self.infcx.fresh_args_for_item(DUMMY_SP, impl_def_id), - ); - - let value = ty::fold_regions(self.tcx, ty, |_, _| { - self.tcx.lifetimes.re_erased - }); - // FIXME: Don't bother dealing with non-lifetime binders here... - if value.has_escaping_bound_vars() { - return false; - } - self.infcx.can_eq(ty::ParamEnv::empty(), trait_ref.self_ty(), value) - && header.polarity == ty::ImplPolarity::Positive - }) - .map(|impl_def_id| self.tcx.def_span(impl_def_id)) - .next() - { - err.highlighted_span_note(impl_span, msg.0); - } else { - err.highlighted_note(msg.0); - } - found_tuple = true; - } - // If `pred` was already on the tuple, we don't need to look at the root - // obligation too. - break; - } - } - } - found_tuple - } - /// If an appropriate error source is not found, check method chain for possible candidates fn lookup_segments_chain_for_no_match_method( &self, diff --git a/compiler/rustc_hir_typeck/src/opaque_types.rs b/compiler/rustc_hir_typeck/src/opaque_types.rs index e0224f8c6e1..97feac3d009 100644 --- a/compiler/rustc_hir_typeck/src/opaque_types.rs +++ b/compiler/rustc_hir_typeck/src/opaque_types.rs @@ -1,5 +1,218 @@ -use super::FnCtxt; +use rustc_hir::def::DefKind; +use rustc_infer::traits::ObligationCause; +use rustc_middle::ty::{ + self, DefiningScopeKind, EarlyBinder, OpaqueHiddenType, OpaqueTypeKey, TypeVisitableExt, + TypingMode, +}; +use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; +use rustc_trait_selection::opaque_types::{ + NonDefiningUseReason, opaque_type_has_defining_use_args, report_item_does_not_constrain_error, +}; +use rustc_trait_selection::solve; +use tracing::{debug, instrument}; + +use crate::FnCtxt; + impl<'tcx> FnCtxt<'_, 'tcx> { + /// This takes all the opaque type uses during HIR typeck. 
It first computes
+ /// the concrete hidden type by iterating over all defining uses.
+ ///
+ /// A use during HIR typeck is defining if all non-lifetime arguments are
+ /// unique generic parameters and the hidden type does not reference any
+ /// inference variables.
+ ///
+ /// It then uses these defining uses to guide inference for all other uses.
+ #[instrument(level = "debug", skip(self))]
+ pub(super) fn handle_opaque_type_uses_next(&mut self) {
+ // We clone the opaques instead of stealing them here as they are still used for
+ // normalization in the next generation trait solver.
+ let mut opaque_types: Vec<_> = self.infcx.clone_opaque_types();
+ let num_entries = self.inner.borrow_mut().opaque_types().num_entries();
+ let prev = self.checked_opaque_types_storage_entries.replace(Some(num_entries));
+ debug_assert_eq!(prev, None);
+ for entry in &mut opaque_types {
+ *entry = self.resolve_vars_if_possible(*entry);
+ }
+ debug!(?opaque_types);
+
+ self.compute_concrete_opaque_types(&opaque_types);
+ self.apply_computed_concrete_opaque_types(&opaque_types);
+ }
+}
+
+enum UsageKind<'tcx> {
+ None,
+ NonDefiningUse(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>),
+ UnconstrainedHiddenType(OpaqueHiddenType<'tcx>),
+ HasDefiningUse,
+}
+
+impl<'tcx> UsageKind<'tcx> {
+ fn merge(&mut self, other: UsageKind<'tcx>) {
+ match (&*self, &other) {
+ (UsageKind::HasDefiningUse, _) | (_, UsageKind::None) => unreachable!(),
+ (UsageKind::None, _) => *self = other,
+ // When merging non-defining uses, prefer earlier ones. This means
+ // the error happens as early as possible.
+ (
+ UsageKind::NonDefiningUse(..) | UsageKind::UnconstrainedHiddenType(..),
+ UsageKind::NonDefiningUse(..),
+ ) => {}
+ // When merging unconstrained hidden types, we prefer later ones. We do this
+ // because in most cases the defining use is the final return statement
+ // of our function, and other uses with defining arguments are likely not
+ // intended to be defining.
+ (
+ UsageKind::NonDefiningUse(..) | UsageKind::UnconstrainedHiddenType(..),
+ UsageKind::UnconstrainedHiddenType(..) 
| UsageKind::HasDefiningUse, + ) => *self = other, + } + } +} + +impl<'tcx> FnCtxt<'_, 'tcx> { + fn compute_concrete_opaque_types( + &mut self, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], + ) { + let tcx = self.tcx; + let TypingMode::Analysis { defining_opaque_types_and_generators } = self.typing_mode() + else { + unreachable!(); + }; + + for def_id in defining_opaque_types_and_generators { + match tcx.def_kind(def_id) { + DefKind::OpaqueTy => {} + DefKind::Closure => continue, + _ => unreachable!("not opaque or generator: {def_id:?}"), + } + + let mut usage_kind = UsageKind::None; + for &(opaque_type_key, hidden_type) in opaque_types { + if opaque_type_key.def_id != def_id { + continue; + } + + usage_kind.merge(self.consider_opaque_type_use(opaque_type_key, hidden_type)); + if let UsageKind::HasDefiningUse = usage_kind { + break; + } + } + + let guar = match usage_kind { + UsageKind::None => { + if let Some(guar) = self.tainted_by_errors() { + guar + } else { + report_item_does_not_constrain_error(self.tcx, self.body_id, def_id, None) + } + } + UsageKind::NonDefiningUse(opaque_type_key, hidden_type) => { + report_item_does_not_constrain_error( + self.tcx, + self.body_id, + def_id, + Some((opaque_type_key, hidden_type.span)), + ) + } + UsageKind::UnconstrainedHiddenType(hidden_type) => { + let infer_var = hidden_type + .ty + .walk() + .filter_map(ty::GenericArg::as_term) + .find(|term| term.is_infer()) + .unwrap_or_else(|| hidden_type.ty.into()); + self.err_ctxt() + .emit_inference_failure_err( + self.body_id, + hidden_type.span, + infer_var, + TypeAnnotationNeeded::E0282, + false, + ) + .emit() + } + UsageKind::HasDefiningUse => continue, + }; + + self.typeck_results + .borrow_mut() + .concrete_opaque_types + .insert(def_id, OpaqueHiddenType::new_error(tcx, guar)); + self.set_tainted_by_errors(guar); + } + } + + fn consider_opaque_type_use( + &mut self, + opaque_type_key: OpaqueTypeKey<'tcx>, + hidden_type: OpaqueHiddenType<'tcx>, + ) -> UsageKind<'tcx> { + if let Err(err) = opaque_type_has_defining_use_args( + &self, + opaque_type_key, + hidden_type.span, + DefiningScopeKind::HirTypeck, + ) { + match err { + NonDefiningUseReason::Tainted(guar) => { + self.typeck_results.borrow_mut().concrete_opaque_types.insert( + opaque_type_key.def_id, + OpaqueHiddenType::new_error(self.tcx, guar), + ); + return UsageKind::HasDefiningUse; + } + _ => return UsageKind::NonDefiningUse(opaque_type_key, hidden_type), + }; + } + + // We ignore uses of the opaque if they have any inference variables + // as this can frequently happen with recursive calls. + // + // See `tests/ui/traits/next-solver/opaques/universal-args-non-defining.rs`. 
+ if hidden_type.ty.has_non_region_infer() { + return UsageKind::UnconstrainedHiddenType(hidden_type); + } + + let cause = ObligationCause::misc(hidden_type.span, self.body_id); + let at = self.at(&cause, self.param_env); + let hidden_type = match solve::deeply_normalize(at, hidden_type) { + Ok(hidden_type) => hidden_type, + Err(errors) => { + let guar = self.err_ctxt().report_fulfillment_errors(errors); + OpaqueHiddenType::new_error(self.tcx, guar) + } + }; + let hidden_type = hidden_type.remap_generic_params_to_declaration_params( + opaque_type_key, + self.tcx, + DefiningScopeKind::HirTypeck, + ); + + let prev = self + .typeck_results + .borrow_mut() + .concrete_opaque_types + .insert(opaque_type_key.def_id, hidden_type); + assert!(prev.is_none()); + UsageKind::HasDefiningUse + } + + fn apply_computed_concrete_opaque_types( + &mut self, + opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)], + ) { + let tcx = self.tcx; + for &(key, hidden_type) in opaque_types { + let expected = + *self.typeck_results.borrow_mut().concrete_opaque_types.get(&key.def_id).unwrap(); + + let expected = EarlyBinder::bind(expected.ty).instantiate(tcx, key.args); + self.demand_eqtype(hidden_type.span, expected, hidden_type.ty); + } + } + /// We may in theory add further uses of an opaque after cloning the opaque /// types storage during writeback when computing the defining uses. /// diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs index 093de950d63..253eacbf353 100644 --- a/compiler/rustc_hir_typeck/src/writeback.rs +++ b/compiler/rustc_hir_typeck/src/writeback.rs @@ -27,7 +27,7 @@ use rustc_middle::ty::{ }; use rustc_span::{Span, sym}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; -use rustc_trait_selection::opaque_types::check_opaque_type_parameter_valid; +use rustc_trait_selection::opaque_types::opaque_type_has_defining_use_args; use rustc_trait_selection::solve; use tracing::{debug, instrument}; @@ -74,7 +74,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { wbcx.visit_user_provided_tys(); wbcx.visit_user_provided_sigs(); wbcx.visit_coroutine_interior(); + wbcx.visit_transmutes(); wbcx.visit_offset_of_container_types(); + wbcx.visit_potentially_region_dependent_goals(); wbcx.typeck_results.rvalue_scopes = mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes); @@ -532,8 +534,36 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } } + fn visit_transmutes(&mut self) { + let tcx = self.tcx(); + let fcx_typeck_results = self.fcx.typeck_results.borrow(); + assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner); + for &(from, to, hir_id) in self.fcx.deferred_transmute_checks.borrow().iter() { + let span = tcx.hir_span(hir_id); + let from = self.resolve(from, &span); + let to = self.resolve(to, &span); + self.typeck_results.transmutes_to_check.push((from, to, hir_id)); + } + } + + fn visit_opaque_types_next(&mut self) { + let mut fcx_typeck_results = self.fcx.typeck_results.borrow_mut(); + assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner); + for hidden_ty in fcx_typeck_results.concrete_opaque_types.values() { + assert!(!hidden_ty.has_infer()); + } + + assert_eq!(self.typeck_results.concrete_opaque_types.len(), 0); + self.typeck_results.concrete_opaque_types = + mem::take(&mut fcx_typeck_results.concrete_opaque_types); + } + #[instrument(skip(self), level = "debug")] fn visit_opaque_types(&mut self) { + if self.fcx.next_trait_solver() { + return self.visit_opaque_types_next(); + } + 
let tcx = self.tcx(); // We clone the opaques instead of stealing them here as they are still used for // normalization in the next generation trait solver. @@ -544,17 +574,14 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { for (opaque_type_key, hidden_type) in opaque_types { let hidden_type = self.resolve(hidden_type, &hidden_type.span); let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span); - - if !self.fcx.next_trait_solver() { - if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind() - && alias_ty.def_id == opaque_type_key.def_id.to_def_id() - && alias_ty.args == opaque_type_key.args - { - continue; - } + if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind() + && alias_ty.def_id == opaque_type_key.def_id.to_def_id() + && alias_ty.args == opaque_type_key.args + { + continue; } - if let Err(err) = check_opaque_type_parameter_valid( + if let Err(err) = opaque_type_has_defining_use_args( &self.fcx, opaque_type_key, hidden_type.span, @@ -762,6 +789,32 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } } + fn visit_potentially_region_dependent_goals(&mut self) { + let obligations = self.fcx.take_hir_typeck_potentially_region_dependent_goals(); + if let None = self.fcx.tainted_by_errors() { + for obligation in obligations { + let (predicate, mut cause) = + self.fcx.resolve_vars_if_possible((obligation.predicate, obligation.cause)); + if predicate.has_non_region_infer() { + self.fcx.dcx().span_delayed_bug( + cause.span, + format!("unexpected inference variable after writeback: {predicate:?}"), + ); + } else { + let predicate = self.tcx().erase_regions(predicate); + if cause.has_infer() || cause.has_placeholders() { + // We can't use the the obligation cause as it references + // information local to this query. + cause = self.fcx.misc(cause.span); + } + self.typeck_results + .potentially_region_dependent_goals + .insert((predicate, cause)); + } + } + } + } + fn resolve<T>(&mut self, value: T, span: &dyn Locatable) -> T where T: TypeFoldable<TyCtxt<'tcx>>, @@ -883,6 +936,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { } } + #[instrument(level = "debug", skip(self, outer_exclusive_binder, new_err))] fn handle_term<T>( &mut self, value: T, diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs index 645d95b1dba..684bd34f909 100644 --- a/compiler/rustc_index/src/bit_set.rs +++ b/compiler/rustc_index/src/bit_set.rs @@ -491,19 +491,15 @@ pub struct ChunkedBitSet<T> { marker: PhantomData<T>, } -// Note: the chunk domain size is duplicated in each variant. This is a bit -// inconvenient, but it allows the type size to be smaller than if we had an -// outer struct containing a chunk domain size plus the `Chunk`, because the -// compiler can place the chunk domain size after the tag. +// NOTE: The chunk size is computed on-the-fly on each manipulation of a chunk. +// This avoids storing it, as it's almost always CHUNK_BITS except for the last one. #[derive(Clone, Debug, PartialEq, Eq)] enum Chunk { /// A chunk that is all zeros; we don't represent the zeros explicitly. - /// The `ChunkSize` is always non-zero. - Zeros(ChunkSize), + Zeros, /// A chunk that is all ones; we don't represent the ones explicitly. - /// `ChunkSize` is always non-zero. - Ones(ChunkSize), + Ones, /// A chunk that has a mix of zeros and ones, which are represented /// explicitly and densely. It never has all zeros or all ones. @@ -514,16 +510,14 @@ enum Chunk { /// to store the length, which would make this type larger. 
These excess /// words are always zero, as are any excess bits in the final in-use word. /// - /// The first `ChunkSize` field is always non-zero. - /// - /// The second `ChunkSize` field is the count of 1s set in the chunk, and + /// The `ChunkSize` field is the count of 1s set in the chunk, and /// must satisfy `0 < count < chunk_domain_size`. /// /// The words are within an `Rc` because it's surprisingly common to /// duplicate an entire chunk, e.g. in `ChunkedBitSet::clone_from()`, or /// when a `Mixed` chunk is union'd into a `Zeros` chunk. When we do need /// to modify a chunk we use `Rc::make_mut`. - Mixed(ChunkSize, ChunkSize, Rc<[Word; CHUNK_WORDS]>), + Mixed(ChunkSize, Rc<[Word; CHUNK_WORDS]>), } // This type is used a lot. Make sure it doesn't unintentionally get bigger. @@ -535,6 +529,22 @@ impl<T> ChunkedBitSet<T> { self.domain_size } + #[inline] + fn last_chunk_size(&self) -> ChunkSize { + let n = self.domain_size % CHUNK_BITS; + if n == 0 { CHUNK_BITS as ChunkSize } else { n as ChunkSize } + } + + /// All the chunks have a chunk_domain_size of `CHUNK_BITS` except the final one. + #[inline] + fn chunk_domain_size(&self, chunk: usize) -> ChunkSize { + if chunk == self.chunks.len() - 1 { + self.last_chunk_size() + } else { + CHUNK_BITS as ChunkSize + } + } + #[cfg(test)] fn assert_valid(&self) { if self.domain_size == 0 { @@ -544,8 +554,9 @@ impl<T> ChunkedBitSet<T> { assert!((self.chunks.len() - 1) * CHUNK_BITS <= self.domain_size); assert!(self.chunks.len() * CHUNK_BITS >= self.domain_size); - for chunk in self.chunks.iter() { - chunk.assert_valid(); + for (chunk_index, chunk) in self.chunks.iter().enumerate() { + let chunk_domain_size = self.chunk_domain_size(chunk_index); + chunk.assert_valid(chunk_domain_size); } } } @@ -556,16 +567,7 @@ impl<T: Idx> ChunkedBitSet<T> { let chunks = if domain_size == 0 { Box::new([]) } else { - // All the chunks have a chunk_domain_size of `CHUNK_BITS` except - // the final one. - let final_chunk_domain_size = { - let n = domain_size % CHUNK_BITS; - if n == 0 { CHUNK_BITS } else { n } - }; - let mut chunks = - vec![Chunk::new(CHUNK_BITS, is_empty); num_chunks(domain_size)].into_boxed_slice(); - *chunks.last_mut().unwrap() = Chunk::new(final_chunk_domain_size, is_empty); - chunks + vec![if is_empty { Zeros } else { Ones }; num_chunks(domain_size)].into_boxed_slice() }; ChunkedBitSet { domain_size, chunks, marker: PhantomData } } @@ -594,11 +596,15 @@ impl<T: Idx> ChunkedBitSet<T> { /// Count the number of bits in the set. pub fn count(&self) -> usize { - self.chunks.iter().map(|chunk| chunk.count()).sum() + self.chunks + .iter() + .enumerate() + .map(|(index, chunk)| chunk.count(self.chunk_domain_size(index))) + .sum() } pub fn is_empty(&self) -> bool { - self.chunks.iter().all(|chunk| matches!(chunk, Zeros(..))) + self.chunks.iter().all(|chunk| matches!(chunk, Zeros)) } /// Returns `true` if `self` contains `elem`. 
@@ -607,9 +613,9 @@ impl<T: Idx> ChunkedBitSet<T> { assert!(elem.index() < self.domain_size); let chunk = &self.chunks[chunk_index(elem)]; match &chunk { - Zeros(_) => false, - Ones(_) => true, - Mixed(_, _, words) => { + Zeros => false, + Ones => true, + Mixed(_, words) => { let (word_index, mask) = chunk_word_index_and_mask(elem); (words[word_index] & mask) != 0 } @@ -625,9 +631,10 @@ impl<T: Idx> ChunkedBitSet<T> { pub fn insert(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); let chunk_index = chunk_index(elem); + let chunk_domain_size = self.chunk_domain_size(chunk_index); let chunk = &mut self.chunks[chunk_index]; match *chunk { - Zeros(chunk_domain_size) => { + Zeros => { if chunk_domain_size > 1 { #[cfg(feature = "nightly")] let mut words = { @@ -649,14 +656,14 @@ impl<T: Idx> ChunkedBitSet<T> { let (word_index, mask) = chunk_word_index_and_mask(elem); words_ref[word_index] |= mask; - *chunk = Mixed(chunk_domain_size, 1, words); + *chunk = Mixed(1, words); } else { - *chunk = Ones(chunk_domain_size); + *chunk = Ones; } true } - Ones(_) => false, - Mixed(chunk_domain_size, ref mut count, ref mut words) => { + Ones => false, + Mixed(ref mut count, ref mut words) => { // We skip all the work if the bit is already set. let (word_index, mask) = chunk_word_index_and_mask(elem); if (words[word_index] & mask) == 0 { @@ -665,7 +672,7 @@ impl<T: Idx> ChunkedBitSet<T> { let words = Rc::make_mut(words); words[word_index] |= mask; } else { - *chunk = Ones(chunk_domain_size); + *chunk = Ones; } true } else { @@ -678,11 +685,7 @@ impl<T: Idx> ChunkedBitSet<T> { /// Sets all bits to true. pub fn insert_all(&mut self) { for chunk in self.chunks.iter_mut() { - *chunk = match *chunk { - Zeros(chunk_domain_size) - | Ones(chunk_domain_size) - | Mixed(chunk_domain_size, ..) => Ones(chunk_domain_size), - } + *chunk = Ones; } } @@ -690,10 +693,11 @@ impl<T: Idx> ChunkedBitSet<T> { pub fn remove(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); let chunk_index = chunk_index(elem); + let chunk_domain_size = self.chunk_domain_size(chunk_index); let chunk = &mut self.chunks[chunk_index]; match *chunk { - Zeros(_) => false, - Ones(chunk_domain_size) => { + Zeros => false, + Ones => { if chunk_domain_size > 1 { #[cfg(feature = "nightly")] let mut words = { @@ -722,13 +726,13 @@ impl<T: Idx> ChunkedBitSet<T> { ); let (word_index, mask) = chunk_word_index_and_mask(elem); words_ref[word_index] &= !mask; - *chunk = Mixed(chunk_domain_size, chunk_domain_size - 1, words); + *chunk = Mixed(chunk_domain_size - 1, words); } else { - *chunk = Zeros(chunk_domain_size); + *chunk = Zeros; } true } - Mixed(chunk_domain_size, ref mut count, ref mut words) => { + Mixed(ref mut count, ref mut words) => { // We skip all the work if the bit is already clear. 
let (word_index, mask) = chunk_word_index_and_mask(elem); if (words[word_index] & mask) != 0 { @@ -737,7 +741,7 @@ impl<T: Idx> ChunkedBitSet<T> { let words = Rc::make_mut(words); words[word_index] &= !mask; } else { - *chunk = Zeros(chunk_domain_size); + *chunk = Zeros } true } else { @@ -748,11 +752,12 @@ impl<T: Idx> ChunkedBitSet<T> { } fn chunk_iter(&self, chunk_index: usize) -> ChunkIter<'_> { + let chunk_domain_size = self.chunk_domain_size(chunk_index); match self.chunks.get(chunk_index) { - Some(Zeros(_chunk_domain_size)) => ChunkIter::Zeros, - Some(Ones(chunk_domain_size)) => ChunkIter::Ones(0..*chunk_domain_size as usize), - Some(Mixed(chunk_domain_size, _, words)) => { - let num_words = num_words(*chunk_domain_size as usize); + Some(Zeros) => ChunkIter::Zeros, + Some(Ones) => ChunkIter::Ones(0..chunk_domain_size as usize), + Some(Mixed(_, words)) => { + let num_words = num_words(chunk_domain_size as usize); ChunkIter::Mixed(BitIter::new(&words[0..num_words])) } None => ChunkIter::Finished, @@ -765,23 +770,33 @@ impl<T: Idx> ChunkedBitSet<T> { impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { fn union(&mut self, other: &ChunkedBitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); - debug_assert_eq!(self.chunks.len(), other.chunks.len()); + + let num_chunks = self.chunks.len(); + debug_assert_eq!(num_chunks, other.chunks.len()); + + let last_chunk_size = self.last_chunk_size(); + debug_assert_eq!(last_chunk_size, other.last_chunk_size()); let mut changed = false; - for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) { + for (chunk_index, (mut self_chunk, other_chunk)) in + self.chunks.iter_mut().zip(other.chunks.iter()).enumerate() + { + let chunk_domain_size = if chunk_index + 1 == num_chunks { + last_chunk_size + } else { + CHUNK_BITS as ChunkSize + }; + match (&mut self_chunk, &other_chunk) { - (_, Zeros(_)) | (Ones(_), _) => {} - (Zeros(self_chunk_domain_size), Ones(other_chunk_domain_size)) - | (Mixed(self_chunk_domain_size, ..), Ones(other_chunk_domain_size)) - | (Zeros(self_chunk_domain_size), Mixed(other_chunk_domain_size, ..)) => { + (_, Zeros) | (Ones, _) => {} + (Zeros, Ones) | (Mixed(..), Ones) | (Zeros, Mixed(..)) => { // `other_chunk` fully overwrites `self_chunk` - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); *self_chunk = other_chunk.clone(); changed = true; } ( - Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words), - Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words), + Mixed(self_chunk_count, self_chunk_words), + Mixed(_other_chunk_count, other_chunk_words), ) => { // First check if the operation would change // `self_chunk.words`. If not, we can avoid allocating some @@ -789,7 +804,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { // performance win. Also, we only need to operate on the // in-use words, hence the slicing. 
let op = |a, b| a | b; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); if bitwise_changes( &self_chunk_words[0..num_words], &other_chunk_words[0..num_words], @@ -806,8 +821,8 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .iter() .map(|w| w.count_ones() as ChunkSize) .sum(); - if *self_chunk_count == *self_chunk_domain_size { - *self_chunk = Ones(*self_chunk_domain_size); + if *self_chunk_count == chunk_domain_size { + *self_chunk = Ones; } changed = true; } @@ -819,36 +834,41 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { fn subtract(&mut self, other: &ChunkedBitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); - debug_assert_eq!(self.chunks.len(), other.chunks.len()); + + let num_chunks = self.chunks.len(); + debug_assert_eq!(num_chunks, other.chunks.len()); + + let last_chunk_size = self.last_chunk_size(); + debug_assert_eq!(last_chunk_size, other.last_chunk_size()); let mut changed = false; - for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) { + for (chunk_index, (mut self_chunk, other_chunk)) in + self.chunks.iter_mut().zip(other.chunks.iter()).enumerate() + { + let chunk_domain_size = if chunk_index + 1 == num_chunks { + last_chunk_size + } else { + CHUNK_BITS as ChunkSize + }; + match (&mut self_chunk, &other_chunk) { - (Zeros(..), _) | (_, Zeros(..)) => {} - ( - Ones(self_chunk_domain_size) | Mixed(self_chunk_domain_size, _, _), - Ones(other_chunk_domain_size), - ) => { - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); + (Zeros, _) | (_, Zeros) => {} + (Ones | Mixed(_, _), Ones) => { changed = true; - *self_chunk = Zeros(*self_chunk_domain_size); + *self_chunk = Zeros; } - ( - Ones(self_chunk_domain_size), - Mixed(other_chunk_domain_size, other_chunk_count, other_chunk_words), - ) => { - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); + (Ones, Mixed(other_chunk_count, other_chunk_words)) => { changed = true; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); debug_assert!(num_words > 0 && num_words <= CHUNK_WORDS); let mut tail_mask = - 1 << (*other_chunk_domain_size - ((num_words - 1) * WORD_BITS) as u16) - 1; + 1 << (chunk_domain_size - ((num_words - 1) * WORD_BITS) as u16) - 1; let mut self_chunk_words = **other_chunk_words; for word in self_chunk_words[0..num_words].iter_mut().rev() { *word = !*word & tail_mask; tail_mask = u64::MAX; } - let self_chunk_count = *self_chunk_domain_size - *other_chunk_count; + let self_chunk_count = chunk_domain_size - *other_chunk_count; debug_assert_eq!( self_chunk_count, self_chunk_words[0..num_words] @@ -856,16 +876,15 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .map(|w| w.count_ones() as ChunkSize) .sum() ); - *self_chunk = - Mixed(*self_chunk_domain_size, self_chunk_count, Rc::new(self_chunk_words)); + *self_chunk = Mixed(self_chunk_count, Rc::new(self_chunk_words)); } ( - Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words), - Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words), + Mixed(self_chunk_count, self_chunk_words), + Mixed(_other_chunk_count, other_chunk_words), ) => { // See [`<Self as BitRelations<ChunkedBitSet<T>>>::union`] for the explanation let op = |a: u64, b: u64| a & !b; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); if 
bitwise_changes( &self_chunk_words[0..num_words], &other_chunk_words[0..num_words], @@ -883,7 +902,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .map(|w| w.count_ones() as ChunkSize) .sum(); if *self_chunk_count == 0 { - *self_chunk = Zeros(*self_chunk_domain_size); + *self_chunk = Zeros; } changed = true; } @@ -895,28 +914,36 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { fn intersect(&mut self, other: &ChunkedBitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); - debug_assert_eq!(self.chunks.len(), other.chunks.len()); + + let num_chunks = self.chunks.len(); + debug_assert_eq!(num_chunks, other.chunks.len()); + + let last_chunk_size = self.last_chunk_size(); + debug_assert_eq!(last_chunk_size, other.last_chunk_size()); let mut changed = false; - for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) { + for (chunk_index, (mut self_chunk, other_chunk)) in + self.chunks.iter_mut().zip(other.chunks.iter()).enumerate() + { + let chunk_domain_size = if chunk_index + 1 == num_chunks { + last_chunk_size + } else { + CHUNK_BITS as ChunkSize + }; + match (&mut self_chunk, &other_chunk) { - (Zeros(..), _) | (_, Ones(..)) => {} - ( - Ones(self_chunk_domain_size), - Zeros(other_chunk_domain_size) | Mixed(other_chunk_domain_size, ..), - ) - | (Mixed(self_chunk_domain_size, ..), Zeros(other_chunk_domain_size)) => { - debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size); + (Zeros, _) | (_, Ones) => {} + (Ones, Zeros | Mixed(..)) | (Mixed(..), Zeros) => { changed = true; *self_chunk = other_chunk.clone(); } ( - Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words), - Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words), + Mixed(self_chunk_count, self_chunk_words), + Mixed(_other_chunk_count, other_chunk_words), ) => { // See [`<Self as BitRelations<ChunkedBitSet<T>>>::union`] for the explanation let op = |a, b| a & b; - let num_words = num_words(*self_chunk_domain_size as usize); + let num_words = num_words(chunk_domain_size as usize); if bitwise_changes( &self_chunk_words[0..num_words], &other_chunk_words[0..num_words], @@ -934,7 +961,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> { .map(|w| w.count_ones() as ChunkSize) .sum(); if *self_chunk_count == 0 { - *self_chunk = Zeros(*self_chunk_domain_size); + *self_chunk = Zeros; } changed = true; } @@ -964,7 +991,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for DenseBitSet<T> { words = &mut words[..CHUNK_WORDS]; } match chunk { - Zeros(..) => { + Zeros => { for word in words { if *word != 0 { changed = true; @@ -972,8 +999,8 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for DenseBitSet<T> { } } } - Ones(..) => (), - Mixed(_, _, data) => { + Ones => (), + Mixed(_, data) => { for (i, word) in words.iter_mut().enumerate() { let new_val = *word & data[i]; if new_val != *word { @@ -1053,13 +1080,11 @@ impl<'a, T: Idx> Iterator for ChunkedBitIter<'a, T> { impl Chunk { #[cfg(test)] - fn assert_valid(&self) { + fn assert_valid(&self, chunk_domain_size: ChunkSize) { + assert!(chunk_domain_size as usize <= CHUNK_BITS); match *self { - Zeros(chunk_domain_size) | Ones(chunk_domain_size) => { - assert!(chunk_domain_size as usize <= CHUNK_BITS); - } - Mixed(chunk_domain_size, count, ref words) => { - assert!(chunk_domain_size as usize <= CHUNK_BITS); + Zeros | Ones => {} + Mixed(count, ref words) => { assert!(0 < count && count < chunk_domain_size); // Check the number of set bits matches `count`. 
@@ -1083,18 +1108,12 @@ impl Chunk { } } - fn new(chunk_domain_size: usize, is_empty: bool) -> Self { - debug_assert!(0 < chunk_domain_size && chunk_domain_size <= CHUNK_BITS); - let chunk_domain_size = chunk_domain_size as ChunkSize; - if is_empty { Zeros(chunk_domain_size) } else { Ones(chunk_domain_size) } - } - /// Count the number of 1s in the chunk. - fn count(&self) -> usize { + fn count(&self, chunk_domain_size: ChunkSize) -> usize { match *self { - Zeros(_) => 0, - Ones(chunk_domain_size) => chunk_domain_size as usize, - Mixed(_, count, _) => count as usize, + Zeros => 0, + Ones => chunk_domain_size as usize, + Mixed(count, _) => count as usize, } } } diff --git a/compiler/rustc_index/src/bit_set/tests.rs b/compiler/rustc_index/src/bit_set/tests.rs index 9ce4cf4293f..deddc872614 100644 --- a/compiler/rustc_index/src/bit_set/tests.rs +++ b/compiler/rustc_index/src/bit_set/tests.rs @@ -120,8 +120,9 @@ fn chunked_bitset() { let mut b1 = ChunkedBitSet::<usize>::new_empty(1); assert_eq!( b1, - ChunkedBitSet { domain_size: 1, chunks: Box::new([Zeros(1)]), marker: PhantomData } + ChunkedBitSet { domain_size: 1, chunks: Box::new([Zeros]), marker: PhantomData } ); + assert_eq!(b1.chunk_domain_size(0), 1); b1.assert_valid(); assert!(!b1.contains(0)); @@ -129,12 +130,12 @@ fn chunked_bitset() { assert!(b1.insert(0)); assert!(b1.contains(0)); assert_eq!(b1.count(), 1); - assert_eq!(b1.chunks(), [Ones(1)]); + assert_eq!(b1.chunks(), [Ones]); assert!(!b1.insert(0)); assert!(b1.remove(0)); assert!(!b1.contains(0)); assert_eq!(b1.count(), 0); - assert_eq!(b1.chunks(), [Zeros(1)]); + assert_eq!(b1.chunks(), [Zeros]); b1.assert_valid(); //----------------------------------------------------------------------- @@ -142,8 +143,9 @@ fn chunked_bitset() { let mut b100 = ChunkedBitSet::<usize>::new_filled(100); assert_eq!( b100, - ChunkedBitSet { domain_size: 100, chunks: Box::new([Ones(100)]), marker: PhantomData } + ChunkedBitSet { domain_size: 100, chunks: Box::new([Ones]), marker: PhantomData } ); + assert_eq!(b100.chunk_domain_size(0), 100); b100.assert_valid(); for i in 0..100 { @@ -152,7 +154,7 @@ fn chunked_bitset() { assert_eq!(b100.count(), 100); assert!(b100.remove(3)); assert!(b100.insert(3)); - assert_eq!(b100.chunks(), vec![Ones(100)]); + assert_eq!(b100.chunks(), vec![Ones]); assert!( b100.remove(20) && b100.remove(30) && b100.remove(40) && b100.remove(99) && b100.insert(30) ); @@ -161,7 +163,6 @@ fn chunked_bitset() { assert_eq!( b100.chunks(), vec![Mixed( - 100, 97, #[rustfmt::skip] Rc::new([ @@ -180,7 +181,7 @@ fn chunked_bitset() { } } assert_eq!(num_removed, 97); - assert_eq!(b100.chunks(), vec![Zeros(100)]); + assert_eq!(b100.chunks(), vec![Zeros]); b100.assert_valid(); //----------------------------------------------------------------------- @@ -188,23 +189,21 @@ fn chunked_bitset() { let mut b2548 = ChunkedBitSet::<usize>::new_empty(2548); assert_eq!( b2548, - ChunkedBitSet { - domain_size: 2548, - chunks: Box::new([Zeros(2048), Zeros(500)]), - marker: PhantomData, - } + ChunkedBitSet { domain_size: 2548, chunks: Box::new([Zeros, Zeros]), marker: PhantomData } ); + assert_eq!(b2548.chunk_domain_size(0), 2048); + assert_eq!(b2548.chunk_domain_size(1), 500); b2548.assert_valid(); b2548.insert(14); b2548.remove(14); - assert_eq!(b2548.chunks(), vec![Zeros(2048), Zeros(500)]); + assert_eq!(b2548.chunks(), vec![Zeros, Zeros]); b2548.insert_all(); for i in 0..2548 { assert!(b2548.contains(i)); } assert_eq!(b2548.count(), 2548); - assert_eq!(b2548.chunks(), vec![Ones(2048), Ones(500)]); + 
assert_eq!(b2548.chunks(), vec![Ones, Ones]); b2548.assert_valid(); //----------------------------------------------------------------------- @@ -212,12 +211,10 @@ fn chunked_bitset() { let mut b4096 = ChunkedBitSet::<usize>::new_empty(4096); assert_eq!( b4096, - ChunkedBitSet { - domain_size: 4096, - chunks: Box::new([Zeros(2048), Zeros(2048)]), - marker: PhantomData, - } + ChunkedBitSet { domain_size: 4096, chunks: Box::new([Zeros, Zeros]), marker: PhantomData } ); + assert_eq!(b4096.chunk_domain_size(0), 2048); + assert_eq!(b4096.chunk_domain_size(1), 2048); b4096.assert_valid(); for i in 0..4096 { @@ -231,11 +228,11 @@ fn chunked_bitset() { b4096.chunks(), #[rustfmt::skip] vec![ - Mixed(2048, 1, Rc::new([ + Mixed(1, Rc::new([ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ])), - Mixed(2048, 1, Rc::new([ + Mixed(1, Rc::new([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x8000_0000_0000_0000 ])), @@ -251,10 +248,15 @@ fn chunked_bitset() { b10000, ChunkedBitSet { domain_size: 10000, - chunks: Box::new([Zeros(2048), Zeros(2048), Zeros(2048), Zeros(2048), Zeros(1808),]), + chunks: Box::new([Zeros, Zeros, Zeros, Zeros, Zeros,]), marker: PhantomData, } ); + assert_eq!(b10000.chunk_domain_size(0), 2048); + assert_eq!(b10000.chunk_domain_size(1), 2048); + assert_eq!(b10000.chunk_domain_size(2), 2048); + assert_eq!(b10000.chunk_domain_size(3), 2048); + assert_eq!(b10000.chunk_domain_size(4), 1808); b10000.assert_valid(); assert!(b10000.insert(3000) && b10000.insert(5000)); @@ -262,17 +264,17 @@ fn chunked_bitset() { b10000.chunks(), #[rustfmt::skip] vec![ - Zeros(2048), - Mixed(2048, 1, Rc::new([ + Zeros, + Mixed(1, Rc::new([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0100_0000_0000_0000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])), - Mixed(2048, 1, Rc::new([ + Mixed(1, Rc::new([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])), - Zeros(2048), - Zeros(1808), + Zeros, + Zeros, ], ); let mut b10000b = ChunkedBitSet::<usize>::new_empty(10000); diff --git a/compiler/rustc_infer/src/infer/context.rs b/compiler/rustc_infer/src/infer/context.rs index 21e999b080d..bb9c8850093 100644 --- a/compiler/rustc_infer/src/infer/context.rs +++ b/compiler/rustc_infer/src/infer/context.rs @@ -22,10 +22,6 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { self.next_trait_solver } - fn in_hir_typeck(&self) -> bool { - self.in_hir_typeck - } - fn typing_mode(&self) -> ty::TypingMode<'tcx> { self.typing_mode() } diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 82d4856df39..d1507f08c06 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -158,7 +158,8 @@ pub struct InferCtxtInner<'tcx> { region_assumptions: Vec<ty::ArgOutlivesPredicate<'tcx>>, /// `-Znext-solver`: Successfully proven goals during HIR typeck which - /// reference inference variables and get reproven after writeback. + /// reference inference variables and get reproven in case MIR type check + /// fails to prove something. /// /// See the documentation of `InferCtxt::in_hir_typeck` for more details. 
hir_typeck_potentially_region_dependent_goals: Vec<PredicateObligation<'tcx>>, @@ -782,22 +783,30 @@ impl<'tcx> InferCtxt<'tcx> { self.inner.borrow_mut().type_variables().num_vars() } - pub fn next_ty_var(&self, span: Span) -> Ty<'tcx> { - self.next_ty_var_with_origin(TypeVariableOrigin { span, param_def_id: None }) + pub fn next_ty_vid(&self, span: Span) -> TyVid { + self.next_ty_vid_with_origin(TypeVariableOrigin { span, param_def_id: None }) } - pub fn next_ty_var_with_origin(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { - let vid = self.inner.borrow_mut().type_variables().new_var(self.universe(), origin); - Ty::new_var(self.tcx, vid) + pub fn next_ty_vid_with_origin(&self, origin: TypeVariableOrigin) -> TyVid { + self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) } - pub fn next_ty_var_id_in_universe(&self, span: Span, universe: ty::UniverseIndex) -> TyVid { + pub fn next_ty_vid_in_universe(&self, span: Span, universe: ty::UniverseIndex) -> TyVid { let origin = TypeVariableOrigin { span, param_def_id: None }; self.inner.borrow_mut().type_variables().new_var(universe, origin) } + pub fn next_ty_var(&self, span: Span) -> Ty<'tcx> { + self.next_ty_var_with_origin(TypeVariableOrigin { span, param_def_id: None }) + } + + pub fn next_ty_var_with_origin(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { + let vid = self.next_ty_vid_with_origin(origin); + Ty::new_var(self.tcx, vid) + } + pub fn next_ty_var_in_universe(&self, span: Span, universe: ty::UniverseIndex) -> Ty<'tcx> { - let vid = self.next_ty_var_id_in_universe(span, universe); + let vid = self.next_ty_vid_in_universe(span, universe); Ty::new_var(self.tcx, vid) } diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index c46e879b976..8f131f45bbd 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -285,7 +285,9 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> Ch .expecteds .entry(name.name) .and_modify(|v| match v { - ExpectedValues::Some(v) if !values_any_specified => { + ExpectedValues::Some(v) if !values_any_specified => + { + #[allow(rustc::potential_query_instability)] v.extend(values.clone()) } ExpectedValues::Some(_) => *v = ExpectedValues::Any, diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 3ba224723e3..bc5ef04079e 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -6,6 +6,7 @@ use std::sync::{Arc, LazyLock, OnceLock}; use std::{env, fs, iter}; use rustc_ast as ast; +use rustc_attr_parsing::{AttributeParser, ShouldEmit, validate_attr}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::steal::Steal; @@ -15,6 +16,7 @@ use rustc_errors::timings::TimingSection; use rustc_expand::base::{ExtCtxt, LintStoreExpand}; use rustc_feature::Features; use rustc_fs_util::try_canonicalize; +use rustc_hir::attrs::AttributeKind; use rustc_hir::def_id::{LOCAL_CRATE, StableCrateId, StableCrateIdMap}; use rustc_hir::definitions::Definitions; use rustc_incremental::setup_dep_graph; @@ -25,9 +27,7 @@ use rustc_middle::arena::Arena; use rustc_middle::dep_graph::DepsType; use rustc_middle::ty::{self, CurrentGcx, GlobalCtxt, RegisteredTools, TyCtxt}; use rustc_middle::util::Providers; -use rustc_parse::{ - new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal, validate_attr, -}; +use 
rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal}; use rustc_passes::{abi_test, input_stats, layout_test}; use rustc_resolve::{Resolver, ResolverOutputs}; use rustc_session::config::{CrateType, Input, OutFileName, OutputFilenames, OutputType}; @@ -1081,7 +1081,8 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { if !tcx.is_typeck_child(def_id.to_def_id()) { // Child unsafety and borrowck happens together with the parent tcx.ensure_ok().check_unsafety(def_id); - tcx.ensure_ok().mir_borrowck(def_id) + tcx.ensure_ok().mir_borrowck(def_id); + tcx.ensure_ok().check_transmutes(def_id); } tcx.ensure_ok().has_ffi_unwind_calls(def_id); @@ -1244,8 +1245,7 @@ pub fn get_crate_name(sess: &Session, krate_attrs: &[ast::Attribute]) -> Symbol // in all code paths that require the crate name very early on, namely before // macro expansion. - let attr_crate_name = - validate_and_find_value_str_builtin_attr(sym::crate_name, sess, krate_attrs); + let attr_crate_name = parse_crate_name(sess, krate_attrs, ShouldEmit::EarlyFatal); let validate = |name, span| { rustc_session::output::validate_crate_name(sess, name, span); @@ -1283,6 +1283,28 @@ pub fn get_crate_name(sess: &Session, krate_attrs: &[ast::Attribute]) -> Symbol sym::rust_out } +pub(crate) fn parse_crate_name( + sess: &Session, + attrs: &[ast::Attribute], + emit_errors: ShouldEmit, +) -> Option<(Symbol, Span)> { + let rustc_hir::Attribute::Parsed(AttributeKind::CrateName { name, name_span, .. }) = + AttributeParser::parse_limited_should_emit( + sess, + &attrs, + sym::crate_name, + DUMMY_SP, + rustc_ast::node_id::CRATE_NODE_ID, + None, + emit_errors, + )? + else { + unreachable!("crate_name is the only attr we could've parsed here"); + }; + + Some((name, name_span)) +} + fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit { // We don't permit macro calls inside of the attribute (e.g., #![recursion_limit = `expand!()`]) // because that would require expanding this while in the middle of expansion, which needs to diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 0a764808f95..4425877308a 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -807,6 +807,7 @@ fn test_unstable_options_tracking_hash() { tracked!(hint_mostly_unused, true); tracked!(human_readable_cgu_names, true); tracked!(incremental_ignore_spans, true); + tracked!(indirect_branch_cs_prefix, true); tracked!(inline_mir, Some(true)); tracked!(inline_mir_hint_threshold, Some(123)); tracked!(inline_mir_threshold, Some(123)); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 0ca4fcc66ca..04006f3e446 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -5,14 +5,15 @@ use std::sync::{Arc, OnceLock}; use std::{env, thread}; use rustc_ast as ast; +use rustc_attr_parsing::{ShouldEmit, validate_attr}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::sync; +use rustc_errors::LintBuffer; use rustc_metadata::{DylibError, load_symbol_from_dylib}; use rustc_middle::ty::CurrentGcx; -use rustc_parse::validate_attr; use rustc_session::config::{Cfg, OutFileName, OutputFilenames, OutputTypes, Sysroot, host_tuple}; -use rustc_session::lint::{self, BuiltinLintDiag, LintBuffer}; +use rustc_session::lint::{self, BuiltinLintDiag}; use rustc_session::output::{CRATE_TYPES, categorize_crate_type}; use 
rustc_session::{EarlyDiagCtxt, Session, filesearch}; use rustc_span::edit_distance::find_best_match_for_name; @@ -23,6 +24,7 @@ use rustc_target::spec::Target; use tracing::info; use crate::errors; +use crate::passes::parse_crate_name; /// Function pointer type that constructs a new CodegenBackend. type MakeBackendFn = fn() -> Box<dyn CodegenBackend>; @@ -519,11 +521,10 @@ pub fn build_output_filenames(attrs: &[ast::Attribute], sess: &Session) -> Outpu sess.dcx().emit_fatal(errors::MultipleOutputTypesToStdout); } - let crate_name = sess - .opts - .crate_name - .clone() - .or_else(|| rustc_attr_parsing::find_crate_name(attrs).map(|n| n.to_string())); + let crate_name = + sess.opts.crate_name.clone().or_else(|| { + parse_crate_name(sess, attrs, ShouldEmit::Nothing).map(|i| i.0.to_string()) + }); match sess.io.output_file { None => { diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs index e80196ed567..483cc3e93dc 100644 --- a/compiler/rustc_lexer/src/lib.rs +++ b/compiler/rustc_lexer/src/lib.rs @@ -540,11 +540,11 @@ impl Cursor<'_> { // whitespace between the opening and the infostring. self.eat_while(|ch| ch != '\n' && is_whitespace(ch)); - // copied from `eat_identifier`, but allows `.` in infostring to allow something like + // copied from `eat_identifier`, but allows `-` and `.` in infostring to allow something like // `---Cargo.toml` as a valid opener if is_id_start(self.first()) { self.bump(); - self.eat_while(|c| is_id_continue(c) || c == '.'); + self.eat_while(|c| is_id_continue(c) || c == '-' || c == '.'); } self.eat_while(|ch| ch != '\n' && is_whitespace(ch)); diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index c485e6fc849..417e5a97069 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -205,8 +205,6 @@ lint_confusable_identifier_pair = found both `{$existing_sym}` and `{$sym}` as i .current_use = this identifier can be confused with `{$existing_sym}` .other_use = other identifier used here -lint_custom_inner_attribute_unstable = custom inner attributes are unstable - lint_dangling_pointers_from_locals = a dangling pointer will be produced because the local variable `{$local_var_name}` will be dropped .ret_ty = return type of the {$fn_kind} is `{$ret_ty}` .local_var = `{$local_var_name}` is part the {$fn_kind} and will be dropped at the end of the {$fn_kind} @@ -271,10 +269,6 @@ lint_expectation = this lint expectation is unfulfilled lint_extern_crate_not_idiomatic = `extern crate` is not idiomatic in the new edition .suggestion = convert it to a `use` -lint_extern_without_abi = `extern` declarations without an explicit ABI are deprecated - .label = ABI should be specified here - .suggestion = explicitly specify the {$default_abi} ABI - lint_for_loops_over_fallibles = for loop over {$article} `{$ref_prefix}{$ty}`. 
This is more readably written as an `if let` statement .suggestion = consider using `if let` to clear intent @@ -294,19 +288,6 @@ lint_hidden_glob_reexport = private item shadows public glob re-export lint_hidden_lifetime_parameters = hidden lifetime parameters in types are deprecated -lint_hidden_unicode_codepoints = unicode codepoint changing visible direction of text present in {$label} - .label = this {$label} contains {$count -> - [one] an invisible - *[other] invisible - } unicode text flow control {$count -> - [one] codepoint - *[other] codepoints - } - .note = these kind of unicode codepoints change the way text flows on applications that support them, but can cause confusion because they change the order of characters on the screen - .suggestion_remove = if their presence wasn't intentional, you can remove them - .suggestion_escape = if you want to keep them but make them visible in your source code, you can escape them - .no_suggestion_note_escape = if you want to keep them but make them visible in your source code, you can escape them: {$escaped} - lint_identifier_non_ascii_char = identifier contains non-ASCII characters lint_identifier_uncommon_codepoints = identifier contains {$codepoints_len -> @@ -431,8 +412,6 @@ lint_improper_ctypes_union_non_exhaustive = this union is non-exhaustive lint_incomplete_include = include macro expected single expression in source -lint_inner_macro_attribute_unstable = inner macro attributes are unstable - lint_invalid_asm_label_binary = avoid using labels containing only the digits `0` and `1` in inline assembly .label = use a different label that doesn't start with `0` or `1` .help = start numbering with `2` instead @@ -483,6 +462,14 @@ lint_invalid_reference_casting_note_book = for more information, visit <https:// lint_invalid_reference_casting_note_ty_has_interior_mutability = even for types with interior mutability, the only legal way to obtain a mutable pointer from a shared reference is through `UnsafeCell::get` +lint_int_to_ptr_transmutes = transmuting an integer to a pointer creates a pointer without provenance + .note = this is dangerous because dereferencing the resulting pointer is undefined behavior + .note_exposed_provenance = exposed provenance semantics can be used to create a pointer based on some previously exposed provenance + .help_transmute = for more information about transmute, see <https://doc.rust-lang.org/std/mem/fn.transmute.html#transmutation-between-pointers-and-integers> + .help_exposed_provenance = for more information about exposed provenance, see <https://doc.rust-lang.org/std/ptr/index.html#exposed-provenance> + .suggestion_with_exposed_provenance = use `std::ptr::with_exposed_provenance{$suffix}` instead to use a previously exposed provenance + .suggestion_without_provenance_mut = if you truly mean to create a pointer without provenance, use `std::ptr::without_provenance_mut` + lint_legacy_derive_helpers = derive helper attribute is used before it is introduced .label = the attribute is introduced here @@ -732,7 +719,7 @@ lint_pattern_in_foreign = patterns aren't allowed in foreign function declaratio lint_private_extern_crate_reexport = extern crate `{$ident}` is private and cannot be re-exported .suggestion = consider making the `extern crate` item publicly accessible -lint_proc_macro_derive_resolution_fallback = cannot find {$ns} `{$ident}` in this scope +lint_proc_macro_derive_resolution_fallback = cannot find {$ns_descr} `{$ident}` in this scope .label = names from parent modules are not accessible 
without an explicit import lint_query_instability = using `{$query}` can result in unstable query results @@ -870,10 +857,6 @@ lint_undropped_manually_drops = calls to `std::mem::drop` with `std::mem::Manual .label = argument has type `{$arg_ty}` .suggestion = use `std::mem::ManuallyDrop::into_inner` to get the inner value -lint_unexpected_builtin_cfg = unexpected `--cfg {$cfg}` flag - .controlled_by = config `{$cfg_name}` is only supposed to be controlled by `{$controlled_by}` - .incoherent = manually setting a built-in cfg can and does create incoherent behaviors - lint_unexpected_cfg_add_build_rs_println = or consider adding `{$build_rs_println}` to the top of the `build.rs` lint_unexpected_cfg_add_cargo_feature = consider using a Cargo feature instead lint_unexpected_cfg_add_cargo_toml_lint_cfg = or consider adding in `Cargo.toml` the `check-cfg` lint config for the lint:{$cargo_toml_lint_cfg} diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 8006cfcf30f..c3c0a34df71 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -3097,7 +3097,7 @@ impl EarlyLintPass for SpecialModuleName { if let ast::ItemKind::Mod( _, ident, - ast::ModKind::Unloaded | ast::ModKind::Loaded(_, ast::Inline::No, _, _), + ast::ModKind::Unloaded | ast::ModKind::Loaded(_, ast::Inline::No { .. }, _), ) = item.kind { if item.attrs.iter().any(|a| a.has_name(sym::path)) { diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index cb159a0b914..0669da1a025 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -11,7 +11,7 @@ use rustc_ast::util::parser::ExprPrecedence; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sync; use rustc_data_structures::unord::UnordMap; -use rustc_errors::{Diag, LintDiagnostic, MultiSpan}; +use rustc_errors::{Diag, LintBuffer, LintDiagnostic, MultiSpan}; use rustc_feature::Features; use rustc_hir::def::Res; use rustc_hir::def_id::{CrateNum, DefId}; @@ -23,8 +23,8 @@ use rustc_middle::middle::privacy::EffectiveVisibilities; use rustc_middle::ty::layout::{LayoutError, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::print::{PrintError, PrintTraitRefExt as _, Printer, with_no_trimmed_paths}; use rustc_middle::ty::{self, GenericArg, RegisteredTools, Ty, TyCtxt, TypingEnv, TypingMode}; -use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintBuffer, LintExpectationId, LintId}; -use rustc_session::{LintStoreMarker, Session}; +use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintExpectationId, LintId}; +use rustc_session::{DynLintStore, Session}; use rustc_span::edit_distance::find_best_match_for_names; use rustc_span::{Ident, Span, Symbol, sym}; use tracing::debug; @@ -62,7 +62,13 @@ pub struct LintStore { lint_groups: FxIndexMap<&'static str, LintGroup>, } -impl LintStoreMarker for LintStore {} +impl DynLintStore for LintStore { + fn lint_groups_iter(&self) -> Box<dyn Iterator<Item = rustc_session::LintGroup> + '_> { + Box::new(self.get_lint_groups().map(|(name, lints, is_externally_loaded)| { + rustc_session::LintGroup { name, lints, is_externally_loaded } + })) + } +} /// The target of the `by_name` map, which accounts for renaming/deprecation. 
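Looking back at the `DynLintStore` impl just above: the new `lint_groups_iter` hook lets session-level code walk lint groups without naming `rustc_lint::LintStore` directly. A minimal caller-side sketch, using only names visible in this diff (not code from the patch itself):

    // Sketch only: `sess.lint_groups_iter()` is assumed to forward to the
    // registered `dyn DynLintStore`, yielding `rustc_session::LintGroup` values.
    for group in sess.lint_groups_iter() {
        if !group.is_externally_loaded {
            // `name` is the group name, `lints` the `LintId`s it expands to.
            let _ = (group.name, group.lints.len());
        }
    }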
#[derive(Debug)] diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs index 58205087def..dff1fc43670 100644 --- a/compiler/rustc_lint/src/early.rs +++ b/compiler/rustc_lint/src/early.rs @@ -7,10 +7,11 @@ use rustc_ast::visit::{self as ast_visit, Visitor, walk_list}; use rustc_ast::{self as ast, HasAttrs}; use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_errors::{BufferedEarlyLint, DecorateDiagCompat, LintBuffer}; use rustc_feature::Features; use rustc_middle::ty::{RegisteredTools, TyCtxt}; use rustc_session::Session; -use rustc_session::lint::{BufferedEarlyLint, LintBuffer, LintPass}; +use rustc_session::lint::LintPass; use rustc_span::{Ident, Span}; use tracing::debug; @@ -36,8 +37,11 @@ impl<'ecx, 'tcx, T: EarlyLintPass> EarlyContextAndPass<'ecx, 'tcx, T> { fn check_id(&mut self, id: ast::NodeId) { for early_lint in self.context.buffered.take(id) { let BufferedEarlyLint { span, node_id: _, lint_id, diagnostic } = early_lint; - self.context.opt_span_lint(lint_id.lint, span, |diag| { - diagnostics::decorate_builtin_lint(self.context.sess(), self.tcx, diagnostic, diag); + self.context.opt_span_lint(lint_id.lint, span, |diag| match diagnostic { + DecorateDiagCompat::Builtin(b) => { + diagnostics::decorate_builtin_lint(self.context.sess(), self.tcx, b, diag); + } + DecorateDiagCompat::Dynamic(d) => d.decorate_lint_box(diag), }); } } diff --git a/compiler/rustc_lint/src/early/diagnostics.rs b/compiler/rustc_lint/src/early/diagnostics.rs index 678d3d1f8ed..7300490b838 100644 --- a/compiler/rustc_lint/src/early/diagnostics.rs +++ b/compiler/rustc_lint/src/early/diagnostics.rs @@ -64,10 +64,12 @@ pub fn decorate_builtin_lint( } .decorate_lint(diag); } - BuiltinLintDiag::ProcMacroDeriveResolutionFallback { span: macro_span, ns, ident } => { - lints::ProcMacroDeriveResolutionFallback { span: macro_span, ns, ident } - .decorate_lint(diag) - } + BuiltinLintDiag::ProcMacroDeriveResolutionFallback { + span: macro_span, + ns_descr, + ident, + } => lints::ProcMacroDeriveResolutionFallback { span: macro_span, ns_descr, ident } + .decorate_lint(diag), BuiltinLintDiag::MacroExpandedMacroExportsAccessedByAbsolutePaths(span_def) => { lints::MacroExpandedMacroExportsAccessedByAbsolutePaths { definition: span_def } .decorate_lint(diag) @@ -156,9 +158,6 @@ pub fn decorate_builtin_lint( } .decorate_lint(diag); } - BuiltinLintDiag::MissingAbi(label_span, default_abi) => { - lints::MissingAbi { span: label_span, default_abi }.decorate_lint(diag); - } BuiltinLintDiag::LegacyDeriveHelpers(label_span) => { lints::LegacyDeriveHelpers { span: label_span }.decorate_lint(diag); } @@ -184,27 +183,6 @@ pub fn decorate_builtin_lint( lints::ReservedMultihash { suggestion }.decorate_lint(diag); } } - BuiltinLintDiag::HiddenUnicodeCodepoints { - label, - count, - span_label, - labels, - escape, - spans, - } => { - lints::HiddenUnicodeCodepointsDiag { - label: &label, - count, - span_label, - labels: labels.map(|spans| lints::HiddenUnicodeCodepointsDiagLabels { spans }), - sub: if escape { - lints::HiddenUnicodeCodepointsDiagSub::Escape { spans } - } else { - lints::HiddenUnicodeCodepointsDiagSub::NoEscape { spans } - }, - } - .decorate_lint(diag); - } BuiltinLintDiag::UnusedBuiltinAttribute { attr_name, macro_name, @@ -464,17 +442,8 @@ pub fn decorate_builtin_lint( } .decorate_lint(diag) } - BuiltinLintDiag::InnerAttributeUnstable { is_macro } => if is_macro { - lints::InnerAttributeUnstable::InnerMacroAttribute - } else { - lints::InnerAttributeUnstable::CustomInnerAttribute - } 
- .decorate_lint(diag), BuiltinLintDiag::OutOfScopeMacroCalls { span, path, location } => { lints::OutOfScopeMacroCalls { span, path, location }.decorate_lint(diag) } - BuiltinLintDiag::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by } => { - lints::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by }.decorate_lint(diag) - } } } diff --git a/compiler/rustc_lint/src/foreign_modules.rs b/compiler/rustc_lint/src/foreign_modules.rs index 759e6c927b8..3267e70f1de 100644 --- a/compiler/rustc_lint/src/foreign_modules.rs +++ b/compiler/rustc_lint/src/foreign_modules.rs @@ -179,7 +179,7 @@ impl ClashingExternDeclarations { /// symbol's name. fn name_of_extern_decl(tcx: TyCtxt<'_>, fi: hir::OwnerId) -> SymbolName { if let Some((overridden_link_name, overridden_link_name_span)) = - tcx.codegen_fn_attrs(fi).link_name.map(|overridden_link_name| { + tcx.codegen_fn_attrs(fi).symbol_name.map(|overridden_link_name| { // FIXME: Instead of searching through the attributes again to get span // information, we could have codegen_fn_attrs also give span information back for // where the attribute was defined. However, until this is found to be a diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs index 016ff17f5d7..929fc8207b0 100644 --- a/compiler/rustc_lint/src/internal.rs +++ b/compiler/rustc_lint/src/internal.rs @@ -1,10 +1,10 @@ //! Some lints that are only useful in the compiler or crates that use compiler internals, such as //! Clippy. -use rustc_hir::HirId; use rustc_hir::def::Res; use rustc_hir::def_id::DefId; -use rustc_middle::ty::{self, GenericArgsRef, Ty as MiddleTy}; +use rustc_hir::{Expr, ExprKind, HirId}; +use rustc_middle::ty::{self, GenericArgsRef, PredicatePolarity, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::hygiene::{ExpnKind, MacroKind}; use rustc_span::{Span, sym}; @@ -56,25 +56,6 @@ impl LateLintPass<'_> for DefaultHashTypes { } } -/// Helper function for lints that check for expressions with calls and use typeck results to -/// get the `DefId` and `GenericArgsRef` of the function. -fn typeck_results_of_method_fn<'tcx>( - cx: &LateContext<'tcx>, - expr: &hir::Expr<'_>, -) -> Option<(Span, DefId, ty::GenericArgsRef<'tcx>)> { - match expr.kind { - hir::ExprKind::MethodCall(segment, ..) - if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) => - { - Some((segment.ident.span, def_id, cx.typeck_results().node_args(expr.hir_id))) - } - _ => match cx.typeck_results().node_type(expr.hir_id).kind() { - &ty::FnDef(def_id, args) => Some((expr.span, def_id, args)), - _ => None, - }, - } -} - declare_tool_lint! { /// The `potential_query_instability` lint detects use of methods which can lead to /// potential query instability, such as iterating over a `HashMap`. @@ -101,10 +82,12 @@ declare_tool_lint! 
{ declare_lint_pass!(QueryStability => [POTENTIAL_QUERY_INSTABILITY, UNTRACKED_QUERY_INFORMATION]); -impl LateLintPass<'_> for QueryStability { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { - let Some((span, def_id, args)) = typeck_results_of_method_fn(cx, expr) else { return }; - if let Ok(Some(instance)) = ty::Instance::try_resolve(cx.tcx, cx.typing_env(), def_id, args) +impl<'tcx> LateLintPass<'tcx> for QueryStability { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { + if let Some((callee_def_id, span, generic_args, _recv, _args)) = + get_callee_span_generic_args_and_args(cx, expr) + && let Ok(Some(instance)) = + ty::Instance::try_resolve(cx.tcx, cx.typing_env(), callee_def_id, generic_args) { let def_id = instance.def_id(); if cx.tcx.has_attr(def_id, sym::rustc_lint_query_instability) { @@ -113,7 +96,15 @@ impl LateLintPass<'_> for QueryStability { span, QueryInstability { query: cx.tcx.item_name(def_id) }, ); + } else if has_unstable_into_iter_predicate(cx, callee_def_id, generic_args) { + let call_span = span.with_hi(expr.span.hi()); + cx.emit_span_lint( + POTENTIAL_QUERY_INSTABILITY, + call_span, + QueryInstability { query: sym::into_iter }, + ); } + if cx.tcx.has_attr(def_id, sym::rustc_lint_untracked_query_information) { cx.emit_span_lint( UNTRACKED_QUERY_INFORMATION, @@ -125,6 +116,69 @@ impl LateLintPass<'_> for QueryStability { } } +fn has_unstable_into_iter_predicate<'tcx>( + cx: &LateContext<'tcx>, + callee_def_id: DefId, + generic_args: GenericArgsRef<'tcx>, +) -> bool { + let Some(into_iterator_def_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator) else { + return false; + }; + let Some(into_iter_fn_def_id) = cx.tcx.lang_items().into_iter_fn() else { + return false; + }; + let predicates = cx.tcx.predicates_of(callee_def_id).instantiate(cx.tcx, generic_args); + for (predicate, _) in predicates { + let Some(trait_pred) = predicate.as_trait_clause() else { + continue; + }; + if trait_pred.def_id() != into_iterator_def_id + || trait_pred.polarity() != PredicatePolarity::Positive + { + continue; + } + // `IntoIterator::into_iter` has no additional method args. + let into_iter_fn_args = + cx.tcx.instantiate_bound_regions_with_erased(trait_pred).trait_ref.args; + let Ok(Some(instance)) = ty::Instance::try_resolve( + cx.tcx, + cx.typing_env(), + into_iter_fn_def_id, + into_iter_fn_args, + ) else { + continue; + }; + // Does the input type's `IntoIterator` implementation have the + // `rustc_lint_query_instability` attribute on its `into_iter` method? + if cx.tcx.has_attr(instance.def_id(), sym::rustc_lint_query_instability) { + return true; + } + } + false +} + +/// Checks whether an expression is a function or method call and, if so, returns its `DefId`, +/// `Span`, `GenericArgs`, and arguments. This is a slight augmentation of a similarly named Clippy +/// function, `get_callee_generic_args_and_args`. 
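For orientation, a hypothetical caller-side sketch of this helper (the surrounding lint pass is assumed, not part of this patch):

    // Inside some `LateLintPass::check_expr` body:
    if let Some((def_id, span, generic_args, recv, args)) =
        get_callee_span_generic_args_and_args(cx, expr)
    {
        // `recv` is `Some(receiver)` only for method calls; `args` never
        // includes the receiver, mirroring HIR's `ExprKind::MethodCall`.
        let _ = (def_id, span, generic_args, recv, args);
    }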
+fn get_callee_span_generic_args_and_args<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'tcx>, +) -> Option<(DefId, Span, GenericArgsRef<'tcx>, Option<&'tcx Expr<'tcx>>, &'tcx [Expr<'tcx>])> { + if let ExprKind::Call(callee, args) = expr.kind + && let callee_ty = cx.typeck_results().expr_ty(callee) + && let ty::FnDef(callee_def_id, generic_args) = callee_ty.kind() + { + return Some((*callee_def_id, callee.span, generic_args, None, args)); + } + if let ExprKind::MethodCall(segment, recv, args, _) = expr.kind + && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) + { + let generic_args = cx.typeck_results().node_args(expr.hir_id); + return Some((method_def_id, segment.ident.span, generic_args, Some(recv), args)); + } + None +} + declare_tool_lint! { /// The `usage_of_ty_tykind` lint detects usages of `ty::TyKind::<kind>`, /// where `ty::<kind>` would suffice. @@ -461,33 +515,22 @@ declare_tool_lint! { declare_lint_pass!(Diagnostics => [UNTRANSLATABLE_DIAGNOSTIC, DIAGNOSTIC_OUTSIDE_OF_IMPL]); impl LateLintPass<'_> for Diagnostics { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) { + fn check_expr<'tcx>(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { let collect_args_tys_and_spans = |args: &[hir::Expr<'_>], reserve_one_extra: bool| { let mut result = Vec::with_capacity(args.len() + usize::from(reserve_one_extra)); result.extend(args.iter().map(|arg| (cx.typeck_results().expr_ty(arg), arg.span))); result }; // Only check function calls and method calls. - let (span, def_id, fn_gen_args, arg_tys_and_spans) = match expr.kind { - hir::ExprKind::Call(callee, args) => { - match cx.typeck_results().node_type(callee.hir_id).kind() { - &ty::FnDef(def_id, fn_gen_args) => { - (callee.span, def_id, fn_gen_args, collect_args_tys_and_spans(args, false)) - } - _ => return, // occurs for fns passed as args - } - } - hir::ExprKind::MethodCall(_segment, _recv, args, _span) => { - let Some((span, def_id, fn_gen_args)) = typeck_results_of_method_fn(cx, expr) - else { - return; - }; - let mut args = collect_args_tys_and_spans(args, true); - args.insert(0, (cx.tcx.types.self_param, _recv.span)); // dummy inserted for `self` - (span, def_id, fn_gen_args, args) - } - _ => return, + let Some((def_id, span, fn_gen_args, recv, args)) = + get_callee_span_generic_args_and_args(cx, expr) + else { + return; }; + let mut arg_tys_and_spans = collect_args_tys_and_spans(args, recv.is_some()); + if let Some(recv) = recv { + arg_tys_and_spans.insert(0, (cx.tcx.types.self_param, recv.span)); // dummy inserted for `self` + } Self::diagnostic_outside_of_impl(cx, span, expr.hir_id, def_id, fn_gen_args); Self::untranslatable_diagnostic(cx, def_id, &arg_tys_and_spans); @@ -496,7 +539,7 @@ impl LateLintPass<'_> for Diagnostics { impl Diagnostics { // Is the type `{D,Subd}iagMessage`? 
- fn is_diag_message<'cx>(cx: &LateContext<'cx>, ty: MiddleTy<'cx>) -> bool { + fn is_diag_message<'cx>(cx: &LateContext<'cx>, ty: Ty<'cx>) -> bool { if let Some(adt_def) = ty.ty_adt_def() && let Some(name) = cx.tcx.get_diagnostic_name(adt_def.did()) && matches!(name, sym::DiagMessage | sym::SubdiagMessage) @@ -510,7 +553,7 @@ impl Diagnostics { fn untranslatable_diagnostic<'cx>( cx: &LateContext<'cx>, def_id: DefId, - arg_tys_and_spans: &[(MiddleTy<'cx>, Span)], + arg_tys_and_spans: &[(Ty<'cx>, Span)], ) { let fn_sig = cx.tcx.fn_sig(def_id).instantiate_identity().skip_binder(); let predicates = cx.tcx.predicates_of(def_id).instantiate_identity(cx.tcx).predicates; diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index f06757b3c23..bdbac7fc4d1 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -133,10 +133,9 @@ pub use early::{EarlyCheckNode, check_ast_node}; pub use late::{check_crate, late_lint_mod, unerased_lint_store}; pub use levels::LintLevelsBuilder; pub use passes::{EarlyLintPass, LateLintPass}; +pub use rustc_errors::BufferedEarlyLint; pub use rustc_session::lint::Level::{self, *}; -pub use rustc_session::lint::{ - BufferedEarlyLint, FutureIncompatibleInfo, Lint, LintId, LintPass, LintVec, -}; +pub use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintId, LintPass, LintVec}; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } diff --git a/compiler/rustc_lint/src/lifetime_syntax.rs b/compiler/rustc_lint/src/lifetime_syntax.rs index 464f4fc34b9..413525eb6e5 100644 --- a/compiler/rustc_lint/src/lifetime_syntax.rs +++ b/compiler/rustc_lint/src/lifetime_syntax.rs @@ -214,9 +214,9 @@ impl<T> LifetimeSyntaxCategories<Vec<T>> { } } - pub fn flatten(&self) -> impl Iterator<Item = &T> { - let Self { hidden, elided, named } = self; - [hidden.iter(), elided.iter(), named.iter()].into_iter().flatten() + pub fn iter_unnamed(&self) -> impl Iterator<Item = &T> { + let Self { hidden, elided, named: _ } = self; + [hidden.iter(), elided.iter()].into_iter().flatten() } } @@ -495,7 +495,7 @@ fn emit_mismatch_diagnostic<'tcx>( cx.emit_span_lint( MISMATCHED_LIFETIME_SYNTAXES, - inputs.flatten().copied().collect::<Vec<_>>(), + inputs.iter_unnamed().chain(outputs.iter_unnamed()).copied().collect::<Vec<_>>(), lints::MismatchedLifetimeSyntaxes { inputs, outputs, suggestions }, ); } diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index a6d59af6900..426500dda4a 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1,14 +1,14 @@ +// ignore-tidy-filelength + #![allow(rustc::untranslatable_diagnostic)] use std::num::NonZero; -use rustc_abi::ExternAbi; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagMessage, DiagStyledString, ElidedLifetimeInPathSubdiag, EmissionGuarantee, LintDiagnostic, MultiSpan, Subdiagnostic, SuggestionStyle, }; use rustc_hir as hir; -use rustc_hir::def::Namespace; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::VisitorExt; use rustc_macros::{LintDiagnostic, Subdiagnostic}; @@ -817,80 +817,6 @@ pub(crate) enum InvalidReferenceCastingDiag<'tcx> { }, } -// hidden_unicode_codepoints.rs -#[derive(LintDiagnostic)] -#[diag(lint_hidden_unicode_codepoints)] -#[note] -pub(crate) struct HiddenUnicodeCodepointsDiag<'a> { - pub label: &'a str, - pub count: usize, - #[label] - pub span_label: Span, - #[subdiagnostic] - pub labels: Option<HiddenUnicodeCodepointsDiagLabels>, - #[subdiagnostic] - pub sub: 
HiddenUnicodeCodepointsDiagSub, -} - -pub(crate) struct HiddenUnicodeCodepointsDiagLabels { - pub spans: Vec<(char, Span)>, -} - -impl Subdiagnostic for HiddenUnicodeCodepointsDiagLabels { - fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { - for (c, span) in self.spans { - diag.span_label(span, format!("{c:?}")); - } - } -} - -pub(crate) enum HiddenUnicodeCodepointsDiagSub { - Escape { spans: Vec<(char, Span)> }, - NoEscape { spans: Vec<(char, Span)> }, -} - -// Used because of multiple multipart_suggestion and note -impl Subdiagnostic for HiddenUnicodeCodepointsDiagSub { - fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { - match self { - HiddenUnicodeCodepointsDiagSub::Escape { spans } => { - diag.multipart_suggestion_with_style( - fluent::lint_suggestion_remove, - spans.iter().map(|(_, span)| (*span, "".to_string())).collect(), - Applicability::MachineApplicable, - SuggestionStyle::HideCodeAlways, - ); - diag.multipart_suggestion( - fluent::lint_suggestion_escape, - spans - .into_iter() - .map(|(c, span)| { - let c = format!("{c:?}"); - (span, c[1..c.len() - 1].to_string()) - }) - .collect(), - Applicability::MachineApplicable, - ); - } - HiddenUnicodeCodepointsDiagSub::NoEscape { spans } => { - // FIXME: in other suggestions we've reversed the inner spans of doc comments. We - // should do the same here to provide the same good suggestions as we do for - // literals above. - diag.arg( - "escaped", - spans - .into_iter() - .map(|(c, _)| format!("{c:?}")) - .collect::<Vec<String>>() - .join(", "), - ); - diag.note(fluent::lint_suggestion_remove); - diag.note(fluent::lint_no_suggestion_note_escape); - } - } - } -} - // map_unit_fn.rs #[derive(LintDiagnostic)] #[diag(lint_map_unit_fn)] @@ -1618,6 +1544,48 @@ impl<'a> LintDiagnostic<'a, ()> for DropGlue<'_> { } } +// transmute.rs +#[derive(LintDiagnostic)] +#[diag(lint_int_to_ptr_transmutes)] +#[note] +#[note(lint_note_exposed_provenance)] +#[help(lint_suggestion_without_provenance_mut)] +#[help(lint_help_transmute)] +#[help(lint_help_exposed_provenance)] +pub(crate) struct IntegerToPtrTransmutes<'tcx> { + #[subdiagnostic] + pub suggestion: IntegerToPtrTransmutesSuggestion<'tcx>, +} + +#[derive(Subdiagnostic)] +pub(crate) enum IntegerToPtrTransmutesSuggestion<'tcx> { + #[multipart_suggestion( + lint_suggestion_with_exposed_provenance, + applicability = "machine-applicable", + style = "verbose" + )] + ToPtr { + dst: Ty<'tcx>, + suffix: &'static str, + #[suggestion_part(code = "std::ptr::with_exposed_provenance{suffix}::<{dst}>(")] + start_call: Span, + }, + #[multipart_suggestion( + lint_suggestion_with_exposed_provenance, + applicability = "machine-applicable", + style = "verbose" + )] + ToRef { + dst: Ty<'tcx>, + suffix: &'static str, + ref_mutbl: &'static str, + #[suggestion_part( + code = "&{ref_mutbl}*std::ptr::with_exposed_provenance{suffix}::<{dst}>(" + )] + start_call: Span, + }, +} + // types.rs #[derive(LintDiagnostic)] #[diag(lint_range_endpoint_out_of_range)] @@ -2568,16 +2536,6 @@ pub(crate) mod unexpected_cfg_value { } #[derive(LintDiagnostic)] -#[diag(lint_unexpected_builtin_cfg)] -#[note(lint_controlled_by)] -#[note(lint_incoherent)] -pub(crate) struct UnexpectedBuiltinCfg { - pub(crate) cfg: String, - pub(crate) cfg_name: Symbol, - pub(crate) controlled_by: &'static str, -} - -#[derive(LintDiagnostic)] #[diag(lint_macro_use_deprecated)] #[help] pub(crate) struct MacroUseDeprecated; @@ -2691,14 +2649,6 @@ pub(crate) struct IllFormedAttributeInput { } #[derive(LintDiagnostic)] -pub(crate) 
enum InnerAttributeUnstable { - #[diag(lint_inner_macro_attribute_unstable)] - InnerMacroAttribute, - #[diag(lint_custom_inner_attribute_unstable)] - CustomInnerAttribute, -} - -#[derive(LintDiagnostic)] #[diag(lint_unknown_diagnostic_attribute)] pub(crate) struct UnknownDiagnosticAttribute { #[subdiagnostic] @@ -2769,7 +2719,7 @@ pub(crate) struct AbsPathWithModuleSugg { pub(crate) struct ProcMacroDeriveResolutionFallback { #[label] pub span: Span, - pub ns: Namespace, + pub ns_descr: &'static str, pub ident: Ident, } @@ -2891,14 +2841,6 @@ pub(crate) struct PatternsInFnsWithoutBodySub { } #[derive(LintDiagnostic)] -#[diag(lint_extern_without_abi)] -pub(crate) struct MissingAbi { - #[suggestion(code = "extern {default_abi}", applicability = "machine-applicable")] - pub span: Span, - pub default_abi: ExternAbi, -} - -#[derive(LintDiagnostic)] #[diag(lint_legacy_derive_helpers)] pub(crate) struct LegacyDeriveHelpers { #[label] diff --git a/compiler/rustc_lint/src/nonstandard_style.rs b/compiler/rustc_lint/src/nonstandard_style.rs index 8fafaa33d0c..65075cfecfa 100644 --- a/compiler/rustc_lint/src/nonstandard_style.rs +++ b/compiler/rustc_lint/src/nonstandard_style.rs @@ -5,7 +5,7 @@ use rustc_hir::attrs::{AttributeKind, ReprAttr}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{FnKind, Visitor}; -use rustc_hir::{AttrArgs, AttrItem, Attribute, GenericParamKind, PatExprKind, PatKind, find_attr}; +use rustc_hir::{Attribute, GenericParamKind, PatExprKind, PatKind, find_attr}; use rustc_middle::hir::nested_filter::All; use rustc_middle::ty; use rustc_session::config::CrateType; @@ -343,35 +343,27 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase { let crate_ident = if let Some(name) = &cx.tcx.sess.opts.crate_name { Some(Ident::from_str(name)) } else { - ast::attr::find_by_name(cx.tcx.hir_attrs(hir::CRATE_HIR_ID), sym::crate_name).and_then( - |attr| { - if let Attribute::Unparsed(n) = attr - && let AttrItem { args: AttrArgs::Eq { eq_span: _, expr: lit }, .. } = - n.as_ref() - && let ast::LitKind::Str(name, ..) = lit.kind - { - // Discard the double quotes surrounding the literal. - let sp = cx - .sess() - .source_map() - .span_to_snippet(lit.span) - .ok() - .and_then(|snippet| { - let left = snippet.find('"')?; - let right = snippet.rfind('"').map(|pos| snippet.len() - pos)?; - - Some( - lit.span - .with_lo(lit.span.lo() + BytePos(left as u32 + 1)) - .with_hi(lit.span.hi() - BytePos(right as u32)), - ) - }) - .unwrap_or(lit.span); - - Some(Ident::new(name, sp)) - } else { - None - } + find_attr!(cx.tcx.hir_attrs(hir::CRATE_HIR_ID), AttributeKind::CrateName{name, name_span,..} => (name, name_span)).map( + |(&name, &span)| { + // Discard the double quotes surrounding the literal. 
+ let sp = cx + .sess() + .source_map() + .span_to_snippet(span) + .ok() + .and_then(|snippet| { + let left = snippet.find('"')?; + let right = snippet.rfind('"').map(|pos| snippet.len() - pos)?; + + Some( + span + .with_lo(span.lo() + BytePos(left as u32 + 1)) + .with_hi(span.hi() - BytePos(right as u32)), + ) + }) + .unwrap_or(span); + + Ident::new(name, sp) }, ) }; diff --git a/compiler/rustc_lint/src/transmute.rs b/compiler/rustc_lint/src/transmute.rs index bc1d4587d07..239c8649041 100644 --- a/compiler/rustc_lint/src/transmute.rs +++ b/compiler/rustc_lint/src/transmute.rs @@ -1,3 +1,4 @@ +use rustc_ast::LitKind; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::LocalDefId; @@ -7,6 +8,7 @@ use rustc_middle::ty::{self, Ty}; use rustc_session::{declare_lint, impl_lint_pass}; use rustc_span::sym; +use crate::lints::{IntegerToPtrTransmutes, IntegerToPtrTransmutesSuggestion}; use crate::{LateContext, LateLintPass}; declare_lint! { @@ -67,9 +69,44 @@ declare_lint! { "detects transmutes that can also be achieved by other operations" } +declare_lint! { + /// The `integer_to_ptr_transmutes` lint detects integer to pointer + /// transmutes where the resulting pointers are undefined behavior to dereference. + /// + /// ### Example + /// + /// ```rust + /// fn foo(a: usize) -> *const u8 { + /// unsafe { + /// std::mem::transmute::<usize, *const u8>(a) + /// } + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Any attempt to use the resulting pointers are undefined behavior as the resulting + /// pointers won't have any provenance. + /// + /// Alternatively, [`std::ptr::with_exposed_provenance`] should be used, as they do not + /// carry the provenance requirement. If wanting to create pointers without provenance + /// [`std::ptr::without_provenance`] should be used instead. + /// + /// See [`std::mem::transmute`] in the reference for more details. + /// + /// [`std::mem::transmute`]: https://doc.rust-lang.org/std/mem/fn.transmute.html + /// [`std::ptr::with_exposed_provenance`]: https://doc.rust-lang.org/std/ptr/fn.with_exposed_provenance.html + /// [`std::ptr::without_provenance`]: https://doc.rust-lang.org/std/ptr/fn.without_provenance.html + pub INTEGER_TO_PTR_TRANSMUTES, + Warn, + "detects integer to pointer transmutes", +} + pub(crate) struct CheckTransmutes; -impl_lint_pass!(CheckTransmutes => [PTR_TO_INTEGER_TRANSMUTE_IN_CONSTS, UNNECESSARY_TRANSMUTES]); +impl_lint_pass!(CheckTransmutes => [PTR_TO_INTEGER_TRANSMUTE_IN_CONSTS, UNNECESSARY_TRANSMUTES, INTEGER_TO_PTR_TRANSMUTES]); impl<'tcx> LateLintPass<'tcx> for CheckTransmutes { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { @@ -94,9 +131,62 @@ impl<'tcx> LateLintPass<'tcx> for CheckTransmutes { check_ptr_transmute_in_const(cx, expr, body_owner_def_id, const_context, src, dst); check_unnecessary_transmute(cx, expr, callee, arg, const_context, src, dst); + check_int_to_ptr_transmute(cx, expr, arg, src, dst); } } +/// Check for transmutes from integer to pointers (*const/*mut and &/&mut). +/// +/// Using the resulting pointers would be undefined behavior. 
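A sketch of the rewrite this lint suggests, applied to the example from the lint documentation above; the exact suggestion text is inferred from the `with_exposed_provenance{suffix}` template in this patch:

    fn foo(a: usize) -> *const u8 {
        // before: unsafe { std::mem::transmute::<usize, *const u8>(a) }
        std::ptr::with_exposed_provenance::<u8>(a)
    }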
+fn check_int_to_ptr_transmute<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx hir::Expr<'tcx>, + arg: &'tcx hir::Expr<'tcx>, + src: Ty<'tcx>, + dst: Ty<'tcx>, +) { + if !matches!(src.kind(), ty::Uint(_) | ty::Int(_)) { + return; + } + let (ty::Ref(_, inner_ty, mutbl) | ty::RawPtr(inner_ty, mutbl)) = dst.kind() else { + return; + }; + // bail-out if the argument is literal 0 as we have other lints for those cases + if matches!(arg.kind, hir::ExprKind::Lit(hir::Lit { node: LitKind::Int(v, _), .. }) if v == 0) { + return; + } + // bail-out if the inner type is a ZST + let Ok(layout_inner_ty) = cx.tcx.layout_of(cx.typing_env().as_query_input(*inner_ty)) else { + return; + }; + if layout_inner_ty.is_1zst() { + return; + } + + let suffix = if mutbl.is_mut() { "_mut" } else { "" }; + cx.tcx.emit_node_span_lint( + INTEGER_TO_PTR_TRANSMUTES, + expr.hir_id, + expr.span, + IntegerToPtrTransmutes { + suggestion: if dst.is_ref() { + IntegerToPtrTransmutesSuggestion::ToRef { + dst: *inner_ty, + suffix, + ref_mutbl: mutbl.prefix_str(), + start_call: expr.span.shrink_to_lo().until(arg.span), + } + } else { + IntegerToPtrTransmutesSuggestion::ToPtr { + dst: *inner_ty, + suffix, + start_call: expr.span.shrink_to_lo().until(arg.span), + } + }, + }, + ); +} + /// Check for transmutes that exhibit undefined behavior. /// For example, transmuting pointers to integers in a const context. /// diff --git a/compiler/rustc_lint_defs/Cargo.toml b/compiler/rustc_lint_defs/Cargo.toml index 9ab350daf69..c8201d5ea8c 100644 --- a/compiler/rustc_lint_defs/Cargo.toml +++ b/compiler/rustc_lint_defs/Cargo.toml @@ -5,11 +5,10 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_messages = { path = "../rustc_error_messages" } -rustc_hir = { path = "../rustc_hir" } +rustc_hir_id = { path = "../rustc_hir_id" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs index 3bb7bbce567..2e84233e5a5 100644 --- a/compiler/rustc_lint_defs/src/lib.rs +++ b/compiler/rustc_lint_defs/src/lib.rs @@ -1,16 +1,15 @@ -use rustc_abi::ExternAbi; +use std::borrow::Cow; + use rustc_ast::AttrId; use rustc_ast::attr::AttributeExt; -use rustc_ast::node_id::NodeId; -use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::stable_hasher::{ HashStable, StableCompare, StableHasher, ToStableHashKey, }; -use rustc_error_messages::{DiagMessage, MultiSpan}; -use rustc_hir::def::Namespace; -use rustc_hir::def_id::DefPathHash; -use rustc_hir::{HashStableContext, HirId, ItemLocalId}; +use rustc_error_messages::{DiagArgValue, DiagMessage, IntoDiagArg, MultiSpan}; +use rustc_hir_id::{HashStableContext, HirId, ItemLocalId}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; +use rustc_span::def_id::DefPathHash; pub use rustc_span::edition::Edition; use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol, sym}; use serde::{Deserialize, Serialize}; @@ -138,7 +137,7 @@ impl LintExpectationId { } } -impl<HCX: rustc_hir::HashStableContext> HashStable<HCX> for LintExpectationId { +impl<HCX: HashStableContext> HashStable<HCX> for LintExpectationId { #[inline] fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { match self { @@ -156,7 +155,7 @@ impl<HCX: 
rustc_hir::HashStableContext> HashStable<HCX> for LintExpectationId { } } -impl<HCX: rustc_hir::HashStableContext> ToStableHashKey<HCX> for LintExpectationId { +impl<HCX: HashStableContext> ToStableHashKey<HCX> for LintExpectationId { type KeyType = (DefPathHash, ItemLocalId, u16, u16); #[inline] @@ -297,6 +296,12 @@ impl Level { } } +impl IntoDiagArg for Level { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Borrowed(self.to_cmd_flag())) + } +} + /// Specification of a single lint. #[derive(Copy, Clone, Debug)] pub struct Lint { @@ -617,7 +622,7 @@ pub enum BuiltinLintDiag { AbsPathWithModule(Span), ProcMacroDeriveResolutionFallback { span: Span, - ns: Namespace, + ns_descr: &'static str, ident: Ident, }, MacroExpandedMacroExportsAccessedByAbsolutePaths(Span), @@ -641,7 +646,6 @@ pub enum BuiltinLintDiag { path: String, since_kind: DeprecatedSinceKind, }, - MissingAbi(Span, ExternAbi), UnusedDocComment(Span), UnusedBuiltinAttribute { attr_name: Symbol, @@ -664,14 +668,6 @@ pub enum BuiltinLintDiag { is_string: bool, suggestion: Span, }, - HiddenUnicodeCodepoints { - label: String, - count: usize, - span_label: Span, - labels: Option<Vec<(char, Span)>>, - escape: bool, - spans: Vec<(char, Span)>, - }, TrailingMacro(bool, Ident), BreakWithLabelAndLoop(Span), UnicodeTextFlow(Span, String), @@ -796,68 +792,11 @@ pub enum BuiltinLintDiag { suggestions: Vec<String>, docs: Option<&'static str>, }, - InnerAttributeUnstable { - is_macro: bool, - }, OutOfScopeMacroCalls { span: Span, path: String, location: String, }, - UnexpectedBuiltinCfg { - cfg: String, - cfg_name: Symbol, - controlled_by: &'static str, - }, -} - -/// Lints that are buffered up early on in the `Session` before the -/// `LintLevels` is calculated. -#[derive(Debug)] -pub struct BufferedEarlyLint { - /// The span of code that we are linting on. - pub span: Option<MultiSpan>, - - /// The `NodeId` of the AST node that generated the lint. - pub node_id: NodeId, - - /// A lint Id that can be passed to - /// `rustc_lint::early::EarlyContextAndPass::check_id`. - pub lint_id: LintId, - - /// Customization of the `Diag<'_>` for the lint. - pub diagnostic: BuiltinLintDiag, -} - -#[derive(Default, Debug)] -pub struct LintBuffer { - pub map: FxIndexMap<NodeId, Vec<BufferedEarlyLint>>, -} - -impl LintBuffer { - pub fn add_early_lint(&mut self, early_lint: BufferedEarlyLint) { - self.map.entry(early_lint.node_id).or_default().push(early_lint); - } - - pub fn take(&mut self, id: NodeId) -> Vec<BufferedEarlyLint> { - // FIXME(#120456) - is `swap_remove` correct? 
- self.map.swap_remove(&id).unwrap_or_default() - } - - pub fn buffer_lint( - &mut self, - lint: &'static Lint, - node_id: NodeId, - span: impl Into<MultiSpan>, - diagnostic: BuiltinLintDiag, - ) { - self.add_early_lint(BufferedEarlyLint { - lint_id: LintId::of(lint), - node_id, - span: Some(span.into()), - diagnostic, - }); - } } pub type RegisteredTools = FxIndexSet<Ident>; diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index e4fe1fc2e42..e699e4b9c13 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -278,6 +278,7 @@ enum class LLVMRustAttributeKind { Writable = 42, DeadOnUnwind = 43, DeadOnReturn = 44, + CapturesReadOnly = 45, }; static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) { @@ -376,6 +377,8 @@ static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) { #else report_fatal_error("DeadOnReturn attribute requires LLVM 21 or later"); #endif + case LLVMRustAttributeKind::CapturesReadOnly: + report_fatal_error("Should be handled separately"); } report_fatal_error("bad LLVMRustAttributeKind"); } @@ -430,6 +433,11 @@ LLVMRustCreateAttrNoValue(LLVMContextRef C, LLVMRustAttributeKind RustAttr) { if (RustAttr == LLVMRustAttributeKind::NoCapture) { return wrap(Attribute::getWithCaptureInfo(*unwrap(C), CaptureInfo::none())); } + if (RustAttr == LLVMRustAttributeKind::CapturesReadOnly) { + return wrap(Attribute::getWithCaptureInfo( + *unwrap(C), CaptureInfo(CaptureComponents::Address | + CaptureComponents::ReadProvenance))); + } #endif return wrap(Attribute::get(*unwrap(C), fromRust(RustAttr))); } diff --git a/compiler/rustc_macros/src/print_attribute.rs b/compiler/rustc_macros/src/print_attribute.rs index 9023520c750..0114e0dfde0 100644 --- a/compiler/rustc_macros/src/print_attribute.rs +++ b/compiler/rustc_macros/src/print_attribute.rs @@ -4,7 +4,7 @@ use syn::spanned::Spanned; use syn::{Data, Fields, Ident}; use synstructure::Structure; -fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, TokenStream) { +fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream) { let string_name = name.to_string(); let mut disps = vec![quote! {let mut __printed_anything = false;}]; @@ -43,7 +43,6 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok #(#disps)* __p.word("}"); }, - quote! { true }, ) } Fields::Unnamed(fields_unnamed) => { @@ -76,10 +75,9 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok #(#disps)* __p.pclose(); }, - quote! { true }, ) } - Fields::Unit => (quote! {}, quote! { __p.word(#string_name) }, quote! { true }), + Fields::Unit => (quote! {}, quote! { __p.word(#string_name) }), } } @@ -89,51 +87,33 @@ pub(crate) fn print_attribute(input: Structure<'_>) -> TokenStream { }; // Must be applied to an enum type. - let (code, printed) = match &input.ast().data { + let code = match &input.ast().data { Data::Enum(e) => { - let (arms, printed) = e + let arms = e .variants .iter() .map(|x| { let ident = &x.ident; - let (pat, code, printed) = print_fields(ident, &x.fields); + let (pat, code) = print_fields(ident, &x.fields); - ( - quote! { - Self::#ident #pat => {#code} - }, - quote! { - Self::#ident #pat => {#printed} - }, - ) + quote! { + Self::#ident #pat => {#code} + } }) - .unzip::<_, _, Vec<_>, Vec<_>>(); + .collect::<Vec<_>>(); - ( - quote! { - match self { - #(#arms)* - } - }, - quote! 
{ - match self { - #(#printed)* - } - }, - ) + quote! { + match self { + #(#arms)* + } + } } Data::Struct(s) => { - let (pat, code, printed) = print_fields(&input.ast().ident, &s.fields); - ( - quote! { - let Self #pat = self; - #code - }, - quote! { - let Self #pat = self; - #printed - }, - ) + let (pat, code) = print_fields(&input.ast().ident, &s.fields); + quote! { + let Self #pat = self; + #code + } } Data::Union(u) => { return span_error(u.union_token.span(), "can't derive PrintAttribute on unions"); @@ -144,7 +124,7 @@ pub(crate) fn print_attribute(input: Structure<'_>) -> TokenStream { input.gen_impl(quote! { #[allow(unused)] gen impl PrintAttribute for @Self { - fn should_render(&self) -> bool { #printed } + fn should_render(&self) -> bool { true } fn print_attribute(&self, __p: &mut rustc_ast_pretty::pp::Printer) { #code } } }) diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index 958e314efab..63f1b51df1c 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -701,7 +701,7 @@ impl<'tcx> Collector<'tcx> { .link_ordinal .map_or(import_name_type, |ord| Some(PeImportNameType::Ordinal(ord))); - let name = codegen_fn_attrs.link_name.unwrap_or_else(|| self.tcx.item_name(item)); + let name = codegen_fn_attrs.symbol_name.unwrap_or_else(|| self.tcx.item_name(item)); if self.tcx.sess.target.binary_format == BinaryFormat::Elf { let name = name.as_str(); diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index e5cc23c213d..cf0549fa668 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -60,6 +60,7 @@ #![feature(try_trait_v2_residual)] #![feature(try_trait_v2_yeet)] #![feature(type_alias_impl_trait)] +#![feature(unwrap_infallible)] #![feature(yeet_expr)] #![recursion_limit = "256"] // tidy-alphabetical-end diff --git a/compiler/rustc_middle/src/lint.rs b/compiler/rustc_middle/src/lint.rs index 341a735f88f..f70b7b6fad7 100644 --- a/compiler/rustc_middle/src/lint.rs +++ b/compiler/rustc_middle/src/lint.rs @@ -211,11 +211,28 @@ impl LintExpectation { } fn explain_lint_level_source( + sess: &Session, lint: &'static Lint, level: Level, src: LintLevelSource, err: &mut Diag<'_, ()>, ) { + // Find the name of the lint group that contains the given lint. + // Assumes the lint only belongs to one group. 
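To preview the effect of this change on the emitted note (the `dead_code`/`unused` pairing is an assumed example, not taken from this patch):

    // Mirrors the `format!` used below for `LintLevelSource::Default`.
    let (level_str, name, group_name) = ("warn", "dead_code", "unused");
    assert_eq!(
        format!("`#[{level_str}({name})]` (part of `#[{level_str}({group_name})]`) on by default"),
        "`#[warn(dead_code)]` (part of `#[warn(unused)]`) on by default",
    );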
+ let lint_group_name = |lint| { + let lint_groups_iter = sess.lint_groups_iter(); + let lint_id = LintId::of(lint); + lint_groups_iter + .filter(|lint_group| !lint_group.is_externally_loaded) + .find(|lint_group| { + lint_group + .lints + .iter() + .find(|lint_group_lint| **lint_group_lint == lint_id) + .is_some() + }) + .map(|lint_group| lint_group.name) + }; let name = lint.name_lower(); if let Level::Allow = level { // Do not point at `#[allow(compat_lint)]` as the reason for a compatibility lint @@ -224,7 +241,15 @@ fn explain_lint_level_source( } match src { LintLevelSource::Default => { - err.note_once(format!("`#[{}({})]` on by default", level.as_str(), name)); + let level_str = level.as_str(); + match lint_group_name(lint) { + Some(group_name) => { + err.note_once(format!("`#[{level_str}({name})]` (part of `#[{level_str}({group_name})]`) on by default")); + } + None => { + err.note_once(format!("`#[{level_str}({name})]` on by default")); + } + } } LintLevelSource::CommandLine(lint_flag_val, orig_level) => { let flag = orig_level.to_cmd_flag(); @@ -427,7 +452,7 @@ pub fn lint_level( decorate(&mut err); } - explain_lint_level_source(lint, level, src, &mut err); + explain_lint_level_source(sess, lint, level, src, &mut err); err.emit() } lint_level_impl(sess, lint, level, span, Box::new(decorate)) diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index da17ac04c76..78cafe8566b 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -35,14 +35,10 @@ pub struct CodegenFnAttrs { pub inline: InlineAttr, /// Parsed representation of the `#[optimize]` attribute pub optimize: OptimizeAttr, - /// The `#[export_name = "..."]` attribute, indicating a custom symbol a - /// function should be exported under - pub export_name: Option<Symbol>, - /// The `#[link_name = "..."]` attribute, indicating a custom symbol an - /// imported function should be imported as. Note that `export_name` - /// probably isn't set when this is set, this is for foreign items while - /// `#[export_name]` is for Rust-defined functions. - pub link_name: Option<Symbol>, + /// The name this function will be imported/exported under. This can be set + /// using the `#[export_name = "..."]` or `#[link_name = "..."]` attribute + /// depending on if this is a function definition or foreign function. + pub symbol_name: Option<Symbol>, /// The `#[link_ordinal = "..."]` attribute, indicating an ordinal an /// imported function has in the dynamic library. Note that this must not /// be set when `link_name` is set. This is for foreign items with the @@ -76,13 +72,23 @@ pub struct CodegenFnAttrs { pub patchable_function_entry: Option<PatchableFunctionEntry>, } +#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable, PartialEq, Eq)] +pub enum TargetFeatureKind { + /// The feature is implied by another feature, rather than explicitly added by the + /// `#[target_feature]` attribute + Implied, + /// The feature is added by the regular `target_feature` attribute. + Enabled, + /// The feature is added by the unsafe `force_target_feature` attribute. + Forced, +} + #[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] pub struct TargetFeature { /// The name of the target feature (e.g. 
"avx") pub name: Symbol, - /// The feature is implied by another feature, rather than explicitly added by the - /// `#[target_feature]` attribute - pub implied: bool, + /// The way this feature was enabled. + pub kind: TargetFeatureKind, } #[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] @@ -167,8 +173,7 @@ impl CodegenFnAttrs { flags: CodegenFnAttrFlags::empty(), inline: InlineAttr::None, optimize: OptimizeAttr::Default, - export_name: None, - link_name: None, + symbol_name: None, link_ordinal: None, target_features: vec![], safe_target_features: false, @@ -196,7 +201,7 @@ impl CodegenFnAttrs { self.flags.contains(CodegenFnAttrFlags::NO_MANGLE) || self.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) - || self.export_name.is_some() + || self.symbol_name.is_some() || match self.linkage { // These are private, so make sure we don't try to consider // them external. diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index 4a19cf1563c..18520089e3e 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -4,7 +4,7 @@ use std::num::NonZero; use rustc_ast::NodeId; -use rustc_errors::{Applicability, Diag, EmissionGuarantee}; +use rustc_errors::{Applicability, Diag, EmissionGuarantee, LintBuffer}; use rustc_feature::GateIssue; use rustc_hir::attrs::{DeprecatedSince, Deprecation}; use rustc_hir::def_id::{DefId, LocalDefId}; @@ -12,7 +12,7 @@ use rustc_hir::{self as hir, ConstStability, DefaultBodyStability, HirId, Stabil use rustc_macros::{Decodable, Encodable, HashStable, Subdiagnostic}; use rustc_session::Session; use rustc_session::lint::builtin::{DEPRECATED, DEPRECATED_IN_FUTURE, SOFT_UNSTABLE}; -use rustc_session::lint::{BuiltinLintDiag, DeprecatedSinceKind, Level, Lint, LintBuffer}; +use rustc_session::lint::{BuiltinLintDiag, DeprecatedSinceKind, Level, Lint}; use rustc_session::parse::feature_err_issue; use rustc_span::{Span, Symbol, sym}; use tracing::debug; diff --git a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs index ea8596ea286..dbbd95408c8 100644 --- a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs +++ b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs @@ -120,6 +120,17 @@ impl<Prov: Provenance> ProvenanceMap<Prov> { } } + /// Gets the provenances of all bytes (including from pointers) in a range. + pub fn get_range( + &self, + cx: &impl HasDataLayout, + range: AllocRange, + ) -> impl Iterator<Item = Prov> { + let ptr_provs = self.range_ptrs_get(range, cx).iter().map(|(_, p)| *p); + let byte_provs = self.range_bytes_get(range).iter().map(|(_, (p, _))| *p); + ptr_provs.chain(byte_provs) + } + /// Attempt to merge per-byte provenance back into ptr chunks, if the right fragments /// sit next to each other. Return `false` is that is not possible due to partial pointers. pub fn merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool { diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index 0e6f797b1e4..533066bdef9 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -548,10 +548,27 @@ impl<'tcx> CodegenUnit<'tcx> { let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect(); if !tcx.sess.opts.unstable_opts.codegen_source_order { - // It's already deterministic, so we can just use it. 
- return items; + // In this case, we do not need to keep the items in any specific order, as the input + // is already deterministic. + // + // However, it seems that moving related things (such as different + // monomorphizations of the same function) close to one another is actually beneficial + // for LLVM performance. + // LLVM will codegen the items in the order we pass them to it, and when it handles + // similar things in succession, it seems that it leads to better cache utilization, + // less branch mispredictions and in general to better performance. + // For example, if we have functions `a`, `c::<u32>`, `b`, `c::<i16>`, `d` and + // `c::<bool>`, it seems that it helps LLVM's performance to codegen the three `c` + // instantiations right after one another, as they will likely reference similar types, + // call similar functions, etc. + // + // See https://github.com/rust-lang/rust/pull/145358 for more details. + // + // Sorting by symbol name should not incur any new non-determinism. + items.sort_by_cached_key(|&(i, _)| i.symbol_name(tcx)); + } else { + items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i)); } - items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i)); items } diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index f2d5b13cbc8..128ae8549f7 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -929,7 +929,10 @@ impl<'tcx> TerminatorKind<'tcx> { } Yield { value, resume_arg, .. } => write!(fmt, "{resume_arg:?} = yield({value:?})"), Unreachable => write!(fmt, "unreachable"), - Drop { place, .. } => write!(fmt, "drop({place:?})"), + Drop { place, async_fut: None, .. } => write!(fmt, "drop({place:?})"), + Drop { place, async_fut: Some(async_fut), .. } => { + write!(fmt, "async drop({place:?}; poll={async_fut:?})") + } Call { func, args, destination, .. } => { write!(fmt, "{destination:?} = ")?; write!(fmt, "{func:?}(")?; diff --git a/compiler/rustc_middle/src/mir/statement.rs b/compiler/rustc_middle/src/mir/statement.rs index 683d7b48617..6e52bc775ef 100644 --- a/compiler/rustc_middle/src/mir/statement.rs +++ b/compiler/rustc_middle/src/mir/statement.rs @@ -160,7 +160,11 @@ impl<'tcx> PlaceTy<'tcx> { /// Convenience wrapper around `projection_ty_core` for `PlaceElem`, /// where we can just use the `Ty` that is already stored inline on /// field projection elems. - pub fn projection_ty(self, tcx: TyCtxt<'tcx>, elem: PlaceElem<'tcx>) -> PlaceTy<'tcx> { + pub fn projection_ty<V: ::std::fmt::Debug>( + self, + tcx: TyCtxt<'tcx>, + elem: ProjectionElem<V, Ty<'tcx>>, + ) -> PlaceTy<'tcx> { self.projection_ty_core(tcx, &elem, |ty| ty, |_, _, _, ty| ty, |ty| ty) } @@ -290,6 +294,36 @@ impl<V, T> ProjectionElem<V, T> { Self::UnwrapUnsafeBinder(..) => false, } } + + /// Returns the `ProjectionKind` associated to this projection. + pub fn kind(self) -> ProjectionKind { + self.try_map(|_| Some(()), |_| ()).unwrap() + } + + /// Apply functions to types and values in this projection and return the result. 
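The codegen-unit ordering comment above argues that handing LLVM similar items back-to-back improves cache utilization and branch prediction, and that sorting by mangled symbol name is enough to group monomorphizations of the same generic function while staying deterministic. A standalone sketch of that grouping effect, reusing the comment's own `a`/`b`/`c::<...>`/`d` example with plain strings in place of `MonoItem`s and their `SymbolName`s:

fn main() {
    // Hypothetical item names in source order: the `c` instantiations are scattered.
    let mut items = vec![
        "demo::a",
        "demo::c::<u32>",
        "demo::b",
        "demo::c::<i16>",
        "demo::d",
        "demo::c::<bool>",
    ];

    // `sort_by_cached_key` computes each key once; sorting by (symbol) name is
    // deterministic and places the three `c` instantiations next to each other,
    // which is the locality the comment above is after.
    items.sort_by_cached_key(|name| name.to_string());

    assert_eq!(
        items,
        ["demo::a", "demo::b", "demo::c::<bool>", "demo::c::<i16>", "demo::c::<u32>", "demo::d"]
    );
}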
+ pub fn try_map<V2, T2>( + self, + v: impl FnOnce(V) -> Option<V2>, + t: impl FnOnce(T) -> T2, + ) -> Option<ProjectionElem<V2, T2>> { + Some(match self { + ProjectionElem::Deref => ProjectionElem::Deref, + ProjectionElem::Downcast(name, read_variant) => { + ProjectionElem::Downcast(name, read_variant) + } + ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, t(ty)), + ProjectionElem::ConstantIndex { offset, min_length, from_end } => { + ProjectionElem::ConstantIndex { offset, min_length, from_end } + } + ProjectionElem::Subslice { from, to, from_end } => { + ProjectionElem::Subslice { from, to, from_end } + } + ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(t(ty)), + ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(t(ty)), + ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(t(ty)), + ProjectionElem::Index(val) => ProjectionElem::Index(v(val)?), + }) + } } /// Alias for projections as they appear in `UserTypeProjection`, where we diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 6039a03aa29..3b8def67f92 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -1371,12 +1371,7 @@ pub enum Rvalue<'tcx> { /// Creates an array where each element is the value of the operand. /// - /// This is the cause of a bug in the case where the repetition count is zero because the value - /// is not dropped, see [#74836]. - /// /// Corresponds to source code like `[x; 32]`. - /// - /// [#74836]: https://github.com/rust-lang/rust/issues/74836 Repeat(Operand<'tcx>, ty::Const<'tcx>), /// Creates a reference of the indicated kind to the place. diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 42a68b29ec7..904d78d69b6 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -531,13 +531,20 @@ macro_rules! make_mir_visitor { unwind: _, replace: _, drop: _, - async_fut: _, + async_fut, } => { self.visit_place( place, PlaceContext::MutatingUse(MutatingUseContext::Drop), location ); + if let Some(async_fut) = async_fut { + self.visit_local( + $(&$mutability)? *async_fut, + PlaceContext::MutatingUse(MutatingUseContext::Borrow), + location + ); + } } TerminatorKind::Call { diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 3bb8353f49e..7bd8a0525a2 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -696,6 +696,22 @@ rustc_queries! { return_result_from_ensure_ok } + /// Used in case `mir_borrowck` fails to prove an obligation. We generally assume that + /// all goals we prove in MIR type check hold as we've already checked them in HIR typeck. + /// + /// However, we replace each free region in the MIR body with a unique region inference + /// variable. As we may rely on structural identity when proving goals this may cause a + /// goal to no longer hold. We store obligations for which this may happen during HIR + /// typeck in the `TypeckResults`. We then uniquify and reprove them in case MIR typeck + /// encounters an unexpected error. We expect this to result in an error when used and + /// delay a bug if it does not. + query check_potentially_region_dependent_goals(key: LocalDefId) -> Result<(), ErrorGuaranteed> { + desc { + |tcx| "reproving potentially region dependent HIR typeck goals for `{}`", + tcx.def_path_str(key) + } + } + /// MIR after our optimization passes have run.
This is MIR that is ready /// for codegen. This is also the only query that can fetch non-local MIR, at present. query optimized_mir(key: DefId) -> &'tcx mir::Body<'tcx> { @@ -1116,6 +1132,11 @@ rustc_queries! { } /// Unsafety-check this `LocalDefId`. + query check_transmutes(key: LocalDefId) { + desc { |tcx| "check transmute calls inside `{}`", tcx.def_path_str(key) } + } + + /// Unsafety-check this `LocalDefId`. query check_unsafety(key: LocalDefId) { desc { |tcx| "unsafety-checking `{}`", tcx.def_path_str(key) } } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index a7ac3442898..546791135ba 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -645,34 +645,29 @@ impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> { let parent = Option::<LocalDefId>::decode(self); let tag: u8 = Decodable::decode(self); - if tag == TAG_PARTIAL_SPAN { - return Span::new(BytePos(0), BytePos(0), ctxt, parent); - } else if tag == TAG_RELATIVE_SPAN { - let dlo = u32::decode(self); - let dto = u32::decode(self); - - let enclosing = self.tcx.source_span_untracked(parent.unwrap()).data_untracked(); - let span = Span::new( - enclosing.lo + BytePos::from_u32(dlo), - enclosing.lo + BytePos::from_u32(dto), - ctxt, - parent, - ); - - return span; - } else { - debug_assert_eq!(tag, TAG_FULL_SPAN); - } - - let file_lo_index = SourceFileIndex::decode(self); - let line_lo = usize::decode(self); - let col_lo = RelativeBytePos::decode(self); - let len = BytePos::decode(self); - - let file_lo = self.file_index_to_file(file_lo_index); - let lo = file_lo.lines()[line_lo - 1] + col_lo; - let lo = file_lo.absolute_position(lo); - let hi = lo + len; + let (lo, hi) = match tag { + TAG_PARTIAL_SPAN => (BytePos(0), BytePos(0)), + TAG_RELATIVE_SPAN => { + let dlo = u32::decode(self); + let dto = u32::decode(self); + + let enclosing = self.tcx.source_span_untracked(parent.unwrap()).data_untracked(); + (enclosing.lo + BytePos::from_u32(dlo), enclosing.lo + BytePos::from_u32(dto)) + } + TAG_FULL_SPAN => { + let file_lo_index = SourceFileIndex::decode(self); + let line_lo = usize::decode(self); + let col_lo = RelativeBytePos::decode(self); + let len = BytePos::decode(self); + + let file_lo = self.file_index_to_file(file_lo_index); + let lo = file_lo.lines()[line_lo - 1] + col_lo; + let lo = file_lo.absolute_position(lo); + let hi = lo + len; + (lo, hi) + } + _ => unreachable!(), + }; Span::new(lo, hi, ctxt, parent) } diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs index 5bdde3a514e..32f91bfba6b 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -760,6 +760,9 @@ pub enum DynCompatibilityViolation { // Supertrait has a non-lifetime `for<T>` binder. SupertraitNonLifetimeBinder(SmallVec<[Span; 1]>), + // Trait has a `const Trait` supertrait. + SupertraitConst(SmallVec<[Span; 1]>), + /// Method has something illegal. 
Method(Symbol, MethodViolationCode, Span), @@ -785,6 +788,9 @@ impl DynCompatibilityViolation { DynCompatibilityViolation::SupertraitNonLifetimeBinder(_) => { "where clause cannot reference non-lifetime `for<...>` variables".into() } + DynCompatibilityViolation::SupertraitConst(_) => { + "it cannot have a `const` supertrait".into() + } DynCompatibilityViolation::Method(name, MethodViolationCode::StaticMethod(_), _) => { format!("associated function `{name}` has no `self` parameter").into() } @@ -842,7 +848,8 @@ impl DynCompatibilityViolation { match self { DynCompatibilityViolation::SizedSelf(_) | DynCompatibilityViolation::SupertraitSelf(_) - | DynCompatibilityViolation::SupertraitNonLifetimeBinder(..) => { + | DynCompatibilityViolation::SupertraitNonLifetimeBinder(..) + | DynCompatibilityViolation::SupertraitConst(_) => { DynCompatibilityViolationSolution::None } DynCompatibilityViolation::Method( @@ -873,15 +880,17 @@ impl DynCompatibilityViolation { match self { DynCompatibilityViolation::SupertraitSelf(spans) | DynCompatibilityViolation::SizedSelf(spans) - | DynCompatibilityViolation::SupertraitNonLifetimeBinder(spans) => spans.clone(), + | DynCompatibilityViolation::SupertraitNonLifetimeBinder(spans) + | DynCompatibilityViolation::SupertraitConst(spans) => spans.clone(), DynCompatibilityViolation::AssocConst(_, span) | DynCompatibilityViolation::GAT(_, span) - | DynCompatibilityViolation::Method(_, _, span) - if *span != DUMMY_SP => - { - smallvec![*span] + | DynCompatibilityViolation::Method(_, _, span) => { + if *span != DUMMY_SP { + smallvec![*span] + } else { + smallvec![] + } } - _ => smallvec![], } } } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 49a4733de3b..4990a85466e 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -33,7 +33,7 @@ use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, LintDiagnostic, LintEmitter, MultiSpan, }; use rustc_hir::attrs::AttributeKind; -use rustc_hir::def::{CtorKind, DefKind}; +use rustc_hir::def::{CtorKind, CtorOf, DefKind}; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId}; use rustc_hir::definitions::{DefPathData, Definitions, DisambiguatorState}; use rustc_hir::intravisit::VisitorExt; @@ -445,7 +445,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> { } fn fn_is_const(self, def_id: DefId) -> bool { - debug_assert_matches!(self.def_kind(def_id), DefKind::Fn | DefKind::AssocFn); + debug_assert_matches!( + self.def_kind(def_id), + DefKind::Fn | DefKind::AssocFn | DefKind::Ctor(CtorOf::Struct, CtorKind::Fn) + ); self.is_conditionally_const(def_id) } @@ -1418,6 +1421,8 @@ pub struct TyCtxt<'tcx> { } impl<'tcx> LintEmitter for TyCtxt<'tcx> { + type Id = HirId; + fn emit_node_span_lint( self, lint: &'static Lint, diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs index b3042904a29..b9a6f67ab0d 100644 --- a/compiler/rustc_middle/src/ty/diagnostics.rs +++ b/compiler/rustc_middle/src/ty/diagnostics.rs @@ -23,7 +23,7 @@ impl IntoDiagArg for Ty<'_> { fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> rustc_errors::DiagArgValue { ty::tls::with(|tcx| { let ty = tcx.short_string(self, path); - rustc_errors::DiagArgValue::Str(std::borrow::Cow::Owned(ty)) + DiagArgValue::Str(std::borrow::Cow::Owned(ty)) }) } } @@ -32,7 +32,7 @@ impl IntoDiagArg for Instance<'_> { fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> rustc_errors::DiagArgValue { 
ty::tls::with(|tcx| { let instance = tcx.short_string_namespace(self, path, Namespace::ValueNS); - rustc_errors::DiagArgValue::Str(std::borrow::Cow::Owned(instance)) + DiagArgValue::Str(std::borrow::Cow::Owned(instance)) }) } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index e70c98ab704..e567ba05f61 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -32,7 +32,7 @@ use rustc_data_structures::intern::Interned; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::steal::Steal; use rustc_data_structures::unord::{UnordMap, UnordSet}; -use rustc_errors::{Diag, ErrorGuaranteed}; +use rustc_errors::{Diag, ErrorGuaranteed, LintBuffer}; use rustc_hir::attrs::{AttributeKind, StrippedCfgItem}; use rustc_hir::def::{CtorKind, CtorOf, DefKind, DocLinkResMap, LifetimeRes, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap}; @@ -46,7 +46,6 @@ use rustc_macros::{ }; use rustc_query_system::ich::StableHashingContext; use rustc_serialize::{Decodable, Encodable}; -use rustc_session::lint::LintBuffer; pub use rustc_session::lint::RegisteredTools; use rustc_span::hygiene::MacroKind; use rustc_span::{DUMMY_SP, ExpnId, ExpnKind, Ident, Span, Symbol, sym}; @@ -830,14 +829,15 @@ impl<'tcx> OpaqueHiddenType<'tcx> { // Convert the type from the function into a type valid outside by mapping generic // parameters to into the context of the opaque. // - // We erase regions when doing this during HIR typeck. + // We erase regions when doing this during HIR typeck. We manually use `fold_regions` + // here as we do not want to anonymize bound variables. let this = match defining_scope_kind { - DefiningScopeKind::HirTypeck => tcx.erase_regions(self), + DefiningScopeKind::HirTypeck => fold_regions(tcx, self, |_, _| tcx.lifetimes.re_erased), DefiningScopeKind::MirBorrowck => self, }; let result = this.fold_with(&mut opaque_types::ReverseMapper::new(tcx, map, self.span)); if cfg!(debug_assertions) && matches!(defining_scope_kind, DefiningScopeKind::HirTypeck) { - assert_eq!(result.ty, tcx.erase_regions(result.ty)); + assert_eq!(result.ty, fold_regions(tcx, result.ty, |_, _| tcx.lifetimes.re_erased)); } result } diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index e6feafea122..9e6f277ef77 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -124,6 +124,15 @@ pub trait Printer<'tcx>: Sized { trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<(), PrintError>; + fn print_coroutine_with_kind( + &mut self, + def_id: DefId, + parent_args: &'tcx [GenericArg<'tcx>], + kind: Ty<'tcx>, + ) -> Result<(), PrintError> { + self.print_path_with_generic_args(|p| p.print_def_path(def_id, parent_args), &[kind.into()]) + } + // Defaults (should not be overridden): #[instrument(skip(self), level = "debug")] @@ -162,9 +171,10 @@ pub trait Printer<'tcx>: Sized { )) = self.tcx().coroutine_kind(def_id) && args.len() > parent_args.len() { - return self.print_path_with_generic_args( - |p| p.print_def_path(def_id, parent_args), - &args[..parent_args.len() + 1][..1], + return self.print_coroutine_with_kind( + def_id, + parent_args, + args[parent_args.len()].expect_ty(), ); } else { // Closures' own generics are only captures, don't print them. 
diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 72474a60566..755fc68d86f 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -821,10 +821,38 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn new_coroutine_witness( tcx: TyCtxt<'tcx>, - id: DefId, + def_id: DefId, args: GenericArgsRef<'tcx>, ) -> Ty<'tcx> { - Ty::new(tcx, CoroutineWitness(id, args)) + if cfg!(debug_assertions) { + tcx.debug_assert_args_compatible(tcx.typeck_root_def_id(def_id), args); + } + Ty::new(tcx, CoroutineWitness(def_id, args)) + } + + pub fn new_coroutine_witness_for_coroutine( + tcx: TyCtxt<'tcx>, + def_id: DefId, + coroutine_args: GenericArgsRef<'tcx>, + ) -> Ty<'tcx> { + tcx.debug_assert_args_compatible(def_id, coroutine_args); + // HACK: Coroutine witness types are lifetime erased, so they + // never reference any lifetime args from the coroutine. We erase + // the regions here since we may get into situations where a + // coroutine is recursively contained within itself, leading to + // witness types that differ by region args. This means that + // cycle detection in fulfillment will not kick in, which leads + // to unnecessary overflows in async code. See the issue: + // <https://github.com/rust-lang/rust/issues/145151>. + let args = + ty::GenericArgs::for_item(tcx, tcx.typeck_root_def_id(def_id), |def, _| { + match def.kind { + ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), + ty::GenericParamDefKind::Type { .. } + | ty::GenericParamDefKind::Const { .. } => coroutine_args[def.index as usize], + } + }); + Ty::new_coroutine_witness(tcx, def_id, args) + } + // misc @@ -985,6 +1013,14 @@ impl<'tcx> rustc_type_ir::inherent::Ty<TyCtxt<'tcx>> for Ty<'tcx> { Ty::new_coroutine_witness(interner, def_id, args) } + fn new_coroutine_witness_for_coroutine( + interner: TyCtxt<'tcx>, + def_id: DefId, + coroutine_args: ty::GenericArgsRef<'tcx>, + ) -> Self { + Ty::new_coroutine_witness_for_coroutine(interner, def_id, coroutine_args) + } + fn new_ptr(interner: TyCtxt<'tcx>, ty: Self, mutbl: hir::Mutability) -> Self { Ty::new_ptr(interner, ty, mutbl) } diff --git a/compiler/rustc_middle/src/ty/typeck_results.rs b/compiler/rustc_middle/src/ty/typeck_results.rs index 6b187c5325a..8dd80aab946 100644 --- a/compiler/rustc_middle/src/ty/typeck_results.rs +++ b/compiler/rustc_middle/src/ty/typeck_results.rs @@ -206,10 +206,25 @@ pub struct TypeckResults<'tcx> { /// formatting modified file tests/ui/coroutine/retain-resume-ref.rs pub coroutine_stalled_predicates: FxIndexSet<(ty::Predicate<'tcx>, ObligationCause<'tcx>)>, + /// Goals proven during HIR typeck which may be potentially region dependent. + /// + /// Borrowck *uniquifies* regions which may cause these goals to be ambiguous in MIR + /// type check. We ICE if goals fail in borrowck to detect bugs during MIR building or + /// missed checks in HIR typeck. To avoid ICE due to region dependence we store all + /// goals which may be region dependent and reprove them in case borrowck encounters + /// an error. + pub potentially_region_dependent_goals: + FxIndexSet<(ty::Predicate<'tcx>, ObligationCause<'tcx>)>, + /// Contains the data for evaluating the effect of feature `capture_disjoint_fields` /// on closure size. pub closure_size_eval: LocalDefIdMap<ClosureSizeProfileData<'tcx>>, + /// Stores the types involved in calls to the `transmute` intrinsic. These are meant to be checked + /// outside of typeck and borrowck to avoid cycles with opaque types and coroutine layout + /// computation.
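The `new_coroutine_witness_for_coroutine` comment above explains that witness types are built with erased regions so that a coroutine which (indirectly) contains itself produces identical witness types, letting fulfillment's cycle detection kick in instead of overflowing. A hedged surface-level illustration of the kind of code this concerns, namely recursive async functions; this is not a test taken from the linked issue:

// A recursive async fn: its coroutine witness (the record of types live across
// `.await` points) ends up mentioning the coroutine's own future again. If the
// witness carried region arguments, the recursive occurrences could differ only
// in their regions and would not be recognised as a cycle.
async fn countdown(n: u32) {
    if n > 0 {
        Box::pin(countdown(n - 1)).await;
    }
}

fn main() {
    // Build (but do not run) the future; checking an auto trait like `Send`
    // is what walks through the coroutine witness.
    let fut = countdown(3);
    fn assert_send<T: Send>(_: &T) {}
    assert_send(&fut);
}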
+ pub transmutes_to_check: Vec<(Ty<'tcx>, Ty<'tcx>, HirId)>, + /// Container types and field indices of `offset_of!` expressions offset_of_data: ItemLocalMap<(Ty<'tcx>, Vec<(VariantIdx, FieldIdx)>)>, } @@ -240,7 +255,9 @@ impl<'tcx> TypeckResults<'tcx> { closure_fake_reads: Default::default(), rvalue_scopes: Default::default(), coroutine_stalled_predicates: Default::default(), + potentially_region_dependent_goals: Default::default(), closure_size_eval: Default::default(), + transmutes_to_check: Default::default(), offset_of_data: Default::default(), } } diff --git a/compiler/rustc_mir_build/src/builder/coverageinfo.rs b/compiler/rustc_mir_build/src/builder/coverageinfo.rs index 14199c20921..091b9dad5bc 100644 --- a/compiler/rustc_mir_build/src/builder/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/builder/coverageinfo.rs @@ -157,6 +157,21 @@ impl CoverageInfoBuilder { // if there's nothing interesting in it. Box::new(CoverageInfoHi { num_block_markers, branch_spans }) } + + pub(crate) fn as_done(&self) -> Box<CoverageInfoHi> { + let &Self { nots: _, markers: BlockMarkerGen { num_block_markers }, ref branch_info } = + self; + + let branch_spans = branch_info + .as_ref() + .map(|branch_info| branch_info.branch_spans.as_slice()) + .unwrap_or_default() + .to_owned(); + + // For simplicity, always return an info struct (without Option), even + // if there's nothing interesting in it. + Box::new(CoverageInfoHi { num_block_markers, branch_spans }) + } } impl<'tcx> Builder<'_, 'tcx> { diff --git a/compiler/rustc_mir_build/src/builder/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs index 9570760f943..6a8f0b21ee0 100644 --- a/compiler/rustc_mir_build/src/builder/mod.rs +++ b/compiler/rustc_mir_build/src/builder/mod.rs @@ -790,6 +790,28 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { builder } + #[allow(dead_code)] + fn dump_for_debugging(&self) { + let mut body = Body::new( + MirSource::item(self.def_id.to_def_id()), + self.cfg.basic_blocks.clone(), + self.source_scopes.clone(), + self.local_decls.clone(), + self.canonical_user_type_annotations.clone(), + self.arg_count.clone(), + self.var_debug_info.clone(), + self.fn_span.clone(), + self.coroutine.clone(), + None, + ); + body.coverage_info_hi = self.coverage_info.as_ref().map(|b| b.as_done()); + + use rustc_middle::mir::pretty; + let options = pretty::PrettyPrintMirOptions::from_cli(self.tcx); + pretty::write_mir_fn(self.tcx, &body, &mut |_, _| Ok(()), &mut std::io::stdout(), options) + .unwrap(); + } + fn finish(self) -> Body<'tcx> { let mut body = Body::new( MirSource::item(self.def_id.to_def_id()), diff --git a/compiler/rustc_mir_build/src/check_tail_calls.rs b/compiler/rustc_mir_build/src/check_tail_calls.rs index d4b6da2c14b..e0cbe8519ed 100644 --- a/compiler/rustc_mir_build/src/check_tail_calls.rs +++ b/compiler/rustc_mir_build/src/check_tail_calls.rs @@ -135,30 +135,23 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { self.report_abi_mismatch(expr.span, caller_sig.abi, callee_sig.abi); } + // FIXME(explicit_tail_calls): this currently fails for cases where opaques are used. + // e.g. + // ``` + // fn a() -> impl Sized { become b() } // ICE + // fn b() -> u8 { 0 } + // ``` + // we should think what is the expected behavior here. + // (we should probably just accept this by revealing opaques?) 
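The `check_tail_calls` change around this FIXME replaces the separate argument and return-type handling with one comparison of `inputs_and_output` (the helper is renamed to `report_signature_mismatch` below). An illustrative nightly-only sketch of the kind of program that reaches this path, assuming the `explicit_tail_calls` feature gate and `become` syntax as currently implemented; it is expected to be rejected, not to compile:

#![feature(explicit_tail_calls)]
#![allow(incomplete_features)]

fn callee(x: u32) -> u32 {
    x
}

// Caller and callee differ in their argument types, so the `become` tail call
// is rejected with the unified signature-mismatch diagnostic.
fn caller(x: u64) -> u32 {
    become callee(x as u32)
}

fn main() {
    let _ = caller(1);
}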
if caller_sig.inputs_and_output != callee_sig.inputs_and_output { - if caller_sig.inputs() != callee_sig.inputs() { - self.report_arguments_mismatch( - expr.span, - self.tcx.liberate_late_bound_regions( - CRATE_DEF_ID.to_def_id(), - self.caller_ty.fn_sig(self.tcx), - ), - self.tcx - .liberate_late_bound_regions(CRATE_DEF_ID.to_def_id(), ty.fn_sig(self.tcx)), - ); - } - - // FIXME(explicit_tail_calls): this currently fails for cases where opaques are used. - // e.g. - // ``` - // fn a() -> impl Sized { become b() } // ICE - // fn b() -> u8 { 0 } - // ``` - // we should think what is the expected behavior here. - // (we should probably just accept this by revealing opaques?) - if caller_sig.output() != callee_sig.output() { - span_bug!(expr.span, "hir typeck should have checked the return type already"); - } + self.report_signature_mismatch( + expr.span, + self.tcx.liberate_late_bound_regions( + CRATE_DEF_ID.to_def_id(), + self.caller_ty.fn_sig(self.tcx), + ), + self.tcx.liberate_late_bound_regions(CRATE_DEF_ID.to_def_id(), ty.fn_sig(self.tcx)), + ); } { @@ -365,7 +358,7 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { self.found_errors = Err(err); } - fn report_arguments_mismatch( + fn report_signature_mismatch( &mut self, sp: Span, caller_sig: ty::FnSig<'_>, diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index cdab785e842..b5e165c7517 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -8,7 +8,7 @@ use rustc_errors::DiagArgValue; use rustc_hir::attrs::AttributeKind; use rustc_hir::def::DefKind; use rustc_hir::{self as hir, BindingMode, ByRef, HirId, Mutability, find_attr}; -use rustc_middle::middle::codegen_fn_attrs::TargetFeature; +use rustc_middle::middle::codegen_fn_attrs::{TargetFeature, TargetFeatureKind}; use rustc_middle::mir::BorrowKind; use rustc_middle::span_bug; use rustc_middle::thir::visit::Visitor; @@ -522,7 +522,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { .iter() .copied() .filter(|feature| { - !feature.implied + feature.kind == TargetFeatureKind::Enabled && !self .body_target_features .iter() diff --git a/compiler/rustc_mir_dataflow/Cargo.toml b/compiler/rustc_mir_dataflow/Cargo.toml index 293bcbef21b..9621f9f20bd 100644 --- a/compiler/rustc_mir_dataflow/Cargo.toml +++ b/compiler/rustc_mir_dataflow/Cargo.toml @@ -13,7 +13,6 @@ rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_graphviz = { path = "../rustc_graphviz" } -rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs index f86a15a8f92..372a3f3a8b8 100644 --- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs +++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs @@ -8,7 +8,6 @@ use std::sync::OnceLock; use std::{io, ops, str}; use regex::Regex; -use rustc_hir::def_id::DefId; use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{ self, BasicBlock, Body, Location, create_dump_file, dump_enabled, graphviz_safe_def_name, @@ -16,6 +15,7 @@ use rustc_middle::mir::{ }; use rustc_middle::ty::TyCtxt; use rustc_middle::ty::print::with_no_trimmed_paths; +use rustc_span::def_id::DefId; use rustc_span::{Symbol, sym}; use 
tracing::debug; use {rustc_ast as ast, rustc_graphviz as dot}; diff --git a/compiler/rustc_mir_dataflow/src/move_paths/abs_domain.rs b/compiler/rustc_mir_dataflow/src/move_paths/abs_domain.rs deleted file mode 100644 index d056ad3d4b4..00000000000 --- a/compiler/rustc_mir_dataflow/src/move_paths/abs_domain.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! The move-analysis portion of borrowck needs to work in an abstract -//! domain of lifted `Place`s. Most of the `Place` variants fall into a -//! one-to-one mapping between the concrete and abstract (e.g., a -//! field-deref on a local variable, `x.field`, has the same meaning -//! in both domains). Indexed projections are the exception: `a[x]` -//! needs to be treated as mapping to the same move path as `a[y]` as -//! well as `a[13]`, etc. So we map these `x`/`y` values to `()`. -//! -//! (In theory, the analysis could be extended to work with sets of -//! paths, so that `a[0]` and `a[13]` could be kept distinct, while -//! `a[x]` would still overlap them both. But that is not this -//! representation does today.) - -use rustc_middle::mir::{PlaceElem, ProjectionElem, ProjectionKind}; - -pub(crate) trait Lift { - fn lift(&self) -> ProjectionKind; -} - -impl<'tcx> Lift for PlaceElem<'tcx> { - fn lift(&self) -> ProjectionKind { - match *self { - ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Field(f, _ty) => ProjectionElem::Field(f, ()), - ProjectionElem::OpaqueCast(_ty) => ProjectionElem::OpaqueCast(()), - ProjectionElem::Index(_i) => ProjectionElem::Index(()), - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Downcast(a, u) => ProjectionElem::Downcast(a, u), - ProjectionElem::Subtype(_ty) => ProjectionElem::Subtype(()), - ProjectionElem::UnwrapUnsafeBinder(_ty) => ProjectionElem::UnwrapUnsafeBinder(()), - } - } -} diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index 8bbc89fdcec..48718cad597 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -7,7 +7,6 @@ use rustc_middle::{bug, span_bug}; use smallvec::{SmallVec, smallvec}; use tracing::debug; -use super::abs_domain::Lift; use super::{ Init, InitIndex, InitKind, InitLocation, LocationMap, LookupResult, MoveData, MoveOut, MoveOutIndex, MovePath, MovePathIndex, MovePathLookup, @@ -241,7 +240,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { if union_path.is_none() { // inlined from add_move_path because of a borrowck conflict with the iterator base = - *data.rev_lookup.projections.entry((base, elem.lift())).or_insert_with(|| { + *data.rev_lookup.projections.entry((base, elem.kind())).or_insert_with(|| { new_move_path( &mut data.move_paths, &mut data.path_map, @@ -272,7 +271,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { tcx, .. 
} = self; - *rev_lookup.projections.entry((base, elem.lift())).or_insert_with(move || { + *rev_lookup.projections.entry((base, elem.kind())).or_insert_with(move || { new_move_path(move_paths, path_map, init_path_map, Some(base), mk_place(*tcx)) }) } diff --git a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs index 18985ba0da2..466416d63f5 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs @@ -1,3 +1,15 @@ +//! The move-analysis portion of borrowck needs to work in an abstract domain of lifted `Place`s. +//! Most of the `Place` variants fall into a one-to-one mapping between the concrete and abstract +//! (e.g., a field projection on a local variable, `x.field`, has the same meaning in both +//! domains). In other words, all field projections for the same field on the same local do not +//! have meaningfully different types if ever. Indexed projections are the exception: `a[x]` needs +//! to be treated as mapping to the same move path as `a[y]` as well as `a[13]`, etc. So we map +//! these `x`/`y` values to `()`. +//! +//! (In theory, the analysis could be extended to work with sets of paths, so that `a[0]` and +//! `a[13]` could be kept distinct, while `a[x]` would still overlap them both. But that is not +//! what this representation does today.) + use std::fmt; use std::ops::{Index, IndexMut}; @@ -8,11 +20,8 @@ use rustc_middle::ty::{Ty, TyCtxt}; use rustc_span::Span; use smallvec::SmallVec; -use self::abs_domain::Lift; use crate::un_derefer::UnDerefer; -mod abs_domain; - rustc_index::newtype_index! { #[orderable] #[debug_format = "mp{}"] @@ -324,7 +333,7 @@ impl<'tcx> MovePathLookup<'tcx> { }; for (_, elem) in self.un_derefer.iter_projections(place) { - if let Some(&subpath) = self.projections.get(&(result, elem.lift())) { + if let Some(&subpath) = self.projections.get(&(result, elem.kind())) { result = subpath; } else { return LookupResult::Parent(Some(result)); diff --git a/compiler/rustc_mir_dataflow/src/rustc_peek.rs b/compiler/rustc_mir_dataflow/src/rustc_peek.rs index 1682f332857..a899ec1fa88 100644 --- a/compiler/rustc_mir_dataflow/src/rustc_peek.rs +++ b/compiler/rustc_mir_dataflow/src/rustc_peek.rs @@ -1,7 +1,7 @@ use rustc_ast::MetaItem; -use rustc_hir::def_id::DefId; use rustc_middle::mir::{self, Body, Local, Location}; use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::def_id::DefId; use rustc_span::{Span, Symbol, sym}; use tracing::{debug, info}; diff --git a/compiler/rustc_mir_transform/src/check_enums.rs b/compiler/rustc_mir_transform/src/check_enums.rs index 33a87cb9873..12447dc7cbb 100644 --- a/compiler/rustc_mir_transform/src/check_enums.rs +++ b/compiler/rustc_mir_transform/src/check_enums.rs @@ -48,6 +48,21 @@ impl<'tcx> crate::MirPass<'tcx> for CheckEnums { let new_block = split_block(basic_blocks, location); match check { + EnumCheckType::Direct { op_size, .. } + | EnumCheckType::WithNiche { op_size, .. } + if op_size.bytes() == 0 => + { + // It is never valid to use a ZST as a discriminant for an inhabited enum, but that will + // have been caught by the type checker. Do nothing but ensure that a bug has been signaled. 
+ tcx.dcx().span_delayed_bug( + source_info.span, + "cannot build enum discriminant from zero-sized type", + ); + basic_blocks[block].terminator = Some(Terminator { + source_info, + kind: TerminatorKind::Goto { target: new_block }, + }); + } EnumCheckType::Direct { source_op, discr, op_size, valid_discrs } => { insert_direct_enum_check( tcx, diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 5568d42ab8f..879a20e771d 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -16,7 +16,6 @@ use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; mod balanced_flow; pub(crate) mod node_flow; -mod union_find; /// Struct containing the results of [`prepare_bcb_counters_data`]. pub(crate) struct BcbCountersData { diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs index 91ed54b8b59..e063f75887b 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs @@ -7,13 +7,12 @@ //! (Knuth & Stevenson, 1973). use rustc_data_structures::graph; +use rustc_data_structures::union_find::UnionFind; use rustc_index::bit_set::DenseBitSet; use rustc_index::{Idx, IndexSlice, IndexVec}; pub(crate) use rustc_middle::mir::coverage::NodeFlowData; use rustc_middle::mir::coverage::Op; -use crate::coverage::counters::union_find::UnionFind; - #[cfg(test)] mod tests; diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index 952da2cdf72..5a13394543b 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -447,26 +447,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { Projection(base, elem) => { let base = self.evaluated[base].as_ref()?; - let elem = match elem { - ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Downcast(name, read_variant) => { - ProjectionElem::Downcast(name, read_variant) - } - ProjectionElem::Field(f, ()) => ProjectionElem::Field(f, ty.ty), - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::OpaqueCast(()) => ProjectionElem::OpaqueCast(ty.ty), - ProjectionElem::Subtype(()) => ProjectionElem::Subtype(ty.ty), - ProjectionElem::UnwrapUnsafeBinder(()) => { - ProjectionElem::UnwrapUnsafeBinder(ty.ty) - } - // This should have been replaced by a `ConstantIndex` earlier. - ProjectionElem::Index(_) => return None, - }; + // `Index` by constants should have been replaced by `ConstantIndex` by + // `simplify_place_projection`. + let elem = elem.try_map(|_| None, |()| ty.ty)?; self.ecx.project(base, elem).discard_err()? } Address { place, kind: _, provenance: _ } => { @@ -476,13 +459,11 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { let local = self.locals[place.local]?; let pointer = self.evaluated[local].as_ref()?; let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?; - for proj in place.projection.iter().skip(1) { - // We have no call stack to associate a local with a value, so we cannot - // interpret indexing. 
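The GVN hunk here replaces two hand-written matches over `ProjectionElem` with the `try_map`/`kind` helpers introduced earlier in this patch. A simplified standalone model of that pattern, using a three-variant stand-in enum rather than the compiler's type, showing how mapping the value and type payloads independently also yields `kind()` for free:

// Stand-in for `ProjectionElem<V, T>`: `V` is the "value" payload (e.g. an
// index local) and `T` is the "type" payload.
#[derive(Debug, PartialEq)]
enum Proj<V, T> {
    Deref,
    Field(u32, T),
    Index(V),
}

impl<V, T> Proj<V, T> {
    // Map both payloads; return `None` if the value payload cannot be mapped,
    // mirroring GVN's `elem.try_map(|_| None, |()| ty.ty)` for unsupported `Index`.
    fn try_map<V2, T2>(
        self,
        v: impl FnOnce(V) -> Option<V2>,
        t: impl FnOnce(T) -> T2,
    ) -> Option<Proj<V2, T2>> {
        Some(match self {
            Proj::Deref => Proj::Deref,
            Proj::Field(f, ty) => Proj::Field(f, t(ty)),
            Proj::Index(val) => Proj::Index(v(val)?),
        })
    }

    // The "lifted" shape with both payloads erased, like `ProjectionElem::kind`.
    fn kind(self) -> Proj<(), ()> {
        self.try_map(|_| Some(()), |_| ()).unwrap()
    }
}

fn main() {
    let elem: Proj<&str, &str> = Proj::Field(0, "u32");
    assert_eq!(elem.kind(), Proj::Field(0, ()));
    // An indexing projection is dropped when the value payload maps to `None`.
    let idx: Proj<&str, &str> = Proj::Index("_3");
    assert_eq!(idx.try_map(|_| None::<()>, |t| t), None);
}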
- if matches!(proj, ProjectionElem::Index(_)) { - return None; - } - mplace = self.ecx.project(&mplace, proj).discard_err()?; + for elem in place.projection.iter().skip(1) { + // `Index` by constants should have been replaced by `ConstantIndex` by + // `simplify_place_projection`. + let elem = elem.try_map(|_| None, |ty| ty)?; + mplace = self.ecx.project(&mplace, elem).discard_err()?; } let pointer = mplace.to_ref(&self.ecx); ImmTy::from_immediate(pointer, ty).into() @@ -902,27 +883,14 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { proj: ProjectionElem<VnIndex, ()>, loc: Location, ) -> Option<PlaceElem<'tcx>> { - Some(match proj { - ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Field(idx, ()) => ProjectionElem::Field(idx, ty), - ProjectionElem::Index(idx) => { - let Some(local) = self.try_as_local(idx, loc) else { - return None; - }; + proj.try_map( + |value| { + let local = self.try_as_local(value, loc)?; self.reused_locals.insert(local); - ProjectionElem::Index(local) - } - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::Downcast(symbol, idx) => ProjectionElem::Downcast(symbol, idx), - ProjectionElem::OpaqueCast(()) => ProjectionElem::OpaqueCast(ty), - ProjectionElem::Subtype(()) => ProjectionElem::Subtype(ty), - ProjectionElem::UnwrapUnsafeBinder(()) => ProjectionElem::UnwrapUnsafeBinder(ty), - }) + Some(local) + }, + |()| ty, + ) } fn simplify_aggregate_to_copy( diff --git a/compiler/rustc_mir_transform/src/ref_prop.rs b/compiler/rustc_mir_transform/src/ref_prop.rs index d1c2d6b508f..6f61215cee2 100644 --- a/compiler/rustc_mir_transform/src/ref_prop.rs +++ b/compiler/rustc_mir_transform/src/ref_prop.rs @@ -79,6 +79,7 @@ impl<'tcx> crate::MirPass<'tcx> for ReferencePropagation { #[instrument(level = "trace", skip(self, tcx, body))] fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { debug!(def_id = ?body.source.def_id()); + move_to_copy_pointers(tcx, body); while propagate_ssa(tcx, body) {} } @@ -87,11 +88,43 @@ impl<'tcx> crate::MirPass<'tcx> for ReferencePropagation { } } +/// The SSA analysis done by [`SsaLocals`] treats [`Operand::Move`] as a read, even though in +/// general [`Operand::Move`] represents pass-by-pointer where the callee can overwrite the +/// pointee (Miri always considers the place deinitialized). CopyProp has a similar trick to +/// turn [`Operand::Move`] into [`Operand::Copy`] when required for an optimization, but in this +/// pass we just turn all moves of pointers into copies because pointers should be by-value anyway. 
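The doc comment above describes rewriting `Operand::Move` of pointer-typed places into `Operand::Copy`, so the SSA analysis does not treat the move as clobbering the pointee. A tiny standalone model of that rewrite with simplified operand and place representations, not the compiler's MIR types:

#[derive(Debug, PartialEq, Clone, Copy)]
enum Operand {
    Copy(&'static str),
    Move(&'static str),
}

// Rewrite moves of pointer-typed places into copies; copying the pointer value
// itself is harmless, the point is that the place is no longer considered
// deinitialized afterwards.
fn move_to_copy(op: &mut Operand, place_is_pointer: impl Fn(&str) -> bool) {
    if let Operand::Move(place) = *op {
        if place_is_pointer(place) {
            *op = Operand::Copy(place);
        }
    }
}

fn main() {
    let mut op = Operand::Move("_3"); // pretend `_3: &T`
    move_to_copy(&mut op, |_| true);
    assert_eq!(op, Operand::Copy("_3"));
}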
+fn move_to_copy_pointers<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let mut visitor = MoveToCopyVisitor { tcx, local_decls: &body.local_decls }; + for (bb, data) in body.basic_blocks.as_mut_preserves_cfg().iter_enumerated_mut() { + visitor.visit_basic_block_data(bb, data); + } + + struct MoveToCopyVisitor<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + local_decls: &'a IndexVec<Local, LocalDecl<'tcx>>, + } + + impl<'a, 'tcx> MutVisitor<'tcx> for MoveToCopyVisitor<'a, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_operand(&mut self, operand: &mut Operand<'tcx>, loc: Location) { + if let Operand::Move(place) = *operand { + if place.ty(self.local_decls, self.tcx).ty.is_any_ptr() { + *operand = Operand::Copy(place); + } + } + self.super_operand(operand, loc); + } + } +} + fn propagate_ssa<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool { let typing_env = body.typing_env(tcx); let ssa = SsaLocals::new(tcx, body, typing_env); - let mut replacer = compute_replacement(tcx, body, &ssa); + let mut replacer = compute_replacement(tcx, body, ssa); debug!(?replacer.targets); debug!(?replacer.allowed_replacements); debug!(?replacer.storage_to_remove); @@ -119,7 +152,7 @@ enum Value<'tcx> { fn compute_replacement<'tcx>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, - ssa: &SsaLocals, + ssa: SsaLocals, ) -> Replacer<'tcx> { let always_live_locals = always_storage_live_locals(body); @@ -138,7 +171,7 @@ fn compute_replacement<'tcx>( // reborrowed references. let mut storage_to_remove = DenseBitSet::new_empty(body.local_decls.len()); - let fully_replaceable_locals = fully_replaceable_locals(ssa); + let fully_replaceable_locals = fully_replaceable_locals(&ssa); // Returns true iff we can use `place` as a pointee. // diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index 1bc35e599c7..da05c49756f 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -25,12 +25,8 @@ enum CanonicalizeInputKind { /// trait candidates relies on it when deciding whether a where-bound /// is trivial. ParamEnv, - /// When canonicalizing predicates, we don't keep `'static`. If we're - /// currently outside of the trait solver and canonicalize the root goal - /// during HIR typeck, we replace each occurance of a region with a - /// unique region variable. See the comment on `InferCtxt::in_hir_typeck` - /// for more details. - Predicate { is_hir_typeck_root_goal: bool }, + /// When canonicalizing predicates, we don't keep `'static`. + Predicate, } /// Whether we're canonicalizing a query input or the query response. @@ -191,7 +187,6 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> { pub fn canonicalize_input<P: TypeFoldable<I>>( delegate: &'a D, variables: &'a mut Vec<I::GenericArg>, - is_hir_typeck_root_goal: bool, input: QueryInput<I, P>, ) -> ty::Canonical<I, QueryInput<I, P>> { // First canonicalize the `param_env` while keeping `'static` @@ -201,9 +196,7 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> { // while *mostly* reusing the canonicalizer from above. 
let mut rest_canonicalizer = Canonicalizer { delegate, - canonicalize_mode: CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { - is_hir_typeck_root_goal, - }), + canonicalize_mode: CanonicalizeMode::Input(CanonicalizeInputKind::Predicate), variables, variable_lookup_table, @@ -481,31 +474,13 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz } }; - let var = if let CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { - is_hir_typeck_root_goal: true, - }) = self.canonicalize_mode - { - let var = ty::BoundVar::from(self.variables.len()); - self.variables.push(r.into()); - self.var_kinds.push(kind); - var - } else { - self.get_or_insert_bound_var(r, kind) - }; + let var = self.get_or_insert_bound_var(r, kind); Region::new_anon_bound(self.cx(), self.binder_index, var) } fn fold_ty(&mut self, t: I::Ty) -> I::Ty { - if let CanonicalizeMode::Input(CanonicalizeInputKind::Predicate { - is_hir_typeck_root_goal: true, - }) = self.canonicalize_mode - { - // If we're canonicalizing a root goal during HIR typeck, we - // must not use the `cache` as we want to map each occurrence - // of a region to a unique existential variable. - self.inner_fold_ty(t) - } else if let Some(&ty) = self.cache.get(&(self.binder_index, t)) { + if let Some(&ty) = self.cache.get(&(self.binder_index, t)) { ty } else { let res = self.inner_fold_ty(t); diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index faa86734d08..d25e74e7335 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -75,17 +75,10 @@ where Ok(ty::Binder::dummy(vec![args.as_coroutine_closure().tupled_upvars_ty()])) } - ty::Coroutine(def_id, args) => { - let coroutine_args = args.as_coroutine(); - Ok(ty::Binder::dummy(vec![ - coroutine_args.tupled_upvars_ty(), - Ty::new_coroutine_witness( - ecx.cx(), - def_id, - ecx.cx().mk_args(coroutine_args.parent_args().as_slice()), - ), - ])) - } + ty::Coroutine(def_id, args) => Ok(ty::Binder::dummy(vec![ + args.as_coroutine().tupled_upvars_ty(), + Ty::new_coroutine_witness_for_coroutine(ecx.cx(), def_id, args), + ])), ty::CoroutineWitness(def_id, args) => Ok(ecx .cx() @@ -251,14 +244,9 @@ where Movability::Static => Err(NoSolution), Movability::Movable => { if ecx.cx().features().coroutine_clone() { - let coroutine = args.as_coroutine(); Ok(ty::Binder::dummy(vec![ - coroutine.tupled_upvars_ty(), - Ty::new_coroutine_witness( - ecx.cx(), - def_id, - ecx.cx().mk_args(coroutine.parent_args().as_slice()), - ), + args.as_coroutine().tupled_upvars_ty(), + Ty::new_coroutine_witness_for_coroutine(ecx.cx(), def_id, args), ])) } else { Err(NoSolution) diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs index 9a22bf58c03..b7ae9994c62 100644 --- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs @@ -6,7 +6,7 @@ use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; use rustc_type_ir::solve::SizedTraitKind; use rustc_type_ir::solve::inspect::ProbeKind; -use rustc_type_ir::{self as ty, Interner, elaborate}; +use rustc_type_ir::{self as ty, Interner, TypingMode, elaborate}; use tracing::instrument; use super::assembly::{Candidate, structural_traits}; @@ -135,12 +135,16 @@ where } let 
impl_polarity = cx.impl_polarity(impl_def_id); - match impl_polarity { + let certainty = match impl_polarity { ty::ImplPolarity::Negative => return Err(NoSolution), - ty::ImplPolarity::Reservation => { - unimplemented!("reservation impl for const trait: {:?}", goal) - } - ty::ImplPolarity::Positive => {} + ty::ImplPolarity::Reservation => match ecx.typing_mode() { + TypingMode::Coherence => Certainty::AMBIGUOUS, + TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } + | TypingMode::PostBorrowckAnalysis { .. } + | TypingMode::PostAnalysis => return Err(NoSolution), + }, + ty::ImplPolarity::Positive => Certainty::Yes, }; if !cx.impl_is_const(impl_def_id) { @@ -171,7 +175,7 @@ where }); ecx.add_goals(GoalSource::ImplWhereBound, const_conditions); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.evaluate_added_goals_and_make_canonical_response(certainty) }) } diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs index 74c5b49ea92..4644b145b18 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs @@ -57,7 +57,6 @@ where /// This expects `goal` and `opaque_types` to be eager resolved. pub(super) fn canonicalize_goal( &self, - is_hir_typeck_root_goal: bool, goal: Goal<I, I::Predicate>, opaque_types: Vec<(ty::OpaqueTypeKey<I>, I::Ty)>, ) -> (Vec<I::GenericArg>, CanonicalInput<I, I::Predicate>) { @@ -65,7 +64,6 @@ where let canonical = Canonicalizer::canonicalize_input( self.delegate, &mut orig_values, - is_hir_typeck_root_goal, QueryInput { goal, predefined_opaques_in_body: self diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index a4738306181..4f87902e46e 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -4,7 +4,6 @@ use std::ops::ControlFlow; #[cfg(feature = "nightly")] use rustc_macros::HashStable_NoContext; use rustc_type_ir::data_structures::{HashMap, HashSet}; -use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; use rustc_type_ir::relate::Relate; use rustc_type_ir::relate::solver_relating::RelateExt; @@ -459,10 +458,7 @@ where let opaque_types = self.delegate.clone_opaque_types_lookup_table(); let (goal, opaque_types) = eager_resolve_vars(self.delegate, (goal, opaque_types)); - let is_hir_typeck_root_goal = matches!(goal_evaluation_kind, GoalEvaluationKind::Root) - && self.delegate.in_hir_typeck(); - let (orig_values, canonical_goal) = - self.canonicalize_goal(is_hir_typeck_root_goal, goal, opaque_types); + let (orig_values, canonical_goal) = self.canonicalize_goal(goal, opaque_types); let mut goal_evaluation = self.inspect.new_goal_evaluation(goal, &orig_values, goal_evaluation_kind); let canonical_result = self.search_graph.evaluate_goal( @@ -1131,6 +1127,7 @@ where self.delegate.fetch_eligible_assoc_item(goal_trait_ref, trait_assoc_def_id, impl_def_id) } + #[instrument(level = "debug", skip(self), ret)] pub(super) fn register_hidden_type_in_storage( &mut self, opaque_type_key: ty::OpaqueTypeKey<I>, @@ -1157,29 +1154,6 @@ where self.add_goals(GoalSource::AliasWellFormed, goals); } - // Do something for each opaque/hidden pair defined with `def_id` in the - // current inference context. 
- pub(super) fn probe_existing_opaque_ty( - &mut self, - key: ty::OpaqueTypeKey<I>, - ) -> Option<(ty::OpaqueTypeKey<I>, I::Ty)> { - // We shouldn't have any duplicate entries when using - // this function during `TypingMode::Analysis`. - let duplicate_entries = self.delegate.clone_duplicate_opaque_types(); - assert!(duplicate_entries.is_empty(), "unexpected duplicates: {duplicate_entries:?}"); - let mut matching = self.delegate.clone_opaque_types_lookup_table().into_iter().filter( - |(candidate_key, _)| { - candidate_key.def_id == key.def_id - && DeepRejectCtxt::relate_rigid_rigid(self.cx()) - .args_may_unify(candidate_key.args, key.args) - }, - ); - let first = matching.next(); - let second = matching.next(); - assert_eq!(second, None); - first - } - // Try to evaluate a const, or return `None` if the const is too generic. // This doesn't mean the const isn't evaluatable, though, and should be treated // as an ambiguity rather than no-solution. diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs index df3ad1e468b..a5f857a1dd8 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs @@ -1,13 +1,12 @@ //! Computes a normalizes-to (projection) goal for opaque types. This goal //! behaves differently depending on the current `TypingMode`. -use rustc_index::bit_set::GrowableBitSet; use rustc_type_ir::inherent::*; use rustc_type_ir::solve::GoalSource; use rustc_type_ir::{self as ty, Interner, TypingMode, fold_regions}; use crate::delegate::SolverDelegate; -use crate::solve::{Certainty, EvalCtxt, Goal, NoSolution, QueryResult, inspect}; +use crate::solve::{Certainty, EvalCtxt, Goal, QueryResult}; impl<D, I> EvalCtxt<'_, D> where @@ -39,100 +38,68 @@ where self.add_goal(GoalSource::Misc, goal.with(cx, ty::PredicateKind::Ambiguous)); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } - TypingMode::Analysis { defining_opaque_types_and_generators } => { + TypingMode::Analysis { + defining_opaque_types_and_generators: defining_opaque_types, + } + | TypingMode::Borrowck { defining_opaque_types } => { let Some(def_id) = opaque_ty .def_id .as_local() - .filter(|&def_id| defining_opaque_types_and_generators.contains(&def_id)) + .filter(|&def_id| defining_opaque_types.contains(&def_id)) else { + // If we're not in the defining scope, treat the alias as rigid. self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes); }; - // FIXME: This may have issues when the args contain aliases... - match uses_unique_placeholders_ignoring_regions(self.cx(), opaque_ty.args) { - Err(NotUniqueParam::NotParam(param)) if param.is_non_region_infer() => { - return self.evaluate_added_goals_and_make_canonical_response( - Certainty::AMBIGUOUS, - ); - } - Err(_) => { - return Err(NoSolution); - } - Ok(()) => {} - } - // Prefer opaques registered already. - let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args }; - // FIXME: This also unifies the previous hidden type with the expected. + // We structurally normalize the args so that we're able to detect defining uses + // later on. // - // If that fails, we insert `expected` as a new hidden type instead of - // eagerly emitting an error. 
- let existing = self.probe_existing_opaque_ty(opaque_type_key); - if let Some((candidate_key, candidate_ty)) = existing { - return self - .probe(|result| inspect::ProbeKind::OpaqueTypeStorageLookup { - result: *result, - }) - .enter(|ecx| { - for (a, b) in std::iter::zip( - candidate_key.args.iter(), - opaque_type_key.args.iter(), - ) { - ecx.eq(goal.param_env, a, b)?; - } - ecx.eq(goal.param_env, candidate_ty, expected)?; - ecx.add_item_bounds_for_hidden_type( - def_id.into(), - candidate_key.args, - goal.param_env, - candidate_ty, - ); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }); - } + // This reduces the amount of duplicate definitions in the `opaque_type_storage` and + // strengthens inference. This causes us to subtly depend on the normalization behavior + // when inferring the hidden type of opaques. + // + // E.g. it's observable that we don't normalize nested aliases with bound vars in + // `structurally_normalize` and because we use structural lookup, we also don't + // reuse an entry for `Tait<for<'a> fn(&'a ())>` for `Tait<for<'b> fn(&'b ())>`. + let normalized_args = + cx.mk_args_from_iter(opaque_ty.args.iter().map(|arg| match arg.kind() { + ty::GenericArgKind::Lifetime(lt) => Ok(lt.into()), + ty::GenericArgKind::Type(ty) => { + self.structurally_normalize_ty(goal.param_env, ty).map(Into::into) + } + ty::GenericArgKind::Const(ct) => { + self.structurally_normalize_const(goal.param_env, ct).map(Into::into) + } + }))?; - // Otherwise, define a new opaque type - let prev = self.register_hidden_type_in_storage(opaque_type_key, expected); - assert_eq!(prev, None); - self.add_item_bounds_for_hidden_type( - def_id.into(), - opaque_ty.args, - goal.param_env, - expected, - ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } - // Very similar to `TypingMode::Analysis` with some notably differences: - // - we accept opaque types even if they have non-universal arguments - // - we do a structural lookup instead of semantically unifying regions - // - the hidden type starts out as the type from HIR typeck with fresh region - // variables instead of a fully unconstrained inference variable - TypingMode::Borrowck { defining_opaque_types } => { - let Some(def_id) = opaque_ty - .def_id - .as_local() - .filter(|&def_id| defining_opaque_types.contains(&def_id)) - else { - self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); - return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes); - }; + let opaque_type_key = ty::OpaqueTypeKey { def_id, args: normalized_args }; + if let Some(prev) = self.register_hidden_type_in_storage(opaque_type_key, expected) + { + self.eq(goal.param_env, expected, prev)?; + } else { + // During HIR typeck, opaque types start out as unconstrained + // inference variables. In borrowck we instead use the type + // computed in HIR typeck as the initial value. + match self.typing_mode() { + TypingMode::Analysis { .. } => {} + TypingMode::Borrowck { .. 
} => { + let actual = cx + .type_of_opaque_hir_typeck(def_id) + .instantiate(cx, opaque_ty.args); + let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() { + ty::ReErased => self.next_region_var(), + _ => re, + }); + self.eq(goal.param_env, expected, actual)?; + } + _ => unreachable!(), + } + } - let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args }; - let actual = self - .register_hidden_type_in_storage(opaque_type_key, expected) - .unwrap_or_else(|| { - let actual = - cx.type_of_opaque_hir_typeck(def_id).instantiate(cx, opaque_ty.args); - let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() { - ty::ReErased => self.next_region_var(), - _ => re, - }); - actual - }); - self.eq(goal.param_env, expected, actual)?; self.add_item_bounds_for_hidden_type( def_id.into(), - opaque_ty.args, + normalized_args, goal.param_env, expected, ); @@ -168,44 +135,3 @@ where } } } - -/// Checks whether each generic argument is simply a unique generic placeholder. -/// -/// FIXME: Interner argument is needed to constrain the `I` parameter. -fn uses_unique_placeholders_ignoring_regions<I: Interner>( - _cx: I, - args: I::GenericArgs, -) -> Result<(), NotUniqueParam<I>> { - let mut seen = GrowableBitSet::default(); - for arg in args.iter() { - match arg.kind() { - // Ignore regions, since we can't resolve those in a canonicalized - // query in the trait solver. - ty::GenericArgKind::Lifetime(_) => {} - ty::GenericArgKind::Type(t) => match t.kind() { - ty::Placeholder(p) => { - if !seen.insert(p.var()) { - return Err(NotUniqueParam::DuplicateParam(t.into())); - } - } - _ => return Err(NotUniqueParam::NotParam(t.into())), - }, - ty::GenericArgKind::Const(c) => match c.kind() { - ty::ConstKind::Placeholder(p) => { - if !seen.insert(p.var()) { - return Err(NotUniqueParam::DuplicateParam(c.into())); - } - } - _ => return Err(NotUniqueParam::NotParam(c.into())), - }, - } - } - - Ok(()) -} - -// FIXME: This should check for dupes and non-params first, then infer vars. -enum NotUniqueParam<I: Interner> { - DuplicateParam(I::GenericArg), - NotParam(I::GenericArg), -} diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index 04ede365a21..891ecab041a 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -6,8 +6,8 @@ use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; use rustc_type_ir::solve::{CanonicalResponse, SizedTraitKind}; use rustc_type_ir::{ - self as ty, Interner, Movability, TraitPredicate, TraitRef, TypeVisitableExt as _, TypingMode, - Upcast as _, elaborate, + self as ty, Interner, Movability, PredicatePolarity, TraitPredicate, TraitRef, + TypeVisitableExt as _, TypingMode, Upcast as _, elaborate, }; use tracing::{debug, instrument, trace}; @@ -133,19 +133,26 @@ where cx: I, clause_def_id: I::DefId, goal_def_id: I::DefId, + polarity: PredicatePolarity, ) -> bool { clause_def_id == goal_def_id // PERF(sized-hierarchy): Sizedness supertraits aren't elaborated to improve perf, so // check for a `MetaSized` supertrait being matched against a `Sized` assumption. // // `PointeeSized` bounds are syntactic sugar for a lack of bounds so don't need this. 
- || (cx.is_lang_item(clause_def_id, TraitSolverLangItem::Sized) + || (polarity == PredicatePolarity::Positive + && cx.is_lang_item(clause_def_id, TraitSolverLangItem::Sized) && cx.is_lang_item(goal_def_id, TraitSolverLangItem::MetaSized)) } if let Some(trait_clause) = assumption.as_trait_clause() && trait_clause.polarity() == goal.predicate.polarity - && trait_def_id_matches(ecx.cx(), trait_clause.def_id(), goal.predicate.def_id()) + && trait_def_id_matches( + ecx.cx(), + trait_clause.def_id(), + goal.predicate.def_id(), + goal.predicate.polarity, + ) && DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( goal.predicate.trait_ref.args, trait_clause.skip_binder().trait_ref.args, @@ -168,6 +175,8 @@ where // PERF(sized-hierarchy): Sizedness supertraits aren't elaborated to improve perf, so // check for a `Sized` subtrait when looking for `MetaSized`. `PointeeSized` bounds // are syntactic sugar for a lack of bounds so don't need this. + // We don't need to check polarity, `fast_reject_assumption` already rejected non-`Positive` + // polarity `Sized` assumptions as matching non-`Positive` `MetaSized` goals. if ecx.cx().is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::MetaSized) && ecx.cx().is_lang_item(trait_clause.def_id(), TraitSolverLangItem::Sized) { diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml index 0ae0b613fa2..6d738a10371 100644 --- a/compiler/rustc_parse/Cargo.toml +++ b/compiler/rustc_parse/Cargo.toml @@ -9,7 +9,6 @@ bitflags = "2.4.1" rustc-literal-escaper = "0.0.5" rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } -rustc_attr_parsing = { path = "../rustc_attr_parsing" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 9e0075c21b9..4ca2f57bd87 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -359,6 +359,20 @@ parse_generics_in_path = unexpected generic arguments in path parse_help_set_edition_cargo = set `edition = "{$edition}"` in `Cargo.toml` parse_help_set_edition_standalone = pass `--edition {$edition}` to `rustc` + +parse_hidden_unicode_codepoints = unicode codepoint changing visible direction of text present in {$label} + .label = this {$label} contains {$count -> + [one] an invisible + *[other] invisible + } unicode text flow control {$count -> + [one] codepoint + *[other] codepoints + } + .note = these kind of unicode codepoints change the way text flows on applications that support them, but can cause confusion because they change the order of characters on the screen + .suggestion_remove = if their presence wasn't intentional, you can remove them + .suggestion_escape = if you want to keep them but make them visible in your source code, you can escape them + .no_suggestion_note_escape = if you want to keep them but make them visible in your source code, you can escape them: {$escaped} + parse_if_expression_missing_condition = missing condition for `if` expression .condition_label = expected condition here .block_label = if this block is the condition of the `if` expression, then it must be followed by another block @@ -436,11 +450,6 @@ parse_inner_doc_comment_not_permitted = expected outer doc comment .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item} .sugg_change_inner_to_outer = to annotate the {$item}, change the doc 
comment from inner to outer style -parse_invalid_attr_unsafe = `{$name}` is not an unsafe attribute - .label = this is not an unsafe attribute - .suggestion = remove the `unsafe(...)` - .note = extraneous unsafe is not allowed in attributes - parse_invalid_block_macro_segment = cannot use a `block` macro fragment here .label = the `block` fragment is within this context .suggestion = wrap this in another block @@ -468,9 +477,6 @@ parse_invalid_dyn_keyword = invalid `dyn` keyword parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else` parse_invalid_identifier_with_leading_number = identifiers cannot start with a number -parse_invalid_label = - invalid label name `{$name}` - parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid .label = invalid suffix `{$suffix}` .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases @@ -500,6 +506,8 @@ parse_invalid_unicode_escape = invalid unicode character escape parse_invalid_variable_declaration = invalid variable declaration +parse_keyword_label = labels cannot use keyword names + parse_keyword_lifetime = lifetimes cannot use keyword names @@ -601,7 +609,6 @@ parse_maybe_report_ambiguous_plus = ambiguous `+` in a type .suggestion = use parentheses to disambiguate -parse_meta_bad_delim = wrong meta list delimiters parse_meta_bad_delim_suggestion = the delimiters should be `(` and `)` parse_mismatched_closing_delimiter = mismatched closing delimiter: `{$delimiter}` @@ -748,9 +755,6 @@ parse_parentheses_with_struct_fields = invalid `struct` delimiters or `fn` call .suggestion_braces_for_struct = if `{$type}` is a struct, use braces as delimiters .suggestion_no_fields_for_fn = if `{$type}` is a function, use the arguments directly -parse_parenthesized_lifetime = parenthesized lifetime bounds are not supported -parse_parenthesized_lifetime_suggestion = remove the parentheses - parse_path_double_colon = path separator must be a double colon .suggestion = use a double colon instead @@ -993,10 +997,6 @@ parse_unmatched_angle_brackets = {$num_extra_brackets -> *[other] remove extra angle brackets } -parse_unsafe_attr_outside_unsafe = unsafe attribute used without unsafe - .label = usage of unsafe attribute -parse_unsafe_attr_outside_unsafe_suggestion = wrap the attribute in `unsafe(...)` - parse_unskipped_whitespace = whitespace symbol '{$ch}' is not skipped .label = {parse_unskipped_whitespace} diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index a105dd1909e..797d4830c2f 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -8,8 +8,9 @@ use rustc_ast::{Path, Visibility}; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic, + SuggestionStyle, }; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::edition::{Edition, LATEST_STABLE_EDITION}; use rustc_span::{Ident, Span, Symbol}; @@ -2228,11 +2229,10 @@ pub(crate) struct KeywordLifetime { } #[derive(Diagnostic)] -#[diag(parse_invalid_label)] -pub(crate) struct InvalidLabel { +#[diag(parse_keyword_label)] +pub(crate) struct KeywordLabel { #[primary_span] pub span: Span, - pub name: Symbol, } #[derive(Diagnostic)] @@ -3163,27 +3163,6 @@ pub(crate) struct 
ModifierLifetime { pub modifier: &'static str, } -#[derive(Subdiagnostic)] -#[multipart_suggestion( - parse_parenthesized_lifetime_suggestion, - applicability = "machine-applicable" -)] -pub(crate) struct RemoveParens { - #[suggestion_part(code = "")] - pub lo: Span, - #[suggestion_part(code = "")] - pub hi: Span, -} - -#[derive(Diagnostic)] -#[diag(parse_parenthesized_lifetime)] -pub(crate) struct ParenthesizedLifetime { - #[primary_span] - pub span: Span, - #[subdiagnostic] - pub sugg: RemoveParens, -} - #[derive(Diagnostic)] #[diag(parse_underscore_literal_suffix)] pub(crate) struct UnderscoreLiteralSuffix { @@ -3367,15 +3346,6 @@ pub(crate) struct KwBadCase<'a> { } #[derive(Diagnostic)] -#[diag(parse_meta_bad_delim)] -pub(crate) struct MetaBadDelim { - #[primary_span] - pub span: Span, - #[subdiagnostic] - pub sugg: MetaBadDelimSugg, -} - -#[derive(Diagnostic)] #[diag(parse_cfg_attr_bad_delim)] pub(crate) struct CfgAttrBadDelim { #[primary_span] @@ -3515,38 +3485,6 @@ pub(crate) struct DotDotRangeAttribute { } #[derive(Diagnostic)] -#[diag(parse_invalid_attr_unsafe)] -#[note] -pub(crate) struct InvalidAttrUnsafe { - #[primary_span] - #[label] - pub span: Span, - pub name: Path, -} - -#[derive(Diagnostic)] -#[diag(parse_unsafe_attr_outside_unsafe)] -pub(crate) struct UnsafeAttrOutsideUnsafe { - #[primary_span] - #[label] - pub span: Span, - #[subdiagnostic] - pub suggestion: UnsafeAttrOutsideUnsafeSuggestion, -} - -#[derive(Subdiagnostic)] -#[multipart_suggestion( - parse_unsafe_attr_outside_unsafe_suggestion, - applicability = "machine-applicable" -)] -pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion { - #[suggestion_part(code = "unsafe(")] - pub left: Span, - #[suggestion_part(code = ")")] - pub right: Span, -} - -#[derive(Diagnostic)] #[diag(parse_binder_before_modifiers)] pub(crate) struct BinderBeforeModifiers { #[primary_span] @@ -3664,3 +3602,76 @@ pub(crate) struct ExpectedRegisterClassOrExplicitRegister { #[primary_span] pub(crate) span: Span, } + +#[derive(LintDiagnostic)] +#[diag(parse_hidden_unicode_codepoints)] +#[note] +pub(crate) struct HiddenUnicodeCodepointsDiag { + pub label: String, + pub count: usize, + #[label] + pub span_label: Span, + #[subdiagnostic] + pub labels: Option<HiddenUnicodeCodepointsDiagLabels>, + #[subdiagnostic] + pub sub: HiddenUnicodeCodepointsDiagSub, +} + +pub(crate) struct HiddenUnicodeCodepointsDiagLabels { + pub spans: Vec<(char, Span)>, +} + +impl Subdiagnostic for HiddenUnicodeCodepointsDiagLabels { + fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { + for (c, span) in self.spans { + diag.span_label(span, format!("{c:?}")); + } + } +} + +pub(crate) enum HiddenUnicodeCodepointsDiagSub { + Escape { spans: Vec<(char, Span)> }, + NoEscape { spans: Vec<(char, Span)> }, +} + +// Used because of multiple multipart_suggestion and note +impl Subdiagnostic for HiddenUnicodeCodepointsDiagSub { + fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) { + match self { + HiddenUnicodeCodepointsDiagSub::Escape { spans } => { + diag.multipart_suggestion_with_style( + fluent::parse_suggestion_remove, + spans.iter().map(|(_, span)| (*span, "".to_string())).collect(), + Applicability::MachineApplicable, + SuggestionStyle::HideCodeAlways, + ); + diag.multipart_suggestion( + fluent::parse_suggestion_escape, + spans + .into_iter() + .map(|(c, span)| { + let c = format!("{c:?}"); + (span, c[1..c.len() - 1].to_string()) + }) + .collect(), + Applicability::MachineApplicable, + ); + } + HiddenUnicodeCodepointsDiagSub::NoEscape { 
spans } => { + // FIXME: in other suggestions we've reversed the inner spans of doc comments. We + // should do the same here to provide the same good suggestions as we do for + // literals above. + diag.arg( + "escaped", + spans + .into_iter() + .map(|(c, _)| format!("{c:?}")) + .collect::<Vec<String>>() + .join(", "), + ); + diag.note(fluent::parse_suggestion_remove); + diag.note(fluent::parse_no_suggestion_note_escape); + } + } + } +} diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 85af5a615ae..9792240a548 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -49,6 +49,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>( mut src: &'src str, mut start_pos: BytePos, override_span: Option<Span>, + frontmatter_allowed: FrontmatterAllowed, ) -> Result<TokenStream, Vec<Diag<'psess>>> { // Skip `#!`, if present. if let Some(shebang_len) = rustc_lexer::strip_shebang(src) { @@ -56,7 +57,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>( start_pos = start_pos + BytePos::from_usize(shebang_len); } - let cursor = Cursor::new(src, FrontmatterAllowed::Yes); + let cursor = Cursor::new(src, frontmatter_allowed); let mut lexer = Lexer { psess, start_pos, @@ -542,21 +543,21 @@ impl<'psess, 'src> Lexer<'psess, 'src> { }) .collect(); + let label = label.to_string(); let count = spans.len(); - let labels = point_at_inner_spans.then_some(spans.clone()); + let labels = point_at_inner_spans + .then_some(errors::HiddenUnicodeCodepointsDiagLabels { spans: spans.clone() }); + let sub = if point_at_inner_spans && !spans.is_empty() { + errors::HiddenUnicodeCodepointsDiagSub::Escape { spans } + } else { + errors::HiddenUnicodeCodepointsDiagSub::NoEscape { spans } + }; self.psess.buffer_lint( TEXT_DIRECTION_CODEPOINT_IN_LITERAL, span, ast::CRATE_NODE_ID, - BuiltinLintDiag::HiddenUnicodeCodepoints { - label: label.to_string(), - count, - span_label: span, - labels, - escape: point_at_inner_spans && !spans.is_empty(), - spans, - }, + errors::HiddenUnicodeCodepointsDiag { label, count, span_label: span, labels, sub }, ); } diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 2050c5f9608..48289b2e8ab 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -16,10 +16,11 @@ use std::str::Utf8Error; use std::sync::Arc; use rustc_ast as ast; -use rustc_ast::tokenstream::TokenStream; +use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{AttrItem, Attribute, MetaItemInner, token}; use rustc_ast_pretty::pprust; use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize}; +use rustc_lexer::FrontmatterAllowed; use rustc_session::parse::ParseSess; use rustc_span::source_map::SourceMap; use rustc_span::{FileName, SourceFile, Span}; @@ -30,8 +31,9 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments"); #[macro_use] pub mod parser; use parser::Parser; +use rustc_ast::token::Delimiter; + pub mod lexer; -pub mod validate_attr; mod errors; @@ -146,7 +148,7 @@ fn new_parser_from_source_file( source_file: Arc<SourceFile>, ) -> Result<Parser<'_>, Vec<Diag<'_>>> { let end_pos = source_file.end_position(); - let stream = source_file_to_stream(psess, source_file, None)?; + let stream = source_file_to_stream(psess, source_file, None, FrontmatterAllowed::Yes)?; let mut parser = Parser::new(psess, stream, None); if parser.token == token::Eof { parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None); @@ -161,7 +163,9 @@ pub fn 
source_str_to_stream( override_span: Option<Span>, ) -> Result<TokenStream, Vec<Diag<'_>>> { let source_file = psess.source_map().new_source_file(name, source); - source_file_to_stream(psess, source_file, override_span) + // used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse + // frontmatters as frontmatters. + source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No) } /// Given a source file, produces a sequence of token trees. Returns any buffered errors from @@ -170,6 +174,7 @@ fn source_file_to_stream<'psess>( psess: &'psess ParseSess, source_file: Arc<SourceFile>, override_span: Option<Span>, + frontmatter_allowed: FrontmatterAllowed, ) -> Result<TokenStream, Vec<Diag<'psess>>> { let src = source_file.src.as_ref().unwrap_or_else(|| { psess.dcx().bug(format!( @@ -178,7 +183,13 @@ fn source_file_to_stream<'psess>( )); }); - lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span) + lexer::lex_token_trees( + psess, + src.as_str(), + source_file.start_pos, + override_span, + frontmatter_allowed, + ) } /// Runs the given subparser `f` on the tokens of the given `attr`'s item. @@ -225,7 +236,7 @@ pub fn parse_cfg_attr( ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens }) if !tokens.is_empty() => { - crate::validate_attr::check_cfg_attr_bad_delim(psess, dspan, delim); + check_cfg_attr_bad_delim(psess, dspan, delim); match parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) { Ok(r) => return Some(r), Err(e) => { @@ -244,3 +255,13 @@ pub fn parse_cfg_attr( } None } + +fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) { + if let Delimiter::Parenthesis = delim { + return; + } + psess.dcx().emit_err(errors::CfgAttrBadDelim { + span: span.entire(), + sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close }, + }); +} diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 7f6afeba28c..acd338156ce 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -399,7 +399,7 @@ impl<'a> Parser<'a> { } /// Matches `COMMASEP(meta_item_inner)`. - pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::MetaItemInner>> { + pub fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::MetaItemInner>> { // Presumably, the majority of the time there will only be one attr. let mut nmis = ThinVec::with_capacity(1); while self.token != token::Eof { diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 220e4ac18fc..a28af7833c3 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -463,8 +463,8 @@ impl<'a> Parser<'a> { pub(super) fn expected_one_of_not_found( &mut self, - edible: &[ExpTokenPair<'_>], - inedible: &[ExpTokenPair<'_>], + edible: &[ExpTokenPair], + inedible: &[ExpTokenPair], ) -> PResult<'a, ErrorGuaranteed> { debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible); fn tokens_to_string(tokens: &[TokenType]) -> String { @@ -1092,7 +1092,7 @@ impl<'a> Parser<'a> { /// Eats and discards tokens until one of `closes` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. 
- pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) { + pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair]) { if let Err(err) = self .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree())) { @@ -1113,7 +1113,7 @@ impl<'a> Parser<'a> { pub(super) fn check_trailing_angle_brackets( &mut self, segment: &PathSegment, - end: &[ExpTokenPair<'_>], + end: &[ExpTokenPair], ) -> Option<ErrorGuaranteed> { if !self.may_recover() { return None; @@ -1196,7 +1196,7 @@ impl<'a> Parser<'a> { // second case. if self.look_ahead(position, |t| { trace!("check_trailing_angle_brackets: t={:?}", t); - end.iter().any(|exp| exp.tok == &t.kind) + end.iter().any(|exp| exp.tok == t.kind) }) { // Eat from where we started until the end token so that parsing can continue // as if we didn't have those extra angle brackets. @@ -2120,8 +2120,8 @@ impl<'a> Parser<'a> { pub(super) fn recover_seq_parse_error( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, + open: ExpTokenPair, + close: ExpTokenPair, lo: Span, err: Diag<'a>, ) -> Box<Expr> { @@ -2386,8 +2386,8 @@ impl<'a> Parser<'a> { pub(super) fn consume_block( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, + open: ExpTokenPair, + close: ExpTokenPair, consume_close: ConsumeClosingDelim, ) { let mut brace_depth = 0; diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index d0604f76317..f0f58e901a9 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -1598,7 +1598,7 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr) } - fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, Box<Expr>> { + fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair) -> PResult<'a, Box<Expr>> { let lo = self.token.span; self.bump(); // `[` or other open delim @@ -2077,7 +2077,7 @@ impl<'a> Parser<'a> { (token::Lit { symbol: name, suffix: None, kind: token::Char }, span) } - fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit { + pub fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit { ast::MetaItemLit { symbol: name, suffix: None, @@ -2086,7 +2086,7 @@ impl<'a> Parser<'a> { } } - fn handle_missing_lit<L>( + pub fn handle_missing_lit<L>( &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { @@ -2156,7 +2156,7 @@ impl<'a> Parser<'a> { /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and /// `Lit::from_token` (excluding unary negation). 
- fn eat_token_lit(&mut self) -> Option<token::Lit> { + pub fn eat_token_lit(&mut self) -> Option<token::Lit> { let check_expr = |expr: Box<Expr>| { if let ast::ExprKind::Lit(token_lit) = expr.kind { Some(token_lit) @@ -2397,20 +2397,24 @@ impl<'a> Parser<'a> { let before = self.prev_token; let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; - let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; + let (bound_vars, _) = self.parse_higher_ranked_binder()?; let span = lo.to(self.prev_token.span); self.psess.gated_spans.gate(sym::closure_lifetime_binder, span); - ClosureBinder::For { span, generic_params: lifetime_defs } + ClosureBinder::For { span, generic_params: bound_vars } } else { ClosureBinder::NotPresent }; let constness = self.parse_closure_constness(); - let movability = - if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; + let movability = if self.eat_keyword(exp!(Static)) { + self.psess.gated_spans.gate(sym::coroutines, self.prev_token.span); + Movability::Static + } else { + Movability::Movable + }; let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) @@ -3090,7 +3094,7 @@ impl<'a> Parser<'a> { if let Some((ident, is_raw)) = self.token.lifetime() { // Disallow `'fn`, but with a better error message than `expect_lifetime`. if matches!(is_raw, IdentIsRaw::No) && ident.without_first_quote().is_reserved() { - self.dcx().emit_err(errors::InvalidLabel { span: ident.span, name: ident.name }); + self.dcx().emit_err(errors::KeywordLabel { span: ident.span }); } self.bump(); @@ -3657,7 +3661,7 @@ impl<'a> Parser<'a> { &mut self, pth: ast::Path, recover: bool, - close: ExpTokenPair<'_>, + close: ExpTokenPair, ) -> PResult< 'a, ( @@ -3676,8 +3680,8 @@ impl<'a> Parser<'a> { errors::HelpUseLatestEdition::new().add_to_diag(e); }; - while self.token != *close.tok { - if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) { + while self.token != close.tok { + if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(&close.tok) { let exp_span = self.prev_token.span; // We permit `.. }` on the left-hand side of a destructuring assignment. if self.check(close) { diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs index 4a8530a2b38..eb684c3a62f 100644 --- a/compiler/rustc_parse/src/parser/generics.rs +++ b/compiler/rustc_parse/src/parser/generics.rs @@ -172,8 +172,11 @@ impl<'a> Parser<'a> { }) } - /// Parses a (possibly empty) list of lifetime and type parameters, possibly including - /// a trailing comma and erroneous trailing attributes. + /// Parse a (possibly empty) list of generic (lifetime, type, const) parameters. + /// + /// ```ebnf + /// GenericParams = (GenericParam ("," GenericParam)* ","?)? + /// ``` pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> { let mut params = ThinVec::new(); let mut done = false; @@ -520,7 +523,7 @@ impl<'a> Parser<'a> { // * `for<'a> Trait1<'a>: Trait2<'a /* ok */>` // * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>` // * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>` - let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; + let (bound_vars, _) = self.parse_higher_ranked_binder()?; // Parse type with mandatory colon and (possibly empty) bounds, // or with mandatory equality sign and the second type. 
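For context on the `parse_late_bound_lifetime_defs` → `parse_higher_ranked_binder` rename used in the where-clause hunks above: the helper consumes the `for<...>` binder in front of a predicate or bound. A minimal, self-contained sketch of that surface syntax (illustrative only; `apply_to_slice` and the closure are invented names, not part of this patch):

// The `for<'a>` binder below is what `parse_higher_ranked_binder` parses
// before handing off to the bound/predicate parsing code.
fn apply_to_slice<F>(f: F) -> usize
where
    for<'a> F: Fn(&'a [u8]) -> usize,
{
    f(&[1, 2, 3])
}

fn main() {
    assert_eq!(apply_to_slice(|s| s.len()), 3);
}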
@@ -528,7 +531,7 @@ impl<'a> Parser<'a> { if self.eat(exp!(Colon)) { let bounds = self.parse_generic_bounds()?; Ok(ast::WherePredicateKind::BoundPredicate(ast::WhereBoundPredicate { - bound_generic_params: lifetime_defs, + bound_generic_params: bound_vars, bounded_ty: ty, bounds, })) diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index ca89eb1e2cf..eb264f59fed 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -43,7 +43,7 @@ impl<'a> Parser<'a> { self.expect(exp!(OpenBrace))?; let (inner_attrs, items, inner_span) = self.parse_mod(exp!(CloseBrace))?; attrs.extend(inner_attrs); - ModKind::Loaded(items, Inline::Yes, inner_span, Ok(())) + ModKind::Loaded(items, Inline::Yes, inner_span) }; Ok(ItemKind::Mod(safety, ident, mod_kind)) } @@ -54,7 +54,7 @@ impl<'a> Parser<'a> { /// - `}` for mod items pub fn parse_mod( &mut self, - term: ExpTokenPair<'_>, + term: ExpTokenPair, ) -> PResult<'a, (AttrVec, ThinVec<Box<Item>>, ModSpans)> { let lo = self.token.span; let attrs = self.parse_inner_attributes()?; @@ -1201,7 +1201,7 @@ impl<'a> Parser<'a> { }?; let dash = exp!(Minus); - if self.token != *dash.tok { + if self.token != dash.tok { return Ok(ident); } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 41ed1f95a01..15598f32429 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -24,7 +24,7 @@ pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; pub(crate) use item::{FnContext, FnParseMode}; pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; -use path::PathStyle; +pub use path::PathStyle; use rustc_ast::token::{ self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind, }; @@ -261,19 +261,19 @@ struct CaptureState { /// A sequence separator. #[derive(Debug)] -struct SeqSep<'a> { +struct SeqSep { /// The separator token. - sep: Option<ExpTokenPair<'a>>, + sep: Option<ExpTokenPair>, /// `true` if a trailing separator is allowed. trailing_sep_allowed: bool, } -impl<'a> SeqSep<'a> { - fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> { +impl SeqSep { + fn trailing_allowed(sep: ExpTokenPair) -> SeqSep { SeqSep { sep: Some(sep), trailing_sep_allowed: true } } - fn none() -> SeqSep<'a> { + fn none() -> SeqSep { SeqSep { sep: None, trailing_sep_allowed: false } } } @@ -285,7 +285,7 @@ pub enum FollowedByType { } #[derive(Copy, Clone, Debug)] -enum Trailing { +pub enum Trailing { No, Yes, } @@ -425,13 +425,13 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> { + pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> { if self.expected_token_types.is_empty() { - if self.token == *exp.tok { + if self.token == exp.tok { self.bump(); Ok(Recovered::No) } else { - self.unexpected_try_recover(exp.tok) + self.unexpected_try_recover(&exp.tok) } } else { self.expect_one_of(slice::from_ref(&exp), &[]) @@ -443,13 +443,13 @@ impl<'a> Parser<'a> { /// anything. Signal a fatal error if next token is unexpected. 
fn expect_one_of( &mut self, - edible: &[ExpTokenPair<'_>], - inedible: &[ExpTokenPair<'_>], + edible: &[ExpTokenPair], + inedible: &[ExpTokenPair], ) -> PResult<'a, Recovered> { - if edible.iter().any(|exp| exp.tok == &self.token.kind) { + if edible.iter().any(|exp| exp.tok == self.token.kind) { self.bump(); Ok(Recovered::No) - } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) { + } else if inedible.iter().any(|exp| exp.tok == self.token.kind) { // leave it in the input Ok(Recovered::No) } else if self.token != token::Eof @@ -494,8 +494,8 @@ impl<'a> Parser<'a> { /// This method will automatically add `tok` to `expected_token_types` if `tok` is not /// encountered. #[inline] - fn check(&mut self, exp: ExpTokenPair<'_>) -> bool { - let is_present = self.token == *exp.tok; + pub fn check(&mut self, exp: ExpTokenPair) -> bool { + let is_present = self.token == exp.tok; if !is_present { self.expected_token_types.insert(exp.token_type); } @@ -542,7 +542,7 @@ impl<'a> Parser<'a> { /// Consumes a token 'tok' if it exists. Returns whether the given token was present. #[inline] #[must_use] - pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool { + pub fn eat(&mut self, exp: ExpTokenPair) -> bool { let is_present = self.check(exp); if is_present { self.bump() @@ -633,7 +633,7 @@ impl<'a> Parser<'a> { } /// Consume a sequence produced by a metavar expansion, if present. - fn eat_metavar_seq<T>( + pub fn eat_metavar_seq<T>( &mut self, mv_kind: MetaVarKind, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, @@ -745,13 +745,13 @@ impl<'a> Parser<'a> { /// Eats the expected token if it's present possibly breaking /// compound tokens like multi-character operators in process. /// Returns `true` if the token was eaten. - fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool { - if self.token == *exp.tok { + fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool { + if self.token == exp.tok { self.bump(); return true; } match self.token.kind.break_two_token_op(1) { - Some((first, second)) if first == *exp.tok => { + Some((first, second)) if first == exp.tok => { let first_span = self.psess.source_map().start_point(self.token.span); let second_span = self.token.span.with_lo(first_span.hi()); self.token = Token::new(first, first_span); @@ -826,7 +826,7 @@ impl<'a> Parser<'a> { /// Checks if the next token is contained within `closes`, and returns `true` if so. fn expect_any_with_type( &mut self, - closes_expected: &[ExpTokenPair<'_>], + closes_expected: &[ExpTokenPair], closes_not_expected: &[&TokenKind], ) -> bool { closes_expected.iter().any(|&close| self.check(close)) @@ -838,9 +838,9 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_seq_to_before_tokens<T>( &mut self, - closes_expected: &[ExpTokenPair<'_>], + closes_expected: &[ExpTokenPair], closes_not_expected: &[&TokenKind], - sep: SeqSep<'_>, + sep: SeqSep, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> { let mut first = true; @@ -869,7 +869,7 @@ impl<'a> Parser<'a> { } Err(mut expect_err) => { let sp = self.prev_token.span.shrink_to_hi(); - let token_str = pprust::token_kind_to_string(exp.tok); + let token_str = pprust::token_kind_to_string(&exp.tok); match self.current_closure.take() { Some(closure_spans) if self.token == TokenKind::Semi => { @@ -1039,8 +1039,8 @@ impl<'a> Parser<'a> { /// closing bracket. 
fn parse_seq_to_before_end<T>( &mut self, - close: ExpTokenPair<'_>, - sep: SeqSep<'_>, + close: ExpTokenPair, + sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> { self.parse_seq_to_before_tokens(&[close], &[], sep, f) @@ -1051,8 +1051,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_seq_to_end<T>( &mut self, - close: ExpTokenPair<'_>, - sep: SeqSep<'_>, + close: ExpTokenPair, + sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?; @@ -1070,9 +1070,9 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_unspanned_seq<T>( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, - sep: SeqSep<'_>, + open: ExpTokenPair, + close: ExpTokenPair, + sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.expect(open)?; @@ -1084,8 +1084,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_delim_comma_seq<T>( &mut self, - open: ExpTokenPair<'_>, - close: ExpTokenPair<'_>, + open: ExpTokenPair, + close: ExpTokenPair, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f) @@ -1094,7 +1094,7 @@ impl<'a> Parser<'a> { /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`). /// The function `f` must consume tokens until reaching the next separator or /// closing bracket. - fn parse_paren_comma_seq<T>( + pub fn parse_paren_comma_seq<T>( &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec<T>, Trailing)> { @@ -1355,7 +1355,8 @@ impl<'a> Parser<'a> { AttrArgs::Delimited(args) } else if self.eat(exp!(Eq)) { let eq_span = self.prev_token.span; - AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? } + let expr = self.parse_expr_force_collect()?; + AttrArgs::Eq { eq_span, expr } } else { AttrArgs::Empty }) @@ -1388,15 +1389,26 @@ impl<'a> Parser<'a> { // matching `CloseDelim` we are *after* the delimited sequence, // i.e. at depth `d - 1`. let target_depth = self.token_cursor.stack.len() - 1; - loop { - // Advance one token at a time, so `TokenCursor::next()` - // can capture these tokens if necessary. + + if let Capturing::No = self.capture_state.capturing { + // We are not capturing tokens, so skip to the end of the + // delimited sequence. This is a perf win when dealing with + // declarative macros that pass large `tt` fragments through + // multiple rules, as seen in the uom-0.37.0 crate. + self.token_cursor.curr.bump_to_end(); self.bump(); - if self.token_cursor.stack.len() == target_depth { - debug_assert!(self.token.kind.close_delim().is_some()); - break; + debug_assert_eq!(self.token_cursor.stack.len(), target_depth); + } else { + loop { + // Advance one token at a time, so `TokenCursor::next()` + // can capture these tokens if necessary. + self.bump(); + if self.token_cursor.stack.len() == target_depth { + break; + } } } + debug_assert!(self.token.kind.close_delim().is_some()); // Consume close delimiter self.bump(); diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 37fc723cd89..86045648859 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -26,7 +26,7 @@ use crate::parser::{ /// Specifies how to parse a path. 
#[derive(Copy, Clone, PartialEq)] -pub(super) enum PathStyle { +pub enum PathStyle { /// In some contexts, notably in expressions, paths with generic arguments are ambiguous /// with something else. For example, in expressions `segment < ....` can be interpreted /// as a comparison and `segment ( ....` can be interpreted as a function call. @@ -150,7 +150,7 @@ impl<'a> Parser<'a> { true } - pub(super) fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> { + pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> { self.parse_path_inner(style, None) } diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs index b91548196a3..bd4bb368df0 100644 --- a/compiler/rustc_parse/src/parser/token_type.rs +++ b/compiler/rustc_parse/src/parser/token_type.rs @@ -416,8 +416,8 @@ impl TokenType { /// is always by used those methods. The second field is only used when the /// first field doesn't match. #[derive(Clone, Copy, Debug)] -pub struct ExpTokenPair<'a> { - pub tok: &'a TokenKind, +pub struct ExpTokenPair { + pub tok: TokenKind, pub token_type: TokenType, } @@ -444,7 +444,7 @@ macro_rules! exp { // `ExpTokenPair` helper rules. (@tok, $tok:ident) => { $crate::parser::token_type::ExpTokenPair { - tok: &rustc_ast::token::$tok, + tok: rustc_ast::token::$tok, token_type: $crate::parser::token_type::TokenType::$tok } }; diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 290f0a440af..6168647183f 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -137,14 +137,37 @@ impl<'a> Parser<'a> { /// The difference from `parse_ty` is that this version allows `...` /// (`CVarArgs`) at the top level of the type. pub(super) fn parse_ty_for_param(&mut self) -> PResult<'a, Box<Ty>> { - self.parse_ty_common( + let ty = self.parse_ty_common( AllowPlus::Yes, AllowCVariadic::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes, None, RecoverQuestionMark::Yes, - ) + )?; + + // Recover a trailing `= EXPR` if present. + if self.may_recover() + && self.check_noexpect(&token::Eq) + && self.look_ahead(1, |tok| tok.can_begin_expr()) + { + let snapshot = self.create_snapshot_for_diagnostic(); + self.bump(); + let eq_span = self.prev_token.span; + match self.parse_expr() { + Ok(e) => { + self.dcx() + .struct_span_err(eq_span.to(e.span), "parameter defaults are not supported") + .emit(); + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + } + } + } + + Ok(ty) } /// Parses a type in restricted contexts where `+` is not permitted. @@ -308,11 +331,11 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; + let (bound_vars, _) = self.parse_higher_ranked_binder()?; if self.check_fn_front_matter(false, Case::Sensitive) { self.parse_ty_fn_ptr( lo, - lifetime_defs, + bound_vars, Some(self.prev_token.span.shrink_to_lo()), recover_return_sign, )? 
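The recovery added to `parse_ty_for_param` above reports a trailing `= EXPR` after a parameter's type, and the fn-pointer path now goes through the renamed binder helper. A hedged sketch of the syntax involved (hypothetical names; the rejected line is kept as a comment because it is deliberately invalid):

// Now recovered with "parameter defaults are not supported":
//     fn retry(times: u32 = 3) {}

// Higher-ranked fn-pointer types still parse via `parse_higher_ranked_binder`
// followed by `parse_ty_fn_ptr`:
type LineFilter = for<'a> fn(&'a str) -> bool;

fn main() {
    let keep_nonempty: LineFilter = |line| !line.is_empty();
    assert!(keep_nonempty("x"));
}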
@@ -326,7 +349,7 @@ impl<'a> Parser<'a> { let path = self.parse_path(PathStyle::Type)?; let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus(); let kind = self.parse_remaining_bounds_path( - lifetime_defs, + bound_vars, path, lo, parse_plus, @@ -359,7 +382,7 @@ impl<'a> Parser<'a> { let path = self.parse_path(PathStyle::Type)?; let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus(); self.parse_remaining_bounds_path( - lifetime_defs, + bound_vars, path, lo, parse_plus, @@ -443,7 +466,7 @@ impl<'a> Parser<'a> { let ty = ts.into_iter().next().unwrap(); let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus(); match ty.kind { - // `(TY_BOUND_NOPAREN) + BOUND + ...`. + // `"(" BareTraitBound ")" "+" Bound "+" ...`. TyKind::Path(None, path) if maybe_bounds => self.parse_remaining_bounds_path( ThinVec::new(), path, @@ -853,10 +876,13 @@ impl<'a> Parser<'a> { Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)) } - fn parse_precise_capturing_args( - &mut self, - ) -> PResult<'a, (ThinVec<PreciseCapturingArg>, Span)> { - let lo = self.token.span; + /// Parse a use-bound aka precise capturing list. + /// + /// ```ebnf + /// UseBound = "use" "<" (PreciseCapture ("," PreciseCapture)* ","?)? ">" + /// PreciseCapture = "Self" | Ident | Lifetime + /// ``` + fn parse_use_bound(&mut self, lo: Span, parens: ast::Parens) -> PResult<'a, GenericBound> { self.expect_lt()?; let (args, _, _) = self.parse_seq_to_before_tokens( &[exp!(Gt)], @@ -882,7 +908,13 @@ impl<'a> Parser<'a> { }, )?; self.expect_gt()?; - Ok((args, lo.to(self.prev_token.span))) + + if let ast::Parens::Yes = parens { + self.expect(exp!(CloseParen))?; + self.report_parenthesized_bound(lo, self.prev_token.span, "precise capturing lists"); + } + + Ok(GenericBound::Use(args, lo.to(self.prev_token.span))) } /// Is a `dyn B0 + ... + Bn` type allowed here? @@ -940,9 +972,10 @@ impl<'a> Parser<'a> { self.parse_generic_bounds_common(AllowPlus::Yes) } - /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`. + /// Parse generic bounds. /// - /// See `parse_generic_bound` for the `BOUND` grammar. + /// Only if `allow_plus` this parses a `+`-separated list of bounds (trailing `+` is admitted). + /// Otherwise, this only parses a single bound or none. fn parse_generic_bounds_common(&mut self, allow_plus: AllowPlus) -> PResult<'a, GenericBounds> { let mut bounds = Vec::new(); @@ -988,48 +1021,56 @@ impl<'a> Parser<'a> { || self.check_keyword(exp!(Use)) } - /// Parses a bound according to the grammar: + /// Parse a bound. + /// /// ```ebnf - /// BOUND = TY_BOUND | LT_BOUND + /// Bound = LifetimeBound | UseBound | TraitBound /// ``` fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> { - let lo = self.token.span; let leading_token = self.prev_token; + let lo = self.token.span; + + // We only admit parenthesized *trait* bounds. However, we want to gracefully recover from + // other kinds of parenthesized bounds, so parse the opening parenthesis *here*. + // + // In the future we might want to lift this syntactic restriction and + // introduce "`GenericBound::Paren(Box<GenericBound>)`". let parens = if self.eat(exp!(OpenParen)) { ast::Parens::Yes } else { ast::Parens::No }; - let bound = if self.token.is_lifetime() { - self.parse_generic_lt_bound(lo, parens)? + if self.token.is_lifetime() { + self.parse_lifetime_bound(lo, parens) } else if self.eat_keyword(exp!(Use)) { - // parse precise captures, if any. 
This is `use<'lt, 'lt, P, P>`; a list of - // lifetimes and ident params (including SelfUpper). These are validated later - // for order, duplication, and whether they actually reference params. - let use_span = self.prev_token.span; - let (args, args_span) = self.parse_precise_capturing_args()?; - GenericBound::Use(args, use_span.to(args_span)) + self.parse_use_bound(lo, parens) } else { - self.parse_generic_ty_bound(lo, parens, &leading_token)? - }; - - Ok(bound) + self.parse_trait_bound(lo, parens, &leading_token) + } } - /// Parses a lifetime ("outlives") bound, e.g. `'a`, according to: + /// Parse a lifetime-bound aka outlives-bound. + /// /// ```ebnf - /// LT_BOUND = LIFETIME + /// LifetimeBound = Lifetime /// ``` - fn parse_generic_lt_bound( - &mut self, - lo: Span, - parens: ast::Parens, - ) -> PResult<'a, GenericBound> { + fn parse_lifetime_bound(&mut self, lo: Span, parens: ast::Parens) -> PResult<'a, GenericBound> { let lt = self.expect_lifetime(); - let bound = GenericBound::Outlives(lt); + if let ast::Parens::Yes = parens { - // FIXME(Centril): Consider not erroring here and accepting `('lt)` instead, - // possibly introducing `GenericBound::Paren(Box<GenericBound>)`? - self.recover_paren_lifetime(lo)?; + self.expect(exp!(CloseParen))?; + self.report_parenthesized_bound(lo, self.prev_token.span, "lifetime bounds"); } - Ok(bound) + + Ok(GenericBound::Outlives(lt)) + } + + fn report_parenthesized_bound(&self, lo: Span, hi: Span, kind: &str) -> ErrorGuaranteed { + let mut diag = + self.dcx().struct_span_err(lo.to(hi), format!("{kind} may not be parenthesized")); + diag.multipart_suggestion( + "remove the parentheses", + vec![(lo, String::new()), (hi, String::new())], + Applicability::MachineApplicable, + ); + diag.emit() } /// Emits an error if any trait bound modifiers were present. @@ -1074,27 +1115,17 @@ impl<'a> Parser<'a> { unreachable!("lifetime bound intercepted in `parse_generic_ty_bound` but no modifiers?") } - /// Recover on `('lifetime)` with `(` already eaten. - fn recover_paren_lifetime(&mut self, lo: Span) -> PResult<'a, ()> { - self.expect(exp!(CloseParen))?; - let span = lo.to(self.prev_token.span); - let sugg = errors::RemoveParens { lo, hi: self.prev_token.span }; - - self.dcx().emit_err(errors::ParenthesizedLifetime { span, sugg }); - Ok(()) - } - /// Parses the modifiers that may precede a trait in a bound, e.g. `?Trait` or `[const] Trait`. /// /// If no modifiers are present, this does not consume any tokens. /// /// ```ebnf - /// CONSTNESS = [["["] "const" ["]"]] - /// ASYNCNESS = ["async"] - /// POLARITY = ["?" | "!"] + /// Constness = ("const" | "[" "const" "]")? + /// Asyncness = "async"? + /// Polarity = ("?" | "!")? /// ``` /// - /// See `parse_generic_ty_bound` for the complete grammar of trait bound modifiers. + /// See `parse_trait_bound` for more context. fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> { let modifier_lo = self.token.span; let constness = self.parse_bound_constness()?; @@ -1187,20 +1218,21 @@ impl<'a> Parser<'a> { }) } - /// Parses a type bound according to: + /// Parse a trait bound. + /// /// ```ebnf - /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN) - /// TY_BOUND_NOPAREN = [for<GENERIC_PARAMS> CONSTNESS ASYNCNESS | POLARITY] SIMPLE_PATH + /// TraitBound = BareTraitBound | "(" BareTraitBound ")" + /// BareTraitBound = + /// (HigherRankedBinder Constness Asyncness | Polarity) + /// TypePath /// ``` - /// - /// For example, this grammar accepts `for<'a: 'b> [const] ?m::Trait<'a>`. 
- fn parse_generic_ty_bound( + fn parse_trait_bound( &mut self, lo: Span, parens: ast::Parens, leading_token: &Token, ) -> PResult<'a, GenericBound> { - let (mut lifetime_defs, binder_span) = self.parse_late_bound_lifetime_defs()?; + let (mut bound_vars, binder_span) = self.parse_higher_ranked_binder()?; let modifiers_lo = self.token.span; let modifiers = self.parse_trait_bound_modifiers()?; @@ -1223,11 +1255,11 @@ impl<'a> Parser<'a> { // e.g. `T: for<'a> 'a` or `T: [const] 'a`. if self.token.is_lifetime() { let _: ErrorGuaranteed = self.error_lt_bound_with_modifiers(modifiers, binder_span); - return self.parse_generic_lt_bound(lo, parens); + return self.parse_lifetime_bound(lo, parens); } - if let (more_lifetime_defs, Some(binder_span)) = self.parse_late_bound_lifetime_defs()? { - lifetime_defs.extend(more_lifetime_defs); + if let (more_bound_vars, Some(binder_span)) = self.parse_higher_ranked_binder()? { + bound_vars.extend(more_bound_vars); self.dcx().emit_err(errors::BinderBeforeModifiers { binder_span, modifiers_span }); } @@ -1287,7 +1319,7 @@ impl<'a> Parser<'a> { }; if self.may_recover() && self.token == TokenKind::OpenParen { - self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?; + self.recover_fn_trait_with_lifetime_params(&mut path, &mut bound_vars)?; } if let ast::Parens::Yes = parens { @@ -1310,7 +1342,7 @@ impl<'a> Parser<'a> { } let poly_trait = - PolyTraitRef::new(lifetime_defs, path, modifiers, lo.to(self.prev_token.span), parens); + PolyTraitRef::new(bound_vars, path, modifiers, lo.to(self.prev_token.span), parens); Ok(GenericBound::Trait(poly_trait)) } @@ -1349,8 +1381,12 @@ impl<'a> Parser<'a> { } } - /// Optionally parses `for<$generic_params>`. - pub(super) fn parse_late_bound_lifetime_defs( + /// Parse an optional higher-ranked binder. + /// + /// ```ebnf + /// HigherRankedBinder = ("for" "<" GenericParams ">")? 
+ /// ``` + pub(super) fn parse_higher_ranked_binder( &mut self, ) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> { if self.eat_keyword(exp!(For)) { @@ -1466,8 +1502,7 @@ impl<'a> Parser<'a> { pub(super) fn expect_lifetime(&mut self) -> Lifetime { if let Some((ident, is_raw)) = self.token.lifetime() { if matches!(is_raw, IdentIsRaw::No) - && ident.without_first_quote().is_reserved() - && ![kw::UnderscoreLifetime, kw::StaticLifetime].contains(&ident.name) + && ident.without_first_quote().is_reserved_lifetime() { self.dcx().emit_err(errors::KeywordLifetime { span: ident.span }); } diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index 03096f205c6..00a41e31a02 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -432,10 +432,6 @@ passes_must_not_suspend = `must_not_suspend` attribute should be applied to a struct, enum, union, or trait .label = is not a struct, enum, union, or trait -passes_must_use_no_effect = - `#[must_use]` has no effect when applied to {$target} - .suggestion = remove the attribute - passes_no_link = attribute should be applied to an `extern crate` item .label = not an `extern crate` item diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index c1a212d7ab5..7aef60b7b91 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -18,7 +18,7 @@ use rustc_feature::{ ACCEPTED_LANG_FEATURES, AttributeDuplicates, AttributeType, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute, }; -use rustc_hir::attrs::{AttributeKind, InlineAttr, MirDialect, MirPhase, ReprAttr}; +use rustc_hir::attrs::{AttributeKind, InlineAttr, MirDialect, MirPhase, ReprAttr, SanitizerSet}; use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalModDefId; use rustc_hir::intravisit::{self, Visitor}; @@ -167,7 +167,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Attribute::Parsed(AttributeKind::Deprecation { .. }) => { self.check_deprecated(hir_id, attr, span, target) } - Attribute::Parsed(AttributeKind::TargetFeature(_, attr_span)) => { + Attribute::Parsed(AttributeKind::TargetFeature{ attr_span, ..}) => { self.check_target_feature(hir_id, *attr_span, target, attrs) } Attribute::Parsed(AttributeKind::RustcObjectLifetimeDefault) => { @@ -194,12 +194,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Attribute::Parsed(AttributeKind::MayDangle(attr_span)) => { self.check_may_dangle(hir_id, *attr_span) } - Attribute::Parsed(AttributeKind::MustUse { span, .. }) => { - self.check_must_use(hir_id, *span, target) - } &Attribute::Parsed(AttributeKind::CustomMir(dialect, phase, attr_span)) => { self.check_custom_mir(dialect, phase, attr_span) } + &Attribute::Parsed(AttributeKind::Sanitize { on_set, off_set, span: attr_span}) => { + self.check_sanitize(attr_span, on_set | off_set, span, target); + }, Attribute::Parsed( AttributeKind::BodyStability { .. } | AttributeKind::ConstStabilityIndirect @@ -249,7 +249,9 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | AttributeKind::Coverage (..) | AttributeKind::ShouldPanic { .. } | AttributeKind::Coroutine(..) - | AttributeKind::Linkage(..), + | AttributeKind::Linkage(..) + | AttributeKind::MustUse { .. } + | AttributeKind::CrateName { .. } ) => { /* do nothing */ } Attribute::Unparsed(attr_item) => { style = Some(attr_item.style); @@ -260,9 +262,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { [sym::diagnostic, sym::on_unimplemented, ..] => { self.check_diagnostic_on_unimplemented(attr.span(), hir_id, target) } - [sym::sanitize, ..] 
=> { - self.check_sanitize(attr, span, target) - } [sym::thread_local, ..] => self.check_thread_local(attr, span, target), [sym::doc, ..] => self.check_doc_attrs( attr, @@ -369,22 +368,49 @@ impl<'tcx> CheckAttrVisitor<'tcx> { let builtin = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)); if hir_id != CRATE_HIR_ID { - if let Some(BuiltinAttribute { type_: AttributeType::CrateLevel, .. }) = - attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)) - { - match style { - Some(ast::AttrStyle::Outer) => self.tcx.emit_node_span_lint( + match attr { + // FIXME(jdonszelmann) move to attribute parsing when validation gets better there + &Attribute::Parsed(AttributeKind::CrateName { + attr_span: span, style, .. + }) => match style { + ast::AttrStyle::Outer => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + span, errors::OuterCrateLevelAttr, ), - Some(ast::AttrStyle::Inner) | None => self.tcx.emit_node_span_lint( + ast::AttrStyle::Inner => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + span, errors::InnerCrateLevelAttr, ), + }, + Attribute::Parsed(_) => { /* not crate-level */ } + Attribute::Unparsed(attr) => { + // FIXME(jdonszelmann): remove once all crate-level attrs are parsed and caught by + // the above + if let Some(BuiltinAttribute { type_: AttributeType::CrateLevel, .. }) = + attr.path + .segments + .first() + .and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)) + { + match attr.style { + ast::AttrStyle::Outer => self.tcx.emit_node_span_lint( + UNUSED_ATTRIBUTES, + hir_id, + attr.span, + errors::OuterCrateLevelAttr, + ), + ast::AttrStyle::Inner => self.tcx.emit_node_span_lint( + UNUSED_ATTRIBUTES, + hir_id, + attr.span, + errors::InnerCrateLevelAttr, + ), + } + } } } } @@ -485,42 +511,48 @@ impl<'tcx> CheckAttrVisitor<'tcx> { /// Checks that the `#[sanitize(..)]` attribute is applied to a /// function/closure/method, or to an impl block or module. - fn check_sanitize(&self, attr: &Attribute, target_span: Span, target: Target) { + fn check_sanitize( + &self, + attr_span: Span, + set: SanitizerSet, + target_span: Span, + target: Target, + ) { let mut not_fn_impl_mod = None; let mut no_body = None; - if let Some(list) = attr.meta_item_list() { - for item in list.iter() { - let MetaItemInner::MetaItem(set) = item else { - return; - }; - let segments = set.path.segments.iter().map(|x| x.ident.name).collect::<Vec<_>>(); - match target { - Target::Fn - | Target::Closure - | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) - | Target::Impl { .. } - | Target::Mod => return, - Target::Static if matches!(segments.as_slice(), [sym::address]) => return, - - // These are "functions", but they aren't allowed because they don't - // have a body, so the usual explanation would be confusing. - Target::Method(MethodKind::Trait { body: false }) | Target::ForeignFn => { - no_body = Some(target_span); - } + match target { + Target::Fn + | Target::Closure + | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) + | Target::Impl { .. } + | Target::Mod => return, + Target::Static + // if we mask out the address bits, i.e. *only* address was set, + // we allow it + if set & !(SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS) + == SanitizerSet::empty() => + { + return; + } - _ => { - not_fn_impl_mod = Some(target_span); - } - } + // These are "functions", but they aren't allowed because they don't + // have a body, so the usual explanation would be confusing. 
+ Target::Method(MethodKind::Trait { body: false }) | Target::ForeignFn => { + no_body = Some(target_span); + } + + _ => { + not_fn_impl_mod = Some(target_span); } - self.dcx().emit_err(errors::SanitizeAttributeNotAllowed { - attr_span: attr.span(), - not_fn_impl_mod, - no_body, - help: (), - }); } + + self.dcx().emit_err(errors::SanitizeAttributeNotAllowed { + attr_span, + not_fn_impl_mod, + no_body, + help: (), + }); } /// Checks if `#[naked]` is applied to a function definition. @@ -1263,41 +1295,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - /// Warns against some misuses of `#[must_use]` - fn check_must_use(&self, hir_id: HirId, attr_span: Span, target: Target) { - if matches!( - target, - Target::Fn - | Target::Enum - | Target::Struct - | Target::Union - | Target::Method(MethodKind::Trait { body: false } | MethodKind::Inherent) - | Target::ForeignFn - // `impl Trait` in return position can trip - // `unused_must_use` if `Trait` is marked as - // `#[must_use]` - | Target::Trait - ) { - return; - } - - // `#[must_use]` can be applied to a trait method definition with a default body - if let Target::Method(MethodKind::Trait { body: true }) = target - && let parent_def_id = self.tcx.hir_get_parent_item(hir_id).def_id - && let containing_item = self.tcx.hir_expect_item(parent_def_id) - && let hir::ItemKind::Trait(..) = containing_item.kind - { - return; - } - - self.tcx.emit_node_span_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr_span, - errors::MustUseNoEffect { target: target.plural_name(), attr_span }, - ); - } - /// Checks if `#[must_not_suspend]` is applied to a struct, enum, union, or trait. fn check_must_not_suspend(&self, attr: &Attribute, span: Span, target: Target) { match target { diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index 08d06402000..10cd9df4816 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -8,7 +8,6 @@ use std::mem; use hir::def_id::{LocalDefIdMap, LocalDefIdSet}; use rustc_abi::FieldIdx; use rustc_data_structures::fx::FxIndexSet; -use rustc_data_structures::unord::UnordSet; use rustc_errors::MultiSpan; use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId}; @@ -79,7 +78,7 @@ struct MarkSymbolVisitor<'tcx> { worklist: Vec<(LocalDefId, ComesFromAllowExpect)>, tcx: TyCtxt<'tcx>, maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>, - scanned: UnordSet<(LocalDefId, ComesFromAllowExpect)>, + scanned: LocalDefIdSet, live_symbols: LocalDefIdSet, repr_unconditionally_treats_fields_as_live: bool, repr_has_repr_simd: bool, @@ -321,18 +320,8 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { fn mark_live_symbols(&mut self) { while let Some(work) = self.worklist.pop() { - if !self.scanned.insert(work) { - continue; - } - let (mut id, comes_from_allow_expect) = work; - // Avoid accessing the HIR for the synthesized associated type generated for RPITITs. - if self.tcx.is_impl_trait_in_trait(id.to_def_id()) { - self.live_symbols.insert(id); - continue; - } - // in the case of tuple struct constructors we want to check the item, // not the generated tuple struct constructor function if let DefKind::Ctor(..) = self.tcx.def_kind(id) { @@ -360,9 +349,23 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { // this "duplication" is essential as otherwise a function with `#[expect]` // called from a `pub fn` may be falsely reported as not live, falsely // triggering the `unfulfilled_lint_expectations` lint. 
- if comes_from_allow_expect != ComesFromAllowExpect::Yes { + match comes_from_allow_expect { + ComesFromAllowExpect::Yes => {} + ComesFromAllowExpect::No => { + self.live_symbols.insert(id); + } + } + + if !self.scanned.insert(id) { + continue; + } + + // Avoid accessing the HIR for the synthesized associated type generated for RPITITs. + if self.tcx.is_impl_trait_in_trait(id.to_def_id()) { self.live_symbols.insert(id); + continue; } + self.visit_node(self.tcx.hir_node_by_def_id(id)); } } diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index ab46ac2dd8b..4fec6b0798a 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -358,14 +358,6 @@ pub(crate) struct BothFfiConstAndPure { pub attr_span: Span, } -#[derive(LintDiagnostic)] -#[diag(passes_must_use_no_effect)] -pub(crate) struct MustUseNoEffect { - pub target: &'static str, - #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")] - pub attr_span: Span, -} - #[derive(Diagnostic)] #[diag(passes_must_not_suspend)] pub(crate) struct MustNotSuspend { diff --git a/compiler/rustc_pattern_analysis/src/rustc/print.rs b/compiler/rustc_pattern_analysis/src/rustc/print.rs index 7649f72f868..cdf62c2553d 100644 --- a/compiler/rustc_pattern_analysis/src/rustc/print.rs +++ b/compiler/rustc_pattern_analysis/src/rustc/print.rs @@ -101,23 +101,11 @@ pub(crate) fn write_struct_like<'tcx>( let num_fields = variant_and_name.as_ref().map_or(subpatterns.len(), |(v, _)| v.fields.len()); if num_fields != 0 || variant_and_name.is_none() { write!(f, "(")?; - for i in 0..num_fields { - write!(f, "{}", start_or_comma())?; - - // Common case: the field is where we expect it. - if let Some(p) = subpatterns.get(i) { - if p.field.index() == i { - write!(f, "{}", p.pattern)?; - continue; - } - } - - // Otherwise, we have to go looking for it. - if let Some(p) = subpatterns.iter().find(|p| p.field.index() == i) { - write!(f, "{}", p.pattern)?; - } else { - write!(f, "_")?; - } + for FieldPat { pattern, .. 
} in subpatterns { + write!(f, "{}{pattern}", start_or_comma())?; + } + if matches!(ty.kind(), ty::Tuple(..)) && num_fields == 1 { + write!(f, ",")?; } write!(f, ")")?; } diff --git a/compiler/rustc_resolve/messages.ftl b/compiler/rustc_resolve/messages.ftl index 05b5abc3dc6..47280a93677 100644 --- a/compiler/rustc_resolve/messages.ftl +++ b/compiler/rustc_resolve/messages.ftl @@ -93,6 +93,9 @@ resolve_consider_adding_a_derive = resolve_consider_adding_macro_export = consider adding a `#[macro_export]` to the macro in the imported module +resolve_consider_marking_as_pub_crate = + in case you want to use the macro within this crate only, reduce the visibility to `pub(crate)` + resolve_consider_declaring_with_pub = consider declaring type or module `{$ident}` with `pub` diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 2f1c9fe4c1d..f75a625a279 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -11,7 +11,7 @@ use std::sync::Arc; use rustc_ast::visit::{self, AssocCtxt, Visitor, WalkItemKind}; use rustc_ast::{ self as ast, AssocItem, AssocItemKind, Block, ConstItem, Delegation, Fn, ForeignItem, - ForeignItemKind, Item, ItemKind, NodeId, StaticItem, StmtKind, TyAlias, + ForeignItemKind, Inline, Item, ItemKind, NodeId, StaticItem, StmtKind, TyAlias, }; use rustc_attr_parsing as attr; use rustc_attr_parsing::AttributeParser; @@ -485,6 +485,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { root_span, root_id, vis, + vis_span: item.vis.span, }); self.r.indeterminate_imports.push(import); @@ -812,7 +813,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { ItemKind::Mod(_, ident, ref mod_kind) => { self.r.define_local(parent, ident, TypeNS, res, vis, sp, expansion); - if let ast::ModKind::Loaded(_, _, _, Err(_)) = mod_kind { + if let ast::ModKind::Loaded(_, Inline::No { had_parse_error: Err(_) }, _) = mod_kind + { self.r.mods_with_parse_errors.insert(def_id); } self.parent_scope.module = self.r.new_local_module( @@ -977,6 +979,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { span: item.span, module_path: Vec::new(), vis, + vis_span: item.vis.span, }); if used { self.r.import_use_map.insert(import, Used::Other); @@ -1111,6 +1114,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { span, module_path: Vec::new(), vis: Visibility::Restricted(CRATE_DEF_ID), + vis_span: item.vis.span, }) }; @@ -1281,6 +1285,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> { span, module_path: Vec::new(), vis, + vis_span: item.vis.span, }); self.r.import_use_map.insert(import, Used::Other); let import_binding = self.r.import(binding, import); diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index ff39ba46d97..337e7d2dd86 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -1020,11 +1020,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { &mut self, suggestions: &mut Vec<TypoSuggestion>, scope_set: ScopeSet<'ra>, - parent_scope: &ParentScope<'ra>, + ps: &ParentScope<'ra>, ctxt: SyntaxContext, filter_fn: &impl Fn(Res) -> bool, ) { - self.cm().visit_scopes(scope_set, parent_scope, ctxt, |this, scope, use_prelude, _| { + self.cm().visit_scopes(scope_set, ps, ctxt, None, |this, scope, use_prelude, _| { match scope { Scope::DeriveHelpers(expn_id) => { let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper); @@ 
-1557,7 +1557,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }); } for ns in [Namespace::MacroNS, Namespace::TypeNS, Namespace::ValueNS] { - let Ok(binding) = self.cm().early_resolve_ident_in_lexical_scope( + let Ok(binding) = self.cm().resolve_ident_in_scope_set( ident, ScopeSet::All(ns), parent_scope, @@ -2371,7 +2371,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } else { self.cm() - .early_resolve_ident_in_lexical_scope( + .resolve_ident_in_scope_set( ident, ScopeSet::All(ns_to_try), parent_scope, @@ -2474,7 +2474,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }, ) }); - if let Ok(binding) = self.cm().early_resolve_ident_in_lexical_scope( + if let Ok(binding) = self.cm().resolve_ident_in_scope_set( ident, ScopeSet::All(ValueNS), parent_scope, @@ -3410,7 +3410,7 @@ impl<'tcx> visit::Visitor<'tcx> for UsePlacementFinder { fn visit_item(&mut self, item: &'tcx ast::Item) { if self.target_module == item.id { - if let ItemKind::Mod(_, _, ModKind::Loaded(items, _inline, mod_spans, _)) = &item.kind { + if let ItemKind::Mod(_, _, ModKind::Loaded(items, _inline, mod_spans)) = &item.kind { let inject = mod_spans.inject_use_span; if is_span_suitable_for_use_injection(inject) { self.first_legal_span = Some(inject); diff --git a/compiler/rustc_resolve/src/errors.rs b/compiler/rustc_resolve/src/errors.rs index a1d62ba7a68..63d6fa23a14 100644 --- a/compiler/rustc_resolve/src/errors.rs +++ b/compiler/rustc_resolve/src/errors.rs @@ -776,6 +776,17 @@ pub(crate) struct ConsiderAddingMacroExport { } #[derive(Subdiagnostic)] +#[suggestion( + resolve_consider_marking_as_pub_crate, + code = "pub(crate)", + applicability = "maybe-incorrect" +)] +pub(crate) struct ConsiderMarkingAsPubCrate { + #[primary_span] + pub(crate) vis_span: Span, +} + +#[derive(Subdiagnostic)] #[note(resolve_consider_marking_as_pub)] pub(crate) struct ConsiderMarkingAsPub { #[primary_span] diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index c1b3aff4e69..dae42645bec 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -19,7 +19,7 @@ use crate::{ AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, BindingKey, CmResolver, Determinacy, Finalize, ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot, NameBinding, NameBindingKind, ParentScope, PathResult, PrivacyError, Res, ResolutionError, - Resolver, Scope, ScopeSet, Segment, Used, Weak, errors, + Resolver, Scope, ScopeSet, Segment, Stage, Used, Weak, errors, }; #[derive(Copy, Clone)] @@ -49,6 +49,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { scope_set: ScopeSet<'ra>, parent_scope: &ParentScope<'ra>, ctxt: SyntaxContext, + derive_fallback_lint_id: Option<NodeId>, mut visitor: impl FnMut( &mut CmResolver<'r, 'ra, 'tcx>, Scope<'ra>, @@ -99,15 +100,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let rust_2015 = ctxt.edition().is_rust_2015(); let (ns, macro_kind) = match scope_set { - ScopeSet::All(ns) - | ScopeSet::ModuleAndExternPrelude(ns, _) - | ScopeSet::Late(ns, ..) => (ns, None), + ScopeSet::All(ns) | ScopeSet::ModuleAndExternPrelude(ns, _) => (ns, None), ScopeSet::ExternPrelude => (TypeNS, None), ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind)), }; let module = match scope_set { // Start with the specified module. - ScopeSet::Late(_, module, _) | ScopeSet::ModuleAndExternPrelude(_, module) => module, + ScopeSet::ModuleAndExternPrelude(_, module) => module, // Jump out of trait or enum modules, they do not act as scopes. 
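
The "jump out of trait or enum modules" step amounts to walking up the parent chain until a module that really acts as a scope is found. A rough sketch under invented types (not the resolver's actual `Module`):

    // Toy `nearest_item_scope`: trait/enum "modules" are transparent for scoping,
    // so climb parents until a real item scope is reached.
    enum Kind { Mod, Trait, Enum }

    struct Module { kind: Kind, parent: Option<Box<Module>> }

    fn nearest_item_scope(m: &Module) -> &Module {
        match m.kind {
            Kind::Trait | Kind::Enum => m.parent.as_deref().map(nearest_item_scope).unwrap_or(m),
            Kind::Mod => m,
        }
    }
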
_ => parent_scope.module.nearest_item_scope(), }; @@ -193,10 +192,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { }, Scope::Module(module, prev_lint_id) => { use_prelude = !module.no_implicit_prelude; - let derive_fallback_lint_id = match scope_set { - ScopeSet::Late(.., lint_id) => lint_id, - _ => None, - }; match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) { Some((parent_module, lint_id)) => { Scope::Module(parent_module, lint_id.or(prev_lint_id)) @@ -349,11 +344,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { return Some(LexicalScopeBinding::Item(binding)); } else if let RibKind::Module(module) = rib.kind { // Encountered a module item, abandon ribs and look into that module and preludes. + let parent_scope = &ParentScope { module, ..*parent_scope }; + let finalize = finalize.map(|f| Finalize { stage: Stage::Late, ..f }); return self .cm() - .early_resolve_ident_in_lexical_scope( + .resolve_ident_in_scope_set( orig_ident, - ScopeSet::Late(ns, module, finalize.map(|finalize| finalize.node_id)), + ScopeSet::All(ns), parent_scope, finalize, finalize.is_some(), @@ -376,13 +373,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { unreachable!() } - /// Resolve an identifier in lexical scope. - /// This is a variation of `fn resolve_ident_in_lexical_scope` that can be run during - /// expansion and import resolution (perhaps they can be merged in the future). - /// The function is used for resolving initial segments of macro paths (e.g., `foo` in - /// `foo::bar!();` or `foo!();`) and also for import paths on 2018 edition. + /// Resolve an identifier in the specified set of scopes. #[instrument(level = "debug", skip(self))] - pub(crate) fn early_resolve_ident_in_lexical_scope<'r>( + pub(crate) fn resolve_ident_in_scope_set<'r>( self: CmResolver<'r, 'ra, 'tcx>, orig_ident: Ident, scope_set: ScopeSet<'ra>, @@ -411,9 +404,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } let (ns, macro_kind) = match scope_set { - ScopeSet::All(ns) - | ScopeSet::ModuleAndExternPrelude(ns, _) - | ScopeSet::Late(ns, ..) => (ns, None), + ScopeSet::All(ns) | ScopeSet::ModuleAndExternPrelude(ns, _) => (ns, None), ScopeSet::ExternPrelude => (TypeNS, None), ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind)), }; @@ -437,10 +428,15 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } // Go through all the scopes and try to resolve the name. + let derive_fallback_lint_id = match finalize { + Some(Finalize { node_id, stage: Stage::Late, .. }) => Some(node_id), + _ => None, + }; let break_result = self.visit_scopes( scope_set, parent_scope, orig_ident.span.ctxt(), + derive_fallback_lint_id, |this, scope, use_prelude, ctxt| { let ident = Ident::new(orig_ident.name, orig_ident.span.with_ctxt(ctxt)); let result = match scope { @@ -510,11 +506,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ident, ns, adjusted_parent_scope, - if matches!(scope_set, ScopeSet::Late(..)) { - Shadowing::Unrestricted - } else { - Shadowing::Restricted - }, + Shadowing::Restricted, adjusted_finalize, ignore_binding, ignore_import, @@ -528,7 +520,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { orig_ident.span, BuiltinLintDiag::ProcMacroDeriveResolutionFallback { span: orig_ident.span, - ns, + ns_descr: ns.descr(), ident, }, ); @@ -643,7 +635,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { return None; } - if finalize.is_none() || matches!(scope_set, ScopeSet::Late(..)) { + // Below we report various ambiguity errors. 
+ // We do not need to report them if we are either in speculative resolution, + // or in late resolution when everything is already imported and expanded + // and no ambiguities exist. + if matches!(finalize, None | Some(Finalize { stage: Stage::Late, .. })) { return Some(Ok(binding)); } @@ -811,7 +807,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { ModuleOrUniformRoot::Module(module) => module, ModuleOrUniformRoot::ModuleAndExternPrelude(module) => { assert_eq!(shadowing, Shadowing::Unrestricted); - let binding = self.early_resolve_ident_in_lexical_scope( + let binding = self.resolve_ident_in_scope_set( ident, ScopeSet::ModuleAndExternPrelude(ns, module), parent_scope, @@ -827,7 +823,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { return if ns != TypeNS { Err((Determined, Weak::No)) } else { - let binding = self.early_resolve_ident_in_lexical_scope( + let binding = self.resolve_ident_in_scope_set( ident, ScopeSet::ExternPrelude, parent_scope, @@ -852,7 +848,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } - let binding = self.early_resolve_ident_in_lexical_scope( + let binding = self.resolve_ident_in_scope_set( ident, ScopeSet::All(ns), parent_scope, @@ -945,7 +941,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Now we are in situation when new item/import can appear only from a glob or a macro // expansion. With restricted shadowing names from globs and macro expansions cannot // shadow names from outer scopes, so we can freely fallback from module search to search - // in outer scopes. For `early_resolve_ident_in_lexical_scope` to continue search in outer + // in outer scopes. For `resolve_ident_in_scope_set` to continue search in outer // scopes we return `Undetermined` with `Weak::Yes`. // Check if one of unexpanded macros can still define the name, @@ -1040,6 +1036,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // Forbid expanded shadowing to avoid time travel. if let Some(shadowed_glob) = shadowed_glob && shadowing == Shadowing::Restricted + && finalize.stage == Stage::Early && binding.expansion != LocalExpnId::ROOT && binding.res() != shadowed_glob.res() { @@ -1635,7 +1632,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { _ => Err(Determinacy::determined(finalize.is_some())), } } else { - self.reborrow().early_resolve_ident_in_lexical_scope( + self.reborrow().resolve_ident_in_scope_set( ident, ScopeSet::All(ns), parent_scope, diff --git a/compiler/rustc_resolve/src/imports.rs b/compiler/rustc_resolve/src/imports.rs index 7c93fdb88ee..d7fc028287f 100644 --- a/compiler/rustc_resolve/src/imports.rs +++ b/compiler/rustc_resolve/src/imports.rs @@ -30,7 +30,7 @@ use crate::diagnostics::{DiagMode, Suggestion, import_candidates}; use crate::errors::{ CannotBeReexportedCratePublic, CannotBeReexportedCratePublicNS, CannotBeReexportedPrivate, CannotBeReexportedPrivateNS, CannotDetermineImportResolution, CannotGlobImportAllCrates, - ConsiderAddingMacroExport, ConsiderMarkingAsPub, + ConsiderAddingMacroExport, ConsiderMarkingAsPub, ConsiderMarkingAsPubCrate, }; use crate::{ AmbiguityError, AmbiguityKind, BindingKey, CmResolver, Determinacy, Finalize, ImportSuggestion, @@ -184,6 +184,9 @@ pub(crate) struct ImportData<'ra> { /// |`use foo` | `ModuleOrUniformRoot::CurrentScope` | - | pub imported_module: Cell<Option<ModuleOrUniformRoot<'ra>>>, pub vis: Visibility, + + /// Span of the visibility. + pub vis_span: Span, } /// All imports are unique and allocated on a same arena, @@ -866,7 +869,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } ImportKind::Glob { .. 
} => { // FIXME: Use mutable resolver directly as a hack, this should be an output of - // specualtive resolution. + // speculative resolution. self.get_mut_unchecked().resolve_glob_import(import); return 0; } @@ -903,7 +906,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // We need the `target`, `source` can be extracted. let imported_binding = this.import(binding, import); // FIXME: Use mutable resolver directly as a hack, this should be an output of - // specualtive resolution. + // speculative resolution. this.get_mut_unchecked().define_binding_local( parent, target, @@ -917,7 +920,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { if target.name != kw::Underscore { let key = BindingKey::new(target, ns); // FIXME: Use mutable resolver directly as a hack, this should be an output of - // specualtive resolution. + // speculative resolution. this.get_mut_unchecked().update_local_resolution( parent, key, @@ -1368,6 +1371,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { err.subdiagnostic( ConsiderAddingMacroExport { span: binding.span, }); + err.subdiagnostic( ConsiderMarkingAsPubCrate { + vis_span: import.vis_span, + }); } _ => { err.subdiagnostic( ConsiderMarkingAsPub { @@ -1440,7 +1446,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { return; } - match this.cm().early_resolve_ident_in_lexical_scope( + match this.cm().resolve_ident_in_scope_set( target, ScopeSet::All(ns), &import.parent_scope, diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 5200f9340e1..679e663f886 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -4270,7 +4270,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { if path.len() == 2 && let [segment] = prefix_path { - // Delay to check whether methond name is an associated function or not + // Delay to check whether method name is an associated function or not // ``` // let foo = Foo {}; // foo::bar(); // possibly suggest to foo.bar(); diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index 9b201e40d13..80b2095d8cc 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -38,8 +38,8 @@ use crate::late::{ }; use crate::ty::fast_reject::SimplifiedType; use crate::{ - Module, ModuleKind, ModuleOrUniformRoot, PathResult, PathSource, Resolver, ScopeSet, Segment, - errors, path_names_to_string, + Module, ModuleKind, ModuleOrUniformRoot, ParentScope, PathResult, PathSource, Resolver, + ScopeSet, Segment, errors, path_names_to_string, }; type Res = def::Res<ast::NodeId>; @@ -2460,10 +2460,11 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { self.r.add_module_candidates(module, &mut names, &filter_fn, Some(ctxt)); } else if let RibKind::Module(module) = rib.kind { // Encountered a module item, abandon ribs and look into that module and preludes. 
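
A toy model of the rib walk this comment describes, with invented types rather than the resolver's real data structures: local scopes are searched innermost-out, and the first module boundary abandons the ribs in favour of a module-level lookup:

    use std::collections::HashMap;

    enum Rib {
        Local(HashMap<String, u32>),
        ModuleBoundary, // past this point, ribs no longer apply
    }

    fn resolve(name: &str, ribs: &[Rib], module_scope: &HashMap<String, u32>) -> Option<u32> {
        for rib in ribs.iter().rev() {
            match rib {
                Rib::Local(bindings) => {
                    if let Some(&id) = bindings.get(name) {
                        return Some(id);
                    }
                }
                // "abandon ribs and look into that module and preludes"
                Rib::ModuleBoundary => return module_scope.get(name).copied(),
            }
        }
        None
    }
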
+ let parent_scope = &ParentScope { module, ..self.parent_scope }; self.r.add_scope_set_candidates( &mut names, - ScopeSet::Late(ns, module, None), - &self.parent_scope, + ScopeSet::All(ns), + parent_scope, ctxt, filter_fn, ); @@ -3094,7 +3095,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { } else { self.suggest_introducing_lifetime( &mut err, - Some(lifetime_ref.ident.name.as_str()), + Some(lifetime_ref.ident), |err, _, span, message, suggestion, span_suggs| { err.multipart_suggestion_verbose( message, @@ -3112,7 +3113,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { fn suggest_introducing_lifetime( &self, err: &mut Diag<'_>, - name: Option<&str>, + name: Option<Ident>, suggest: impl Fn( &mut Diag<'_>, bool, @@ -3159,7 +3160,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { let mut rm_inner_binders: FxIndexSet<Span> = Default::default(); let (span, sugg) = if span.is_empty() { let mut binder_idents: FxIndexSet<Ident> = Default::default(); - binder_idents.insert(Ident::from_str(name.unwrap_or("'a"))); + binder_idents.insert(name.unwrap_or(Ident::from_str("'a"))); // We need to special case binders in the following situation: // Change `T: for<'a> Trait<T> + 'b` to `for<'a, 'b> T: Trait<T> + 'b` @@ -3189,16 +3190,11 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { } } - let binders_sugg = binder_idents.into_iter().enumerate().fold( - "".to_string(), - |mut binders, (i, x)| { - if i != 0 { - binders += ", "; - } - binders += x.as_str(); - binders - }, - ); + let binders_sugg: String = binder_idents + .into_iter() + .map(|ident| ident.to_string()) + .intersperse(", ".to_owned()) + .collect(); let sugg = format!( "{}<{}>{}", if higher_ranked { "for" } else { "" }, @@ -3214,7 +3210,8 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { .source_map() .span_through_char(span, '<') .shrink_to_hi(); - let sugg = format!("{}, ", name.unwrap_or("'a")); + let sugg = + format!("{}, ", name.map(|i| i.to_string()).as_deref().unwrap_or("'a")); (span, sugg) }; @@ -3222,7 +3219,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> { let message = Cow::from(format!( "consider making the {} lifetime-generic with a new `{}` lifetime", kind.descr(), - name.unwrap_or("'a"), + name.map(|i| i.to_string()).as_deref().unwrap_or("'a"), )); should_continue = suggest( err, diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index c0a46e3b16b..2afb52ef4d4 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -49,7 +49,7 @@ use rustc_data_structures::intern::Interned; use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{FreezeReadGuard, FreezeWriteGuard}; use rustc_data_structures::unord::{UnordMap, UnordSet}; -use rustc_errors::{Applicability, Diag, ErrCode, ErrorGuaranteed}; +use rustc_errors::{Applicability, Diag, ErrCode, ErrorGuaranteed, LintBuffer}; use rustc_expand::base::{DeriveResolution, SyntaxExtension, SyntaxExtensionKind}; use rustc_feature::BUILTIN_ATTRIBUTES; use rustc_hir::attrs::StrippedCfgItem; @@ -72,8 +72,8 @@ use rustc_middle::ty::{ ResolverGlobalCtxt, TyCtxt, TyCtxtFeed, Visibility, }; use rustc_query_system::ich::StableHashingContext; +use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::PRIVATE_MACRO_USE; -use rustc_session::lint::{BuiltinLintDiag, LintBuffer}; use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind, SyntaxContext, Transparency}; use 
rustc_span::{DUMMY_SP, Ident, Macros20NormalizedIdent, Span, Symbol, kw, sym}; use smallvec::{SmallVec, smallvec}; @@ -156,11 +156,8 @@ enum ScopeSet<'ra> { ModuleAndExternPrelude(Namespace, Module<'ra>), /// Just two extern prelude scopes. ExternPrelude, - /// All scopes with macro namespace and the given macro kind restriction. + /// Same as `All(MacroNS)`, but with the given macro kind restriction. Macro(MacroKind), - /// All scopes with the given namespace, used for partially performing late resolution. - /// The node id enables lints and is used for reporting them. - Late(Namespace, Module<'ra>, Option<NodeId>), } /// Everything you need to know about a name's location to resolve it. @@ -781,7 +778,10 @@ impl<'ra> std::ops::Deref for Module<'ra> { impl<'ra> fmt::Debug for Module<'ra> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self.res()) + match self.kind { + ModuleKind::Block => write!(f, "block"), + ModuleKind::Def(..) => write!(f, "{:?}", self.res()), + } } } @@ -1885,7 +1885,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { } } - self.cm().visit_scopes(ScopeSet::All(TypeNS), parent_scope, ctxt, |this, scope, _, _| { + let scope_set = ScopeSet::All(TypeNS); + self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |this, scope, _, _| { match scope { Scope::Module(module, _) => { this.get_mut().traits_in_module(module, assoc_item, &mut found_traits); @@ -2452,6 +2453,17 @@ fn module_to_string(mut module: Module<'_>) -> Option<String> { Some(names_to_string(names.iter().rev().copied())) } +#[derive(Copy, Clone, PartialEq, Debug)] +enum Stage { + /// Resolving an import or a macro. + /// Used when macro expansion is either not yet finished, or we are finalizing its results. + /// Used by default as a more restrictive variant that can produce additional errors. + Early, + /// Resolving something in late resolution when all imports are resolved + /// and all macros are expanded. + Late, +} + #[derive(Copy, Clone, Debug)] struct Finalize { /// Node ID for linting. @@ -2464,9 +2476,11 @@ struct Finalize { root_span: Span, /// Whether to report privacy errors or silently return "no resolution" for them, /// similarly to speculative resolution. - report_private: bool, + report_private: bool = true, /// Tracks whether an item is used in scope or used relatively to a module. - used: Used, + used: Used = Used::Other, + /// Finalizing early or late resolution. + stage: Stage = Stage::Early, } impl Finalize { @@ -2475,7 +2489,7 @@ impl Finalize { } fn with_root_span(node_id: NodeId, path_span: Span, root_span: Span) -> Finalize { - Finalize { node_id, path_span, root_span, report_private: true, used: Used::Other } + Finalize { node_id, path_span, root_span, .. } } } diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index 72ed8990241..5fa87b4cd9b 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -789,7 +789,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { self.prohibit_imported_non_macro_attrs(None, res.ok(), path_span); res } else { - let binding = self.reborrow().early_resolve_ident_in_lexical_scope( + let binding = self.reborrow().resolve_ident_in_scope_set( path[0].ident, ScopeSet::Macro(kind), parent_scope, @@ -951,7 +951,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // FIXME: Should be an output of Speculative Resolution. 
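
The reworked `Finalize` above leans on the unstable `default_field_values` feature (struct fields with `= const-expr` defaults, filled in at construction by a trailing `..`). A self-contained sketch of that pattern, with field names invented for illustration and nightly-only at the time of writing:

    #![feature(default_field_values)] // nightly-only

    #[derive(Copy, Clone, Debug, PartialEq)]
    enum Stage { Early, Late }

    struct Options {
        node_id: u32,
        report_private: bool = true,
        stage: Stage = Stage::Early,
    }

    fn main() {
        // `..` with no base expression fills the remaining fields from their defaults.
        let opts = Options { node_id: 7, .. };
        assert!(opts.report_private);
        assert_eq!(opts.stage, Stage::Early);
    }
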
let macro_resolutions = self.single_segment_macro_resolutions.take(); for (ident, kind, parent_scope, initial_binding, sugg_span) in macro_resolutions { - match self.cm().early_resolve_ident_in_lexical_scope( + match self.cm().resolve_ident_in_scope_set( ident, ScopeSet::Macro(kind), &parent_scope, @@ -1006,7 +1006,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let builtin_attrs = mem::take(&mut self.builtin_attrs); for (ident, parent_scope) in builtin_attrs { - let _ = self.cm().early_resolve_ident_in_lexical_scope( + let _ = self.cm().resolve_ident_in_scope_set( ident, ScopeSet::Macro(MacroKind::Attr), &parent_scope, @@ -1112,7 +1112,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { // If such resolution is successful and gives the same result // (e.g. if the macro is re-imported), then silence the lint. let no_macro_rules = self.arenas.alloc_macro_rules_scope(MacroRulesScope::Empty); - let fallback_binding = self.reborrow().early_resolve_ident_in_lexical_scope( + let fallback_binding = self.reborrow().resolve_ident_in_scope_set( path.segments[0].ident, ScopeSet::Macro(MacroKind::Bang), &ParentScope { macro_rules: no_macro_rules, ..*parent_scope }, diff --git a/compiler/rustc_session/Cargo.toml b/compiler/rustc_session/Cargo.toml index 0516982aeab..60d56b1b808 100644 --- a/compiler/rustc_session/Cargo.toml +++ b/compiler/rustc_session/Cargo.toml @@ -27,8 +27,10 @@ tracing = "0.1" # tidy-alphabetical-end [target.'cfg(unix)'.dependencies] +# FIXME: Remove this pin once this rustix issue is resolved +# https://github.com/bytecodealliance/rustix/issues/1496 # tidy-alphabetical-start -libc = "0.2" +libc = "=0.2.174" # tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] diff --git a/compiler/rustc_session/messages.ftl b/compiler/rustc_session/messages.ftl index 528c52eace7..b46e8ab4fdc 100644 --- a/compiler/rustc_session/messages.ftl +++ b/compiler/rustc_session/messages.ftl @@ -49,6 +49,8 @@ session_hexadecimal_float_literal_not_supported = hexadecimal float literal is n session_incompatible_linker_flavor = linker flavor `{$flavor}` is incompatible with the current target .note = compatible flavors are: {$compatible_list} +session_indirect_branch_cs_prefix_requires_x86_or_x86_64 = `-Zindirect-branch-cs-prefix` is only supported on x86 and x86_64 + session_instrumentation_not_supported = {$us} instrumentation is not supported for this target session_int_literal_too_large = integer literal is too large @@ -129,6 +131,10 @@ session_target_small_data_threshold_not_supported = `-Z small-data-threshold` is session_target_stack_protector_not_supported = `-Z stack-protector={$stack_protector}` is not supported for target {$target_triple} and will be ignored +session_unexpected_builtin_cfg = unexpected `--cfg {$cfg}` flag + .controlled_by = config `{$cfg_name}` is only supposed to be controlled by `{$controlled_by}` + .incoherent = manually setting a built-in cfg can and does create incoherent behaviors + session_unleashed_feature_help_named = skipping check for `{$gate}` feature session_unleashed_feature_help_unnamed = skipping check that does not even have a feature gate diff --git a/compiler/rustc_session/src/config/cfg.rs b/compiler/rustc_session/src/config/cfg.rs index 62891eb4f26..8f63ce6f0ae 100644 --- a/compiler/rustc_session/src/config/cfg.rs +++ b/compiler/rustc_session/src/config/cfg.rs @@ -26,13 +26,12 @@ use std::iter; use rustc_abi::Align; use rustc_ast::ast; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; -use rustc_lint_defs::BuiltinLintDiag; use 
rustc_lint_defs::builtin::EXPLICIT_BUILTIN_CFGS_IN_FLAGS; use rustc_span::{Symbol, sym}; use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, Target}; -use crate::Session; use crate::config::{CrateType, FmtDebug}; +use crate::{Session, errors}; /// The parsed `--cfg` options that define the compilation environment of the /// crate, used to drive conditional compilation. @@ -99,7 +98,7 @@ pub(crate) fn disallow_cfgs(sess: &Session, user_cfgs: &Cfg) { EXPLICIT_BUILTIN_CFGS_IN_FLAGS, None, ast::CRATE_NODE_ID, - BuiltinLintDiag::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by }, + errors::UnexpectedBuiltinCfg { cfg, cfg_name, controlled_by }.into(), ) }; diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs index bf95014843d..34da54a20bf 100644 --- a/compiler/rustc_session/src/errors.rs +++ b/compiler/rustc_session/src/errors.rs @@ -7,7 +7,7 @@ use rustc_errors::{ Diag, DiagCtxtHandle, DiagMessage, Diagnostic, EmissionGuarantee, ErrorGuaranteed, Level, MultiSpan, }; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_span::{Span, Symbol}; use rustc_target::spec::{SplitDebuginfo, StackProtector, TargetTuple}; @@ -472,6 +472,10 @@ pub(crate) struct FunctionReturnRequiresX86OrX8664; pub(crate) struct FunctionReturnThunkExternRequiresNonLargeCodeModel; #[derive(Diagnostic)] +#[diag(session_indirect_branch_cs_prefix_requires_x86_or_x86_64)] +pub(crate) struct IndirectBranchCsPrefixRequiresX86OrX8664; + +#[derive(Diagnostic)] #[diag(session_unsupported_regparm)] pub(crate) struct UnsupportedRegparm { pub(crate) regparm: u32, @@ -501,3 +505,13 @@ pub(crate) struct SoftFloatIgnored; #[note] #[note(session_soft_float_deprecated_issue)] pub(crate) struct SoftFloatDeprecated; + +#[derive(LintDiagnostic)] +#[diag(session_unexpected_builtin_cfg)] +#[note(session_controlled_by)] +#[note(session_incoherent)] +pub(crate) struct UnexpectedBuiltinCfg { + pub(crate) cfg: String, + pub(crate) cfg_name: Symbol, + pub(crate) controlled_by: &'static str, +} diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 0e112edc733..6d5be2d92cd 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -2295,6 +2295,8 @@ options! 
{ - hashes of green query instances - hash collisions of query keys - hash collisions when creating dep-nodes"), + indirect_branch_cs_prefix: bool = (false, parse_bool, [TRACKED TARGET_MODIFIER], + "add `cs` prefix to `call` and `jmp` to indirect thunks (default: no)"), inline_llvm: bool = (true, parse_bool, [TRACKED], "enable LLVM inlining (default: yes)"), inline_mir: Option<bool> = (None, parse_opt_bool, [TRACKED], diff --git a/compiler/rustc_session/src/parse.rs b/compiler/rustc_session/src/parse.rs index 426480f0dba..9048c51d5b4 100644 --- a/compiler/rustc_session/src/parse.rs +++ b/compiler/rustc_session/src/parse.rs @@ -11,8 +11,8 @@ use rustc_data_structures::sync::{AppendOnlyVec, Lock}; use rustc_errors::emitter::{FatalOnlyEmitter, HumanEmitter, stderr_destination}; use rustc_errors::translation::Translator; use rustc_errors::{ - ColorConfig, Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, EmissionGuarantee, MultiSpan, - StashKey, + BufferedEarlyLint, ColorConfig, DecorateDiagCompat, Diag, DiagCtxt, DiagCtxtHandle, + DiagMessage, EmissionGuarantee, MultiSpan, StashKey, }; use rustc_feature::{GateIssue, UnstableFeatures, find_feature_issue}; use rustc_span::edition::Edition; @@ -27,7 +27,7 @@ use crate::errors::{ FeatureDiagnosticSuggestion, FeatureGateError, SuggestUpgradeCompiler, }; use crate::lint::builtin::UNSTABLE_SYNTAX_PRE_EXPANSION; -use crate::lint::{BufferedEarlyLint, BuiltinLintDiag, Lint, LintId}; +use crate::lint::{Lint, LintId}; /// Collected spans during parsing for places where a certain feature was /// used and should be feature gated accordingly in `check_crate`. @@ -342,17 +342,17 @@ impl ParseSess { lint: &'static Lint, span: impl Into<MultiSpan>, node_id: NodeId, - diagnostic: BuiltinLintDiag, + diagnostic: impl Into<DecorateDiagCompat>, ) { - self.opt_span_buffer_lint(lint, Some(span.into()), node_id, diagnostic) + self.opt_span_buffer_lint(lint, Some(span.into()), node_id, diagnostic.into()) } - pub fn opt_span_buffer_lint( + pub(crate) fn opt_span_buffer_lint( &self, lint: &'static Lint, span: Option<MultiSpan>, node_id: NodeId, - diagnostic: BuiltinLintDiag, + diagnostic: DecorateDiagCompat, ) { self.buffered_lints.with_lock(|buffered_lints| { buffered_lints.push(BufferedEarlyLint { diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index c6956cf5f23..96767d05439 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -7,6 +7,7 @@ use std::sync::atomic::AtomicBool; use std::{env, fmt, io}; use rand::{RngCore, rng}; +use rustc_ast::NodeId; use rustc_data_structures::base_n::{CASE_INSENSITIVE, ToBaseN}; use rustc_data_structures::flock; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; @@ -22,7 +23,7 @@ use rustc_errors::timings::TimingSectionHandler; use rustc_errors::translation::Translator; use rustc_errors::{ Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, Diagnostic, ErrorGuaranteed, FatalAbort, - TerminalUrl, fallback_fluent_bundle, + LintEmitter, TerminalUrl, fallback_fluent_bundle, }; use rustc_macros::HashStable_Generic; pub use rustc_span::def_id::StableCrateId; @@ -44,6 +45,7 @@ use crate::config::{ SwitchWithOptPath, }; use crate::filesearch::FileSearch; +use crate::lint::LintId; use crate::parse::{ParseSess, add_feature_diagnostics}; use crate::search_paths::SearchPath; use crate::{errors, filesearch, lint}; @@ -139,7 +141,10 @@ pub struct CompilerIO { pub temps_dir: Option<PathBuf>, } -pub trait LintStoreMarker: Any + DynSync + DynSend {} +pub trait DynLintStore: 
Any + DynSync + DynSend { + /// Provides a way to access lint groups without depending on `rustc_lint` + fn lint_groups_iter(&self) -> Box<dyn Iterator<Item = LintGroup> + '_>; +} /// Represents the data associated with a compilation /// session for a single crate. @@ -164,7 +169,7 @@ pub struct Session { pub code_stats: CodeStats, /// This only ever stores a `LintStore` but we don't want a dependency on that type here. - pub lint_store: Option<Arc<dyn LintStoreMarker>>, + pub lint_store: Option<Arc<dyn DynLintStore>>, /// Cap lint level specified by a driver specifically. pub driver_lint_caps: FxHashMap<lint::LintId, lint::Level>, @@ -219,6 +224,20 @@ pub struct Session { pub invocation_temp: Option<String>, } +impl LintEmitter for &'_ Session { + type Id = NodeId; + + fn emit_node_span_lint( + self, + lint: &'static rustc_lint_defs::Lint, + node_id: Self::Id, + span: impl Into<rustc_errors::MultiSpan>, + decorator: impl for<'a> rustc_errors::LintDiagnostic<'a, ()> + DynSend + 'static, + ) { + self.psess.buffer_lint(lint, span, node_id, decorator); + } +} + #[derive(Clone, Copy)] pub enum CodegenUnits { /// Specified by the user. In this case we try fairly hard to produce the @@ -240,6 +259,12 @@ impl CodegenUnits { } } +pub struct LintGroup { + pub name: &'static str, + pub lints: Vec<LintId>, + pub is_externally_loaded: bool, +} + impl Session { pub fn miri_unleashed_feature(&self, span: Span, feature_gate: Option<Symbol>) { self.miri_unleashed_features.lock().push((span, feature_gate)); @@ -596,6 +621,13 @@ impl Session { (&*self.target.staticlib_prefix, &*self.target.staticlib_suffix) } } + + pub fn lint_groups_iter(&self) -> Box<dyn Iterator<Item = LintGroup> + '_> { + match self.lint_store { + Some(ref lint_store) => lint_store.lint_groups_iter(), + None => Box::new(std::iter::empty()), + } + } } // JUSTIFICATION: defn of the suggested wrapper fns @@ -1368,6 +1400,12 @@ fn validate_commandline_args_with_session_available(sess: &Session) { } } + if sess.opts.unstable_opts.indirect_branch_cs_prefix { + if sess.target.arch != "x86" && sess.target.arch != "x86_64" { + sess.dcx().emit_err(errors::IndirectBranchCsPrefixRequiresX86OrX8664); + } + } + if let Some(regparm) = sess.opts.unstable_opts.regparm { if regparm > 3 { sess.dcx().emit_err(errors::UnsupportedRegparm { regparm }); diff --git a/compiler/rustc_span/src/caching_source_map_view.rs b/compiler/rustc_span/src/caching_source_map_view.rs index a887b50ec1e..41cfaa3ee34 100644 --- a/compiler/rustc_span/src/caching_source_map_view.rs +++ b/compiler/rustc_span/src/caching_source_map_view.rs @@ -123,27 +123,25 @@ impl<'sm> CachingSourceMapView<'sm> { if lo_cache_idx != -1 && hi_cache_idx != -1 { // Cache hit for span lo and hi. Check if they belong to the same file. 
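
The `DynLintStore`/`LintGroup`/`lint_groups_iter` pieces above form a small dependency-inversion seam: the session side only sees an object-safe trait, and the concrete lint store implements it elsewhere. A hedged sketch of the shape, with invented type names:

    struct LintGroup {
        name: &'static str,
        lint_count: usize,
    }

    // The lower-level crate only knows this object-safe trait...
    trait DynStore {
        fn groups(&self) -> Box<dyn Iterator<Item = LintGroup> + '_>;
    }

    // ...while the concrete store, defined higher up, implements it.
    struct ConcreteStore { names: Vec<&'static str> }

    impl DynStore for ConcreteStore {
        fn groups(&self) -> Box<dyn Iterator<Item = LintGroup> + '_> {
            Box::new(self.names.iter().copied().map(|name| LintGroup { name, lint_count: 0 }))
        }
    }

    fn groups_or_empty<'a>(store: Option<&'a dyn DynStore>) -> Box<dyn Iterator<Item = LintGroup> + 'a> {
        match store {
            Some(s) => s.groups(),
            None => Box::new(std::iter::empty()),
        }
    }
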
- let result = { - let lo = &self.line_cache[lo_cache_idx as usize]; - let hi = &self.line_cache[hi_cache_idx as usize]; + let lo_file_index = self.line_cache[lo_cache_idx as usize].file_index; + let hi_file_index = self.line_cache[hi_cache_idx as usize].file_index; - if lo.file_index != hi.file_index { - return None; - } - - ( - lo.file.stable_id, - lo.line_number, - span_data.lo - lo.line.start, - hi.line_number, - span_data.hi - hi.line.start, - ) - }; + if lo_file_index != hi_file_index { + return None; + } self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); - return Some(result); + let lo = &self.line_cache[lo_cache_idx as usize]; + let hi = &self.line_cache[hi_cache_idx as usize]; + return Some(( + lo.file.stable_id, + lo.line_number, + span_data.lo - lo.line.start, + hi.line_number, + span_data.hi - hi.line.start, + )); } // No cache hit or cache hit for only one of span lo and hi. diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs index d9315149798..8270de1e917 100644 --- a/compiler/rustc_span/src/source_map.rs +++ b/compiler/rustc_span/src/source_map.rs @@ -911,12 +911,13 @@ impl SourceMap { /// Returns a new span representing just the last character of this span. pub fn end_point(&self, sp: Span) -> Span { - let pos = sp.hi().0; + let sp = sp.data(); + let pos = sp.hi.0; let width = self.find_width_of_character_at_span(sp, false); let corrected_end_position = pos.checked_sub(width).unwrap_or(pos); - let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0)); + let end_point = BytePos(cmp::max(corrected_end_position, sp.lo.0)); sp.with_lo(end_point) } @@ -930,8 +931,9 @@ impl SourceMap { if sp.is_dummy() { return sp; } - let start_of_next_point = sp.hi().0; + let sp = sp.data(); + let start_of_next_point = sp.hi.0; let width = self.find_width_of_character_at_span(sp, true); // If the width is 1, then the next span should only contain the next char besides current ending. // However, in the case of a multibyte character, where the width != 1, the next span should @@ -940,7 +942,7 @@ impl SourceMap { start_of_next_point.checked_add(width).unwrap_or(start_of_next_point); let end_of_next_point = BytePos(cmp::max(start_of_next_point + 1, end_of_next_point)); - Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt(), None) + Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt, None) } /// Check whether span is followed by some specified expected string in limit scope @@ -963,9 +965,7 @@ impl SourceMap { /// Finds the width of the character, either before or after the end of provided span, /// depending on the `forwards` parameter. #[instrument(skip(self, sp))] - fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { - let sp = sp.data(); - + fn find_width_of_character_at_span(&self, sp: SpanData, forwards: bool) -> u32 { if sp.lo == sp.hi && !forwards { debug!("early return empty span"); return 1; diff --git a/compiler/rustc_span/src/span_encoding.rs b/compiler/rustc_span/src/span_encoding.rs index a4a47dc99b0..e34efa784de 100644 --- a/compiler/rustc_span/src/span_encoding.rs +++ b/compiler/rustc_span/src/span_encoding.rs @@ -74,9 +74,8 @@ use crate::{BytePos, SPAN_TRACK, SpanData}; /// because `parent` isn't currently used by default. /// /// In order to reliably use parented spans in incremental compilation, -/// the dependency to the parent definition's span. 
This is performed -/// using the callback `SPAN_TRACK` to access the query engine. -/// +/// accesses to `lo` and `hi` must introduce a dependency to the parent definition's span. +/// This is performed using the callback `SPAN_TRACK` to access the query engine. #[derive(Clone, Copy, Eq, PartialEq, Hash)] #[rustc_pass_by_value] pub struct Span { diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 65a52b27d3b..5d140cc6117 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -901,6 +901,7 @@ symbols! { dynamic_no_pic: "dynamic-no-pic", e, edition_panic, + effective_target_features, effects, eh_catch_typeinfo, eh_personality, @@ -1061,6 +1062,7 @@ symbols! { fn_ptr_addr, fn_ptr_trait, forbid, + force_target_feature, forget, format, format_args, @@ -1754,6 +1756,7 @@ symbols! { readonly, realloc, reason, + reborrow, receiver, receiver_target, recursion_limit, @@ -1837,6 +1840,7 @@ symbols! { rustc_align, rustc_allocator, rustc_allocator_zeroed, + rustc_allocator_zeroed_variant, rustc_allow_const_fn_unstable, rustc_allow_incoherent_impl, rustc_allowed_through_unstable_modules, @@ -2532,10 +2536,16 @@ impl fmt::Debug for Ident { /// except that AST identifiers don't keep the rawness flag, so we have to guess it. impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&IdentPrinter::new(self.name, self.is_raw_guess(), None), f) + fmt::Display::fmt(&IdentPrinter::new(self.name, self.guess_print_mode(), None), f) } } +pub enum IdentPrintMode { + Normal, + RawIdent, + RawLifetime, +} + /// The most general type to print identifiers. /// /// AST pretty-printer is used as a fallback for turning AST structures into token streams for @@ -2551,7 +2561,7 @@ impl fmt::Display for Ident { /// done for a token stream or a single token. pub struct IdentPrinter { symbol: Symbol, - is_raw: bool, + mode: IdentPrintMode, /// Span used for retrieving the crate name to which `$crate` refers to, /// if this field is `None` then the `$crate` conversion doesn't happen. convert_dollar_crate: Option<Span>, @@ -2559,32 +2569,51 @@ pub struct IdentPrinter { impl IdentPrinter { /// The most general `IdentPrinter` constructor. Do not use this. - pub fn new(symbol: Symbol, is_raw: bool, convert_dollar_crate: Option<Span>) -> IdentPrinter { - IdentPrinter { symbol, is_raw, convert_dollar_crate } + pub fn new( + symbol: Symbol, + mode: IdentPrintMode, + convert_dollar_crate: Option<Span>, + ) -> IdentPrinter { + IdentPrinter { symbol, mode, convert_dollar_crate } } /// This implementation is supposed to be used when printing identifiers /// as a part of pretty-printing for larger AST pieces. /// Do not use this either. 
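
A standalone illustration of the three print modes just introduced, using plain strings instead of `Symbol`/`Ident` (so none of this is rustc's actual API):

    enum PrintMode { Normal, RawIdent, RawLifetime }

    fn print_name(name: &str, mode: PrintMode) -> String {
        match mode {
            PrintMode::Normal => name.to_string(),
            PrintMode::RawIdent => format!("r#{name}"),
            // Lifetime names carry their leading quote, so strip it before
            // re-adding it together with the `r#` marker.
            PrintMode::RawLifetime => {
                let body = name.strip_prefix('\'').expect("lifetimes start with '");
                format!("'r#{body}")
            }
        }
    }

    fn main() {
        assert_eq!(print_name("dyn", PrintMode::RawIdent), "r#dyn");
        assert_eq!(print_name("'unsafe", PrintMode::RawLifetime), "'r#unsafe");
    }
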
- pub fn for_ast_ident(ident: Ident, is_raw: bool) -> IdentPrinter { - IdentPrinter::new(ident.name, is_raw, Some(ident.span)) + pub fn for_ast_ident(ident: Ident, mode: IdentPrintMode) -> IdentPrinter { + IdentPrinter::new(ident.name, mode, Some(ident.span)) } } impl fmt::Display for IdentPrinter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.is_raw { - f.write_str("r#")?; - } else if self.symbol == kw::DollarCrate { - if let Some(span) = self.convert_dollar_crate { + let s = match self.mode { + IdentPrintMode::Normal + if self.symbol == kw::DollarCrate + && let Some(span) = self.convert_dollar_crate => + { let converted = span.ctxt().dollar_crate_name(); if !converted.is_path_segment_keyword() { f.write_str("::")?; } - return fmt::Display::fmt(&converted, f); + converted } - } - fmt::Display::fmt(&self.symbol, f) + IdentPrintMode::Normal => self.symbol, + IdentPrintMode::RawIdent => { + f.write_str("r#")?; + self.symbol + } + IdentPrintMode::RawLifetime => { + f.write_str("'r#")?; + let s = self + .symbol + .as_str() + .strip_prefix("'") + .expect("only lifetime idents should be passed with RawLifetime mode"); + Symbol::intern(s) + } + }; + s.fmt(f) } } @@ -3019,6 +3048,29 @@ impl Ident { self.name.can_be_raw() && self.is_reserved() } + /// Given the name of a lifetime without the first quote (`'`), + /// returns whether the lifetime name is reserved (therefore invalid) + pub fn is_reserved_lifetime(self) -> bool { + self.is_reserved() && ![kw::Underscore, kw::Static].contains(&self.name) + } + + pub fn is_raw_lifetime_guess(self) -> bool { + let name_without_apostrophe = self.without_first_quote(); + name_without_apostrophe.name != self.name + && name_without_apostrophe.name.can_be_raw() + && name_without_apostrophe.is_reserved_lifetime() + } + + pub fn guess_print_mode(self) -> IdentPrintMode { + if self.is_raw_lifetime_guess() { + IdentPrintMode::RawLifetime + } else if self.is_raw_guess() { + IdentPrintMode::RawIdent + } else { + IdentPrintMode::Normal + } + } + /// Whether this would be the identifier for a tuple field like `self.0`, as /// opposed to a named field like `self.thing`. pub fn is_numeric(self) -> bool { diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs index f3c96f64190..d97ee956525 100644 --- a/compiler/rustc_symbol_mangling/src/lib.rs +++ b/compiler/rustc_symbol_mangling/src/lib.rs @@ -193,29 +193,20 @@ fn compute_symbol_name<'tcx>( // defining crate. // Weak lang items automatically get #[rustc_std_internal_symbol] // applied by the code computing the CodegenFnAttrs. - // We are mangling all #[rustc_std_internal_symbol] items that don't - // also have #[no_mangle] as a combination of the rustc version and the - // unmangled linkage name. This is to ensure that if we link against a - // staticlib compiled by a different rustc version, we don't get symbol - // conflicts or even UB due to a different implementation/ABI. Rust - // staticlibs currently export all symbols, including those that are - // hidden in cdylibs. + // We are mangling all #[rustc_std_internal_symbol] items as a + // combination of the rustc version and the unmangled linkage name. + // This is to ensure that if we link against a staticlib compiled by a + // different rustc version, we don't get symbol conflicts or even UB + // due to a different implementation/ABI. Rust staticlibs currently + // export all symbols, including those that are hidden in cdylibs. 
// We are using the v0 symbol mangling scheme here as we need to be // consistent across all crates and in some contexts the legacy symbol // mangling scheme can't be used. For example both the GCC backend and // Rust-for-Linux don't support some of the characters used by the // legacy symbol mangling scheme. - let name = if tcx.is_foreign_item(def_id) { - if let Some(name) = attrs.link_name { name } else { tcx.item_name(def_id) } - } else { - if let Some(name) = attrs.export_name { name } else { tcx.item_name(def_id) } - }; + let name = if let Some(name) = attrs.symbol_name { name } else { tcx.item_name(def_id) }; - if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { - return name.to_string(); - } else { - return v0::mangle_internal_symbol(tcx, name.as_str()); - } + return v0::mangle_internal_symbol(tcx, name.as_str()); } let wasm_import_module_exception_force_mangling = { @@ -240,23 +231,16 @@ fn compute_symbol_name<'tcx>( && tcx.wasm_import_module_map(LOCAL_CRATE).contains_key(&def_id.into()) }; - if let Some(name) = attrs.link_name - && !wasm_import_module_exception_force_mangling - { - // Use provided name - return name.to_string(); - } - - if let Some(name) = attrs.export_name { - // Use provided name - return name.to_string(); - } + if !wasm_import_module_exception_force_mangling { + if let Some(name) = attrs.symbol_name { + // Use provided name + return name.to_string(); + } - if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) - && !wasm_import_module_exception_force_mangling - { - // Don't mangle - return tcx.item_name(def_id).to_string(); + if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { + // Don't mangle + return tcx.item_name(def_id).to_string(); + } } // If we're dealing with an instance of a function that's inlined from diff --git a/compiler/rustc_target/Cargo.toml b/compiler/rustc_target/Cargo.toml index 56932c24922..57c90a703f1 100644 --- a/compiler/rustc_target/Cargo.toml +++ b/compiler/rustc_target/Cargo.toml @@ -8,6 +8,7 @@ edition = "2024" bitflags = "2.4.1" rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_error_messages = { path = "../rustc_error_messages" } rustc_fs_util = { path = "../rustc_fs_util" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } diff --git a/compiler/rustc_target/src/callconv/mod.rs b/compiler/rustc_target/src/callconv/mod.rs index 63e56744aec..5f2a6f7ba38 100644 --- a/compiler/rustc_target/src/callconv/mod.rs +++ b/compiler/rustc_target/src/callconv/mod.rs @@ -119,6 +119,7 @@ mod attr_impl { const ReadOnly = 1 << 4; const InReg = 1 << 5; const NoUndef = 1 << 6; + const CapturesReadOnly = 1 << 7; } } rustc_data_structures::external_bitflags_debug! 
{ ArgAttribute } diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 2aa8ab5d317..a67795e3e93 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -50,6 +50,7 @@ use rustc_abi::{ Align, CanonAbi, Endian, ExternAbi, Integer, Size, TargetDataLayout, TargetDataLayoutErrors, }; use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; +use rustc_error_messages::{DiagArgValue, IntoDiagArg, into_diag_arg_using_display}; use rustc_fs_util::try_canonicalize; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; @@ -875,6 +876,12 @@ impl FromStr for PanicStrategy { crate::json::serde_deserialize_from_str!(PanicStrategy); +impl IntoDiagArg for PanicStrategy { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(Cow::Owned(self.desc().to_string())) + } +} + impl ToJson for PanicStrategy { fn to_json(&self) -> Json { match *self { @@ -1518,6 +1525,8 @@ impl fmt::Display for SplitDebuginfo { } } +into_diag_arg_using_display!(SplitDebuginfo); + #[derive(Clone, Debug, PartialEq, Eq, serde_derive::Deserialize)] #[serde(tag = "kind")] #[serde(rename_all = "kebab-case")] @@ -1795,6 +1804,8 @@ impl fmt::Display for StackProtector { } } +into_diag_arg_using_display!(StackProtector); + #[derive(PartialEq, Clone, Debug)] pub enum BinaryFormat { Coff, @@ -2105,6 +2116,7 @@ supported_targets! { ("msp430-none-elf", msp430_none_elf), + ("aarch64_be-unknown-hermit", aarch64_be_unknown_hermit), ("aarch64-unknown-hermit", aarch64_unknown_hermit), ("riscv64gc-unknown-hermit", riscv64gc_unknown_hermit), ("x86_64-unknown-hermit", x86_64_unknown_hermit), @@ -3806,3 +3818,5 @@ impl fmt::Display for TargetTuple { write!(f, "{}", self.debug_tuple()) } } + +into_diag_arg_using_display!(&TargetTuple); diff --git a/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_hermit.rs b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_hermit.rs new file mode 100644 index 00000000000..cad57abc2b1 --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_hermit.rs @@ -0,0 +1,25 @@ +use rustc_abi::Endian; + +use crate::spec::{StackProbeType, Target, TargetMetadata, TargetOptions, base}; + +pub(crate) fn target() -> Target { + Target { + llvm_target: "aarch64_be-unknown-hermit".into(), + metadata: TargetMetadata { + description: Some("ARM64 Hermit (big-endian)".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(true), + }, + pointer_width: 64, + arch: "aarch64".into(), + data_layout: "E-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(), + options: TargetOptions { + features: "+v8a,+strict-align,+neon,+fp-armv8".into(), + max_atomic_width: Some(128), + stack_probes: StackProbeType::Inline, + endian: Endian::Big, + ..base::hermit::opts() + }, + } +} diff --git a/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs b/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs index 9b81362b27d..61e4cad3fa2 100644 --- a/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs +++ b/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs @@ -19,7 +19,7 @@ pub(crate) fn target() -> Target { data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(), arch: "aarch64".into(), options: TargetOptions { - features: 
"+v8a".into(), + features: "+v8a,+neon,+crypto,+crc".into(), linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), linker: Some("rust-lld".into()), link_script: Some(LINKER_SCRIPT.into()), diff --git a/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld b/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld index a4783de0183..0f3e6eeee19 100644 --- a/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld +++ b/compiler/rustc_target/src/spec/targets/armv7a_vex_v5_linker_script.ld @@ -17,15 +17,25 @@ PROVIDE(__vcodesig_type = 0); /* V5_SIG_TYPE_USER */ PROVIDE(__vcodesig_owner = 2); /* V5_SIG_OWNER_PARTNER */ PROVIDE(__vcodesig_options = 0); /* none (0) */ -PROVIDE(__user_ram_start = 0x03800000); -PROVIDE(__user_ram_length = 48M); -PROVIDE(__user_ram_end = __user_ram_start + __user_ram_length); /* 0x8000000 */ +__user_ram_start = 0x03800000; +__user_ram_end = 0x08000000; +/* (0x48 =) 72 MiB length */ +__user_ram_length = __user_ram_start - __user_ram_end; -PROVIDE(__code_signature_length = 0x20); +/* + * VEXos provides a method for pre-loading a "linked file" at a specified + * address in User RAM, conventionally near the end, after the primary + * program binary. We need to be sure not to place any data in that location, + * so we allow the user of this linker script to inform the start address of + * this blob. + */ +PROVIDE(__linked_file_length = 0); +PROVIDE(__linked_file_end = __user_ram_end); +PROVIDE(__linked_file_start = __linked_file_end - __linked_file_length); PROVIDE(__stack_length = 4M); -PROVIDE(__heap_end = __user_ram_end - __stack_length); -PROVIDE(__user_length = __heap_start - __user_ram_start); +PROVIDE(__stack_top = __linked_file_start); +PROVIDE(__stack_bottom = __linked_file_start - __stack_length); MEMORY { USER_RAM (RWX) : ORIGIN = __user_ram_start, LENGTH = __user_ram_length @@ -44,7 +54,7 @@ SECTIONS { LONG(__vcodesig_options) FILL(0) - . = __user_ram_start + __code_signature_length; + . = __user_ram_start + 0x20; } > USER_RAM /* @@ -125,7 +135,8 @@ SECTIONS { */ .heap (NOLOAD) : { __heap_start = .; - . = __heap_end; + . 
= __stack_bottom; + __heap_end = .; } > USER_RAM .stack (NOLOAD) : ALIGN(8) { diff --git a/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs b/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs index 53e2cb469ee..8892c50d844 100644 --- a/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs +++ b/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs @@ -7,7 +7,7 @@ pub(crate) fn target() -> Target { llvm_target, metadata: TargetMetadata { description: Some("x86_64 Apple macOS (10.12+, Sierra+)".into()), - tier: Some(1), + tier: Some(2), host_tools: Some(true), std: Some(true), }, diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs index 507e938d5cc..e45300b59cc 100644 --- a/compiler/rustc_target/src/target_features.rs +++ b/compiler/rustc_target/src/target_features.rs @@ -767,6 +767,7 @@ static CSKY_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ static LOONGARCH_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ // tidy-alphabetical-start + ("32s", Unstable(sym::loongarch_target_feature), &[]), ("d", Stable, &["f"]), ("div32", Unstable(sym::loongarch_target_feature), &[]), ("f", Stable, &[]), diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index f31a85ec07a..40285e5d0e9 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -537,7 +537,7 @@ impl<T> Trait<T> for X { } } TypeError::TargetFeatureCast(def_id) => { - let target_spans = find_attr!(tcx.get_all_attrs(def_id), AttributeKind::TargetFeature(.., span) => *span); + let target_spans = find_attr!(tcx.get_all_attrs(def_id), AttributeKind::TargetFeature{attr_span: span, was_forced: false, ..} => *span); diag.note( "functions with `#[target_feature]` can only be coerced to `unsafe` function pointers" ); diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 214a6e86d39..d768e0bf63f 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -275,8 +275,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { *err.long_ty_path() = long_ty_file; let mut suggested = false; + let mut noted_missing_impl = false; if is_try_conversion || is_question_mark { - suggested = self.try_conversion_context(&obligation, main_trait_predicate, &mut err); + (suggested, noted_missing_impl) = self.try_conversion_context(&obligation, main_trait_predicate, &mut err); } if let Some(ret_span) = self.return_type_span(&obligation) { @@ -335,6 +336,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { return err.emit(); } + let ty_span = match leaf_trait_predicate.self_ty().skip_binder().kind() { + ty::Adt(def, _) if def.did().is_local() + && !self.can_suggest_derive(&obligation, leaf_trait_predicate) => self.tcx.def_span(def.did()), + _ => DUMMY_SP, + }; if let Some(s) = label { // If it has a custom `#[rustc_on_unimplemented]` // error message, let's display it as the label! @@ -347,15 +353,28 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // Don't say "the trait `FromResidual<Option<Infallible>>` is // not implemented for `Result<T, E>`". 
{ - err.help(explanation); + // We do this just so that the JSON output's `help` position is the + // right one and not `file.rs:1:1`. The render is the same. + if ty_span == DUMMY_SP { + err.help(explanation); + } else { + err.span_help(ty_span, explanation); + } } } else if let Some(custom_explanation) = safe_transmute_explanation { err.span_label(span, custom_explanation); - } else if explanation.len() > self.tcx.sess.diagnostic_width() { + } else if (explanation.len() > self.tcx.sess.diagnostic_width() || ty_span != DUMMY_SP) && !noted_missing_impl { // Really long types don't look good as span labels, instead move it // to a `help`. err.span_label(span, "unsatisfied trait bound"); - err.help(explanation); + + // We do this just so that the JSON output's `help` position is the + // right one and not `file.rs:1:1`. The render is the same. + if ty_span == DUMMY_SP { + err.help(explanation); + } else { + err.span_help(ty_span, explanation); + } } else { err.span_label(span, explanation); } @@ -939,7 +958,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { obligation: &PredicateObligation<'tcx>, trait_pred: ty::PolyTraitPredicate<'tcx>, err: &mut Diag<'_>, - ) -> bool { + ) -> (bool, bool) { let span = obligation.cause.span; /// Look for the (direct) sub-expr of `?`, and return it if it's a `.` method call. struct FindMethodSubexprOfTry { @@ -959,21 +978,22 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } let hir_id = self.tcx.local_def_id_to_hir_id(obligation.cause.body_id); - let Some(body_id) = self.tcx.hir_node(hir_id).body_id() else { return false }; + let Some(body_id) = self.tcx.hir_node(hir_id).body_id() else { return (false, false) }; let ControlFlow::Break(expr) = (FindMethodSubexprOfTry { search_span: span }).visit_body(self.tcx.hir_body(body_id)) else { - return false; + return (false, false); }; let Some(typeck) = &self.typeck_results else { - return false; + return (false, false); }; let ObligationCauseCode::QuestionMark = obligation.cause.code().peel_derives() else { - return false; + return (false, false); }; let self_ty = trait_pred.skip_binder().self_ty(); let found_ty = trait_pred.skip_binder().trait_ref.args.get(1).and_then(|a| a.as_type()); - self.note_missing_impl_for_question_mark(err, self_ty, found_ty, trait_pred); + let noted_missing_impl = + self.note_missing_impl_for_question_mark(err, self_ty, found_ty, trait_pred); let mut prev_ty = self.resolve_vars_if_possible( typeck.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(self.tcx)), @@ -1137,7 +1157,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } prev = Some(err_ty); } - suggested + (suggested, noted_missing_impl) } fn note_missing_impl_for_question_mark( @@ -1146,7 +1166,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self_ty: Ty<'_>, found_ty: Option<Ty<'_>>, trait_pred: ty::PolyTraitPredicate<'tcx>, - ) { + ) -> bool { match (self_ty.kind(), found_ty) { (ty::Adt(def, _), Some(ty)) if let ty::Adt(found, _) = ty.kind() @@ -1187,8 +1207,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { format!("`{ty}` needs to implement `Into<{self_ty}>`"), ); } - _ => {} + _ => return false, } + true } fn report_const_param_not_wf( diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index bc8c8a44405..e31ff8b8729 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -2878,7 +2878,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // 
we check if `TraitB` can be reachable from `S` // to determine whether to note `TraitA` is sealed trait. if let ty::Adt(adt, _) = ty.kind() { - let visibilities = tcx.effective_visibilities(()); + let visibilities = &tcx.resolutions(()).effective_visibilities; visibilities.effective_vis(local).is_none_or(|v| { v.at_level(Level::Reexported) .is_accessible_from(adt.did(), tcx) @@ -3853,59 +3853,71 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } - pub fn suggest_derive( + pub fn can_suggest_derive( &self, obligation: &PredicateObligation<'tcx>, - err: &mut Diag<'_>, trait_pred: ty::PolyTraitPredicate<'tcx>, - ) { + ) -> bool { if trait_pred.polarity() == ty::PredicatePolarity::Negative { - return; + return false; } let Some(diagnostic_name) = self.tcx.get_diagnostic_name(trait_pred.def_id()) else { - return; + return false; }; let (adt, args) = match trait_pred.skip_binder().self_ty().kind() { ty::Adt(adt, args) if adt.did().is_local() => (adt, args), - _ => return, + _ => return false, }; - let can_derive = { - let is_derivable_trait = match diagnostic_name { - sym::Default => !adt.is_enum(), - sym::PartialEq | sym::PartialOrd => { - let rhs_ty = trait_pred.skip_binder().trait_ref.args.type_at(1); - trait_pred.skip_binder().self_ty() == rhs_ty - } - sym::Eq | sym::Ord | sym::Clone | sym::Copy | sym::Hash | sym::Debug => true, - _ => false, - }; - is_derivable_trait && - // Ensure all fields impl the trait. - adt.all_fields().all(|field| { - let field_ty = ty::GenericArg::from(field.ty(self.tcx, args)); - let trait_args = match diagnostic_name { - sym::PartialEq | sym::PartialOrd => { - Some(field_ty) - } - _ => None, - }; - let trait_pred = trait_pred.map_bound_ref(|tr| ty::TraitPredicate { - trait_ref: ty::TraitRef::new(self.tcx, - trait_pred.def_id(), - [field_ty].into_iter().chain(trait_args), - ), - ..*tr - }); - let field_obl = Obligation::new( - self.tcx, - obligation.cause.clone(), - obligation.param_env, - trait_pred, - ); - self.predicate_must_hold_modulo_regions(&field_obl) - }) + let is_derivable_trait = match diagnostic_name { + sym::Default => !adt.is_enum(), + sym::PartialEq | sym::PartialOrd => { + let rhs_ty = trait_pred.skip_binder().trait_ref.args.type_at(1); + trait_pred.skip_binder().self_ty() == rhs_ty + } + sym::Eq | sym::Ord | sym::Clone | sym::Copy | sym::Hash | sym::Debug => true, + _ => false, + }; + is_derivable_trait && + // Ensure all fields impl the trait. 
+ adt.all_fields().all(|field| { + let field_ty = ty::GenericArg::from(field.ty(self.tcx, args)); + let trait_args = match diagnostic_name { + sym::PartialEq | sym::PartialOrd => { + Some(field_ty) + } + _ => None, + }; + let trait_pred = trait_pred.map_bound_ref(|tr| ty::TraitPredicate { + trait_ref: ty::TraitRef::new(self.tcx, + trait_pred.def_id(), + [field_ty].into_iter().chain(trait_args), + ), + ..*tr + }); + let field_obl = Obligation::new( + self.tcx, + obligation.cause.clone(), + obligation.param_env, + trait_pred, + ); + self.predicate_must_hold_modulo_regions(&field_obl) + }) + } + + pub fn suggest_derive( + &self, + obligation: &PredicateObligation<'tcx>, + err: &mut Diag<'_>, + trait_pred: ty::PolyTraitPredicate<'tcx>, + ) { + let Some(diagnostic_name) = self.tcx.get_diagnostic_name(trait_pred.def_id()) else { + return; + }; + let adt = match trait_pred.skip_binder().self_ty().kind() { + ty::Adt(adt, _) if adt.did().is_local() => adt, + _ => return, }; - if can_derive { + if self.can_suggest_derive(obligation, trait_pred) { err.span_suggestion_verbose( self.tcx.def_span(adt.did()).shrink_to_lo(), format!( diff --git a/compiler/rustc_trait_selection/src/opaque_types.rs b/compiler/rustc_trait_selection/src/opaque_types.rs index d5bde9192d5..bc7bdd372ba 100644 --- a/compiler/rustc_trait_selection/src/opaque_types.rs +++ b/compiler/rustc_trait_selection/src/opaque_types.rs @@ -4,8 +4,8 @@ use rustc_hir::def_id::LocalDefId; use rustc_infer::infer::outlives::env::OutlivesEnvironment; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; use rustc_middle::ty::{ - self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, TyCtxt, TypeVisitableExt, - TypingMode, fold_regions, + self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, Ty, TyCtxt, + TypeVisitableExt, TypingMode, fold_regions, }; use rustc_span::{ErrorGuaranteed, Span}; @@ -13,22 +13,23 @@ use crate::errors::NonGenericOpaqueTypeParam; use crate::regions::OutlivesEnvironmentBuildExt; use crate::traits::ObligationCtxt; -pub enum InvalidOpaqueTypeArgs<'tcx> { - AlreadyReported(ErrorGuaranteed), +#[derive(Debug)] +pub enum NonDefiningUseReason<'tcx> { + Tainted(ErrorGuaranteed), NotAParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_index: usize, span: Span }, DuplicateParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_indices: Vec<usize>, span: Span }, } -impl From<ErrorGuaranteed> for InvalidOpaqueTypeArgs<'_> { +impl From<ErrorGuaranteed> for NonDefiningUseReason<'_> { fn from(guar: ErrorGuaranteed) -> Self { - InvalidOpaqueTypeArgs::AlreadyReported(guar) + NonDefiningUseReason::Tainted(guar) } } -impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { +impl<'tcx> NonDefiningUseReason<'tcx> { pub fn report(self, infcx: &InferCtxt<'tcx>) -> ErrorGuaranteed { let tcx = infcx.tcx; match self { - InvalidOpaqueTypeArgs::AlreadyReported(guar) => guar, - InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index, span } => { + NonDefiningUseReason::Tainted(guar) => guar, + NonDefiningUseReason::NotAParam { opaque_type_key, param_index, span } => { let opaque_generics = tcx.generics_of(opaque_type_key.def_id); let opaque_param = opaque_generics.param_at(param_index, tcx); let kind = opaque_param.kind.descr(); @@ -39,7 +40,7 @@ impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { param_span: tcx.def_span(opaque_param.def_id), }) } - InvalidOpaqueTypeArgs::DuplicateParam { opaque_type_key, param_indices, span } => { + NonDefiningUseReason::DuplicateParam { opaque_type_key, param_indices, span } => { let opaque_generics = 
tcx.generics_of(opaque_type_key.def_id); let descr = opaque_generics.param_at(param_indices[0], tcx).kind.descr(); let spans: Vec<_> = param_indices @@ -57,15 +58,17 @@ impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { } /// Opaque type parameter validity check as documented in the [rustc-dev-guide chapter]. +/// With the new solver, uses which fail this check are simply treated as non-defining +/// and we only emit an error if no defining use exists. /// /// [rustc-dev-guide chapter]: /// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html -pub fn check_opaque_type_parameter_valid<'tcx>( +pub fn opaque_type_has_defining_use_args<'tcx>( infcx: &InferCtxt<'tcx>, opaque_type_key: OpaqueTypeKey<'tcx>, span: Span, defining_scope_kind: DefiningScopeKind, -) -> Result<(), InvalidOpaqueTypeArgs<'tcx>> { +) -> Result<(), NonDefiningUseReason<'tcx>> { let tcx = infcx.tcx; let opaque_env = LazyOpaqueTyEnv::new(tcx, opaque_type_key.def_id); let mut seen_params: FxIndexMap<_, Vec<_>> = FxIndexMap::default(); @@ -104,13 +107,13 @@ pub fn check_opaque_type_parameter_valid<'tcx>( } else { // Prevent `fn foo() -> Foo<u32>` from being defining. opaque_env.param_is_error(i)?; - return Err(InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index: i, span }); + return Err(NonDefiningUseReason::NotAParam { opaque_type_key, param_index: i, span }); } } for (_, param_indices) in seen_params { if param_indices.len() > 1 { - return Err(InvalidOpaqueTypeArgs::DuplicateParam { + return Err(NonDefiningUseReason::DuplicateParam { opaque_type_key, param_indices, span, @@ -205,3 +208,27 @@ impl<'tcx> LazyOpaqueTyEnv<'tcx> { canonical_args } } + +pub fn report_item_does_not_constrain_error<'tcx>( + tcx: TyCtxt<'tcx>, + item_def_id: LocalDefId, + def_id: LocalDefId, + non_defining_use: Option<(OpaqueTypeKey<'tcx>, Span)>, +) -> ErrorGuaranteed { + let span = tcx.def_ident_span(item_def_id).unwrap_or_else(|| tcx.def_span(item_def_id)); + let opaque_type_span = tcx.def_span(def_id); + let opaque_type_name = tcx.def_path_str(def_id); + + let mut err = + tcx.dcx().struct_span_err(span, format!("item does not constrain `{opaque_type_name}`")); + err.note("consider removing `#[define_opaque]` or adding an empty `#[define_opaque()]`"); + err.span_note(opaque_type_span, "this opaque type is supposed to be constrained"); + if let Some((key, span)) = non_defining_use { + let opaque_ty = Ty::new_opaque(tcx, key.def_id.into(), key.args); + err.span_note( + span, + format!("this use of `{opaque_ty}` does not have unique universal generic arguments"), + ); + } + err.emit() +} diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index 3f628d80662..575e0472e0e 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -252,7 +252,9 @@ where // inside of an opaque type, e.g. if there's `Opaque = (?x, ?x)` in the // storage, we can also rely on structural identity of `?x` even if we // later uniquify it in MIR borrowck. 
- if infcx.in_hir_typeck && obligation.has_non_region_infer() { + if infcx.in_hir_typeck + && (obligation.has_non_region_infer() || obligation.has_free_regions()) + { infcx.push_hir_typeck_potentially_region_dependent_goal(obligation); } } diff --git a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs index ea1eed95723..4b493c95d59 100644 --- a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs +++ b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs @@ -106,6 +106,10 @@ fn dyn_compatibility_violations_for_trait( if !spans.is_empty() { violations.push(DynCompatibilityViolation::SupertraitNonLifetimeBinder(spans)); } + let spans = super_predicates_are_unconditionally_const(tcx, trait_def_id); + if !spans.is_empty() { + violations.push(DynCompatibilityViolation::SupertraitConst(spans)); + } violations } @@ -247,16 +251,31 @@ fn super_predicates_have_non_lifetime_binders( tcx: TyCtxt<'_>, trait_def_id: DefId, ) -> SmallVec<[Span; 1]> { - // If non_lifetime_binders is disabled, then exit early - if !tcx.features().non_lifetime_binders() { - return SmallVec::new(); - } tcx.explicit_super_predicates_of(trait_def_id) .iter_identity_copied() .filter_map(|(pred, span)| pred.has_non_region_bound_vars().then_some(span)) .collect() } +/// Checks for `const Trait` supertraits. We're okay with `[const] Trait` +/// supertraits, since for a non-const instantiation of that trait, the +/// conditionally-const supertrait is also not required to be const. +fn super_predicates_are_unconditionally_const( + tcx: TyCtxt<'_>, + trait_def_id: DefId, +) -> SmallVec<[Span; 1]> { + tcx.explicit_super_predicates_of(trait_def_id) + .iter_identity_copied() + .filter_map(|(pred, span)| { + if let ty::ClauseKind::HostEffect(_) = pred.kind().skip_binder() { + Some(span) + } else { + None + } + }) + .collect() +} + fn trait_has_sized_self(tcx: TyCtxt<'_>, trait_def_id: DefId) -> bool { tcx.generics_require_sized_self(trait_def_id) } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs index 0ca2d216228..6ce68507d65 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs @@ -1,6 +1,7 @@ use std::fmt; use rustc_errors::ErrorGuaranteed; +use rustc_hir::def_id::LocalDefId; use rustc_infer::infer::region_constraints::RegionConstraintData; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{TyCtxt, TypeFoldable}; @@ -42,13 +43,14 @@ where fn fully_perform( self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> { if cfg!(debug_assertions) { info!("fully_perform({:?})", self); } - Ok(scrape_region_constraints(infcx, self.closure, self.description, span)?.0) + Ok(scrape_region_constraints(infcx, root_def_id, self.description, span, self.closure)?.0) } } @@ -62,9 +64,10 @@ impl<F> fmt::Debug for CustomTypeOp<F> { /// constraints that result, creating query-region-constraints.
pub fn scrape_region_constraints<'tcx, Op, R>( infcx: &InferCtxt<'tcx>, - op: impl FnOnce(&ObligationCtxt<'_, 'tcx>) -> Result<R, NoSolution>, + root_def_id: LocalDefId, name: &'static str, span: Span, + op: impl FnOnce(&ObligationCtxt<'_, 'tcx>) -> Result<R, NoSolution>, ) -> Result<(TypeOpOutput<'tcx, Op>, RegionConstraintData<'tcx>), ErrorGuaranteed> where R: TypeFoldable<TyCtxt<'tcx>>, @@ -94,6 +97,8 @@ where let errors = ocx.select_all_or_error(); if errors.is_empty() { Ok(value) + } else if let Err(guar) = infcx.tcx.check_potentially_region_dependent_goals(root_def_id) { + Err(guar) } else { Err(infcx .dcx() diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs index 018e9748cf0..4b8bf868123 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs @@ -1,6 +1,7 @@ use std::fmt; use rustc_errors::ErrorGuaranteed; +use rustc_hir::def_id::LocalDefId; use rustc_infer::traits::PredicateObligations; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{ParamEnvAnd, TyCtxt, TypeFoldable}; @@ -37,6 +38,7 @@ pub trait TypeOp<'tcx>: Sized + fmt::Debug { fn fully_perform( self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed>; } @@ -140,6 +142,7 @@ where fn fully_perform( self, infcx: &InferCtxt<'tcx>, + root_def_id: LocalDefId, span: Span, ) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> { // In the new trait solver, query type ops are performed locally. This @@ -152,9 +155,10 @@ where if infcx.next_trait_solver() { return Ok(scrape_region_constraints( infcx, - |ocx| QueryTypeOp::perform_locally_with_next_solver(ocx, self, span), + root_def_id, "query type op", span, + |ocx| QueryTypeOp::perform_locally_with_next_solver(ocx, self, span), )? .0); } @@ -166,19 +170,15 @@ where // we sometimes end up with `Opaque<'a> = Opaque<'b>` instead of an actual hidden type. In that case we don't register a // hidden type but just equate the lifetimes. Thus we need to scrape the region constraints even though we're also manually // collecting region constraints via `region_constraints`. 
- let (mut output, _) = scrape_region_constraints( - infcx, - |ocx| { + let (mut output, _) = + scrape_region_constraints(infcx, root_def_id, "fully_perform", span, |ocx| { let (output, ei, obligations, _) = Q::fully_perform_into(self, infcx, &mut region_constraints, span)?; error_info = ei; ocx.register_obligations(obligations); Ok(output) - }, - "fully_perform", - span, - )?; + })?; output.error_info = error_info; if let Some(QueryRegionConstraints { outlives, assumptions }) = output.constraints { region_constraints.outlives.extend(outlives.iter().cloned()); diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 468c42abf48..1dd31990ab7 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -4,9 +4,9 @@ use std::assert_matches::assert_matches; use std::cell::{Cell, RefCell}; +use std::cmp; use std::fmt::{self, Display}; use std::ops::ControlFlow; -use std::{cmp, iter}; use hir::def::DefKind; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; @@ -2185,32 +2185,23 @@ impl<'tcx> SelectionContext<'_, 'tcx> { ty::Binder::dummy(vec![ty]) } - ty::Coroutine(coroutine_def_id, args) => { - match self.tcx().coroutine_movability(coroutine_def_id) { - hir::Movability::Static => { - unreachable!("tried to assemble `Sized` for static coroutine") - } - hir::Movability::Movable => { - if self.tcx().features().coroutine_clone() { - ty::Binder::dummy( - args.as_coroutine() - .upvar_tys() - .iter() - .chain([Ty::new_coroutine_witness( - self.tcx(), - coroutine_def_id, - self.tcx().mk_args(args.as_coroutine().parent_args()), - )]) - .collect::<Vec<_>>(), - ) - } else { - unreachable!( - "tried to assemble `Sized` for coroutine without enabled feature" - ) - } + ty::Coroutine(def_id, args) => match self.tcx().coroutine_movability(def_id) { + hir::Movability::Static => { + unreachable!("tried to assemble `Clone` for static coroutine") + } + hir::Movability::Movable => { + if self.tcx().features().coroutine_clone() { + ty::Binder::dummy(vec![ + args.as_coroutine().tupled_upvars_ty(), + Ty::new_coroutine_witness_for_coroutine(self.tcx(), def_id, args), + ]) + } else { + unreachable!( + "tried to assemble `Clone` for coroutine without enabled feature" + ) } } - } + }, ty::CoroutineWitness(def_id, args) => self .infcx @@ -2334,25 +2325,9 @@ impl<'tcx> SelectionContext<'_, 'tcx> { ty::Coroutine(def_id, args) => { let ty = self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); let tcx = self.tcx(); - let witness = Ty::new_coroutine_witness( - tcx, - def_id, - ty::GenericArgs::for_item(tcx, def_id, |def, _| match def.kind { - // HACK: Coroutine witnesse types are lifetime erased, so they - // never reference any lifetime args from the coroutine. We erase - // the regions here since we may get into situations where a - // coroutine is recursively contained within itself, leading to - // witness types that differ by region args. This means that - // cycle detection in fulfillment will not kick in, which leads - // to unnecessary overflows in async code. See the issue: - // <https://github.com/rust-lang/rust/issues/145151>. - ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), - ty::GenericParamDefKind::Type { .. } - | ty::GenericParamDefKind::Const { .. 
} => args[def.index as usize], - }), - ); + let witness = Ty::new_coroutine_witness_for_coroutine(tcx, def_id, args); ty::Binder::dummy(AutoImplConstituents { - types: [ty].into_iter().chain(iter::once(witness)).collect(), + types: vec![ty, witness], assumptions: vec![], }) } diff --git a/compiler/rustc_trait_selection/src/traits/util.rs b/compiler/rustc_trait_selection/src/traits/util.rs index 83c0969762f..335942d5bcc 100644 --- a/compiler/rustc_trait_selection/src/traits/util.rs +++ b/compiler/rustc_trait_selection/src/traits/util.rs @@ -9,8 +9,8 @@ pub use rustc_infer::traits::util::*; use rustc_middle::bug; use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::{ - self, PolyTraitPredicate, SizedTraitKind, TraitPredicate, TraitRef, Ty, TyCtxt, TypeFoldable, - TypeFolder, TypeSuperFoldable, TypeVisitableExt, + self, PolyTraitPredicate, PredicatePolarity, SizedTraitKind, TraitPredicate, TraitRef, Ty, + TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, }; pub use rustc_next_trait_solver::placeholder::BoundVarReplacer; use rustc_span::Span; @@ -427,7 +427,9 @@ pub(crate) fn lazily_elaborate_sizedness_candidate<'tcx>( return candidate; } - if obligation.predicate.polarity() != candidate.polarity() { + if obligation.predicate.polarity() != PredicatePolarity::Positive + || candidate.polarity() != PredicatePolarity::Positive + { return candidate; } diff --git a/compiler/rustc_traits/Cargo.toml b/compiler/rustc_traits/Cargo.toml index 04aef4e7b9e..9f40f4d5c23 100644 --- a/compiler/rustc_traits/Cargo.toml +++ b/compiler/rustc_traits/Cargo.toml @@ -6,7 +6,6 @@ edition = "2024" [dependencies] # tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures" } -rustc_hir = { path = "../rustc_hir" } rustc_infer = { path = "../rustc_infer" } rustc_middle = { path = "../rustc_middle" } rustc_span = { path = "../rustc_span" } diff --git a/compiler/rustc_traits/src/coroutine_witnesses.rs b/compiler/rustc_traits/src/coroutine_witnesses.rs index 8a2a0832ddb..20f9b013724 100644 --- a/compiler/rustc_traits/src/coroutine_witnesses.rs +++ b/compiler/rustc_traits/src/coroutine_witnesses.rs @@ -1,9 +1,9 @@ -use rustc_hir::def_id::DefId; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::infer::canonical::query_response::make_query_region_constraints; use rustc_infer::infer::resolve::OpportunisticRegionResolver; use rustc_infer::traits::{Obligation, ObligationCause}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, fold_regions}; +use rustc_span::def_id::DefId; use rustc_trait_selection::traits::{ObligationCtxt, with_replaced_escaping_bound_vars}; /// Return the set of types that should be taken into account when checking diff --git a/compiler/rustc_traits/src/dropck_outlives.rs b/compiler/rustc_traits/src/dropck_outlives.rs index 5eddad39e2b..d33ade7a9e1 100644 --- a/compiler/rustc_traits/src/dropck_outlives.rs +++ b/compiler/rustc_traits/src/dropck_outlives.rs @@ -1,5 +1,4 @@ use rustc_data_structures::fx::FxHashSet; -use rustc_hir::def_id::DefId; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::infer::canonical::{Canonical, QueryResponse}; use rustc_middle::bug; @@ -7,6 +6,7 @@ use rustc_middle::query::Providers; use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult}; use rustc_middle::ty::{self, GenericArgs, TyCtxt}; use rustc_span::DUMMY_SP; +use rustc_span::def_id::DefId; use rustc_trait_selection::infer::InferCtxtBuilderExt; use rustc_trait_selection::traits::query::dropck_outlives::{ 
compute_dropck_outlives_inner, dtorck_constraint_for_ty_inner, diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index 89d1dd8cf23..e4ed084b073 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -356,6 +356,7 @@ fn arg_attrs_for_rust_scalar<'tcx>( if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return { attrs.set(ArgAttribute::ReadOnly); + attrs.set(ArgAttribute::CapturesReadOnly); } } } diff --git a/compiler/rustc_type_ir/Cargo.toml b/compiler/rustc_type_ir/Cargo.toml index e7fee574dd4..d55e9b3b1be 100644 --- a/compiler/rustc_type_ir/Cargo.toml +++ b/compiler/rustc_type_ir/Cargo.toml @@ -12,6 +12,7 @@ indexmap = "2.0.0" rustc-hash = "2.0.0" rustc_ast_ir = { path = "../rustc_ast_ir", default-features = false } rustc_data_structures = { path = "../rustc_data_structures", optional = true } +rustc_error_messages = { path = "../rustc_error_messages", optional = true } rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } @@ -27,6 +28,7 @@ tracing = "0.1" default = ["nightly"] nightly = [ "dep:rustc_data_structures", + "dep:rustc_error_messages", "dep:rustc_macros", "dep:rustc_serialize", "dep:rustc_span", diff --git a/compiler/rustc_type_ir/src/infer_ctxt.rs b/compiler/rustc_type_ir/src/infer_ctxt.rs index e39b99e992b..b4462294700 100644 --- a/compiler/rustc_type_ir/src/infer_ctxt.rs +++ b/compiler/rustc_type_ir/src/infer_ctxt.rs @@ -148,10 +148,6 @@ pub trait InferCtxtLike: Sized { true } - fn in_hir_typeck(&self) -> bool { - false - } - fn typing_mode(&self) -> TypingMode<Self::Interner>; fn universe(&self) -> ty::UniverseIndex; diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 1a6c99ce7de..569570b5783 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -91,6 +91,12 @@ pub trait Ty<I: Interner<Ty = Self>>: fn new_coroutine_witness(interner: I, def_id: I::DefId, args: I::GenericArgs) -> Self; + fn new_coroutine_witness_for_coroutine( + interner: I, + def_id: I::DefId, + coroutine_args: I::GenericArgs, + ) -> Self; + fn new_ptr(interner: I, ty: Self, mutbl: Mutability) -> Self; fn new_ref(interner: I, region: I::Region, ty: Self, mutbl: Mutability) -> Self; diff --git a/compiler/rustc_type_ir/src/ir_print.rs b/compiler/rustc_type_ir/src/ir_print.rs index 388ad09cb20..82bb8791b84 100644 --- a/compiler/rustc_type_ir/src/ir_print.rs +++ b/compiler/rustc_type_ir/src/ir_print.rs @@ -1,9 +1,9 @@ use std::fmt; use crate::{ - AliasTerm, AliasTy, Binder, CoercePredicate, ExistentialProjection, ExistentialTraitRef, FnSig, - HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, PatternKind, - ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef, + AliasTerm, AliasTy, Binder, ClosureKind, CoercePredicate, ExistentialProjection, + ExistentialTraitRef, FnSig, HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, + PatternKind, ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef, UnevaluatedConst, }; pub trait IrPrint<T> { @@ -70,3 +70,46 @@ where <I as IrPrint<OutlivesPredicate<I, T>>>::print(self, fmt) } } + +#[cfg(feature = "nightly")] +mod into_diag_arg_impls { + use rustc_error_messages::{DiagArgValue, IntoDiagArg}; + + use super::*; + + impl<I: Interner> IntoDiagArg for TraitRef<I> { + fn into_diag_arg(self, path: &mut 
Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } + } + + impl<I: Interner> IntoDiagArg for ExistentialTraitRef<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.to_string().into_diag_arg(path) + } + } + + impl<I: Interner> IntoDiagArg for UnevaluatedConst<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + format!("{self:?}").into_diag_arg(path) + } + } + + impl<I: Interner> IntoDiagArg for FnSig<I> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + format!("{self:?}").into_diag_arg(path) + } + } + + impl<I: Interner, T: IntoDiagArg> IntoDiagArg for Binder<I, T> { + fn into_diag_arg(self, path: &mut Option<std::path::PathBuf>) -> DiagArgValue { + self.skip_binder().into_diag_arg(path) + } + } + + impl IntoDiagArg for ClosureKind { + fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue { + DiagArgValue::Str(self.as_str().into()) + } + } +} diff --git a/compiler/rustc_type_ir/src/region_kind.rs b/compiler/rustc_type_ir/src/region_kind.rs index cca81dcb4a0..06048af0436 100644 --- a/compiler/rustc_type_ir/src/region_kind.rs +++ b/compiler/rustc_type_ir/src/region_kind.rs @@ -154,7 +154,7 @@ pub enum RegionKind<I: Interner> { /// parameters via `tcx.liberate_late_bound_regions`. They are then treated /// the same way as `ReEarlyParam` while inside of the function. /// - /// See <https://rustc-dev-guide.rust-lang.org/early-late-bound-params/early-late-bound-summary.html> for + /// See <https://rustc-dev-guide.rust-lang.org/early_late_parameters.html> for /// more info about early and late bound lifetime parameters. ReLateParam(I::LateParamRegion),
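The revised armv7a_vex_v5 linker script pins the User RAM window to fixed start and end addresses and lays out the optional VEXos "linked file", the stack, and the heap from the top of that window downward. As a rough sanity check of that arithmetic, here is a small Rust sketch, assuming the script's default PROVIDE values (a zero-length linked file and the 4M stack); the program and its variable names are illustrative only and are not part of the patch.

fn main() {
    // Addresses taken from the linker script hunk above.
    let user_ram_start: u64 = 0x0380_0000;
    let user_ram_end: u64 = 0x0800_0000;
    let user_ram_length = user_ram_end - user_ram_start;
    assert_eq!(user_ram_length, 0x0480_0000); // 72 MiB

    // Defaults from the PROVIDE lines; a preloaded "linked file" would shrink
    // the usable region by growing downward from __user_ram_end.
    let linked_file_length: u64 = 0;
    let linked_file_start = user_ram_end - linked_file_length;

    // The stack sits directly below the linked-file region (__stack_top /
    // __stack_bottom), and the heap may grow from wherever program data ends
    // up to __stack_bottom (the `. = __stack_bottom;` assignment in `.heap`).
    let stack_top = linked_file_start;
    let stack_bottom = stack_top - 4 * 1024 * 1024; // __stack_length = 4M
    println!("heap may grow up to {stack_bottom:#010x}");
}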

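The diagnostics changes in fulfillment_errors.rs and suggestions.rs above are easiest to see from the user's side. The following intentionally non-compiling sketch (all names invented for illustration) shows the case they target: `Wrapper` is a local ADT whose field type lacks the required impl, so `can_suggest_derive` returns false, no `#[derive(...)]` suggestion is offered, and the unsatisfied-bound explanation is attached to the struct definition via `span_help` instead of a bare `help` that the JSON output would place at `file.rs:1:1`.

// This type does not implement `PartialEq`, so `PartialEq` cannot be derived
// for `Wrapper` either.
struct NotComparable;

struct Wrapper {
    inner: NotComparable,
}

fn require_eq<T: PartialEq>(_value: T) {}

fn main() {
    // Error: the trait bound `Wrapper: PartialEq` is not satisfied. With the
    // change above, the accompanying help is anchored on the definition of
    // `Wrapper` rather than emitted without a usable span.
    require_eq(Wrapper { inner: NotComparable });
}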