Diffstat (limited to 'compiler')
96 files changed, 1649 insertions, 1084 deletions
diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 11b5131b8d7..704f124dbcb 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -917,12 +917,13 @@ impl<'hir> LoweringContext<'_, 'hir> { let poll_expr = { let awaitee = self.expr_ident(span, awaitee_ident, awaitee_pat_hid); let ref_mut_awaitee = self.expr_mut_addr_of(span, awaitee); - let task_context = if let Some(task_context_hid) = self.task_context { - self.expr_ident_mut(span, task_context_ident, task_context_hid) - } else { - // Use of `await` outside of an async context, we cannot use `task_context` here. - self.expr_err(span, self.tcx.sess.span_delayed_bug(span, "no task_context hir id")) + + let Some(task_context_hid) = self.task_context else { + unreachable!("use of `await` outside of an async context."); }; + + let task_context = self.expr_ident_mut(span, task_context_ident, task_context_hid); + let new_unchecked = self.expr_call_lang_item_fn_mut( span, hir::LangItem::PinNewUnchecked, @@ -991,16 +992,14 @@ impl<'hir> LoweringContext<'_, 'hir> { ); let yield_expr = self.arena.alloc(yield_expr); - if let Some(task_context_hid) = self.task_context { - let lhs = self.expr_ident(span, task_context_ident, task_context_hid); - let assign = - self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span))); - self.stmt_expr(span, assign) - } else { - // Use of `await` outside of an async context. Return `yield_expr` so that we can - // proceed with type checking. - self.stmt(span, hir::StmtKind::Semi(yield_expr)) - } + let Some(task_context_hid) = self.task_context else { + unreachable!("use of `await` outside of an async context."); + }; + + let lhs = self.expr_ident(span, task_context_ident, task_context_hid); + let assign = + self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span))); + self.stmt_expr(span, assign) }; let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None); @@ -1635,19 +1634,32 @@ impl<'hir> LoweringContext<'_, 'hir> { } }; - let mut yielded = + let yielded = opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span)); if is_async_gen { - // yield async_gen_ready($expr); - yielded = self.expr_call_lang_item_fn( + // `yield $expr` is transformed into `task_context = yield async_gen_ready($expr)`. + // This ensures that we store our resumed `ResumeContext` correctly, and also that + // the apparent value of the `yield` expression is `()`. 
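The hunks above replace the old `if let Some(task_context_hid) = self.task_context { .. } else { .. }` fallbacks with `let .. else` bindings that treat a missing task context as a broken compiler invariant. As a reminder of how that idiom behaves, here is a minimal, self-contained sketch; the names are illustrative, not compiler internals:

fn poll_step(task_context: Option<u32>) -> u32 {
    // Diverge loudly if the invariant "we are lowering inside an async context"
    // is ever broken, instead of limping on with a dummy error expression.
    let Some(ctx) = task_context else {
        unreachable!("use of `await` outside of an async context");
    };
    // `ctx` stays bound for the rest of the function without extra nesting.
    ctx + 1
}

fn main() {
    assert_eq!(poll_step(Some(1)), 2);
}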
+ let wrapped_yielded = self.expr_call_lang_item_fn( span, hir::LangItem::AsyncGenReady, std::slice::from_ref(yielded), ); - } + let yield_expr = self.arena.alloc( + self.expr(span, hir::ExprKind::Yield(wrapped_yielded, hir::YieldSource::Yield)), + ); - hir::ExprKind::Yield(yielded, hir::YieldSource::Yield) + let Some(task_context_hid) = self.task_context else { + unreachable!("use of `await` outside of an async context."); + }; + let task_context_ident = Ident::with_dummy_span(sym::_task_context); + let lhs = self.expr_ident(span, task_context_ident, task_context_hid); + + hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)) + } else { + hir::ExprKind::Yield(yielded, hir::YieldSource::Yield) + } } /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into: diff --git a/compiler/rustc_ast_lowering/src/index.rs b/compiler/rustc_ast_lowering/src/index.rs index f042f46e59c..993ddf00eb5 100644 --- a/compiler/rustc_ast_lowering/src/index.rs +++ b/compiler/rustc_ast_lowering/src/index.rs @@ -1,7 +1,6 @@ -use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sorted_map::SortedMap; use rustc_hir as hir; -use rustc_hir::def_id::LocalDefId; +use rustc_hir::def_id::{LocalDefId, LocalDefIdMap}; use rustc_hir::intravisit::Visitor; use rustc_hir::*; use rustc_index::{Idx, IndexVec}; @@ -17,7 +16,7 @@ struct NodeCollector<'a, 'hir> { /// Outputs nodes: IndexVec<ItemLocalId, Option<ParentedNode<'hir>>>, - parenting: FxHashMap<LocalDefId, ItemLocalId>, + parenting: LocalDefIdMap<ItemLocalId>, /// The parent of this node parent_node: hir::ItemLocalId, @@ -30,7 +29,7 @@ pub(super) fn index_hir<'hir>( tcx: TyCtxt<'hir>, item: hir::OwnerNode<'hir>, bodies: &SortedMap<ItemLocalId, &'hir Body<'hir>>, -) -> (IndexVec<ItemLocalId, Option<ParentedNode<'hir>>>, FxHashMap<LocalDefId, ItemLocalId>) { +) -> (IndexVec<ItemLocalId, Option<ParentedNode<'hir>>>, LocalDefIdMap<ItemLocalId>) { let mut nodes = IndexVec::new(); // This node's parent should never be accessed: the owner's parent is computed by the // hir_owner_parent query. 
Make it invalid (= ItemLocalId::MAX) to force an ICE whenever it is @@ -42,7 +41,7 @@ pub(super) fn index_hir<'hir>( parent_node: ItemLocalId::new(0), nodes, bodies, - parenting: FxHashMap::default(), + parenting: Default::default(), }; match item { diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 96ed3eee02e..e29ecf55e2f 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -44,20 +44,20 @@ extern crate tracing; use crate::errors::{AssocTyParentheses, AssocTyParenthesesSub, MisplacedImplTrait}; +use rustc_ast::node_id::NodeMap; use rustc_ast::ptr::P; use rustc_ast::{self as ast, *}; use rustc_ast_pretty::pprust; use rustc_data_structures::captures::Captures; use rustc_data_structures::fingerprint::Fingerprint; -use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::Lrc; use rustc_errors::{DiagnosticArgFromDisplay, StashKey}; use rustc_hir as hir; use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res}; -use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID, LOCAL_CRATE}; -use rustc_hir::{ConstArg, GenericArg, ItemLocalId, ParamName, TraitCandidate}; +use rustc_hir::def_id::{LocalDefId, LocalDefIdMap, CRATE_DEF_ID, LOCAL_CRATE}; +use rustc_hir::{ConstArg, GenericArg, ItemLocalMap, ParamName, TraitCandidate}; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_middle::span_bug; use rustc_middle::ty::{ResolverAstLowering, TyCtxt, Visibility}; @@ -119,13 +119,13 @@ struct LoweringContext<'a, 'hir> { current_hir_id_owner: hir::OwnerId, item_local_id_counter: hir::ItemLocalId, - trait_map: FxHashMap<ItemLocalId, Box<[TraitCandidate]>>, + trait_map: ItemLocalMap<Box<[TraitCandidate]>>, impl_trait_defs: Vec<hir::GenericParam<'hir>>, impl_trait_bounds: Vec<hir::WherePredicate<'hir>>, /// NodeIds that are lowered inside the current HIR owner. - node_id_to_local_id: FxHashMap<NodeId, hir::ItemLocalId>, + node_id_to_local_id: NodeMap<hir::ItemLocalId>, allow_try_trait: Lrc<[Symbol]>, allow_gen_future: Lrc<[Symbol]>, @@ -135,7 +135,7 @@ struct LoweringContext<'a, 'hir> { /// For each captured lifetime (e.g., 'a), we create a new lifetime parameter that is a generic /// defined on the TAIT, so we have type Foo<'a1> = ... and we establish a mapping in this /// field from the original parameter 'a to the new parameter 'a1. - generics_def_id_map: Vec<FxHashMap<LocalDefId, LocalDefId>>, + generics_def_id_map: Vec<LocalDefIdMap<LocalDefId>>, host_param_id: Option<LocalDefId>, } @@ -380,7 +380,7 @@ enum AstOwner<'a> { } fn index_crate<'a>( - node_id_to_def_id: &FxHashMap<NodeId, LocalDefId>, + node_id_to_def_id: &NodeMap<LocalDefId>, krate: &'a Crate, ) -> IndexVec<LocalDefId, AstOwner<'a>> { let mut indexer = Indexer { node_id_to_def_id, index: IndexVec::new() }; @@ -390,7 +390,7 @@ fn index_crate<'a>( return indexer.index; struct Indexer<'s, 'a> { - node_id_to_def_id: &'s FxHashMap<NodeId, LocalDefId>, + node_id_to_def_id: &'s NodeMap<LocalDefId>, index: IndexVec<LocalDefId, AstOwner<'a>>, } @@ -642,7 +642,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { /// `'a` declared on the TAIT, instead of the function. 
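These hunks consistently swap raw `FxHashMap` uses for purpose-built aliases such as `NodeMap`, `ItemLocalMap`, and `LocalDefIdMap` (imported above from `rustc_ast::node_id` and `rustc_hir::def_id`). A minimal sketch of the idea, using `std::collections::HashMap` as a stand-in for the hasher-specialized map:

use std::collections::HashMap;

#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct LocalDefId(u32);

// The alias names the key type once: call sites read as intent
// ("a map keyed by local def ids") and the hasher choice stays centralized.
type LocalDefIdMap<T> = HashMap<LocalDefId, T>;

fn main() {
    let mut parenting: LocalDefIdMap<&'static str> = LocalDefIdMap::default();
    parenting.insert(LocalDefId(0), "crate root");
    assert_eq!(parenting.len(), 1);
}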
fn with_remapping<R>( &mut self, - remap: FxHashMap<LocalDefId, LocalDefId>, + remap: LocalDefIdMap<LocalDefId>, f: impl FnOnce(&mut Self) -> R, ) -> R { self.generics_def_id_map.push(remap); @@ -1657,7 +1657,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // Map from captured (old) lifetime to synthetic (new) lifetime. // Used to resolve lifetimes in the bounds of the opaque. - let mut captured_to_synthesized_mapping = FxHashMap::default(); + let mut captured_to_synthesized_mapping = LocalDefIdMap::default(); // List of (early-bound) synthetic lifetimes that are owned by the opaque. // This is used to create the `hir::Generics` owned by the opaque. let mut synthesized_lifetime_definitions = vec![]; diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index b308cd82e54..948221e9407 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -674,13 +674,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // eagerly. let mut outlives_requirements = infcx.tcx.is_typeck_child(mir_def_id).then(Vec::new); - self.check_type_tests( - infcx, - param_env, - body, - outlives_requirements.as_mut(), - &mut errors_buffer, - ); + self.check_type_tests(infcx, body, outlives_requirements.as_mut(), &mut errors_buffer); debug!(?errors_buffer); debug!(?outlives_requirements); @@ -938,7 +932,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn check_type_tests( &self, infcx: &InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, body: &Body<'tcx>, mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'tcx>>>, errors_buffer: &mut RegionErrors<'tcx>, @@ -956,7 +949,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { let generic_ty = type_test.generic_kind.to_ty(tcx); if self.eval_verify_bound( infcx, - param_env, generic_ty, type_test.lower_bound, &type_test.verify_bound, @@ -967,7 +959,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { if let Some(propagated_outlives_requirements) = &mut propagated_outlives_requirements { if self.try_promote_type_test( infcx, - param_env, body, type_test, propagated_outlives_requirements, @@ -1025,7 +1016,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn try_promote_type_test( &self, infcx: &InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, body: &Body<'tcx>, type_test: &TypeTest<'tcx>, propagated_outlives_requirements: &mut Vec<ClosureOutlivesRequirement<'tcx>>, @@ -1087,7 +1077,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // where `ur` is a local bound -- we are sometimes in a // position to prove things that our caller cannot. See // #53570 for an example. 
- if self.eval_verify_bound(infcx, param_env, generic_ty, ur, &type_test.verify_bound) { + if self.eval_verify_bound(infcx, generic_ty, ur, &type_test.verify_bound) { continue; } @@ -1270,7 +1260,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn eval_verify_bound( &self, infcx: &InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, generic_ty: Ty<'tcx>, lower_bound: RegionVid, verify_bound: &VerifyBound<'tcx>, @@ -1279,7 +1268,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { match verify_bound { VerifyBound::IfEq(verify_if_eq_b) => { - self.eval_if_eq(infcx, param_env, generic_ty, lower_bound, *verify_if_eq_b) + self.eval_if_eq(infcx, generic_ty, lower_bound, *verify_if_eq_b) } VerifyBound::IsEmpty => { @@ -1293,11 +1282,11 @@ impl<'tcx> RegionInferenceContext<'tcx> { } VerifyBound::AnyBound(verify_bounds) => verify_bounds.iter().any(|verify_bound| { - self.eval_verify_bound(infcx, param_env, generic_ty, lower_bound, verify_bound) + self.eval_verify_bound(infcx, generic_ty, lower_bound, verify_bound) }), VerifyBound::AllBounds(verify_bounds) => verify_bounds.iter().all(|verify_bound| { - self.eval_verify_bound(infcx, param_env, generic_ty, lower_bound, verify_bound) + self.eval_verify_bound(infcx, generic_ty, lower_bound, verify_bound) }), } } @@ -1305,19 +1294,13 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn eval_if_eq( &self, infcx: &InferCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, generic_ty: Ty<'tcx>, lower_bound: RegionVid, verify_if_eq_b: ty::Binder<'tcx, VerifyIfEq<'tcx>>, ) -> bool { let generic_ty = self.normalize_to_scc_representatives(infcx.tcx, generic_ty); let verify_if_eq_b = self.normalize_to_scc_representatives(infcx.tcx, verify_if_eq_b); - match test_type_match::extract_verify_if_eq( - infcx.tcx, - param_env, - &verify_if_eq_b, - generic_ty, - ) { + match test_type_match::extract_verify_if_eq(infcx.tcx, &verify_if_eq_b, generic_ty) { Some(r) => { let r_vid = self.to_region_vid(r); self.eval_outlives(r_vid, lower_bound) diff --git a/compiler/rustc_codegen_cranelift/build_system/tests.rs b/compiler/rustc_codegen_cranelift/build_system/tests.rs index 1a38d5967f4..cb7b2454cd5 100644 --- a/compiler/rustc_codegen_cranelift/build_system/tests.rs +++ b/compiler/rustc_codegen_cranelift/build_system/tests.rs @@ -232,6 +232,13 @@ const EXTENDED_SYSROOT_SUITE: &[TestCase] = &[ if runner.is_native { let mut test_cmd = PORTABLE_SIMD.test(&runner.target_compiler, &runner.dirs); test_cmd.arg("-q"); + // FIXME remove after portable-simd update + test_cmd + .arg("--") + .arg("--skip") + .arg("core_simd::swizzle::simd_swizzle") + .arg("--skip") + .arg("core_simd::vector::Simd<T,N>::lanes"); spawn_and_wait(test_cmd); } }), diff --git a/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs b/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs index afc51a47f14..1d51b499c8b 100644 --- a/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs +++ b/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs @@ -337,17 +337,6 @@ fn main() { static REF2: &u8 = REF1; assert_eq!(*REF1, *REF2); - extern "C" { - type A; - } - - fn main() { - let x: &A = unsafe { &*(1usize as *const A) }; - - assert_eq!(unsafe { intrinsics::size_of_val(x) }, 0); - assert_eq!(unsafe { intrinsics::min_align_of_val(x) }, 1); - } - #[repr(simd)] struct V([f64; 2]); diff --git a/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Enable-the-exposed_provenance-feature.patch b/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Enable-the-exposed_provenance-feature.patch 
new file mode 100644 index 00000000000..b8c0783f524 --- /dev/null +++ b/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Enable-the-exposed_provenance-feature.patch @@ -0,0 +1,22 @@ +From a101a43b795431ce617e7782afb451f4853afc00 Mon Sep 17 00:00:00 2001 +From: bjorn3 <17426603+bjorn3@users.noreply.github.com> +Date: Thu, 7 Dec 2023 14:51:35 +0000 +Subject: [PATCH] Enable the exposed_provenance feature + +--- + crates/core_simd/tests/pointers.rs | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/crates/core_simd/tests/pointers.rs b/crates/core_simd/tests/pointers.rs +index 0ae8f83..06620d6 100644 +--- a/crates/core_simd/tests/pointers.rs ++++ b/crates/core_simd/tests/pointers.rs +@@ -1,4 +1,4 @@ +-#![feature(portable_simd, strict_provenance)] ++#![feature(exposed_provenance, portable_simd, strict_provenance)] + + use core_simd::simd::{Simd, SimdConstPtr, SimdMutPtr}; + +-- +2.34.1 + diff --git a/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml b/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml index 8a690bada0d..8e213f71c3f 100644 --- a/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml +++ b/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml @@ -36,15 +36,18 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56fc6cf8dc8c4158eed8649f9b8b0ea1518eb62b544fe9490d66fa0b349eafe9" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] [[package]] name = "cfg-if" @@ -58,9 +61,9 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.103" +version = "0.1.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3b73c3443a5fd2438d7ba4853c64e4c8efc2404a9e28a9234cc2d5eebc6c242" +checksum = "99c3f9035afc33f4358773239573f7d121099856753e1bbd2a6a5207098fc741" dependencies = [ "cc", "rustc-std-workspace-core", @@ -124,9 +127,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", @@ -135,9 +138,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ "allocator-api2", "compiler_builtins", @@ -147,9 +150,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", @@ -167,9 +170,9 @@ dependencies = [ [[package]] name = "memchr" -version = 
"2.5.0" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -189,9 +192,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "compiler_builtins", "memchr", @@ -241,9 +244,9 @@ dependencies = [ [[package]] name = "r-efi" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "575fc2d9b3da54adbdfaddf6eca48fec256d977c8630a1750b8991347d1ac911" +checksum = "0e244f96e03a3067f9e521d3167bd42657594cb8588c8d3a2db01545dc1af2e0" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -402,9 +405,9 @@ dependencies = [ [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -419,6 +422,18 @@ dependencies = [ "compiler_builtins", "core", "libc", + "unwinding", +] + +[[package]] +name = "unwinding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37a19a21a537f635c16c7576f22d0f2f7d63353c1337ad4ce0d8001c7952a25b" +dependencies = [ + "compiler_builtins", + "gimli", + "rustc-std-workspace-core", ] [[package]] diff --git a/compiler/rustc_codegen_cranelift/rust-toolchain b/compiler/rustc_codegen_cranelift/rust-toolchain index 2997816d96c..4ba08f1af44 100644 --- a/compiler/rustc_codegen_cranelift/rust-toolchain +++ b/compiler/rustc_codegen_cranelift/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-11-25" +channel = "nightly-2023-12-19" components = ["rust-src", "rustc-dev", "llvm-tools"] diff --git a/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh b/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh index a299b6de6b1..7d7ffdadc7f 100755 --- a/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh +++ b/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh @@ -44,6 +44,7 @@ rm tests/ui/proc-macro/no-mangle-in-proc-macro-issue-111888.rs # vendor intrinsics rm tests/ui/sse2.rs # CodegenBackend::target_features not yet implemented rm tests/ui/simd/array-type.rs # "Index argument for `simd_insert` is not a constant" +rm tests/ui/simd/masked-load-store.rs # exotic linkages rm tests/ui/issues/issue-33992.rs # unsupported linkages diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index 8b0dc611075..df40a5eb475 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -353,7 +353,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { fx, rustc_hir::LangItem::PanicBoundsCheck, &[index, len, location], - source_info.span, + Some(source_info.span), ); } AssertKind::MisalignedPointerDereference { ref required, ref found } => { @@ -365,7 +365,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, 
'_, '_>, start_block: Block) { fx, rustc_hir::LangItem::PanicMisalignedPointerDereference, &[required, found, location], - source_info.span, + Some(source_info.span), ); } _ => { @@ -945,19 +945,19 @@ pub(crate) fn codegen_panic<'tcx>( let msg_len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap()); let args = [msg_ptr, msg_len, location]; - codegen_panic_inner(fx, rustc_hir::LangItem::Panic, &args, source_info.span); + codegen_panic_inner(fx, rustc_hir::LangItem::Panic, &args, Some(source_info.span)); } pub(crate) fn codegen_panic_nounwind<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, msg_str: &str, - source_info: mir::SourceInfo, + span: Option<Span>, ) { let msg_ptr = fx.anonymous_str(msg_str); let msg_len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap()); let args = [msg_ptr, msg_len]; - codegen_panic_inner(fx, rustc_hir::LangItem::PanicNounwind, &args, source_info.span); + codegen_panic_inner(fx, rustc_hir::LangItem::PanicNounwind, &args, span); } pub(crate) fn codegen_unwind_terminate<'tcx>( @@ -967,16 +967,16 @@ pub(crate) fn codegen_unwind_terminate<'tcx>( ) { let args = []; - codegen_panic_inner(fx, reason.lang_item(), &args, source_info.span); + codegen_panic_inner(fx, reason.lang_item(), &args, Some(source_info.span)); } fn codegen_panic_inner<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, lang_item: rustc_hir::LangItem, args: &[Value], - span: Span, + span: Option<Span>, ) { - let def_id = fx.tcx.require_lang_item(lang_item, Some(span)); + let def_id = fx.tcx.require_lang_item(lang_item, span); let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx); let symbol_name = fx.tcx.symbol_name(instance).name; diff --git a/compiler/rustc_codegen_cranelift/src/common.rs b/compiler/rustc_codegen_cranelift/src/common.rs index 63562d33508..bd19a7ed059 100644 --- a/compiler/rustc_codegen_cranelift/src/common.rs +++ b/compiler/rustc_codegen_cranelift/src/common.rs @@ -98,11 +98,15 @@ fn clif_pair_type_from_ty<'tcx>( /// Is a pointer to this type a fat ptr? pub(crate) fn has_ptr_meta<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { - let ptr_ty = Ty::new_ptr(tcx, TypeAndMut { ty, mutbl: rustc_hir::Mutability::Not }); - match &tcx.layout_of(ParamEnv::reveal_all().and(ptr_ty)).unwrap().abi { - Abi::Scalar(_) => false, - Abi::ScalarPair(_, _) => true, - abi => unreachable!("Abi of ptr to {:?} is {:?}???", ty, abi), + if ty.is_sized(tcx, ParamEnv::reveal_all()) { + return false; + } + + let tail = tcx.struct_tail_erasing_lifetimes(ty, ParamEnv::reveal_all()); + match tail.kind() { + ty::Foreign(..) => false, + ty::Str | ty::Slice(..) | ty::Dynamic(..) 
=> true, + _ => bug!("unexpected unsized tail: {:?}", tail), } } diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs index bfeeb117ff5..68126f12424 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs @@ -487,13 +487,12 @@ fn codegen_regular_intrinsic_call<'tcx>( let layout = fx.layout_of(generic_args.type_at(0)); // Note: Can't use is_unsized here as truly unsized types need to take the fixed size // branch - let size = if let Abi::ScalarPair(_, _) = ptr.layout().abi { - let (_ptr, info) = ptr.load_scalar_pair(fx); - let (size, _align) = crate::unsize::size_and_align_of_dst(fx, layout, info); - size + let meta = if let Abi::ScalarPair(_, _) = ptr.layout().abi { + Some(ptr.load_scalar_pair(fx).1) } else { - fx.bcx.ins().iconst(fx.pointer_type, layout.size.bytes() as i64) + None }; + let (size, _align) = crate::unsize::size_and_align_of(fx, layout, meta); ret.write_cvalue(fx, CValue::by_val(size, usize_layout)); } sym::min_align_of_val => { @@ -502,13 +501,12 @@ fn codegen_regular_intrinsic_call<'tcx>( let layout = fx.layout_of(generic_args.type_at(0)); // Note: Can't use is_unsized here as truly unsized types need to take the fixed size // branch - let align = if let Abi::ScalarPair(_, _) = ptr.layout().abi { - let (_ptr, info) = ptr.load_scalar_pair(fx); - let (_size, align) = crate::unsize::size_and_align_of_dst(fx, layout, info); - align + let meta = if let Abi::ScalarPair(_, _) = ptr.layout().abi { + Some(ptr.load_scalar_pair(fx).1) } else { - fx.bcx.ins().iconst(fx.pointer_type, layout.align.abi.bytes() as i64) + None }; + let (_size, align) = crate::unsize::size_and_align_of(fx, layout, meta); ret.write_cvalue(fx, CValue::by_val(align, usize_layout)); } @@ -688,7 +686,7 @@ fn codegen_regular_intrinsic_call<'tcx>( } }) }); - crate::base::codegen_panic_nounwind(fx, &msg_str, source_info); + crate::base::codegen_panic_nounwind(fx, &msg_str, Some(source_info.span)); return; } } diff --git a/compiler/rustc_codegen_cranelift/src/unsize.rs b/compiler/rustc_codegen_cranelift/src/unsize.rs index c6133f2b35c..f777e11371f 100644 --- a/compiler/rustc_codegen_cranelift/src/unsize.rs +++ b/compiler/rustc_codegen_cranelift/src/unsize.rs @@ -2,6 +2,9 @@ //! //! [`PointerCoercion::Unsize`]: `rustc_middle::ty::adjustment::PointerCoercion::Unsize` +use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; + +use crate::base::codegen_panic_nounwind; use crate::prelude::*; // Adapted from https://github.com/rust-lang/rust/blob/2a663555ddf36f6b041445894a8c175cd1bc718c/src/librustc_codegen_ssa/base.rs#L159-L307 @@ -187,63 +190,113 @@ pub(crate) fn coerce_dyn_star<'tcx>( // Adapted from https://github.com/rust-lang/rust/blob/2a663555ddf36f6b041445894a8c175cd1bc718c/src/librustc_codegen_ssa/glue.rs -pub(crate) fn size_and_align_of_dst<'tcx>( +pub(crate) fn size_and_align_of<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, layout: TyAndLayout<'tcx>, - info: Value, + info: Option<Value>, ) -> (Value, Value) { - assert!(layout.is_unsized() || layout.abi == Abi::Uninhabited); - match layout.ty.kind() { + if layout.is_sized() { + return ( + fx.bcx.ins().iconst(fx.pointer_type, layout.size.bytes() as i64), + fx.bcx.ins().iconst(fx.pointer_type, layout.align.abi.bytes() as i64), + ); + } + + let ty = layout.ty; + match ty.kind() { ty::Dynamic(..) 
=> { // load size/align from vtable - (crate::vtable::size_of_obj(fx, info), crate::vtable::min_align_of_obj(fx, info)) + ( + crate::vtable::size_of_obj(fx, info.unwrap()), + crate::vtable::min_align_of_obj(fx, info.unwrap()), + ) } ty::Slice(_) | ty::Str => { let unit = layout.field(fx, 0); // The info in this case is the length of the str, so the size is that // times the unit size. ( - fx.bcx.ins().imul_imm(info, unit.size.bytes() as i64), + fx.bcx.ins().imul_imm(info.unwrap(), unit.size.bytes() as i64), fx.bcx.ins().iconst(fx.pointer_type, unit.align.abi.bytes() as i64), ) } - _ => { + ty::Foreign(_) => { + let trap_block = fx.bcx.create_block(); + let true_ = fx.bcx.ins().iconst(types::I8, 1); + let next_block = fx.bcx.create_block(); + fx.bcx.ins().brif(true_, trap_block, &[], next_block, &[]); + fx.bcx.seal_block(trap_block); + fx.bcx.seal_block(next_block); + fx.bcx.switch_to_block(trap_block); + + // `extern` type. We cannot compute the size, so panic. + let msg_str = with_no_visible_paths!({ + with_no_trimmed_paths!({ + format!("attempted to compute the size or alignment of extern type `{ty}`") + }) + }); + + codegen_panic_nounwind(fx, &msg_str, None); + + fx.bcx.switch_to_block(next_block); + + // This function does not return so we can now return whatever we want. + let size = fx.bcx.ins().iconst(fx.pointer_type, 42); + let align = fx.bcx.ins().iconst(fx.pointer_type, 42); + (size, align) + } + ty::Adt(..) | ty::Tuple(..) => { // First get the size of all statically known fields. // Don't use size_of because it also rounds up to alignment, which we // want to avoid, as the unsized field's alignment could be smaller. assert!(!layout.ty.is_simd()); let i = layout.fields.count() - 1; - let sized_size = layout.fields.offset(i).bytes(); + let unsized_offset_unadjusted = layout.fields.offset(i).bytes(); + let unsized_offset_unadjusted = + fx.bcx.ins().iconst(fx.pointer_type, unsized_offset_unadjusted as i64); let sized_align = layout.align.abi.bytes(); let sized_align = fx.bcx.ins().iconst(fx.pointer_type, sized_align as i64); // Recurse to get the size of the dynamically sized field (must be // the last field). let field_layout = layout.field(fx, i); - let (unsized_size, mut unsized_align) = size_and_align_of_dst(fx, field_layout, info); - - // FIXME (#26403, #27023): We should be adding padding - // to `sized_size` (to accommodate the `unsized_align` - // required of the unsized field that follows) before - // summing it with `sized_size`. (Note that since #26403 - // is unfixed, we do not yet add the necessary padding - // here. But this is where the add would go.) - - // Return the sum of sizes and max of aligns. - let size = fx.bcx.ins().iadd_imm(unsized_size, sized_size as i64); - - // Packed types ignore the alignment of their fields. - if let ty::Adt(def, _) = layout.ty.kind() { - if def.repr().packed() { - unsized_align = sized_align; + let (unsized_size, mut unsized_align) = size_and_align_of(fx, field_layout, info); + + // # First compute the dynamic alignment + + // For packed types, we need to cap the alignment. + if let ty::Adt(def, _) = ty.kind() { + if let Some(packed) = def.repr().pack { + if packed.bytes() == 1 { + // We know this will be capped to 1. + unsized_align = fx.bcx.ins().iconst(fx.pointer_type, 1); + } else { + // We have to dynamically compute `min(unsized_align, packed)`. 
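The comments in this `size_and_align_of` hunk spell out the whole computation: cap the tail's alignment when the containing type is `#[repr(packed)]`, take the maximum of the sized prefix's alignment and the tail's alignment, then round the summed size up to that alignment with the `(size + (align - 1)) & -align` trick applied a few lines below. A plain-Rust model of that arithmetic, not the Cranelift IR emitted here, with illustrative names and assuming alignments are powers of two:

/// Round `size` up to the next multiple of `align` (a non-zero power of two).
fn align_up(size: u64, align: u64) -> u64 {
    debug_assert!(align.is_power_of_two());
    (size + (align - 1)) & align.wrapping_neg()
}

/// Size and alignment of a struct whose last field is unsized, given the
/// statically known prefix and the dynamically computed tail size/align.
fn dst_size_align(
    unsized_offset_unadjusted: u64, // offset of the unsized tail, before padding
    sized_align: u64,               // alignment of the sized prefix
    unsized_size: u64,              // tail size, e.g. len * elem_size for a slice
    unsized_align: u64,             // tail alignment, e.g. read from a vtable
    packed: Option<u64>,            // Some(n) for #[repr(packed(n))]
) -> (u64, u64) {
    // Packed types cap the alignment of their fields.
    let unsized_align = match packed {
        Some(pack) => unsized_align.min(pack),
        None => unsized_align,
    };
    // The combined value must satisfy the more restrictive alignment.
    let full_align = sized_align.max(unsized_align);
    // Add the tail, then pad the total out to `full_align`.
    let full_size = align_up(unsized_offset_unadjusted + unsized_size, full_align);
    (full_size, full_align)
}

fn main() {
    assert_eq!(align_up(13, 8), 16);
    assert_eq!(dst_size_align(5, 4, 12, 8, None), (24, 8));
}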
+ let packed = fx.bcx.ins().iconst(fx.pointer_type, packed.bytes() as i64); + let cmp = fx.bcx.ins().icmp(IntCC::UnsignedLessThan, unsized_align, packed); + unsized_align = fx.bcx.ins().select(cmp, unsized_align, packed); + } } } // Choose max of two known alignments (combined value must // be aligned according to more restrictive of the two). let cmp = fx.bcx.ins().icmp(IntCC::UnsignedGreaterThan, sized_align, unsized_align); - let align = fx.bcx.ins().select(cmp, sized_align, unsized_align); + let full_align = fx.bcx.ins().select(cmp, sized_align, unsized_align); + + // # Then compute the dynamic size + + // The full formula for the size would be: + // let unsized_offset_adjusted = unsized_offset_unadjusted.align_to(unsized_align); + // let full_size = (unsized_offset_adjusted + unsized_size).align_to(full_align); + // However, `unsized_size` is a multiple of `unsized_align`. + // Therefore, we can equivalently do the `align_to(unsized_align)` *after* adding `unsized_size`: + // let full_size = (unsized_offset_unadjusted + unsized_size).align_to(unsized_align).align_to(full_align); + // Furthermore, `align >= unsized_align`, and therefore we only need to do: + // let full_size = (unsized_offset_unadjusted + unsized_size).align_to(full_align); + + let full_size = fx.bcx.ins().iadd(unsized_offset_unadjusted, unsized_size); // Issue #27023: must add any necessary padding to `size` // (to make it a multiple of `align`) before returning it. @@ -255,12 +308,13 @@ pub(crate) fn size_and_align_of_dst<'tcx>( // emulated via the semi-standard fast bit trick: // // `(size + (align-1)) & -align` - let addend = fx.bcx.ins().iadd_imm(align, -1); - let add = fx.bcx.ins().iadd(size, addend); - let neg = fx.bcx.ins().ineg(align); - let size = fx.bcx.ins().band(add, neg); + let addend = fx.bcx.ins().iadd_imm(full_align, -1); + let add = fx.bcx.ins().iadd(full_size, addend); + let neg = fx.bcx.ins().ineg(full_align); + let full_size = fx.bcx.ins().band(add, neg); - (size, align) + (full_size, full_align) } + _ => bug!("size_and_align_of_dst: {ty} not supported"), } } diff --git a/compiler/rustc_codegen_cranelift/src/value_and_place.rs b/compiler/rustc_codegen_cranelift/src/value_and_place.rs index f52f59716a8..567a5669d49 100644 --- a/compiler/rustc_codegen_cranelift/src/value_and_place.rs +++ b/compiler/rustc_codegen_cranelift/src/value_and_place.rs @@ -20,34 +20,36 @@ fn codegen_field<'tcx>( (base.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()), field_layout) }; - if let Some(extra) = extra { - if field_layout.is_sized() { - return simple(fx); - } - match field_layout.ty.kind() { - ty::Slice(..) | ty::Str | ty::Foreign(..) => simple(fx), - ty::Adt(def, _) if def.repr().packed() => { - assert_eq!(layout.align.abi.bytes(), 1); - simple(fx) - } - _ => { - // We have to align the offset for DST's - let unaligned_offset = field_offset.bytes(); - let (_, unsized_align) = - crate::unsize::size_and_align_of_dst(fx, field_layout, extra); + if field_layout.is_sized() { + return simple(fx); + } + match field_layout.ty.kind() { + ty::Slice(..) 
| ty::Str => simple(fx), + _ => { + let unaligned_offset = field_offset.bytes(); - let one = fx.bcx.ins().iconst(fx.pointer_type, 1); - let align_sub_1 = fx.bcx.ins().isub(unsized_align, one); - let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64); - let zero = fx.bcx.ins().iconst(fx.pointer_type, 0); - let and_rhs = fx.bcx.ins().isub(zero, unsized_align); - let offset = fx.bcx.ins().band(and_lhs, and_rhs); + // Get the alignment of the field + let (_, mut unsized_align) = crate::unsize::size_and_align_of(fx, field_layout, extra); - (base.offset_value(fx, offset), field_layout) + // For packed types, we need to cap alignment. + if let ty::Adt(def, _) = layout.ty.kind() { + if let Some(packed) = def.repr().pack { + let packed = fx.bcx.ins().iconst(fx.pointer_type, packed.bytes() as i64); + let cmp = fx.bcx.ins().icmp(IntCC::UnsignedLessThan, unsized_align, packed); + unsized_align = fx.bcx.ins().select(cmp, unsized_align, packed); + } } + + // Bump the unaligned offset up to the appropriate alignment + let one = fx.bcx.ins().iconst(fx.pointer_type, 1); + let align_sub_1 = fx.bcx.ins().isub(unsized_align, one); + let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64); + let zero = fx.bcx.ins().iconst(fx.pointer_type, 0); + let and_rhs = fx.bcx.ins().isub(zero, unsized_align); + let offset = fx.bcx.ins().band(and_lhs, and_rhs); + + (base.offset_value(fx, offset), field_layout) } - } else { - simple(fx) } } @@ -731,13 +733,8 @@ impl<'tcx> CPlace<'tcx> { }; let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field); - if field_layout.is_unsized() { - if let ty::Foreign(_) = field_layout.ty.kind() { - assert!(extra.is_none()); - CPlace::for_ptr(field_ptr, field_layout) - } else { - CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout) - } + if has_ptr_meta(fx.tcx, field_layout.ty) { + CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout) } else { CPlace::for_ptr(field_ptr, field_layout) } diff --git a/compiler/rustc_codegen_cranelift/y.cmd b/compiler/rustc_codegen_cranelift/y.cmd new file mode 100644 index 00000000000..e9b688645a4 --- /dev/null +++ b/compiler/rustc_codegen_cranelift/y.cmd @@ -0,0 +1,9 @@ +@echo off +echo [BUILD] build system >&2 +mkdir build 2>nul +rustc build_system/main.rs -o build\y.exe -Cdebuginfo=1 --edition 2021 || goto :error +build\y.exe %* || goto :error +goto :EOF + +:error +exit /b diff --git a/compiler/rustc_codegen_cranelift/y.ps1 b/compiler/rustc_codegen_cranelift/y.ps1 new file mode 100644 index 00000000000..02ef0fcbd50 --- /dev/null +++ b/compiler/rustc_codegen_cranelift/y.ps1 @@ -0,0 +1,12 @@ +$ErrorActionPreference = "Stop" + +$host.ui.WriteErrorLine("[BUILD] build system") +New-Item -ItemType Directory -Force -Path build | Out-Null +& rustc build_system/main.rs -o build\y.exe -Cdebuginfo=1 --edition 2021 +if ($LASTEXITCODE -ne 0) { + exit $LASTEXITCODE +} +& build\y.exe $args +if ($LASTEXITCODE -ne 0) { + exit $LASTEXITCODE +} diff --git a/compiler/rustc_codegen_ssa/src/assert_module_sources.rs b/compiler/rustc_codegen_ssa/src/assert_module_sources.rs index 01d1b1059b9..a5bd10ecb34 100644 --- a/compiler/rustc_codegen_ssa/src/assert_module_sources.rs +++ b/compiler/rustc_codegen_ssa/src/assert_module_sources.rs @@ -278,13 +278,13 @@ impl CguReuseTracker { if error { let at_least = if at_least { 1 } else { 0 }; - errors::IncorrectCguReuseType { + sess.emit_err(errors::IncorrectCguReuseType { span: *error_span, cgu_user_name, actual_reuse, expected_reuse, at_least, - }; + }); } 
} else { sess.emit_fatal(errors::CguNotRecorded { cgu_user_name, cgu_name }); diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs index 258d6710bc5..e2bccf1ffa5 100644 --- a/compiler/rustc_hir/src/def.rs +++ b/compiler/rustc_hir/src/def.rs @@ -3,8 +3,8 @@ use crate::hir; use rustc_ast as ast; use rustc_ast::NodeId; -use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::ToStableHashKey; +use rustc_data_structures::unord::UnordMap; use rustc_macros::HashStable_Generic; use rustc_span::def_id::{DefId, LocalDefId}; use rustc_span::hygiene::MacroKind; @@ -806,4 +806,4 @@ pub enum LifetimeRes { ElidedAnchor { start: NodeId, end: NodeId }, } -pub type DocLinkResMap = FxHashMap<(Symbol, Namespace), Option<Res<NodeId>>>; +pub type DocLinkResMap = UnordMap<(Symbol, Namespace), Option<Res<NodeId>>>; diff --git a/compiler/rustc_hir/src/definitions.rs b/compiler/rustc_hir/src/definitions.rs index d222325475d..2ab9a6ef32c 100644 --- a/compiler/rustc_hir/src/definitions.rs +++ b/compiler/rustc_hir/src/definitions.rs @@ -8,8 +8,8 @@ pub use crate::def_id::DefPathHash; use crate::def_id::{CrateNum, DefIndex, LocalDefId, StableCrateId, CRATE_DEF_INDEX, LOCAL_CRATE}; use crate::def_path_hash_map::DefPathHashMap; -use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::{Hash64, StableHasher}; +use rustc_data_structures::unord::UnordMap; use rustc_index::IndexVec; use rustc_span::symbol::{kw, sym, Symbol}; @@ -95,7 +95,7 @@ impl DefPathTable { #[derive(Debug)] pub struct Definitions { table: DefPathTable, - next_disambiguator: FxHashMap<(LocalDefId, DefPathData), u32>, + next_disambiguator: UnordMap<(LocalDefId, DefPathData), u32>, /// The [StableCrateId] of the local crate. stable_crate_id: StableCrateId, diff --git a/compiler/rustc_hir/src/diagnostic_items.rs b/compiler/rustc_hir/src/diagnostic_items.rs index 243014b0027..d4d09f9a4e0 100644 --- a/compiler/rustc_hir/src/diagnostic_items.rs +++ b/compiler/rustc_hir/src/diagnostic_items.rs @@ -1,12 +1,13 @@ use crate::def_id::DefId; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_span::def_id::DefIdMap; use rustc_span::Symbol; #[derive(Debug, Default)] pub struct DiagnosticItems { - pub id_to_name: FxHashMap<DefId, Symbol>, - pub name_to_id: FxHashMap<Symbol, DefId>, + pub id_to_name: DefIdMap<Symbol>, + pub name_to_id: FxIndexMap<Symbol, DefId>, } impl<CTX: crate::HashStableContext> HashStable<CTX> for DiagnosticItems { diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 760945554f0..d148137091c 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -1,6 +1,6 @@ use crate::def::{CtorKind, DefKind, Res}; -use crate::def_id::DefId; -pub(crate) use crate::hir_id::{HirId, ItemLocalId, OwnerId}; +use crate::def_id::{DefId, LocalDefIdMap}; +pub(crate) use crate::hir_id::{HirId, ItemLocalId, ItemLocalMap, OwnerId}; use crate::intravisit::FnKind; use crate::LangItem; @@ -11,7 +11,6 @@ pub use rustc_ast::{BinOp, BinOpKind, BindingAnnotation, BorrowKind, ByRef, Capt pub use rustc_ast::{ImplPolarity, IsAuto, Movability, Mutability, UnOp}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_data_structures::fingerprint::Fingerprint; -use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sorted_map::SortedMap; use rustc_index::IndexVec; use rustc_macros::HashStable_Generic; @@ -874,12 
+873,12 @@ pub struct OwnerInfo<'hir> { /// Contents of the HIR. pub nodes: OwnerNodes<'hir>, /// Map from each nested owner to its parent's local id. - pub parenting: FxHashMap<LocalDefId, ItemLocalId>, + pub parenting: LocalDefIdMap<ItemLocalId>, /// Collected attributes of the HIR nodes. pub attrs: AttributeMap<'hir>, /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. - pub trait_map: FxHashMap<ItemLocalId, Box<[TraitCandidate]>>, + pub trait_map: ItemLocalMap<Box<[TraitCandidate]>>, } impl<'tcx> OwnerInfo<'tcx> { diff --git a/compiler/rustc_hir/src/pat_util.rs b/compiler/rustc_hir/src/pat_util.rs index 838c123f83c..e6050327186 100644 --- a/compiler/rustc_hir/src/pat_util.rs +++ b/compiler/rustc_hir/src/pat_util.rs @@ -1,7 +1,6 @@ use crate::def::{CtorOf, DefKind, Res}; -use crate::def_id::DefId; +use crate::def_id::{DefId, DefIdSet}; use crate::hir::{self, BindingAnnotation, ByRef, HirId, PatKind}; -use rustc_data_structures::fx::FxHashSet; use rustc_span::symbol::Ident; use rustc_span::Span; @@ -114,9 +113,9 @@ impl hir::Pat<'_> { } _ => true, }); - // We remove duplicates by inserting into a `FxHashSet` to avoid re-ordering + // We remove duplicates by inserting into a hash set to avoid re-ordering // the bounds - let mut duplicates = FxHashSet::default(); + let mut duplicates = DefIdSet::default(); variants.retain(|def_id| duplicates.insert(*def_id)); variants } diff --git a/compiler/rustc_hir_analysis/src/astconv/generics.rs b/compiler/rustc_hir_analysis/src/astconv/generics.rs index be73c027fdc..b495b00ec70 100644 --- a/compiler/rustc_hir_analysis/src/astconv/generics.rs +++ b/compiler/rustc_hir_analysis/src/astconv/generics.rs @@ -262,7 +262,7 @@ pub fn create_args_for_parent_generic_args<'tcx, 'a>( // impl const PartialEq for () {} // ``` // - // Since this is a const impl, we need to insert `<false>` at the end of + // Since this is a const impl, we need to insert a host arg at the end of // `PartialEq`'s generics, but this errors since `Rhs` isn't specified. // To work around this, we infer all arguments until we reach the host param. args.push(ctx.inferred_kind(Some(&args), param, infer_args)); diff --git a/compiler/rustc_hir_analysis/src/check/entry.rs b/compiler/rustc_hir_analysis/src/check/entry.rs index 8f194ae88ab..1d737e17e82 100644 --- a/compiler/rustc_hir_analysis/src/check/entry.rs +++ b/compiler/rustc_hir_analysis/src/check/entry.rs @@ -92,24 +92,6 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { let mut error = false; let main_diagnostics_def_id = main_fn_diagnostics_def_id(tcx, main_def_id, main_span); - let main_fn_generics = tcx.generics_of(main_def_id); - let main_fn_predicates = tcx.predicates_of(main_def_id); - if main_fn_generics.count() != 0 || !main_fnsig.bound_vars().is_empty() { - let generics_param_span = main_fn_generics_params_span(tcx, main_def_id); - tcx.sess.emit_err(errors::MainFunctionGenericParameters { - span: generics_param_span.unwrap_or(main_span), - label_span: generics_param_span, - }); - error = true; - } else if !main_fn_predicates.predicates.is_empty() { - // generics may bring in implicit predicates, so we skip this check if generics is present. 
- let generics_where_clauses_span = main_fn_where_clauses_span(tcx, main_def_id); - tcx.sess.emit_err(errors::WhereClauseOnMain { - span: generics_where_clauses_span.unwrap_or(main_span), - generics_span: generics_where_clauses_span, - }); - error = true; - } let main_asyncness = tcx.asyncness(main_def_id); if main_asyncness.is_async() { @@ -142,10 +124,6 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { if let Some(term_did) = tcx.lang_items().termination() { let return_ty = main_fnsig.output(); let return_ty_span = main_fn_return_type_span(tcx, main_def_id).unwrap_or(main_span); - if !return_ty.bound_vars().is_empty() { - tcx.sess.emit_err(errors::MainFunctionReturnTypeGeneric { span: return_ty_span }); - error = true; - } let return_ty = return_ty.skip_binder(); let infcx = tcx.infer_ctxt().build(); let cause = traits::ObligationCause::new( @@ -180,7 +158,7 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { Abi::Rust, )); - check_function_signature( + if check_function_signature( tcx, ObligationCause::new( main_span, @@ -189,7 +167,28 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { ), main_def_id, expected_sig, - ); + ) + .is_err() + { + return; + } + + let main_fn_generics = tcx.generics_of(main_def_id); + let main_fn_predicates = tcx.predicates_of(main_def_id); + if main_fn_generics.count() != 0 || !main_fnsig.bound_vars().is_empty() { + let generics_param_span = main_fn_generics_params_span(tcx, main_def_id); + tcx.sess.emit_err(errors::MainFunctionGenericParameters { + span: generics_param_span.unwrap_or(main_span), + label_span: generics_param_span, + }); + } else if !main_fn_predicates.predicates.is_empty() { + // generics may bring in implicit predicates, so we skip this check if generics is present. + let generics_where_clauses_span = main_fn_where_clauses_span(tcx, main_def_id); + tcx.sess.emit_err(errors::WhereClauseOnMain { + span: generics_where_clauses_span.unwrap_or(main_span), + generics_span: generics_where_clauses_span, + }); + } } fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { @@ -255,7 +254,7 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { Abi::Rust, )); - check_function_signature( + let _ = check_function_signature( tcx, ObligationCause::new( start_span, diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index 33337190562..126bab68ae3 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -55,7 +55,7 @@ fn equate_intrinsic_type<'tcx>( && gen_count_ok(own_counts.consts, n_cts, "const") { let it_def_id = it.owner_id.def_id; - check_function_signature( + let _ = check_function_signature( tcx, ObligationCause::new(it.span, it_def_id, ObligationCauseCode::IntrinsicType), it_def_id.into(), diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index df17879a967..e4904a0437b 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -77,6 +77,7 @@ use std::num::NonZeroU32; use check::check_mod_item_types; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_errors::ErrorGuaranteed; use rustc_errors::{pluralize, struct_span_err, Diagnostic, DiagnosticBuilder}; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::Visitor; @@ -570,7 +571,26 @@ pub fn check_function_signature<'tcx>( mut cause: ObligationCause<'tcx>, fn_id: DefId, expected_sig: 
ty::PolyFnSig<'tcx>, -) { +) -> Result<(), ErrorGuaranteed> { + fn extract_span_for_error_reporting<'tcx>( + tcx: TyCtxt<'tcx>, + err: TypeError<'_>, + cause: &ObligationCause<'tcx>, + fn_id: LocalDefId, + ) -> rustc_span::Span { + let mut args = { + let node = tcx.hir().expect_owner(fn_id); + let decl = node.fn_decl().unwrap_or_else(|| bug!("expected fn decl, found {:?}", node)); + decl.inputs.iter().map(|t| t.span).chain(std::iter::once(decl.output.span())) + }; + + match err { + TypeError::ArgumentMutability(i) + | TypeError::ArgumentSorts(ExpectedFound { .. }, i) => args.nth(i).unwrap(), + _ => cause.span(), + } + } + let local_id = fn_id.as_local().unwrap_or(CRATE_DEF_ID); let param_env = ty::ParamEnv::empty(); @@ -587,8 +607,7 @@ pub fn check_function_signature<'tcx>( Ok(()) => { let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(errors); - return; + return Err(infcx.err_ctxt().report_fulfillment_errors(errors)); } } Err(err) => { @@ -610,30 +629,14 @@ pub fn check_function_signature<'tcx>( false, false, ); - diag.emit(); - return; + return Err(diag.emit()); } } let outlives_env = OutlivesEnvironment::new(param_env); - let _ = ocx.resolve_regions_and_report_errors(local_id, &outlives_env); - - fn extract_span_for_error_reporting<'tcx>( - tcx: TyCtxt<'tcx>, - err: TypeError<'_>, - cause: &ObligationCause<'tcx>, - fn_id: LocalDefId, - ) -> rustc_span::Span { - let mut args = { - let node = tcx.hir().expect_owner(fn_id); - let decl = node.fn_decl().unwrap_or_else(|| bug!("expected fn decl, found {:?}", node)); - decl.inputs.iter().map(|t| t.span).chain(std::iter::once(decl.output.span())) - }; - - match err { - TypeError::ArgumentMutability(i) - | TypeError::ArgumentSorts(ExpectedFound { .. }, i) => args.nth(i).unwrap(), - _ => cause.span(), - } + if let Err(e) = ocx.resolve_regions_and_report_errors(local_id, &outlives_env) { + return Err(e); } + + Ok(()) } diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 51f38240033..5e6b54950b3 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -471,6 +471,65 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } + if let Some(def_id) = def_id + && self.tcx.def_kind(def_id) == hir::def::DefKind::Fn + && self.tcx.is_intrinsic(def_id) + && self.tcx.item_name(def_id) == sym::const_eval_select + { + let fn_sig = self.resolve_vars_if_possible(fn_sig); + for idx in 0..=1 { + let arg_ty = fn_sig.inputs()[idx + 1]; + let span = arg_exprs.get(idx + 1).map_or(call_expr.span, |arg| arg.span); + // Check that second and third argument of `const_eval_select` must be `FnDef`, and additionally that + // the second argument must be `const fn`. The first argument must be a tuple, but this is already expressed + // in the function signature (`F: FnOnce<ARG>`), so I did not bother to add another check here. + // + // This check is here because there is currently no way to express a trait bound for `FnDef` types only. 
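The comment above notes that there is currently no way to express a trait bound for `FnDef` types only, which is why this constraint on `const_eval_select`'s arguments is enforced by hand during type checking. A small, ordinary-Rust illustration of why a `Fn` bound cannot make that distinction (user code, not compiler internals):

fn call_with_one<F: Fn(i32) -> i32>(f: F) -> i32 {
    f(1)
}

fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    // `double` used by name has a unique zero-sized function item type
    // ("FnDef" inside the compiler); coerced, it becomes an ordinary `fn`
    // pointer. The bound `F: Fn(i32) -> i32` accepts both, so it cannot
    // require "function items only".
    assert_eq!(call_with_one(double), 2);
    assert_eq!(std::mem::size_of_val(&double), 0);

    let as_pointer: fn(i32) -> i32 = double;
    assert_eq!(call_with_one(as_pointer), 2);
    assert_ne!(std::mem::size_of_val(&as_pointer), 0);
}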
+ if let ty::FnDef(def_id, _args) = *arg_ty.kind() { + let fn_once_def_id = + self.tcx.require_lang_item(hir::LangItem::FnOnce, Some(span)); + let fn_once_output_def_id = + self.tcx.require_lang_item(hir::LangItem::FnOnceOutput, Some(span)); + if self.tcx.generics_of(fn_once_def_id).host_effect_index.is_none() { + if idx == 0 && !self.tcx.is_const_fn_raw(def_id) { + self.tcx.sess.emit_err(errors::ConstSelectMustBeConst { span }); + } + } else { + let const_param: ty::GenericArg<'tcx> = + ([self.tcx.consts.false_, self.tcx.consts.true_])[idx].into(); + self.register_predicate(traits::Obligation::new( + self.tcx, + self.misc(span), + self.param_env, + ty::TraitRef::new( + self.tcx, + fn_once_def_id, + [arg_ty.into(), fn_sig.inputs()[0].into(), const_param], + ), + )); + + self.register_predicate(traits::Obligation::new( + self.tcx, + self.misc(span), + self.param_env, + ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new( + self.tcx, + fn_once_output_def_id, + [arg_ty.into(), fn_sig.inputs()[0].into(), const_param], + ), + term: fn_sig.output().into(), + }, + )); + + self.select_obligations_where_possible(|_| {}); + } + } else { + self.tcx.sess.emit_err(errors::ConstSelectMustBeFn { span, ty: arg_ty }); + } + } + } + fn_sig.output() } diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index 7facf8a4016..2855cea80b2 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -261,7 +261,7 @@ fn check_panic_info_fn(tcx: TyCtxt<'_>, fn_id: LocalDefId, fn_sig: ty::FnSig<'_> bounds, ); - check_function_signature( + let _ = check_function_signature( tcx, ObligationCause::new( tcx.def_span(fn_id), @@ -300,7 +300,7 @@ fn check_lang_start_fn<'tcx>(tcx: TyCtxt<'tcx>, fn_sig: ty::FnSig<'tcx>, def_id: Abi::Rust, )); - check_function_signature( + let _ = check_function_signature( tcx, ObligationCause::new( tcx.def_span(def_id), diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs index 7e43d67587b..d19d304128a 100644 --- a/compiler/rustc_hir_typeck/src/closure.rs +++ b/compiler/rustc_hir_typeck/src/closure.rs @@ -650,9 +650,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }, ) } - // For a `gen {}` block created as a `gen fn` body, we need the return type to be - // (). - Some(hir::CoroutineKind::Gen(hir::CoroutineSource::Fn)) => self.tcx.types.unit, + // All `gen {}` and `async gen {}` must return unit. + Some(hir::CoroutineKind::Gen(_) | hir::CoroutineKind::AsyncGen(_)) => { + self.tcx.types.unit + } _ => astconv.ty_infer(None, decl.output.span()), }, diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 24b577fd3c5..4bc237c2383 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -1498,7 +1498,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let ty = self.resolve_vars_with_obligations(ty); if self.next_trait_solver() - && let ty::Alias(ty::Projection | ty::Inherent | ty::Weak, _) = ty.kind() + && let ty::Alias(..) 
= ty.kind() { match self .at(&self.misc(sp), self.param_env) diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 17022f1fd37..4caa0df58b6 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -230,11 +230,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let minimum_input_count = expected_input_tys.len(); let provided_arg_count = provided_args.len(); - let is_const_eval_select = matches!(fn_def_id, Some(def_id) if - self.tcx.def_kind(def_id) == hir::def::DefKind::Fn - && self.tcx.is_intrinsic(def_id) - && self.tcx.item_name(def_id) == sym::const_eval_select); - // We introduce a helper function to demand that a given argument satisfy a given input // This is more complicated than just checking type equality, as arguments could be coerced // This version writes those types back so further type checking uses the narrowed types @@ -269,30 +264,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return Compatibility::Incompatible(coerce_error); } - // Check that second and third argument of `const_eval_select` must be `FnDef`, and additionally that - // the second argument must be `const fn`. The first argument must be a tuple, but this is already expressed - // in the function signature (`F: FnOnce<ARG>`), so I did not bother to add another check here. - // - // This check is here because there is currently no way to express a trait bound for `FnDef` types only. - if is_const_eval_select && (1..=2).contains(&idx) { - if let ty::FnDef(def_id, args) = *checked_ty.kind() { - if idx == 1 { - if !self.tcx.is_const_fn_raw(def_id) { - self.tcx.sess.emit_err(errors::ConstSelectMustBeConst { - span: provided_arg.span, - }); - } else { - self.enforce_context_effects(provided_arg.span, def_id, args) - } - } - } else { - self.tcx.sess.emit_err(errors::ConstSelectMustBeFn { - span: provided_arg.span, - ty: checked_ty, - }); - } - } - // 3. Check if the formal type is a supertype of the checked one // and register any such obligations for future type checks let supertype_error = self.at(&self.misc(provided_arg.span), self.param_env).sup( diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs index d0b4889b45f..3ea1a52ae28 100644 --- a/compiler/rustc_index/src/bit_set.rs +++ b/compiler/rustc_index/src/bit_set.rs @@ -9,6 +9,7 @@ use std::slice; use arrayvec::ArrayVec; use smallvec::{smallvec, SmallVec}; +#[cfg(feature = "nightly")] use rustc_macros::{Decodable, Encodable}; use crate::{Idx, IndexVec}; @@ -111,7 +112,8 @@ macro_rules! bit_relations_inherent_impls { /// to or greater than the domain size. All operations that involve two bitsets /// will panic if the bitsets have differing domain sizes. /// -#[derive(Eq, PartialEq, Hash, Decodable, Encodable)] +#[cfg_attr(feature = "nightly", derive(Decodable, Encodable))] +#[derive(Eq, PartialEq, Hash)] pub struct BitSet<T> { domain_size: usize, words: SmallVec<[Word; 2]>, @@ -491,10 +493,21 @@ impl<T: Idx> ChunkedBitSet<T> { match *chunk { Zeros(chunk_domain_size) => { if chunk_domain_size > 1 { - // We take some effort to avoid copying the words. - let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed(); - // SAFETY: `words` can safely be all zeroes. - let mut words = unsafe { words.assume_init() }; + #[cfg(feature = "nightly")] + let mut words = { + // We take some effort to avoid copying the words. + let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed(); + // SAFETY: `words` can safely be all zeroes. 
+ unsafe { words.assume_init() } + }; + #[cfg(not(feature = "nightly"))] + let mut words = { + let words = mem::MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed(); + // SAFETY: `words` can safely be all zeroes. + let words = unsafe { words.assume_init() }; + // Unfortunate possibly-large copy + Rc::new(words) + }; let words_ref = Rc::get_mut(&mut words).unwrap(); let (word_index, mask) = chunk_word_index_and_mask(elem); @@ -545,10 +558,21 @@ impl<T: Idx> ChunkedBitSet<T> { Zeros(_) => false, Ones(chunk_domain_size) => { if chunk_domain_size > 1 { - // We take some effort to avoid copying the words. - let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed(); - // SAFETY: `words` can safely be all zeroes. - let mut words = unsafe { words.assume_init() }; + #[cfg(feature = "nightly")] + let mut words = { + // We take some effort to avoid copying the words. + let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed(); + // SAFETY: `words` can safely be all zeroes. + unsafe { words.assume_init() } + }; + #[cfg(not(feature = "nightly"))] + let mut words = { + let words = mem::MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed(); + // SAFETY: `words` can safely be all zeroes. + let words = unsafe { words.assume_init() }; + // Unfortunate possibly-large copy + Rc::new(words) + }; let words_ref = Rc::get_mut(&mut words).unwrap(); // Set only the bits in use. @@ -1564,7 +1588,8 @@ impl<T: Idx> From<BitSet<T>> for GrowableBitSet<T> { /// /// All operations that involve a row and/or column index will panic if the /// index exceeds the relevant bound. -#[derive(Clone, Eq, PartialEq, Hash, Decodable, Encodable)] +#[cfg_attr(feature = "nightly", derive(Decodable, Encodable))] +#[derive(Clone, Eq, PartialEq, Hash)] pub struct BitMatrix<R: Idx, C: Idx> { num_rows: usize, num_columns: usize, @@ -1993,7 +2018,8 @@ impl std::fmt::Debug for FiniteBitSet<u32> { /// A fixed-sized bitset type represented by an integer type. Indices outwith than the range /// representable by `T` are considered set. 
-#[derive(Copy, Clone, Eq, PartialEq, Decodable, Encodable)] +#[cfg_attr(feature = "nightly", derive(Decodable, Encodable))] +#[derive(Copy, Clone, Eq, PartialEq)] pub struct FiniteBitSet<T: FiniteBitSetTy>(pub T); impl<T: FiniteBitSetTy> FiniteBitSet<T> { diff --git a/compiler/rustc_index/src/lib.rs b/compiler/rustc_index/src/lib.rs index c5602392c53..185e0c7d698 100644 --- a/compiler/rustc_index/src/lib.rs +++ b/compiler/rustc_index/src/lib.rs @@ -14,7 +14,6 @@ )] #![cfg_attr(feature = "nightly", allow(internal_features))] -#[cfg(feature = "nightly")] pub mod bit_set; #[cfg(feature = "nightly")] pub mod interval; diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index c118c405c20..d396c41007b 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -2653,11 +2653,6 @@ impl<'tcx> TypeRelation<'tcx> for SameTypeModuloInfer<'_, 'tcx> { self.0.tcx } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - // Unused, only for consts which we treat as always equal - ty::ParamEnv::empty() - } - fn tag(&self) -> &'static str { "SameTypeModuloInfer" } diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index b6e86e2b676..0fbc4a0ce50 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -31,13 +31,12 @@ use super::outlives::test_type_match; /// all the variables as well as a set of errors that must be reported. #[instrument(level = "debug", skip(region_rels, var_infos, data))] pub(crate) fn resolve<'tcx>( - param_env: ty::ParamEnv<'tcx>, region_rels: &RegionRelations<'_, 'tcx>, var_infos: VarInfos, data: RegionConstraintData<'tcx>, ) -> (LexicalRegionResolutions<'tcx>, Vec<RegionResolutionError<'tcx>>) { let mut errors = vec![]; - let mut resolver = LexicalResolver { param_env, region_rels, var_infos, data }; + let mut resolver = LexicalResolver { region_rels, var_infos, data }; let values = resolver.infer_variable_values(&mut errors); (values, errors) } @@ -120,7 +119,6 @@ struct RegionAndOrigin<'tcx> { type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>; struct LexicalResolver<'cx, 'tcx> { - param_env: ty::ParamEnv<'tcx>, region_rels: &'cx RegionRelations<'cx, 'tcx>, var_infos: VarInfos, data: RegionConstraintData<'tcx>, @@ -914,12 +912,8 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { match bound { VerifyBound::IfEq(verify_if_eq_b) => { let verify_if_eq_b = var_values.normalize(self.region_rels.tcx, *verify_if_eq_b); - match test_type_match::extract_verify_if_eq( - self.tcx(), - self.param_env, - &verify_if_eq_b, - generic_ty, - ) { + match test_type_match::extract_verify_if_eq(self.tcx(), &verify_if_eq_b, generic_ty) + { Some(r) => { self.bound_is_met(&VerifyBound::OutlivedBy(r), var_values, generic_ty, min) } diff --git a/compiler/rustc_infer/src/infer/outlives/for_liveness.rs b/compiler/rustc_infer/src/infer/outlives/for_liveness.rs index 52cc107ae52..42e3d6cad5a 100644 --- a/compiler/rustc_infer/src/infer/outlives/for_liveness.rs +++ b/compiler/rustc_infer/src/infer/outlives/for_liveness.rs @@ -84,7 +84,6 @@ where } else { test_type_match::extract_verify_if_eq( tcx, - param_env, &outlives.map_bound(|ty::OutlivesPredicate(ty, bound)| { VerifyIfEq { ty, bound } }), diff --git a/compiler/rustc_infer/src/infer/outlives/mod.rs b/compiler/rustc_infer/src/infer/outlives/mod.rs index 
f7129a5ad89..6379f84aa25 100644 --- a/compiler/rustc_infer/src/infer/outlives/mod.rs +++ b/compiler/rustc_infer/src/infer/outlives/mod.rs @@ -67,7 +67,7 @@ impl<'tcx> InferCtxt<'tcx> { let region_rels = &RegionRelations::new(self.tcx, outlives_env.free_region_map()); let (lexical_region_resolutions, errors) = - lexical_region_resolve::resolve(outlives_env.param_env, region_rels, var_infos, data); + lexical_region_resolve::resolve(region_rels, var_infos, data); let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions)); assert!(old_value.is_none()); diff --git a/compiler/rustc_infer/src/infer/outlives/test_type_match.rs b/compiler/rustc_infer/src/infer/outlives/test_type_match.rs index 959b34aa145..236dc4ec384 100644 --- a/compiler/rustc_infer/src/infer/outlives/test_type_match.rs +++ b/compiler/rustc_infer/src/infer/outlives/test_type_match.rs @@ -36,15 +36,14 @@ use crate::infer::region_constraints::VerifyIfEq; /// like are used. This is a particular challenge since this function is invoked /// very late in inference and hence cannot make use of the normal inference /// machinery. -#[instrument(level = "debug", skip(tcx, param_env))] +#[instrument(level = "debug", skip(tcx))] pub fn extract_verify_if_eq<'tcx>( tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, verify_if_eq_b: &ty::Binder<'tcx, VerifyIfEq<'tcx>>, test_ty: Ty<'tcx>, ) -> Option<ty::Region<'tcx>> { assert!(!verify_if_eq_b.has_escaping_bound_vars()); - let mut m = MatchAgainstHigherRankedOutlives::new(tcx, param_env); + let mut m = MatchAgainstHigherRankedOutlives::new(tcx); let verify_if_eq = verify_if_eq_b.skip_binder(); m.relate(verify_if_eq.ty, test_ty).ok()?; @@ -73,10 +72,9 @@ pub fn extract_verify_if_eq<'tcx>( } /// True if a (potentially higher-ranked) outlives -#[instrument(level = "debug", skip(tcx, param_env))] +#[instrument(level = "debug", skip(tcx))] pub(super) fn can_match_erased_ty<'tcx>( tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, outlives_predicate: ty::Binder<'tcx, ty::TypeOutlivesPredicate<'tcx>>, erased_ty: Ty<'tcx>, ) -> bool { @@ -87,25 +85,20 @@ pub(super) fn can_match_erased_ty<'tcx>( // pointless micro-optimization true } else { - MatchAgainstHigherRankedOutlives::new(tcx, param_env).relate(outlives_ty, erased_ty).is_ok() + MatchAgainstHigherRankedOutlives::new(tcx).relate(outlives_ty, erased_ty).is_ok() } } struct MatchAgainstHigherRankedOutlives<'tcx> { tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, pattern_depth: ty::DebruijnIndex, map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>, } impl<'tcx> MatchAgainstHigherRankedOutlives<'tcx> { - fn new( - tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - ) -> MatchAgainstHigherRankedOutlives<'tcx> { + fn new(tcx: TyCtxt<'tcx>) -> MatchAgainstHigherRankedOutlives<'tcx> { MatchAgainstHigherRankedOutlives { tcx, - param_env, pattern_depth: ty::INNERMOST, map: FxHashMap::default(), } @@ -144,15 +137,13 @@ impl<'tcx> MatchAgainstHigherRankedOutlives<'tcx> { impl<'tcx> TypeRelation<'tcx> for MatchAgainstHigherRankedOutlives<'tcx> { fn tag(&self) -> &'static str { - "Match" + "MatchAgainstHigherRankedOutlives" } fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.param_env - } + fn a_is_expected(&self) -> bool { true } // irrelevant diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index bb578a482e4..90282f58e94 100644 --- a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ 
b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -1,7 +1,7 @@ use crate::infer::outlives::components::{compute_alias_components_recursive, Component}; use crate::infer::outlives::env::RegionBoundPairs; use crate::infer::region_constraints::VerifyIfEq; -use crate::infer::VerifyBound; +use crate::infer::{GenericKind, VerifyBound}; use rustc_data_structures::sso::SsoHashSet; use rustc_middle::ty::GenericArg; use rustc_middle::ty::{self, OutlivesPredicate, Ty, TyCtxt}; @@ -240,10 +240,20 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { "declared_generic_bounds_from_env_for_erased_ty: region_bound_pair = {:?}", (r, p) ); + // Fast path for the common case. + match (&p, erased_ty.kind()) { + // In outlive routines, all types are expected to be fully normalized. + // And therefore we can safely use structural equality for alias types. + (GenericKind::Param(p1), ty::Param(p2)) if p1 == p2 => {} + (GenericKind::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => {} + (GenericKind::Alias(a1), ty::Alias(_, a2)) if a1.def_id == a2.def_id => {} + _ => return None, + } + let p_ty = p.to_ty(tcx); let erased_p_ty = self.tcx.erase_regions(p_ty); (erased_p_ty == erased_ty) - .then_some(ty::Binder::dummy(ty::OutlivesPredicate(p.to_ty(tcx), r))) + .then_some(ty::Binder::dummy(ty::OutlivesPredicate(p_ty, r))) }); param_bounds @@ -312,14 +322,8 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { ) -> impl Iterator<Item = ty::Binder<'tcx, ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>>> { let tcx = self.tcx; - let param_env = self.param_env; clauses.filter_map(|p| p.as_type_outlives_clause()).filter(move |outlives_predicate| { - super::test_type_match::can_match_erased_ty( - tcx, - param_env, - *outlives_predicate, - erased_ty, - ) + super::test_type_match::can_match_erased_ty(tcx, *outlives_predicate, erased_ty) }) } } diff --git a/compiler/rustc_infer/src/infer/relate/combine.rs b/compiler/rustc_infer/src/infer/relate/combine.rs index dfaca3458d6..ee911c43284 100644 --- a/compiler/rustc_infer/src/infer/relate/combine.rs +++ b/compiler/rustc_infer/src/infer/relate/combine.rs @@ -563,6 +563,8 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> { } pub trait ObligationEmittingRelation<'tcx>: TypeRelation<'tcx> { + fn param_env(&self) -> ty::ParamEnv<'tcx>; + /// Register obligations that must hold in order for this relation to hold fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>); diff --git a/compiler/rustc_infer/src/infer/relate/equate.rs b/compiler/rustc_infer/src/infer/relate/equate.rs index 9943c638a91..cb62f258373 100644 --- a/compiler/rustc_infer/src/infer/relate/equate.rs +++ b/compiler/rustc_infer/src/infer/relate/equate.rs @@ -33,10 +33,6 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> { self.fields.tcx() } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.fields.param_env - } - fn a_is_expected(&self) -> bool { self.a_is_expected } @@ -174,6 +170,10 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> { } impl<'tcx> ObligationEmittingRelation<'tcx> for Equate<'_, '_, 'tcx> { + fn param_env(&self) -> ty::ParamEnv<'tcx> { + self.fields.param_env + } + fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ty::ToPredicate<'tcx>>) { self.fields.register_predicates(obligations); } diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs index 66f7b08ee12..665af7381dc 100644 --- a/compiler/rustc_infer/src/infer/relate/generalize.rs +++ b/compiler/rustc_infer/src/infer/relate/generalize.rs 
@@ -182,10 +182,6 @@ where self.infcx.tcx } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.delegate.param_env() - } - fn tag(&self) -> &'static str { "Generalizer" } diff --git a/compiler/rustc_infer/src/infer/relate/glb.rs b/compiler/rustc_infer/src/infer/relate/glb.rs index 6a3413879c4..aa89124301e 100644 --- a/compiler/rustc_infer/src/infer/relate/glb.rs +++ b/compiler/rustc_infer/src/infer/relate/glb.rs @@ -32,10 +32,6 @@ impl<'tcx> TypeRelation<'tcx> for Glb<'_, '_, 'tcx> { self.fields.tcx() } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.fields.param_env - } - fn a_is_expected(&self) -> bool { self.a_is_expected } @@ -138,6 +134,10 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Glb<'combine, 'infcx, } impl<'tcx> ObligationEmittingRelation<'tcx> for Glb<'_, '_, 'tcx> { + fn param_env(&self) -> ty::ParamEnv<'tcx> { + self.fields.param_env + } + fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ty::ToPredicate<'tcx>>) { self.fields.register_predicates(obligations); } diff --git a/compiler/rustc_infer/src/infer/relate/lub.rs b/compiler/rustc_infer/src/infer/relate/lub.rs index 41cd98ed0cf..87d777530c8 100644 --- a/compiler/rustc_infer/src/infer/relate/lub.rs +++ b/compiler/rustc_infer/src/infer/relate/lub.rs @@ -32,10 +32,6 @@ impl<'tcx> TypeRelation<'tcx> for Lub<'_, '_, 'tcx> { self.fields.tcx() } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.fields.param_env - } - fn a_is_expected(&self) -> bool { self.a_is_expected } @@ -138,6 +134,10 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Lub<'combine, 'infcx, } impl<'tcx> ObligationEmittingRelation<'tcx> for Lub<'_, '_, 'tcx> { + fn param_env(&self) -> ty::ParamEnv<'tcx> { + self.fields.param_env + } + fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ty::ToPredicate<'tcx>>) { self.fields.register_predicates(obligations); } diff --git a/compiler/rustc_infer/src/infer/relate/nll.rs b/compiler/rustc_infer/src/infer/relate/nll.rs index afc2a8b2f62..1ef865cfc5f 100644 --- a/compiler/rustc_infer/src/infer/relate/nll.rs +++ b/compiler/rustc_infer/src/infer/relate/nll.rs @@ -431,10 +431,6 @@ where self.infcx.tcx } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.delegate.param_env() - } - fn tag(&self) -> &'static str { "nll::subtype" } @@ -670,6 +666,10 @@ impl<'tcx, D> ObligationEmittingRelation<'tcx> for TypeRelating<'_, 'tcx, D> where D: TypeRelatingDelegate<'tcx>, { + fn param_env(&self) -> ty::ParamEnv<'tcx> { + self.delegate.param_env() + } + fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ty::ToPredicate<'tcx>>) { self.delegate.register_obligations( obligations diff --git a/compiler/rustc_infer/src/infer/relate/sub.rs b/compiler/rustc_infer/src/infer/relate/sub.rs index 5a623e48c93..36876acd7c0 100644 --- a/compiler/rustc_infer/src/infer/relate/sub.rs +++ b/compiler/rustc_infer/src/infer/relate/sub.rs @@ -39,10 +39,6 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> { self.fields.infcx.tcx } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.fields.param_env - } - fn a_is_expected(&self) -> bool { self.a_is_expected } @@ -203,6 +199,10 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> { } impl<'tcx> ObligationEmittingRelation<'tcx> for Sub<'_, '_, 'tcx> { + fn param_env(&self) -> ty::ParamEnv<'tcx> { + self.fields.param_env + } + fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ty::ToPredicate<'tcx>>) { self.fields.register_predicates(obligations); } diff --git 
a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp index d61ec0b641c..373bc5cc581 100644 --- a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp @@ -139,6 +139,9 @@ extern "C" void LLVMRustCoverageWriteMappingToBuffer( RustMappingRegions, NumMappingRegions)) { MappingRegions.emplace_back( fromRust(Region.Count), fromRust(Region.FalseCount), +#if LLVM_VERSION_GE(18, 0) + coverage::CounterMappingRegion::MCDCParameters{}, +#endif Region.FileID, Region.ExpandedFileID, Region.LineStart, Region.ColumnStart, Region.LineEnd, Region.ColumnEnd, fromRust(Region.Kind)); diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 24ab4f94d5c..281a0eafee1 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -12,7 +12,7 @@ use rustc_data_structures::unhash::UnhashMap; use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind}; use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro}; use rustc_hir::def::Res; -use rustc_hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc_hir::def_id::{DefIdMap, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::definitions::{DefPath, DefPathData}; use rustc_hir::diagnostic_items::DiagnosticItems; use rustc_index::Idx; @@ -1200,7 +1200,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { /// Iterates over the diagnostic items in the given crate. fn get_diagnostic_items(self) -> DiagnosticItems { - let mut id_to_name = FxHashMap::default(); + let mut id_to_name = DefIdMap::default(); let name_to_id = self .root .diagnostic_items diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index a69bff6ed8c..3a54f5f6b3d 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -69,7 +69,7 @@ use rustc_hir::def_id::{ CrateNum, DefId, DefIdMap, DefIdSet, LocalDefId, LocalDefIdMap, LocalDefIdSet, LocalModDefId, }; use rustc_hir::lang_items::{LangItem, LanguageItems}; -use rustc_hir::{Crate, ItemLocalId, TraitCandidate}; +use rustc_hir::{Crate, ItemLocalId, ItemLocalMap, TraitCandidate}; use rustc_index::IndexVec; use rustc_query_system::ich::StableHashingContext; use rustc_query_system::query::{try_get_cached, CacheSelector, QueryCache, QueryMode, QueryState}; @@ -1490,7 +1490,7 @@ rustc_queries! 
{ desc { "computing whether impls specialize one another" } } query in_scope_traits_map(_: hir::OwnerId) - -> Option<&'tcx FxHashMap<ItemLocalId, Box<[TraitCandidate]>>> { + -> Option<&'tcx ItemLocalMap<Box<[TraitCandidate]>>> { desc { "getting traits in scope at a block" } } diff --git a/compiler/rustc_middle/src/traits/select.rs b/compiler/rustc_middle/src/traits/select.rs index c52103eb247..734c2b61c07 100644 --- a/compiler/rustc_middle/src/traits/select.rs +++ b/compiler/rustc_middle/src/traits/select.rs @@ -153,7 +153,7 @@ pub enum SelectionCandidate<'tcx> { /// Implementation of a `Fn`-family trait by one of the anonymous /// types generated for a fn pointer type (e.g., `fn(int) -> int`) FnPointerCandidate { - is_const: bool, + fn_host_effect: ty::Const<'tcx>, }, TraitAliasCandidate, diff --git a/compiler/rustc_middle/src/traits/solve.rs b/compiler/rustc_middle/src/traits/solve.rs index 27a1e64a78b..048df367bd6 100644 --- a/compiler/rustc_middle/src/traits/solve.rs +++ b/compiler/rustc_middle/src/traits/solve.rs @@ -233,6 +233,27 @@ impl<'tcx> TypeVisitable<TyCtxt<'tcx>> for PredefinedOpaques<'tcx> { } } +/// Why a specific goal has to be proven. +/// +/// This is necessary as we treat nested goals different depending on +/// their source. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum GoalSource { + Misc, + /// We're proving a where-bound of an impl. + /// + /// FIXME(-Znext-solver=coinductive): Explain how and why this + /// changes whether cycles are coinductive. + /// + /// This also impacts whether we erase constraints on overflow. + /// Erasing constraints is generally very useful for perf and also + /// results in better error messages by avoiding spurious errors. + /// We do not erase overflow constraints in `normalizes-to` goals unless + /// they are from an impl where-clause. This is necessary due to + /// backwards compatability, cc trait-system-refactor-initiatitive#70. + ImplWhereBound, +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, HashStable)] pub enum IsNormalizesToHack { Yes, diff --git a/compiler/rustc_middle/src/traits/solve/inspect.rs b/compiler/rustc_middle/src/traits/solve/inspect.rs index 7883cd338be..77d112d0afc 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect.rs @@ -19,8 +19,8 @@ //! [canonicalized]: https://rustc-dev-guide.rust-lang.org/solve/canonicalization.html use super::{ - CandidateSource, Canonical, CanonicalInput, Certainty, Goal, IsNormalizesToHack, NoSolution, - QueryInput, QueryResult, + CandidateSource, Canonical, CanonicalInput, Certainty, Goal, GoalSource, IsNormalizesToHack, + NoSolution, QueryInput, QueryResult, }; use crate::{infer::canonical::CanonicalVarValues, ty}; use format::ProofTreeFormatter; @@ -115,7 +115,7 @@ impl Debug for Probe<'_> { pub enum ProbeStep<'tcx> { /// We added a goal to the `EvalCtxt` which will get proven /// the next time `EvalCtxt::try_evaluate_added_goals` is called. - AddGoal(CanonicalState<'tcx, Goal<'tcx, ty::Predicate<'tcx>>>), + AddGoal(GoalSource, CanonicalState<'tcx, Goal<'tcx, ty::Predicate<'tcx>>>), /// The inside of a `EvalCtxt::try_evaluate_added_goals` call. EvaluateGoals(AddedGoalsEvaluation<'tcx>), /// A call to `probe` while proving the current goal. 
This is diff --git a/compiler/rustc_middle/src/traits/solve/inspect/format.rs b/compiler/rustc_middle/src/traits/solve/inspect/format.rs index ab9e0283918..4e2207ed523 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect/format.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect/format.rs @@ -123,7 +123,13 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> { self.nested(|this| { for step in &probe.steps { match step { - ProbeStep::AddGoal(goal) => writeln!(this.f, "ADDED GOAL: {goal:?}")?, + ProbeStep::AddGoal(source, goal) => { + let source = match source { + GoalSource::Misc => "misc", + GoalSource::ImplWhereBound => "impl where-bound", + }; + writeln!(this.f, "ADDED GOAL ({source}): {goal:?}")? + } ProbeStep::EvaluateGoals(eval) => this.format_added_goals_evaluation(eval)?, ProbeStep::NestedProbe(probe) => this.format_probe(probe)?, ProbeStep::CommitIfOkStart => writeln!(this.f, "COMMIT_IF_OK START")?, diff --git a/compiler/rustc_middle/src/ty/_match.rs b/compiler/rustc_middle/src/ty/_match.rs index 85181720d17..a2794a100f1 100644 --- a/compiler/rustc_middle/src/ty/_match.rs +++ b/compiler/rustc_middle/src/ty/_match.rs @@ -20,12 +20,11 @@ use crate::ty::{self, InferConst, Ty, TyCtxt}; /// affects any type variables or unification state. pub struct MatchAgainstFreshVars<'tcx> { tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, } impl<'tcx> MatchAgainstFreshVars<'tcx> { - pub fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> MatchAgainstFreshVars<'tcx> { - MatchAgainstFreshVars { tcx, param_env } + pub fn new(tcx: TyCtxt<'tcx>) -> MatchAgainstFreshVars<'tcx> { + MatchAgainstFreshVars { tcx } } } @@ -33,13 +32,11 @@ impl<'tcx> TypeRelation<'tcx> for MatchAgainstFreshVars<'tcx> { fn tag(&self) -> &'static str { "MatchAgainstFreshVars" } + fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.param_env - } fn a_is_expected(&self) -> bool { true } // irrelevant diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 96de9c447b6..35c135830c3 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -192,7 +192,7 @@ pub struct ResolverAstLowering { pub next_node_id: ast::NodeId, - pub node_id_to_def_id: FxHashMap<ast::NodeId, LocalDefId>, + pub node_id_to_def_id: NodeMap<LocalDefId>, pub def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>, pub trait_map: NodeMap<Vec<hir::TraitCandidate>>, diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs index d7d9afc30e7..9d92f81db0b 100644 --- a/compiler/rustc_middle/src/ty/relate.rs +++ b/compiler/rustc_middle/src/ty/relate.rs @@ -23,8 +23,6 @@ pub enum Cause { pub trait TypeRelation<'tcx>: Sized { fn tcx(&self) -> TyCtxt<'tcx>; - fn param_env(&self) -> ty::ParamEnv<'tcx>; - /// Returns a static string we can use for printouts. fn tag(&self) -> &'static str; @@ -505,13 +503,9 @@ pub fn structurally_relate_tys<'tcx, R: TypeRelation<'tcx>>( Err(err) => { // Check whether the lengths are both concrete/known values, // but are unequal, for better diagnostics. - // - // It might seem dubious to eagerly evaluate these constants here, - // we however cannot end up with errors in `Relate` during both - // `type_of` and `predicates_of`. This means that evaluating the - // constants should not cause cycle errors here. 
- let sz_a = sz_a.try_eval_target_usize(tcx, relation.param_env()); - let sz_b = sz_b.try_eval_target_usize(tcx, relation.param_env()); + let sz_a = sz_a.try_to_target_usize(tcx); + let sz_b = sz_b.try_to_target_usize(tcx); + match (sz_a, sz_b) { (Some(sz_a_val), Some(sz_b_val)) if sz_a_val != sz_b_val => Err( TypeError::FixedArraySize(expected_found(relation, sz_a_val, sz_b_val)), diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index 541b87af797..487b1f44b5e 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -1699,59 +1699,51 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("tested_candidates: {}", total_candidate_count - candidates.len()); debug!("untested_candidates: {}", candidates.len()); - // HACK(matthewjasper) This is a closure so that we can let the test - // create its blocks before the rest of the match. This currently - // improves the speed of llvm when optimizing long string literal - // matches - let make_target_blocks = move |this: &mut Self| -> Vec<BasicBlock> { - // The block that we should branch to if none of the - // `target_candidates` match. This is either the block where we - // start matching the untested candidates if there are any, - // otherwise it's the `otherwise_block`. - let remainder_start = &mut None; - let remainder_start = - if candidates.is_empty() { &mut *otherwise_block } else { remainder_start }; - - // For each outcome of test, process the candidates that still - // apply. Collect a list of blocks where control flow will - // branch if one of the `target_candidate` sets is not - // exhaustive. - let target_blocks: Vec<_> = target_candidates - .into_iter() - .map(|mut candidates| { - if !candidates.is_empty() { - let candidate_start = this.cfg.start_new_block(); - this.match_candidates( - span, - scrutinee_span, - candidate_start, - remainder_start, - &mut *candidates, - fake_borrows, - ); - candidate_start - } else { - *remainder_start.get_or_insert_with(|| this.cfg.start_new_block()) - } - }) - .collect(); - - if !candidates.is_empty() { - let remainder_start = remainder_start.unwrap_or_else(|| this.cfg.start_new_block()); - this.match_candidates( - span, - scrutinee_span, - remainder_start, - otherwise_block, - candidates, - fake_borrows, - ); - }; + // The block that we should branch to if none of the + // `target_candidates` match. This is either the block where we + // start matching the untested candidates if there are any, + // otherwise it's the `otherwise_block`. + let remainder_start = &mut None; + let remainder_start = + if candidates.is_empty() { &mut *otherwise_block } else { remainder_start }; + + // For each outcome of test, process the candidates that still + // apply. Collect a list of blocks where control flow will + // branch if one of the `target_candidate` sets is not + // exhaustive. 
+ let target_blocks: Vec<_> = target_candidates + .into_iter() + .map(|mut candidates| { + if !candidates.is_empty() { + let candidate_start = self.cfg.start_new_block(); + self.match_candidates( + span, + scrutinee_span, + candidate_start, + remainder_start, + &mut *candidates, + fake_borrows, + ); + candidate_start + } else { + *remainder_start.get_or_insert_with(|| self.cfg.start_new_block()) + } + }) + .collect(); - target_blocks - }; + if !candidates.is_empty() { + let remainder_start = remainder_start.unwrap_or_else(|| self.cfg.start_new_block()); + self.match_candidates( + span, + scrutinee_span, + remainder_start, + otherwise_block, + candidates, + fake_borrows, + ); + } - self.perform_test(span, scrutinee_span, block, &match_place, &test, make_target_blocks); + self.perform_test(span, scrutinee_span, block, &match_place, &test, target_blocks); } /// Determine the fake borrows that are needed from a set of places that diff --git a/compiler/rustc_mir_build/src/build/matches/test.rs b/compiler/rustc_mir_build/src/build/matches/test.rs index d1952704da3..53e5d70f946 100644 --- a/compiler/rustc_mir_build/src/build/matches/test.rs +++ b/compiler/rustc_mir_build/src/build/matches/test.rs @@ -147,7 +147,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - #[instrument(skip(self, make_target_blocks, place_builder), level = "debug")] + #[instrument(skip(self, target_blocks, place_builder), level = "debug")] pub(super) fn perform_test( &mut self, match_start_span: Span, @@ -155,7 +155,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { block: BasicBlock, place_builder: &PlaceBuilder<'tcx>, test: &Test<'tcx>, - make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>, + target_blocks: Vec<BasicBlock>, ) { let place = place_builder.to_place(self); let place_ty = place.ty(&self.local_decls, self.tcx); @@ -164,7 +164,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let source_info = self.source_info(test.span); match test.kind { TestKind::Switch { adt_def, ref variants } => { - let target_blocks = make_target_blocks(self); // Variants is a BitVec of indexes into adt_def.variants. 
let num_enum_variants = adt_def.variants().len(); debug_assert_eq!(target_blocks.len(), num_enum_variants + 1); @@ -210,7 +209,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } TestKind::SwitchInt { switch_ty, ref options } => { - let target_blocks = make_target_blocks(self); let terminator = if *switch_ty.kind() == ty::Bool { assert!(!options.is_empty() && options.len() <= 2); let [first_bb, second_bb] = *target_blocks else { @@ -240,6 +238,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TestKind::Eq { value, ty } => { let tcx = self.tcx; + let [success_block, fail_block] = *target_blocks else { + bug!("`TestKind::Eq` should have two target blocks") + }; if let ty::Adt(def, _) = ty.kind() && Some(def.did()) == tcx.lang_items().string() { @@ -280,38 +281,43 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); self.non_scalar_compare( eq_block, - make_target_blocks, + success_block, + fail_block, source_info, value, ref_str, ref_str_ty, ); - return; - } - if !ty.is_scalar() { + } else if !ty.is_scalar() { // Use `PartialEq::eq` instead of `BinOp::Eq` // (the binop can only handle primitives) self.non_scalar_compare( block, - make_target_blocks, + success_block, + fail_block, source_info, value, place, ty, ); - } else if let [success, fail] = *make_target_blocks(self) { + } else { assert_eq!(value.ty(), ty); let expect = self.literal_operand(test.span, value); let val = Operand::Copy(place); - self.compare(block, success, fail, source_info, BinOp::Eq, expect, val); - } else { - bug!("`TestKind::Eq` should have two target blocks"); + self.compare( + block, + success_block, + fail_block, + source_info, + BinOp::Eq, + expect, + val, + ); } } TestKind::Range(ref range) => { let lower_bound_success = self.cfg.start_new_block(); - let target_blocks = make_target_blocks(self); // Test `val` by computing `lo <= val && val <= hi`, using primitive comparisons. // FIXME: skip useless comparison when the range is half-open. 
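For illustration: the hunks above drop the lazily-called `make_target_blocks` closure and instead hand `perform_test` an eagerly built `Vec<BasicBlock>`, which each test kind destructures with a slice pattern plus `let`-`else`. A minimal standalone sketch of that idiom, with `usize` standing in for `BasicBlock` and `panic!` in place of the compiler's `bug!`:

    fn eq_targets(target_blocks: Vec<usize>) -> (usize, usize) {
        // An equality test expects exactly two target blocks: success and failure.
        let [success_block, fail_block] = *target_blocks else {
            panic!("`TestKind::Eq` should have two target blocks");
        };
        (success_block, fail_block)
    }

    fn main() {
        assert_eq!(eq_targets(vec![3, 7]), (3, 7));
    }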
@@ -341,8 +347,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } TestKind::Len { len, op } => { - let target_blocks = make_target_blocks(self); - let usize_ty = self.tcx.types.usize; let actual = self.temp(usize_ty, test.span); @@ -406,7 +410,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn non_scalar_compare( &mut self, block: BasicBlock, - make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>, + success_block: BasicBlock, + fail_block: BasicBlock, source_info: SourceInfo, value: Const<'tcx>, mut val: Place<'tcx>, @@ -531,9 +536,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); self.diverge_from(block); - let [success_block, fail_block] = *make_target_blocks(self) else { - bug!("`TestKind::Eq` should have two target blocks") - }; // check the result self.cfg.terminate( eq_block, diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index db2624cac02..c66687330dc 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -6,7 +6,7 @@ use rustc_errors::{ }; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::{self, Ty}; -use rustc_pattern_analysis::{cx::MatchCheckCtxt, errors::Uncovered}; +use rustc_pattern_analysis::{errors::Uncovered, rustc::RustcMatchCheckCtxt}; use rustc_span::symbol::Symbol; use rustc_span::Span; @@ -454,7 +454,7 @@ pub enum UnusedUnsafeEnclosing { } pub(crate) struct NonExhaustivePatternsTypeNotEmpty<'p, 'tcx, 'm> { - pub cx: &'m MatchCheckCtxt<'p, 'tcx>, + pub cx: &'m RustcMatchCheckCtxt<'p, 'tcx>, pub expr_span: Span, pub span: Span, pub ty: Ty<'tcx>, diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index 792a443c908..c435f4023af 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -1,13 +1,13 @@ -use rustc_pattern_analysis::constructor::Constructor; -use rustc_pattern_analysis::cx::MatchCheckCtxt; use rustc_pattern_analysis::errors::Uncovered; -use rustc_pattern_analysis::pat::{DeconstructedPat, WitnessPat}; -use rustc_pattern_analysis::usefulness::{Usefulness, UsefulnessReport}; +use rustc_pattern_analysis::rustc::{ + Constructor, DeconstructedPat, RustcMatchCheckCtxt as MatchCheckCtxt, Usefulness, + UsefulnessReport, WitnessPat, +}; use rustc_pattern_analysis::{analyze_match, MatchArm}; use crate::errors::*; -use rustc_arena::TypedArena; +use rustc_arena::{DroplessArena, TypedArena}; use rustc_ast::Mutability; use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::stack::ensure_sufficient_stack; @@ -31,6 +31,7 @@ pub(crate) fn check_match(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(), Err let (thir, expr) = tcx.thir_body(def_id)?; let thir = thir.borrow(); let pattern_arena = TypedArena::default(); + let dropless_arena = DroplessArena::default(); let mut visitor = MatchVisitor { tcx, thir: &*thir, @@ -38,6 +39,7 @@ pub(crate) fn check_match(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(), Err lint_level: tcx.local_def_id_to_hir_id(def_id), let_source: LetSource::None, pattern_arena: &pattern_arena, + dropless_arena: &dropless_arena, error: Ok(()), }; visitor.visit_expr(&thir[expr]); @@ -82,6 +84,7 @@ struct MatchVisitor<'thir, 'p, 'tcx> { lint_level: HirId, let_source: LetSource, pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>, + dropless_arena: &'p DroplessArena, /// Tracks if we encountered an error while checking this body. 
That the first function to /// report it stores it here. Some functions return `Result` to allow callers to short-circuit /// on error, but callers don't need to store it here again. @@ -382,6 +385,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { param_env: self.param_env, module: self.tcx.parent_module(self.lint_level).to_def_id(), pattern_arena: self.pattern_arena, + dropless_arena: self.dropless_arena, match_lint_level: self.lint_level, whole_match_span, scrut_span, @@ -425,7 +429,8 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { let arm = &self.thir.arms[arm]; let got_error = self.with_lint_level(arm.lint_level, |this| { let Ok(pat) = this.lower_pattern(&cx, &arm.pattern) else { return true }; - let arm = MatchArm { pat, hir_id: this.lint_level, has_guard: arm.guard.is_some() }; + let arm = + MatchArm { pat, arm_data: this.lint_level, has_guard: arm.guard.is_some() }; tarms.push(arm); false }); @@ -548,7 +553,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { ) -> Result<(MatchCheckCtxt<'p, 'tcx>, UsefulnessReport<'p, 'tcx>), ErrorGuaranteed> { let cx = self.new_cx(refutability, None, scrut, pat.span); let pat = self.lower_pattern(&cx, pat)?; - let arms = [MatchArm { pat, hir_id: self.lint_level, has_guard: false }]; + let arms = [MatchArm { pat, arm_data: self.lint_level, has_guard: false }]; let report = analyze_match(&cx, &arms, pat.ty()); Ok((cx, report)) } @@ -847,34 +852,34 @@ fn report_arm_reachability<'p, 'tcx>( ); }; - use Usefulness::*; let mut catchall = None; for (arm, is_useful) in report.arm_usefulness.iter() { match is_useful { - Redundant => report_unreachable_pattern(arm.pat.span(), arm.hir_id, catchall), - Useful(redundant_spans) if redundant_spans.is_empty() => {} + Usefulness::Redundant => { + report_unreachable_pattern(*arm.pat.data(), arm.arm_data, catchall) + } + Usefulness::Useful(redundant_subpats) if redundant_subpats.is_empty() => {} // The arm is reachable, but contains redundant subpatterns (from or-patterns). - Useful(redundant_spans) => { - let mut redundant_spans = redundant_spans.clone(); + Usefulness::Useful(redundant_subpats) => { + let mut redundant_subpats = redundant_subpats.clone(); // Emit lints in the order in which they occur in the file. - redundant_spans.sort_unstable(); - for span in redundant_spans { - report_unreachable_pattern(span, arm.hir_id, None); + redundant_subpats.sort_unstable_by_key(|pat| pat.data()); + for pat in redundant_subpats { + report_unreachable_pattern(*pat.data(), arm.arm_data, None); } } } if !arm.has_guard && catchall.is_none() && pat_is_catchall(arm.pat) { - catchall = Some(arm.pat.span()); + catchall = Some(*arm.pat.data()); } } } /// Checks for common cases of "catchall" patterns that may not be intended as such. 
fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool { - use Constructor::*; match pat.ctor() { - Wildcard => true, - Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)), + Constructor::Wildcard => true, + Constructor::Struct | Constructor::Ref => pat.iter_fields().all(|pat| pat_is_catchall(pat)), _ => false, } } @@ -885,7 +890,7 @@ fn report_non_exhaustive_match<'p, 'tcx>( thir: &Thir<'tcx>, scrut_ty: Ty<'tcx>, sp: Span, - witnesses: Vec<WitnessPat<'tcx>>, + witnesses: Vec<WitnessPat<'p, 'tcx>>, arms: &[ArmId], expr_span: Span, ) -> ErrorGuaranteed { @@ -1082,10 +1087,10 @@ fn report_non_exhaustive_match<'p, 'tcx>( fn joined_uncovered_patterns<'p, 'tcx>( cx: &MatchCheckCtxt<'p, 'tcx>, - witnesses: &[WitnessPat<'tcx>], + witnesses: &[WitnessPat<'p, 'tcx>], ) -> String { const LIMIT: usize = 3; - let pat_to_str = |pat: &WitnessPat<'tcx>| cx.hoist_witness_pat(pat).to_string(); + let pat_to_str = |pat: &WitnessPat<'p, 'tcx>| cx.hoist_witness_pat(pat).to_string(); match witnesses { [] => bug!(), [witness] => format!("`{}`", cx.hoist_witness_pat(witness)), @@ -1103,7 +1108,7 @@ fn joined_uncovered_patterns<'p, 'tcx>( fn collect_non_exhaustive_tys<'tcx>( cx: &MatchCheckCtxt<'_, 'tcx>, - pat: &WitnessPat<'tcx>, + pat: &WitnessPat<'_, 'tcx>, non_exhaustive_tys: &mut FxIndexSet<Ty<'tcx>>, ) { if matches!(pat.ctor(), Constructor::NonExhaustive) { @@ -1122,7 +1127,7 @@ fn collect_non_exhaustive_tys<'tcx>( fn report_adt_defined_here<'tcx>( tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, - witnesses: &[WitnessPat<'tcx>], + witnesses: &[WitnessPat<'_, 'tcx>], point_at_non_local_ty: bool, ) -> Option<AdtDefinedHere<'tcx>> { let ty = ty.peel_refs(); @@ -1144,15 +1149,14 @@ fn report_adt_defined_here<'tcx>( Some(AdtDefinedHere { adt_def_span, ty, variants }) } -fn maybe_point_at_variant<'a, 'tcx: 'a>( +fn maybe_point_at_variant<'a, 'p: 'a, 'tcx: 'p>( tcx: TyCtxt<'tcx>, def: AdtDef<'tcx>, - patterns: impl Iterator<Item = &'a WitnessPat<'tcx>>, + patterns: impl Iterator<Item = &'a WitnessPat<'p, 'tcx>>, ) -> Vec<Span> { - use Constructor::*; let mut covered = vec![]; for pattern in patterns { - if let Variant(variant_index) = pattern.ctor() { + if let Constructor::Variant(variant_index) = pattern.ctor() { if let ty::Adt(this_def, _) = pattern.ty().kind() && this_def.did() != def.did() { diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 9e3637ea9f3..c077e0a83a1 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -10,13 +10,13 @@ use crate::errors::{ ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything, DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg, GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg, - HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon, - IncorrectUseOfAwait, PatternMethodParamWithoutBody, QuestionMarkInType, QuestionMarkInTypeSugg, - SelfParamNotFirst, StructLiteralBodyWithoutPath, StructLiteralBodyWithoutPathSugg, - StructLiteralNeedingParens, StructLiteralNeedingParensSugg, SuggAddMissingLetStmt, - SuggEscapeIdentifier, SuggRemoveComma, TernaryOperator, UnexpectedConstInGenericParam, - UnexpectedConstParamDeclaration, UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, - UseEqInstead, WrapType, + HelpIdentifierStartsWithNumber, HelpUseLatestEdition, InInTypo, IncorrectAwait, + IncorrectSemicolon, IncorrectUseOfAwait, PatternMethodParamWithoutBody, 
QuestionMarkInType, + QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath, + StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens, StructLiteralNeedingParensSugg, + SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma, TernaryOperator, + UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration, + UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType, }; use crate::fluent_generated as fluent; use crate::parser; @@ -640,6 +640,28 @@ impl<'a> Parser<'a> { } } + // Try to detect an intended c-string literal while using a pre-2021 edition. The heuristic + // here is to identify a cooked, uninterpolated `c` id immediately followed by a string, or + // a cooked, uninterpolated `cr` id immediately followed by a string or a `#`, in an edition + // where c-string literals are not allowed. There is the very slight possibility of a false + // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying + // that in the parser requires unbounded lookahead, so we only add a hint to the existing + // error rather than replacing it entirely. + if ((self.prev_token.kind == TokenKind::Ident(sym::c, false) + && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }))) + || (self.prev_token.kind == TokenKind::Ident(sym::cr, false) + && matches!( + &self.token.kind, + TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound + ))) + && self.prev_token.span.hi() == self.token.span.lo() + && !self.token.span.at_least_rust_2021() + { + err.note("you may be trying to write a c-string literal"); + err.note("c-string literals require Rust 2021 or later"); + HelpUseLatestEdition::new().add_to_diagnostic(&mut err); + } + // `pub` may be used for an item or `pub(crate)` if self.prev_token.is_ident_named(sym::public) && (self.token.can_begin_item() diff --git a/compiler/rustc_passes/src/diagnostic_items.rs b/compiler/rustc_passes/src/diagnostic_items.rs index 5f767c9acaa..d8b9f4fae87 100644 --- a/compiler/rustc_passes/src/diagnostic_items.rs +++ b/compiler/rustc_passes/src/diagnostic_items.rs @@ -83,9 +83,6 @@ fn all_diagnostic_items(tcx: TyCtxt<'_>, (): ()) -> DiagnosticItems { // Collect diagnostic items in other crates. for &cnum in tcx.crates(()).iter().chain(std::iter::once(&LOCAL_CRATE)) { - // We are collecting many DiagnosticItems hash maps into one - // DiagnosticItems hash map. The iteration order does not matter. 
- #[allow(rustc::potential_query_instability)] for (&name, &def_id) in &tcx.diagnostic_items(cnum).name_to_id { collect_item(tcx, &mut items, name, def_id); } diff --git a/compiler/rustc_pattern_analysis/Cargo.toml b/compiler/rustc_pattern_analysis/Cargo.toml index 0639944a45c..908d00cf105 100644 --- a/compiler/rustc_pattern_analysis/Cargo.toml +++ b/compiler/rustc_pattern_analysis/Cargo.toml @@ -6,17 +6,40 @@ edition = "2021" [dependencies] # tidy-alphabetical-start rustc_apfloat = "0.2.0" -rustc_arena = { path = "../rustc_arena" } -rustc_data_structures = { path = "../rustc_data_structures" } -rustc_errors = { path = "../rustc_errors" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_hir = { path = "../rustc_hir" } -rustc_index = { path = "../rustc_index" } -rustc_macros = { path = "../rustc_macros" } -rustc_middle = { path = "../rustc_middle" } -rustc_session = { path = "../rustc_session" } -rustc_span = { path = "../rustc_span" } -rustc_target = { path = "../rustc_target" } -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +rustc_arena = { path = "../rustc_arena", optional = true } +rustc_data_structures = { path = "../rustc_data_structures", optional = true } +rustc_errors = { path = "../rustc_errors", optional = true } +rustc_fluent_macro = { path = "../rustc_fluent_macro", optional = true } +rustc_hir = { path = "../rustc_hir", optional = true } +rustc_index = { path = "../rustc_index", default-features = false } +rustc_macros = { path = "../rustc_macros", optional = true } +rustc_middle = { path = "../rustc_middle", optional = true } +rustc_session = { path = "../rustc_session", optional = true } +rustc_span = { path = "../rustc_span", optional = true } +rustc_target = { path = "../rustc_target", optional = true } +smallvec = { version = "1.8.1", features = ["union"] } tracing = "0.1" +typed-arena = { version = "2.0.2", optional = true } # tidy-alphabetical-end + +[features] +default = ["rustc"] +# It's not possible to only enable the `typed_arena` dependency when the `rustc` feature is off, so +# we use another feature instead. The crate won't compile if one of these isn't enabled. +rustc = [ + "dep:rustc_arena", + "dep:rustc_data_structures", + "dep:rustc_errors", + "dep:rustc_fluent_macro", + "dep:rustc_hir", + "dep:rustc_macros", + "dep:rustc_middle", + "dep:rustc_session", + "dep:rustc_span", + "dep:rustc_target", + "smallvec/may_dangle", + "rustc_index/nightly", +] +stable = [ + "dep:typed-arena", +] diff --git a/compiler/rustc_pattern_analysis/src/constructor.rs b/compiler/rustc_pattern_analysis/src/constructor.rs index 6486ad8b483..af0a7497a34 100644 --- a/compiler/rustc_pattern_analysis/src/constructor.rs +++ b/compiler/rustc_pattern_analysis/src/constructor.rs @@ -40,7 +40,7 @@ //! - That have no non-trivial intersection with any of the constructors in the column (i.e. they're //! each either disjoint with or covered by any given column constructor). //! -//! We compute this in two steps: first [`crate::cx::MatchCheckCtxt::ctors_for_ty`] determines the +//! We compute this in two steps: first [`TypeCx::ctors_for_ty`] determines the //! set of all possible constructors for the type. Then [`ConstructorSet::split`] looks at the //! column of constructors and splits the set into groups accordingly. The precise invariants of //! [`ConstructorSet::split`] is described in [`SplitConstructorSet`]. @@ -136,7 +136,7 @@ //! the algorithm can't distinguish them from a nonempty constructor. The only known case where this //! 
could happen is the `[..]` pattern on `[!; N]` with `N > 0` so we must take care to not emit it. //! -//! This is all handled by [`crate::cx::MatchCheckCtxt::ctors_for_ty`] and +//! This is all handled by [`TypeCx::ctors_for_ty`] and //! [`ConstructorSet::split`]. The invariants of [`SplitConstructorSet`] are also of interest. //! //! @@ -155,17 +155,15 @@ use std::iter::once; use smallvec::SmallVec; use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS}; -use rustc_data_structures::fx::FxHashSet; -use rustc_hir::RangeEnd; +use rustc_index::bit_set::{BitSet, GrowableBitSet}; use rustc_index::IndexVec; -use rustc_middle::mir::Const; -use rustc_target::abi::VariantIdx; use self::Constructor::*; use self::MaybeInfiniteInt::*; use self::SliceKind::*; -use crate::usefulness::PatCtxt; +use crate::usefulness::PlaceCtxt; +use crate::TypeCx; /// Whether we have seen a constructor in the column or not. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] @@ -174,6 +172,21 @@ enum Presence { Seen, } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum RangeEnd { + Included, + Excluded, +} + +impl fmt::Display for RangeEnd { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + RangeEnd::Included => "..=", + RangeEnd::Excluded => "..", + }) + } +} + /// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the /// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as /// `255`. See `signed_bias` for details. @@ -221,7 +234,7 @@ impl MaybeInfiniteInt { match self { Finite(n) => match n.checked_sub(1) { Some(m) => Finite(m), - None => bug!(), + None => panic!("Called `MaybeInfiniteInt::minus_one` on 0"), }, JustAfterMax => Finite(u128::MAX), x => x, @@ -234,7 +247,7 @@ impl MaybeInfiniteInt { Some(m) => Finite(m), None => JustAfterMax, }, - JustAfterMax => bug!(), + JustAfterMax => panic!("Called `MaybeInfiniteInt::plus_one` on u128::MAX+1"), x => x, } } @@ -253,7 +266,7 @@ pub struct IntRange { impl IntRange { /// Best effort; will not know that e.g. `255u8..` is a singleton. - pub(crate) fn is_singleton(&self) -> bool { + pub fn is_singleton(&self) -> bool { // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite // to infinite, this correctly only detects ranges that contain exacly one `Finite(x)`. self.lo.plus_one() == self.hi @@ -271,7 +284,7 @@ impl IntRange { } if lo >= hi { // This should have been caught earlier by E0030. - bug!("malformed range pattern: {lo:?}..{hi:?}"); + panic!("malformed range pattern: {lo:?}..{hi:?}"); } IntRange { lo, hi } } @@ -432,7 +445,7 @@ impl Slice { let kind = match (array_len, kind) { // If the middle `..` has length 0, we effectively have a fixed-length pattern. (Some(len), VarLen(prefix, suffix)) if prefix + suffix == len => FixedLen(len), - (Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => bug!( + (Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => panic!( "Slice pattern of length {} longer than its array length {len}", prefix + suffix ), @@ -532,7 +545,7 @@ impl Slice { // therefore `Presence::Seen` in the column. let mut min_var_len = usize::MAX; // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`. - let mut seen_fixed_lens = FxHashSet::default(); + let mut seen_fixed_lens = GrowableBitSet::new_empty(); match &mut max_slice { VarLen(max_prefix_len, max_suffix_len) => { // A length larger than any fixed-length slice encountered. 
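For illustration: the `MaybeInfiniteInt` comment above says values are encoded so that the ordering on `u128` matches the natural order of the original type, e.g. `-128i8` becomes `0` and `127i8` becomes `255`. A minimal sketch of that bias trick for `i8`, shown for intuition only (it is not the crate's actual `signed_bias` implementation):

    // Flipping the sign bit maps i8 onto u8 while preserving the ordering.
    fn encode_i8(x: i8) -> u8 {
        (x as u8) ^ 0x80
    }

    fn main() {
        assert_eq!(encode_i8(i8::MIN), 0);
        assert_eq!(encode_i8(-1), 127);
        assert_eq!(encode_i8(0), 128);
        assert_eq!(encode_i8(i8::MAX), 255);
        assert!(encode_i8(-5) < encode_i8(3)); // unsigned order matches signed order
    }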
@@ -600,7 +613,7 @@ impl Slice { smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| { let arity = kind.arity(); - let seen = if min_var_len <= arity || seen_fixed_lens.contains(&arity) { + let seen = if min_var_len <= arity || seen_fixed_lens.contains(arity) { Presence::Seen } else { Presence::Unseen @@ -630,12 +643,17 @@ impl OpaqueId { /// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and /// `Fields`. #[derive(Clone, Debug, PartialEq)] -pub enum Constructor<'tcx> { - /// The constructor for patterns that have a single constructor, like tuples, struct patterns, - /// and references. Fixed-length arrays are treated separately with `Slice`. - Single, +pub enum Constructor<Cx: TypeCx> { + /// Tuples and structs. + Struct, /// Enum variants. - Variant(VariantIdx), + Variant(Cx::VariantIdx), + /// References + Ref, + /// Array and slice patterns. + Slice(Slice), + /// Union field accesses. + UnionField, /// Booleans Bool(bool), /// Ranges of integer literal values (`2`, `2..=5` or `2..5`). @@ -644,9 +662,7 @@ pub enum Constructor<'tcx> { F32Range(IeeeFloat<SingleS>, IeeeFloat<SingleS>, RangeEnd), F64Range(IeeeFloat<DoubleS>, IeeeFloat<DoubleS>, RangeEnd), /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately. - Str(Const<'tcx>), - /// Array and slice patterns. - Slice(Slice), + Str(Cx::StrLit), /// Constants that must not be matched structurally. They are treated as black boxes for the /// purposes of exhaustiveness: we must not inspect them, and they don't count towards making a /// match exhaustive. @@ -669,12 +685,12 @@ pub enum Constructor<'tcx> { Missing, } -impl<'tcx> Constructor<'tcx> { +impl<Cx: TypeCx> Constructor<Cx> { pub(crate) fn is_non_exhaustive(&self) -> bool { matches!(self, NonExhaustive) } - pub(crate) fn as_variant(&self) -> Option<VariantIdx> { + pub(crate) fn as_variant(&self) -> Option<Cx::VariantIdx> { match self { Variant(i) => Some(*i), _ => None, @@ -701,8 +717,8 @@ impl<'tcx> Constructor<'tcx> { /// The number of fields for this constructor. This must be kept in sync with /// `Fields::wildcards`. - pub(crate) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize { - pcx.cx.ctor_arity(self, pcx.ty) + pub(crate) fn arity(&self, pcx: &PlaceCtxt<'_, '_, Cx>) -> usize { + pcx.ctor_arity(self) } /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`. @@ -710,20 +726,20 @@ impl<'tcx> Constructor<'tcx> { /// this checks for inclusion. // We inline because this has a single call site in `Matrix::specialize_constructor`. #[inline] - pub(crate) fn is_covered_by<'p>(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { + pub(crate) fn is_covered_by<'p>(&self, pcx: &PlaceCtxt<'_, 'p, Cx>, other: &Self) -> bool { match (self, other) { - (Wildcard, _) => { - span_bug!( - pcx.cx.scrut_span, - "Constructor splitting should not have returned `Wildcard`" - ) - } + (Wildcard, _) => pcx + .mcx + .tycx + .bug(format_args!("Constructor splitting should not have returned `Wildcard`")), // Wildcards cover anything (_, Wildcard) => true, // Only a wildcard pattern can match these special constructors. (Missing { .. 
} | NonExhaustive | Hidden, _) => false, - (Single, Single) => true, + (Struct, Struct) => true, + (Ref, Ref) => true, + (UnionField, UnionField) => true, (Variant(self_id), Variant(other_id)) => self_id == other_id, (Bool(self_b), Bool(other_b)) => self_b == other_b, @@ -756,12 +772,9 @@ impl<'tcx> Constructor<'tcx> { (Opaque(self_id), Opaque(other_id)) => self_id == other_id, (Opaque(..), _) | (_, Opaque(..)) => false, - _ => span_bug!( - pcx.cx.scrut_span, - "trying to compare incompatible constructors {:?} and {:?}", - self, - other - ), + _ => pcx.mcx.tycx.bug(format_args!( + "trying to compare incompatible constructors {self:?} and {other:?}" + )), } } } @@ -785,13 +798,16 @@ pub enum VariantVisibility { /// In terms of division of responsibility, [`ConstructorSet::split`] handles all of the /// `exhaustive_patterns` feature. #[derive(Debug)] -pub enum ConstructorSet { - /// The type has a single constructor, e.g. `&T` or a struct. `empty` tracks whether the - /// constructor is empty. - Single { empty: bool }, +pub enum ConstructorSet<Cx: TypeCx> { + /// The type is a tuple or struct. `empty` tracks whether the type is empty. + Struct { empty: bool }, /// This type has the following list of constructors. If `variants` is empty and /// `non_exhaustive` is false, don't use this; use `NoConstructors` instead. - Variants { variants: IndexVec<VariantIdx, VariantVisibility>, non_exhaustive: bool }, + Variants { variants: IndexVec<Cx::VariantIdx, VariantVisibility>, non_exhaustive: bool }, + /// The type is `&T`. + Ref, + /// The type is a union. + Union, /// Booleans. Bool, /// The type is spanned by integer values. The range or ranges give the set of allowed values. @@ -830,25 +846,25 @@ pub enum ConstructorSet { /// of the `ConstructorSet` for the type, yet if we forgot to include them in `present` we would be /// ignoring any row with `Opaque`s in the algorithm. Hence the importance of point 4. #[derive(Debug)] -pub(crate) struct SplitConstructorSet<'tcx> { - pub(crate) present: SmallVec<[Constructor<'tcx>; 1]>, - pub(crate) missing: Vec<Constructor<'tcx>>, - pub(crate) missing_empty: Vec<Constructor<'tcx>>, +pub(crate) struct SplitConstructorSet<Cx: TypeCx> { + pub(crate) present: SmallVec<[Constructor<Cx>; 1]>, + pub(crate) missing: Vec<Constructor<Cx>>, + pub(crate) missing_empty: Vec<Constructor<Cx>>, } -impl ConstructorSet { +impl<Cx: TypeCx> ConstructorSet<Cx> { /// This analyzes a column of constructors to 1/ determine which constructors of the type (if /// any) are missing; 2/ split constructors to handle non-trivial intersections e.g. on ranges /// or slices. This can get subtle; see [`SplitConstructorSet`] for details of this operation /// and its invariants. #[instrument(level = "debug", skip(self, pcx, ctors), ret)] - pub(crate) fn split<'a, 'tcx>( + pub(crate) fn split<'a>( &self, - pcx: &PatCtxt<'_, '_, 'tcx>, - ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone, - ) -> SplitConstructorSet<'tcx> + pcx: &PlaceCtxt<'_, '_, Cx>, + ctors: impl Iterator<Item = &'a Constructor<Cx>> + Clone, + ) -> SplitConstructorSet<Cx> where - 'tcx: 'a, + Cx: 'a, { let mut present: SmallVec<[_; 1]> = SmallVec::new(); // Empty constructors found missing. 
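For illustration: `ConstructorSet::split` above sorts the constructors seen in a match column into `present` and `missing`, and the enum-variant arm now records seen variants in a `BitSet` keyed by variant index instead of an `FxHashSet`. A simplified standalone sketch of that bookkeeping, with a plain `Vec<bool>` standing in for the bit set and `usize` for `Cx::VariantIdx`:

    fn split_variants(num_variants: usize, seen: &[usize]) -> (Vec<usize>, Vec<usize>) {
        let mut seen_set = vec![false; num_variants];
        for &idx in seen {
            seen_set[idx] = true;
        }
        let (mut present, mut missing) = (Vec::new(), Vec::new());
        for idx in 0..num_variants {
            if seen_set[idx] { present.push(idx) } else { missing.push(idx) }
        }
        (present, missing)
    }

    fn main() {
        // A four-variant enum where the match only mentions variants 0 and 2.
        assert_eq!(split_variants(4, &[2, 0, 2]), (vec![0, 2], vec![1, 3]));
    }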
@@ -866,22 +882,39 @@ impl ConstructorSet { } match self { - ConstructorSet::Single { empty } => { + ConstructorSet::Struct { empty } => { if !seen.is_empty() { - present.push(Single); + present.push(Struct); } else if *empty { - missing_empty.push(Single); + missing_empty.push(Struct); + } else { + missing.push(Struct); + } + } + ConstructorSet::Ref => { + if !seen.is_empty() { + present.push(Ref); } else { - missing.push(Single); + missing.push(Ref); + } + } + ConstructorSet::Union => { + if !seen.is_empty() { + present.push(UnionField); + } else { + missing.push(UnionField); } } ConstructorSet::Variants { variants, non_exhaustive } => { - let seen_set: FxHashSet<_> = seen.iter().map(|c| c.as_variant().unwrap()).collect(); + let mut seen_set: BitSet<_> = BitSet::new_empty(variants.len()); + for idx in seen.iter().map(|c| c.as_variant().unwrap()) { + seen_set.insert(idx); + } let mut skipped_a_hidden_variant = false; for (idx, visibility) in variants.iter_enumerated() { let ctor = Variant(idx); - if seen_set.contains(&idx) { + if seen_set.contains(idx) { present.push(ctor); } else { // We only put visible variants directly into `missing`. @@ -975,8 +1008,8 @@ impl ConstructorSet { // We have now grouped all the constructors into 3 buckets: present, missing, missing_empty. // In the absence of the `exhaustive_patterns` feature however, we don't count nested empty // types as empty. Only non-nested `!` or `enum Foo {}` are considered empty. - if !pcx.cx.tcx.features().exhaustive_patterns - && !(pcx.is_top_level && matches!(self, Self::NoConstructors)) + if !pcx.mcx.tycx.is_exhaustive_patterns_feature_on() + && !(pcx.is_scrutinee && matches!(self, Self::NoConstructors)) { // Treat all missing constructors as nonempty. // This clears `missing_empty`. diff --git a/compiler/rustc_pattern_analysis/src/errors.rs b/compiler/rustc_pattern_analysis/src/errors.rs index 0efa8a0ec08..88770b0c43b 100644 --- a/compiler/rustc_pattern_analysis/src/errors.rs +++ b/compiler/rustc_pattern_analysis/src/errors.rs @@ -1,11 +1,11 @@ -use crate::{cx::MatchCheckCtxt, pat::WitnessPat}; - use rustc_errors::{AddToDiagnostic, Diagnostic, SubdiagnosticMessage}; use rustc_macros::{LintDiagnostic, Subdiagnostic}; use rustc_middle::thir::Pat; use rustc_middle::ty::Ty; use rustc_span::Span; +use crate::rustc::{RustcMatchCheckCtxt, WitnessPat}; + #[derive(Subdiagnostic)] #[label(pattern_analysis_uncovered)] pub struct Uncovered<'tcx> { @@ -21,8 +21,8 @@ pub struct Uncovered<'tcx> { impl<'tcx> Uncovered<'tcx> { pub fn new<'p>( span: Span, - cx: &MatchCheckCtxt<'p, 'tcx>, - witnesses: Vec<WitnessPat<'tcx>>, + cx: &RustcMatchCheckCtxt<'p, 'tcx>, + witnesses: Vec<WitnessPat<'p, 'tcx>>, ) -> Self { let witness_1 = cx.hoist_witness_pat(witnesses.get(0).unwrap()); Self { diff --git a/compiler/rustc_pattern_analysis/src/lib.rs b/compiler/rustc_pattern_analysis/src/lib.rs index 07730aa49d3..785a60e9978 100644 --- a/compiler/rustc_pattern_analysis/src/lib.rs +++ b/compiler/rustc_pattern_analysis/src/lib.rs @@ -1,54 +1,133 @@ //! Analysis of patterns, notably match exhaustiveness checking. pub mod constructor; -pub mod cx; +#[cfg(feature = "rustc")] pub mod errors; +#[cfg(feature = "rustc")] pub(crate) mod lints; pub mod pat; +#[cfg(feature = "rustc")] +pub mod rustc; pub mod usefulness; #[macro_use] extern crate tracing; +#[cfg(feature = "rustc")] #[macro_use] extern crate rustc_middle; +#[cfg(feature = "rustc")] rustc_fluent_macro::fluent_messages! 
{ "../messages.ftl" } -use lints::PatternColumn; -use rustc_hir::HirId; +use std::fmt; + +use rustc_index::Idx; +#[cfg(feature = "rustc")] use rustc_middle::ty::Ty; -use usefulness::{compute_match_usefulness, UsefulnessReport}; -use crate::cx::MatchCheckCtxt; -use crate::lints::{lint_nonexhaustive_missing_variants, lint_overlapping_range_endpoints}; +use crate::constructor::{Constructor, ConstructorSet}; +#[cfg(feature = "rustc")] +use crate::lints::{ + lint_nonexhaustive_missing_variants, lint_overlapping_range_endpoints, PatternColumn, +}; use crate::pat::DeconstructedPat; +#[cfg(feature = "rustc")] +use crate::rustc::RustcMatchCheckCtxt; +#[cfg(feature = "rustc")] +use crate::usefulness::{compute_match_usefulness, ValidityConstraint}; + +// It's not possible to only enable the `typed_arena` dependency when the `rustc` feature is off, so +// we use another feature instead. The crate won't compile if one of these isn't enabled. +#[cfg(feature = "rustc")] +pub(crate) use rustc_arena::TypedArena; +#[cfg(feature = "stable")] +pub(crate) use typed_arena::Arena as TypedArena; + +pub trait Captures<'a> {} +impl<'a, T: ?Sized> Captures<'a> for T {} + +/// Context that provides type information about constructors. +/// +/// Most of the crate is parameterized on a type that implements this trait. +pub trait TypeCx: Sized + Clone + fmt::Debug { + /// The type of a pattern. + type Ty: Copy + Clone + fmt::Debug; // FIXME: remove Copy + /// The index of an enum variant. + type VariantIdx: Clone + Idx; + /// A string literal + type StrLit: Clone + PartialEq + fmt::Debug; + /// Extra data to store in a match arm. + type ArmData: Copy + Clone + fmt::Debug; + /// Extra data to store in a pattern. `Default` needed when we create fictitious wildcard + /// patterns during analysis. + type PatData: Clone + Default; + + fn is_opaque_ty(ty: Self::Ty) -> bool; + fn is_exhaustive_patterns_feature_on(&self) -> bool; + + /// The number of fields for this constructor. + fn ctor_arity(&self, ctor: &Constructor<Self>, ty: Self::Ty) -> usize; + + /// The types of the fields for this constructor. The result must have a length of + /// `ctor_arity()`. + fn ctor_sub_tys(&self, ctor: &Constructor<Self>, ty: Self::Ty) -> &[Self::Ty]; + + /// The set of all the constructors for `ty`. + /// + /// This must follow the invariants of `ConstructorSet` + fn ctors_for_ty(&self, ty: Self::Ty) -> ConstructorSet<Self>; + + /// Best-effort `Debug` implementation. + fn debug_pat(f: &mut fmt::Formatter<'_>, pat: &DeconstructedPat<'_, Self>) -> fmt::Result; + + /// Raise a bug. + fn bug(&self, fmt: fmt::Arguments<'_>) -> !; +} + +/// Context that provides information global to a match. +#[derive(Clone)] +pub struct MatchCtxt<'a, 'p, Cx: TypeCx> { + /// The context for type information. + pub tycx: &'a Cx, + /// An arena to store the wildcards we produce during analysis. + pub wildcard_arena: &'a TypedArena<DeconstructedPat<'p, Cx>>, +} + +impl<'a, 'p, Cx: TypeCx> Copy for MatchCtxt<'a, 'p, Cx> {} /// The arm of a match expression. -#[derive(Clone, Copy, Debug)] -pub struct MatchArm<'p, 'tcx> { - /// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`. - pub pat: &'p DeconstructedPat<'p, 'tcx>, - pub hir_id: HirId, +#[derive(Clone, Debug)] +pub struct MatchArm<'p, Cx: TypeCx> { + pub pat: &'p DeconstructedPat<'p, Cx>, pub has_guard: bool, + pub arm_data: Cx::ArmData, } +impl<'p, Cx: TypeCx> Copy for MatchArm<'p, Cx> {} + /// The entrypoint for this crate. 
Computes whether a match is exhaustive and which of its arms are /// useful, and runs some lints. +#[cfg(feature = "rustc")] pub fn analyze_match<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - arms: &[MatchArm<'p, 'tcx>], + tycx: &RustcMatchCheckCtxt<'p, 'tcx>, + arms: &[rustc::MatchArm<'p, 'tcx>], scrut_ty: Ty<'tcx>, -) -> UsefulnessReport<'p, 'tcx> { - let pat_column = PatternColumn::new(arms); +) -> rustc::UsefulnessReport<'p, 'tcx> { + // Arena to store the extra wildcards we construct during analysis. + let wildcard_arena = tycx.pattern_arena; + let scrut_validity = ValidityConstraint::from_bool(tycx.known_valid_scrutinee); + let cx = MatchCtxt { tycx, wildcard_arena }; - let report = compute_match_usefulness(cx, arms, scrut_ty); + let report = compute_match_usefulness(cx, arms, scrut_ty, scrut_validity); + + let pat_column = PatternColumn::new(arms); // Lint on ranges that overlap on their endpoints, which is likely a mistake. lint_overlapping_range_endpoints(cx, &pat_column); // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting // `if let`s. Only run if the match is exhaustive otherwise the error is redundant. - if cx.refutable && report.non_exhaustiveness_witnesses.is_empty() { + if tycx.refutable && report.non_exhaustiveness_witnesses.is_empty() { lint_nonexhaustive_missing_variants(cx, arms, &pat_column, scrut_ty) } diff --git a/compiler/rustc_pattern_analysis/src/lints.rs b/compiler/rustc_pattern_analysis/src/lints.rs index 8ab559c9e7a..072ef4836a8 100644 --- a/compiler/rustc_pattern_analysis/src/lints.rs +++ b/compiler/rustc_pattern_analysis/src/lints.rs @@ -6,15 +6,16 @@ use rustc_session::lint; use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS; use rustc_span::Span; -use crate::constructor::{Constructor, IntRange, MaybeInfiniteInt, SplitConstructorSet}; -use crate::cx::MatchCheckCtxt; +use crate::constructor::{IntRange, MaybeInfiniteInt}; use crate::errors::{ NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap, OverlappingRangeEndpoints, Uncovered, }; -use crate::pat::{DeconstructedPat, WitnessPat}; -use crate::usefulness::PatCtxt; -use crate::MatchArm; +use crate::rustc::{ + Constructor, DeconstructedPat, MatchArm, MatchCtxt, PlaceCtxt, RustcMatchCheckCtxt, + SplitConstructorSet, WitnessPat, +}; +use crate::TypeCx; /// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that /// inspect the same subvalue/place". @@ -27,11 +28,11 @@ use crate::MatchArm; /// /// This is not used in the main algorithm; only in lints. #[derive(Debug)] -pub(crate) struct PatternColumn<'p, 'tcx> { - patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>, +pub(crate) struct PatternColumn<'a, 'p, 'tcx> { + patterns: Vec<&'a DeconstructedPat<'p, 'tcx>>, } -impl<'p, 'tcx> PatternColumn<'p, 'tcx> { +impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> { pub(crate) fn new(arms: &[MatchArm<'p, 'tcx>]) -> Self { let mut patterns = Vec::with_capacity(arms.len()); for arm in arms { @@ -53,12 +54,11 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> { } // If the type is opaque and it is revealed anywhere in the column, we take the revealed // version. Otherwise we could encounter constructors for the revealed type and crash. 
- let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..)); let first_ty = self.patterns[0].ty(); - if is_opaque(first_ty) { + if RustcMatchCheckCtxt::is_opaque_ty(first_ty) { for pat in &self.patterns { let ty = pat.ty(); - if !is_opaque(ty) { + if !RustcMatchCheckCtxt::is_opaque_ty(ty) { return Some(ty); } } @@ -67,12 +67,12 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> { } /// Do constructor splitting on the constructors of the column. - fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> { + fn analyze_ctors(&self, pcx: &PlaceCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'p, 'tcx> { let column_ctors = self.patterns.iter().map(|p| p.ctor()); - pcx.cx.ctors_for_ty(pcx.ty).split(pcx, column_ctors) + pcx.ctors_for_ty().split(pcx, column_ctors) } - fn iter<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> { + fn iter<'b>(&'b self) -> impl Iterator<Item = &'a DeconstructedPat<'p, 'tcx>> + Captures<'b> { self.patterns.iter().copied() } @@ -81,7 +81,11 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> { /// This returns one column per field of the constructor. They usually all have the same length /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns /// which may change the lengths. - fn specialize(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Vec<Self> { + fn specialize( + &self, + pcx: &PlaceCtxt<'a, 'p, 'tcx>, + ctor: &Constructor<'p, 'tcx>, + ) -> Vec<PatternColumn<'a, 'p, 'tcx>> { let arity = ctor.arity(pcx); if arity == 0 { return Vec::new(); @@ -117,14 +121,14 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> { /// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned /// in a given column. #[instrument(level = "debug", skip(cx), ret)] -fn collect_nonexhaustive_missing_variants<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - column: &PatternColumn<'p, 'tcx>, -) -> Vec<WitnessPat<'tcx>> { +fn collect_nonexhaustive_missing_variants<'a, 'p, 'tcx>( + cx: MatchCtxt<'a, 'p, 'tcx>, + column: &PatternColumn<'a, 'p, 'tcx>, +) -> Vec<WitnessPat<'p, 'tcx>> { let Some(ty) = column.head_ty() else { return Vec::new(); }; - let pcx = &PatCtxt::new_dummy(cx, ty); + let pcx = &PlaceCtxt::new_dummy(cx, ty); let set = column.analyze_ctors(pcx); if set.present.is_empty() { @@ -135,7 +139,7 @@ fn collect_nonexhaustive_missing_variants<'p, 'tcx>( } let mut witnesses = Vec::new(); - if cx.is_foreign_non_exhaustive_enum(ty) { + if cx.tycx.is_foreign_non_exhaustive_enum(ty) { witnesses.extend( set.missing .into_iter() @@ -164,14 +168,15 @@ fn collect_nonexhaustive_missing_variants<'p, 'tcx>( witnesses } -pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, +pub(crate) fn lint_nonexhaustive_missing_variants<'a, 'p, 'tcx>( + cx: MatchCtxt<'a, 'p, 'tcx>, arms: &[MatchArm<'p, 'tcx>], - pat_column: &PatternColumn<'p, 'tcx>, + pat_column: &PatternColumn<'a, 'p, 'tcx>, scrut_ty: Ty<'tcx>, ) { + let rcx: &RustcMatchCheckCtxt<'_, '_> = cx.tycx; if !matches!( - cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, cx.match_lint_level).0, + rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, rcx.match_lint_level).0, rustc_session::lint::Level::Allow ) { let witnesses = collect_nonexhaustive_missing_variants(cx, pat_column); @@ -180,13 +185,13 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>( // is not exhaustive enough. 
// // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`. - cx.tcx.emit_spanned_lint( + rcx.tcx.emit_spanned_lint( NON_EXHAUSTIVE_OMITTED_PATTERNS, - cx.match_lint_level, - cx.scrut_span, + rcx.match_lint_level, + rcx.scrut_span, NonExhaustiveOmittedPattern { scrut_ty, - uncovered: Uncovered::new(cx.scrut_span, cx, witnesses), + uncovered: Uncovered::new(rcx.scrut_span, rcx, witnesses), }, ); } @@ -196,17 +201,17 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>( // usage of the lint. for arm in arms { let (lint_level, lint_level_source) = - cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.hir_id); + rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.arm_data); if !matches!(lint_level, rustc_session::lint::Level::Allow) { let decorator = NonExhaustiveOmittedPatternLintOnArm { lint_span: lint_level_source.span(), - suggest_lint_on_match: cx.whole_match_span.map(|span| span.shrink_to_lo()), + suggest_lint_on_match: rcx.whole_match_span.map(|span| span.shrink_to_lo()), lint_level: lint_level.as_str(), lint_name: "non_exhaustive_omitted_patterns", }; use rustc_errors::DecorateLint; - let mut err = cx.tcx.sess.struct_span_warn(arm.pat.span(), ""); + let mut err = rcx.tcx.sess.struct_span_warn(*arm.pat.data(), ""); err.set_primary_message(decorator.msg()); decorator.decorate_lint(&mut err); err.emit(); @@ -217,28 +222,29 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>( /// Traverse the patterns to warn the user about ranges that overlap on their endpoints. #[instrument(level = "debug", skip(cx))] -pub(crate) fn lint_overlapping_range_endpoints<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - column: &PatternColumn<'p, 'tcx>, +pub(crate) fn lint_overlapping_range_endpoints<'a, 'p, 'tcx>( + cx: MatchCtxt<'a, 'p, 'tcx>, + column: &PatternColumn<'a, 'p, 'tcx>, ) { let Some(ty) = column.head_ty() else { return; }; - let pcx = &PatCtxt::new_dummy(cx, ty); + let pcx = &PlaceCtxt::new_dummy(cx, ty); + let rcx: &RustcMatchCheckCtxt<'_, '_> = cx.tycx; let set = column.analyze_ctors(pcx); if matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) { let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| { - let overlap_as_pat = cx.hoist_pat_range(overlap, ty); + let overlap_as_pat = rcx.hoist_pat_range(overlap, ty); let overlaps: Vec<_> = overlapped_spans .iter() .copied() .map(|span| Overlap { range: overlap_as_pat.clone(), span }) .collect(); - cx.tcx.emit_spanned_lint( + rcx.tcx.emit_spanned_lint( lint::builtin::OVERLAPPING_RANGE_ENDPOINTS, - cx.match_lint_level, + rcx.match_lint_level, this_span, OverlappingRangeEndpoints { overlap: overlaps, range: this_span }, ); @@ -255,7 +261,7 @@ pub(crate) fn lint_overlapping_range_endpoints<'p, 'tcx>( let mut suffixes: SmallVec<[_; 1]> = Default::default(); // Iterate on patterns that contained `overlap`. for pat in column.iter() { - let this_span = pat.span(); + let this_span = *pat.data(); let Constructor::IntRange(this_range) = pat.ctor() else { continue }; if this_range.is_singleton() { // Don't lint when one of the ranges is a singleton. 
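
// Illustrative aside, outside the patch: the kind of user code that the
// `overlapping_range_endpoints` lint handled in lints.rs above warns about.
// Both arms match `10`, which is usually an off-by-one mistake, so the lint
// points at the shared endpoint. This is a plain standalone example, not code
// from the compiler.
fn classify(x: u8) -> &'static str {
    match x {
        0..=10 => "low",  // warning: multiple patterns overlap on their endpoints
        10..=20 => "mid", // `10` is already covered by the previous arm
        _ => "high",
    }
}

fn main() {
    // `10` hits the first arm; the second arm's `10..=20` endpoint can never
    // match `10`, which is what the lint is trying to surface.
    assert_eq!(classify(10), "low");
}
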
diff --git a/compiler/rustc_pattern_analysis/src/pat.rs b/compiler/rustc_pattern_analysis/src/pat.rs index 404651124ad..0cc8477b7cd 100644 --- a/compiler/rustc_pattern_analysis/src/pat.rs +++ b/compiler/rustc_pattern_analysis/src/pat.rs @@ -5,16 +5,11 @@ use std::fmt; use smallvec::{smallvec, SmallVec}; -use rustc_data_structures::captures::Captures; -use rustc_middle::ty::{self, Ty}; -use rustc_span::{Span, DUMMY_SP}; +use crate::constructor::{Constructor, Slice, SliceKind}; +use crate::usefulness::PlaceCtxt; +use crate::{Captures, TypeCx}; use self::Constructor::*; -use self::SliceKind::*; - -use crate::constructor::{Constructor, SliceKind}; -use crate::cx::MatchCheckCtxt; -use crate::usefulness::PatCtxt; /// Values and patterns can be represented as a constructor applied to some fields. This represents /// a pattern in this form. @@ -27,34 +22,34 @@ use crate::usefulness::PatCtxt; /// This happens if a private or `non_exhaustive` field is uninhabited, because the code mustn't /// observe that it is uninhabited. In that case that field is not included in `fields`. Care must /// be taken when converting to/from `thir::Pat`. -pub struct DeconstructedPat<'p, 'tcx> { - ctor: Constructor<'tcx>, - fields: &'p [DeconstructedPat<'p, 'tcx>], - ty: Ty<'tcx>, - span: Span, +pub struct DeconstructedPat<'p, Cx: TypeCx> { + ctor: Constructor<Cx>, + fields: &'p [DeconstructedPat<'p, Cx>], + ty: Cx::Ty, + data: Cx::PatData, /// Whether removing this arm would change the behavior of the match expression. useful: Cell<bool>, } -impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> { - pub fn wildcard(ty: Ty<'tcx>, span: Span) -> Self { - Self::new(Wildcard, &[], ty, span) +impl<'p, Cx: TypeCx> DeconstructedPat<'p, Cx> { + pub fn wildcard(ty: Cx::Ty, data: Cx::PatData) -> Self { + Self::new(Wildcard, &[], ty, data) } pub fn new( - ctor: Constructor<'tcx>, - fields: &'p [DeconstructedPat<'p, 'tcx>], - ty: Ty<'tcx>, - span: Span, + ctor: Constructor<Cx>, + fields: &'p [DeconstructedPat<'p, Cx>], + ty: Cx::Ty, + data: Cx::PatData, ) -> Self { - DeconstructedPat { ctor, fields, ty, span, useful: Cell::new(false) } + DeconstructedPat { ctor, fields, ty, data, useful: Cell::new(false) } } pub(crate) fn is_or_pat(&self) -> bool { matches!(self.ctor, Or) } /// Expand this (possibly-nested) or-pattern into its alternatives. - pub(crate) fn flatten_or_pat(&'p self) -> SmallVec<[&'p Self; 1]> { + pub(crate) fn flatten_or_pat(&self) -> SmallVec<[&Self; 1]> { if self.is_or_pat() { self.iter_fields().flat_map(|p| p.flatten_or_pat()).collect() } else { @@ -62,66 +57,64 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> { } } - pub fn ctor(&self) -> &Constructor<'tcx> { + pub fn ctor(&self) -> &Constructor<Cx> { &self.ctor } - pub fn ty(&self) -> Ty<'tcx> { + pub fn ty(&self) -> Cx::Ty { self.ty } - pub fn span(&self) -> Span { - self.span + pub fn data(&self) -> &Cx::PatData { + &self.data } pub fn iter_fields<'a>( &'a self, - ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> { + ) -> impl Iterator<Item = &'p DeconstructedPat<'p, Cx>> + Captures<'a> { self.fields.iter() } /// Specialize this pattern with a constructor. /// `other_ctor` can be different from `self.ctor`, but must be covered by it. 
pub(crate) fn specialize<'a>( - &'a self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - other_ctor: &Constructor<'tcx>, - ) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> { + &self, + pcx: &PlaceCtxt<'a, 'p, Cx>, + other_ctor: &Constructor<Cx>, + ) -> SmallVec<[&'a DeconstructedPat<'p, Cx>; 2]> { + let wildcard_sub_tys = || { + let tys = pcx.ctor_sub_tys(other_ctor); + tys.iter() + .map(|ty| DeconstructedPat::wildcard(*ty, Cx::PatData::default())) + .map(|pat| pcx.mcx.wildcard_arena.alloc(pat) as &_) + .collect() + }; match (&self.ctor, other_ctor) { - (Wildcard, _) => { - // We return a wildcard for each field of `other_ctor`. - pcx.cx.ctor_wildcard_fields(other_ctor, pcx.ty).iter().collect() - } - (Slice(self_slice), Slice(other_slice)) - if self_slice.arity() != other_slice.arity() => - { - // The only tricky case: two slices of different arity. Since `self_slice` covers - // `other_slice`, `self_slice` must be `VarLen`, i.e. of the form - // `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger - // arity. So we fill the middle part with enough wildcards to reach the length of - // the new, larger slice. - match self_slice.kind { - FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice), - VarLen(prefix, suffix) => { - let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else { - bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty); - }; - let prefix = &self.fields[..prefix]; - let suffix = &self.fields[self_slice.arity() - suffix..]; - let wildcard: &_ = pcx - .cx - .pattern_arena - .alloc(DeconstructedPat::wildcard(inner_ty, DUMMY_SP)); - let extra_wildcards = other_slice.arity() - self_slice.arity(); - let extra_wildcards = (0..extra_wildcards).map(|_| wildcard); - prefix.iter().chain(extra_wildcards).chain(suffix).collect() - } + // Return a wildcard for each field of `other_ctor`. + (Wildcard, _) => wildcard_sub_tys(), + // The only non-trivial case: two slices of different arity. `other_slice` is + // guaranteed to have a larger arity, so we fill the middle part with enough + // wildcards to reach the length of the new, larger slice. + ( + &Slice(self_slice @ Slice { kind: SliceKind::VarLen(prefix, suffix), .. }), + &Slice(other_slice), + ) if self_slice.arity() != other_slice.arity() => { + // Start with a slice of wildcards of the appropriate length. + let mut fields: SmallVec<[_; 2]> = wildcard_sub_tys(); + // Fill in the fields from both ends. + let new_arity = fields.len(); + for i in 0..prefix { + fields[i] = &self.fields[i]; + } + for i in 0..suffix { + fields[new_arity - 1 - i] = &self.fields[self.fields.len() - 1 - i]; } + fields } _ => self.fields.iter().collect(), } } - /// We keep track for each pattern if it was ever useful during the analysis. This is used - /// with `redundant_spans` to report redundant subpatterns arising from or patterns. + /// We keep track for each pattern if it was ever useful during the analysis. This is used with + /// `redundant_subpatterns` to report redundant subpatterns arising from or patterns. pub(crate) fn set_useful(&self) { self.useful.set(true) } @@ -139,19 +132,19 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> { } } - /// Report the spans of subpatterns that were not useful, if any. - pub(crate) fn redundant_spans(&self) -> Vec<Span> { - let mut spans = Vec::new(); - self.collect_redundant_spans(&mut spans); - spans + /// Report the subpatterns that were not useful, if any. 
+ pub(crate) fn redundant_subpatterns(&self) -> Vec<&Self> { + let mut subpats = Vec::new(); + self.collect_redundant_subpatterns(&mut subpats); + subpats } - fn collect_redundant_spans(&self, spans: &mut Vec<Span>) { + fn collect_redundant_subpatterns<'a>(&'a self, subpats: &mut Vec<&'a Self>) { // We don't look at subpatterns if we already reported the whole pattern as redundant. if !self.is_useful() { - spans.push(self.span); + subpats.push(self); } else { for p in self.iter_fields() { - p.collect_redundant_spans(spans); + p.collect_redundant_subpatterns(subpats); } } } @@ -159,47 +152,46 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> { /// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a /// `Display` impl. -impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> { +impl<'p, Cx: TypeCx> fmt::Debug for DeconstructedPat<'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - MatchCheckCtxt::debug_pat(f, self) + Cx::debug_pat(f, self) } } /// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics /// purposes. As such they don't use interning and can be cloned. #[derive(Debug, Clone)] -pub struct WitnessPat<'tcx> { - ctor: Constructor<'tcx>, - pub(crate) fields: Vec<WitnessPat<'tcx>>, - ty: Ty<'tcx>, +pub struct WitnessPat<Cx: TypeCx> { + ctor: Constructor<Cx>, + pub(crate) fields: Vec<WitnessPat<Cx>>, + ty: Cx::Ty, } -impl<'tcx> WitnessPat<'tcx> { - pub(crate) fn new(ctor: Constructor<'tcx>, fields: Vec<Self>, ty: Ty<'tcx>) -> Self { +impl<Cx: TypeCx> WitnessPat<Cx> { + pub(crate) fn new(ctor: Constructor<Cx>, fields: Vec<Self>, ty: Cx::Ty) -> Self { Self { ctor, fields, ty } } - pub(crate) fn wildcard(ty: Ty<'tcx>) -> Self { + pub(crate) fn wildcard(ty: Cx::Ty) -> Self { Self::new(Wildcard, Vec::new(), ty) } /// Construct a pattern that matches everything that starts with this constructor. /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern /// `Some(_)`. 
- pub(crate) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self { - let field_tys = - pcx.cx.ctor_wildcard_fields(&ctor, pcx.ty).iter().map(|deco_pat| deco_pat.ty()); - let fields = field_tys.map(|ty| Self::wildcard(ty)).collect(); + pub(crate) fn wild_from_ctor(pcx: &PlaceCtxt<'_, '_, Cx>, ctor: Constructor<Cx>) -> Self { + let field_tys = pcx.ctor_sub_tys(&ctor); + let fields = field_tys.iter().map(|ty| Self::wildcard(*ty)).collect(); Self::new(ctor, fields, pcx.ty) } - pub fn ctor(&self) -> &Constructor<'tcx> { + pub fn ctor(&self) -> &Constructor<Cx> { &self.ctor } - pub fn ty(&self) -> Ty<'tcx> { + pub fn ty(&self) -> Cx::Ty { self.ty } - pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a WitnessPat<'tcx>> { + pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a WitnessPat<Cx>> { self.fields.iter() } } diff --git a/compiler/rustc_pattern_analysis/src/cx.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index 8a4f39a1f4a..65c90aa9f1d 100644 --- a/compiler/rustc_pattern_analysis/src/cx.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -1,15 +1,15 @@ use std::fmt; use std::iter::once; -use rustc_arena::TypedArena; +use rustc_arena::{DroplessArena, TypedArena}; use rustc_data_structures::captures::Captures; use rustc_hir::def_id::DefId; -use rustc_hir::{HirId, RangeEnd}; +use rustc_hir::HirId; use rustc_index::Idx; use rustc_index::IndexVec; use rustc_middle::middle::stability::EvalResult; -use rustc_middle::mir; use rustc_middle::mir::interpret::Scalar; +use rustc_middle::mir::{self, Const}; use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary}; use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef}; @@ -18,14 +18,31 @@ use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT}; use smallvec::SmallVec; use crate::constructor::{ - Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, OpaqueId, Slice, SliceKind, - VariantVisibility, + IntRange, MaybeInfiniteInt, OpaqueId, RangeEnd, Slice, SliceKind, VariantVisibility, }; -use crate::pat::{DeconstructedPat, WitnessPat}; +use crate::TypeCx; -use Constructor::*; +use crate::constructor::Constructor::*; -pub struct MatchCheckCtxt<'p, 'tcx> { +// Re-export rustc-specific versions of all these types. +pub type Constructor<'p, 'tcx> = crate::constructor::Constructor<RustcMatchCheckCtxt<'p, 'tcx>>; +pub type ConstructorSet<'p, 'tcx> = + crate::constructor::ConstructorSet<RustcMatchCheckCtxt<'p, 'tcx>>; +pub type DeconstructedPat<'p, 'tcx> = + crate::pat::DeconstructedPat<'p, RustcMatchCheckCtxt<'p, 'tcx>>; +pub type MatchArm<'p, 'tcx> = crate::MatchArm<'p, RustcMatchCheckCtxt<'p, 'tcx>>; +pub type MatchCtxt<'a, 'p, 'tcx> = crate::MatchCtxt<'a, 'p, RustcMatchCheckCtxt<'p, 'tcx>>; +pub(crate) type PlaceCtxt<'a, 'p, 'tcx> = + crate::usefulness::PlaceCtxt<'a, 'p, RustcMatchCheckCtxt<'p, 'tcx>>; +pub(crate) type SplitConstructorSet<'p, 'tcx> = + crate::constructor::SplitConstructorSet<RustcMatchCheckCtxt<'p, 'tcx>>; +pub type Usefulness<'p, 'tcx> = crate::usefulness::Usefulness<'p, RustcMatchCheckCtxt<'p, 'tcx>>; +pub type UsefulnessReport<'p, 'tcx> = + crate::usefulness::UsefulnessReport<'p, RustcMatchCheckCtxt<'p, 'tcx>>; +pub type WitnessPat<'p, 'tcx> = crate::pat::WitnessPat<RustcMatchCheckCtxt<'p, 'tcx>>; + +#[derive(Clone)] +pub struct RustcMatchCheckCtxt<'p, 'tcx> { pub tcx: TyCtxt<'tcx>, /// The module in which the match occurs. 
This is necessary for /// checking inhabited-ness of types because whether a type is (visibly) @@ -35,6 +52,7 @@ pub struct MatchCheckCtxt<'p, 'tcx> { pub module: DefId, pub param_env: ty::ParamEnv<'tcx>, pub pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>, + pub dropless_arena: &'p DroplessArena, /// Lint level at the match. pub match_lint_level: HirId, /// The span of the whole match, if applicable. @@ -48,8 +66,14 @@ pub struct MatchCheckCtxt<'p, 'tcx> { pub known_valid_scrutinee: bool, } -impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { - pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { +impl<'p, 'tcx> fmt::Debug for RustcMatchCheckCtxt<'p, 'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RustcMatchCheckCtxt").finish() + } +} + +impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { + pub(crate) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { !ty.is_inhabited_from(self.tcx, self.module, self.param_env) } @@ -63,12 +87,18 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } } - pub(crate) fn alloc_wildcard_slice( - &self, - tys: impl IntoIterator<Item = Ty<'tcx>>, - ) -> &'p [DeconstructedPat<'p, 'tcx>] { - self.pattern_arena - .alloc_from_iter(tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, DUMMY_SP))) + /// Whether the range denotes the fictitious values before `isize::MIN` or after + /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist). + pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: Ty<'tcx>) -> bool { + ty.is_ptr_sized_integral() && { + // The two invalid ranges are `NegInfinity..isize::MIN` (represented as + // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy` + // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo` + // otherwise. + let lo = self.hoist_pat_range_bdy(range.lo, ty); + matches!(lo, PatRangeBoundary::PosInfinity) + || matches!(range.hi, MaybeInfiniteInt::Finite(0)) + } } // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide @@ -100,12 +130,12 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } pub(crate) fn variant_index_for_adt( - ctor: &Constructor<'tcx>, + ctor: &Constructor<'p, 'tcx>, adt: ty::AdtDef<'tcx>, ) -> VariantIdx { match *ctor { Variant(idx) => idx, - Single => { + Struct | UnionField => { assert!(!adt.is_enum()); FIRST_VARIANT } @@ -113,37 +143,36 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } } - /// Creates a new list of wildcard fields for a given constructor. The result must have a length - /// of `ctor.arity()`. + /// Returns the types of the fields for a given constructor. The result must have a length of + /// `ctor.arity()`. #[instrument(level = "trace", skip(self))] - pub(crate) fn ctor_wildcard_fields( - &self, - ctor: &Constructor<'tcx>, - ty: Ty<'tcx>, - ) -> &'p [DeconstructedPat<'p, 'tcx>] { + pub(crate) fn ctor_sub_tys(&self, ctor: &Constructor<'p, 'tcx>, ty: Ty<'tcx>) -> &[Ty<'tcx>] { let cx = self; match ctor { - Single | Variant(_) => match ty.kind() { - ty::Tuple(fs) => cx.alloc_wildcard_slice(fs.iter()), - ty::Ref(_, rty, _) => cx.alloc_wildcard_slice(once(*rty)), + Struct | Variant(_) | UnionField => match ty.kind() { + ty::Tuple(fs) => cx.dropless_arena.alloc_from_iter(fs.iter()), ty::Adt(adt, args) => { if adt.is_box() { // The only legal patterns of type `Box` (outside `std`) are `_` and box // patterns. If we're here we can assume this is a box pattern. 
- cx.alloc_wildcard_slice(once(args.type_at(0))) + cx.dropless_arena.alloc_from_iter(once(args.type_at(0))) } else { let variant = - &adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); + &adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); let tys = cx.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty); - cx.alloc_wildcard_slice(tys) + cx.dropless_arena.alloc_from_iter(tys) } } - _ => bug!("Unexpected type for `Single` constructor: {:?}", ty), + _ => bug!("Unexpected type for constructor `{ctor:?}`: {ty:?}"), + }, + Ref => match ty.kind() { + ty::Ref(_, rty, _) => cx.dropless_arena.alloc_from_iter(once(*rty)), + _ => bug!("Unexpected type for `Ref` constructor: {ty:?}"), }, Slice(slice) => match *ty.kind() { ty::Slice(ty) | ty::Array(ty, _) => { let arity = slice.arity(); - cx.alloc_wildcard_slice((0..arity).map(|_| ty)) + cx.dropless_arena.alloc_from_iter((0..arity).map(|_| ty)) } _ => bug!("bad slice pattern {:?} {:?}", ctor, ty), }, @@ -163,13 +192,11 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } } - /// The number of fields for this constructor. This must be kept in sync with - /// `Fields::wildcards`. - pub(crate) fn ctor_arity(&self, ctor: &Constructor<'tcx>, ty: Ty<'tcx>) -> usize { + /// The number of fields for this constructor. + pub(crate) fn ctor_arity(&self, ctor: &Constructor<'p, 'tcx>, ty: Ty<'tcx>) -> usize { match ctor { - Single | Variant(_) => match ty.kind() { + Struct | Variant(_) | UnionField => match ty.kind() { ty::Tuple(fs) => fs.len(), - ty::Ref(..) => 1, ty::Adt(adt, ..) => { if adt.is_box() { // The only legal patterns of type `Box` (outside `std`) are `_` and box @@ -177,12 +204,13 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { 1 } else { let variant = - &adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); + &adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); self.list_variant_nonhidden_fields(ty, variant).count() } } - _ => bug!("Unexpected type for `Single` constructor: {:?}", ty), + _ => bug!("Unexpected type for constructor `{ctor:?}`: {ty:?}"), }, + Ref => 1, Slice(slice) => slice.arity(), Bool(..) | IntRange(..) @@ -202,7 +230,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { /// /// See [`crate::constructor`] for considerations of emptiness. #[instrument(level = "debug", skip(self), ret)] - pub fn ctors_for_ty(&self, ty: Ty<'tcx>) -> ConstructorSet { + pub fn ctors_for_ty(&self, ty: Ty<'tcx>) -> ConstructorSet<'p, 'tcx> { let cx = self; let make_uint_range = |start, end| { IntRange::from_range( @@ -298,9 +326,9 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive } } } - ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => { - ConstructorSet::Single { empty: cx.is_uninhabited(ty) } - } + ty::Adt(def, _) if def.is_union() => ConstructorSet::Union, + ty::Adt(..) | ty::Tuple(..) => ConstructorSet::Struct { empty: cx.is_uninhabited(ty) }, + ty::Ref(..) => ConstructorSet::Ref, ty::Never => ConstructorSet::NoConstructors, // This type is one for which we cannot list constructors, like `str` or `f64`. // FIXME(Nadrieril): which of these are actually allowed? @@ -359,13 +387,18 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { fields = &[]; } PatKind::Deref { subpattern } => { - ctor = Single; fields = singleton(self.lower_pat(subpattern)); + ctor = match pat.ty.kind() { + // This is a box pattern. + ty::Adt(adt, ..) if adt.is_box() => Struct, + ty::Ref(..) 
=> Ref, + _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty), + }; } PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => { match pat.ty.kind() { ty::Tuple(fs) => { - ctor = Single; + ctor = Struct; let mut wilds: SmallVec<[_; 2]> = fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect(); for pat in subpatterns { @@ -380,7 +413,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { // _)` or a box pattern. As a hack to avoid an ICE with the former, we // ignore other fields than the first one. This will trigger an error later // anyway. - // See https://github.com/rust-lang/rust/issues/82772 , + // See https://github.com/rust-lang/rust/issues/82772, // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977 // The problem is that we can't know from the type whether we'll match // normally or through box-patterns. We'll have to figure out a proper @@ -392,17 +425,18 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } else { DeconstructedPat::wildcard(args.type_at(0), pat.span) }; - ctor = Single; + ctor = Struct; fields = singleton(pat); } ty::Adt(adt, _) => { ctor = match pat.kind { - PatKind::Leaf { .. } => Single, + PatKind::Leaf { .. } if adt.is_union() => UnionField, + PatKind::Leaf { .. } => Struct, PatKind::Variant { variant_index, .. } => Variant(variant_index), _ => bug!(), }; let variant = - &adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); + &adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); // For each field in the variant, we store the relevant index into `self.fields` if any. let mut field_id_to_id: Vec<Option<usize>> = (0..variant.fields.len()).map(|_| None).collect(); @@ -477,11 +511,11 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { // with other `Deref` patterns. This could have been done in `const_to_pat`, // but that causes issues with the rest of the matching code. // So here, the constructor for a `"foo"` pattern is `&` (represented by - // `Single`), and has one field. That field has constructor `Str(value)` and no - // fields. + // `Ref`), and has one field. That field has constructor `Str(value)` and no + // subfields. // Note: `t` is `str`, not `&str`. let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat.span); - ctor = Single; + ctor = Ref; fields = singleton(subpattern) } // All constants that can be structurally matched have already been expanded @@ -495,12 +529,16 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } PatKind::Range(patrange) => { let PatRange { lo, hi, end, .. 
} = patrange.as_ref(); + let end = match end { + rustc_hir::RangeEnd::Included => RangeEnd::Included, + rustc_hir::RangeEnd::Excluded => RangeEnd::Excluded, + }; let ty = pat.ty; ctor = match ty.kind() { ty::Char | ty::Int(_) | ty::Uint(_) => { let lo = cx.lower_pat_range_bdy(*lo, ty); let hi = cx.lower_pat_range_bdy(*hi, ty); - IntRange(IntRange::from_range(lo, hi, *end)) + IntRange(IntRange::from_range(lo, hi, end)) } ty::Float(fty) => { use rustc_apfloat::Float; @@ -511,13 +549,13 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { use rustc_apfloat::ieee::Single; let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY); let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY); - F32Range(lo, hi, *end) + F32Range(lo, hi, end) } ty::FloatTy::F64 => { use rustc_apfloat::ieee::Double; let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY); let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY); - F64Range(lo, hi, *end) + F64Range(lo, hi, end) } } } @@ -597,20 +635,6 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } } - /// Whether the range denotes the fictitious values before `isize::MIN` or after - /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist). - pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: Ty<'tcx>) -> bool { - ty.is_ptr_sized_integral() && { - // The two invalid ranges are `NegInfinity..isize::MIN` (represented as - // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy` - // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo` - // otherwise. - let lo = self.hoist_pat_range_bdy(range.lo, ty); - matches!(lo, PatRangeBoundary::PosInfinity) - || matches!(range.hi, MaybeInfiniteInt::Finite(0)) - } - } - /// Convert back to a `thir::Pat` for diagnostic purposes. pub(crate) fn hoist_pat_range(&self, range: &IntRange, ty: Ty<'tcx>) -> Pat<'tcx> { use MaybeInfiniteInt::*; @@ -623,7 +647,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { PatKind::Constant { value } } else { // We convert to an inclusive range for diagnostics. - let mut end = RangeEnd::Included; + let mut end = rustc_hir::RangeEnd::Included; let mut lo = cx.hoist_pat_range_bdy(range.lo, ty); if matches!(lo, PatRangeBoundary::PosInfinity) { // The only reason to get `PosInfinity` here is the special case where @@ -637,7 +661,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } let hi = if matches!(range.hi, Finite(0)) { // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range. - end = RangeEnd::Excluded; + end = rustc_hir::RangeEnd::Excluded; range.hi } else { range.hi.minus_one() @@ -650,14 +674,14 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } /// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't /// appear in diagnostics, like float ranges. - pub fn hoist_witness_pat(&self, pat: &WitnessPat<'tcx>) -> Pat<'tcx> { + pub fn hoist_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> Pat<'tcx> { let cx = self; let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild); let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p))); let kind = match pat.ctor() { Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) }, IntRange(range) => return self.hoist_pat_range(range, pat.ty()), - Single | Variant(_) => match pat.ty().kind() { + Struct | Variant(_) | UnionField => match pat.ty().kind() { ty::Tuple(..) 
=> PatKind::Leaf { subpatterns: subpatterns .enumerate() @@ -672,7 +696,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } ty::Adt(adt_def, args) => { let variant_index = - MatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def); + RustcMatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def); let variant = &adt_def.variant(variant_index); let subpatterns = cx .list_variant_nonhidden_fields(pat.ty(), variant) @@ -686,13 +710,13 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { PatKind::Leaf { subpatterns } } } - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to reconstruct the correct constant pattern here. However a string - // literal pattern will never be reported as a non-exhaustiveness witness, so we - // ignore this issue. - ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty()), }, + // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should + // be careful to reconstruct the correct constant pattern here. However a string + // literal pattern will never be reported as a non-exhaustiveness witness, so we + // ignore this issue. + Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, Slice(slice) => { match slice.kind { SliceKind::FixedLen(_) => PatKind::Slice { @@ -744,7 +768,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { /// Best-effort `Debug` implementation. pub(crate) fn debug_pat( f: &mut fmt::Formatter<'_>, - pat: &DeconstructedPat<'p, 'tcx>, + pat: &crate::pat::DeconstructedPat<'_, Self>, ) -> fmt::Result { let mut first = true; let mut start_or_continue = |s| { @@ -758,7 +782,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { let mut start_or_comma = || start_or_continue(", "); match pat.ctor() { - Single | Variant(_) => match pat.ty().kind() { + Struct | Variant(_) | UnionField => match pat.ty().kind() { ty::Adt(def, _) if def.is_box() => { // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside // of `std`). So this branch is only reachable when the feature is enabled and @@ -767,13 +791,14 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { write!(f, "box {subpattern:?}") } ty::Adt(..) | ty::Tuple(..) => { - let variant = match pat.ty().kind() { - ty::Adt(adt, _) => Some( - adt.variant(MatchCheckCtxt::variant_index_for_adt(pat.ctor(), *adt)), - ), - ty::Tuple(_) => None, - _ => unreachable!(), - }; + let variant = + match pat.ty().kind() { + ty::Adt(adt, _) => Some(adt.variant( + RustcMatchCheckCtxt::variant_index_for_adt(pat.ctor(), *adt), + )), + ty::Tuple(_) => None, + _ => unreachable!(), + }; if let Some(variant) = variant { write!(f, "{}", variant.name)?; @@ -789,15 +814,15 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } write!(f, ")") } - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to detect strings here. However a string literal pattern will never - // be reported as a non-exhaustiveness witness, so we can ignore this issue. - ty::Ref(_, _, mutbl) => { - let subpattern = pat.iter_fields().next().unwrap(); - write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern) - } _ => write!(f, "_"), }, + // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should + // be careful to detect strings here. However a string literal pattern will never + // be reported as a non-exhaustiveness witness, so we can ignore this issue. 
+ Ref => { + let subpattern = pat.iter_fields().next().unwrap(); + write!(f, "&{:?}", subpattern) + } Slice(slice) => { let mut subpatterns = pat.iter_fields(); write!(f, "[")?; @@ -838,6 +863,45 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { } } +impl<'p, 'tcx> TypeCx for RustcMatchCheckCtxt<'p, 'tcx> { + type Ty = Ty<'tcx>; + type VariantIdx = VariantIdx; + type StrLit = Const<'tcx>; + type ArmData = HirId; + type PatData = Span; + + fn is_exhaustive_patterns_feature_on(&self) -> bool { + self.tcx.features().exhaustive_patterns + } + fn is_opaque_ty(ty: Self::Ty) -> bool { + matches!(ty.kind(), ty::Alias(ty::Opaque, ..)) + } + + fn ctor_arity(&self, ctor: &crate::constructor::Constructor<Self>, ty: Self::Ty) -> usize { + self.ctor_arity(ctor, ty) + } + fn ctor_sub_tys( + &self, + ctor: &crate::constructor::Constructor<Self>, + ty: Self::Ty, + ) -> &[Self::Ty] { + self.ctor_sub_tys(ctor, ty) + } + fn ctors_for_ty(&self, ty: Self::Ty) -> crate::constructor::ConstructorSet<Self> { + self.ctors_for_ty(ty) + } + + fn debug_pat( + f: &mut fmt::Formatter<'_>, + pat: &crate::pat::DeconstructedPat<'_, Self>, + ) -> fmt::Result { + Self::debug_pat(f, pat) + } + fn bug(&self, fmt: fmt::Arguments<'_>) -> ! { + span_bug!(self.scrut_span, "{}", fmt) + } +} + /// Recursively expand this pattern into its subpatterns. Only useful for or-patterns. fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> { fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) { diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs index f268a551547..6b1de807797 100644 --- a/compiler/rustc_pattern_analysis/src/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -242,7 +242,7 @@ //! Therefore `usefulness(tp_1, tp_2, tq)` returns the single witness-tuple `[Variant2(Some(true), 0)]`. //! //! -//! Computing the set of constructors for a type is done in [`MatchCheckCtxt::ctors_for_ty`]. See +//! Computing the set of constructors for a type is done in [`TypeCx::ctors_for_ty`]. See //! the following sections for more accurate versions of the algorithm and corresponding links. //! //! @@ -555,37 +555,52 @@ use smallvec::{smallvec, SmallVec}; use std::fmt; -use rustc_data_structures::{captures::Captures, stack::ensure_sufficient_stack}; -use rustc_middle::ty::{self, Ty}; -use rustc_span::{Span, DUMMY_SP}; - use crate::constructor::{Constructor, ConstructorSet}; -use crate::cx::MatchCheckCtxt; use crate::pat::{DeconstructedPat, WitnessPat}; -use crate::MatchArm; +use crate::{Captures, MatchArm, MatchCtxt, TypeCx, TypedArena}; use self::ValidityConstraint::*; -#[derive(Copy, Clone)] -pub(crate) struct PatCtxt<'a, 'p, 'tcx> { - pub(crate) cx: &'a MatchCheckCtxt<'p, 'tcx>, - /// Type of the current column under investigation. - pub(crate) ty: Ty<'tcx>, - /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a - /// subpattern. - pub(crate) is_top_level: bool, +#[cfg(feature = "rustc")] +use rustc_data_structures::stack::ensure_sufficient_stack; +#[cfg(not(feature = "rustc"))] +pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R { + f() } -impl<'a, 'p, 'tcx> PatCtxt<'a, 'p, 'tcx> { - /// A `PatCtxt` when code other than `is_useful` needs one. - pub(crate) fn new_dummy(cx: &'a MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self { - PatCtxt { cx, ty, is_top_level: false } +/// Context that provides information local to a place under investigation. 
+#[derive(Clone)] +pub(crate) struct PlaceCtxt<'a, 'p, Cx: TypeCx> { + pub(crate) mcx: MatchCtxt<'a, 'p, Cx>, + /// Type of the place under investigation. + pub(crate) ty: Cx::Ty, + /// Whether the place is the original scrutinee place, as opposed to a subplace of it. + pub(crate) is_scrutinee: bool, +} + +impl<'a, 'p, Cx: TypeCx> PlaceCtxt<'a, 'p, Cx> { + /// A `PlaceCtxt` when code other than `is_useful` needs one. + #[cfg_attr(not(feature = "rustc"), allow(dead_code))] + pub(crate) fn new_dummy(mcx: MatchCtxt<'a, 'p, Cx>, ty: Cx::Ty) -> Self { + PlaceCtxt { mcx, ty, is_scrutinee: false } + } + + pub(crate) fn ctor_arity(&self, ctor: &Constructor<Cx>) -> usize { + self.mcx.tycx.ctor_arity(ctor, self.ty) + } + pub(crate) fn ctor_sub_tys(&self, ctor: &Constructor<Cx>) -> &[Cx::Ty] { + self.mcx.tycx.ctor_sub_tys(ctor, self.ty) + } + pub(crate) fn ctors_for_ty(&self) -> ConstructorSet<Cx> { + self.mcx.tycx.ctors_for_ty(self.ty) } } -impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> { +impl<'a, 'p, Cx: TypeCx> Copy for PlaceCtxt<'a, 'p, Cx> {} + +impl<'a, 'p, Cx: TypeCx> fmt::Debug for PlaceCtxt<'a, 'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("PatCtxt").field("ty", &self.ty).finish() + f.debug_struct("PlaceCtxt").field("ty", &self.ty).finish() } } @@ -595,7 +610,7 @@ impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> { /// - in the matrix, track whether a given place (aka column) is known to contain a valid value or /// not. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -enum ValidityConstraint { +pub enum ValidityConstraint { ValidOnly, MaybeInvalid, /// Option for backwards compatibility: the place is not known to be valid but we allow omitting @@ -604,7 +619,7 @@ enum ValidityConstraint { } impl ValidityConstraint { - fn from_bool(is_valid_only: bool) -> Self { + pub fn from_bool(is_valid_only: bool) -> Self { if is_valid_only { ValidOnly } else { MaybeInvalid } } @@ -629,12 +644,9 @@ impl ValidityConstraint { /// /// Pending further opsem decisions, the current behavior is: validity is preserved, except /// inside `&` and union fields where validity is reset to `MaybeInvalid`. - fn specialize<'tcx>(self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) -> Self { + fn specialize<Cx: TypeCx>(self, ctor: &Constructor<Cx>) -> Self { // We preserve validity except when we go inside a reference or a union field. - if matches!(ctor, Constructor::Single) - && (matches!(pcx.ty.kind(), ty::Ref(..)) - || matches!(pcx.ty.kind(), ty::Adt(def, ..) if def.is_union())) - { + if matches!(ctor, Constructor::Ref | Constructor::UnionField) { // Validity of `x: &T` does not imply validity of `*x: T`. MaybeInvalid } else { @@ -654,14 +666,18 @@ impl fmt::Display for ValidityConstraint { } /// Represents a pattern-tuple under investigation. +// The three lifetimes are: +// - 'a allocated by us +// - 'p coming from the input +// - Cx global compilation context #[derive(Clone)] -struct PatStack<'p, 'tcx> { +struct PatStack<'a, 'p, Cx: TypeCx> { // Rows of len 1 are very common, which is why `SmallVec[_; 2]` works well. 
- pats: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>, + pats: SmallVec<[&'a DeconstructedPat<'p, Cx>; 2]>, } -impl<'p, 'tcx> PatStack<'p, 'tcx> { - fn from_pattern(pat: &'p DeconstructedPat<'p, 'tcx>) -> Self { +impl<'a, 'p, Cx: TypeCx> PatStack<'a, 'p, Cx> { + fn from_pattern(pat: &'a DeconstructedPat<'p, Cx>) -> Self { PatStack { pats: smallvec![pat] } } @@ -673,17 +689,17 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> { self.pats.len() } - fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> { + fn head(&self) -> &'a DeconstructedPat<'p, Cx> { self.pats[0] } - fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> { + fn iter<'b>(&'b self) -> impl Iterator<Item = &'a DeconstructedPat<'p, Cx>> + Captures<'b> { self.pats.iter().copied() } // Recursively expand the first or-pattern into its subpatterns. Only useful if the pattern is // an or-pattern. Panics if `self` is empty. - fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = PatStack<'p, 'tcx>> + Captures<'a> { + fn expand_or_pat<'b>(&'b self) -> impl Iterator<Item = PatStack<'a, 'p, Cx>> + Captures<'b> { self.head().flatten_or_pat().into_iter().map(move |pat| { let mut new = self.clone(); new.pats[0] = pat; @@ -695,9 +711,9 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> { /// Only call if `ctor.is_covered_by(self.head().ctor())` is true. fn pop_head_constructor( &self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - ctor: &Constructor<'tcx>, - ) -> PatStack<'p, 'tcx> { + pcx: &PlaceCtxt<'a, 'p, Cx>, + ctor: &Constructor<Cx>, + ) -> PatStack<'a, 'p, Cx> { // We pop the head pattern and push the new fields extracted from the arguments of // `self.head()`. let mut new_pats = self.head().specialize(pcx, ctor); @@ -706,7 +722,7 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> { } } -impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> { +impl<'a, 'p, Cx: TypeCx> fmt::Debug for PatStack<'a, 'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // We pretty-print similarly to the `Debug` impl of `Matrix`. write!(f, "+")?; @@ -719,9 +735,9 @@ impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> { /// A row of the matrix. #[derive(Clone)] -struct MatrixRow<'p, 'tcx> { +struct MatrixRow<'a, 'p, Cx: TypeCx> { // The patterns in the row. - pats: PatStack<'p, 'tcx>, + pats: PatStack<'a, 'p, Cx>, /// Whether the original arm had a guard. This is inherited when specializing. is_under_guard: bool, /// When we specialize, we remember which row of the original matrix produced a given row of the @@ -734,7 +750,7 @@ struct MatrixRow<'p, 'tcx> { useful: bool, } -impl<'p, 'tcx> MatrixRow<'p, 'tcx> { +impl<'a, 'p, Cx: TypeCx> MatrixRow<'a, 'p, Cx> { fn is_empty(&self) -> bool { self.pats.is_empty() } @@ -743,17 +759,17 @@ impl<'p, 'tcx> MatrixRow<'p, 'tcx> { self.pats.len() } - fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> { + fn head(&self) -> &'a DeconstructedPat<'p, Cx> { self.pats.head() } - fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> { + fn iter<'b>(&'b self) -> impl Iterator<Item = &'a DeconstructedPat<'p, Cx>> + Captures<'b> { self.pats.iter() } // Recursively expand the first or-pattern into its subpatterns. Only useful if the pattern is // an or-pattern. Panics if `self` is empty. 
- fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = MatrixRow<'p, 'tcx>> + Captures<'a> { + fn expand_or_pat<'b>(&'b self) -> impl Iterator<Item = MatrixRow<'a, 'p, Cx>> + Captures<'b> { self.pats.expand_or_pat().map(|patstack| MatrixRow { pats: patstack, parent_row: self.parent_row, @@ -766,10 +782,10 @@ impl<'p, 'tcx> MatrixRow<'p, 'tcx> { /// Only call if `ctor.is_covered_by(self.head().ctor())` is true. fn pop_head_constructor( &self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - ctor: &Constructor<'tcx>, + pcx: &PlaceCtxt<'a, 'p, Cx>, + ctor: &Constructor<Cx>, parent_row: usize, - ) -> MatrixRow<'p, 'tcx> { + ) -> MatrixRow<'a, 'p, Cx> { MatrixRow { pats: self.pats.pop_head_constructor(pcx, ctor), parent_row, @@ -779,7 +795,7 @@ impl<'p, 'tcx> MatrixRow<'p, 'tcx> { } } -impl<'p, 'tcx> fmt::Debug for MatrixRow<'p, 'tcx> { +impl<'a, 'p, Cx: TypeCx> fmt::Debug for MatrixRow<'a, 'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.pats.fmt(f) } @@ -796,22 +812,22 @@ impl<'p, 'tcx> fmt::Debug for MatrixRow<'p, 'tcx> { /// specializing `(,)` and `Some` on a pattern of type `(Option<u32>, bool)`, the first column of /// the matrix will correspond to `scrutinee.0.Some.0` and the second column to `scrutinee.1`. #[derive(Clone)] -struct Matrix<'p, 'tcx> { +struct Matrix<'a, 'p, Cx: TypeCx> { /// Vector of rows. The rows must form a rectangular 2D array. Moreover, all the patterns of /// each column must have the same type. Each column corresponds to a place within the /// scrutinee. - rows: Vec<MatrixRow<'p, 'tcx>>, + rows: Vec<MatrixRow<'a, 'p, Cx>>, /// Stores an extra fictitious row full of wildcards. Mostly used to keep track of the type of /// each column. This must obey the same invariants as the real rows. - wildcard_row: PatStack<'p, 'tcx>, + wildcard_row: PatStack<'a, 'p, Cx>, /// Track for each column/place whether it contains a known valid value. place_validity: SmallVec<[ValidityConstraint; 2]>, } -impl<'p, 'tcx> Matrix<'p, 'tcx> { +impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> { /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively /// expands it. Internal method, prefer [`Matrix::new`]. - fn expand_and_push(&mut self, row: MatrixRow<'p, 'tcx>) { + fn expand_and_push(&mut self, row: MatrixRow<'a, 'p, Cx>) { if !row.is_empty() && row.head().is_or_pat() { // Expand nested or-patterns. for new_row in row.expand_or_pat() { @@ -823,16 +839,14 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { } /// Build a new matrix from an iterator of `MatchArm`s. - fn new<'a>( - cx: &MatchCheckCtxt<'p, 'tcx>, - arms: &[MatchArm<'p, 'tcx>], - scrut_ty: Ty<'tcx>, + fn new( + wildcard_arena: &'a TypedArena<DeconstructedPat<'p, Cx>>, + arms: &'a [MatchArm<'p, Cx>], + scrut_ty: Cx::Ty, scrut_validity: ValidityConstraint, - ) -> Self - where - 'p: 'a, - { - let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty, DUMMY_SP)); + ) -> Self { + let wild_pattern = + wildcard_arena.alloc(DeconstructedPat::wildcard(scrut_ty, Default::default())); let wildcard_row = PatStack::from_pattern(wild_pattern); let mut matrix = Matrix { rows: Vec::with_capacity(arms.len()), @@ -851,7 +865,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { matrix } - fn head_ty(&self) -> Option<Ty<'tcx>> { + fn head_ty(&self) -> Option<Cx::Ty> { if self.column_count() == 0 { return None; } @@ -859,11 +873,10 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { let mut ty = self.wildcard_row.head().ty(); // If the type is opaque and it is revealed anywhere in the column, we take the revealed // version. 
Otherwise we could encounter constructors for the revealed type and crash. - let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..)); - if is_opaque(ty) { + if Cx::is_opaque_ty(ty) { for pat in self.heads() { let pat_ty = pat.ty(); - if !is_opaque(pat_ty) { + if !Cx::is_opaque_ty(pat_ty) { ty = pat_ty; break; } @@ -875,34 +888,34 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { self.wildcard_row.len() } - fn rows<'a>( - &'a self, - ) -> impl Iterator<Item = &'a MatrixRow<'p, 'tcx>> + Clone + DoubleEndedIterator + ExactSizeIterator + fn rows<'b>( + &'b self, + ) -> impl Iterator<Item = &'b MatrixRow<'a, 'p, Cx>> + Clone + DoubleEndedIterator + ExactSizeIterator { self.rows.iter() } - fn rows_mut<'a>( - &'a mut self, - ) -> impl Iterator<Item = &'a mut MatrixRow<'p, 'tcx>> + DoubleEndedIterator + ExactSizeIterator + fn rows_mut<'b>( + &'b mut self, + ) -> impl Iterator<Item = &'b mut MatrixRow<'a, 'p, Cx>> + DoubleEndedIterator + ExactSizeIterator { self.rows.iter_mut() } /// Iterate over the first pattern of each row. - fn heads<'a>( - &'a self, - ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Clone + Captures<'a> { + fn heads<'b>( + &'b self, + ) -> impl Iterator<Item = &'b DeconstructedPat<'p, Cx>> + Clone + Captures<'a> { self.rows().map(|r| r.head()) } /// This computes `specialize(ctor, self)`. See top of the file for explanations. fn specialize_constructor( &self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - ctor: &Constructor<'tcx>, - ) -> Matrix<'p, 'tcx> { + pcx: &PlaceCtxt<'a, 'p, Cx>, + ctor: &Constructor<Cx>, + ) -> Matrix<'a, 'p, Cx> { let wildcard_row = self.wildcard_row.pop_head_constructor(pcx, ctor); - let new_validity = self.place_validity[0].specialize(pcx, ctor); + let new_validity = self.place_validity[0].specialize(ctor); let new_place_validity = std::iter::repeat(new_validity) .take(ctor.arity(pcx)) .chain(self.place_validity[1..].iter().copied()) @@ -929,7 +942,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { /// + _ + [_, _, tail @ ..] + /// | ✓ | ? | // column validity /// ``` -impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { +impl<'a, 'p, Cx: TypeCx> fmt::Debug for Matrix<'a, 'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\n")?; @@ -1020,17 +1033,17 @@ impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { /// /// See the top of the file for more detailed explanations and examples. #[derive(Debug, Clone)] -struct WitnessStack<'tcx>(Vec<WitnessPat<'tcx>>); +struct WitnessStack<Cx: TypeCx>(Vec<WitnessPat<Cx>>); -impl<'tcx> WitnessStack<'tcx> { +impl<Cx: TypeCx> WitnessStack<Cx> { /// Asserts that the witness contains a single pattern, and returns it. - fn single_pattern(self) -> WitnessPat<'tcx> { + fn single_pattern(self) -> WitnessPat<Cx> { assert_eq!(self.0.len(), 1); self.0.into_iter().next().unwrap() } /// Reverses specialization by the `Missing` constructor by pushing a whole new pattern. 
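The `specialize_constructor` hunk above rebuilds `place_validity` by replacing the head column with `ctor.arity(pcx)` copies of the specialized validity and keeping the remaining columns unchanged. A minimal sketch of that iterator chain with a stand-in `Validity` enum (not rustc's `ValidityConstraint`):

// Toy model of how specialization rewrites the per-column validity vector:
// the head column becomes `arity` columns (one per constructor field), each
// inheriting the specialized validity, followed by the untouched tail.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Validity {
    Valid,
    MaybeInvalid,
}

fn specialize_validity(columns: &[Validity], head_after_ctor: Validity, arity: usize) -> Vec<Validity> {
    std::iter::repeat(head_after_ctor)
        .take(arity)
        .chain(columns[1..].iter().copied())
        .collect()
}

fn main() {
    // A matrix over (Option<T>, bool): specializing on `Some` (arity 1) keeps
    // one column for the `Some` payload plus the original `bool` column.
    let columns = [Validity::Valid, Validity::MaybeInvalid];
    let specialized = specialize_validity(&columns, Validity::Valid, 1);
    assert_eq!(specialized, vec![Validity::Valid, Validity::MaybeInvalid]);
    println!("{specialized:?}");
}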
- fn push_pattern(&mut self, pat: WitnessPat<'tcx>) { + fn push_pattern(&mut self, pat: WitnessPat<Cx>) { self.0.push(pat); } @@ -1048,7 +1061,7 @@ impl<'tcx> WitnessStack<'tcx> { /// pats: [(false, "foo"), _, true] /// result: [Enum::Variant { a: (false, "foo"), b: _ }, true] /// ``` - fn apply_constructor(&mut self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) { + fn apply_constructor(&mut self, pcx: &PlaceCtxt<'_, '_, Cx>, ctor: &Constructor<Cx>) { let len = self.0.len(); let arity = ctor.arity(pcx); let fields = self.0.drain((len - arity)..).rev().collect(); @@ -1067,9 +1080,9 @@ impl<'tcx> WitnessStack<'tcx> { /// Just as the `Matrix` starts with a single column, by the end of the algorithm, this has a single /// column, which contains the patterns that are missing for the match to be exhaustive. #[derive(Debug, Clone)] -struct WitnessMatrix<'tcx>(Vec<WitnessStack<'tcx>>); +struct WitnessMatrix<Cx: TypeCx>(Vec<WitnessStack<Cx>>); -impl<'tcx> WitnessMatrix<'tcx> { +impl<Cx: TypeCx> WitnessMatrix<Cx> { /// New matrix with no witnesses. fn empty() -> Self { WitnessMatrix(vec![]) @@ -1084,12 +1097,12 @@ impl<'tcx> WitnessMatrix<'tcx> { self.0.is_empty() } /// Asserts that there is a single column and returns the patterns in it. - fn single_column(self) -> Vec<WitnessPat<'tcx>> { + fn single_column(self) -> Vec<WitnessPat<Cx>> { self.0.into_iter().map(|w| w.single_pattern()).collect() } /// Reverses specialization by the `Missing` constructor by pushing a whole new pattern. - fn push_pattern(&mut self, pat: WitnessPat<'tcx>) { + fn push_pattern(&mut self, pat: WitnessPat<Cx>) { for witness in self.0.iter_mut() { witness.push_pattern(pat.clone()) } @@ -1098,9 +1111,9 @@ impl<'tcx> WitnessMatrix<'tcx> { /// Reverses specialization by `ctor`. See the section on `unspecialize` at the top of the file. fn apply_constructor( &mut self, - pcx: &PatCtxt<'_, '_, 'tcx>, - missing_ctors: &[Constructor<'tcx>], - ctor: &Constructor<'tcx>, + pcx: &PlaceCtxt<'_, '_, Cx>, + missing_ctors: &[Constructor<Cx>], + ctor: &Constructor<Cx>, report_individual_missing_ctors: bool, ) { if self.is_empty() { @@ -1160,12 +1173,12 @@ impl<'tcx> WitnessMatrix<'tcx> { /// - unspecialization, where we lift the results from the previous step into results for this step /// (using `apply_constructor` and by updating `row.useful` for each parent row). /// This is all explained at the top of the file. -#[instrument(level = "debug", skip(cx, is_top_level), ret)] -fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - matrix: &mut Matrix<'p, 'tcx>, +#[instrument(level = "debug", skip(mcx, is_top_level), ret)] +fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( + mcx: MatchCtxt<'a, 'p, Cx>, + matrix: &mut Matrix<'a, 'p, Cx>, is_top_level: bool, -) -> WitnessMatrix<'tcx> { +) -> WitnessMatrix<Cx> { debug_assert!(matrix.rows().all(|r| r.len() == matrix.column_count())); let Some(ty) = matrix.head_ty() else { @@ -1185,7 +1198,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( }; debug!("ty: {ty:?}"); - let pcx = &PatCtxt { cx, ty, is_top_level }; + let pcx = &PlaceCtxt { mcx, ty, is_scrutinee: is_top_level }; // Whether the place/column we are inspecting is known to contain valid data. let place_validity = matrix.place_validity[0]; @@ -1194,7 +1207,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( // Analyze the constructors present in this column. 
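The `WitnessStack::apply_constructor` hunk above drains the last `arity` entries off the witness stack (which the `.rev()` suggests are kept in reverse order during construction), re-reverses them, and folds them back into a single pattern under `ctor`. A toy sketch of just that mechanic, with `Witness` as an invented stand-in for `WitnessPat`:

// Drains the last `arity` entries, re-reverses them, and pushes one combined
// constructor pattern in their place.
#[derive(Debug)]
enum Witness {
    Wild,
    Ctor { name: &'static str, fields: Vec<Witness> },
}

fn apply_constructor(stack: &mut Vec<Witness>, name: &'static str, arity: usize) {
    let len = stack.len();
    let fields: Vec<Witness> = stack.drain(len - arity..).rev().collect();
    stack.push(Witness::Ctor { name, fields });
}

fn main() {
    // Two field witnesses; folding them under a 2-ary constructor re-reverses
    // them, so with this toy's conventions the result is Pair(_, false).
    let mut stack = vec![
        Witness::Ctor { name: "false", fields: vec![] },
        Witness::Wild,
    ];
    apply_constructor(&mut stack, "Pair", 2);
    println!("{stack:?}");
}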
let ctors = matrix.heads().map(|p| p.ctor()); - let ctors_for_ty = &cx.ctors_for_ty(ty); + let ctors_for_ty = pcx.ctors_for_ty(); let is_integers = matches!(ctors_for_ty, ConstructorSet::Integers { .. }); // For diagnostics. let split_set = ctors_for_ty.split(pcx, ctors); let all_missing = split_set.present.is_empty(); @@ -1228,7 +1241,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( // Dig into rows that match `ctor`. let mut spec_matrix = matrix.specialize_constructor(pcx, &ctor); let mut witnesses = ensure_sufficient_stack(|| { - compute_exhaustiveness_and_usefulness(cx, &mut spec_matrix, false) + compute_exhaustiveness_and_usefulness(mcx, &mut spec_matrix, false) }); let counts_for_exhaustiveness = match ctor { @@ -1270,34 +1283,34 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( /// Indicates whether or not a given arm is useful. #[derive(Clone, Debug)] -pub enum Usefulness { +pub enum Usefulness<'p, Cx: TypeCx> { /// The arm is useful. This additionally carries a set of or-pattern branches that have been /// found to be redundant despite the overall arm being useful. Used only in the presence of /// or-patterns, otherwise it stays empty. - Useful(Vec<Span>), + Useful(Vec<&'p DeconstructedPat<'p, Cx>>), /// The arm is redundant and can be removed without changing the behavior of the match /// expression. Redundant, } /// The output of checking a match for exhaustiveness and arm usefulness. -pub struct UsefulnessReport<'p, 'tcx> { +pub struct UsefulnessReport<'p, Cx: TypeCx> { /// For each arm of the input, whether that arm is useful after the arms above it. - pub arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Usefulness)>, + pub arm_usefulness: Vec<(MatchArm<'p, Cx>, Usefulness<'p, Cx>)>, /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of /// exhaustiveness. - pub non_exhaustiveness_witnesses: Vec<WitnessPat<'tcx>>, + pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>, } /// Computes whether a match is exhaustive and which of its arms are useful. #[instrument(skip(cx, arms), level = "debug")] -pub(crate) fn compute_match_usefulness<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - arms: &[MatchArm<'p, 'tcx>], - scrut_ty: Ty<'tcx>, -) -> UsefulnessReport<'p, 'tcx> { - let scrut_validity = ValidityConstraint::from_bool(cx.known_valid_scrutinee); - let mut matrix = Matrix::new(cx, arms, scrut_ty, scrut_validity); +pub fn compute_match_usefulness<'p, Cx: TypeCx>( + cx: MatchCtxt<'_, 'p, Cx>, + arms: &[MatchArm<'p, Cx>], + scrut_ty: Cx::Ty, + scrut_validity: ValidityConstraint, +) -> UsefulnessReport<'p, Cx> { + let mut matrix = Matrix::new(cx.wildcard_arena, arms, scrut_ty, scrut_validity); let non_exhaustiveness_witnesses = compute_exhaustiveness_and_usefulness(cx, &mut matrix, true); let non_exhaustiveness_witnesses: Vec<_> = non_exhaustiveness_witnesses.single_column(); @@ -1308,7 +1321,7 @@ pub(crate) fn compute_match_usefulness<'p, 'tcx>( debug!(?arm); // We warn when a pattern is not useful. 
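The loop in `compute_exhaustiveness_and_usefulness` above splits the head column into its constructors, specializes the matrix for each one, recurses, and then unspecializes the returned witnesses (via `apply_constructor`). A deliberately tiny standalone model of that specialize/recurse/unspecialize shape, over a pattern language with only booleans and wildcards (my own `Pat` type, not the compiler's):

// Rows are pattern tuples; the result is the list of value tuples no row
// covers, i.e. the non-exhaustiveness witnesses.
#[derive(Clone, Debug, PartialEq)]
enum Pat {
    Wild,
    Bool(bool),
}

fn missing_witnesses(rows: &[Vec<Pat>], ncols: usize) -> Vec<Vec<bool>> {
    if ncols == 0 {
        // Base case: an empty row matches the empty tuple, so the empty
        // witness exists only if no row is left.
        return if rows.is_empty() { vec![vec![]] } else { vec![] };
    }
    let mut witnesses = Vec::new();
    for b in [false, true] {
        // Specialize: keep rows whose head covers `b` and drop their head.
        let specialized: Vec<Vec<Pat>> = rows
            .iter()
            .filter(|row| matches!(row[0], Pat::Wild) || row[0] == Pat::Bool(b))
            .map(|row| row[1..].to_vec())
            .collect();
        // Recurse, then unspecialize by pushing `b` back onto each witness.
        for mut w in missing_witnesses(&specialized, ncols - 1) {
            w.insert(0, b);
            witnesses.push(w);
        }
    }
    witnesses
}

fn main() {
    // match (x, y) { (true, _) => .., (false, false) => .. } misses (false, true).
    let rows = vec![
        vec![Pat::Bool(true), Pat::Wild],
        vec![Pat::Bool(false), Pat::Bool(false)],
    ];
    let witnesses = missing_witnesses(&rows, 2);
    assert_eq!(witnesses, vec![vec![false, true]]);
    println!("missing: {witnesses:?}");
}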
let usefulness = if arm.pat.is_useful() { - Usefulness::Useful(arm.pat.redundant_spans()) + Usefulness::Useful(arm.pat.redundant_subpatterns()) } else { Usefulness::Redundant }; diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index ad637472b47..670fdcfb0d2 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -1084,7 +1084,7 @@ pub struct Resolver<'a, 'tcx> { next_node_id: NodeId, - node_id_to_def_id: FxHashMap<ast::NodeId, LocalDefId>, + node_id_to_def_id: NodeMap<LocalDefId>, def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>, /// Indices of unnamed struct or variant fields with unresolved attributes. @@ -1296,7 +1296,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let mut def_id_to_node_id = IndexVec::default(); assert_eq!(def_id_to_node_id.push(CRATE_NODE_ID), CRATE_DEF_ID); - let mut node_id_to_def_id = FxHashMap::default(); + let mut node_id_to_def_id = NodeMap::default(); node_id_to_def_id.insert(CRATE_NODE_ID, CRATE_DEF_ID); let mut invocation_parents = FxHashMap::default(); diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs index 41ab4007a67..49bf2192f82 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs @@ -37,6 +37,7 @@ impl<'tcx> Stable<'tcx> for mir::Body<'tcx> { self.arg_count, self.var_debug_info.iter().map(|info| info.stable(tables)).collect(), self.spread_arg.stable(tables), + self.span.stable(tables), ) } } diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index a78df69f187..b688c97311a 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -1578,6 +1578,7 @@ supported_targets! { ("armv7k-apple-watchos", armv7k_apple_watchos), ("arm64_32-apple-watchos", arm64_32_apple_watchos), ("x86_64-apple-watchos-sim", x86_64_apple_watchos_sim), + ("aarch64-apple-watchos", aarch64_apple_watchos), ("aarch64-apple-watchos-sim", aarch64_apple_watchos_sim), ("armebv7r-none-eabi", armebv7r_none_eabi), diff --git a/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos.rs b/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos.rs new file mode 100644 index 00000000000..b62666dcc7e --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos.rs @@ -0,0 +1,19 @@ +use crate::spec::base::apple::{opts, Arch}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + let base = opts("watchos", Arch::Arm64); + Target { + llvm_target: "aarch-apple-watchos".into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a,+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + dynamic_linking: false, + position_independent_executables: true, + ..base + }, + } +} diff --git a/compiler/rustc_trait_selection/src/solve/alias_relate.rs b/compiler/rustc_trait_selection/src/solve/alias_relate.rs index 2e99854ddc6..626569fb40f 100644 --- a/compiler/rustc_trait_selection/src/solve/alias_relate.rs +++ b/compiler/rustc_trait_selection/src/solve/alias_relate.rs @@ -11,7 +11,7 @@ //! * bidirectional-normalizes-to: If `A` and `B` are both projections, and both //! may apply, then we can compute the "intersection" of both normalizes-to by //! performing them together. This is used specifically to resolve ambiguities. 
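The new `aarch64_apple_watchos.rs` spec added above builds its `Target` by overriding a handful of `TargetOptions` fields and inheriting everything else from the shared Apple base via struct update syntax (`..base`). A standalone illustration of that pattern, where `Opts` and `watchos_base()` are invented stand-ins rather than rustc_target's real types:

#[derive(Debug, Clone)]
struct Opts {
    os: &'static str,
    max_atomic_width: Option<u64>,
    dynamic_linking: bool,
    position_independent_executables: bool,
}

fn watchos_base() -> Opts {
    Opts {
        os: "watchos",
        max_atomic_width: None,
        dynamic_linking: true,
        position_independent_executables: false,
    }
}

fn aarch64_watchos() -> Opts {
    // Override only what differs for the device target; every other field is
    // taken verbatim from the shared base.
    Opts {
        max_atomic_width: Some(128),
        dynamic_linking: false,
        position_independent_executables: true,
        ..watchos_base()
    }
}

fn main() {
    println!("{:?}", aarch64_watchos());
}

As a freshly added target it ships without prebuilt standard-library artifacts, so building for it generally means a nightly toolchain plus building std from source.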
-use super::EvalCtxt; +use super::{EvalCtxt, GoalSource}; use rustc_infer::infer::DefineOpaqueTypes; use rustc_infer::traits::query::NoSolution; use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; @@ -89,11 +89,10 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ty::TermKind::Const(_) => { if let Some(alias) = term.to_alias_ty(self.tcx()) { let term = self.next_term_infer_of_kind(term); - self.add_goal(Goal::new( - self.tcx(), - param_env, - ty::NormalizesTo { alias, term }, - )); + self.add_goal( + GoalSource::Misc, + Goal::new(self.tcx(), param_env, ty::NormalizesTo { alias, term }), + ); self.try_evaluate_added_goals()?; Ok(Some(self.resolve_vars_if_possible(term))) } else { @@ -109,7 +108,10 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { opaque: ty::AliasTy<'tcx>, term: ty::Term<'tcx>, ) -> QueryResult<'tcx> { - self.add_goal(Goal::new(self.tcx(), param_env, ty::NormalizesTo { alias: opaque, term })); + self.add_goal( + GoalSource::Misc, + Goal::new(self.tcx(), param_env, ty::NormalizesTo { alias: opaque, term }), + ); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } diff --git a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs index 62d62bdfd11..81a766f24b0 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs @@ -1,6 +1,7 @@ //! Code shared by trait and projection goals for candidate assembly. use super::{EvalCtxt, SolverMode}; +use crate::solve::GoalSource; use crate::traits::coherence; use rustc_hir::def_id::DefId; use rustc_infer::traits::query::NoSolution; @@ -62,7 +63,9 @@ pub(super) trait GoalKind<'tcx>: requirements: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>, ) -> QueryResult<'tcx> { Self::probe_and_match_goal_against_assumption(ecx, goal, assumption, |ecx| { - ecx.add_goals(requirements); + // FIXME(-Znext-solver=coinductive): check whether this should be + // `GoalSource::ImplWhereBound` for any caller. + ecx.add_goals(GoalSource::Misc, requirements); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) } @@ -94,12 +97,16 @@ pub(super) trait GoalKind<'tcx>: let ty::Dynamic(bounds, _, _) = *goal.predicate.self_ty().kind() else { bug!("expected object type in `consider_object_bound_candidate`"); }; - ecx.add_goals(structural_traits::predicates_for_object_candidate( - ecx, - goal.param_env, - goal.predicate.trait_ref(tcx), - bounds, - )); + // FIXME(-Znext-solver=coinductive): Should this be `GoalSource::ImplWhereBound`? 
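The alias-relate and assembly hunks above start threading a `GoalSource` through `add_goal`/`add_goals`, so that later logic (for example the coinduction FIXMEs) can tell goals that come from impl where-bounds apart from miscellaneous ones. A minimal standalone sketch of the bookkeeping, with `String` standing in for the solver's goal type and `NestedGoals` as a toy container:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum GoalSource {
    /// Anything we don't (yet) care to classify.
    Misc,
    /// A where-clause instantiated from the impl being used.
    ImplWhereBound,
}

#[derive(Default)]
struct NestedGoals {
    goals: Vec<(GoalSource, String)>,
}

impl NestedGoals {
    fn add_goal(&mut self, source: GoalSource, goal: String) {
        self.goals.push((source, goal));
    }

    fn add_goals(&mut self, source: GoalSource, goals: impl IntoIterator<Item = String>) {
        for goal in goals {
            self.add_goal(source, goal);
        }
    }
}

fn main() {
    let mut nested = NestedGoals::default();
    nested.add_goals(
        GoalSource::ImplWhereBound,
        ["T: Clone".to_string(), "T: Sized".to_string()],
    );
    nested.add_goal(GoalSource::Misc, "WF(T)".to_string());
    // Later passes can treat impl where-bounds differently, e.g. for
    // coinductive cycle handling or for deciding which constraints to keep.
    let from_impl = nested
        .goals
        .iter()
        .filter(|(source, _)| *source == GoalSource::ImplWhereBound)
        .count();
    println!("{from_impl} goals came from impl where-bounds");
}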
+ ecx.add_goals( + GoalSource::Misc, + structural_traits::predicates_for_object_candidate( + ecx, + goal.param_env, + goal.predicate.trait_ref(tcx), + bounds, + ), + ); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) } @@ -364,7 +371,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { let normalized_ty = ecx.next_ty_infer(); let normalizes_to_goal = goal.with(tcx, ty::NormalizesTo { alias, term: normalized_ty.into() }); - ecx.add_goal(normalizes_to_goal); + ecx.add_goal(GoalSource::Misc, normalizes_to_goal); if let Err(NoSolution) = ecx.try_evaluate_added_goals() { debug!("self type normalization failed"); return vec![]; diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/canonical.rs index 7457ba837f5..ecdae2521b9 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/canonical.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/canonical.rs @@ -94,20 +94,6 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ); let certainty = certainty.unify_with(goals_certainty); - if let Certainty::OVERFLOW = certainty { - // If we have overflow, it's probable that we're substituting a type - // into itself infinitely and any partial substitutions in the query - // response are probably not useful anyways, so just return an empty - // query response. - // - // This may prevent us from potentially useful inference, e.g. - // 2 candidates, one ambiguous and one overflow, which both - // have the same inference constraints. - // - // Changing this to retain some constraints in the future - // won't be a breaking change, so this is good enough for now. - return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Overflow)); - } let var_values = self.var_values; let external_constraints = self.compute_external_query_constraints()?; diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs index cafb858794a..76c50a11102 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs @@ -23,14 +23,15 @@ use rustc_middle::ty::{ use rustc_session::config::DumpSolverProofTree; use rustc_span::DUMMY_SP; use std::io::Write; +use std::iter; use std::ops::ControlFlow; use crate::traits::vtable::{count_own_vtable_entries, prepare_vtable_segments, VtblSegment}; use super::inspect::ProofTreeBuilder; -use super::SolverMode; use super::{search_graph, GoalEvaluationKind}; use super::{search_graph::SearchGraph, Goal}; +use super::{GoalSource, SolverMode}; pub use select::InferCtxtSelectExt; mod canonical; @@ -105,7 +106,7 @@ pub(super) struct NestedGoals<'tcx> { /// can be unsound with more powerful coinduction in the future. pub(super) normalizes_to_hack_goal: Option<Goal<'tcx, ty::NormalizesTo<'tcx>>>, /// The rest of the goals which have not yet processed or remain ambiguous. 
- pub(super) goals: Vec<Goal<'tcx, ty::Predicate<'tcx>>>, + pub(super) goals: Vec<(GoalSource, Goal<'tcx, ty::Predicate<'tcx>>)>, } impl<'tcx> NestedGoals<'tcx> { @@ -156,7 +157,7 @@ impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> { Option<inspect::GoalEvaluation<'tcx>>, ) { EvalCtxt::enter_root(self, generate_proof_tree, |ecx| { - ecx.evaluate_goal(GoalEvaluationKind::Root, goal) + ecx.evaluate_goal(GoalEvaluationKind::Root, GoalSource::Misc, goal) }) } } @@ -334,6 +335,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { fn evaluate_goal( &mut self, goal_evaluation_kind: GoalEvaluationKind, + source: GoalSource, goal: Goal<'tcx, ty::Predicate<'tcx>>, ) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> { let (orig_values, canonical_goal) = self.canonicalize_goal(goal); @@ -353,13 +355,13 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { Ok(response) => response, }; - let has_changed = !canonical_response.value.var_values.is_identity_modulo_regions() - || !canonical_response.value.external_constraints.opaque_types.is_empty(); - let (certainty, nested_goals) = match self.instantiate_and_apply_query_response( - goal.param_env, - orig_values, - canonical_response, - ) { + let (certainty, has_changed, nested_goals) = match self + .instantiate_response_discarding_overflow( + goal.param_env, + source, + orig_values, + canonical_response, + ) { Err(e) => { self.inspect.goal_evaluation(goal_evaluation); return Err(e); @@ -386,6 +388,44 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { Ok((has_changed, certainty, nested_goals)) } + fn instantiate_response_discarding_overflow( + &mut self, + param_env: ty::ParamEnv<'tcx>, + source: GoalSource, + original_values: Vec<ty::GenericArg<'tcx>>, + response: CanonicalResponse<'tcx>, + ) -> Result<(Certainty, bool, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> { + // The old solver did not evaluate nested goals when normalizing. + // It returned the selection constraints allowing a `Projection` + // obligation to not hold in coherence while avoiding the fatal error + // from overflow. + // + // We match this behavior here by considering all constraints + // from nested goals which are not from where-bounds. We will already + // need to track which nested goals are required by impl where-bounds + // for coinductive cycles, so we simply reuse that here. + // + // While we could consider overflow constraints in more cases, this should + // not be necessary for backcompat and results in better perf. It also + // avoids a potential inconsistency which would otherwise require some + // tracking for root goals as well. See #119071 for an example. 
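The `instantiate_response_discarding_overflow` function (continued below) encodes the rule the comment above describes: on overflow, the nested goal's constraints are dropped unless the goal currently being proven is a normalizes-to goal and the nested goal did not come from an impl where-bound. A standalone sketch of just that decision, with stand-in `Certainty`/`GoalSource` types rather than the solver's own:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum GoalSource {
    Misc,
    ImplWhereBound,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Certainty {
    Yes,
    Ambiguous,
    Overflow,
}

/// Returns `(certainty, has_changed)`: on overflow the constraints from the
/// nested goal are usually discarded, except in the normalizes-to case noted
/// above, which mirrors how the old solver's normalization behaved.
fn instantiate_response(
    certainty: Certainty,
    current_goal_is_normalizes_to: bool,
    source: GoalSource,
    response_has_constraints: bool,
) -> (Certainty, bool) {
    let keep_overflow_constraints =
        current_goal_is_normalizes_to && source != GoalSource::ImplWhereBound;
    if certainty == Certainty::Overflow && !keep_overflow_constraints {
        (Certainty::Overflow, false)
    } else {
        (certainty, response_has_constraints)
    }
}

fn main() {
    // An overflowing impl where-bound contributes nothing...
    assert_eq!(
        instantiate_response(Certainty::Overflow, true, GoalSource::ImplWhereBound, true),
        (Certainty::Overflow, false)
    );
    // ...but an overflowing nested goal of a normalizes-to goal keeps its constraints.
    assert_eq!(
        instantiate_response(Certainty::Overflow, true, GoalSource::Misc, true),
        (Certainty::Overflow, true)
    );
}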
+ let keep_overflow_constraints = || { + self.search_graph.current_goal_is_normalizes_to() + && source != GoalSource::ImplWhereBound + }; + + if response.value.certainty == Certainty::OVERFLOW && !keep_overflow_constraints() { + Ok((Certainty::OVERFLOW, false, Vec::new())) + } else { + let has_changed = !response.value.var_values.is_identity_modulo_regions() + || !response.value.external_constraints.opaque_types.is_empty(); + + let (certainty, nested_goals) = + self.instantiate_and_apply_query_response(param_env, original_values, response)?; + Ok((certainty, has_changed, nested_goals)) + } + } + fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> { let Goal { param_env, predicate } = goal; let kind = predicate.kind(); @@ -439,7 +479,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { } else { let kind = self.infcx.instantiate_binder_with_placeholders(kind); let goal = goal.with(self.tcx(), ty::Binder::dummy(kind)); - self.add_goal(goal); + self.add_goal(GoalSource::Misc, goal); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } } @@ -488,6 +528,13 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { let mut goals = core::mem::replace(&mut self.nested_goals, NestedGoals::new()); self.inspect.evaluate_added_goals_loop_start(); + + fn with_misc_source<'tcx>( + it: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>, + ) -> impl Iterator<Item = (GoalSource, Goal<'tcx, ty::Predicate<'tcx>>)> { + iter::zip(iter::repeat(GoalSource::Misc), it) + } + // If this loop did not result in any progress, what's our final certainty. let mut unchanged_certainty = Some(Certainty::Yes); if let Some(goal) = goals.normalizes_to_hack_goal.take() { @@ -501,9 +548,10 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { let (_, certainty, instantiate_goals) = self.evaluate_goal( GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::Yes }, + GoalSource::Misc, unconstrained_goal, )?; - self.nested_goals.goals.extend(instantiate_goals); + self.nested_goals.goals.extend(with_misc_source(instantiate_goals)); // Finally, equate the goal's RHS with the unconstrained var. // We put the nested goals from this into goals instead of @@ -512,7 +560,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { // matters in practice, though. let eq_goals = self.eq_and_get_goals(goal.param_env, goal.predicate.term, unconstrained_rhs)?; - goals.goals.extend(eq_goals); + goals.goals.extend(with_misc_source(eq_goals)); // We only look at the `projection_ty` part here rather than // looking at the "has changed" return from evaluate_goal, @@ -533,12 +581,13 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { } } - for goal in goals.goals.drain(..) { + for (source, goal) in goals.goals.drain(..) 
{ let (has_changed, certainty, instantiate_goals) = self.evaluate_goal( GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::No }, + source, goal, )?; - self.nested_goals.goals.extend(instantiate_goals); + self.nested_goals.goals.extend(with_misc_source(instantiate_goals)); if has_changed { unchanged_certainty = None; } @@ -546,7 +595,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { match certainty { Certainty::Yes => {} Certainty::Maybe(_) => { - self.nested_goals.goals.push(goal); + self.nested_goals.goals.push((source, goal)); unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); } } @@ -670,7 +719,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { .at(&ObligationCause::dummy(), param_env) .eq(DefineOpaqueTypes::No, lhs, rhs) .map(|InferOk { value: (), obligations }| { - self.add_goals(obligations.into_iter().map(|o| o.into())); + self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); }) .map_err(|e| { debug!(?e, "failed to equate"); @@ -689,7 +738,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { .at(&ObligationCause::dummy(), param_env) .sub(DefineOpaqueTypes::No, sub, sup) .map(|InferOk { value: (), obligations }| { - self.add_goals(obligations.into_iter().map(|o| o.into())); + self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); }) .map_err(|e| { debug!(?e, "failed to subtype"); @@ -709,7 +758,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { .at(&ObligationCause::dummy(), param_env) .relate(DefineOpaqueTypes::No, lhs, variance, rhs) .map(|InferOk { value: (), obligations }| { - self.add_goals(obligations.into_iter().map(|o| o.into())); + self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); }) .map_err(|e| { debug!(?e, "failed to relate"); @@ -842,7 +891,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { true, &mut obligations, )?; - self.add_goals(obligations.into_iter().map(|o| o.into())); + self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); Ok(()) } @@ -862,7 +911,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { hidden_ty, &mut obligations, ); - self.add_goals(obligations.into_iter().map(|o| o.into())); + self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); } // Do something for each opaque/hidden pair defined with `def_id` in the diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index a287582dca7..6db53d6ddc4 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -119,7 +119,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { ) { for step in &probe.steps { match step { - &inspect::ProbeStep::AddGoal(goal) => nested_goals.push(goal), + &inspect::ProbeStep::AddGoal(_source, goal) => nested_goals.push(goal), inspect::ProbeStep::NestedProbe(ref probe) => { // Nested probes have to prove goals added in their parent // but do not leak them, so we truncate the added goals diff --git a/compiler/rustc_trait_selection/src/solve/inspect/build.rs b/compiler/rustc_trait_selection/src/solve/inspect/build.rs index c857aae572d..d8caef5b03f 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/build.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/build.rs @@ -7,7 +7,7 @@ use std::mem; use rustc_middle::traits::query::NoSolution; use rustc_middle::traits::solve::{ - CanonicalInput, Certainty, Goal, IsNormalizesToHack, QueryInput, QueryResult, + CanonicalInput, Certainty, Goal, GoalSource, IsNormalizesToHack, QueryInput, QueryResult, 
}; use rustc_middle::ty::{self, TyCtxt}; use rustc_session::config::DumpSolverProofTree; @@ -216,7 +216,7 @@ impl<'tcx> WipProbe<'tcx> { #[derive(Eq, PartialEq, Debug)] enum WipProbeStep<'tcx> { - AddGoal(inspect::CanonicalState<'tcx, Goal<'tcx, ty::Predicate<'tcx>>>), + AddGoal(GoalSource, inspect::CanonicalState<'tcx, Goal<'tcx, ty::Predicate<'tcx>>>), EvaluateGoals(WipAddedGoalsEvaluation<'tcx>), NestedProbe(WipProbe<'tcx>), CommitIfOkStart, @@ -226,7 +226,7 @@ enum WipProbeStep<'tcx> { impl<'tcx> WipProbeStep<'tcx> { fn finalize(self) -> inspect::ProbeStep<'tcx> { match self { - WipProbeStep::AddGoal(goal) => inspect::ProbeStep::AddGoal(goal), + WipProbeStep::AddGoal(source, goal) => inspect::ProbeStep::AddGoal(source, goal), WipProbeStep::EvaluateGoals(eval) => inspect::ProbeStep::EvaluateGoals(eval.finalize()), WipProbeStep::NestedProbe(probe) => inspect::ProbeStep::NestedProbe(probe.finalize()), WipProbeStep::CommitIfOkStart => inspect::ProbeStep::CommitIfOkStart, @@ -428,7 +428,11 @@ impl<'tcx> ProofTreeBuilder<'tcx> { } } - pub fn add_goal(ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, ty::Predicate<'tcx>>) { + pub fn add_goal( + ecx: &mut EvalCtxt<'_, 'tcx>, + source: GoalSource, + goal: Goal<'tcx, ty::Predicate<'tcx>>, + ) { // Can't use `if let Some(this) = ecx.inspect.as_mut()` here because // we have to immutably use the `EvalCtxt` for `make_canonical_state`. if ecx.inspect.is_noop() { @@ -442,7 +446,9 @@ impl<'tcx> ProofTreeBuilder<'tcx> { evaluation: WipProbe { steps, .. }, .. }) - | DebugSolver::Probe(WipProbe { steps, .. }) => steps.push(WipProbeStep::AddGoal(goal)), + | DebugSolver::Probe(WipProbe { steps, .. }) => { + steps.push(WipProbeStep::AddGoal(source, goal)) + } s => unreachable!("tried to add {goal:?} to {s:?}"), } } diff --git a/compiler/rustc_trait_selection/src/solve/mod.rs b/compiler/rustc_trait_selection/src/solve/mod.rs index 1e58106e353..2f3111a2414 100644 --- a/compiler/rustc_trait_selection/src/solve/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/mod.rs @@ -19,8 +19,8 @@ use rustc_infer::infer::DefineOpaqueTypes; use rustc_infer::traits::query::NoSolution; use rustc_middle::infer::canonical::CanonicalVarInfos; use rustc_middle::traits::solve::{ - CanonicalResponse, Certainty, ExternalConstraintsData, Goal, IsNormalizesToHack, QueryResult, - Response, + CanonicalResponse, Certainty, ExternalConstraintsData, Goal, GoalSource, IsNormalizesToHack, + QueryResult, Response, }; use rustc_middle::ty::{self, OpaqueTypeKey, Ty, TyCtxt, UniverseIndex}; use rustc_middle::ty::{ @@ -157,7 +157,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { ) -> QueryResult<'tcx> { match self.well_formed_goals(goal.param_env, goal.predicate) { Some(goals) => { - self.add_goals(goals); + self.add_goals(GoalSource::Misc, goals); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } None => self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS), @@ -223,15 +223,19 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { } #[instrument(level = "debug", skip(self))] - fn add_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) { - inspect::ProofTreeBuilder::add_goal(self, goal); - self.nested_goals.goals.push(goal); + fn add_goal(&mut self, source: GoalSource, goal: Goal<'tcx, ty::Predicate<'tcx>>) { + inspect::ProofTreeBuilder::add_goal(self, source, goal); + self.nested_goals.goals.push((source, goal)); } #[instrument(level = "debug", skip(self, goals))] - fn add_goals(&mut self, goals: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>) { + fn add_goals( + 
&mut self, + source: GoalSource, + goals: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>, + ) { for goal in goals { - self.add_goal(goal); + self.add_goal(source, goal); } } @@ -335,7 +339,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { param_env, ty::NormalizesTo { alias, term: normalized_ty.into() }, ); - this.add_goal(normalizes_to_goal); + this.add_goal(GoalSource::Misc, normalizes_to_goal); this.try_evaluate_added_goals()?; let ty = this.resolve_vars_if_possible(normalized_ty); Ok(this.try_normalize_ty_recur(param_env, define_opaque_types, depth + 1, ty)) diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/inherent.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/inherent.rs index c3b8ae9a943..b2dff9b48ff 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/inherent.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/inherent.rs @@ -4,10 +4,10 @@ //! 1. instantiate substs, //! 2. equate the self type, and //! 3. instantiate and register where clauses. -use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::traits::solve::{Certainty, Goal, GoalSource, QueryResult}; use rustc_middle::ty; -use super::EvalCtxt; +use crate::solve::EvalCtxt; impl<'tcx> EvalCtxt<'_, 'tcx> { pub(super) fn normalize_inherent_associated_type( @@ -38,7 +38,13 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { .expect("expected goal term to be fully unconstrained"); // Check both where clauses on the impl and IAT + // + // FIXME(-Znext-solver=coinductive): I think this should be split + // and we tag the impl bounds with `GoalSource::ImplWhereBound`? + // Right not this includes both the impl and the assoc item where bounds, + // and I don't think the assoc item where-bounds are allowed to be coinductive. self.add_goals( + GoalSource::Misc, tcx.predicates_of(inherent.def_id) .instantiate(tcx, inherent_substs) .into_iter() diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs index 980ef862366..0e9656a1e18 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs @@ -1,7 +1,7 @@ use crate::traits::{check_args_compatible, specialization_graph}; use super::assembly::{self, structural_traits, Candidate}; -use super::EvalCtxt; +use super::{EvalCtxt, GoalSource}; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; use rustc_hir::LangItem; @@ -128,6 +128,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { // Add GAT where clauses from the trait's definition ecx.add_goals( + GoalSource::Misc, tcx.predicates_of(goal.predicate.def_id()) .instantiate_own(tcx, goal.predicate.alias.args) .map(|(pred, _)| goal.with(tcx, pred)), @@ -169,10 +170,11 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { .predicates .into_iter() .map(|pred| goal.with(tcx, pred)); - ecx.add_goals(where_clause_bounds); + ecx.add_goals(GoalSource::ImplWhereBound, where_clause_bounds); // Add GAT where clauses from the trait's definition ecx.add_goals( + GoalSource::Misc, tcx.predicates_of(goal.predicate.def_id()) .instantiate_own(tcx, goal.predicate.alias.args) .map(|(pred, _)| goal.with(tcx, pred)), @@ -413,7 +415,8 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { DUMMY_SP, [ty::GenericArg::from(goal.predicate.self_ty())], ); - ecx.add_goal(goal.with(tcx, sized_predicate)); + // FIXME(-Znext-solver=coinductive): Should this be `GoalSource::ImplWhereBound`? 
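The pointee-metadata hunks above recurse into a struct's last field or a tuple's last element, since only the tail of an aggregate can be unsized. A toy sketch of that "look at the tail" shape; `Ty` and `Metadata` are invented stand-ins and the real rules cover far more cases (str, trait objects, foreign types, and so on):

#[derive(Debug)]
enum Ty {
    Unit,
    U32,
    Slice(Box<Ty>),
    Tuple(Vec<Ty>),
    Struct(Vec<Ty>),
}

#[derive(Debug, PartialEq)]
enum Metadata {
    None,   // thin pointer
    Length, // slices carry a length
}

fn metadata_of(ty: &Ty) -> Metadata {
    match ty {
        Ty::Unit | Ty::U32 => Metadata::None,
        Ty::Slice(_) => Metadata::Length,
        // For aggregates, only the tail field can be unsized, so the metadata
        // of the whole is the metadata of the tail (unit for empty aggregates).
        Ty::Tuple(elems) => elems.last().map_or(Metadata::None, metadata_of),
        Ty::Struct(fields) => fields.last().map_or(Metadata::None, metadata_of),
    }
}

fn main() {
    let ty = Ty::Struct(vec![Ty::U32, Ty::Slice(Box::new(Ty::U32))]);
    assert_eq!(metadata_of(&ty), Metadata::Length);
    println!("{:?}", metadata_of(&ty));
}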
+ ecx.add_goal(GoalSource::Misc, goal.with(tcx, sized_predicate)); tcx.types.unit } @@ -421,7 +424,11 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { None => tcx.types.unit, Some(field_def) => { let self_ty = field_def.ty(tcx, args); - ecx.add_goal(goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty))); + // FIXME(-Znext-solver=coinductive): Should this be `GoalSource::ImplWhereBound`? + ecx.add_goal( + GoalSource::Misc, + goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty)), + ); return ecx .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); } @@ -431,7 +438,11 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { ty::Tuple(elements) => match elements.last() { None => tcx.types.unit, Some(&self_ty) => { - ecx.add_goal(goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty))); + // FIXME(-Znext-solver=coinductive): Should this be `GoalSource::ImplWhereBound`? + ecx.add_goal( + GoalSource::Misc, + goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty)), + ); return ecx .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); } diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/weak_types.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/weak_types.rs index 8d2bbec6d8b..6d5728797d1 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/weak_types.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/weak_types.rs @@ -3,10 +3,10 @@ //! //! Since a weak alias is not ambiguous, this just computes the `type_of` of //! the alias and registers the where-clauses of the type alias. -use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::traits::solve::{Certainty, Goal, GoalSource, QueryResult}; use rustc_middle::ty; -use super::EvalCtxt; +use crate::solve::EvalCtxt; impl<'tcx> EvalCtxt<'_, 'tcx> { pub(super) fn normalize_weak_type( @@ -22,6 +22,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // Check where clauses self.add_goals( + GoalSource::Misc, tcx.predicates_of(weak_ty.def_id) .instantiate(tcx, weak_ty.args) .predicates diff --git a/compiler/rustc_trait_selection/src/solve/project_goals.rs b/compiler/rustc_trait_selection/src/solve/project_goals.rs index d0e92a54ceb..30ae385a8a0 100644 --- a/compiler/rustc_trait_selection/src/solve/project_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/project_goals.rs @@ -1,3 +1,5 @@ +use crate::solve::GoalSource; + use super::EvalCtxt; use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; use rustc_middle::ty::{self, ProjectionPredicate}; @@ -22,14 +24,15 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ) .into(), }; - self.add_goal(goal.with( + let goal = goal.with( tcx, ty::PredicateKind::AliasRelate( projection_term, goal.predicate.term, ty::AliasRelationDirection::Equate, ), - )); + ); + self.add_goal(GoalSource::Misc, goal); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } } diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index 2a08b80e02a..2a161c2d956 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -8,6 +8,7 @@ use rustc_index::IndexVec; use rustc_middle::dep_graph::dep_kinds; use rustc_middle::traits::solve::CacheData; use rustc_middle::traits::solve::{CanonicalInput, Certainty, EvaluationCache, QueryResult}; +use rustc_middle::ty; use rustc_middle::ty::TyCtxt; use rustc_session::Limit; use std::collections::hash_map::Entry; @@ 
-111,6 +112,15 @@ impl<'tcx> SearchGraph<'tcx> { self.stack.is_empty() } + pub(super) fn current_goal_is_normalizes_to(&self) -> bool { + self.stack.raw.last().map_or(false, |e| { + matches!( + e.input.value.goal.predicate.kind().skip_binder(), + ty::PredicateKind::NormalizesTo(..) + ) + }) + } + /// Returns the remaining depth allowed for nested goals. /// /// This is generally simply one less than the current depth. diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index deb50e6aefd..ac3ffd2d6c2 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -1,7 +1,7 @@ //! Dealing with trait goals, i.e. `T: Trait<'a, U>`. use super::assembly::{self, structural_traits, Candidate}; -use super::{EvalCtxt, SolverMode}; +use super::{EvalCtxt, GoalSource, SolverMode}; use rustc_hir::def_id::DefId; use rustc_hir::{LangItem, Movability}; use rustc_infer::traits::query::NoSolution; @@ -72,7 +72,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { .predicates .into_iter() .map(|pred| goal.with(tcx, pred)); - ecx.add_goals(where_clause_bounds); + ecx.add_goals(GoalSource::ImplWhereBound, where_clause_bounds); ecx.evaluate_added_goals_and_make_canonical_response(maximal_certainty) }) @@ -172,7 +172,11 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { let nested_obligations = tcx .predicates_of(goal.predicate.def_id()) .instantiate(tcx, goal.predicate.trait_ref.args); - ecx.add_goals(nested_obligations.predicates.into_iter().map(|p| goal.with(tcx, p))); + // FIXME(-Znext-solver=coinductive): Should this be `GoalSource::ImplWhereBound`? + ecx.add_goals( + GoalSource::Misc, + nested_obligations.predicates.into_iter().map(|p| goal.with(tcx, p)), + ); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) } @@ -512,17 +516,23 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // Check that the type implements all of the predicates of the trait object. // (i.e. the principal, all of the associated types match, and any auto traits) - ecx.add_goals(b_data.iter().map(|pred| goal.with(tcx, pred.with_self_ty(tcx, a_ty)))); + ecx.add_goals( + GoalSource::ImplWhereBound, + b_data.iter().map(|pred| goal.with(tcx, pred.with_self_ty(tcx, a_ty))), + ); // The type must be `Sized` to be unsized. if let Some(sized_def_id) = tcx.lang_items().sized_trait() { - ecx.add_goal(goal.with(tcx, ty::TraitRef::new(tcx, sized_def_id, [a_ty]))); + ecx.add_goal( + GoalSource::ImplWhereBound, + goal.with(tcx, ty::TraitRef::new(tcx, sized_def_id, [a_ty])), + ); } else { return Err(NoSolution); } // The type must outlive the lifetime of the `dyn` we're unsizing into. - ecx.add_goal(goal.with(tcx, ty::OutlivesPredicate(a_ty, b_region))); + ecx.add_goal(GoalSource::Misc, goal.with(tcx, ty::OutlivesPredicate(a_ty, b_region))); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) } @@ -749,11 +759,14 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { } // Also require that a_ty's lifetime outlives b_ty's lifetime. 
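The new `current_goal_is_normalizes_to` helper above peeks at the top of the search-graph stack and pattern-matches the goal's predicate kind, defaulting to false for an empty stack. A small standalone sketch of that `last().map_or(false, ..)` plus `matches!` idiom, with simplified stand-in types:

#[derive(Debug)]
enum PredicateKind {
    Trait(&'static str),
    NormalizesTo(&'static str),
}

struct StackEntry {
    goal: PredicateKind,
}

struct SearchGraph {
    stack: Vec<StackEntry>,
}

impl SearchGraph {
    /// True if the goal currently being proven (the top of the stack) is a
    /// normalizes-to goal; false for any other goal or an empty stack.
    fn current_goal_is_normalizes_to(&self) -> bool {
        self.stack
            .last()
            .map_or(false, |entry| matches!(entry.goal, PredicateKind::NormalizesTo(..)))
    }
}

fn main() {
    let graph = SearchGraph {
        stack: vec![
            StackEntry { goal: PredicateKind::Trait("T: Clone") },
            StackEntry { goal: PredicateKind::NormalizesTo("<T as Iterator>::Item") },
        ],
    };
    assert!(graph.current_goal_is_normalizes_to());
    println!("top goal is normalizes-to: {}", graph.current_goal_is_normalizes_to());
}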
- self.add_goal(Goal::new( - self.tcx(), - param_env, - ty::Binder::dummy(ty::OutlivesPredicate(a_region, b_region)), - )); + self.add_goal( + GoalSource::ImplWhereBound, + Goal::new( + self.tcx(), + param_env, + ty::Binder::dummy(ty::OutlivesPredicate(a_region, b_region)), + ), + ); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } @@ -826,14 +839,17 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // Finally, we require that `TailA: Unsize<TailB>` for the tail field // types. self.eq(goal.param_env, unsized_a_ty, b_ty)?; - self.add_goal(goal.with( - tcx, - ty::TraitRef::new( + self.add_goal( + GoalSource::ImplWhereBound, + goal.with( tcx, - tcx.lang_items().unsize_trait().unwrap(), - [a_tail_ty, b_tail_ty], + ty::TraitRef::new( + tcx, + tcx.lang_items().unsize_trait().unwrap(), + [a_tail_ty, b_tail_ty], + ), ), - )); + ); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } @@ -865,14 +881,17 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { self.eq(goal.param_env, unsized_a_ty, b_ty)?; // Similar to ADTs, require that we can unsize the tail. - self.add_goal(goal.with( - tcx, - ty::TraitRef::new( + self.add_goal( + GoalSource::ImplWhereBound, + goal.with( tcx, - tcx.lang_items().unsize_trait().unwrap(), - [a_last_ty, b_last_ty], + ty::TraitRef::new( + tcx, + tcx.lang_items().unsize_trait().unwrap(), + [a_last_ty, b_last_ty], + ), ), - )); + ); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } @@ -981,6 +1000,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ) -> QueryResult<'tcx> { self.probe_misc_candidate("constituent tys").enter(|ecx| { ecx.add_goals( + GoalSource::ImplWhereBound, constituent_tys(ecx, goal.predicate.self_ty())? .into_iter() .map(|ty| goal.with(ecx.tcx(), goal.predicate.with_self_ty(ecx.tcx(), ty))) diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index d5635812c74..a1b0ada0e8a 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -2327,8 +2327,9 @@ fn confirm_fn_pointer_candidate<'cx, 'tcx>( obligation: &ProjectionTyObligation<'tcx>, nested: Vec<PredicateObligation<'tcx>>, ) -> Progress<'tcx> { + let tcx = selcx.tcx(); let fn_type = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()); - let sig = fn_type.fn_sig(selcx.tcx()); + let sig = fn_type.fn_sig(tcx); let Normalized { value: sig, obligations } = normalize_with_depth( selcx, obligation.param_env, @@ -2337,9 +2338,24 @@ fn confirm_fn_pointer_candidate<'cx, 'tcx>( sig, ); - confirm_callable_candidate(selcx, obligation, sig, util::TupleArgumentsFlag::Yes) - .with_addl_obligations(nested) - .with_addl_obligations(obligations) + let host_effect_param = match *fn_type.kind() { + ty::FnDef(def_id, args) => tcx + .generics_of(def_id) + .host_effect_index + .map_or(tcx.consts.true_, |idx| args.const_at(idx)), + ty::FnPtr(_) => tcx.consts.true_, + _ => unreachable!("only expected FnPtr or FnDef in `confirm_fn_pointer_candidate`"), + }; + + confirm_callable_candidate( + selcx, + obligation, + sig, + util::TupleArgumentsFlag::Yes, + host_effect_param, + ) + .with_addl_obligations(nested) + .with_addl_obligations(obligations) } fn confirm_closure_candidate<'cx, 'tcx>( @@ -2362,9 +2378,16 @@ fn confirm_closure_candidate<'cx, 'tcx>( debug!(?obligation, ?closure_sig, ?obligations, "confirm_closure_candidate"); - confirm_callable_candidate(selcx, obligation, closure_sig, util::TupleArgumentsFlag::No) - .with_addl_obligations(nested) - 
.with_addl_obligations(obligations) + confirm_callable_candidate( + selcx, + obligation, + closure_sig, + util::TupleArgumentsFlag::No, + // FIXME(effects): This doesn't handle const closures correctly! + selcx.tcx().consts.true_, + ) + .with_addl_obligations(nested) + .with_addl_obligations(obligations) } fn confirm_callable_candidate<'cx, 'tcx>( @@ -2372,6 +2395,7 @@ fn confirm_callable_candidate<'cx, 'tcx>( obligation: &ProjectionTyObligation<'tcx>, fn_sig: ty::PolyFnSig<'tcx>, flag: util::TupleArgumentsFlag, + fn_host_effect: ty::Const<'tcx>, ) -> Progress<'tcx> { let tcx = selcx.tcx(); @@ -2386,6 +2410,7 @@ fn confirm_callable_candidate<'cx, 'tcx>( obligation.predicate.self_ty(), fn_sig, flag, + fn_host_effect, ) .map_bound(|(trait_ref, ret_type)| ty::ProjectionPredicate { projection_ty: ty::AliasTy::new(tcx, fn_once_output_def_id, trait_ref.args), diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index cd7fd028a46..73e06b84085 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -355,17 +355,23 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // Provide an impl, but only for suitable `fn` pointers. ty::FnPtr(sig) => { if sig.is_fn_trait_compatible() { - candidates.vec.push(FnPointerCandidate { is_const: false }); + candidates + .vec + .push(FnPointerCandidate { fn_host_effect: self.tcx().consts.true_ }); } } // Provide an impl for suitable functions, rejecting `#[target_feature]` functions (RFC 2396). - ty::FnDef(def_id, _) => { - if self.tcx().fn_sig(def_id).skip_binder().is_fn_trait_compatible() - && self.tcx().codegen_fn_attrs(def_id).target_features.is_empty() + ty::FnDef(def_id, args) => { + let tcx = self.tcx(); + if tcx.fn_sig(def_id).skip_binder().is_fn_trait_compatible() + && tcx.codegen_fn_attrs(def_id).target_features.is_empty() { - candidates - .vec - .push(FnPointerCandidate { is_const: self.tcx().is_const_fn(def_id) }); + candidates.vec.push(FnPointerCandidate { + fn_host_effect: tcx + .generics_of(def_id) + .host_effect_index + .map_or(tcx.consts.true_, |idx| args.const_at(idx)), + }); } } _ => {} diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index bcaf6aff66b..ce3fc2185ba 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -103,8 +103,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ImplSource::Builtin(BuiltinImplSource::Misc, vtable_iterator) } - FnPointerCandidate { is_const } => { - let data = self.confirm_fn_pointer_candidate(obligation, is_const)?; + FnPointerCandidate { fn_host_effect } => { + let data = self.confirm_fn_pointer_candidate(obligation, fn_host_effect)?; ImplSource::Builtin(BuiltinImplSource::Misc, data) } @@ -653,8 +653,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn confirm_fn_pointer_candidate( &mut self, obligation: &PolyTraitObligation<'tcx>, - // FIXME(effects) - _is_const: bool, + fn_host_effect: ty::Const<'tcx>, ) -> Result<Vec<PredicateObligation<'tcx>>, SelectionError<'tcx>> { debug!(?obligation, "confirm_fn_pointer_candidate"); @@ -675,6 +674,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self_ty, sig, util::TupleArgumentsFlag::Yes, + fn_host_effect, ) .map_bound(|(trait_ref, _)| trait_ref); @@ -860,7 +860,8 @@ 
impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { bug!("closure candidate for non-closure {:?}", obligation); }; - let trait_ref = self.closure_trait_ref_unnormalized(obligation, args); + let trait_ref = + self.closure_trait_ref_unnormalized(obligation, args, self.tcx().consts.true_); let nested = self.confirm_poly_trait_refs(obligation, trait_ref)?; debug!(?closure_def_id, ?trait_ref, ?nested, "confirm closure candidate obligations"); diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 9886e33ca3b..23f7bdd1584 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -1226,11 +1226,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { if unbound_input_types && stack.iter().skip(1).any(|prev| { stack.obligation.param_env == prev.obligation.param_env - && self.match_fresh_trait_refs( - stack.fresh_trait_pred, - prev.fresh_trait_pred, - prev.obligation.param_env, - ) + && self.match_fresh_trait_refs(stack.fresh_trait_pred, prev.fresh_trait_pred) }) { debug!("evaluate_stack --> unbound argument, recursive --> giving up",); @@ -1865,7 +1861,9 @@ impl<'tcx> SelectionContext<'_, 'tcx> { } // Drop otherwise equivalent non-const fn pointer candidates - (FnPointerCandidate { .. }, FnPointerCandidate { is_const: false }) => DropVictim::Yes, + (FnPointerCandidate { .. }, FnPointerCandidate { fn_host_effect }) => { + DropVictim::drop_if(*fn_host_effect == self.tcx().consts.true_) + } ( ParamCandidate(ref other_cand), @@ -2630,9 +2628,8 @@ impl<'tcx> SelectionContext<'_, 'tcx> { &self, previous: ty::PolyTraitPredicate<'tcx>, current: ty::PolyTraitPredicate<'tcx>, - param_env: ty::ParamEnv<'tcx>, ) -> bool { - let mut matcher = MatchAgainstFreshVars::new(self.tcx(), param_env); + let mut matcher = MatchAgainstFreshVars::new(self.tcx()); matcher.relate(previous, current).is_ok() } @@ -2660,6 +2657,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { &mut self, obligation: &PolyTraitObligation<'tcx>, args: GenericArgsRef<'tcx>, + fn_host_effect: ty::Const<'tcx>, ) -> ty::PolyTraitRef<'tcx> { let closure_sig = args.as_closure().sig(); @@ -2680,6 +2678,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { self_ty, closure_sig, util::TupleArgumentsFlag::No, + fn_host_effect, ) .map_bound(|(trait_ref, _)| trait_ref) } diff --git a/compiler/rustc_trait_selection/src/traits/structural_normalize.rs b/compiler/rustc_trait_selection/src/traits/structural_normalize.rs index b9ab26fe2fe..e0f9fdc3827 100644 --- a/compiler/rustc_trait_selection/src/traits/structural_normalize.rs +++ b/compiler/rustc_trait_selection/src/traits/structural_normalize.rs @@ -3,7 +3,7 @@ use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKi use rustc_infer::traits::{FulfillmentError, TraitEngine}; use rustc_middle::ty::{self, Ty}; -use crate::traits::{query::evaluate_obligation::InferCtxtExt, NormalizeExt, Obligation}; +use crate::traits::{NormalizeExt, Obligation}; pub trait StructurallyNormalizeExt<'tcx> { fn structurally_normalize( @@ -16,42 +16,43 @@ pub trait StructurallyNormalizeExt<'tcx> { impl<'tcx> StructurallyNormalizeExt<'tcx> for At<'_, 'tcx> { fn structurally_normalize( &self, - mut ty: Ty<'tcx>, + ty: Ty<'tcx>, fulfill_cx: &mut dyn TraitEngine<'tcx>, ) -> Result<Ty<'tcx>, Vec<FulfillmentError<'tcx>>> { assert!(!ty.is_ty_var(), "should have resolved vars before calling"); if self.infcx.next_trait_solver() { - // FIXME(-Znext-solver): correctly handle - // overflow here. 
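The hunks above in project.rs, candidate_assembly.rs and confirmation.rs replace the old `is_const: bool` flag on `FnPointerCandidate` with an actual `fn_host_effect` const, read from the callee's generics when it has a host effect parameter and defaulting to "true" otherwise (closures still just pass the true const, per the FIXME(effects) above). A standalone sketch of that `host_effect_index.map_or(..)` lookup; `Generics`, `Const` and the `args` slice are toy stand-ins, and the host=true/false reading follows my understanding of the effects desugaring:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Const {
    /// `host = true`: ordinary runtime ("host") context.
    True,
    /// `host = false`: the call happens in a const context.
    False,
}

struct Generics {
    /// Index of the `host` effect parameter, if the item has one.
    host_effect_index: Option<usize>,
}

fn fn_host_effect(generics: &Generics, args: &[Const]) -> Const {
    // Items without a host effect parameter behave as if `host = true`.
    generics.host_effect_index.map_or(Const::True, |idx| args[idx])
}

fn main() {
    let plain_fn = Generics { host_effect_index: None };
    assert_eq!(fn_host_effect(&plain_fn, &[]), Const::True);

    let const_trait_fn = Generics { host_effect_index: Some(1) };
    assert_eq!(
        fn_host_effect(&const_trait_fn, &[Const::True, Const::False]),
        Const::False
    );
}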
- for _ in 0..256 { - let ty::Alias(ty::Projection | ty::Inherent | ty::Weak, alias) = *ty.kind() else { - break; - }; - - let new_infer_ty = self.infcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::NormalizeProjectionType, - span: self.cause.span, - }); - let obligation = Obligation::new( - self.infcx.tcx, - self.cause.clone(), - self.param_env, - ty::NormalizesTo { alias, term: new_infer_ty.into() }, - ); - if self.infcx.predicate_may_hold(&obligation) { - fulfill_cx.register_predicate_obligation(self.infcx, obligation); - let errors = fulfill_cx.select_where_possible(self.infcx); - if !errors.is_empty() { - return Err(errors); - } - ty = self.infcx.resolve_vars_if_possible(new_infer_ty); - } else { - break; - } + // FIXME(-Znext-solver): Should we resolve opaques here? + let ty::Alias(ty::Projection | ty::Inherent | ty::Weak, _) = *ty.kind() else { + return Ok(ty); + }; + + let new_infer_ty = self.infcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::NormalizeProjectionType, + span: self.cause.span, + }); + + // We simply emit an `alias-eq` goal here, since that will take care of + // normalizing the LHS of the projection until it is a rigid projection + // (or a not-yet-defined opaque in scope). + let obligation = Obligation::new( + self.infcx.tcx, + self.cause.clone(), + self.param_env, + ty::PredicateKind::AliasRelate( + ty.into(), + new_infer_ty.into(), + ty::AliasRelationDirection::Equate, + ), + ); + + fulfill_cx.register_predicate_obligation(self.infcx, obligation); + let errors = fulfill_cx.select_where_possible(self.infcx); + if !errors.is_empty() { + return Err(errors); } - Ok(ty) + Ok(self.infcx.resolve_vars_if_possible(new_infer_ty)) } else { Ok(self.normalize(ty).into_value_registering_obligations(self.infcx, fulfill_cx)) } diff --git a/compiler/rustc_trait_selection/src/traits/util.rs b/compiler/rustc_trait_selection/src/traits/util.rs index 575010ff46d..19eae93df9c 100644 --- a/compiler/rustc_trait_selection/src/traits/util.rs +++ b/compiler/rustc_trait_selection/src/traits/util.rs @@ -264,13 +264,26 @@ pub fn closure_trait_ref_and_return_type<'tcx>( self_ty: Ty<'tcx>, sig: ty::PolyFnSig<'tcx>, tuple_arguments: TupleArgumentsFlag, + fn_host_effect: ty::Const<'tcx>, ) -> ty::Binder<'tcx, (ty::TraitRef<'tcx>, Ty<'tcx>)> { assert!(!self_ty.has_escaping_bound_vars()); let arguments_tuple = match tuple_arguments { TupleArgumentsFlag::No => sig.skip_binder().inputs()[0], TupleArgumentsFlag::Yes => Ty::new_tup(tcx, sig.skip_binder().inputs()), }; - let trait_ref = ty::TraitRef::new(tcx, fn_trait_def_id, [self_ty, arguments_tuple]); + let trait_ref = if tcx.generics_of(fn_trait_def_id).host_effect_index.is_some() { + ty::TraitRef::new( + tcx, + fn_trait_def_id, + [ + ty::GenericArg::from(self_ty), + ty::GenericArg::from(arguments_tuple), + ty::GenericArg::from(fn_host_effect), + ], + ) + } else { + ty::TraitRef::new(tcx, fn_trait_def_id, [self_ty, arguments_tuple]) + }; sig.map_bound(|sig| (trait_ref, sig.output())) } diff --git a/compiler/stable_mir/src/mir/body.rs b/compiler/stable_mir/src/mir/body.rs index 5023af9ab79..b8fd9370aa6 100644 --- a/compiler/stable_mir/src/mir/body.rs +++ b/compiler/stable_mir/src/mir/body.rs @@ -27,6 +27,9 @@ pub struct Body { /// /// This is used for the "rust-call" ABI such as closures. pub(super) spread_arg: Option<Local>, + + /// The span that covers the entire function body. 
+    pub span: Span,
 }
 
 pub type BasicBlockIdx = usize;
@@ -42,6 +45,7 @@ impl Body {
         arg_count: usize,
         var_debug_info: Vec<VarDebugInfo>,
         spread_arg: Option<Local>,
+        span: Span,
     ) -> Self {
         // If locals doesn't contain enough entries, it can lead to panics in
         // `ret_local`, `arg_locals`, and `inner_locals`.
@@ -49,7 +53,7 @@ impl Body {
             locals.len() > arg_count,
             "A Body must contain at least a local for the return value and each of the function's arguments"
         );
-        Self { blocks, locals, arg_count, var_debug_info, spread_arg }
+        Self { blocks, locals, arg_count, var_debug_info, spread_arg, span }
     }
 
     /// Return local that holds this function's return value.
diff --git a/compiler/stable_mir/src/mir/visit.rs b/compiler/stable_mir/src/mir/visit.rs
index 98336a72900..ab57ff0f8f5 100644
--- a/compiler/stable_mir/src/mir/visit.rs
+++ b/compiler/stable_mir/src/mir/visit.rs
@@ -133,7 +133,7 @@ pub trait MirVisitor {
     }
 
     fn super_body(&mut self, body: &Body) {
-        let Body { blocks, locals: _, arg_count, var_debug_info, spread_arg: _ } = body;
+        let Body { blocks, locals: _, arg_count, var_debug_info, spread_arg: _, span } = body;
 
         for bb in blocks {
             self.visit_basic_block(bb);
@@ -153,6 +153,8 @@ pub trait MirVisitor {
         for info in var_debug_info.iter() {
             self.visit_var_debug_info(info);
         }
+
+        self.visit_span(span)
     }
 
     fn super_basic_block(&mut self, bb: &BasicBlock) {
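The stable_mir hunks above add a span covering the whole function body and make the default `super_body` visit it; because `super_body` destructures `Body` exhaustively, adding the field forces every visitor path to account for it. A toy model of that visitor shape, with simplified stand-ins for stable_mir's types:

#[derive(Debug, Clone, Copy)]
struct Span(usize, usize);

struct BasicBlock;

struct Body {
    blocks: Vec<BasicBlock>,
    span: Span,
}

trait MirVisitor {
    fn visit_body(&mut self, body: &Body) {
        self.super_body(body)
    }

    fn super_body(&mut self, body: &Body) {
        // Exhaustive destructuring: forgetting `span` here would be a compile
        // error once the field exists.
        let Body { blocks, span } = body;
        for bb in blocks {
            self.visit_basic_block(bb);
        }
        self.visit_span(span);
    }

    fn visit_basic_block(&mut self, _bb: &BasicBlock) {}

    fn visit_span(&mut self, _span: &Span) {}
}

struct SpanCollector {
    spans: Vec<Span>,
}

impl MirVisitor for SpanCollector {
    fn visit_span(&mut self, span: &Span) {
        self.spans.push(*span);
    }
}

fn main() {
    let body = Body { blocks: vec![BasicBlock, BasicBlock], span: Span(0, 42) };
    let mut collector = SpanCollector { spans: Vec::new() };
    collector.visit_body(&body);
    println!("collected {:?}", collector.spans);
}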
