69 files changed, 1345 insertions, 606 deletions
diff --git a/Cargo.lock b/Cargo.lock index fa150b06b86..1296468b4e2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4038,6 +4038,7 @@ dependencies = [ "rustc_query_impl", "rustc_query_system", "rustc_resolve", + "rustc_serialize", "rustc_session", "rustc_span", "rustc_symbol_mangling", diff --git a/compiler/rustc_ast_lowering/src/index.rs b/compiler/rustc_ast_lowering/src/index.rs index 33e15d386c8..e91b78720a8 100644 --- a/compiler/rustc_ast_lowering/src/index.rs +++ b/compiler/rustc_ast_lowering/src/index.rs @@ -89,7 +89,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { } } - self.nodes.insert(hir_id.local_id, ParentedNode { parent: self.parent_node, node: node }); + self.nodes.insert(hir_id.local_id, ParentedNode { parent: self.parent_node, node }); } fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_node_id: HirId, f: F) { diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index 7eb0d553747..9225f19876d 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -2072,8 +2072,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { .map(|name| format!("function `{name}`")) .unwrap_or_else(|| { match &self.infcx.tcx.def_kind(self.mir_def_id()) { + DefKind::Closure + if self + .infcx + .tcx + .is_coroutine(self.mir_def_id().to_def_id()) => + { + "enclosing coroutine" + } DefKind::Closure => "enclosing closure", - DefKind::Coroutine => "enclosing coroutine", kind => bug!("expected closure or coroutine, found {:?}", kind), } .to_string() diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs index 1a3848eee78..e37457f48df 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mod.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs @@ -217,9 +217,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { projection: place.projection.split_at(index + 1).0, }) { let var_index = field.index(); - buf = self.upvars[var_index].place.to_string(self.infcx.tcx); + buf = self.upvars[var_index].to_string(self.infcx.tcx); ok = Ok(()); - if !self.upvars[var_index].by_ref { + if !self.upvars[var_index].is_by_ref() { buf.insert(0, '*'); } } else { @@ -250,7 +250,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { local, projection: place.projection.split_at(index + 1).0, }) { - buf = self.upvars[field.index()].place.to_string(self.infcx.tcx); + buf = self.upvars[field.index()].to_string(self.infcx.tcx); ok = Ok(()); } else { let field_name = self.describe_field( diff --git a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs index 86b761eadf5..43487b85a7b 100644 --- a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs @@ -363,8 +363,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { format!("captured variable in an `{closure_kind}` closure"); let upvar = &self.upvars[upvar_field.unwrap().index()]; - let upvar_hir_id = upvar.place.get_root_variable(); - let upvar_name = upvar.place.to_string(self.infcx.tcx); + let upvar_hir_id = upvar.get_root_variable(); + let upvar_name = upvar.to_string(self.infcx.tcx); let upvar_span = self.infcx.tcx.hir().span(upvar_hir_id); let place_name = self.describe_any_place(move_place.as_ref()); diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index a11710e0bb0..d9ec2860962 
100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -67,7 +67,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { let imm_borrow_derefed = self.upvars[upvar_index.index()] .place - .place .deref_tys() .any(|ty| matches!(ty.kind(), ty::Ref(.., hir::Mutability::Not))); @@ -85,7 +84,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { if self.is_upvar_field_projection(access_place.as_ref()).is_some() { reason = ", as it is not declared as mutable".to_string(); } else { - let name = self.upvars[upvar_index.index()].place.to_string(self.infcx.tcx); + let name = self.upvars[upvar_index.index()].to_string(self.infcx.tcx); reason = format!(", as `{name}` is not declared as mutable"); } } @@ -388,7 +387,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty )); - let captured_place = &self.upvars[upvar_index.index()].place; + let captured_place = self.upvars[upvar_index.index()]; err.span_label(span, format!("cannot {act}")); diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 1de1c181ef4..40bbc5e7c41 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -605,7 +605,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { }; let captured_place = &self.upvars[upvar_field.index()].place; - let defined_hir = match captured_place.place.base { + let defined_hir = match captured_place.base { PlaceBase::Local(hirid) => Some(hirid), PlaceBase::Upvar(upvar) => Some(upvar.var_path.hir_id), _ => None, diff --git a/compiler/rustc_borrowck/src/diagnostics/var_name.rs b/compiler/rustc_borrowck/src/diagnostics/var_name.rs index 3a104c52431..28e07f2a81e 100644 --- a/compiler/rustc_borrowck/src/diagnostics/var_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/var_name.rs @@ -2,10 +2,9 @@ #![deny(rustc::diagnostic_outside_of_impl)] use crate::region_infer::RegionInferenceContext; -use crate::Upvar; use rustc_index::IndexSlice; use rustc_middle::mir::{Body, Local}; -use rustc_middle::ty::{RegionVid, TyCtxt}; +use rustc_middle::ty::{self, RegionVid, TyCtxt}; use rustc_span::symbol::Symbol; use rustc_span::Span; @@ -15,7 +14,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { tcx: TyCtxt<'tcx>, body: &Body<'tcx>, local_names: &IndexSlice<Local, Option<Symbol>>, - upvars: &[Upvar<'tcx>], + upvars: &[&ty::CapturedPlace<'tcx>], fr: RegionVid, ) -> Option<(Option<Symbol>, Span)> { debug!("get_var_name_and_span_for_region(fr={fr:?})"); @@ -66,10 +65,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { pub(crate) fn get_upvar_name_and_span_for_region( &self, tcx: TyCtxt<'tcx>, - upvars: &[Upvar<'tcx>], + upvars: &[&ty::CapturedPlace<'tcx>], upvar_index: usize, ) -> (Symbol, Span) { - let upvar_hir_id = upvars[upvar_index].place.get_root_variable(); + let upvar_hir_id = upvars[upvar_index].get_root_variable(); debug!("get_upvar_name_and_span_for_region: upvar_hir_id={upvar_hir_id:?}"); let upvar_name = tcx.hir().name(upvar_hir_id); diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 48e75adee4c..7d6e15839c5 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -34,7 +34,7 @@ use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::*; use rustc_middle::query::Providers; use rustc_middle::traits::DefiningAnchor; -use rustc_middle::ty::{self, CapturedPlace, ParamEnv, 
RegionVid, TyCtxt}; +use rustc_middle::ty::{self, ParamEnv, RegionVid, TyCtxt}; use rustc_session::lint::builtin::UNUSED_MUT; use rustc_span::{Span, Symbol}; use rustc_target::abi::FieldIdx; @@ -99,15 +99,6 @@ use renumber::RegionCtxt; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } -// FIXME(eddyb) perhaps move this somewhere more centrally. -#[derive(Debug)] -struct Upvar<'tcx> { - place: CapturedPlace<'tcx>, - - /// If true, the capture is behind a reference. - by_ref: bool, -} - /// Associate some local constants with the `'tcx` lifetime struct TyCtxtConsts<'tcx>(TyCtxt<'tcx>); impl<'tcx> TyCtxtConsts<'tcx> { @@ -192,18 +183,6 @@ fn do_mir_borrowck<'tcx>( infcx.set_tainted_by_errors(e); errors.set_tainted_by_errors(e); } - let upvars: Vec<_> = tcx - .closure_captures(def) - .iter() - .map(|&captured_place| { - let capture = captured_place.info.capture_kind; - let by_ref = match capture { - ty::UpvarCapture::ByValue => false, - ty::UpvarCapture::ByRef(..) => true, - }; - Upvar { place: captured_place.clone(), by_ref } - }) - .collect(); // Replace all regions with fresh inference variables. This // requires first making our own copy of the MIR. This copy will @@ -253,7 +232,7 @@ fn do_mir_borrowck<'tcx>( &mut flow_inits, &mdpe.move_data, &borrow_set, - &upvars, + tcx.closure_captures(def), consumer_options, ); @@ -323,7 +302,7 @@ fn do_mir_borrowck<'tcx>( used_mut: Default::default(), used_mut_upvars: SmallVec::new(), borrow_set: Rc::clone(&borrow_set), - upvars: Vec::new(), + upvars: &[], local_names: IndexVec::from_elem(None, &promoted_body.local_decls), region_names: RefCell::default(), next_region_name: RefCell::new(1), @@ -364,7 +343,7 @@ fn do_mir_borrowck<'tcx>( used_mut: Default::default(), used_mut_upvars: SmallVec::new(), borrow_set: Rc::clone(&borrow_set), - upvars, + upvars: tcx.closure_captures(def), local_names, region_names: RefCell::default(), next_region_name: RefCell::new(1), @@ -583,7 +562,7 @@ struct MirBorrowckCtxt<'cx, 'tcx> { borrow_set: Rc<BorrowSet<'tcx>>, /// Information about upvars not necessarily preserved in types or MIR - upvars: Vec<Upvar<'tcx>>, + upvars: &'tcx [&'tcx ty::CapturedPlace<'tcx>], /// Names of local (user) variables (extracted from `var_debug_info`). 
local_names: IndexVec<Local, Option<Symbol>>, @@ -2293,7 +2272,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // unique path to the `&mut` hir::Mutability::Mut => { let mode = match self.is_upvar_field_projection(place) { - Some(field) if self.upvars[field.index()].by_ref => { + Some(field) + if self.upvars[field.index()].is_by_ref() => + { is_local_mutation_allowed } _ => LocalMutationIsAllowed::Yes, @@ -2341,7 +2322,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { place={:?}, place_base={:?}", upvar, is_local_mutation_allowed, place, place_base ); - match (upvar.place.mutability, is_local_mutation_allowed) { + match (upvar.mutability, is_local_mutation_allowed) { ( Mutability::Not, LocalMutationIsAllowed::No diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index 08db3a62ece..480358ef997 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -37,7 +37,7 @@ use crate::{ renumber, type_check::{self, MirTypeckRegionConstraints, MirTypeckResults}, universal_regions::UniversalRegions, - BorrowckInferCtxt, Upvar, + BorrowckInferCtxt, }; pub type PoloniusOutput = Output<RustcFacts>; @@ -166,7 +166,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>( flow_inits: &mut ResultsCursor<'cx, 'tcx, MaybeInitializedPlaces<'cx, 'tcx>>, move_data: &MoveData<'tcx>, borrow_set: &BorrowSet<'tcx>, - upvars: &[Upvar<'tcx>], + upvars: &[&ty::CapturedPlace<'tcx>], consumer_options: Option<ConsumerOptions>, ) -> NllOutput<'tcx> { let is_polonius_legacy_enabled = infcx.tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled(); diff --git a/compiler/rustc_borrowck/src/path_utils.rs b/compiler/rustc_borrowck/src/path_utils.rs index 51e318f0854..2d997dfadf0 100644 --- a/compiler/rustc_borrowck/src/path_utils.rs +++ b/compiler/rustc_borrowck/src/path_utils.rs @@ -4,7 +4,6 @@ use crate::borrow_set::{BorrowData, BorrowSet, TwoPhaseActivation}; use crate::places_conflict; use crate::AccessDepth; use crate::BorrowIndex; -use crate::Upvar; use rustc_data_structures::graph::dominators::Dominators; use rustc_middle::mir::BorrowKind; use rustc_middle::mir::{BasicBlock, Body, Location, Place, PlaceRef, ProjectionElem}; @@ -150,7 +149,7 @@ pub(super) fn borrow_of_local_data(place: Place<'_>) -> bool { /// of a closure type. pub(crate) fn is_upvar_field_projection<'tcx>( tcx: TyCtxt<'tcx>, - upvars: &[Upvar<'tcx>], + upvars: &[&rustc_middle::ty::CapturedPlace<'tcx>], place_ref: PlaceRef<'tcx>, body: &Body<'tcx>, ) -> Option<FieldIdx> { @@ -166,7 +165,7 @@ pub(crate) fn is_upvar_field_projection<'tcx>( Some((place_base, ProjectionElem::Field(field, _ty))) => { let base_ty = place_base.ty(body, tcx).ty; if (base_ty.is_closure() || base_ty.is_coroutine()) - && (!by_ref || upvars[field.index()].by_ref) + && (!by_ref || upvars[field.index()].is_by_ref()) { Some(field) } else { diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index dff86f6343d..d4fd1a3cf2a 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -68,7 +68,7 @@ use crate::{ region_infer::TypeTest, type_check::free_region_relations::{CreateResult, UniversalRegionRelations}, universal_regions::{DefiningTy, UniversalRegions}, - BorrowckInferCtxt, Upvar, + BorrowckInferCtxt, }; macro_rules! 
span_mirbug { @@ -138,7 +138,7 @@ pub(crate) fn type_check<'mir, 'tcx>( flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>, move_data: &MoveData<'tcx>, elements: &Rc<RegionValueElements>, - upvars: &[Upvar<'tcx>], + upvars: &[&ty::CapturedPlace<'tcx>], use_polonius: bool, ) -> MirTypeckResults<'tcx> { let implicit_region_bound = ty::Region::new_var(infcx.tcx, universal_regions.fr_fn_body); @@ -857,7 +857,7 @@ struct BorrowCheckContext<'a, 'tcx> { all_facts: &'a mut Option<AllFacts>, borrow_set: &'a BorrowSet<'tcx>, pub(crate) constraints: &'a mut MirTypeckRegionConstraints<'tcx>, - upvars: &'a [Upvar<'tcx>], + upvars: &'a [&'a ty::CapturedPlace<'tcx>], /// The set of loans that are live at a given point in the CFG, filled in by `liveness::trace`, /// when using `-Zpolonius=next`. @@ -2678,8 +2678,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let typeck_root_args = ty::GenericArgs::identity_for_item(tcx, typeck_root_def_id); let parent_args = match tcx.def_kind(def_id) { + DefKind::Closure if tcx.is_coroutine(def_id.to_def_id()) => { + args.as_coroutine().parent_args() + } DefKind::Closure => args.as_closure().parent_args(), - DefKind::Coroutine => args.as_coroutine().parent_args(), DefKind::InlineConst => args.as_inline_const().parent_args(), other => bug!("unexpected item {:?}", other), }; diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index c3c2f536362..2b83c787139 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -737,18 +737,6 @@ trait InferCtxtExt<'tcx> { ) -> T where T: TypeFoldable<TyCtxt<'tcx>>; - - fn instantiate_bound_regions_with_nll_infer_vars_in_recursive_scope( - &self, - mir_def_id: LocalDefId, - indices: &mut UniversalRegionIndices<'tcx>, - ); - - fn instantiate_bound_regions_with_nll_infer_vars_in_item( - &self, - mir_def_id: LocalDefId, - indices: &mut UniversalRegionIndices<'tcx>, - ); } impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> { @@ -799,54 +787,6 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> { }); value } - - /// Finds late-bound regions that do not appear in the parameter listing and adds them to the - /// indices vector. Typically, we identify late-bound regions as we process the inputs and - /// outputs of the closure/function. However, sometimes there are late-bound regions which do - /// not appear in the fn parameters but which are nonetheless in scope. The simplest case of - /// this are unused functions, like fn foo<'a>() { } (see e.g., #51351). Despite not being used, - /// users can still reference these regions (e.g., let x: &'a u32 = &22;), so we need to create - /// entries for them and store them in the indices map. This code iterates over the complete - /// set of late-bound regions and checks for any that we have not yet seen, adding them to the - /// inputs vector. 
- #[instrument(skip(self, indices))] - fn instantiate_bound_regions_with_nll_infer_vars_in_recursive_scope( - &self, - mir_def_id: LocalDefId, - indices: &mut UniversalRegionIndices<'tcx>, - ) { - for_each_late_bound_region_in_recursive_scope(self.tcx, mir_def_id, |r| { - debug!(?r); - if !indices.indices.contains_key(&r) { - let region_vid = { - let name = r.get_name_or_anon(); - self.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) - }; - - debug!(?region_vid); - indices.insert_late_bound_region(r, region_vid.as_var()); - } - }); - } - - #[instrument(skip(self, indices))] - fn instantiate_bound_regions_with_nll_infer_vars_in_item( - &self, - mir_def_id: LocalDefId, - indices: &mut UniversalRegionIndices<'tcx>, - ) { - for_each_late_bound_region_in_item(self.tcx, mir_def_id, |r| { - debug!(?r); - if !indices.indices.contains_key(&r) { - let region_vid = { - let name = r.get_name_or_anon(); - self.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) - }; - - indices.insert_late_bound_region(r, region_vid.as_var()); - } - }); - } } impl<'tcx> UniversalRegionIndices<'tcx> { diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs index f2be6f27ff6..51df14df644 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs @@ -373,10 +373,7 @@ fn add_unused_functions(cx: &CodegenCx<'_, '_>) { // just "functions", like consts, statics, etc. Filter those out. // If `ignore_unused_generics` was specified, filter out any // generic functions from consideration as well. - if !matches!( - kind, - DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Coroutine - ) { + if !matches!(kind, DefKind::Fn | DefKind::AssocFn | DefKind::Closure) { return None; } if ignore_unused_generics && tcx.generics_of(def_id).requires_monomorphization(tcx) { diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs index 712b6ed5333..624ce6d8813 100644 --- a/compiler/rustc_codegen_llvm/src/type_of.rs +++ b/compiler/rustc_codegen_llvm/src/type_of.rs @@ -9,7 +9,7 @@ use rustc_middle::ty::{self, Ty, TypeVisitableExt}; use rustc_target::abi::HasDataLayout; use rustc_target::abi::{Abi, Align, FieldsShape}; use rustc_target::abi::{Int, Pointer, F32, F64}; -use rustc_target::abi::{PointeeInfo, Scalar, Size, TyAbiInterface, Variants}; +use rustc_target::abi::{Scalar, Size, Variants}; use smallvec::{smallvec, SmallVec}; use std::fmt::Write; @@ -184,7 +184,6 @@ pub trait LayoutLlvmExt<'tcx> { immediate: bool, ) -> &'a Type; fn llvm_field_index<'a>(&self, cx: &CodegenCx<'a, 'tcx>, index: usize) -> u64; - fn pointee_info_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, offset: Size) -> Option<PointeeInfo>; fn scalar_copy_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> Option<&'a Type>; } @@ -356,20 +355,6 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> { } } - // FIXME(eddyb) this having the same name as `TyAndLayout::pointee_info_at` - // (the inherent method, which is lacking this caching logic) can result in - // the uncached version being called - not wrong, but potentially inefficient. 
- fn pointee_info_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, offset: Size) -> Option<PointeeInfo> { - if let Some(&pointee) = cx.pointee_infos.borrow().get(&(self.ty, offset)) { - return pointee; - } - - let result = Ty::ty_and_layout_pointee_info_at(*self, cx, offset); - - cx.pointee_infos.borrow_mut().insert((self.ty, offset), result); - result - } - fn scalar_copy_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> Option<&'a Type> { debug_assert!(self.is_sized()); diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index c65a0e968a5..bfd0a458884 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -216,6 +216,7 @@ impl CodegenResults { sess: &Session, rlink_file: &Path, codegen_results: &CodegenResults, + outputs: &OutputFilenames, ) -> Result<usize, io::Error> { let mut encoder = FileEncoder::new(rlink_file)?; encoder.emit_raw_bytes(RLINK_MAGIC); @@ -224,10 +225,14 @@ impl CodegenResults { encoder.emit_raw_bytes(&RLINK_VERSION.to_be_bytes()); encoder.emit_str(sess.cfg_version); Encodable::encode(codegen_results, &mut encoder); - encoder.finish() + Encodable::encode(outputs, &mut encoder); + encoder.finish().map_err(|(_path, err)| err) } - pub fn deserialize_rlink(sess: &Session, data: Vec<u8>) -> Result<Self, CodegenErrors> { + pub fn deserialize_rlink( + sess: &Session, + data: Vec<u8>, + ) -> Result<(Self, OutputFilenames), CodegenErrors> { // The Decodable machinery is not used here because it panics if the input data is invalid // and because its internal representation may change. if !data.starts_with(RLINK_MAGIC) { @@ -256,6 +261,7 @@ impl CodegenResults { } let codegen_results = CodegenResults::decode(&mut decoder); - Ok(codegen_results) + let outputs = OutputFilenames::decode(&mut decoder); + Ok((codegen_results, outputs)) } } diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index e7cc3ae4d55..6af11ce8479 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -401,9 +401,7 @@ fn run_compiler( Ok(()) })?; - // Make sure the `output_filenames` query is run for its side - // effects of writing the dep-info and reporting errors. - queries.global_ctxt()?.enter(|tcx| tcx.output_filenames(())); + queries.write_dep_info()?; } else { let krate = queries.parse()?; pretty::print( @@ -431,9 +429,7 @@ fn run_compiler( return early_exit(); } - // Make sure the `output_filenames` query is run for its side - // effects of writing the dep-info and reporting errors. 
- queries.global_ctxt()?.enter(|tcx| tcx.output_filenames(())); + queries.write_dep_info()?; if sess.opts.output_types.contains_key(&OutputType::DepInfo) && sess.opts.output_types.len() == 1 @@ -648,12 +644,11 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) { fn process_rlink(sess: &Session, compiler: &interface::Compiler) { assert!(sess.opts.unstable_opts.link_only); if let Input::File(file) = &sess.io.input { - let outputs = compiler.build_output_filenames(sess, &[]); let rlink_data = fs::read(file).unwrap_or_else(|err| { sess.emit_fatal(RlinkUnableToRead { err }); }); - let codegen_results = match CodegenResults::deserialize_rlink(sess, rlink_data) { - Ok(codegen) => codegen, + let (codegen_results, outputs) = match CodegenResults::deserialize_rlink(sess, rlink_data) { + Ok((codegen, outputs)) => (codegen, outputs), Err(err) => { match err { CodegenErrors::WrongFileType => sess.emit_fatal(RLinkWrongFileType), diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs index e901eba35b7..fedd380cada 100644 --- a/compiler/rustc_hir/src/def.rs +++ b/compiler/rustc_hir/src/def.rs @@ -114,7 +114,6 @@ pub enum DefKind { of_trait: bool, }, Closure, - Coroutine, } impl DefKind { @@ -157,7 +156,6 @@ impl DefKind { DefKind::Field => "field", DefKind::Impl { .. } => "implementation", DefKind::Closure => "closure", - DefKind::Coroutine => "coroutine", DefKind::ExternCrate => "extern crate", DefKind::GlobalAsm => "global assembly block", } @@ -216,7 +214,6 @@ impl DefKind { | DefKind::LifetimeParam | DefKind::ExternCrate | DefKind::Closure - | DefKind::Coroutine | DefKind::Use | DefKind::ForeignMod | DefKind::GlobalAsm @@ -226,7 +223,7 @@ impl DefKind { #[inline] pub fn is_fn_like(self) -> bool { - matches!(self, DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Coroutine) + matches!(self, DefKind::Fn | DefKind::AssocFn | DefKind::Closure) } /// Whether `query get_codegen_attrs` should be used with this definition. @@ -236,7 +233,6 @@ impl DefKind { | DefKind::AssocFn | DefKind::Ctor(..) 
| DefKind::Closure - | DefKind::Coroutine | DefKind::Static(_) => true, DefKind::Mod | DefKind::Struct diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index b279564ed45..fbed6f33e59 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -1449,7 +1449,7 @@ fn opaque_type_cycle_error( label_match(capture.place.ty(), capture.get_path_span(tcx)); } // Label any coroutine locals that capture the opaque - if let DefKind::Coroutine = tcx.def_kind(closure_def_id) + if tcx.is_coroutine(closure_def_id) && let Some(coroutine_layout) = tcx.mir_coroutine_witnesses(closure_def_id) { for interior_ty in &coroutine_layout.field_tys { @@ -1470,7 +1470,7 @@ pub(super) fn check_coroutine_obligations( tcx: TyCtxt<'_>, def_id: LocalDefId, ) -> Result<(), ErrorGuaranteed> { - debug_assert!(matches!(tcx.def_kind(def_id), DefKind::Coroutine)); + debug_assert!(tcx.is_coroutine(def_id.to_def_id())); let typeck = tcx.typeck(def_id); let param_env = tcx.param_env(def_id); diff --git a/compiler/rustc_hir_typeck/src/mem_categorization.rs b/compiler/rustc_hir_typeck/src/mem_categorization.rs index b25830675b2..94da4bfcdc4 100644 --- a/compiler/rustc_hir_typeck/src/mem_categorization.rs +++ b/compiler/rustc_hir_typeck/src/mem_categorization.rs @@ -66,25 +66,18 @@ use rustc_trait_selection::infer::InferCtxtExt; pub(crate) trait HirNode { fn hir_id(&self) -> hir::HirId; - fn span(&self) -> Span; } impl HirNode for hir::Expr<'_> { fn hir_id(&self) -> hir::HirId { self.hir_id } - fn span(&self) -> Span { - self.span - } } impl HirNode for hir::Pat<'_> { fn hir_id(&self) -> hir::HirId { self.hir_id } - fn span(&self) -> Span { - self.span - } } #[derive(Clone)] diff --git a/compiler/rustc_incremental/src/persist/file_format.rs b/compiler/rustc_incremental/src/persist/file_format.rs index 25bf83f64a0..b5742b97d02 100644 --- a/compiler/rustc_incremental/src/persist/file_format.rs +++ b/compiler/rustc_incremental/src/persist/file_format.rs @@ -80,8 +80,8 @@ where ); debug!("save: data written to disk successfully"); } - Err(err) => { - sess.emit_err(errors::WriteNew { name, path: path_buf, err }); + Err((path, err)) => { + sess.emit_err(errors::WriteNew { name, path, err }); } } } diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs index fa21320be26..d6320d680e7 100644 --- a/compiler/rustc_incremental/src/persist/save.rs +++ b/compiler/rustc_incremental/src/persist/save.rs @@ -50,9 +50,6 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) { join( move || { sess.time("incr_comp_persist_dep_graph", || { - if let Err(err) = tcx.dep_graph.encode(&tcx.sess.prof) { - sess.emit_err(errors::WriteDepGraph { path: &staging_dep_graph_path, err }); - } if let Err(err) = fs::rename(&staging_dep_graph_path, &dep_graph_path) { sess.emit_err(errors::MoveDepGraph { from: &staging_dep_graph_path, diff --git a/compiler/rustc_infer/src/infer/nll_relate/mod.rs b/compiler/rustc_infer/src/infer/nll_relate/mod.rs index c8049164391..ee6a3fd0c82 100644 --- a/compiler/rustc_infer/src/infer/nll_relate/mod.rs +++ b/compiler/rustc_infer/src/infer/nll_relate/mod.rs @@ -113,9 +113,6 @@ pub trait TypeRelatingDelegate<'tcx> { fn forbid_inference_vars() -> bool; } -#[derive(Copy, Clone)] -struct UniversallyQuantified(bool); - impl<'me, 'tcx, D> TypeRelating<'me, 'tcx, D> where D: TypeRelatingDelegate<'tcx>, diff --git a/compiler/rustc_interface/Cargo.toml b/compiler/rustc_interface/Cargo.toml index 
fd587e53f91..319e8175809 100644 --- a/compiler/rustc_interface/Cargo.toml +++ b/compiler/rustc_interface/Cargo.toml @@ -40,6 +40,7 @@ rustc_privacy = { path = "../rustc_privacy" } rustc_query_impl = { path = "../rustc_query_impl" } rustc_query_system = { path = "../rustc_query_system" } rustc_resolve = { path = "../rustc_resolve" } +rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index 91fd4b4a1d0..8a6d8d3d42e 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -1,7 +1,7 @@ use crate::util; use rustc_ast::token; -use rustc_ast::{self as ast, LitKind, MetaItemKind}; +use rustc_ast::{LitKind, MetaItemKind}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::defer; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; @@ -15,9 +15,7 @@ use rustc_middle::{bug, ty}; use rustc_parse::maybe_new_parser_from_source_str; use rustc_query_impl::QueryCtxt; use rustc_query_system::query::print_query_stack; -use rustc_session::config::{ - self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName, OutputFilenames, -}; +use rustc_session::config::{self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName}; use rustc_session::filesearch::sysroot_candidates; use rustc_session::parse::ParseSess; use rustc_session::{lint, CompilerIO, EarlyErrorHandler, Session}; @@ -43,16 +41,6 @@ pub struct Compiler { pub(crate) override_queries: Option<fn(&Session, &mut Providers)>, } -impl Compiler { - pub fn build_output_filenames( - &self, - sess: &Session, - attrs: &[ast::Attribute], - ) -> OutputFilenames { - util::build_output_filenames(attrs, sess) - } -} - /// Converts strings provided as `--cfg [cfgspec]` into a `Cfg`. pub(crate) fn parse_cfg(handler: &EarlyErrorHandler, cfgs: Vec<String>) -> Cfg { cfgs.into_iter() diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 99bea647bd5..88212d164bc 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -39,7 +39,7 @@ use std::any::Any; use std::ffi::OsString; use std::io::{self, BufWriter, Write}; use std::path::{Path, PathBuf}; -use std::sync::{Arc, LazyLock}; +use std::sync::LazyLock; use std::{env, fs, iter}; pub fn parse<'a>(sess: &'a Session) -> PResult<'a, ast::Crate> { @@ -553,13 +553,17 @@ fn resolver_for_lowering<'tcx>( tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, Lrc::new(krate)))) } -fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> { +pub(crate) fn write_dep_info(tcx: TyCtxt<'_>) { + // Make sure name resolution and macro expansion is run for + // the side-effect of providing a complete set of all + // accessed files and env vars. 
+ let _ = tcx.resolver_for_lowering(()); + let sess = tcx.sess; - let _timer = sess.timer("prepare_outputs"); - let (_, krate) = &*tcx.resolver_for_lowering(()).borrow(); + let _timer = sess.timer("write_dep_info"); let crate_name = tcx.crate_name(LOCAL_CRATE); - let outputs = util::build_output_filenames(&krate.attrs, sess); + let outputs = tcx.output_filenames(()); let output_paths = generated_output_paths(tcx, &outputs, sess.io.output_file.is_some(), crate_name); @@ -596,15 +600,12 @@ fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> { } } } - - outputs.into() } pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| { let providers = &mut Providers::default(); providers.analysis = analysis; providers.hir_crate = rustc_ast_lowering::lower_to_hir; - providers.output_filenames = output_filenames; providers.resolver_for_lowering = resolver_for_lowering; providers.early_lint_checks = early_lint_checks; proc_macro_decls::provide(providers); @@ -756,7 +757,7 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> { }); tcx.hir().par_body_owners(|def_id| { - if let rustc_hir::def::DefKind::Coroutine = tcx.def_kind(def_id) { + if tcx.is_coroutine(def_id.to_def_id()) { tcx.ensure().mir_coroutine_witnesses(def_id); tcx.ensure().check_coroutine_obligations(def_id); } diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs index 1c65cf19cde..b7cd5468a00 100644 --- a/compiler/rustc_interface/src/queries.rs +++ b/compiler/rustc_interface/src/queries.rs @@ -1,6 +1,6 @@ use crate::errors::{FailedWritingFile, RustcErrorFatal, RustcErrorUnexpectedAnnotation}; use crate::interface::{Compiler, Result}; -use crate::{passes, util}; +use crate::{errors, passes, util}; use rustc_ast as ast; use rustc_codegen_ssa::traits::CodegenBackend; @@ -15,6 +15,7 @@ use rustc_metadata::creader::CStore; use rustc_middle::arena::Arena; use rustc_middle::dep_graph::DepGraph; use rustc_middle::ty::{GlobalCtxt, TyCtxt}; +use rustc_serialize::opaque::FileEncodeResult; use rustc_session::config::{self, CrateType, OutputFilenames, OutputType}; use rustc_session::cstore::Untracked; use rustc_session::output::find_crate_name; @@ -84,7 +85,6 @@ pub struct Queries<'tcx> { hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>, parse: Query<ast::Crate>, - pre_configure: Query<(ast::Crate, ast::AttrVec)>, // This just points to what's in `gcx_cell`. gcx: Query<&'tcx GlobalCtxt<'tcx>>, } @@ -97,23 +97,26 @@ impl<'tcx> Queries<'tcx> { arena: WorkerLocal::new(|_| Arena::default()), hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()), parse: Default::default(), - pre_configure: Default::default(), gcx: Default::default(), } } + pub fn finish(&self) -> FileEncodeResult { + if let Some(gcx) = self.gcx_cell.get() { gcx.finish() } else { Ok(0) } + } + pub fn parse(&self) -> Result<QueryResult<'_, ast::Crate>> { self.parse.compute(|| { passes::parse(&self.compiler.sess).map_err(|mut parse_error| parse_error.emit()) }) } - #[deprecated = "pre_configure may be made private in the future. 
If you need it please open an issue with your use case."] - pub fn pre_configure(&self) -> Result<QueryResult<'_, (ast::Crate, ast::AttrVec)>> { - self.pre_configure.compute(|| { + pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> { + self.gcx.compute(|| { + let sess = &self.compiler.sess; + let mut krate = self.parse()?.steal(); - let sess = &self.compiler.sess; rustc_builtin_macros::cmdline_attrs::inject( &mut krate, &sess.parse_sess, @@ -122,15 +125,6 @@ impl<'tcx> Queries<'tcx> { let pre_configured_attrs = rustc_expand::config::pre_configure_attrs(sess, &krate.attrs); - Ok((krate, pre_configured_attrs)) - }) - } - - pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> { - self.gcx.compute(|| { - let sess = &self.compiler.sess; - #[allow(deprecated)] - let (krate, pre_configured_attrs) = self.pre_configure()?.steal(); // parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches. let crate_name = find_crate_name(sess, &pre_configured_attrs); @@ -141,6 +135,7 @@ impl<'tcx> Queries<'tcx> { sess.opts.cg.metadata.clone(), sess.cfg_version, ); + let outputs = util::build_output_filenames(&pre_configured_attrs, sess); let dep_graph = setup_dep_graph(sess, crate_name, stable_crate_id)?; let cstore = FreezeLock::new(Box::new(CStore::new( @@ -175,11 +170,19 @@ impl<'tcx> Queries<'tcx> { crate_name, ))); feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs)))); + feed.output_filenames(Arc::new(outputs)); }); Ok(qcx) }) } + pub fn write_dep_info(&'tcx self) -> Result<()> { + self.global_ctxt()?.enter(|tcx| { + passes::write_dep_info(tcx); + }); + Ok(()) + } + /// Check for the `#[rustc_error]` annotation, which forces an error in codegen. This is used /// to write UI tests that actually test that compilation succeeds without reporting /// an error. @@ -279,8 +282,13 @@ impl Linker { if sess.opts.unstable_opts.no_link { let rlink_file = self.output_filenames.with_extension(config::RLINK_EXT); - CodegenResults::serialize_rlink(sess, &rlink_file, &codegen_results) - .map_err(|error| sess.emit_fatal(FailedWritingFile { path: &rlink_file, error }))?; + CodegenResults::serialize_rlink( + sess, + &rlink_file, + &codegen_results, + &*self.output_filenames, + ) + .map_err(|error| sess.emit_fatal(FailedWritingFile { path: &rlink_file, error }))?; return Ok(()); } @@ -317,6 +325,9 @@ impl Compiler { // The timer's lifetime spans the dropping of `queries`, which contains // the global context. 
_timer = Some(self.sess.timer("free_global_ctxt")); + if let Err((path, error)) = queries.finish() { + self.sess.emit_err(errors::FailedWritingFile { path: &path, error }); + } ret } diff --git a/compiler/rustc_metadata/messages.ftl b/compiler/rustc_metadata/messages.ftl index d1815717e22..44b235a6d3d 100644 --- a/compiler/rustc_metadata/messages.ftl +++ b/compiler/rustc_metadata/messages.ftl @@ -63,11 +63,8 @@ metadata_extern_location_not_file = metadata_fail_create_file_encoder = failed to create file encoder: {$err} -metadata_fail_seek_file = - failed to seek the file: {$err} - metadata_fail_write_file = - failed to write to the file: {$err} + failed to write to `{$path}`: {$err} metadata_failed_copy_to_stdout = failed to copy {$filename} to stdout: {$err} diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs index 70daee291e7..edc8d8532d3 100644 --- a/compiler/rustc_metadata/src/errors.rs +++ b/compiler/rustc_metadata/src/errors.rs @@ -308,14 +308,9 @@ pub struct FailCreateFileEncoder { } #[derive(Diagnostic)] -#[diag(metadata_fail_seek_file)] -pub struct FailSeekFile { - pub err: Error, -} - -#[derive(Diagnostic)] #[diag(metadata_fail_write_file)] -pub struct FailWriteFile { +pub struct FailWriteFile<'a> { + pub path: &'a Path, pub err: Error, } diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 7c5d0eaf927..765bb7a362e 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1,4 +1,4 @@ -use crate::errors::{FailCreateFileEncoder, FailSeekFile, FailWriteFile}; +use crate::errors::{FailCreateFileEncoder, FailWriteFile}; use crate::rmeta::def_path_hash_map::DefPathHashMapRef; use crate::rmeta::table::TableBuilder; use crate::rmeta::*; @@ -42,6 +42,7 @@ use rustc_span::symbol::{sym, Symbol}; use rustc_span::{self, ExternalSource, FileName, SourceFile, Span, SpanData, SyntaxContext}; use std::borrow::Borrow; use std::collections::hash_map::Entry; +use std::fs::File; use std::hash::Hash; use std::io::{Read, Seek, Write}; use std::num::NonZeroUsize; @@ -856,8 +857,7 @@ fn should_encode_span(def_kind: DefKind) -> bool { | DefKind::OpaqueTy | DefKind::Field | DefKind::Impl { .. } - | DefKind::Closure - | DefKind::Coroutine => true, + | DefKind::Closure => true, DefKind::ForeignMod | DefKind::GlobalAsm => false, } } @@ -897,8 +897,7 @@ fn should_encode_attrs(def_kind: DefKind) -> bool { | DefKind::InlineConst | DefKind::OpaqueTy | DefKind::LifetimeParam - | DefKind::GlobalAsm - | DefKind::Coroutine => false, + | DefKind::GlobalAsm => false, } } @@ -933,8 +932,7 @@ fn should_encode_expn_that_defined(def_kind: DefKind) -> bool { | DefKind::Field | DefKind::LifetimeParam | DefKind::GlobalAsm - | DefKind::Closure - | DefKind::Coroutine => false, + | DefKind::Closure => false, } } @@ -969,7 +967,6 @@ fn should_encode_visibility(def_kind: DefKind) -> bool { | DefKind::GlobalAsm | DefKind::Impl { .. } | DefKind::Closure - | DefKind::Coroutine | DefKind::ExternCrate => false, } } @@ -1005,7 +1002,6 @@ fn should_encode_stability(def_kind: DefKind) -> bool { | DefKind::InlineConst | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Coroutine | DefKind::ExternCrate => false, } } @@ -1048,6 +1044,8 @@ fn should_encode_mir( | DefKind::AssocConst | DefKind::Static(..) | DefKind::Const => (true, false), + // Coroutines require optimized MIR to compute layout. 
+ DefKind::Closure if tcx.is_coroutine(def_id.to_def_id()) => (false, true), // Full-fledged functions + closures DefKind::AssocFn | DefKind::Fn | DefKind::Closure => { let generics = tcx.generics_of(def_id); @@ -1061,8 +1059,6 @@ fn should_encode_mir( || tcx.is_const_default_method(def_id.to_def_id()); (is_const_fn, opt) } - // Coroutines require optimized MIR to compute layout. - DefKind::Coroutine => (false, true), // The others don't have MIR. _ => (false, false), } @@ -1098,7 +1094,6 @@ fn should_encode_variances<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, def_kind: Def | DefKind::InlineConst | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Coroutine | DefKind::ExternCrate => false, DefKind::TyAlias => tcx.type_alias_is_lazy(def_id), } @@ -1127,8 +1122,7 @@ fn should_encode_generics(def_kind: DefKind) -> bool { | DefKind::Impl { .. } | DefKind::Field | DefKind::TyParam - | DefKind::Closure - | DefKind::Coroutine => true, + | DefKind::Closure => true, DefKind::Mod | DefKind::ForeignMod | DefKind::ConstParam @@ -1157,7 +1151,6 @@ fn should_encode_type(tcx: TyCtxt<'_>, def_id: LocalDefId, def_kind: DefKind) -> | DefKind::AssocFn | DefKind::AssocConst | DefKind::Closure - | DefKind::Coroutine | DefKind::ConstParam | DefKind::AnonConst | DefKind::InlineConst => true, @@ -1218,7 +1211,6 @@ fn should_encode_fn_sig(def_kind: DefKind) -> bool { | DefKind::Impl { .. } | DefKind::AssocConst | DefKind::Closure - | DefKind::Coroutine | DefKind::ConstParam | DefKind::AnonConst | DefKind::InlineConst @@ -1257,7 +1249,6 @@ fn should_encode_constness(def_kind: DefKind) -> bool { | DefKind::OpaqueTy | DefKind::Impl { of_trait: false } | DefKind::ForeignTy - | DefKind::Coroutine | DefKind::ConstParam | DefKind::InlineConst | DefKind::AssocTy @@ -1292,7 +1283,6 @@ fn should_encode_const(def_kind: DefKind) -> bool { | DefKind::Impl { .. } | DefKind::AssocFn | DefKind::Closure - | DefKind::Coroutine | DefKind::ConstParam | DefKind::AssocTy | DefKind::TyParam @@ -1452,8 +1442,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.encode_info_for_assoc_item(def_id); } } - if let DefKind::Coroutine = def_kind { - let data = self.tcx.coroutine_kind(def_id).unwrap(); + if def_kind == DefKind::Closure + && let Some(data) = self.tcx.coroutine_kind(def_id) + { record!(self.tables.coroutine_kind[def_id] <- data); } if let DefKind::Enum | DefKind::Struct | DefKind::Union = def_kind { @@ -1635,7 +1626,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { record!(self.tables.closure_saved_names_of_captured_variables[def_id.to_def_id()] <- tcx.closure_saved_names_of_captured_variables(def_id)); - if let DefKind::Coroutine = self.tcx.def_kind(def_id) + if self.tcx.is_coroutine(def_id.to_def_id()) && let Some(witnesses) = tcx.mir_coroutine_witnesses(def_id) { record!(self.tables.mir_coroutine_witnesses[def_id.to_def_id()] <- witnesses); @@ -1662,7 +1653,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } record!(self.tables.promoted_mir[def_id.to_def_id()] <- tcx.promoted_mir(def_id)); - if let DefKind::Coroutine = self.tcx.def_kind(def_id) + if self.tcx.is_coroutine(def_id.to_def_id()) && let Some(witnesses) = tcx.mir_coroutine_witnesses(def_id) { record!(self.tables.mir_coroutine_witnesses[def_id.to_def_id()] <- witnesses); @@ -2255,25 +2246,34 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>, path: &Path) { // culminating in the `CrateRoot` which points to all of it. let root = ecx.encode_crate_root(); - ecx.opaque.flush(); + // Make sure we report any errors from writing to the file. 
+ // If we forget this, compilation can succeed with an incomplete rmeta file, + // causing an ICE when the rmeta file is read by another compilation. + if let Err((path, err)) = ecx.opaque.finish() { + tcx.sess.emit_err(FailWriteFile { path: &path, err }); + } - let mut file = ecx.opaque.file(); + let file = ecx.opaque.file(); + if let Err(err) = encode_root_position(file, root.position.get()) { + tcx.sess.emit_err(FailWriteFile { path: ecx.opaque.path(), err }); + } + + // Record metadata size for self-profiling + tcx.prof.artifact_size("crate_metadata", "crate_metadata", file.metadata().unwrap().len()); +} + +fn encode_root_position(mut file: &File, pos: usize) -> Result<(), std::io::Error> { // We will return to this position after writing the root position. let pos_before_seek = file.stream_position().unwrap(); // Encode the root position. let header = METADATA_HEADER.len(); - file.seek(std::io::SeekFrom::Start(header as u64)) - .unwrap_or_else(|err| tcx.sess.emit_fatal(FailSeekFile { err })); - let pos = root.position.get(); - file.write_all(&[(pos >> 24) as u8, (pos >> 16) as u8, (pos >> 8) as u8, (pos >> 0) as u8]) - .unwrap_or_else(|err| tcx.sess.emit_fatal(FailWriteFile { err })); + file.seek(std::io::SeekFrom::Start(header as u64))?; + file.write_all(&[(pos >> 24) as u8, (pos >> 16) as u8, (pos >> 8) as u8, (pos >> 0) as u8])?; // Return to the position where we are before writing the root position. - file.seek(std::io::SeekFrom::Start(pos_before_seek)).unwrap(); - - // Record metadata size for self-profiling - tcx.prof.artifact_size("crate_metadata", "crate_metadata", file.metadata().unwrap().len()); + file.seek(std::io::SeekFrom::Start(pos_before_seek))?; + Ok(()) } pub fn provide(providers: &mut Providers) { diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 96cf668b7da..7cdbcd9193c 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -167,7 +167,6 @@ fixed_size_enum! { ( Impl { of_trait: false } ) ( Impl { of_trait: true } ) ( Closure ) - ( Coroutine ) ( Static(ast::Mutability::Not) ) ( Static(ast::Mutability::Mut) ) ( Ctor(CtorOf::Struct, CtorKind::Fn) ) diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index 9103187acf3..7bf0da2f2f3 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -231,8 +231,7 @@ impl<'hir> Map<'hir> { Node::ConstBlock(_) => DefKind::InlineConst, Node::Field(_) => DefKind::Field, Node::Expr(expr) => match expr.kind { - ExprKind::Closure(Closure { movability: None, .. }) => DefKind::Closure, - ExprKind::Closure(Closure { movability: Some(_), .. }) => DefKind::Coroutine, + ExprKind::Closure(_) => DefKind::Closure, _ => bug!("def_kind: unsupported node: {}", self.node_to_string(hir_id)), }, Node::GenericParam(param) => match param.kind { @@ -436,7 +435,7 @@ impl<'hir> Map<'hir> { } DefKind::InlineConst => BodyOwnerKind::Const { inline: true }, DefKind::Ctor(..) 
| DefKind::Fn | DefKind::AssocFn => BodyOwnerKind::Fn, - DefKind::Closure | DefKind::Coroutine => BodyOwnerKind::Closure, + DefKind::Closure => BodyOwnerKind::Closure, DefKind::Static(mt) => BodyOwnerKind::Static(mt), dk => bug!("{:?} is not a body node: {:?}", def_id, dk), } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index ee6567f6cc4..f37cfe8b0a1 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -25,7 +25,6 @@ use rustc_span::source_map::{SourceMap, StableSourceFileId}; use rustc_span::{BytePos, ExpnData, ExpnHash, Pos, RelativeBytePos, SourceFile, Span}; use rustc_span::{CachingSourceMapView, Symbol}; use std::collections::hash_map::Entry; -use std::io; use std::mem; const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE; @@ -862,7 +861,7 @@ impl<'a, 'tcx> CacheEncoder<'a, 'tcx> { } #[inline] - fn finish(self) -> Result<usize, io::Error> { + fn finish(mut self) -> FileEncodeResult { self.encoder.finish() } } diff --git a/compiler/rustc_middle/src/ty/closure.rs b/compiler/rustc_middle/src/ty/closure.rs index a355007d620..eaf5da130dd 100644 --- a/compiler/rustc_middle/src/ty/closure.rs +++ b/compiler/rustc_middle/src/ty/closure.rs @@ -247,6 +247,13 @@ impl<'tcx> CapturedPlace<'tcx> { .span } } + + pub fn is_by_ref(&self) -> bool { + match self.info.capture_kind { + ty::UpvarCapture::ByValue => false, + ty::UpvarCapture::ByRef(..) => true, + } + } } #[derive(Copy, Clone, Debug, HashStable)] diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index df5ac28130a..4ded61daa5c 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -629,6 +629,10 @@ impl<'tcx> GlobalCtxt<'tcx> { let icx = tls::ImplicitCtxt::new(self); tls::enter_context(&icx, || f(icx.tcx)) } + + pub fn finish(&self) -> FileEncodeResult { + self.dep_graph.finish_encoding(&self.sess.prof) + } } impl<'tcx> TyCtxt<'tcx> { @@ -800,6 +804,10 @@ impl<'tcx> TyCtxt<'tcx> { self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did) } + pub fn is_coroutine(self, def_id: DefId) -> bool { + self.coroutine_kind(def_id).is_some() + } + /// Returns `true` if the node pointed to by `def_id` is a coroutine for an async construct. pub fn coroutine_is_async(self, def_id: DefId) -> bool { matches!(self.coroutine_kind(def_id), Some(hir::CoroutineKind::Async(_))) diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index 77cf3c0041c..cbf1a9900d9 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -550,16 +550,13 @@ impl<'tcx> TyCtxt<'tcx> { /// those are not yet phased out). The parent of the closure's /// `DefId` will also be the context where it appears. pub fn is_closure(self, def_id: DefId) -> bool { - matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Coroutine) + matches!(self.def_kind(def_id), DefKind::Closure) } /// Returns `true` if `def_id` refers to a definition that does not have its own /// type-checking context, i.e. closure, coroutine or inline const. pub fn is_typeck_child(self, def_id: DefId) -> bool { - matches!( - self.def_kind(def_id), - DefKind::Closure | DefKind::Coroutine | DefKind::InlineConst - ) + matches!(self.def_kind(def_id), DefKind::Closure | DefKind::InlineConst) } /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`). 
@@ -732,11 +729,13 @@ impl<'tcx> TyCtxt<'tcx> { pub fn def_kind_descr(self, def_kind: DefKind, def_id: DefId) -> &'static str { match def_kind { DefKind::AssocFn if self.associated_item(def_id).fn_has_self_parameter => "method", - DefKind::Coroutine => match self.coroutine_kind(def_id).unwrap() { - rustc_hir::CoroutineKind::Async(..) => "async closure", - rustc_hir::CoroutineKind::Coroutine => "coroutine", - rustc_hir::CoroutineKind::Gen(..) => "gen closure", - }, + DefKind::Closure if let Some(coroutine_kind) = self.coroutine_kind(def_id) => { + match coroutine_kind { + rustc_hir::CoroutineKind::Async(..) => "async closure", + rustc_hir::CoroutineKind::Coroutine => "coroutine", + rustc_hir::CoroutineKind::Gen(..) => "gen closure", + } + } _ => def_kind.descr(def_id), } } @@ -750,11 +749,13 @@ impl<'tcx> TyCtxt<'tcx> { pub fn def_kind_descr_article(self, def_kind: DefKind, def_id: DefId) -> &'static str { match def_kind { DefKind::AssocFn if self.associated_item(def_id).fn_has_self_parameter => "a", - DefKind::Coroutine => match self.coroutine_kind(def_id).unwrap() { - rustc_hir::CoroutineKind::Async(..) => "an", - rustc_hir::CoroutineKind::Coroutine => "a", - rustc_hir::CoroutineKind::Gen(..) => "a", - }, + DefKind::Closure if let Some(coroutine_kind) = self.coroutine_kind(def_id) => { + match coroutine_kind { + rustc_hir::CoroutineKind::Async(..) => "an", + rustc_hir::CoroutineKind::Coroutine => "a", + rustc_hir::CoroutineKind::Gen(..) => "a", + } + } _ => def_kind.article(), } } diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs index ad06db91e37..19b6496b102 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/build/mod.rs @@ -638,6 +638,14 @@ fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) - ); (sig.inputs().to_vec(), sig.output(), None) } + DefKind::Closure if coroutine_kind.is_some() => { + let coroutine_ty = tcx.type_of(def_id).instantiate_identity(); + let ty::Coroutine(_, args, _) = coroutine_ty.kind() else { bug!() }; + let args = args.as_coroutine(); + let yield_ty = args.yield_ty(); + let return_ty = args.return_ty(); + (vec![coroutine_ty, args.resume_ty()], return_ty, Some(yield_ty)) + } DefKind::Closure => { let closure_ty = tcx.type_of(def_id).instantiate_identity(); let ty::Closure(_, args) = closure_ty.kind() else { bug!() }; @@ -650,14 +658,6 @@ fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) - }; ([self_ty].into_iter().chain(sig.inputs().to_vec()).collect(), sig.output(), None) } - DefKind::Coroutine => { - let coroutine_ty = tcx.type_of(def_id).instantiate_identity(); - let ty::Coroutine(_, args, _) = coroutine_ty.kind() else { bug!() }; - let args = args.as_coroutine(); - let yield_ty = args.yield_ty(); - let return_ty = args.return_ty(); - (vec![coroutine_ty, args.resume_ty()], return_ty, Some(yield_ty)) - } dk => bug!("{:?} is not a body: {:?}", def_id, dk), }; diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs index 708e0b31b9e..4eba7103c0c 100644 --- a/compiler/rustc_mir_build/src/thir/cx/mod.rs +++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs @@ -37,7 +37,7 @@ pub(crate) fn thir_body( // The resume argument may be missing, in that case we need to provide it here. // It will always be `()` in this case. 
- if tcx.def_kind(owner_def) == DefKind::Coroutine && body.params.is_empty() { + if tcx.is_coroutine(owner_def.to_def_id()) && body.params.is_empty() { cx.thir.params.push(Param { ty: Ty::new_unit(tcx), pat: None, @@ -119,6 +119,17 @@ impl<'tcx> Cx<'tcx> { fn closure_env_param(&self, owner_def: LocalDefId, owner_id: HirId) -> Option<Param<'tcx>> { match self.tcx.def_kind(owner_def) { + DefKind::Closure if self.tcx.is_coroutine(owner_def.to_def_id()) => { + let coroutine_ty = self.typeck_results.node_type(owner_id); + let coroutine_param = Param { + ty: coroutine_ty, + pat: None, + ty_span: None, + self_kind: None, + hir_id: None, + }; + Some(coroutine_param) + } DefKind::Closure => { let closure_ty = self.typeck_results.node_type(owner_id); @@ -148,17 +159,6 @@ impl<'tcx> Cx<'tcx> { Some(env_param) } - DefKind::Coroutine => { - let coroutine_ty = self.typeck_results.node_type(owner_id); - let coroutine_param = Param { - ty: coroutine_ty, - pat: None, - ty_span: None, - self_kind: None, - hir_id: None, - }; - Some(coroutine_param) - } _ => None, } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index d80497fd45f..8b2a96cff41 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -18,17 +18,14 @@ use rustc_hir::pat_util::EnumerateAndAdjustIterator; use rustc_hir::RangeEnd; use rustc_index::Idx; use rustc_middle::mir::interpret::{ErrorHandled, GlobalId, LitToConstError, LitToConstInput}; -use rustc_middle::mir::{self, BorrowKind, Const, Mutability, UserTypeProjection}; +use rustc_middle::mir::{self, BorrowKind, Const, Mutability}; use rustc_middle::thir::{ Ascription, BindingMode, FieldPat, LocalVarId, Pat, PatKind, PatRange, PatRangeBoundary, }; use rustc_middle::ty::layout::IntegerExt; -use rustc_middle::ty::{ - self, AdtDef, CanonicalUserTypeAnnotation, GenericArg, GenericArgsRef, Region, Ty, TyCtxt, - TypeVisitableExt, UserType, -}; +use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::LocalDefId; -use rustc_span::{ErrorGuaranteed, Span, Symbol}; +use rustc_span::{ErrorGuaranteed, Span}; use rustc_target::abi::{FieldIdx, Integer}; use std::cmp::Ordering; @@ -701,146 +698,3 @@ impl<'tcx> UserAnnotatedTyHelpers<'tcx> for PatCtxt<'_, 'tcx> { self.typeck_results } } - -trait PatternFoldable<'tcx>: Sized { - fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - self.super_fold_with(folder) - } - - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self; -} - -trait PatternFolder<'tcx>: Sized { - fn fold_pattern(&mut self, pattern: &Pat<'tcx>) -> Pat<'tcx> { - pattern.super_fold_with(self) - } - - fn fold_pattern_kind(&mut self, kind: &PatKind<'tcx>) -> PatKind<'tcx> { - kind.super_fold_with(self) - } -} - -impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Box<T> { - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - let content: T = (**self).fold_with(folder); - Box::new(content) - } -} - -impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Vec<T> { - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - self.iter().map(|t| t.fold_with(folder)).collect() - } -} - -impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Box<[T]> { - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - self.iter().map(|t| t.fold_with(folder)).collect() - } -} - -impl<'tcx, T: 
PatternFoldable<'tcx>> PatternFoldable<'tcx> for Option<T> { - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - self.as_ref().map(|t| t.fold_with(folder)) - } -} - -macro_rules! ClonePatternFoldableImpls { - (<$lt_tcx:tt> $($ty:ty),+) => { - $( - impl<$lt_tcx> PatternFoldable<$lt_tcx> for $ty { - fn super_fold_with<F: PatternFolder<$lt_tcx>>(&self, _: &mut F) -> Self { - Clone::clone(self) - } - } - )+ - } -} - -ClonePatternFoldableImpls! { <'tcx> - Span, FieldIdx, Mutability, Symbol, LocalVarId, usize, - Region<'tcx>, Ty<'tcx>, BindingMode, AdtDef<'tcx>, - GenericArgsRef<'tcx>, &'tcx GenericArg<'tcx>, UserType<'tcx>, - UserTypeProjection, CanonicalUserTypeAnnotation<'tcx> -} - -impl<'tcx> PatternFoldable<'tcx> for FieldPat<'tcx> { - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) } - } -} - -impl<'tcx> PatternFoldable<'tcx> for Pat<'tcx> { - fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - folder.fold_pattern(self) - } - - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - Pat { - ty: self.ty.fold_with(folder), - span: self.span.fold_with(folder), - kind: self.kind.fold_with(folder), - } - } -} - -impl<'tcx> PatternFoldable<'tcx> for PatKind<'tcx> { - fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - folder.fold_pattern_kind(self) - } - - fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self { - match *self { - PatKind::Wild => PatKind::Wild, - PatKind::Error(e) => PatKind::Error(e), - PatKind::AscribeUserType { - ref subpattern, - ascription: Ascription { ref annotation, variance }, - } => PatKind::AscribeUserType { - subpattern: subpattern.fold_with(folder), - ascription: Ascription { annotation: annotation.fold_with(folder), variance }, - }, - PatKind::Binding { mutability, name, mode, var, ty, ref subpattern, is_primary } => { - PatKind::Binding { - mutability: mutability.fold_with(folder), - name: name.fold_with(folder), - mode: mode.fold_with(folder), - var: var.fold_with(folder), - ty: ty.fold_with(folder), - subpattern: subpattern.fold_with(folder), - is_primary, - } - } - PatKind::Variant { adt_def, args, variant_index, ref subpatterns } => { - PatKind::Variant { - adt_def: adt_def.fold_with(folder), - args: args.fold_with(folder), - variant_index, - subpatterns: subpatterns.fold_with(folder), - } - } - PatKind::Leaf { ref subpatterns } => { - PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) } - } - PatKind::Deref { ref subpattern } => { - PatKind::Deref { subpattern: subpattern.fold_with(folder) } - } - PatKind::Constant { value } => PatKind::Constant { value }, - PatKind::InlineConstant { def, subpattern: ref pattern } => { - PatKind::InlineConstant { def, subpattern: pattern.fold_with(folder) } - } - PatKind::Range(ref range) => PatKind::Range(range.clone()), - PatKind::Slice { ref prefix, ref slice, ref suffix } => PatKind::Slice { - prefix: prefix.fold_with(folder), - slice: slice.fold_with(folder), - suffix: suffix.fold_with(folder), - }, - PatKind::Array { ref prefix, ref slice, ref suffix } => PatKind::Array { - prefix: prefix.fold_with(folder), - slice: slice.fold_with(folder), - suffix: suffix.fold_with(folder), - }, - PatKind::Or { ref pats } => PatKind::Or { pats: pats.fold_with(folder) }, - } - } -} diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs index 
42d33f4f517..9a16003bdc9 100644 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ b/compiler/rustc_mir_transform/src/const_prop.rs @@ -84,8 +84,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp { // FIXME(welseywiser) const prop doesn't work on coroutines because of query cycles // computing their layout. - let is_coroutine = def_kind == DefKind::Coroutine; - if is_coroutine { + if tcx.is_coroutine(def_id.to_def_id()) { trace!("ConstProp skipped for coroutine {:?}", def_id); return; } diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs index da315bb86ac..99eecb567f2 100644 --- a/compiler/rustc_mir_transform/src/const_prop_lint.rs +++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs @@ -61,7 +61,7 @@ impl<'tcx> MirLint<'tcx> for ConstPropLint { // FIXME(welseywiser) const prop doesn't work on coroutines because of query cycles // computing their layout. - if let DefKind::Coroutine = def_kind { + if tcx.is_coroutine(def_id.to_def_id()) { trace!("ConstPropLint skipped for coroutine {:?}", def_id); return; } diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 5c05446a981..fff760ba399 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -395,7 +395,7 @@ fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: LocalDefId) -> Body<'_> { /// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't /// end up missing the source MIR due to stealing happening. fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal<Body<'_>> { - if let DefKind::Coroutine = tcx.def_kind(def) { + if tcx.is_coroutine(def.to_def_id()) { tcx.ensure_with_value().mir_coroutine_witnesses(def); } let mir_borrowck = tcx.mir_borrowck(def); diff --git a/compiler/rustc_monomorphize/src/polymorphize.rs b/compiler/rustc_monomorphize/src/polymorphize.rs index a3b35eab465..91abbb216d6 100644 --- a/compiler/rustc_monomorphize/src/polymorphize.rs +++ b/compiler/rustc_monomorphize/src/polymorphize.rs @@ -131,7 +131,7 @@ fn mark_used_by_default_parameters<'tcx>( unused_parameters: &mut UnusedGenericParams, ) { match tcx.def_kind(def_id) { - DefKind::Closure | DefKind::Coroutine => { + DefKind::Closure => { for param in &generics.params { debug!(?param, "(closure/gen)"); unused_parameters.mark_used(param.index); @@ -248,7 +248,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkUsedGenericParams<'a, 'tcx> { fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) { if local == Local::from_usize(1) { let def_kind = self.tcx.def_kind(self.def_id); - if matches!(def_kind, DefKind::Closure | DefKind::Coroutine) { + if matches!(def_kind, DefKind::Closure) { // Skip visiting the closure/coroutine that is currently being processed. This only // happens because the first argument to the closure is a reference to itself and // that will call `visit_args`, resulting in each generic parameter captured being diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs index b78c8b849f7..b8109d5bb06 100644 --- a/compiler/rustc_privacy/src/lib.rs +++ b/compiler/rustc_privacy/src/lib.rs @@ -653,8 +653,7 @@ impl<'tcx> EmbargoVisitor<'tcx> { | DefKind::Field | DefKind::GlobalAsm | DefKind::Impl { .. 
} - | DefKind::Closure - | DefKind::Coroutine => (), + | DefKind::Closure => (), } } } diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs index d720744a7a7..5acd012ef04 100644 --- a/compiler/rustc_query_system/src/dep_graph/graph.rs +++ b/compiler/rustc_query_system/src/dep_graph/graph.rs @@ -982,7 +982,7 @@ impl<D: Deps> DepGraph<D> { } } - pub fn encode(&self, profiler: &SelfProfilerRef) -> FileEncodeResult { + pub fn finish_encoding(&self, profiler: &SelfProfilerRef) -> FileEncodeResult { if let Some(data) = &self.data { data.current.encoder.steal().finish(profiler) } else { diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 60a28e3a1a4..65901eedb21 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -973,8 +973,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { | DefKind::LifetimeParam | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Impl { .. } - | DefKind::Coroutine, + | DefKind::Impl { .. }, _, ) | Res::Local(..) diff --git a/compiler/rustc_serialize/src/opaque.rs b/compiler/rustc_serialize/src/opaque.rs index 55255439051..cc8d1c25092 100644 --- a/compiler/rustc_serialize/src/opaque.rs +++ b/compiler/rustc_serialize/src/opaque.rs @@ -5,12 +5,13 @@ use std::io::{self, Write}; use std::marker::PhantomData; use std::ops::Range; use std::path::Path; +use std::path::PathBuf; // ----------------------------------------------------------------------------- // Encoder // ----------------------------------------------------------------------------- -pub type FileEncodeResult = Result<usize, io::Error>; +pub type FileEncodeResult = Result<usize, (PathBuf, io::Error)>; /// The size of the buffer in `FileEncoder`. const BUF_SIZE: usize = 8192; @@ -34,6 +35,9 @@ pub struct FileEncoder { // This is used to implement delayed error handling, as described in the // comment on `trait Encoder`. res: Result<(), io::Error>, + path: PathBuf, + #[cfg(debug_assertions)] + finished: bool, } impl FileEncoder { @@ -41,14 +45,18 @@ impl FileEncoder { // File::create opens the file for writing only. When -Zmeta-stats is enabled, the metadata // encoder rewinds the file to inspect what was written. So we need to always open the file // for reading and writing. 
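The hunks above all follow the same refactor: dedicated `DefKind::Coroutine` match arms disappear, coroutines are treated as `DefKind::Closure`, and `tcx.is_coroutine(...)` is consulted only where the distinction still matters. As a rough standalone sketch of that shape (hypothetical `DefKind`/`Ctx` types, not rustc's real ones):

```rust
// Standalone sketch (hypothetical `DefKind`/`Ctx`, not rustc's real types):
// one enum variant is folded into another, and a predicate method recovers
// the distinction only where callers still need it.
enum DefKind {
    Fn,
    // Coroutines no longer get a dedicated variant; they are closures too.
    Closure,
}

struct Ctx {
    // Which closure-kind definitions are actually coroutines.
    coroutines: Vec<usize>,
}

impl Ctx {
    fn def_kind(&self, def_id: usize) -> DefKind {
        if def_id < 10 { DefKind::Closure } else { DefKind::Fn }
    }

    // The information that used to live in the enum becomes a query-style predicate.
    fn is_coroutine(&self, def_id: usize) -> bool {
        self.coroutines.contains(&def_id)
    }
}

fn describe(cx: &Ctx, def_id: usize) -> &'static str {
    match cx.def_kind(def_id) {
        // Old code had a dedicated `DefKind::Coroutine` arm here.
        DefKind::Closure if cx.is_coroutine(def_id) => "enclosing coroutine",
        DefKind::Closure => "enclosing closure",
        DefKind::Fn => "function",
    }
}

fn main() {
    let cx = Ctx { coroutines: vec![1] };
    assert_eq!(describe(&cx, 0), "enclosing closure");
    assert_eq!(describe(&cx, 1), "enclosing coroutine");
    assert_eq!(describe(&cx, 42), "function");
}
```

The upshot of the pattern is that only the call sites that care about the coroutine/closure split pay for the extra check; everything else collapses into the plain closure arm, which is exactly what the const-prop, polymorphize, privacy, and SMIR hunks above do.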
- let file = File::options().read(true).write(true).create(true).truncate(true).open(path)?; + let file = + File::options().read(true).write(true).create(true).truncate(true).open(&path)?; Ok(FileEncoder { buf: vec![0u8; BUF_SIZE].into_boxed_slice().try_into().unwrap(), + path: path.as_ref().into(), buffered: 0, flushed: 0, file, res: Ok(()), + #[cfg(debug_assertions)] + finished: false, }) } @@ -63,6 +71,10 @@ impl FileEncoder { #[cold] #[inline(never)] pub fn flush(&mut self) { + #[cfg(debug_assertions)] + { + self.finished = false; + } if self.res.is_ok() { self.res = self.file.write_all(&self.buf[..self.buffered]); } @@ -74,6 +86,10 @@ impl FileEncoder { &self.file } + pub fn path(&self) -> &Path { + &self.path + } + #[inline] fn buffer_empty(&mut self) -> &mut [u8] { // SAFETY: self.buffered is inbounds as an invariant of the type @@ -97,6 +113,10 @@ impl FileEncoder { #[inline] fn write_all(&mut self, buf: &[u8]) { + #[cfg(debug_assertions)] + { + self.finished = false; + } if let Some(dest) = self.buffer_empty().get_mut(..buf.len()) { dest.copy_from_slice(buf); self.buffered += buf.len(); @@ -121,6 +141,10 @@ impl FileEncoder { /// with one instruction, so while this does in some sense do wasted work, we come out ahead. #[inline] pub fn write_with<const N: usize>(&mut self, visitor: impl FnOnce(&mut [u8; N]) -> usize) { + #[cfg(debug_assertions)] + { + self.finished = false; + } let flush_threshold = const { BUF_SIZE.checked_sub(N).unwrap() }; if std::intrinsics::unlikely(self.buffered > flush_threshold) { self.flush(); @@ -152,20 +176,25 @@ impl FileEncoder { }) } - pub fn finish(mut self) -> Result<usize, io::Error> { + pub fn finish(&mut self) -> FileEncodeResult { self.flush(); + #[cfg(debug_assertions)] + { + self.finished = true; + } match std::mem::replace(&mut self.res, Ok(())) { Ok(()) => Ok(self.position()), - Err(e) => Err(e), + Err(e) => Err((self.path.clone(), e)), } } } +#[cfg(debug_assertions)] impl Drop for FileEncoder { fn drop(&mut self) { - // Likely to be a no-op, because `finish` should have been called and - // it also flushes. But do it just in case. - self.flush(); + if !std::thread::panicking() { + assert!(self.finished); + } } } diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 2d2a3c3d665..d6e7afb7d09 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -580,7 +580,7 @@ pub enum ResolveDocLinks { /// *Do not* switch `BTreeMap` out for an unsorted container type! That would break /// dependency tracking for command-line arguments. Also only hash keys, since tracking /// should only depend on the output types, not the paths they're written to. -#[derive(Clone, Debug, Hash, HashStable_Generic)] +#[derive(Clone, Debug, Hash, HashStable_Generic, Encodable, Decodable)] pub struct OutputTypes(BTreeMap<OutputType, Option<OutFileName>>); impl OutputTypes { @@ -818,7 +818,7 @@ impl Input { } } -#[derive(Clone, Hash, Debug, HashStable_Generic, PartialEq)] +#[derive(Clone, Hash, Debug, HashStable_Generic, PartialEq, Encodable, Decodable)] pub enum OutFileName { Real(PathBuf), Stdout, @@ -890,7 +890,7 @@ impl OutFileName { } } -#[derive(Clone, Hash, Debug, HashStable_Generic)] +#[derive(Clone, Hash, Debug, HashStable_Generic, Encodable, Decodable)] pub struct OutputFilenames { pub out_directory: PathBuf, /// Crate name. Never contains '-'. 
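The `FileEncoder` changes above make two behavioural tweaks: encode errors now carry the output path (`FileEncodeResult = Result<usize, (PathBuf, io::Error)>`), and dropping an encoder without calling `finish()` becomes a debug-assertion failure rather than a silent flush. A minimal standalone sketch of that pattern, using a hypothetical `Encoder` type rather than the real `rustc_serialize` one:

```rust
use std::io::{self, Write};
use std::path::{Path, PathBuf};

// Mirrors the new FileEncodeResult shape: errors carry the path they concern.
type EncodeResult = Result<usize, (PathBuf, io::Error)>;

struct Encoder {
    file: std::fs::File,
    path: PathBuf,
    written: usize,
    res: Result<(), io::Error>,
    #[cfg(debug_assertions)]
    finished: bool,
}

impl Encoder {
    fn new(path: &Path) -> io::Result<Self> {
        Ok(Encoder {
            file: std::fs::File::create(path)?,
            path: path.to_owned(),
            written: 0,
            res: Ok(()),
            #[cfg(debug_assertions)]
            finished: false,
        })
    }

    fn write_all(&mut self, buf: &[u8]) {
        #[cfg(debug_assertions)]
        {
            // Any further write invalidates a previous `finish`.
            self.finished = false;
        }
        if self.res.is_ok() {
            self.res = self.file.write_all(buf);
            if self.res.is_ok() {
                self.written += buf.len();
            }
        }
    }

    // Callers must call this to learn whether writing succeeded; the drop
    // check below enforces that nobody forgets.
    fn finish(&mut self) -> EncodeResult {
        #[cfg(debug_assertions)]
        {
            self.finished = true;
        }
        match std::mem::replace(&mut self.res, Ok(())) {
            Ok(()) => Ok(self.written),
            Err(e) => Err((self.path.clone(), e)),
        }
    }
}

#[cfg(debug_assertions)]
impl Drop for Encoder {
    fn drop(&mut self) {
        // Forgetting `finish` is a bug; make it loud in debug builds.
        if !std::thread::panicking() {
            assert!(self.finished, "Encoder dropped without calling finish()");
        }
    }
}

fn main() -> io::Result<()> {
    let path = std::env::temp_dir().join("encoder_sketch.bin");
    let mut enc = Encoder::new(&path)?;
    enc.write_all(b"hello");
    match enc.finish() {
        Ok(n) => println!("wrote {n} bytes to {}", path.display()),
        Err((p, e)) => eprintln!("failed to write {}: {e}", p.display()),
    }
    Ok(())
}
```

Gating the assertion on `debug_assertions` keeps release builds free of the check while still catching forgotten `finish()` calls during development, and attaching the path to the error lets callers report which file failed without threading extra state around.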
diff --git a/compiler/rustc_smir/src/rustc_smir/mod.rs b/compiler/rustc_smir/src/rustc_smir/mod.rs index 5d903e69c6c..eee587f3b2a 100644 --- a/compiler/rustc_smir/src/rustc_smir/mod.rs +++ b/compiler/rustc_smir/src/rustc_smir/mod.rs @@ -89,7 +89,7 @@ pub(crate) fn new_item_kind(kind: DefKind) -> ItemKind { | DefKind::GlobalAsm => { unreachable!("Not a valid item kind: {kind:?}"); } - DefKind::Closure | DefKind::Coroutine | DefKind::AssocFn | DefKind::Fn => ItemKind::Fn, + DefKind::Closure | DefKind::AssocFn | DefKind::Fn => ItemKind::Fn, DefKind::Const | DefKind::InlineConst | DefKind::AssocConst | DefKind::AnonConst => { ItemKind::Const } diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs index 9f0982d0995..311b94d9e0e 100644 --- a/compiler/rustc_symbol_mangling/src/lib.rs +++ b/compiler/rustc_symbol_mangling/src/lib.rs @@ -234,7 +234,7 @@ fn compute_symbol_name<'tcx>( // and we want to be sure to avoid any symbol conflicts here. let is_globally_shared_function = matches!( tcx.def_kind(instance.def_id()), - DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Coroutine | DefKind::Ctor(..) + DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Ctor(..) ) && matches!( MonoItem::Fn(instance).instantiation_mode(tcx), InstantiationMode::GloballyShared { may_conflict: true } diff --git a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs index 59c979cf437..eb30b4cd85e 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs @@ -37,8 +37,6 @@ pub(super) trait GoalKind<'tcx>: fn trait_ref(self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx>; - fn polarity(self) -> ty::ImplPolarity; - fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self; fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId; diff --git a/compiler/rustc_trait_selection/src/solve/mod.rs b/compiler/rustc_trait_selection/src/solve/mod.rs index 0cf4799f86d..b526fa19c13 100644 --- a/compiler/rustc_trait_selection/src/solve/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/mod.rs @@ -64,8 +64,6 @@ enum GoalEvaluationKind { trait CanonicalResponseExt { fn has_no_inference_or_external_constraints(&self) -> bool; - - fn has_only_region_constraints(&self) -> bool; } impl<'tcx> CanonicalResponseExt for Canonical<'tcx, Response<'tcx>> { @@ -74,11 +72,6 @@ impl<'tcx> CanonicalResponseExt for Canonical<'tcx, Response<'tcx>> { && self.value.var_values.is_identity() && self.value.external_constraints.opaque_types.is_empty() } - - fn has_only_region_constraints(&self) -> bool { - self.value.var_values.is_identity_modulo_regions() - && self.value.external_constraints.opaque_types.is_empty() - } } impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { diff --git a/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs b/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs index 6c29ce492df..37bbf32c768 100644 --- a/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs @@ -101,10 +101,6 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { self.projection_ty.trait_ref(tcx) } - fn polarity(self) -> ty::ImplPolarity { - ty::ImplPolarity::Positive - } - fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self { self.with_self_ty(tcx, self_ty) } diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs 
b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index a0c065dfa03..95712da3c5e 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -24,10 +24,6 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { self.trait_ref } - fn polarity(self) -> ty::ImplPolarity { - self.polarity - } - fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self { self.with_self_ty(tcx, self_ty) } diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index c26b1b618ac..8e97333ad0f 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -29,7 +29,6 @@ use rustc_middle::traits::solve::{CandidateSource, Certainty, Goal}; use rustc_middle::traits::specialization_graph::OverlapMode; use rustc_middle::traits::DefiningAnchor; use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; -use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::visit::{TypeVisitable, TypeVisitableExt}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor}; use rustc_session::lint::builtin::COINDUCTIVE_OVERLAP_IN_COHERENCE; @@ -54,7 +53,7 @@ pub enum Conflict { pub struct OverlapResult<'tcx> { pub impl_header: ty::ImplHeader<'tcx>, - pub intercrate_ambiguity_causes: FxIndexSet<IntercrateAmbiguityCause>, + pub intercrate_ambiguity_causes: FxIndexSet<IntercrateAmbiguityCause<'tcx>>, /// `true` if the overlap might've been permitted before the shift /// to universes. @@ -987,8 +986,8 @@ where fn compute_intercrate_ambiguity_causes<'tcx>( infcx: &InferCtxt<'tcx>, obligations: &[PredicateObligation<'tcx>], -) -> FxIndexSet<IntercrateAmbiguityCause> { - let mut causes: FxIndexSet<IntercrateAmbiguityCause> = Default::default(); +) -> FxIndexSet<IntercrateAmbiguityCause<'tcx>> { + let mut causes: FxIndexSet<IntercrateAmbiguityCause<'tcx>> = Default::default(); for obligation in obligations { search_ambiguity_causes(infcx, obligation.clone().into(), &mut causes); @@ -997,11 +996,11 @@ fn compute_intercrate_ambiguity_causes<'tcx>( causes } -struct AmbiguityCausesVisitor<'a> { - causes: &'a mut FxIndexSet<IntercrateAmbiguityCause>, +struct AmbiguityCausesVisitor<'a, 'tcx> { + causes: &'a mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>, } -impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a> { +impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a, 'tcx> { type BreakTy = !; fn visit_goal(&mut self, goal: &InspectGoal<'_, 'tcx>) -> ControlFlow<Self::BreakTy> { let infcx = goal.infcx(); @@ -1041,14 +1040,12 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a> { } = cand.kind() { if let ty::ImplPolarity::Reservation = infcx.tcx.impl_polarity(def_id) { - let value = infcx + let message = infcx .tcx .get_attr(def_id, sym::rustc_reservation_impl) .and_then(|a| a.value_str()); - if let Some(value) = value { - self.causes.insert(IntercrateAmbiguityCause::ReservationImpl { - message: value.to_string(), - }); + if let Some(message) = message { + self.causes.insert(IntercrateAmbiguityCause::ReservationImpl { message }); } } } @@ -1088,24 +1085,18 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a> { Ok(Err(conflict)) => { if !trait_ref.references_error() { let self_ty = trait_ref.self_ty(); - let (trait_desc, self_desc) = with_no_trimmed_paths!({ - let trait_desc = 
trait_ref.print_only_trait_path().to_string(); - let self_desc = self_ty - .has_concrete_skeleton() - .then(|| self_ty.to_string()); - (trait_desc, self_desc) - }); + let self_ty = self_ty.has_concrete_skeleton().then(|| self_ty); ambiguity_cause = Some(match conflict { Conflict::Upstream => { IntercrateAmbiguityCause::UpstreamCrateUpdate { - trait_desc, - self_desc, + trait_ref, + self_ty, } } Conflict::Downstream => { IntercrateAmbiguityCause::DownstreamCrate { - trait_desc, - self_desc, + trait_ref, + self_ty, } } }); @@ -1142,7 +1133,7 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a> { fn search_ambiguity_causes<'tcx>( infcx: &InferCtxt<'tcx>, goal: Goal<'tcx, ty::Predicate<'tcx>>, - causes: &mut FxIndexSet<IntercrateAmbiguityCause>, + causes: &mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>, ) { infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor { causes }); } diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index f3e6887e012..0c884b7c16a 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -46,6 +46,7 @@ use rustc_middle::ty::GenericArgsRef; use rustc_middle::ty::{self, EarlyBinder, PolyProjectionPredicate, ToPolyTraitRef, ToPredicate}; use rustc_middle::ty::{Ty, TyCtxt, TypeFoldable, TypeVisitableExt}; use rustc_span::symbol::sym; +use rustc_span::Symbol; use std::cell::{Cell, RefCell}; use std::cmp; @@ -59,13 +60,13 @@ mod candidate_assembly; mod confirmation; #[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub enum IntercrateAmbiguityCause { - DownstreamCrate { trait_desc: String, self_desc: Option<String> }, - UpstreamCrateUpdate { trait_desc: String, self_desc: Option<String> }, - ReservationImpl { message: String }, +pub enum IntercrateAmbiguityCause<'tcx> { + DownstreamCrate { trait_ref: ty::TraitRef<'tcx>, self_ty: Option<Ty<'tcx>> }, + UpstreamCrateUpdate { trait_ref: ty::TraitRef<'tcx>, self_ty: Option<Ty<'tcx>> }, + ReservationImpl { message: Symbol }, } -impl IntercrateAmbiguityCause { +impl<'tcx> IntercrateAmbiguityCause<'tcx> { /// Emits notes when the overlap is caused by complex intercrate ambiguities. /// See #23980 for details. 
pub fn add_intercrate_ambiguity_hint(&self, err: &mut Diagnostic) { @@ -73,28 +74,32 @@ impl IntercrateAmbiguityCause { } pub fn intercrate_ambiguity_hint(&self) -> String { - match self { - IntercrateAmbiguityCause::DownstreamCrate { trait_desc, self_desc } => { - let self_desc = if let Some(ty) = self_desc { - format!(" for type `{ty}`") - } else { - String::new() - }; - format!("downstream crates may implement trait `{trait_desc}`{self_desc}") + with_no_trimmed_paths!(match self { + IntercrateAmbiguityCause::DownstreamCrate { trait_ref, self_ty } => { + format!( + "downstream crates may implement trait `{trait_desc}`{self_desc}", + trait_desc = trait_ref.print_only_trait_path(), + self_desc = if let Some(self_ty) = self_ty { + format!(" for type `{self_ty}`") + } else { + String::new() + } + ) } - IntercrateAmbiguityCause::UpstreamCrateUpdate { trait_desc, self_desc } => { - let self_desc = if let Some(ty) = self_desc { - format!(" for type `{ty}`") - } else { - String::new() - }; + IntercrateAmbiguityCause::UpstreamCrateUpdate { trait_ref, self_ty } => { format!( "upstream crates may add a new impl of trait `{trait_desc}`{self_desc} \ - in future versions" + in future versions", + trait_desc = trait_ref.print_only_trait_path(), + self_desc = if let Some(self_ty) = self_ty { + format!(" for type `{self_ty}`") + } else { + String::new() + } ) } - IntercrateAmbiguityCause::ReservationImpl { message } => message.clone(), - } + IntercrateAmbiguityCause::ReservationImpl { message } => message.to_string(), + }) } } @@ -114,7 +119,7 @@ pub struct SelectionContext<'cx, 'tcx> { /// We don't do his until we detect a coherence error because it can /// lead to false overflow results (#47139) and because always /// computing it may negatively impact performance. - intercrate_ambiguity_causes: Option<FxIndexSet<IntercrateAmbiguityCause>>, + intercrate_ambiguity_causes: Option<FxIndexSet<IntercrateAmbiguityCause<'tcx>>>, /// The mode that trait queries run in, which informs our error handling /// policy. In essence, canonicalized queries need their errors propagated @@ -270,7 +275,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { /// Gets the intercrate ambiguity causes collected since tracking /// was enabled and disables tracking at the same time. If /// tracking is not enabled, just returns an empty vector. 
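`IntercrateAmbiguityCause` now stores the structured `TraitRef`, optional self type, and `Symbol` instead of pre-rendered strings, and the string formatting moves into `intercrate_ambiguity_hint` under `with_no_trimmed_paths`. A small self-contained sketch of that "store data, render late" shape, with hypothetical stand-in types:

```rust
// Standalone sketch with hypothetical stand-in types (the real code stores
// interned TraitRef<'tcx>, Ty<'tcx>, and Symbol values).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct TraitRef {
    name: String,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum AmbiguityCause {
    // Before: DownstreamCrate { trait_desc: String, self_desc: Option<String> }
    DownstreamCrate { trait_ref: TraitRef, self_ty: Option<String> },
    UpstreamCrateUpdate { trait_ref: TraitRef, self_ty: Option<String> },
    ReservationImpl { message: String },
}

impl AmbiguityCause {
    // Formatting happens only when the hint is actually emitted.
    fn hint(&self) -> String {
        let for_ty = |self_ty: &Option<String>| {
            self_ty.as_deref().map(|t| format!(" for type `{t}`")).unwrap_or_default()
        };
        match self {
            AmbiguityCause::DownstreamCrate { trait_ref, self_ty } => format!(
                "downstream crates may implement trait `{}`{}",
                trait_ref.name,
                for_ty(self_ty)
            ),
            AmbiguityCause::UpstreamCrateUpdate { trait_ref, self_ty } => format!(
                "upstream crates may add a new impl of trait `{}`{} in future versions",
                trait_ref.name,
                for_ty(self_ty)
            ),
            AmbiguityCause::ReservationImpl { message } => message.clone(),
        }
    }
}

fn main() {
    let causes = vec![
        AmbiguityCause::DownstreamCrate {
            trait_ref: TraitRef { name: "IntoIterator".into() },
            self_ty: Some("Vec<T>".into()),
        },
        AmbiguityCause::UpstreamCrateUpdate {
            trait_ref: TraitRef { name: "Display".into() },
            self_ty: None,
        },
        AmbiguityCause::ReservationImpl { message: "reservation impl".into() },
    ];
    for cause in &causes {
        println!("{}", cause.hint());
    }
}
```

Deferring the `format!` means no strings are built when a cause is merely recorded during selection; the cost is only paid if the hint is actually printed, and the final rendering can decide how paths are displayed.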
- pub fn take_intercrate_ambiguity_causes(&mut self) -> FxIndexSet<IntercrateAmbiguityCause> { + pub fn take_intercrate_ambiguity_causes( + &mut self, + ) -> FxIndexSet<IntercrateAmbiguityCause<'tcx>> { assert!(self.is_intercrate()); self.intercrate_ambiguity_causes.take().unwrap_or_default() } @@ -428,19 +435,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ); if !trait_ref.references_error() { let self_ty = trait_ref.self_ty(); - let (trait_desc, self_desc) = with_no_trimmed_paths!({ - let trait_desc = trait_ref.print_only_trait_path().to_string(); - let self_desc = - self_ty.has_concrete_skeleton().then(|| self_ty.to_string()); - (trait_desc, self_desc) - }); + let self_ty = self_ty.has_concrete_skeleton().then(|| self_ty); let cause = if let Conflict::Upstream = conflict { - IntercrateAmbiguityCause::UpstreamCrateUpdate { - trait_desc, - self_desc, - } + IntercrateAmbiguityCause::UpstreamCrateUpdate { trait_ref, self_ty } } else { - IntercrateAmbiguityCause::DownstreamCrate { trait_desc, self_desc } + IntercrateAmbiguityCause::DownstreamCrate { trait_ref, self_ty } }; debug!(?cause, "evaluate_stack: pushing cause"); self.intercrate_ambiguity_causes.as_mut().unwrap().insert(cause); @@ -1451,20 +1450,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { if let ImplCandidate(def_id) = candidate { if let ty::ImplPolarity::Reservation = tcx.impl_polarity(def_id) { if let Some(intercrate_ambiguity_clauses) = &mut self.intercrate_ambiguity_causes { - let value = tcx + let message = tcx .get_attr(def_id, sym::rustc_reservation_impl) .and_then(|a| a.value_str()); - if let Some(value) = value { + if let Some(message) = message { debug!( "filter_reservation_impls: \ reservation impl ambiguity on {:?}", def_id ); - intercrate_ambiguity_clauses.insert( - IntercrateAmbiguityCause::ReservationImpl { - message: value.to_string(), - }, - ); + intercrate_ambiguity_clauses + .insert(IntercrateAmbiguityCause::ReservationImpl { message }); } } return Ok(None); diff --git a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs index eac633f1a9c..73ba03a3610 100644 --- a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs @@ -37,7 +37,7 @@ pub struct OverlapError<'tcx> { pub with_impl: DefId, pub trait_ref: ty::TraitRef<'tcx>, pub self_ty: Option<Ty<'tcx>>, - pub intercrate_ambiguity_causes: FxIndexSet<IntercrateAmbiguityCause>, + pub intercrate_ambiguity_causes: FxIndexSet<IntercrateAmbiguityCause<'tcx>>, pub involves_placeholder: bool, } diff --git a/compiler/rustc_ty_utils/src/implied_bounds.rs b/compiler/rustc_ty_utils/src/implied_bounds.rs index 6cf5aa6f2fb..eef90dc6dac 100644 --- a/compiler/rustc_ty_utils/src/implied_bounds.rs +++ b/compiler/rustc_ty_utils/src/implied_bounds.rs @@ -156,8 +156,7 @@ fn assumed_wf_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx [(Ty<' | DefKind::Field | DefKind::LifetimeParam | DefKind::GlobalAsm - | DefKind::Closure - | DefKind::Coroutine => ty::List::empty(), + | DefKind::Closure => ty::List::empty(), } } diff --git a/compiler/rustc_ty_utils/src/opaque_types.rs b/compiler/rustc_ty_utils/src/opaque_types.rs index c17970e23a2..16f5ed09d00 100644 --- a/compiler/rustc_ty_utils/src/opaque_types.rs +++ b/compiler/rustc_ty_utils/src/opaque_types.rs @@ -313,7 +313,7 @@ fn opaque_types_defined_by<'tcx>( | DefKind::Impl { .. 
} => {} // Closures and coroutines are type checked with their parent, so we need to allow all // opaques from the closure signature *and* from the parent body. - DefKind::Closure | DefKind::Coroutine | DefKind::InlineConst => { + DefKind::Closure | DefKind::InlineConst => { collector.opaques.extend(tcx.opaque_types_defined_by(tcx.local_parent(item))); } } diff --git a/compiler/rustc_ty_utils/src/sig_types.rs b/compiler/rustc_ty_utils/src/sig_types.rs index 268639a7f44..7c0261c818f 100644 --- a/compiler/rustc_ty_utils/src/sig_types.rs +++ b/compiler/rustc_ty_utils/src/sig_types.rs @@ -67,7 +67,7 @@ pub(crate) fn walk_types<'tcx, V: SpannedTypeVisitor<'tcx>>( // These are not part of a public API, they can only appear as hidden types, and there // the interesting parts are solely in the signature of the containing item's opaque type // or dyn type. - DefKind::InlineConst | DefKind::Closure | DefKind::Coroutine => {} + DefKind::InlineConst | DefKind::Closure => {} DefKind::Impl { of_trait } => { if of_trait { let span = tcx.hir().get_by_def_id(item).expect_item().expect_impl().of_trait.unwrap().path.span; diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index d44cf299c27..86f5fc56361 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -178,6 +178,7 @@ #![feature(is_ascii_octdigit)] #![feature(isqrt)] #![feature(maybe_uninit_uninit_array)] +#![feature(non_null_convenience)] #![feature(offset_of)] #![feature(offset_of_enum)] #![feature(ptr_alignment_type)] diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index ae673b7799f..c99fe36de6d 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -2,11 +2,14 @@ use crate::cmp::Ordering; use crate::convert::From; use crate::fmt; use crate::hash; +use crate::intrinsics; use crate::intrinsics::assert_unsafe_precondition; use crate::marker::Unsize; +use crate::mem::SizedTypeProperties; use crate::mem::{self, MaybeUninit}; use crate::num::NonZeroUsize; use crate::ops::{CoerceUnsized, DispatchFromDyn}; +use crate::ptr; use crate::ptr::Unique; use crate::slice::{self, SliceIndex}; @@ -471,41 +474,1047 @@ impl<T: ?Sized> NonNull<T> { unsafe { NonNull::new_unchecked(self.as_ptr() as *mut U) } } - /// See [`pointer::add`] for semantics and safety requirements. + /// Calculates the offset from a pointer. + /// + /// `count` is in units of T; e.g., a `count` of 3 represents a pointer + /// offset of `3 * size_of::<T>()` bytes. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is Undefined + /// Behavior: + /// + /// * Both the starting and resulting pointer must be either in bounds or one + /// byte past the end of the same [allocated object]. + /// + /// * The computed offset, **in bytes**, cannot overflow an `isize`. + /// + /// * The offset being in bounds cannot rely on "wrapping around" the address + /// space. That is, the infinite-precision sum, **in bytes** must fit in a usize. + /// + /// The compiler and standard library generally tries to ensure allocations + /// never reach a size where an offset is a concern. For instance, `Vec` + /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so + /// `vec.as_ptr().add(vec.len())` is always safe. + /// + /// Most platforms fundamentally can't even construct such an allocation. + /// For instance, no known 64-bit platform can ever serve a request + /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. 
+ /// However, some 32-bit and 16-bit platforms may successfully serve a request for + /// more than `isize::MAX` bytes with things like Physical Address + /// Extension. As such, memory acquired directly from allocators or memory + /// mapped files *may* be too large to handle with this function. + /// + /// [allocated object]: crate::ptr#allocated-object + /// + /// # Examples + /// + /// ``` + /// #![feature(non_null_convenience)] + /// use std::ptr::NonNull; + /// + /// let mut s = [1, 2, 3]; + /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap(); + /// + /// unsafe { + /// println!("{}", ptr.offset(1).read()); + /// println!("{}", ptr.offset(2).read()); + /// } + /// ``` + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[must_use = "returns a new pointer rather than modifying its argument"] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn offset(self, count: isize) -> NonNull<T> + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `offset`. + // Additionally safety contract of `offset` guarantees that the resulting pointer is + // pointing to an allocation, there can't be an allocation at null, thus it's safe to + // construct `NonNull`. + unsafe { NonNull { pointer: intrinsics::offset(self.pointer, count) } } + } + + /// Calculates the offset from a pointer in bytes. + /// + /// `count` is in units of **bytes**. + /// + /// This is purely a convenience for casting to a `u8` pointer and + /// using [offset][pointer::offset] on it. See that method for documentation + /// and safety requirements. + /// + /// For non-`Sized` pointees this operation changes only the data pointer, + /// leaving the metadata untouched. + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[must_use] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn byte_offset(self, count: isize) -> Self { + // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has + // the same safety contract. + // Additionally safety contract of `offset` guarantees that the resulting pointer is + // pointing to an allocation, there can't be an allocation at null, thus it's safe to + // construct `NonNull`. + unsafe { NonNull { pointer: self.pointer.byte_offset(count) } } + } + + /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`). + /// + /// `count` is in units of T; e.g., a `count` of 3 represents a pointer + /// offset of `3 * size_of::<T>()` bytes. + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is Undefined + /// Behavior: + /// + /// * Both the starting and resulting pointer must be either in bounds or one + /// byte past the end of the same [allocated object]. + /// + /// * The computed offset, **in bytes**, cannot overflow an `isize`. + /// + /// * The offset being in bounds cannot rely on "wrapping around" the address + /// space. That is, the infinite-precision sum must fit in a `usize`. + /// + /// The compiler and standard library generally tries to ensure allocations + /// never reach a size where an offset is a concern. 
For instance, `Vec` + /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so + /// `vec.as_ptr().add(vec.len())` is always safe. + /// + /// Most platforms fundamentally can't even construct such an allocation. + /// For instance, no known 64-bit platform can ever serve a request + /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. + /// However, some 32-bit and 16-bit platforms may successfully serve a request for + /// more than `isize::MAX` bytes with things like Physical Address + /// Extension. As such, memory acquired directly from allocators or memory + /// mapped files *may* be too large to handle with this function. + /// + /// [allocated object]: crate::ptr#allocated-object + /// + /// # Examples + /// + /// ``` + /// #![feature(non_null_convenience)] + /// use std::ptr::NonNull; + /// + /// let s: &str = "123"; + /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap(); + /// + /// unsafe { + /// println!("{}", ptr.add(1).read() as char); + /// println!("{}", ptr.add(2).read() as char); + /// } + /// ``` + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[must_use = "returns a new pointer rather than modifying its argument"] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn add(self, count: usize) -> Self + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `offset`. + // Additionally safety contract of `offset` guarantees that the resulting pointer is + // pointing to an allocation, there can't be an allocation at null, thus it's safe to + // construct `NonNull`. + unsafe { NonNull { pointer: intrinsics::offset(self.pointer, count) } } + } + + /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`). + /// + /// `count` is in units of bytes. + /// + /// This is purely a convenience for casting to a `u8` pointer and + /// using [`add`][NonNull::add] on it. See that method for documentation + /// and safety requirements. + /// + /// For non-`Sized` pointees this operation changes only the data pointer, + /// leaving the metadata untouched. + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[must_use] + #[inline(always)] + #[rustc_allow_const_fn_unstable(set_ptr_value)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn byte_add(self, count: usize) -> Self { + // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same + // safety contract. + // Additionally safety contract of `add` guarantees that the resulting pointer is pointing + // to an allocation, there can't be an allocation at null, thus it's safe to construct + // `NonNull`. + unsafe { NonNull { pointer: self.pointer.byte_add(count) } } + } + + /// Calculates the offset from a pointer (convenience for + /// `.offset((count as isize).wrapping_neg())`). + /// + /// `count` is in units of T; e.g., a `count` of 3 represents a pointer + /// offset of `3 * size_of::<T>()` bytes. 
+ /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is Undefined + /// Behavior: + /// + /// * Both the starting and resulting pointer must be either in bounds or one + /// byte past the end of the same [allocated object]. + /// + /// * The computed offset cannot exceed `isize::MAX` **bytes**. + /// + /// * The offset being in bounds cannot rely on "wrapping around" the address + /// space. That is, the infinite-precision sum must fit in a usize. + /// + /// The compiler and standard library generally tries to ensure allocations + /// never reach a size where an offset is a concern. For instance, `Vec` + /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so + /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe. + /// + /// Most platforms fundamentally can't even construct such an allocation. + /// For instance, no known 64-bit platform can ever serve a request + /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. + /// However, some 32-bit and 16-bit platforms may successfully serve a request for + /// more than `isize::MAX` bytes with things like Physical Address + /// Extension. As such, memory acquired directly from allocators or memory + /// mapped files *may* be too large to handle with this function. + /// + /// [allocated object]: crate::ptr#allocated-object + /// + /// # Examples + /// + /// ``` + /// #![feature(non_null_convenience)] + /// use std::ptr::NonNull; + /// + /// let s: &str = "123"; + /// + /// unsafe { + /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3); + /// println!("{}", end.sub(1).read() as char); + /// println!("{}", end.sub(2).read() as char); + /// } + /// ``` + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[must_use = "returns a new pointer rather than modifying its argument"] + // We could always go back to wrapping if unchecked becomes unacceptable + #[rustc_allow_const_fn_unstable(const_int_unchecked_arith)] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn sub(self, count: usize) -> Self + where + T: Sized, + { + if T::IS_ZST { + // Pointer arithmetic does nothing when the pointee is a ZST. + self + } else { + // SAFETY: the caller must uphold the safety contract for `offset`. + // Because the pointee is *not* a ZST, that means that `count` is + // at most `isize::MAX`, and thus the negation cannot overflow. + unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) } + } + } + + /// Calculates the offset from a pointer in bytes (convenience for + /// `.byte_offset((count as isize).wrapping_neg())`). + /// + /// `count` is in units of bytes. + /// + /// This is purely a convenience for casting to a `u8` pointer and + /// using [`sub`][NonNull::sub] on it. See that method for documentation + /// and safety requirements. + /// + /// For non-`Sized` pointees this operation changes only the data pointer, + /// leaving the metadata untouched. 
+ #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[must_use] + #[inline(always)] + #[rustc_allow_const_fn_unstable(set_ptr_value)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn byte_sub(self, count: usize) -> Self { + // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same + // safety contract. + // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing + // to an allocation, there can't be an allocation at null, thus it's safe to construct + // `NonNull`. + unsafe { NonNull { pointer: self.pointer.byte_sub(count) } } + } + + /// Calculates the distance between two pointers. The returned value is in + /// units of T: the distance in bytes divided by `mem::size_of::<T>()`. + /// + /// This is equivalent to `(self as isize - origin as isize) / (mem::size_of::<T>() as isize)`, + /// except that it has a lot more opportunities for UB, in exchange for the compiler + /// better understanding what you are doing. + /// + /// The primary motivation of this method is for computing the `len` of an array/slice + /// of `T` that you are currently representing as a "start" and "end" pointer + /// (and "end" is "one past the end" of the array). + /// In that case, `end.offset_from(start)` gets you the length of the array. + /// + /// All of the following safety requirements are trivially satisfied for this usecase. + /// + /// [`offset`]: #method.offset + /// + /// # Safety + /// + /// If any of the following conditions are violated, the result is Undefined + /// Behavior: + /// + /// * Both `self` and `origin` must be either in bounds or one + /// byte past the end of the same [allocated object]. + /// + /// * Both pointers must be *derived from* a pointer to the same object. + /// (See below for an example.) + /// + /// * The distance between the pointers, in bytes, must be an exact multiple + /// of the size of `T`. + /// + /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`. + /// + /// * The distance being in bounds cannot rely on "wrapping around" the address space. + /// + /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the + /// address space, so two pointers within some value of any Rust type `T` will always satisfy + /// the last two conditions. The standard library also generally ensures that allocations + /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they + /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())` + /// always satisfies the last two conditions. + /// + /// Most platforms fundamentally can't even construct such a large allocation. + /// For instance, no known 64-bit platform can ever serve a request + /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space. + /// However, some 32-bit and 16-bit platforms may successfully serve a request for + /// more than `isize::MAX` bytes with things like Physical Address + /// Extension. As such, memory acquired directly from allocators or memory + /// mapped files *may* be too large to handle with this function. + /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on + /// such large allocations either.) 
+ /// + /// The requirement for pointers to be derived from the same allocated object is primarily + /// needed for `const`-compatibility: the distance between pointers into *different* allocated + /// objects is not known at compile-time. However, the requirement also exists at + /// runtime and may be exploited by optimizations. If you wish to compute the difference between + /// pointers that are not guaranteed to be from the same allocation, use `(self as isize - + /// origin as isize) / mem::size_of::<T>()`. + // FIXME: recommend `addr()` instead of `as usize` once that is stable. + /// + /// [`add`]: #method.add + /// [allocated object]: crate::ptr#allocated-object + /// + /// # Panics + /// + /// This function panics if `T` is a Zero-Sized Type ("ZST"). + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(non_null_convenience)] + /// use std::ptr::NonNull; + /// + /// let a = [0; 5]; + /// let ptr1: NonNull<u32> = NonNull::from(&a[1]); + /// let ptr2: NonNull<u32> = NonNull::from(&a[3]); + /// unsafe { + /// assert_eq!(ptr2.offset_from(ptr1), 2); + /// assert_eq!(ptr1.offset_from(ptr2), -2); + /// assert_eq!(ptr1.offset(2), ptr2); + /// assert_eq!(ptr2.offset(-2), ptr1); + /// } + /// ``` + /// + /// *Incorrect* usage: + /// + /// ```rust,no_run + /// #![feature(non_null_convenience, strict_provenance)] + /// use std::ptr::NonNull; + /// + /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap(); + /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap(); + /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize); + /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1. + /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff)).unwrap(); + /// assert_eq!(ptr2.addr(), ptr2_other.addr()); + /// // Since ptr2_other and ptr2 are derived from pointers to different objects, + /// // computing their offset is undefined behavior, even though + /// // they point to the same address! + /// unsafe { + /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior + /// } + /// ``` + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `offset_from`. + unsafe { self.pointer.offset_from(origin.pointer) } + } + + /// Calculates the distance between two pointers. The returned value is in + /// units of **bytes**. + /// + /// This is purely a convenience for casting to a `u8` pointer and + /// using [`offset_from`][NonNull::offset_from] on it. See that method for + /// documentation and safety requirements. + /// + /// For non-`Sized` pointees this operation considers only the data pointers, + /// ignoring the metadata. + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize { + // SAFETY: the caller must uphold the safety contract for `byte_offset_from`. + unsafe { self.pointer.byte_offset_from(origin.pointer) } + } + + // N.B. 
`wrapping_offset``, `wrapping_add`, etc are not implemented because they can wrap to null + + /// Calculates the distance between two pointers, *where it's known that + /// `self` is equal to or greater than `origin`*. The returned value is in + /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`. + /// + /// This computes the same value that [`offset_from`](#method.offset_from) + /// would compute, but with the added precondition that the offset is + /// guaranteed to be non-negative. This method is equivalent to + /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`, + /// but it provides slightly more information to the optimizer, which can + /// sometimes allow it to optimize slightly better with some backends. + /// + /// This method can be though of as recovering the `count` that was passed + /// to [`add`](#method.add) (or, with the parameters in the other order, + /// to [`sub`](#method.sub)). The following are all equivalent, assuming + /// that their safety preconditions are met: + /// ```rust + /// # #![feature(non_null_convenience)] + /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { + /// ptr.sub_ptr(origin) == count + /// # && + /// origin.add(count) == ptr + /// # && + /// ptr.sub(count) == origin + /// # } + /// ``` + /// + /// # Safety + /// + /// - The distance between the pointers must be non-negative (`self >= origin`) + /// + /// - *All* the safety conditions of [`offset_from`](#method.offset_from) + /// apply to this method as well; see it for the full details. + /// + /// Importantly, despite the return type of this method being able to represent + /// a larger offset, it's still *not permitted* to pass pointers which differ + /// by more than `isize::MAX` *bytes*. As such, the result of this method will + /// always be less than or equal to `isize::MAX as usize`. + /// + /// # Panics + /// + /// This function panics if `T` is a Zero-Sized Type ("ZST"). + /// + /// # Examples + /// + /// ``` + /// #![feature(non_null_convenience)] + /// use std::ptr::NonNull; + /// + /// let a = [0; 5]; + /// let ptr1: NonNull<u32> = NonNull::from(&a[1]); + /// let ptr2: NonNull<u32> = NonNull::from(&a[3]); + /// unsafe { + /// assert_eq!(ptr2.sub_ptr(ptr1), 2); + /// assert_eq!(ptr1.add(2), ptr2); + /// assert_eq!(ptr2.sub(2), ptr1); + /// assert_eq!(ptr2.sub_ptr(ptr2), 0); + /// } + /// + /// // This would be incorrect, as the pointers are not correctly ordered: + /// // ptr1.sub_ptr(ptr2) + /// ``` + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + // #[unstable(feature = "ptr_sub_ptr", issue = "95892")] + // #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")] + #[inline] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn sub_ptr(self, subtracted: NonNull<T>) -> usize + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `sub_ptr`. + unsafe { self.pointer.sub_ptr(subtracted.pointer) } + } + + /// Reads the value from `self` without moving it. This leaves the + /// memory in `self` unchanged. + /// + /// See [`ptr::read`] for safety concerns and examples. 
+ /// + /// [`ptr::read`]: crate::ptr::read() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn read(self) -> T + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `read`. + unsafe { ptr::read(self.pointer) } + } + + /// Performs a volatile read of the value from `self` without moving it. This + /// leaves the memory in `self` unchanged. + /// + /// Volatile operations are intended to act on I/O memory, and are guaranteed + /// to not be elided or reordered by the compiler across other volatile + /// operations. + /// + /// See [`ptr::read_volatile`] for safety concerns and examples. + /// + /// [`ptr::read_volatile`]: crate::ptr::read_volatile() + #[unstable(feature = "non_null_convenience", issue = "117691")] #[inline] - pub(crate) const unsafe fn add(self, delta: usize) -> Self + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub unsafe fn read_volatile(self) -> T where T: Sized, { - // SAFETY: We require that the delta stays in-bounds of the object, and - // thus it cannot become null, as that would require wrapping the - // address space, which no legal objects are allowed to do. - // And the caller promised the `delta` is sound to add. - unsafe { NonNull { pointer: self.pointer.add(delta) } } + // SAFETY: the caller must uphold the safety contract for `read_volatile`. + unsafe { ptr::read_volatile(self.pointer) } } - /// See [`pointer::sub`] for semantics and safety requirements. + /// Reads the value from `self` without moving it. This leaves the + /// memory in `self` unchanged. + /// + /// Unlike `read`, the pointer may be unaligned. + /// + /// See [`ptr::read_unaligned`] for safety concerns and examples. + /// + /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline] - pub(crate) const unsafe fn sub(self, delta: usize) -> Self + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn read_unaligned(self) -> T + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `read_unaligned`. + unsafe { ptr::read_unaligned(self.pointer) } + } + + /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source + /// and destination may overlap. + /// + /// NOTE: this has the *same* argument order as [`ptr::copy`]. + /// + /// See [`ptr::copy`] for safety concerns and examples. + /// + /// [`ptr::copy`]: crate::ptr::copy() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `copy`. + unsafe { ptr::copy(self.pointer, dest.as_ptr(), count) } + } + + /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source + /// and destination may *not* overlap. + /// + /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`]. + /// + /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples. 
+ /// + /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`. + unsafe { ptr::copy_nonoverlapping(self.pointer, dest.as_ptr(), count) } + } + + /// Copies `count * size_of<T>` bytes from `src` to `self`. The source + /// and destination may overlap. + /// + /// NOTE: this has the *opposite* argument order of [`ptr::copy`]. + /// + /// See [`ptr::copy`] for safety concerns and examples. + /// + /// [`ptr::copy`]: crate::ptr::copy() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `copy`. + unsafe { ptr::copy(src.pointer, self.as_ptr(), count) } + } + + /// Copies `count * size_of<T>` bytes from `src` to `self`. The source + /// and destination may *not* overlap. + /// + /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`]. + /// + /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples. + /// + /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`. + unsafe { ptr::copy_nonoverlapping(src.pointer, self.as_ptr(), count) } + } + + /// Executes the destructor (if any) of the pointed-to value. + /// + /// See [`ptr::drop_in_place`] for safety concerns and examples. + /// + /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + pub unsafe fn drop_in_place(self) { + // SAFETY: the caller must uphold the safety contract for `drop_in_place`. + unsafe { ptr::drop_in_place(self.as_ptr()) } + } + + /// Overwrites a memory location with the given value without reading or + /// dropping the old value. + /// + /// See [`ptr::write`] for safety concerns and examples. + /// + /// [`ptr::write`]: crate::ptr::write() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + //#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn write(self, val: T) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `write`. 
+ unsafe { ptr::write(self.as_ptr(), val) } + } + + /// Invokes memset on the specified pointer, setting `count * size_of::<T>()` + /// bytes of memory starting at `self` to `val`. + /// + /// See [`ptr::write_bytes`] for safety concerns and examples. + /// + /// [`ptr::write_bytes`]: crate::ptr::write_bytes() + #[doc(alias = "memset")] + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + //#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn write_bytes(self, val: u8, count: usize) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `write_bytes`. + unsafe { ptr::write_bytes(self.as_ptr(), val, count) } + } + + /// Performs a volatile write of a memory location with the given value without + /// reading or dropping the old value. + /// + /// Volatile operations are intended to act on I/O memory, and are guaranteed + /// to not be elided or reordered by the compiler across other volatile + /// operations. + /// + /// See [`ptr::write_volatile`] for safety concerns and examples. + /// + /// [`ptr::write_volatile`]: crate::ptr::write_volatile() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub unsafe fn write_volatile(self, val: T) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `write_volatile`. + unsafe { ptr::write_volatile(self.as_ptr(), val) } + } + + /// Overwrites a memory location with the given value without reading or + /// dropping the old value. + /// + /// Unlike `write`, the pointer may be unaligned. + /// + /// See [`ptr::write_unaligned`] for safety concerns and examples. + /// + /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + //#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn write_unaligned(self, val: T) where T: Sized, { - // SAFETY: We require that the delta stays in-bounds of the object, and - // thus it cannot become null, as no legal objects can be allocated - // in such as way that the null address is part of them. - // And the caller promised the `delta` is sound to subtract. - unsafe { NonNull { pointer: self.pointer.sub(delta) } } + // SAFETY: the caller must uphold the safety contract for `write_unaligned`. + unsafe { ptr::write_unaligned(self.as_ptr(), val) } } - /// See [`pointer::sub_ptr`] for semantics and safety requirements. + /// Replaces the value at `self` with `src`, returning the old + /// value, without dropping either. + /// + /// See [`ptr::replace`] for safety concerns and examples. + /// + /// [`ptr::replace`]: crate::ptr::replace() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[inline(always)] + pub unsafe fn replace(self, src: T) -> T + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `replace`. + unsafe { ptr::replace(self.as_ptr(), src) } + } + + /// Swaps the values at two mutable locations of the same type, without + /// deinitializing either. 
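The read/write/replace/swap methods in this block are thin forwards to the corresponding `core::ptr` functions, as the SAFETY comments state. For reference, the raw-pointer equivalents behave like this (stable code, no `non_null_convenience` needed):

```rust
use std::ptr;

fn main() {
    let mut a = 1u32;
    let mut b = 2u32;
    let pa: *mut u32 = &mut a;
    let pb: *mut u32 = &mut b;

    unsafe {
        // Overwrite without reading or dropping the old value.
        ptr::write(pa, 10);
        // Read it back without moving it.
        assert_eq!(ptr::read(pa), 10);
        // Replace returns the previous value.
        assert_eq!(ptr::replace(pa, 20), 10);
        // Swap the two locations in place; unlike mem::swap, they may overlap.
        ptr::swap(pa, pb);
    }

    assert_eq!((a, b), (2, 20));
}
```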
They may overlap, unlike `mem::swap` which is + /// otherwise equivalent. + /// + /// See [`ptr::swap`] for safety concerns and examples. + /// + /// [`ptr::swap`]: crate::ptr::swap() + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + //#[rustc_const_unstable(feature = "const_swap", issue = "83163")] + #[inline(always)] + pub const unsafe fn swap(self, with: NonNull<T>) + where + T: Sized, + { + // SAFETY: the caller must uphold the safety contract for `swap`. + unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) } + } + + /// Computes the offset that needs to be applied to the pointer in order to make it aligned to + /// `align`. + /// + /// If it is not possible to align the pointer, the implementation returns + /// `usize::MAX`. It is permissible for the implementation to *always* + /// return `usize::MAX`. Only your algorithm's performance can depend + /// on getting a usable offset here, not its correctness. + /// + /// The offset is expressed in number of `T` elements, and not bytes. + /// + /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go + /// beyond the allocation that the pointer points into. It is up to the caller to ensure that + /// the returned offset is correct in all terms other than alignment. + /// + /// # Panics + /// + /// The function panics if `align` is not a power-of-two. + /// + /// # Examples + /// + /// Accessing adjacent `u8` as `u16` + /// + /// ``` + /// #![feature(non_null_convenience)] + /// use std::mem::align_of; + /// use std::ptr::NonNull; + /// + /// # unsafe { + /// let x = [5_u8, 6, 7, 8, 9]; + /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap(); + /// let offset = ptr.align_offset(align_of::<u16>()); + /// + /// if offset < x.len() - 1 { + /// let u16_ptr = ptr.add(offset).cast::<u16>(); + /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7])); + /// } else { + /// // while the pointer can be aligned via `offset`, it would point + /// // outside the allocation + /// } + /// # } + /// ``` + #[unstable(feature = "non_null_convenience", issue = "117691")] + #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] + //#[rustc_const_unstable(feature = "const_align_offset", issue = "90962")] + #[must_use] #[inline] - pub(crate) const unsafe fn sub_ptr(self, subtrahend: Self) -> usize + pub const fn align_offset(self, align: usize) -> usize where T: Sized, { - // SAFETY: The caller promised that this is safe to do, and - // the non-nullness is irrelevant to the operation. - unsafe { self.pointer.sub_ptr(subtrahend.pointer) } + if !align.is_power_of_two() { + panic!("align_offset: align is not a power-of-two"); + } + + { + // SAFETY: `align` has been checked to be a power of 2 above. + unsafe { ptr::align_offset(self.pointer, align) } + } + } + + /// Returns whether the pointer is properly aligned for `T`. + /// + /// # Examples + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// use std::ptr::NonNull; + /// + /// // On some platforms, the alignment of i32 is less than 4. 
+ /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// + /// let data = AlignedI32(42); + /// let ptr = NonNull::<AlignedI32>::from(&data); + /// + /// assert!(ptr.is_aligned()); + /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned()); + /// ``` + /// + /// # At compiletime + /// **Note: Alignment at compiletime is experimental and subject to change. See the + /// [tracking issue] for details.** + /// + /// At compiletime, the compiler may not know where a value will end up in memory. + /// Calling this function on a pointer created from a reference at compiletime will only + /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer + /// is never aligned if cast to a type with a stricter alignment than the reference's + /// underlying allocation. + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// #![feature(const_pointer_is_aligned)] + /// #![feature(non_null_convenience)] + /// #![feature(const_option)] + /// #![feature(const_nonnull_new)] + /// use std::ptr::NonNull; + /// + /// // On some platforms, the alignment of primitives is less than their size. + /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// #[repr(align(8))] + /// struct AlignedI64(i64); + /// + /// const _: () = { + /// let data = [AlignedI32(42), AlignedI32(42)]; + /// let ptr = NonNull::<AlignedI32>::new(&data[0] as *const _ as *mut _).unwrap(); + /// assert!(ptr.is_aligned()); + /// + /// // At runtime either `ptr1` or `ptr2` would be aligned, but at compiletime neither is aligned. + /// let ptr1 = ptr.cast::<AlignedI64>(); + /// let ptr2 = unsafe { ptr.add(1).cast::<AlignedI64>() }; + /// assert!(!ptr1.is_aligned()); + /// assert!(!ptr2.is_aligned()); + /// }; + /// ``` + /// + /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime + /// pointer is aligned, even if the compiletime pointer wasn't aligned. + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// #![feature(const_pointer_is_aligned)] + /// + /// // On some platforms, the alignment of primitives is less than their size. + /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// #[repr(align(8))] + /// struct AlignedI64(i64); + /// + /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned. + /// const COMPTIME_PTR: *const AlignedI32 = &AlignedI32(42); + /// const _: () = assert!(!COMPTIME_PTR.cast::<AlignedI64>().is_aligned()); + /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).cast::<AlignedI64>().is_aligned()); + /// + /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned. + /// let runtime_ptr = COMPTIME_PTR; + /// assert_ne!( + /// runtime_ptr.cast::<AlignedI64>().is_aligned(), + /// runtime_ptr.wrapping_add(1).cast::<AlignedI64>().is_aligned(), + /// ); + /// ``` + /// + /// If a pointer is created from a fixed address, this function behaves the same during + /// runtime and compiletime. + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// #![feature(const_pointer_is_aligned)] + /// #![feature(const_option)] + /// #![feature(const_nonnull_new)] + /// use std::ptr::NonNull; + /// + /// // On some platforms, the alignment of primitives is less than their size. 
+ /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// #[repr(align(8))] + /// struct AlignedI64(i64); + /// + /// const _: () = { + /// let ptr = NonNull::new(40 as *mut AlignedI32).unwrap(); + /// assert!(ptr.is_aligned()); + /// + /// // For pointers with a known address, runtime and compiletime behavior are identical. + /// let ptr1 = ptr.cast::<AlignedI64>(); + /// let ptr2 = NonNull::new(ptr.as_ptr().wrapping_add(1)).unwrap().cast::<AlignedI64>(); + /// assert!(ptr1.is_aligned()); + /// assert!(!ptr2.is_aligned()); + /// }; + /// ``` + /// + /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203 + #[unstable(feature = "pointer_is_aligned", issue = "96284")] + #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")] + #[must_use] + #[inline] + pub const fn is_aligned(self) -> bool + where + T: Sized, + { + self.pointer.is_aligned() + } + + /// Returns whether the pointer is aligned to `align`. + /// + /// For non-`Sized` pointees this operation considers only the data pointer, + /// ignoring the metadata. + /// + /// # Panics + /// + /// The function panics if `align` is not a power-of-two (this includes 0). + /// + /// # Examples + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// + /// // On some platforms, the alignment of i32 is less than 4. + /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// + /// let data = AlignedI32(42); + /// let ptr = &data as *const AlignedI32; + /// + /// assert!(ptr.is_aligned_to(1)); + /// assert!(ptr.is_aligned_to(2)); + /// assert!(ptr.is_aligned_to(4)); + /// + /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2)); + /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4)); + /// + /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8)); + /// ``` + /// + /// # At compiletime + /// **Note: Alignment at compiletime is experimental and subject to change. See the + /// [tracking issue] for details.** + /// + /// At compiletime, the compiler may not know where a value will end up in memory. + /// Calling this function on a pointer created from a reference at compiletime will only + /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer + /// cannot be stricter aligned than the reference's underlying allocation. + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// #![feature(const_pointer_is_aligned)] + /// + /// // On some platforms, the alignment of i32 is less than 4. + /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// + /// const _: () = { + /// let data = AlignedI32(42); + /// let ptr = &data as *const AlignedI32; + /// + /// assert!(ptr.is_aligned_to(1)); + /// assert!(ptr.is_aligned_to(2)); + /// assert!(ptr.is_aligned_to(4)); + /// + /// // At compiletime, we know for sure that the pointer isn't aligned to 8. + /// assert!(!ptr.is_aligned_to(8)); + /// assert!(!ptr.wrapping_add(1).is_aligned_to(8)); + /// }; + /// ``` + /// + /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime + /// pointer is aligned, even if the compiletime pointer wasn't aligned. + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// #![feature(const_pointer_is_aligned)] + /// + /// // On some platforms, the alignment of i32 is less than 4. + /// #[repr(align(4))] + /// struct AlignedI32(i32); + /// + /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned. 
+ /// const COMPTIME_PTR: *const AlignedI32 = &AlignedI32(42); + /// const _: () = assert!(!COMPTIME_PTR.is_aligned_to(8)); + /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).is_aligned_to(8)); + /// + /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned. + /// let runtime_ptr = COMPTIME_PTR; + /// assert_ne!( + /// runtime_ptr.is_aligned_to(8), + /// runtime_ptr.wrapping_add(1).is_aligned_to(8), + /// ); + /// ``` + /// + /// If a pointer is created from a fixed address, this function behaves the same during + /// runtime and compiletime. + /// + /// ``` + /// #![feature(pointer_is_aligned)] + /// #![feature(const_pointer_is_aligned)] + /// + /// const _: () = { + /// let ptr = 40 as *const u8; + /// assert!(ptr.is_aligned_to(1)); + /// assert!(ptr.is_aligned_to(2)); + /// assert!(ptr.is_aligned_to(4)); + /// assert!(ptr.is_aligned_to(8)); + /// assert!(!ptr.is_aligned_to(16)); + /// }; + /// ``` + /// + /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203 + #[unstable(feature = "pointer_is_aligned", issue = "96284")] + #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")] + #[must_use] + #[inline] + pub const fn is_aligned_to(self, align: usize) -> bool { + self.pointer.is_aligned_to(align) } } diff --git a/library/std/src/error.rs b/library/std/src/error.rs index 375ff2d2450..b240e4e2c45 100644 --- a/library/std/src/error.rs +++ b/library/std/src/error.rs @@ -12,14 +12,6 @@ pub use core::error::Error; #[unstable(feature = "error_generic_member_access", issue = "99301")] pub use core::error::{request_ref, request_value, Request}; -mod private { - // This is a hack to prevent `type_id` from being overridden by `Error` - // implementations, since that can enable unsound downcasting. - #[unstable(feature = "error_type_id", issue = "60784")] - #[derive(Debug)] - pub struct Internal; -} - /// An error reporter that prints an error and its sources. /// /// Report also exposes configuration options for formatting the error sources, either entirely on a diff --git a/src/librustdoc/formats/item_type.rs b/src/librustdoc/formats/item_type.rs index 5666751d1ce..e80da46adb4 100644 --- a/src/librustdoc/formats/item_type.rs +++ b/src/librustdoc/formats/item_type.rs @@ -149,8 +149,7 @@ impl From<DefKind> for ItemType { | DefKind::LifetimeParam | DefKind::GlobalAsm | DefKind::Impl { .. 
} - | DefKind::Closure - | DefKind::Coroutine => Self::ForeignType, + | DefKind::Closure => Self::ForeignType, } } } diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 615fb08c76f..7cc0f2e8f1e 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -25,19 +25,9 @@ function showMain() { removeClass(document.getElementById(MAIN_ID), "hidden"); } -function elemIsInParent(elem, parent) { - while (elem && elem !== document.body) { - if (elem === parent) { - return true; - } - elem = elem.parentElement; - } - return false; -} - function blurHandler(event, parentElem, hideCallback) { - if (!elemIsInParent(document.activeElement, parentElem) && - !elemIsInParent(event.relatedTarget, parentElem) + if (!parentElem.contains(document.activeElement) && + !parentElem.contains(event.relatedTarget) ) { hideCallback(); } @@ -1118,7 +1108,7 @@ function preLoadCss(cssUrl) { if (ev.pointerType !== "mouse") { return; } - if (!e.TOOLTIP_FORCE_VISIBLE && !elemIsInParent(ev.relatedTarget, e)) { + if (!e.TOOLTIP_FORCE_VISIBLE && !e.contains(ev.relatedTarget)) { // See "Tooltip pointer leave gesture" below. setTooltipHoverTimeout(e, false); addClass(wrapper, "fade-out"); @@ -1178,10 +1168,10 @@ function preLoadCss(cssUrl) { function tooltipBlurHandler(event) { if (window.CURRENT_TOOLTIP_ELEMENT && - !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT) && - !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT) && - !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) && - !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) + !window.CURRENT_TOOLTIP_ELEMENT.contains(document.activeElement) && + !window.CURRENT_TOOLTIP_ELEMENT.contains(event.relatedTarget) && + !window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.contains(document.activeElement) && + !window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.contains(event.relatedTarget) ) { // Work around a difference in the focus behaviour between Firefox, Chrome, and Safari. 
// When I click the button on an already-opened tooltip popover, Safari @@ -1248,8 +1238,8 @@ function preLoadCss(cssUrl) { if (ev.pointerType !== "mouse") { return; } - if (!e.TOOLTIP_FORCE_VISIBLE && - !elemIsInParent(ev.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT)) { + if (!e.TOOLTIP_FORCE_VISIBLE && window.CURRENT_TOOLTIP_ELEMENT && + !window.CURRENT_TOOLTIP_ELEMENT.contains(ev.relatedTarget)) { // Tooltip pointer leave gesture: // // Designing a good hover microinteraction is a matter of guessing user diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js index 63947789c54..70a2825265e 100644 --- a/src/librustdoc/html/static/js/settings.js +++ b/src/librustdoc/html/static/js/settings.js @@ -1,6 +1,6 @@ // Local js definitions: /* global getSettingValue, updateLocalStorage, updateTheme */ -/* global addClass, removeClass, onEach, onEachLazy, blurHandler, elemIsInParent */ +/* global addClass, removeClass, onEach, onEachLazy, blurHandler */ /* global MAIN_ID, getVar, getSettingsButton */ "use strict"; @@ -232,7 +232,7 @@ const settingsButton = getSettingsButton(); const settingsMenu = document.getElementById("settings"); settingsButton.onclick = event => { - if (elemIsInParent(event.target, settingsMenu)) { + if (settingsMenu.contains(event.target)) { return; } event.preventDefault(); diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 1e8ece7e114..6b536e97c43 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -1924,7 +1924,6 @@ fn resolution_failure( Variant | Field | Closure - | Coroutine | AssocTy | AssocConst | AssocFn diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs index dd52deef672..14680fdb064 100644 --- a/src/librustdoc/scrape_examples.rs +++ b/src/librustdoc/scrape_examples.rs @@ -325,7 +325,7 @@ pub(crate) fn run( // Save output to provided path let mut encoder = FileEncoder::new(options.output_path).map_err(|e| e.to_string())?; calls.encode(&mut encoder); - encoder.finish().map_err(|e| e.to_string())?; + encoder.finish().map_err(|(_path, e)| e.to_string())?; Ok(()) }; diff --git a/triagebot.toml b/triagebot.toml index c1482769852..593386288b4 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -626,7 +626,7 @@ cc = ["@nnethercote"] [assign] warn_non_default_branch = true contributing_url = "https://rustc-dev-guide.rust-lang.org/getting-started.html" -users_on_vacation = ["jyn514", "WaffleLapkin", "oli-obk"] +users_on_vacation = ["jyn514", "oli-obk"] [assign.adhoc_groups] compiler-team = [ |
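Editor's note on the `copy_*` additions in the NonNull hunk above: the "NOTE: this has the *opposite* argument order" warnings are easy to gloss over. Below is a minimal sketch, illustrative only and not part of the patch, contrasting the new method form with the existing free function. It assumes a nightly toolchain carrying the unstable `non_null_convenience` methods added in this diff; the variable names and values are invented for the example.

#![feature(non_null_convenience)]

use std::ptr::NonNull;

fn main() {
    let src = [1_u8, 2, 3, 4];
    let mut dst = [0_u8; 4];

    // Pointers covering the whole arrays, so the copy stays in bounds.
    let src_ptr: NonNull<u8> = NonNull::from(&src).cast();
    let dst_ptr: NonNull<u8> = NonNull::from(&mut dst).cast();

    // Method form: `self` is the destination and the argument is the source ...
    unsafe { dst_ptr.copy_from_nonoverlapping(src_ptr, src.len()) };
    assert_eq!(dst, src);

    // ... which is the reverse of the free function's (src, dst, count) order:
    // unsafe { std::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len()) };
}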

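Similarly, a small sketch of the in-place `write` / `replace` / `swap` additions, again illustrative only and not part of the patch, under the same assumption that the unstable `non_null_convenience` feature from this diff is available; the locals and values are made up for the demonstration.

#![feature(non_null_convenience)]

use std::ptr::NonNull;

fn main() {
    let mut a = 1_i32;
    let mut b = 2_i32;
    let pa = NonNull::from(&mut a);
    let pb = NonNull::from(&mut b);

    unsafe {
        // `write` stores a value without dropping the old one (irrelevant for i32).
        pa.write(10);
        // `replace` stores a value and hands back the previous one.
        let old = pb.replace(20);
        assert_eq!(old, 2);
        // `swap` exchanges the two pointees in place.
        pa.swap(pb);
    }

    assert_eq!((a, b), (20, 10));
}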