728 files changed, 10980 insertions, 6230 deletions
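Note for readers: a large share of the hunks below apply one mechanical pattern — replacing hard-coded English diagnostic strings with Fluent message constants generated from each crate's messages.ftl. The condensed sketch below restates that pattern using identifiers taken verbatim from the rustc_ast_lowering hunks in this diff; it is an abridged orientation aid, not standalone code (the surrounding LoweringContext method and imports other than the fluent alias are elided).

    // messages.ftl (entry added in this diff):
    //   ast_lowering_unstable_may_unwind = the `may_unwind` option is unstable

    use crate::fluent_generated as fluent;

    // Before: feature_err(&self.tcx.sess, sym::asm_unwind, sp,
    //                     "the `may_unwind` option is unstable").emit();
    // After: pass the generated Fluent message constant instead of an
    // untranslatable string literal.
    feature_err(
        &self.tcx.sess,
        sym::asm_unwind,
        sp,
        fluent::ast_lowering_unstable_may_unwind,
    )
    .emit();
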
diff --git a/compiler/rustc/src/main.rs b/compiler/rustc/src/main.rs index 29766fc9d87..e9a7397557e 100644 --- a/compiler/rustc/src/main.rs +++ b/compiler/rustc/src/main.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to `rustc_driver`. +#![feature(rustc_private)] + // A note about jemalloc: rustc uses jemalloc when built for CI and // distribution. The obvious way to do this is with the `#[global_allocator]` // mechanism. However, for complicated reasons (see diff --git a/compiler/rustc_ast_lowering/messages.ftl b/compiler/rustc_ast_lowering/messages.ftl index 9ed93d481e7..a5ee6713be8 100644 --- a/compiler/rustc_ast_lowering/messages.ftl +++ b/compiler/rustc_ast_lowering/messages.ftl @@ -167,11 +167,21 @@ ast_lowering_template_modifier = template modifier ast_lowering_this_not_async = this is not `async` +ast_lowering_underscore_array_length_unstable = + using `_` for array lengths is unstable + ast_lowering_underscore_expr_lhs_assign = in expressions, `_` can only be used on the left-hand side of an assignment .label = `_` not allowed here +ast_lowering_unstable_inline_assembly = inline assembly is not stable yet on this architecture +ast_lowering_unstable_inline_assembly_label_operands = + label operands for inline assembly are unstable +ast_lowering_unstable_may_unwind = the `may_unwind` option is unstable + ast_lowering_use_angle_brackets = use angle brackets instead + +ast_lowering_yield = yield syntax is experimental ast_lowering_yield_in_closure = `yield` can only be used in `#[coroutine]` closures, or `gen` blocks .suggestion = use `#[coroutine]` to make this closure a coroutine diff --git a/compiler/rustc_ast_lowering/src/asm.rs b/compiler/rustc_ast_lowering/src/asm.rs index ea7b8c114f4..7d9d689e6d7 100644 --- a/compiler/rustc_ast_lowering/src/asm.rs +++ b/compiler/rustc_ast_lowering/src/asm.rs @@ -19,10 +19,12 @@ use super::errors::{ InvalidRegisterClass, RegisterClassOnlyClobber, RegisterConflict, }; use super::LoweringContext; -use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt}; +use crate::{ + fluent_generated as fluent, ImplTraitContext, ImplTraitPosition, ParamMode, + ResolverAstLoweringExt, +}; impl<'a, 'hir> LoweringContext<'a, 'hir> { - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub(crate) fn lower_inline_asm( &mut self, sp: Span, @@ -52,7 +54,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &self.tcx.sess, sym::asm_experimental_arch, sp, - "inline assembly is not stable yet on this architecture", + fluent::ast_lowering_unstable_inline_assembly, ) .emit(); } @@ -64,8 +66,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp }); } if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind { - feature_err(&self.tcx.sess, sym::asm_unwind, sp, "the `may_unwind` option is unstable") - .emit(); + feature_err( + &self.tcx.sess, + sym::asm_unwind, + sp, + fluent::ast_lowering_unstable_may_unwind, + ) + .emit(); } let mut clobber_abis = FxIndexMap::default(); @@ -176,20 +183,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)), } } - InlineAsmOperand::Const { anon_const } => { - if !self.tcx.features().asm_const { - feature_err( - sess, - sym::asm_const, - *op_sp, - "const operands for inline assembly are unstable", - ) - .emit(); - } - hir::InlineAsmOperand::Const { - anon_const: self.lower_anon_const_to_anon_const(anon_const), - } 
- } + InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const { + anon_const: self.lower_anon_const_to_anon_const(anon_const), + }, InlineAsmOperand::Sym { sym } => { let static_def_id = self .resolver @@ -246,7 +242,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { sess, sym::asm_goto, *op_sp, - "label operands for inline assembly are unstable", + fluent::ast_lowering_unstable_inline_assembly_label_operands, ) .emit(); } diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 124fe6bd380..b5d8a547a8f 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -23,7 +23,7 @@ use super::{ ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs, ResolverAstLoweringExt, }; use crate::errors::YieldInClosure; -use crate::{FnDeclKind, ImplTraitPosition}; +use crate::{fluent_generated, FnDeclKind, ImplTraitPosition}; impl<'hir> LoweringContext<'_, 'hir> { fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] { @@ -1540,7 +1540,6 @@ impl<'hir> LoweringContext<'_, 'hir> { } } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> { let yielded = opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span)); @@ -1575,7 +1574,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &self.tcx.sess, sym::coroutines, span, - "yield syntax is experimental", + fluent_generated::ast_lowering_yield, ) .emit(); } @@ -1587,7 +1586,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &self.tcx.sess, sym::coroutines, span, - "yield syntax is experimental", + fluent_generated::ast_lowering_yield, ) .emit(); } diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 224787c335b..81d17a9dec2 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -2326,7 +2326,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.expr_block(block) } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn lower_array_length(&mut self, c: &AnonConst) -> hir::ArrayLen<'hir> { match c.value.kind { ExprKind::Underscore => { @@ -2340,7 +2339,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &self.tcx.sess, sym::generic_arg_infer, c.value.span, - "using `_` for array lengths is unstable", + fluent_generated::ast_lowering_underscore_array_length_unstable, ) .stash(c.value.span, StashKey::UnderscoreForArrayLengths); hir::ArrayLen::Body(self.lower_anon_const_to_const_arg(c)) diff --git a/compiler/rustc_attr/messages.ftl b/compiler/rustc_attr/messages.ftl index eb51e568f81..5d9ac23ec49 100644 --- a/compiler/rustc_attr/messages.ftl +++ b/compiler/rustc_attr/messages.ftl @@ -104,6 +104,9 @@ attr_unknown_meta_item = attr_unknown_version_literal = unknown version literal format, assuming it refers to a future version +attr_unstable_cfg_target_compact = + compact `cfg(target(..))` is experimental and subject to change + attr_unsupported_literal_cfg_string = literal in `cfg` predicate value must be a string attr_unsupported_literal_deprecated_kv_pair = diff --git a/compiler/rustc_attr/src/builtin.rs b/compiler/rustc_attr/src/builtin.rs index cf86a6942da..d057dcfdf9d 100644 --- a/compiler/rustc_attr/src/builtin.rs +++ b/compiler/rustc_attr/src/builtin.rs @@ -20,6 +20,7 @@ use rustc_span::hygiene::Transparency; use rustc_span::symbol::{sym, Symbol}; use rustc_span::Span; +use 
crate::fluent_generated; use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause}; /// The version placeholder that recently stabilized features contain inside the @@ -521,7 +522,6 @@ pub struct Condition { } /// Tests if a cfg-pattern matches the cfg set -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub fn cfg_matches( cfg: &ast::MetaItem, sess: &Session, @@ -593,7 +593,6 @@ pub fn parse_version(s: Symbol) -> Option<RustcVersion> { /// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to /// evaluate individual items. -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub fn eval_condition( cfg: &ast::MetaItem, sess: &Session, @@ -680,7 +679,7 @@ pub fn eval_condition( sess, sym::cfg_target_compact, cfg.span, - "compact `cfg(target(..))` is experimental and subject to change", + fluent_generated::attr_unstable_cfg_target_compact, ) .emit(); } diff --git a/compiler/rustc_borrowck/messages.ftl b/compiler/rustc_borrowck/messages.ftl index c14a617eb91..edb25e12864 100644 --- a/compiler/rustc_borrowck/messages.ftl +++ b/compiler/rustc_borrowck/messages.ftl @@ -62,6 +62,9 @@ borrowck_could_not_normalize = borrowck_could_not_prove = could not prove `{$predicate}` +borrowck_dereference_suggestion = + dereference the return value + borrowck_func_take_self_moved_place = `{$func}` takes ownership of the receiver `self`, which moves {$place_name} @@ -74,9 +77,24 @@ borrowck_higher_ranked_lifetime_error = borrowck_higher_ranked_subtype_error = higher-ranked subtype error +borrowck_implicit_static = + this has an implicit `'static` lifetime requirement + +borrowck_implicit_static_introduced = + calling this method introduces the `impl`'s `'static` requirement + +borrowck_implicit_static_relax = + consider relaxing the implicit `'static` requirement + borrowck_lifetime_constraints_error = lifetime may not live long enough +borrowck_limitations_implies_static = + due to current limitations in the borrow checker, this implies a `'static` lifetime + +borrowck_move_closure_suggestion = + consider adding 'move' keyword before the nested closure + borrowck_move_out_place_here = {$place} is moved here @@ -163,6 +181,9 @@ borrowck_partial_var_move_by_use_in_coroutine = *[false] moved } due to use in coroutine +borrowck_restrict_to_static = + consider restricting the type parameter to the `'static` lifetime + borrowck_returned_async_block_escaped = returns an `async` block that contains a reference to a captured variable, which then escapes the closure body diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index a58c7c43246..2b46e5597f7 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -3989,7 +3989,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } else { let ty = self.infcx.tcx.type_of(self.mir_def_id()).instantiate_identity(); match ty.kind() { - ty::FnDef(_, _) | ty::FnPtr(_) => self.annotate_fn_sig( + ty::FnDef(_, _) | ty::FnPtr(..) 
=> self.annotate_fn_sig( self.mir_def_id(), self.infcx.tcx.fn_sig(self.mir_def_id()).instantiate_identity(), ), diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index 3590e12274c..d85959c9a29 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -3,6 +3,8 @@ #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +use std::assert_matches::assert_matches; + use rustc_errors::{Applicability, Diag}; use rustc_hir as hir; use rustc_hir::intravisit::Visitor; @@ -116,7 +118,7 @@ impl<'tcx> BorrowExplanation<'tcx> { // path_span must be `Some` as otherwise the if condition is true let path_span = path_span.unwrap(); // path_span is only present in the case of closure capture - assert!(matches!(later_use_kind, LaterUseKind::ClosureCapture)); + assert_matches!(later_use_kind, LaterUseKind::ClosureCapture); if !borrow_span.is_some_and(|sp| sp.overlaps(var_or_use_span)) { let path_label = "used here by closure"; let capture_kind_label = message; @@ -147,7 +149,7 @@ impl<'tcx> BorrowExplanation<'tcx> { // path_span must be `Some` as otherwise the if condition is true let path_span = path_span.unwrap(); // path_span is only present in the case of closure capture - assert!(matches!(later_use_kind, LaterUseKind::ClosureCapture)); + assert_matches!(later_use_kind, LaterUseKind::ClosureCapture); if borrow_span.map(|sp| !sp.overlaps(var_or_use_span)).unwrap_or(true) { let path_label = "used here by closure"; let capture_kind_label = message; diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 82df9760d8e..451e8bcb16d 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -35,7 +35,7 @@ use crate::session_diagnostics::{ LifetimeReturnCategoryErr, RequireStaticErr, VarHereDenote, }; use crate::universal_regions::DefiningTy; -use crate::{borrowck_errors, MirBorrowckCtxt}; +use crate::{borrowck_errors, fluent_generated as fluent, MirBorrowckCtxt}; impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> { fn description(&self) -> &'static str { @@ -198,7 +198,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { // from higher-ranked trait bounds (HRTB). Try to locate span of the trait // and the span which bounded to the trait for adding 'static lifetime suggestion #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn suggest_static_lifetime_for_gat_from_hrtb( &self, diag: &mut Diag<'_>, @@ -251,23 +250,28 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { debug!(?hrtb_bounds); hrtb_bounds.iter().for_each(|bound| { - let Trait(PolyTraitRef { trait_ref, span: trait_span, .. }, _) = bound else { return; }; - diag.span_note( - *trait_span, - "due to current limitations in the borrow checker, this implies a `'static` lifetime" - ); - let Some(generics_fn) = hir.get_generics(self.body.source.def_id().expect_local()) else { return; }; - let Def(_, trait_res_defid) = trait_ref.path.res else { return; }; + let Trait(PolyTraitRef { trait_ref, span: trait_span, .. 
}, _) = bound else { + return; + }; + diag.span_note(*trait_span, fluent::borrowck_limitations_implies_static); + let Some(generics_fn) = hir.get_generics(self.body.source.def_id().expect_local()) + else { + return; + }; + let Def(_, trait_res_defid) = trait_ref.path.res else { + return; + }; debug!(?generics_fn); generics_fn.predicates.iter().for_each(|predicate| { - let BoundPredicate( - WhereBoundPredicate { - span: bounded_span, - bounded_ty, - bounds, - .. - } - ) = predicate else { return; }; + let BoundPredicate(WhereBoundPredicate { + span: bounded_span, + bounded_ty, + bounds, + .. + }) = predicate + else { + return; + }; bounds.iter().for_each(|bd| { if let Trait(PolyTraitRef { trait_ref: tr_ref, .. }, _) = bd && let Def(_, res_defid) = tr_ref.path.res @@ -277,16 +281,17 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { && generics_fn.params .iter() .rfind(|param| param.def_id.to_def_id() == defid) - .is_some() { - suggestions.push((bounded_span.shrink_to_hi(), " + 'static".to_string())); - } + .is_some() + { + suggestions.push((bounded_span.shrink_to_hi(), " + 'static".to_string())); + } }); }); }); if suggestions.len() > 0 { suggestions.dedup(); diag.multipart_suggestion_verbose( - "consider restricting the type parameter to the `'static` lifetime", + fluent::borrowck_restrict_to_static, suggestions, Applicability::MaybeIncorrect, ); @@ -976,7 +981,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable #[instrument(skip(self, err), level = "debug")] fn suggest_constrain_dyn_trait_in_impl( &self, @@ -994,16 +998,12 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { debug!("trait spans found: {:?}", traits); for span in &traits { let mut multi_span: MultiSpan = vec![*span].into(); - multi_span - .push_span_label(*span, "this has an implicit `'static` lifetime requirement"); - multi_span.push_span_label( - ident.span, - "calling this method introduces the `impl`'s `'static` requirement", - ); + multi_span.push_span_label(*span, fluent::borrowck_implicit_static); + multi_span.push_span_label(ident.span, fluent::borrowck_implicit_static_introduced); err.subdiagnostic(RequireStaticErr::UsedImpl { multi_span }); err.span_suggestion_verbose( span.shrink_to_hi(), - "consider relaxing the implicit `'static` requirement", + fluent::borrowck_implicit_static_relax, " + '_", Applicability::MaybeIncorrect, ); @@ -1045,7 +1045,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable /// When encountering a lifetime error caused by the return type of a closure, check the /// corresponding trait bound and see if dereferencing the closure return value would satisfy /// them. If so, we produce a structured suggestion. 
@@ -1166,7 +1165,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { if ocx.select_all_or_error().is_empty() && count > 0 { diag.span_suggestion_verbose( tcx.hir().body(*body).value.peel_blocks().span.shrink_to_lo(), - "dereference the return value", + fluent::borrowck_dereference_suggestion, "*".repeat(count), Applicability::MachineApplicable, ); @@ -1174,7 +1173,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn suggest_move_on_borrowing_closure(&self, diag: &mut Diag<'_>) { let map = self.infcx.tcx.hir(); let body = map.body_owned_by(self.mir_def_id()); @@ -1213,7 +1211,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { if let Some(closure_span) = closure_span { diag.span_suggestion_verbose( closure_span, - "consider adding 'move' keyword before the nested closure", + fluent::borrowck_move_closure_suggestion, "move ", Applicability::MaybeIncorrect, ); diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 9c2a0036bef..30dd45d847c 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -1644,7 +1644,7 @@ impl<'mir, 'tcx> MirBorrowckCtxt<'_, 'mir, '_, 'tcx> { | ty::Pat(_, _) | ty::Slice(_) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) @@ -1689,7 +1689,7 @@ impl<'mir, 'tcx> MirBorrowckCtxt<'_, 'mir, '_, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never diff --git a/compiler/rustc_borrowck/src/type_check/canonical.rs b/compiler/rustc_borrowck/src/type_check/canonical.rs index 86cd8b918fc..b58691fbeae 100644 --- a/compiler/rustc_borrowck/src/type_check/canonical.rs +++ b/compiler/rustc_borrowck/src/type_check/canonical.rs @@ -7,6 +7,7 @@ use rustc_middle::mir::ConstraintCategory; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, Upcast}; use rustc_span::def_id::DefId; use rustc_span::Span; +use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp; use rustc_trait_selection::traits::query::type_op::{self, TypeOpOutput}; use rustc_trait_selection::traits::ObligationCause; @@ -166,6 +167,52 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } #[instrument(skip(self), level = "debug")] + pub(super) fn struct_tail( + &mut self, + ty: Ty<'tcx>, + location: impl NormalizeLocation, + ) -> Ty<'tcx> { + let tcx = self.tcx(); + if self.infcx.next_trait_solver() { + let body = self.body; + let param_env = self.param_env; + self.fully_perform_op( + location.to_locations(), + ConstraintCategory::Boring, + CustomTypeOp::new( + |ocx| { + let structurally_normalize = |ty| { + ocx.structurally_normalize( + &ObligationCause::misc( + location.to_locations().span(body), + body.source.def_id().expect_local(), + ), + param_env, + ty, + ) + .unwrap_or_else(|_| bug!("struct tail should have been computable, since we computed it in HIR")) + }; + + let tail = tcx.struct_tail_raw( + ty, + structurally_normalize, + || {}, + ); + + Ok(tail) + }, + "normalizing struct tail", + ), + ) + .unwrap_or_else(|guar| Ty::new_error(tcx, guar)) + } else { + let mut normalize = |ty| self.normalize(ty, location); + let tail = tcx.struct_tail_raw(ty, &mut normalize, || {}); + normalize(tail) + } + } + + #[instrument(skip(self), level = "debug")] pub(super) fn ascribe_user_type( &mut self, mir_ty: Ty<'tcx>, 
diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index b13773ffe14..a2669da1b04 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -1364,7 +1364,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { debug!("func_ty.kind: {:?}", func_ty.kind()); let sig = match func_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => func_ty.fn_sig(tcx), + ty::FnDef(..) | ty::FnPtr(..) => func_ty.fn_sig(tcx), _ => { span_mirbug!(self, term, "call to non-function {:?}", func_ty); return; @@ -1989,9 +1989,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_fn_ptr_from = Ty::new_fn_ptr(tcx, fn_sig); - if let Err(terr) = self.eq_types( - *ty, + if let Err(terr) = self.sub_types( ty_fn_ptr_from, + *ty, location.to_locations(), ConstraintCategory::Cast { unsize_to: None }, ) { @@ -2014,9 +2014,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_fn_ptr_from = Ty::new_fn_ptr(tcx, tcx.signature_unclosure(sig, *safety)); - if let Err(terr) = self.eq_types( - *ty, + if let Err(terr) = self.sub_types( ty_fn_ptr_from, + *ty, location.to_locations(), ConstraintCategory::Cast { unsize_to: None }, ) { @@ -2329,17 +2329,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let cast_ty_to = CastTy::from_ty(*ty); match (cast_ty_from, cast_ty_to) { (Some(CastTy::Ptr(src)), Some(CastTy::Ptr(dst))) => { - let mut normalize = |t| self.normalize(t, location); - - // N.B. `struct_tail_with_normalize` only "structurally resolves" - // the type. It is not fully normalized, so we have to normalize it - // afterwards. - let src_tail = - tcx.struct_tail_with_normalize(src.ty, &mut normalize, || ()); - let src_tail = normalize(src_tail); - let dst_tail = - tcx.struct_tail_with_normalize(dst.ty, &mut normalize, || ()); - let dst_tail = normalize(dst_tail); + let src_tail = self.struct_tail(src.ty, location); + let dst_tail = self.struct_tail(dst.ty, location); // This checks (lifetime part of) vtable validity for pointer casts, // which is irrelevant when there are aren't principal traits on both sides (aka only auto traits). @@ -2420,7 +2411,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_left = left.ty(body, tcx); match ty_left.kind() { // Types with regions are comparable if they have a common super-type. - ty::RawPtr(_, _) | ty::FnPtr(_) => { + ty::RawPtr(_, _) | ty::FnPtr(..) => { let ty_right = right.ty(body, tcx); let common_ty = self.infcx.next_ty_var(body.source_info(location).span); self.sub_types( diff --git a/compiler/rustc_codegen_cranelift/src/common.rs b/compiler/rustc_codegen_cranelift/src/common.rs index a1b29a42225..b9000a3874f 100644 --- a/compiler/rustc_codegen_cranelift/src/common.rs +++ b/compiler/rustc_codegen_cranelift/src/common.rs @@ -69,7 +69,7 @@ fn clif_type_from_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<types::Typ FloatTy::F64 => types::F64, FloatTy::F128 => unimplemented!("f16_f128"), }, - ty::FnPtr(_) => pointer_ty(tcx), + ty::FnPtr(..) 
=> pointer_ty(tcx), ty::RawPtr(pointee_ty, _) | ty::Ref(_, pointee_ty, _) => { if has_ptr_meta(tcx, *pointee_ty) { return None; diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs index b21c559e668..29deac60730 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs @@ -725,7 +725,8 @@ fn codegen_regular_intrinsic_call<'tcx>( // Cranelift treats stores as volatile by default // FIXME correctly handle unaligned_volatile_store - // FIXME actually do nontemporal stores if requested + // FIXME actually do nontemporal stores if requested (but do not just emit MOVNT on x86; + // see the LLVM backend for details) let dest = CPlace::for_ptr(Pointer::new(ptr), val.layout()); dest.write_cvalue(fx, val); } diff --git a/compiler/rustc_codegen_cranelift/src/value_and_place.rs b/compiler/rustc_codegen_cranelift/src/value_and_place.rs index 8eb2095e523..fd77502224e 100644 --- a/compiler/rustc_codegen_cranelift/src/value_and_place.rs +++ b/compiler/rustc_codegen_cranelift/src/value_and_place.rs @@ -874,7 +874,7 @@ pub(crate) fn assert_assignable<'tcx>( (ty::Ref(_, a, _), ty::RawPtr(b, _)) | (ty::RawPtr(a, _), ty::Ref(_, b, _)) => { assert_assignable(fx, *a, *b, limit - 1); } - (ty::FnPtr(_), ty::FnPtr(_)) => { + (ty::FnPtr(..), ty::FnPtr(..)) => { let from_sig = fx.tcx.normalize_erasing_late_bound_regions( ParamEnv::reveal_all(), from_ty.fn_sig(fx.tcx), diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs index dabf6c5aa3e..83fa8059b1a 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/test.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs @@ -552,7 +552,7 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"--stage", &"0", &"tests/assembly/asm", - &"--rustc-args", + &"--compiletest-rustc-args", &rustc_args, ], Some(&rust_dir), @@ -1020,7 +1020,7 @@ where &"--stage", &"0", &format!("tests/{}", test_type), - &"--rustc-args", + &"--compiletest-rustc-args", &rustc_args, ], Some(&rust_path), diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index a64371a3d89..47b378cc1cd 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -1127,6 +1127,8 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { self.llbb().add_assignment(self.location, aligned_destination, val); // TODO(antoyo): handle align and flags. // NOTE: dummy value here since it's never used. FIXME(antoyo): API should not return a value here? + // When adding support for NONTEMPORAL, make sure to not just emit MOVNT on x86; see the + // LLVM backend for details. 
self.cx.context.new_rvalue_zero(self.type_i32()) } diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index 7a456e1c5d6..dca6b6494f9 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -160,6 +160,11 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> { self.context.new_struct_constructor(None, struct_type.as_type(), None, values) } + fn const_vector(&self, values: &[RValue<'gcc>]) -> RValue<'gcc> { + let typ = self.type_vector(values[0].get_type(), values.len() as u64); + self.context.new_rvalue_from_vector(None, typ, values) + } + fn const_to_opt_uint(&self, _v: RValue<'gcc>) -> Option<u64> { // TODO(antoyo) None diff --git a/compiler/rustc_codegen_gcc/src/type_of.rs b/compiler/rustc_codegen_gcc/src/type_of.rs index d85ed4f12ff..b7b1be5369c 100644 --- a/compiler/rustc_codegen_gcc/src/type_of.rs +++ b/compiler/rustc_codegen_gcc/src/type_of.rs @@ -213,9 +213,8 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> { // NOTE: we cannot remove this match like in the LLVM codegen because the call // to fn_ptr_backend_type handle the on-stack attribute. // TODO(antoyo): find a less hackish way to hande the on-stack attribute. - ty::FnPtr(sig) => { - cx.fn_ptr_backend_type(cx.fn_abi_of_fn_ptr(sig, ty::List::empty())) - } + ty::FnPtr(sig_tys, hdr) => cx + .fn_ptr_backend_type(cx.fn_abi_of_fn_ptr(sig_tys.with(hdr), ty::List::empty())), _ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO), }; cx.scalar_types.borrow_mut().insert(self.ty, ty); diff --git a/compiler/rustc_codegen_gcc/tests/run/asm.rs b/compiler/rustc_codegen_gcc/tests/run/asm.rs index 56f2aac3d0a..4e05d026868 100644 --- a/compiler/rustc_codegen_gcc/tests/run/asm.rs +++ b/compiler/rustc_codegen_gcc/tests/run/asm.rs @@ -3,12 +3,10 @@ // Run-time: // status: 0 -#![feature(asm_const)] - -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] use std::arch::{asm, global_asm}; -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] global_asm!( " .global add_asm @@ -22,7 +20,7 @@ extern "C" { fn add_asm(a: i64, b: i64) -> i64; } -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] pub unsafe fn mem_cpy(dst: *mut u8, src: *const u8, len: usize) { asm!( "rep movsb", @@ -33,7 +31,7 @@ pub unsafe fn mem_cpy(dst: *mut u8, src: *const u8, len: usize) { ); } -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] fn asm() { unsafe { asm!("nop"); @@ -178,9 +176,8 @@ fn asm() { assert_eq!(array1, array2); } -#[cfg(not(target_arch="x86_64"))] -fn asm() { -} +#[cfg(not(target_arch = "x86_64"))] +fn asm() {} fn main() { asm(); diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 1277b7898c2..5ff580e295a 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -430,9 +430,32 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { i += 1; i - 1 }; + + let apply_range_attr = |idx: AttributePlace, scalar: rustc_target::abi::Scalar| { + if cx.sess().opts.optimize != config::OptLevel::No + && llvm_util::get_version() >= (19, 0, 0) + && matches!(scalar.primitive(), Int(..)) + // If the value is a boolean, the range is 0..2 and that ultimately + // become 0..0 when the type becomes i1, which would be rejected + // by the LLVM verifier. + && !scalar.is_bool() + // LLVM also rejects full range. 
+ && !scalar.is_always_valid(cx) + { + attributes::apply_to_llfn( + llfn, + idx, + &[llvm::CreateRangeAttr(cx.llcx, scalar.size(cx), scalar.valid_range(cx))], + ); + } + }; + match &self.ret.mode { PassMode::Direct(attrs) => { attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn); + if let abi::Abi::Scalar(scalar) = self.ret.layout.abi { + apply_range_attr(llvm::AttributePlace::ReturnValue, scalar); + } } PassMode::Indirect { attrs, meta_attrs: _, on_stack } => { assert!(!on_stack); @@ -471,8 +494,13 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { ); attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]); } - PassMode::Direct(attrs) - | PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => { + PassMode::Direct(attrs) => { + let i = apply(attrs); + if let abi::Abi::Scalar(scalar) = arg.layout.abi { + apply_range_attr(llvm::AttributePlace::Argument(i), scalar); + } + } + PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => { apply(attrs); } PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => { @@ -481,8 +509,12 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { apply(meta_attrs); } PassMode::Pair(a, b) => { - apply(a); - apply(b); + let i = apply(a); + let ii = apply(b); + if let abi::Abi::ScalarPair(scalar_a, scalar_b) = arg.layout.abi { + apply_range_attr(llvm::AttributePlace::Argument(i), scalar_a); + apply_range_attr(llvm::AttributePlace::Argument(ii), scalar_b); + } } PassMode::Cast { cast, pad_i32 } => { if *pad_i32 { @@ -537,15 +569,18 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } _ => {} } - if let abi::Abi::Scalar(scalar) = self.ret.layout.abi { - // If the value is a boolean, the range is 0..2 and that ultimately - // become 0..0 when the type becomes i1, which would be rejected - // by the LLVM verifier. - if let Int(..) = scalar.primitive() { - if !scalar.is_bool() && !scalar.is_always_valid(bx) { - bx.range_metadata(callsite, scalar.valid_range(bx)); - } - } + if bx.cx.sess().opts.optimize != config::OptLevel::No + && llvm_util::get_version() < (19, 0, 0) + && let abi::Abi::Scalar(scalar) = self.ret.layout.abi + && matches!(scalar.primitive(), Int(..)) + // If the value is a boolean, the range is 0..2 and that ultimately + // become 0..0 when the type becomes i1, which would be rejected + // by the LLVM verifier. + && !scalar.is_bool() + // LLVM also rejects full range. + && !scalar.is_always_valid(bx) + { + bx.range_metadata(callsite, scalar.valid_range(bx)); } for arg in self.args.iter() { match &arg.mode { diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index aea8395441a..f931698c38f 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use libc::{c_char, c_uint}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_codegen_ssa::mir::operand::OperandValue; @@ -89,7 +91,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { // if the target feature needed by the register class is // disabled. This is necessary otherwise LLVM will try // to actually allocate a register for the dummy output. 
- assert!(matches!(reg, InlineAsmRegOrRegClass::Reg(_))); + assert_matches!(reg, InlineAsmRegOrRegClass::Reg(_)); clobbers.push(format!("~{}", reg_to_llvm(reg, None))); continue; } else { diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 0f44c5dba5e..cc081f29e12 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -728,13 +728,32 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { llvm::LLVMSetVolatile(store, llvm::True); } if flags.contains(MemFlags::NONTEMPORAL) { - // According to LLVM [1] building a nontemporal store must - // *always* point to a metadata value of the integer 1. - // - // [1]: https://llvm.org/docs/LangRef.html#store-instruction - let one = self.cx.const_i32(1); - let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1); - llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node); + // Make sure that the current target architectures supports "sane" non-temporal + // stores, i.e., non-temporal stores that are equivalent to regular stores except + // for performance. LLVM doesn't seem to care about this, and will happily treat + // `!nontemporal` stores as-if they were normal stores (for reordering optimizations + // etc) even on x86, despite later lowering them to MOVNT which do *not* behave like + // regular stores but require special fences. + // So we keep a list of architectures where `!nontemporal` is known to be truly just + // a hint, and use regular stores everywhere else. + // (In the future, we could alternatively ensure that an sfence gets emitted after a sequence of movnt + // before any kind of synchronizing operation. But it's not clear how to do that with LLVM.) + // For more context, see <https://github.com/rust-lang/rust/issues/114582> and + // <https://github.com/llvm/llvm-project/issues/64521>. + const WELL_BEHAVED_NONTEMPORAL_ARCHS: &[&str] = + &["aarch64", "arm", "riscv32", "riscv64"]; + + let use_nontemporal = + WELL_BEHAVED_NONTEMPORAL_ARCHS.contains(&&*self.cx.tcx.sess.target.arch); + if use_nontemporal { + // According to LLVM [1] building a nontemporal store must + // *always* point to a metadata value of the integer 1. 
+ // + // [1]: https://llvm.org/docs/LangRef.html#store-instruction + let one = self.cx.const_i32(1); + let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1); + llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node); + } } store } diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index 65f974c5689..a3997900184 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -97,11 +97,6 @@ impl<'ll> CodegenCx<'ll, '_> { unsafe { llvm::LLVMConstArray2(ty, elts.as_ptr(), len) } } - pub fn const_vector(&self, elts: &[&'ll Value]) -> &'ll Value { - let len = c_uint::try_from(elts.len()).expect("LLVMConstVector elements len overflow"); - unsafe { llvm::LLVMConstVector(elts.as_ptr(), len) } - } - pub fn const_bytes(&self, bytes: &[u8]) -> &'ll Value { bytes_in_context(self.llcx, bytes) } @@ -221,6 +216,11 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> { struct_in_context(self.llcx, elts, packed) } + fn const_vector(&self, elts: &[&'ll Value]) -> &'ll Value { + let len = c_uint::try_from(elts.len()).expect("LLVMConstVector elements len overflow"); + unsafe { llvm::LLVMConstVector(elts.as_ptr(), len) } + } + fn const_to_opt_uint(&self, v: &'ll Value) -> Option<u64> { try_as_const_integral(v).and_then(|v| unsafe { let mut i = 0u64; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs index ad638588612..87bea22d8dd 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs @@ -456,7 +456,7 @@ pub fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll D { build_pointer_or_reference_di_node(cx, t, t.boxed_ty(), unique_type_id) } - ty::FnDef(..) | ty::FnPtr(_) => build_subroutine_type_di_node(cx, unique_type_id), + ty::FnDef(..) | ty::FnPtr(..) => build_subroutine_type_di_node(cx, unique_type_id), ty::Closure(..) => build_closure_env_di_node(cx, unique_type_id), ty::CoroutineClosure(..) => build_closure_env_di_node(cx, unique_type_id), ty::Coroutine(..) 
=> enums::build_coroutine_di_node(cx, unique_type_id), diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 57d5f6fdf50..f5558723d11 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::cmp::Ordering; use rustc_codegen_ssa::base::{compare_simd_types, wants_msvc_seh, wants_wasm_eh}; @@ -1142,7 +1143,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>( if cfg!(debug_assertions) { for (ty, arg) in arg_tys.iter().zip(args) { if ty.is_simd() { - assert!(matches!(arg.val, OperandValue::Immediate(_))); + assert_matches!(arg.val, OperandValue::Immediate(_)); } } } diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index 518a86e0cb0..43164390a1c 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -8,6 +8,7 @@ #![allow(internal_features)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(exact_size_is_empty)] #![feature(extern_types)] #![feature(hash_raw_entry)] diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 80b13c0e1d4..faabbcb020d 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -1575,6 +1575,12 @@ extern "C" { pub fn LLVMRustCreateAllocSizeAttr(C: &Context, size_arg: u32) -> &Attribute; pub fn LLVMRustCreateAllocKindAttr(C: &Context, size_arg: u64) -> &Attribute; pub fn LLVMRustCreateMemoryEffectsAttr(C: &Context, effects: MemoryEffects) -> &Attribute; + pub fn LLVMRustCreateRangeAttribute( + C: &Context, + num_bits: c_uint, + lower_words: *const u64, + upper_words: *const u64, + ) -> &Attribute; // Operations on functions pub fn LLVMRustGetOrInsertFunction<'a>( diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs index 72691907c0d..d0db350a149 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs @@ -8,7 +8,7 @@ use std::string::FromUtf8Error; use libc::c_uint; use rustc_data_structures::small_c_str::SmallCStr; use rustc_llvm::RustString; -use rustc_target::abi::Align; +use rustc_target::abi::{Align, Size, WrappingRange}; pub use self::AtomicRmwBinOp::*; pub use self::CallConv::*; @@ -105,6 +105,21 @@ pub fn CreateAllocKindAttr(llcx: &Context, kind_arg: AllocKindFlags) -> &Attribu unsafe { LLVMRustCreateAllocKindAttr(llcx, kind_arg.bits()) } } +pub fn CreateRangeAttr(llcx: &Context, size: Size, range: WrappingRange) -> &Attribute { + let lower = range.start; + let upper = range.end.wrapping_add(1); + let lower_words = [lower as u64, (lower >> 64) as u64]; + let upper_words = [upper as u64, (upper >> 64) as u64]; + unsafe { + LLVMRustCreateRangeAttribute( + llcx, + size.bits().try_into().unwrap(), + lower_words.as_ptr(), + upper_words.as_ptr(), + ) + } +} + #[derive(Copy, Clone)] pub enum AttributePlace { ReturnValue, diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index bea12747a51..70b45a852ca 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -1,4 +1,5 @@ use std::any::Any; +use std::assert_matches::assert_matches; use std::marker::PhantomData; use std::path::{Path, PathBuf}; use std::sync::mpsc::{channel, Receiver, 
Sender}; @@ -1963,7 +1964,7 @@ impl SharedEmitterMain { sess.dcx().abort_if_errors(); } Ok(SharedEmitterMessage::InlineAsmError(cookie, msg, level, source)) => { - assert!(matches!(level, Level::Error | Level::Warning | Level::Note)); + assert_matches!(level, Level::Error | Level::Warning | Level::Note); let msg = msg.strip_prefix("error: ").unwrap_or(&msg).to_string(); let mut err = Diag::<()>::new(sess.dcx(), level, msg); diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs index 27558038927..359043a6d78 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs @@ -331,7 +331,7 @@ fn push_debuginfo_type_name<'tcx>( output.push(')'); } } - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { // We've encountered a weird 'recursive type' // Currently, the only way to generate such a type // is by using 'impl trait': diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 1b029660433..cb6d9d6f66e 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -4,6 +4,7 @@ #![allow(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(let_chains)] diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index bc3076528da..817e2ca72ec 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -846,7 +846,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ), None, ), - ty::FnPtr(_) => (None, Some(callee.immediate())), + ty::FnPtr(..) => (None, Some(callee.immediate())), _ => bug!("{} is not callable", callee.layout.ty), }; @@ -923,8 +923,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // third argument must be constant. This is // checked by the type-checker. if i == 2 && intrinsic.name == sym::simd_shuffle { + // FIXME: the simd_shuffle argument is actually an array, + // not a vector, so we need this special hack to make sure + // it is passed as an immediate. We should pass the + // shuffle indices as a vector instead to avoid this hack. if let mir::Operand::Constant(constant) = &arg.node { - let (llval, ty) = self.simd_shuffle_indices(bx, constant); + let (llval, ty) = self.immediate_const_vector(bx, constant); return OperandRef { val: Immediate(llval), layout: bx.layout_of(ty), diff --git a/compiler/rustc_codegen_ssa/src/mir/constant.rs b/compiler/rustc_codegen_ssa/src/mir/constant.rs index f4d974f7036..0aa85b82038 100644 --- a/compiler/rustc_codegen_ssa/src/mir/constant.rs +++ b/compiler/rustc_codegen_ssa/src/mir/constant.rs @@ -1,6 +1,6 @@ use rustc_middle::mir::interpret::ErrorHandled; use rustc_middle::ty::layout::HasTyCtxt; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::{self, Ty, ValTree}; use rustc_middle::{bug, mir, span_bug}; use rustc_target::abi::Abi; @@ -28,7 +28,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { .expect("erroneous constant missed by mono item collection") } - /// This is a convenience helper for `simd_shuffle_indices`. It has the precondition + /// This is a convenience helper for `immediate_const_vector`. 
It has the precondition /// that the given `constant` is an `Const::Unevaluated` and must be convertible to /// a `ValTree`. If you want a more general version of this, talk to `wg-const-eval` on zulip. /// @@ -59,23 +59,42 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.cx.tcx().const_eval_resolve_for_typeck(ty::ParamEnv::reveal_all(), uv, constant.span) } - /// process constant containing SIMD shuffle indices - pub fn simd_shuffle_indices( + /// process constant containing SIMD shuffle indices & constant vectors + pub fn immediate_const_vector( &mut self, bx: &Bx, constant: &mir::ConstOperand<'tcx>, ) -> (Bx::Value, Ty<'tcx>) { let ty = self.monomorphize(constant.ty()); + let ty_is_simd = ty.is_simd(); + // FIXME: ideally we'd assert that this is a SIMD type, but simd_shuffle + // in its current form relies on a regular array being passed as an + // immediate argument. This hack can be removed once that is fixed. + let field_ty = if ty_is_simd { + ty.simd_size_and_type(bx.tcx()).1 + } else { + ty.builtin_index().unwrap() + }; + let val = self .eval_unevaluated_mir_constant_to_valtree(constant) .ok() .map(|x| x.ok()) .flatten() .map(|val| { - let field_ty = ty.builtin_index().unwrap(); - let values: Vec<_> = val - .unwrap_branch() - .iter() + // Depending on whether this is a SIMD type with an array field + // or a type with many fields (one for each elements), the valtree + // is either a single branch with N children, or a root node + // with exactly one child which then in turn has many children. + // So we look at the first child to determine whether it is a + // leaf or whether we have to go one more layer down. + let branch_or_leaf = val.unwrap_branch(); + let first = branch_or_leaf.get(0).unwrap(); + let field_iter = match first { + ValTree::Branch(_) => first.unwrap_branch().iter(), + ValTree::Leaf(_) => branch_or_leaf.iter(), + }; + let values: Vec<_> = field_iter .map(|field| { if let Some(prim) = field.try_to_scalar() { let layout = bx.layout_of(field_ty); @@ -84,11 +103,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { }; bx.scalar_to_backend(prim, scalar, bx.immediate_backend_type(layout)) } else { - bug!("simd shuffle field {:?}", field) + bug!("field is not a scalar {:?}", field) } }) .collect(); - bx.const_struct(&values, false) + if ty_is_simd { bx.const_vector(&values) } else { bx.const_struct(&values, false) } }) .unwrap_or_else(|| { bx.tcx().dcx().emit_err(errors::ShuffleIndicesEvaluation { span: constant.span }); diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs index 2bc2d0f70bf..1891de8c0eb 100644 --- a/compiler/rustc_codegen_ssa/src/mir/operand.rs +++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::fmt; use arrayvec::ArrayVec; @@ -389,7 +390,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { } // Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]); (OperandValue::Immediate(llval), Abi::Aggregate { sized: true }) => { - assert!(matches!(self.layout.abi, Abi::Vector { .. })); + assert_matches!(self.layout.abi, Abi::Vector { .. 
}); let llfield_ty = bx.cx().backend_type(field); @@ -635,7 +636,24 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.codegen_consume(bx, place.as_ref()) } - mir::Operand::Constant(ref constant) => self.eval_mir_constant_to_operand(bx, constant), + mir::Operand::Constant(ref constant) => { + let constant_ty = self.monomorphize(constant.ty()); + // Most SIMD vector constants should be passed as immediates. + // (In particular, some intrinsics really rely on this.) + if constant_ty.is_simd() { + // However, some SIMD types do not actually use the vector ABI + // (in particular, packed SIMD types do not). Ensure we exclude those. + let layout = bx.layout_of(constant_ty); + if let Abi::Vector { .. } = layout.abi { + let (llval, ty) = self.immediate_const_vector(bx, constant); + return OperandRef { + val: OperandValue::Immediate(llval), + layout: bx.layout_of(ty), + }; + } + } + self.eval_mir_constant_to_operand(bx, constant) + } } } } diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index d91a118bc71..3c2c29ac7f7 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use arrayvec::ArrayVec; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout}; @@ -220,7 +222,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { match operand.val { OperandValue::Ref(source_place_val) => { assert_eq!(source_place_val.llextra, None); - assert!(matches!(operand_kind, OperandValueKind::Ref)); + assert_matches!(operand_kind, OperandValueKind::Ref); Some(bx.load_operand(source_place_val.with_type(cast)).val) } OperandValue::ZeroSized => { diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs index 2b802240e03..6cf84a012f0 100644 --- a/compiler/rustc_codegen_ssa/src/traits/builder.rs +++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::ty::layout::{HasParamEnv, TyAndLayout}; use rustc_middle::ty::{Instance, Ty}; @@ -254,10 +256,10 @@ pub trait BuilderMethods<'a, 'tcx>: } else { (in_ty, dest_ty) }; - assert!(matches!( + assert_matches!( self.cx().type_kind(float_ty), TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::FP128 - )); + ); assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer); if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts { diff --git a/compiler/rustc_codegen_ssa/src/traits/consts.rs b/compiler/rustc_codegen_ssa/src/traits/consts.rs index d93b3e06ca6..c15f1fa8e56 100644 --- a/compiler/rustc_codegen_ssa/src/traits/consts.rs +++ b/compiler/rustc_codegen_ssa/src/traits/consts.rs @@ -30,6 +30,7 @@ pub trait ConstMethods<'tcx>: BackendTypes { fn const_str(&self, s: &str) -> (Self::Value, Self::Value); fn const_struct(&self, elts: &[Self::Value], packed: bool) -> Self::Value; + fn const_vector(&self, elts: &[Self::Value]) -> Self::Value; fn const_to_opt_uint(&self, v: Self::Value) -> Option<u64>; fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option<u128>; diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index c64c73b2323..1442f1832b9 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ 
b/compiler/rustc_const_eval/messages.ftl @@ -41,6 +41,8 @@ const_eval_const_context = {$kind -> *[other] {""} } +const_eval_const_stable = const-stable functions can only call other const-stable functions + const_eval_copy_nonoverlapping_overlapping = `copy_nonoverlapping` called on overlapping ranges @@ -201,6 +203,9 @@ const_eval_invalid_vtable_pointer = const_eval_invalid_vtable_trait = using vtable for trait `{$vtable_trait}` but trait `{$expected_trait}` was expected +const_eval_lazy_lock = + consider wrapping this expression in `std::sync::LazyLock::new(|| ...)` + const_eval_live_drop = destructor of `{$dropped_ty}` cannot be evaluated at compile-time .label = the destructor for this type cannot be evaluated in {const_eval_const_context}s diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index 3ded81b90ff..32a9247bcc7 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -1,5 +1,6 @@ //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations. +use std::assert_matches::assert_matches; use std::mem; use std::ops::Deref; @@ -170,7 +171,7 @@ struct LocalReturnTyVisitor<'ck, 'mir, 'tcx> { impl<'ck, 'mir, 'tcx> TypeVisitor<TyCtxt<'tcx>> for LocalReturnTyVisitor<'ck, 'mir, 'tcx> { fn visit_ty(&mut self, t: Ty<'tcx>) { match t.kind() { - ty::FnPtr(_) => {} + ty::FnPtr(..) => {} ty::Ref(_, _, hir::Mutability::Mut) => { self.checker.check_op(ops::mut_ref::MutRef(self.kind)); t.super_visit_with(self) @@ -590,7 +591,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) { // Int, bool, and char operations are fine. } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() { - assert!(matches!( + assert_matches!( op, BinOp::Eq | BinOp::Ne @@ -599,7 +600,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { | BinOp::Ge | BinOp::Gt | BinOp::Offset - )); + ); self.check_op(ops::RawPtrComparison); } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() { @@ -725,7 +726,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { let (mut callee, mut fn_args) = match *fn_ty.kind() { ty::FnDef(def_id, fn_args) => (def_id, fn_args), - ty::FnPtr(_) => { + ty::FnPtr(..) 
=> { self.check_op(ops::FnCallIndirect); return; } diff --git a/compiler/rustc_const_eval/src/check_consts/ops.rs b/compiler/rustc_const_eval/src/check_consts/ops.rs index f47a2ec8f75..c6361710ac9 100644 --- a/compiler/rustc_const_eval/src/check_consts/ops.rs +++ b/compiler/rustc_const_eval/src/check_consts/ops.rs @@ -23,7 +23,7 @@ use rustc_trait_selection::traits::SelectionContext; use tracing::debug; use super::ConstCx; -use crate::errors; +use crate::{errors, fluent_generated}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Status { @@ -310,7 +310,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> { } if let ConstContext::Static(_) = ccx.const_kind() { - err.note("consider wrapping this expression in `std::sync::LazyLock::new(|| ...)`"); + err.note(fluent_generated::const_eval_lazy_lock); } err @@ -334,7 +334,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallUnstable { // FIXME: make this translatable #[allow(rustc::untranslatable_diagnostic)] if ccx.is_const_stable_const_fn() { - err.help("const-stable functions can only call other const-stable functions"); + err.help(fluent_generated::const_eval_const_stable); } else if ccx.tcx.sess.is_nightly_build() { if let Some(feature) = feature { err.help(format!("add `#![feature({feature})]` to the crate attributes to enable")); @@ -605,8 +605,6 @@ impl<'tcx> NonConstOp<'tcx> for StaticAccess { span, format!("referencing statics in {}s is unstable", ccx.const_kind(),), ); - // FIXME: make this translatable - #[allow(rustc::untranslatable_diagnostic)] err .note("`static` and `const` variables can refer to other `const` variables. A `const` variable, however, cannot refer to a `static` variable.") .help("to fix this, the value can be extracted to a `const` and then used."); diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index ff27e400016..96b3ec6f187 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -226,7 +226,7 @@ pub(super) fn op_to_const<'tcx>( let pointee_ty = imm.layout.ty.builtin_deref(false).unwrap(); // `false` = no raw ptrs debug_assert!( matches!( - ecx.tcx.struct_tail_without_normalization(pointee_ty).kind(), + ecx.tcx.struct_tail_for_codegen(pointee_ty, ecx.param_env).kind(), ty::Str | ty::Slice(..), ), "`ConstValue::Slice` is for slice-tailed types only, but got {}", diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs index 8227c045948..c96296eddb8 100644 --- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs +++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs @@ -132,7 +132,7 @@ fn const_to_valtree_inner<'tcx>( // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to // agree with runtime equality tests. - ty::FnPtr(_) => Err(ValTreeCreationError::NonSupportedType(ty)), + ty::FnPtr(..) => Err(ValTreeCreationError::NonSupportedType(ty)), ty::Ref(_, _, _) => { let derefd_place = ecx.deref_pointer(place)?; @@ -195,7 +195,7 @@ fn reconstruct_place_meta<'tcx>( let mut last_valtree = valtree; // Traverse the type, and update `last_valtree` as we go. - let tail = tcx.struct_tail_with_normalize( + let tail = tcx.struct_tail_raw( layout.ty, |ty| ty, || { @@ -353,7 +353,7 @@ pub fn valtree_to_const_value<'tcx>( | ty::CoroutineClosure(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Str | ty::Slice(_) | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()), diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index fdbdfc7e1b8..917a2fa7c6d 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -1,5 +1,6 @@ //! Manages calling a concrete function (with known MIR body) with argument passing, //! and returning the return value to the caller. +use std::assert_matches::assert_matches; use std::borrow::Cow; use either::{Left, Right}; @@ -557,7 +558,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { unwind, )? { assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden); - assert!(matches!(fallback.def, ty::InstanceKind::Item(_))); + assert_matches!(fallback.def, ty::InstanceKind::Item(_)); return self.init_fn_call( FnVal::Instance(fallback), (caller_abi, caller_fn_abi), diff --git a/compiler/rustc_const_eval/src/interpret/cast.rs b/compiler/rustc_const_eval/src/interpret/cast.rs index c3d7f2234ed..4901e4b2a41 100644 --- a/compiler/rustc_const_eval/src/interpret/cast.rs +++ b/compiler/rustc_const_eval/src/interpret/cast.rs @@ -97,7 +97,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer) => { let src = self.read_immediate(src)?; match cast_ty.kind() { - ty::FnPtr(_) => { + ty::FnPtr(..) => { // No change to value self.write_immediate(*src, dest)?; } @@ -230,7 +230,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { src: &ImmTy<'tcx, M::Provenance>, cast_to: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> { - assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(_)); + assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..)); assert!(cast_to.ty.is_integral()); let scalar = src.to_scalar(); @@ -400,6 +400,12 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => { let val = self.read_immediate(src)?; + // MIR building generates odd NOP casts, prevent them from causing unexpected trouble. + // See <https://github.com/rust-lang/rust/issues/128880>. + // FIXME: ideally we wouldn't have to do this. + if data_a == data_b { + return self.write_immediate(*val, dest); + } // Take apart the old pointer, and find the dynamic type. let (old_data, old_vptr) = val.to_scalar_pair(); let old_data = old_data.to_pointer(self)?; diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index 9210ec4e16f..aef39b9af2f 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -2,6 +2,8 @@ //! looking at their MIR. Intrinsics/functions supported here are shared by CTFE //! and miri. +use std::assert_matches::assert_matches; + use rustc_hir::def_id::DefId; use rustc_middle::mir::{self, BinOp, ConstValue, NonDivergingIntrinsic}; use rustc_middle::ty::layout::{LayoutOf as _, TyAndLayout, ValidityRequirement}; @@ -79,7 +81,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>( | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) @@ -510,7 +512,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { dest: &MPlaceTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx> { assert_eq!(a.layout.ty, b.layout.ty); - assert!(matches!(a.layout.ty.kind(), ty::Int(..) | ty::Uint(..))); + assert_matches!(a.layout.ty.kind(), ty::Int(..) | ty::Uint(..)); // Performs an exact division, resulting in undefined behavior where // `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`. @@ -536,8 +538,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { r: &ImmTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx, Scalar<M::Provenance>> { assert_eq!(l.layout.ty, r.layout.ty); - assert!(matches!(l.layout.ty.kind(), ty::Int(..) | ty::Uint(..))); - assert!(matches!(mir_op, BinOp::Add | BinOp::Sub)); + assert_matches!(l.layout.ty.kind(), ty::Int(..) | ty::Uint(..)); + assert_matches!(mir_op, BinOp::Add | BinOp::Sub); let (val, overflowed) = self.binary_op(mir_op.wrapping_to_overflowing().unwrap(), l, r)?.to_scalar_pair(); diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs index 7af4e0c285b..761ab81e228 100644 --- a/compiler/rustc_const_eval/src/interpret/machine.rs +++ b/compiler/rustc_const_eval/src/interpret/machine.rs @@ -19,7 +19,7 @@ use rustc_target::spec::abi::Abi as CallAbi; use super::{ throw_unsup, throw_unsup_format, AllocBytes, AllocId, AllocKind, AllocRange, Allocation, ConstAllocation, CtfeProvenance, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, - MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, + MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, CTFE_ALLOC_SALT, }; /// Data returned by [`Machine::after_stack_pop`], and consumed by @@ -575,6 +575,14 @@ pub trait Machine<'tcx>: Sized { { eval(ecx, val, span, layout) } + + /// Returns the salt to be used for a deduplicated global alloation. + /// If the allocation is for a function, the instance is provided as well + /// (this lets Miri ensure unique addresses for some functions). + fn get_global_alloc_salt( + ecx: &InterpCx<'tcx, Self>, + instance: Option<ty::Instance<'tcx>>, + ) -> usize; } /// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines @@ -677,4 +685,12 @@ pub macro compile_time_machine(<$tcx: lifetime>) { let (prov, offset) = ptr.into_parts(); Some((prov.alloc_id(), offset, prov.immutable())) } + + #[inline(always)] + fn get_global_alloc_salt( + _ecx: &InterpCx<$tcx, Self>, + _instance: Option<ty::Instance<$tcx>>, + ) -> usize { + CTFE_ALLOC_SALT + } } diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 2e5d0ae7736..910aec9b8e1 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -195,7 +195,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> { let id = match fn_val { - FnVal::Instance(instance) => self.tcx.reserve_and_set_fn_alloc(instance), + FnVal::Instance(instance) => { + let salt = M::get_global_alloc_salt(self, Some(instance)); + self.tcx.reserve_and_set_fn_alloc(instance, salt) + } FnVal::Other(extra) => { // FIXME(RalfJung): Should we have a cache here? 
let id = self.tcx.reserve_alloc_id(); diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 4ced009ab39..ad87d6953d3 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -342,7 +342,7 @@ impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> { } // extract fields from types with `ScalarPair` ABI (Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => { - assert!(matches!(layout.abi, Abi::Scalar(..))); + assert_matches!(layout.abi, Abi::Scalar(..)); Immediate::from(if offset.bytes() == 0 { debug_assert_eq!(layout.size, a.size(cx)); a_val diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index 470a62026b9..2afdd02c880 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -1008,7 +1008,8 @@ where // Use cache for immutable strings. let ptr = if mutbl.is_not() { // Use dedup'd allocation function. - let id = tcx.allocate_bytes_dedup(str.as_bytes()); + let salt = M::get_global_alloc_salt(self, None); + let id = tcx.allocate_bytes_dedup(str.as_bytes(), salt); // Turn untagged "global" pointers (obtained via `tcx`) into the machine pointer to the allocation. M::adjust_alloc_root_pointer(&self, Pointer::from(id), Some(kind))? diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 50dbced6a2a..0f6bf5c0336 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -483,7 +483,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index 2527eca3446..aaee6f6d247 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -424,7 +424,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty)); let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() { - ty::FnPtr(_sig) => { + ty::FnPtr(..) 
=> { let fn_ptr = self.read_pointer(&func)?; let fn_val = self.get_ptr_fn(fn_ptr)?; (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false) diff --git a/compiler/rustc_const_eval/src/interpret/traits.rs b/compiler/rustc_const_eval/src/interpret/traits.rs index fb50661b826..cd4faf06655 100644 --- a/compiler/rustc_const_eval/src/interpret/traits.rs +++ b/compiler/rustc_const_eval/src/interpret/traits.rs @@ -28,7 +28,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { ensure_monomorphic_enough(*self.tcx, ty)?; ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?; - let vtable_symbolic_allocation = self.tcx.reserve_and_set_vtable_alloc(ty, poly_trait_ref); + let salt = M::get_global_alloc_salt(self, None); + let vtable_symbolic_allocation = + self.tcx.reserve_and_set_vtable_alloc(ty, poly_trait_ref, salt); let vtable_ptr = self.global_root_pointer(Pointer::from(vtable_symbolic_allocation))?; Ok(vtable_ptr.into()) } diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index fadc4ee6c8a..26b7251f6db 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -616,7 +616,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?; Ok(true) } - ty::FnPtr(_sig) => { + ty::FnPtr(..) => { let value = self.read_scalar(value, ExpectedKind::FnPtr)?; // If we check references recursively, also check that this points to a function. diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 3aa3b3b74e0..36c7bed5c11 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -35,7 +35,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::Slice(_) | ty::RawPtr(_, _) | ty::Ref(_, _, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Tuple(_) | ty::Dynamic(_, _, _) => self.pretty_print_type(ty), diff --git a/compiler/rustc_data_structures/src/graph/scc/mod.rs b/compiler/rustc_data_structures/src/graph/scc/mod.rs index 96fc8ae3887..2a457ffb70b 100644 --- a/compiler/rustc_data_structures/src/graph/scc/mod.rs +++ b/compiler/rustc_data_structures/src/graph/scc/mod.rs @@ -8,6 +8,7 @@ //! Typical examples would include: minimum element in SCC, maximum element //! reachable from it, etc. +use std::assert_matches::debug_assert_matches; use std::fmt::Debug; use std::ops::Range; @@ -569,7 +570,7 @@ where // This None marks that we still have the initialize this node's frame. debug!(?depth, ?node); - debug_assert!(matches!(self.node_states[node], NodeState::NotVisited)); + debug_assert_matches!(self.node_states[node], NodeState::NotVisited); // Push `node` onto the stack. 
self.node_states[node] = NodeState::BeingVisited { diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 403136e78f4..a35f5b1f17d 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -18,6 +18,7 @@ #![feature(array_windows)] #![feature(ascii_char)] #![feature(ascii_char_variants)] +#![feature(assert_matches)] #![feature(auto_traits)] #![feature(cfg_match)] #![feature(core_intrinsics)] diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index aefbf05a1fc..fd203c38318 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -10,6 +10,7 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(array_windows)] +#![feature(assert_matches)] #![feature(associated_type_defaults)] #![feature(box_into_inner)] #![feature(box_patterns)] @@ -28,6 +29,7 @@ extern crate self as rustc_errors; +use std::assert_matches::assert_matches; use std::backtrace::{Backtrace, BacktraceStatus}; use std::borrow::Cow; use std::cell::Cell; @@ -1490,7 +1492,7 @@ impl DiagCtxtInner { // Future breakages aren't emitted if they're `Level::Allow` or // `Level::Expect`, but they still need to be constructed and // stashed below, so they'll trigger the must_produce_diag check. - assert!(matches!(diagnostic.level, Error | Warning | Allow | Expect(_))); + assert_matches!(diagnostic.level, Error | Warning | Allow | Expect(_)); self.future_breakage_diagnostics.push(diagnostic.clone()); } @@ -1835,23 +1837,23 @@ impl DelayedDiagInner { } } -/// Level is_error EmissionGuarantee Top-level Sub Used in lints? -/// ----- -------- ----------------- --------- --- -------------- -/// Bug yes BugAbort yes - - -/// Fatal yes FatalAbort/FatalError(*) yes - - -/// Error yes ErrorGuaranteed yes - yes -/// DelayedBug yes ErrorGuaranteed yes - - -/// ForceWarning - () yes - lint-only -/// Warning - () yes yes yes -/// Note - () rare yes - -/// OnceNote - () - yes lint-only -/// Help - () rare yes - -/// OnceHelp - () - yes lint-only -/// FailureNote - () rare - - -/// Allow - () yes - lint-only -/// Expect - () yes - lint-only +/// | Level | is_error | EmissionGuarantee | Top-level | Sub | Used in lints? +/// | ----- | -------- | ----------------- | --------- | --- | -------------- +/// | Bug | yes | BugAbort | yes | - | - +/// | Fatal | yes | FatalAbort/FatalError[^star] | yes | - | - +/// | Error | yes | ErrorGuaranteed | yes | - | yes +/// | DelayedBug | yes | ErrorGuaranteed | yes | - | - +/// | ForceWarning | - | () | yes | - | lint-only +/// | Warning | - | () | yes | yes | yes +/// | Note | - | () | rare | yes | - +/// | OnceNote | - | () | - | yes | lint-only +/// | Help | - | () | rare | yes | - +/// | OnceHelp | - | () | - | yes | lint-only +/// | FailureNote | - | () | rare | - | - +/// | Allow | - | () | yes | - | lint-only +/// | Expect | - | () | yes | - | lint-only /// -/// (*) `FatalAbort` normally, `FatalError` in the non-aborting "almost fatal" case that is +/// [^star]: `FatalAbort` normally, `FatalError` in the non-aborting "almost fatal" case that is /// occasionally used. 
/// #[derive(Copy, PartialEq, Eq, Clone, Hash, Debug, Encodable, Decodable)] diff --git a/compiler/rustc_expand/messages.ftl b/compiler/rustc_expand/messages.ftl index 18d95a398fd..766d96e268f 100644 --- a/compiler/rustc_expand/messages.ftl +++ b/compiler/rustc_expand/messages.ftl @@ -129,6 +129,9 @@ expand_module_multiple_candidates = expand_must_repeat_once = this must repeat at least once +expand_non_inline_modules_in_proc_macro_input_are_unstable = + non-inline modules in proc macro input are unstable + expand_not_a_meta_item = not a meta item diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 7712915d490..8f9104135cd 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -1398,8 +1398,6 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) { }; if crate_matches { - // FIXME: make this translatable - #[allow(rustc::untranslatable_diagnostic)] sess.dcx().emit_fatal(errors::ProcMacroBackCompat { crate_name: "rental".to_string(), fixed_version: "0.5.6".to_string(), diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index d8cb367e3fa..37679e17b90 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -39,6 +39,7 @@ use crate::errors::{ RecursionLimitReached, RemoveExprNotSupported, RemoveNodeNotSupported, UnsupportedKeyValue, WrongFragmentKind, }; +use crate::fluent_generated; use crate::mbe::diagnostics::annotate_err_with_kind; use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod}; use crate::placeholders::{placeholder, PlaceholderExpander}; @@ -882,7 +883,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } impl<'ast, 'a> Visitor<'ast> for GateProcMacroInput<'a> { - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn visit_item(&mut self, item: &'ast ast::Item) { match &item.kind { ItemKind::Mod(_, mod_kind) @@ -892,7 +892,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.sess, sym::proc_macro_hygiene, item.span, - "non-inline modules in proc macro input are unstable", + fluent_generated::expand_non_inline_modules_in_proc_macro_input_are_unstable, ) .emit(); } @@ -1876,7 +1876,6 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { // Detect use of feature-gated or invalid attributes on macro invocations // since they will not be detected after macro expansion. - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn check_attributes(&self, attrs: &[ast::Attribute], call: &ast::MacCall) { let features = self.cx.ecfg.features; let mut attrs = attrs.iter().peekable(); diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 44286cfeeef..03b40e28f8b 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -60,6 +60,8 @@ declare_features! ( (accepted, adx_target_feature, "1.61.0", Some(44839)), /// Allows explicit discriminants on non-unit enum variants. (accepted, arbitrary_enum_discriminant, "1.66.0", Some(60553)), + /// Allows using `const` operands in inline assembly. + (accepted, asm_const, "CURRENT_RUSTC_VERSION", Some(93332)), /// Allows using `sym` operands in inline assembly. (accepted, asm_sym, "1.66.0", Some(93333)), /// Allows the definition of associated constants in `trait` or `impl` blocks. 
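With `asm_const` moved to the accepted features above, `const` operands in `asm!` no longer need a feature gate. A minimal standalone sketch (not from this patch, and assuming an x86_64 target plus a toolchain that includes this stabilization):

    // Illustration of a `const` operand in inline assembly; the instruction
    // itself is incidental, the point is the gate-free `const 42` operand.
    use std::arch::asm;

    #[cfg(target_arch = "x86_64")]
    fn main() {
        let x: u64;
        unsafe {
            // The `const` operand is rendered as an immediate at compile time.
            asm!("mov {o}, {v}", o = out(reg) x, v = const 42);
        }
        assert_eq!(x, 42);
    }

    #[cfg(not(target_arch = "x86_64"))]
    fn main() {}
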
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 72ea55d5999..d4c54b67f24 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -578,12 +578,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ EncodeCrossCrate::No, coroutines, experimental!(coroutines) ), - // `#[pointee]` attribute to designate the pointee type in SmartPointer derive-macro - gated!( - pointee, Normal, template!(Word), ErrorFollowing, - EncodeCrossCrate::No, derive_smart_pointer, experimental!(pointee) - ), - // RFC 3543 // `#[patchable_function_entry(prefix_nops = m, entry_nops = n)]` gated!( @@ -643,8 +637,8 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ through unstable paths" ), rustc_attr!( - rustc_deprecated_safe_2024, Normal, template!(Word), WarnFollowing, - EncodeCrossCrate::Yes, + rustc_deprecated_safe_2024, Normal, template!(List: r#"audit_that = "...""#), + ErrorFollowing, EncodeCrossCrate::Yes, "rustc_deprecated_safe_2024 is supposed to be used in libstd only", ), diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 47810bc9165..24f691ea7fa 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -348,8 +348,6 @@ declare_features! ( (unstable, alloc_error_handler, "1.29.0", Some(51540)), /// Allows trait methods with arbitrary self types. (unstable, arbitrary_self_types, "1.23.0", Some(44874)), - /// Allows using `const` operands in inline assembly. - (unstable, asm_const, "1.58.0", Some(93332)), /// Enables experimental inline assembly support for additional architectures. (unstable, asm_experimental_arch, "1.58.0", Some(93335)), /// Allows using `label` operands in inline assembly. diff --git a/compiler/rustc_hir_analysis/src/check/intrinsicck.rs b/compiler/rustc_hir_analysis/src/check/intrinsicck.rs index 847a1e64706..3d5a22fce85 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsicck.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsicck.rs @@ -1,3 +1,5 @@ +use std::assert_matches::debug_assert_matches; + use rustc_ast::InlineAsmTemplatePiece; use rustc_data_structures::fx::FxIndexSet; use rustc_hir::{self as hir, LangItem}; @@ -66,7 +68,7 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { ty::Float(FloatTy::F32) => Some(InlineAsmType::F32), ty::Float(FloatTy::F64) => Some(InlineAsmType::F64), ty::Float(FloatTy::F128) => Some(InlineAsmType::F128), - ty::FnPtr(_) => Some(asm_ty_isize), + ty::FnPtr(..) => Some(asm_ty_isize), ty::RawPtr(ty, _) if self.is_thin_ptr_ty(ty) => Some(asm_ty_isize), ty::Adt(adt, args) if adt.repr().simd() => { let fields = &adt.non_enum_variant().fields; @@ -457,17 +459,17 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { } // Typeck has checked that Const operands are integers. hir::InlineAsmOperand::Const { anon_const } => { - debug_assert!(matches!( + debug_assert_matches!( self.tcx.type_of(anon_const.def_id).instantiate_identity().kind(), ty::Error(_) | ty::Int(_) | ty::Uint(_) - )); + ); } // Typeck has checked that SymFn refers to a function. hir::InlineAsmOperand::SymFn { anon_const } => { - debug_assert!(matches!( + debug_assert_matches!( self.tcx.type_of(anon_const.def_id).instantiate_identity().kind(), ty::Error(_) | ty::FnDef(..) - )); + ); } // AST lowering guarantees that SymStatic points to a static. hir::InlineAsmOperand::SymStatic { .. 
} => {} diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index a9f8630741a..d3b928fc17c 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -951,7 +951,7 @@ fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) -> Result<(), } else { let mut diag = match ty.kind() { ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Error(_) => return Ok(()), - ty::FnPtr(_) => tcx.dcx().struct_span_err( + ty::FnPtr(..) => tcx.dcx().struct_span_err( hir_ty.span, "using function pointers as const generic parameters is forbidden", ), diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index bdb5f5b7205..fecd78bc38f 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -1,6 +1,7 @@ //! Check properties that are required by built-in traits and set //! up data structures required by type-checking/codegen. +use std::assert_matches::assert_matches; use std::collections::BTreeMap; use rustc_data_structures::fx::FxHashSet; @@ -129,7 +130,7 @@ fn visit_implementation_of_const_param_ty( checker: &Checker<'_>, kind: LangItem, ) -> Result<(), ErrorGuaranteed> { - assert!(matches!(kind, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy)); + assert_matches!(kind, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy); let tcx = checker.tcx; let header = checker.impl_header; diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs index e2d3ff558cf..89acb778d6c 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs @@ -172,7 +172,7 @@ impl<'tcx> InherentCollect<'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::Never - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Tuple(..) => self.check_primitive_impl(id, self_ty), ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) | ty::Param(_) => { Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span })) diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 8c1e5e78b75..91fa066ec6a 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -1700,6 +1700,8 @@ fn impl_trait_header(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<ty::ImplTrai trait_ref: ty::EarlyBinder::bind(trait_ref), safety: impl_.safety, polarity: polarity_of_impl(tcx, def_id, impl_, item.span), + do_not_recommend: tcx.features().do_not_recommend + && tcx.has_attrs_with_path(def_id, &[sym::diagnostic, sym::do_not_recommend]), } }) } diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs index 60e2c2eb30e..28d6cab4b43 100644 --- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::ops::ControlFlow; use hir::intravisit::{self, Visitor}; @@ -207,9 +208,9 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { .. 
}) => { if in_trait { - assert!(matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn)) + assert_matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn); } else { - assert!(matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn | DefKind::Fn)) + assert_matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn | DefKind::Fn); } Some(fn_def_id.to_def_id()) } @@ -218,9 +219,9 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { .. }) => { if in_assoc_ty { - assert!(matches!(tcx.def_kind(parent), DefKind::AssocTy)); + assert_matches!(tcx.def_kind(parent), DefKind::AssocTy); } else { - assert!(matches!(tcx.def_kind(parent), DefKind::TyAlias)); + assert_matches!(tcx.def_kind(parent), DefKind::TyAlias); } debug!("generics_of: parent of opaque ty {:?} is {:?}", def_id, parent); // Opaque types are always nested within another item, and diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index a5a56cb845d..6ac4802b195 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use hir::{HirId, Node}; use rustc_data_structures::fx::FxIndexSet; use rustc_hir as hir; @@ -601,7 +603,7 @@ pub(super) fn implied_predicates_with_filter( let Some(trait_def_id) = trait_def_id.as_local() else { // if `assoc_name` is None, then the query should've been redirected to an // external provider - assert!(matches!(filter, PredicateFilter::SelfThatDefines(_))); + assert_matches!(filter, PredicateFilter::SelfThatDefines(_)); return tcx.explicit_super_predicates_of(trait_def_id); }; diff --git a/compiler/rustc_hir_analysis/src/delegation.rs b/compiler/rustc_hir_analysis/src/delegation.rs index ca62ef92b83..20aaa43219f 100644 --- a/compiler/rustc_hir_analysis/src/delegation.rs +++ b/compiler/rustc_hir_analysis/src/delegation.rs @@ -1,3 +1,5 @@ +use std::assert_matches::debug_assert_matches; + use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; @@ -63,7 +65,7 @@ enum FnKind { } fn fn_kind<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> FnKind { - debug_assert!(matches!(tcx.def_kind(def_id), DefKind::Fn | DefKind::AssocFn)); + debug_assert_matches!(tcx.def_kind(def_id), DefKind::Fn | DefKind::AssocFn); let parent = tcx.parent(def_id); match tcx.def_kind(parent) { diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check.rs b/compiler/rustc_hir_analysis/src/impl_wf_check.rs index a8ae620f7a4..ab441ed4cde 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check.rs @@ -8,6 +8,8 @@ //! specialization errors. These things can (and probably should) be //! fixed, but for the moment it's easier to do these checks early. +use std::assert_matches::debug_assert_matches; + use min_specialization::check_min_specialization; use rustc_data_structures::fx::FxHashSet; use rustc_errors::codes::*; @@ -54,7 +56,7 @@ mod min_specialization; pub fn check_impl_wf(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) -> Result<(), ErrorGuaranteed> { let min_specialization = tcx.features().min_specialization; let mut res = Ok(()); - debug_assert!(matches!(tcx.def_kind(impl_def_id), DefKind::Impl { .. })); + debug_assert_matches!(tcx.def_kind(impl_def_id), DefKind::Impl { .. 
}); res = res.and(enforce_impl_params_are_constrained(tcx, impl_def_id)); if min_specialization { res = res.and(check_min_specialization(tcx, impl_def_id)); diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 061db14ad0a..291d57f2a17 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -62,6 +62,7 @@ This API is completely unstable and subject to change. #![allow(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(control_flow_enum)] #![feature(if_let_guard)] #![feature(iter_intersperse)] diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 92baa41e07f..ce9e73bf245 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -317,8 +317,8 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraint(current, data.index, variance); } - ty::FnPtr(sig) => { - self.add_constraints_from_sig(current, sig, variance); + ty::FnPtr(sig_tys, hdr) => { + self.add_constraints_from_sig(current, sig_tys.with(hdr), variance); } ty::Error(_) => { diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 07f64ead6f6..44cb08e44eb 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -137,7 +137,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If the callee is a bare function or a closure, then we're all set. match *adjusted_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { let adjustments = self.adjust_steps(autoderef); self.apply_adjustments(callee_expr, adjustments); return Some(CallStep::Builtin(adjusted_ty)); @@ -467,7 +467,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { (fn_sig, Some(def_id)) } // FIXME(effects): these arms should error because we can't enforce them - ty::FnPtr(sig) => (sig, None), + ty::FnPtr(sig_tys, hdr) => (sig_tys.with(hdr), None), _ => { for arg in arg_exprs { self.check_expr(arg); diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index de702733904..7cd97166ed1 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -97,6 +97,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return Ok(Some(PointerKind::Thin)); } + let t = self.try_structurally_resolve_type(span, t); + Ok(match *t.kind() { ty::Slice(_) | ty::Str => Some(PointerKind::Length), ty::Dynamic(tty, _, ty::Dyn) => Some(PointerKind::VTable(tty)), diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index 89df464cca0..cd357e4a7ad 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -160,15 +160,11 @@ pub(super) fn check_fn<'a, 'tcx>( fcx.demand_suptype(span, ret_ty, actual_return_ty); // Check that a function marked as `#[panic_handler]` has signature `fn(&PanicInfo) -> !` - if let Some(panic_impl_did) = tcx.lang_items().panic_impl() - && panic_impl_did == fn_def_id.to_def_id() - { - check_panic_info_fn(tcx, panic_impl_did.expect_local(), fn_sig); + if tcx.is_lang_item(fn_def_id.to_def_id(), LangItem::PanicImpl) { + check_panic_info_fn(tcx, fn_def_id, fn_sig); } - if let Some(lang_start_defid) = tcx.lang_items().start_fn() - && lang_start_defid == fn_def_id.to_def_id() - { + if 
tcx.is_lang_item(fn_def_id.to_def_id(), LangItem::Start) { check_lang_start_fn(tcx, fn_sig, fn_def_id); } diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs index a7953acc95c..589a9c5a719 100644 --- a/compiler/rustc_hir_typeck/src/closure.rs +++ b/compiler/rustc_hir_typeck/src/closure.rs @@ -336,9 +336,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .into_iter() .map(|obl| (obl.predicate, obl.cause.span)), ), - ty::FnPtr(sig) => match closure_kind { + ty::FnPtr(sig_tys, hdr) => match closure_kind { hir::ClosureKind::Closure => { - let expected_sig = ExpectedSig { cause_span: None, sig }; + let expected_sig = ExpectedSig { cause_span: None, sig: sig_tys.with(hdr) }; (Some(expected_sig), Some(ty::ClosureKind::Fn)) } hir::ClosureKind::Coroutine(_) | hir::ClosureKind::CoroutineClosure(_) => { diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index fcd3798eb48..d53df251a15 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -137,7 +137,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { at.lub(DefineOpaqueTypes::Yes, b, a) } else { at.sup(DefineOpaqueTypes::Yes, b, a) - .map(|InferOk { value: (), obligations }| InferOk { value: a, obligations }) + .map(|InferOk { value: (), obligations }| InferOk { value: b, obligations }) }; // In the new solver, lazy norm may allow us to shallowly equate @@ -225,10 +225,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // items to drop the unsafe qualifier. self.coerce_from_fn_item(a, b) } - ty::FnPtr(a_f) => { + ty::FnPtr(a_sig_tys, a_hdr) => { // We permit coercion of fn pointers to drop the // unsafe qualifier. - self.coerce_from_fn_pointer(a, a_f, b) + self.coerce_from_fn_pointer(a, a_sig_tys.with(a_hdr), b) } ty::Closure(closure_def_id_a, args_a) => { // Non-capturing closures are coercible to @@ -788,9 +788,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.commit_if_ok(|snapshot| { let outer_universe = self.infcx.universe(); - let result = if let ty::FnPtr(fn_ty_b) = b.kind() - && let (hir::Safety::Safe, hir::Safety::Unsafe) = - (fn_ty_a.safety(), fn_ty_b.safety()) + let result = if let ty::FnPtr(_, hdr_b) = b.kind() + && let (hir::Safety::Safe, hir::Safety::Unsafe) = (fn_ty_a.safety(), hdr_b.safety) { let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); self.unify_and(unsafe_a, b, to_unsafe) @@ -842,7 +841,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b); match b.kind() { - ty::FnPtr(b_sig) => { + ty::FnPtr(_, b_hdr) => { let a_sig = a.fn_sig(self.tcx); if let ty::FnDef(def_id, _) = *a.kind() { // Intrinsics are not coercible to function pointers @@ -852,7 +851,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396). - if b_sig.safety() == hir::Safety::Safe + if b_hdr.safety == hir::Safety::Safe && !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() { return Err(TypeError::TargetFeatureCast(def_id)); @@ -910,7 +909,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // // All we care here is if any variable is being captured and not the exact paths, // so we check `upvars_mentioned` for root variables being captured. - ty::FnPtr(fn_ty) + ty::FnPtr(_, hdr) if self .tcx .upvars_mentioned(closure_def_id_a.expect_local()) @@ -923,7 +922,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // or // `unsafe fn(arg0,arg1,...) 
-> _` let closure_sig = args_a.as_closure().sig(); - let safety = fn_ty.safety(); + let safety = hdr.safety; let pointer_ty = Ty::new_fn_ptr(self.tcx, self.tcx.signature_unclosure(closure_sig, safety)); debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty); diff --git a/compiler/rustc_hir_typeck/src/expectation.rs b/compiler/rustc_hir_typeck/src/expectation.rs index 91deae4174b..76ae41db5c5 100644 --- a/compiler/rustc_hir_typeck/src/expectation.rs +++ b/compiler/rustc_hir_typeck/src/expectation.rs @@ -70,7 +70,8 @@ impl<'a, 'tcx> Expectation<'tcx> { /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169 /// for examples of where this comes up,. pub(super) fn rvalue_hint(fcx: &FnCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> { - match fcx.tcx.struct_tail_without_normalization(ty).kind() { + // FIXME: This is not right, even in the old solver... + match fcx.tcx.struct_tail_raw(ty, |ty| ty, || {}).kind() { ty::Slice(_) | ty::Str | ty::Dynamic(..) => ExpectRvalueLikeUnsized(ty), _ => ExpectHasType(ty), } diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 841d25b54cc..b169f75796b 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -404,7 +404,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { code: traits::ObligationCauseCode<'tcx>, ) { if !ty.references_error() { - let tail = self.tcx.struct_tail_with_normalize( + let tail = self.tcx.struct_tail_raw( ty, |ty| { if self.next_trait_solver() { diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 89e7227eda2..7720faddba3 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -1530,7 +1530,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ty::Int(_) | ty::Uint(_) => Some(ty), ty::Char => Some(tcx.types.u8), ty::RawPtr(..) => Some(tcx.types.usize), - ty::FnDef(..) | ty::FnPtr(_) => Some(tcx.types.usize), + ty::FnDef(..) | ty::FnPtr(..) => Some(tcx.types.usize), _ => None, }); opt_ty.unwrap_or_else(|| self.next_int_var()) diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 6b4edcd95d9..703273968c5 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -604,7 +604,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Ty<'tcx>, found: Ty<'tcx>, ) -> bool { - if let (ty::FnPtr(_), ty::Closure(def_id, _)) = (expected.kind(), found.kind()) { + if let (ty::FnPtr(..), ty::Closure(def_id, _)) = (expected.kind(), found.kind()) { if let Some(upvars) = self.tcx.upvars_mentioned(*def_id) { // Report upto four upvars being captured to reduce the amount error messages // reported back to the user. diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index 61287d98676..3b71e2f19b1 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -50,7 +50,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Not all of these (e.g., unsafe fns) implement `FnOnce`, // so we look for these beforehand. // FIXME(async_closures): These don't impl `FnOnce` by default. - ty::Closure(..) | ty::FnDef(..) | ty::FnPtr(_) => true, + ty::Closure(..) | ty::FnDef(..) | ty::FnPtr(..) => true, // If it's not a simple function, look for things which implement `FnOnce`. 
_ => { let Some(fn_once) = tcx.lang_items().fn_once_trait() else { diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 3bcb92d8029..db5139172b0 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -438,7 +438,7 @@ impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Canonicalizer<'cx, 'tcx> { | ty::RawPtr(..) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(..) | ty::Never | ty::Tuple(..) diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index c3d37d9986f..1908e1e09c3 100644 --- a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_middle::ty::{self, OutlivesPredicate, Ty, TyCtxt}; use rustc_type_ir::outlives::{compute_alias_components_recursive, Component}; use smallvec::smallvec; @@ -181,7 +183,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { &self, generic_ty: Ty<'tcx>, ) -> Vec<ty::PolyTypeOutlivesPredicate<'tcx>> { - assert!(matches!(generic_ty.kind(), ty::Param(_) | ty::Placeholder(_))); + assert_matches!(generic_ty.kind(), ty::Param(_) | ty::Placeholder(_)); self.declared_generic_bounds_from_env_for_erased_ty(generic_ty) } diff --git a/compiler/rustc_infer/src/lib.rs b/compiler/rustc_infer/src/lib.rs index b65ac859667..25ac8ba974b 100644 --- a/compiler/rustc_infer/src/lib.rs +++ b/compiler/rustc_infer/src/lib.rs @@ -18,6 +18,7 @@ #![allow(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(box_patterns)] #![feature(control_flow_enum)] #![feature(extend_one)] diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 6f53b1c9031..761d288a7c2 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -386,7 +386,6 @@ fn get_codegen_sysroot( } } -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub(crate) fn check_attr_crate_type( sess: &Session, attrs: &[ast::Attribute], diff --git a/compiler/rustc_lint/src/for_loops_over_fallibles.rs b/compiler/rustc_lint/src/for_loops_over_fallibles.rs index 6cb5263ac54..2793d48dc51 100644 --- a/compiler/rustc_lint/src/for_loops_over_fallibles.rs +++ b/compiler/rustc_lint/src/for_loops_over_fallibles.rs @@ -1,5 +1,5 @@ use hir::{Expr, Pat}; -use rustc_hir as hir; +use rustc_hir::{self as hir, LangItem}; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::ObligationCause; use rustc_middle::ty; @@ -126,7 +126,10 @@ fn extract_iterator_next_call<'tcx>( ) -> Option<&'tcx Expr<'tcx>> { // This won't work for `Iterator::next(iter)`, is this an issue? 
if let hir::ExprKind::MethodCall(_, recv, _, _) = expr.kind - && cx.typeck_results().type_dependent_def_id(expr.hir_id) == cx.tcx.lang_items().next_fn() + && cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .is_some_and(|def_id| cx.tcx.is_lang_item(def_id, LangItem::IteratorNext)) { Some(recv) } else { diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs index 72920fd045f..44117e5d7a5 100644 --- a/compiler/rustc_lint/src/levels.rs +++ b/compiler/rustc_lint/src/levels.rs @@ -717,7 +717,6 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { }; } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn add(&mut self, attrs: &[ast::Attribute], is_crate_node: bool, source_hir_id: Option<HirId>) { let sess = self.sess; for (attr_index, attr) in attrs.iter().enumerate() { @@ -1039,7 +1038,6 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS); // FIXME: make this translatable #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] lint_level(self.sess, lint, level, src, Some(span.into()), |lint| { lint.primary_message(fluent::lint_unknown_gated_lint); lint.arg("name", lint_id.lint.name_lower()); diff --git a/compiler/rustc_lint/src/traits.rs b/compiler/rustc_lint/src/traits.rs index fea96b5366e..a0fe4b5af74 100644 --- a/compiler/rustc_lint/src/traits.rs +++ b/compiler/rustc_lint/src/traits.rs @@ -114,7 +114,7 @@ impl<'tcx> LateLintPass<'tcx> for DropTraitConstraints { let hir::TyKind::TraitObject(bounds, _lifetime, _syntax) = &ty.kind else { return }; for (bound, modifier) in &bounds[..] { let def_id = bound.trait_ref.trait_def_id(); - if cx.tcx.lang_items().drop_trait() == def_id + if def_id.is_some_and(|def_id| cx.tcx.is_lang_item(def_id, LangItem::Drop)) && *modifier != hir::TraitBoundModifier::Maybe { let Some(def_id) = cx.tcx.get_diagnostic_item(sym::needs_drop) else { return }; diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index 51896da893c..54bf73a40dd 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -1022,7 +1022,7 @@ fn ty_is_known_nonnull<'tcx>( let ty = tcx.try_normalize_erasing_regions(param_env, ty).unwrap_or(ty); match ty.kind() { - ty::FnPtr(_) => true, + ty::FnPtr(..) => true, ty::Ref(..) 
=> true, ty::Adt(def, _) if def.is_box() && matches!(mode, CItemKind::Definition) => true, ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => { @@ -1473,7 +1473,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { ty::Array(inner_ty, _) => self.check_type_for_ffi(cache, inner_ty), - ty::FnPtr(sig) => { + ty::FnPtr(sig_tys, hdr) => { + let sig = sig_tys.with(hdr); if self.is_internal_abi(sig.abi()) { return FfiUnsafe { ty, @@ -1709,8 +1710,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { type Result = ControlFlow<Ty<'tcx>>; fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result { - if let ty::FnPtr(sig) = ty.kind() - && !self.visitor.is_internal_abi(sig.abi()) + if let ty::FnPtr(_, hdr) = ty.kind() + && !self.visitor.is_internal_abi(hdr.abi) { self.tys.push(ty); } diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index 2ff7335a0fc..79a68b2ff0e 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -397,6 +397,18 @@ LLVMRustCreateAllocSizeAttr(LLVMContextRef C, uint32_t ElementSizeArg) { std::nullopt)); } +extern "C" LLVMAttributeRef +LLVMRustCreateRangeAttribute(LLVMContextRef C, unsigned NumBits, + const uint64_t LowerWords[], + const uint64_t UpperWords[]) { +#if LLVM_VERSION_GE(19, 0) + return LLVMCreateConstantRangeAttribute(C, Attribute::Range, NumBits, + LowerWords, UpperWords); +#else + report_fatal_error("LLVM 19.0 is required for Range Attribute"); +#endif +} + // These values **must** match ffi::AllocKindFlags. // It _happens_ to match the LLVM values of llvm::AllocFnKind, // but that's happenstance and we do explicit conversions before diff --git a/compiler/rustc_metadata/messages.ftl b/compiler/rustc_metadata/messages.ftl index 415399ed06c..d80aa0cc4f4 100644 --- a/compiler/rustc_metadata/messages.ftl +++ b/compiler/rustc_metadata/messages.ftl @@ -41,6 +41,9 @@ metadata_crate_dep_multiple = metadata_crate_dep_not_static = `{$crate_name}` was unavailable as a static crate, preventing fully static linking +metadata_crate_dep_rustc_driver = + `feature(rustc_private)` is needed to link to the compiler's `rustc_driver` library + metadata_crate_location_unknown_type = extern location for {$crate_name} is of an unknown type: {$path} @@ -131,12 +134,18 @@ metadata_lib_framework_apple = metadata_lib_required = crate `{$crate_name}` required to be available in {$kind} format, but was not found in this form +metadata_link_arg_unstable = + link kind `link-arg` is unstable + metadata_link_cfg_form = link cfg must be of the form `cfg(/* predicate */)` metadata_link_cfg_single_predicate = link cfg must have a single predicate argument +metadata_link_cfg_unstable = + link cfg is unstable + metadata_link_framework_apple = link kind `framework` is only supported on Apple targets diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs index 2fca443ffa0..14a1a7f67e5 100644 --- a/compiler/rustc_metadata/src/creader.rs +++ b/compiler/rustc_metadata/src/creader.rs @@ -949,7 +949,6 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { } } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn report_unused_deps(&mut self, krate: &ast::Crate) { // Make a point span rather than covering the whole file let span = krate.spans.inner_span.shrink_to_lo(); diff --git a/compiler/rustc_metadata/src/dependency_format.rs b/compiler/rustc_metadata/src/dependency_format.rs index 17fd260fd79..39fa23766b5 100644 --- 
a/compiler/rustc_metadata/src/dependency_format.rs +++ b/compiler/rustc_metadata/src/dependency_format.rs @@ -51,7 +51,7 @@ //! Additionally, the algorithm is geared towards finding *any* solution rather //! than finding a number of solutions (there are normally quite a few). -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::def_id::CrateNum; use rustc_middle::bug; use rustc_middle::middle::dependency_format::{Dependencies, DependencyList, Linkage}; @@ -59,12 +59,14 @@ use rustc_middle::ty::TyCtxt; use rustc_session::config::CrateType; use rustc_session::cstore::CrateDepKind; use rustc_session::cstore::LinkagePreference::{self, RequireDynamic, RequireStatic}; +use rustc_span::sym; use tracing::info; use crate::creader::CStore; use crate::errors::{ BadPanicStrategy, CrateDepMultiple, IncompatiblePanicInDropStrategy, LibRequired, - NonStaticCrateDep, RequiredPanicStrategy, RlibRequired, RustcLibRequired, TwoPanicRuntimes, + NonStaticCrateDep, RequiredPanicStrategy, RlibRequired, RustcDriverHelp, RustcLibRequired, + TwoPanicRuntimes, }; pub(crate) fn calculate(tcx: TyCtxt<'_>) -> Dependencies { @@ -160,25 +162,49 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { Linkage::Dynamic | Linkage::IncludedFromDylib => {} } + let all_dylibs = || { + tcx.crates(()).iter().filter(|&&cnum| { + !tcx.dep_kind(cnum).macros_only() && tcx.used_crate_source(cnum).dylib.is_some() + }) + }; + + let mut upstream_in_dylibs = FxHashSet::default(); + + if tcx.features().rustc_private { + // We need this to prevent users of `rustc_driver` from linking dynamically to `std` + // which does not work as `std` is also statically linked into `rustc_driver`. + + // Find all libraries statically linked to upstream dylibs. + for &cnum in all_dylibs() { + let deps = tcx.dylib_dependency_formats(cnum); + for &(depnum, style) in deps.iter() { + if let RequireStatic = style { + upstream_in_dylibs.insert(depnum); + } + } + } + } + let mut formats = FxHashMap::default(); // Sweep all crates for found dylibs. Add all dylibs, as well as their // dependencies, ensuring there are no conflicts. The only valid case for a // dependency to be relied upon twice is for both cases to rely on a dylib. - for &cnum in tcx.crates(()).iter() { - if tcx.dep_kind(cnum).macros_only() { + for &cnum in all_dylibs() { + if upstream_in_dylibs.contains(&cnum) { + info!("skipping dylib: {}", tcx.crate_name(cnum)); + // If this dylib is also available statically linked to another dylib + // we try to use that instead. continue; } + let name = tcx.crate_name(cnum); - let src = tcx.used_crate_source(cnum); - if src.dylib.is_some() { - info!("adding dylib: {}", name); - add_library(tcx, cnum, RequireDynamic, &mut formats, &mut unavailable_as_static); - let deps = tcx.dylib_dependency_formats(cnum); - for &(depnum, style) in deps.iter() { - info!("adding {:?}: {}", style, tcx.crate_name(depnum)); - add_library(tcx, depnum, style, &mut formats, &mut unavailable_as_static); - } + info!("adding dylib: {}", name); + add_library(tcx, cnum, RequireDynamic, &mut formats, &mut unavailable_as_static); + let deps = tcx.dylib_dependency_formats(cnum); + for &(depnum, style) in deps.iter() { + info!("adding {:?}: {}", style, tcx.crate_name(depnum)); + add_library(tcx, depnum, style, &mut formats, &mut unavailable_as_static); } } @@ -268,12 +294,15 @@ fn add_library( // This error is probably a little obscure, but I imagine that it // can be refined over time. 
if link2 != link || link == RequireStatic { + let linking_to_rustc_driver = tcx.sess.psess.unstable_features.is_nightly_build() + && tcx.crates(()).iter().any(|&cnum| tcx.crate_name(cnum) == sym::rustc_driver); tcx.dcx().emit_err(CrateDepMultiple { crate_name: tcx.crate_name(cnum), non_static_deps: unavailable_as_static .drain(..) .map(|cnum| NonStaticCrateDep { crate_name: tcx.crate_name(cnum) }) .collect(), + rustc_driver_help: linking_to_rustc_driver.then_some(RustcDriverHelp), }); } } diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs index 89970dddf9f..42dec978b78 100644 --- a/compiler/rustc_metadata/src/errors.rs +++ b/compiler/rustc_metadata/src/errors.rs @@ -38,6 +38,8 @@ pub struct CrateDepMultiple { pub crate_name: Symbol, #[subdiagnostic] pub non_static_deps: Vec<NonStaticCrateDep>, + #[subdiagnostic] + pub rustc_driver_help: Option<RustcDriverHelp>, } #[derive(Subdiagnostic)] @@ -46,6 +48,10 @@ pub struct NonStaticCrateDep { pub crate_name: Symbol, } +#[derive(Subdiagnostic)] +#[help(metadata_crate_dep_rustc_driver)] +pub struct RustcDriverHelp; + #[derive(Diagnostic)] #[diag(metadata_two_panic_runtimes)] pub struct TwoPanicRuntimes { diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index b19493d12ff..34497f5ac53 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -17,7 +17,7 @@ use rustc_span::def_id::{DefId, LOCAL_CRATE}; use rustc_span::symbol::{sym, Symbol}; use rustc_target::spec::abi::Abi; -use crate::errors; +use crate::{errors, fluent_generated}; pub fn find_native_static_library(name: &str, verbatim: bool, sess: &Session) -> PathBuf { let formats = if verbatim { @@ -87,7 +87,6 @@ struct Collector<'tcx> { } impl<'tcx> Collector<'tcx> { - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn process_module(&mut self, module: &ForeignModule) { let ForeignModule { def_id, abi, ref foreign_items } = *module; let def_id = def_id.expect_local(); @@ -161,7 +160,7 @@ impl<'tcx> Collector<'tcx> { sess, sym::link_arg_attribute, span, - "link kind `link-arg` is unstable", + fluent_generated::metadata_link_arg_unstable, ) .emit(); } @@ -201,8 +200,13 @@ impl<'tcx> Collector<'tcx> { continue; }; if !features.link_cfg { - feature_err(sess, sym::link_cfg, item.span(), "link cfg is unstable") - .emit(); + feature_err( + sess, + sym::link_cfg, + item.span(), + fluent_generated::metadata_link_cfg_unstable, + ) + .emit(); } cfg = Some(link_cfg.clone()); } @@ -266,6 +270,8 @@ impl<'tcx> Collector<'tcx> { macro report_unstable_modifier($feature: ident) { if !features.$feature { + // FIXME: make this translatable + #[expect(rustc::untranslatable_diagnostic)] feature_err( sess, sym::$feature, diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index e3d7dff3c66..37c10b14054 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -61,10 +61,6 @@ macro_rules! 
arena_types { [] dtorck_constraint: rustc_middle::traits::query::DropckConstraint<'tcx>, [] candidate_step: rustc_middle::traits::query::CandidateStep<'tcx>, [] autoderef_bad_ty: rustc_middle::traits::query::MethodAutoderefBadTy<'tcx>, - [] canonical_goal_evaluation: - rustc_type_ir::solve::inspect::CanonicalGoalEvaluationStep< - rustc_middle::ty::TyCtxt<'tcx> - >, [] query_region_constraints: rustc_middle::infer::canonical::QueryRegionConstraints<'tcx>, [] type_op_subtype: rustc_middle::infer::canonical::Canonical<'tcx, diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index 1851a61d753..91e71c12cae 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -13,7 +13,6 @@ use std::num::NonZero; use std::{fmt, io}; use rustc_ast::LitKind; -use rustc_attr::InlineAttr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lock; use rustc_errors::ErrorGuaranteed; @@ -46,7 +45,7 @@ pub use self::pointer::{CtfeProvenance, Pointer, PointerArithmetic, Provenance}; pub use self::value::Scalar; use crate::mir; use crate::ty::codec::{TyDecoder, TyEncoder}; -use crate::ty::{self, GenericArgKind, Instance, Ty, TyCtxt}; +use crate::ty::{self, Instance, Ty, TyCtxt}; /// Uniquely identifies one of the following: /// - A constant @@ -126,11 +125,10 @@ pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>( AllocDiscriminant::Alloc.encode(encoder); alloc.encode(encoder); } - GlobalAlloc::Function { instance, unique } => { + GlobalAlloc::Function { instance } => { trace!("encoding {:?} with {:#?}", alloc_id, instance); AllocDiscriminant::Fn.encode(encoder); instance.encode(encoder); - unique.encode(encoder); } GlobalAlloc::VTable(ty, poly_trait_ref) => { trace!("encoding {:?} with {ty:#?}, {poly_trait_ref:#?}", alloc_id); @@ -219,38 +217,32 @@ impl<'s> AllocDecodingSession<'s> { } // Now decode the actual data. - let alloc_id = decoder.with_position(pos, |decoder| { - match alloc_kind { - AllocDiscriminant::Alloc => { - trace!("creating memory alloc ID"); - let alloc = <ConstAllocation<'tcx> as Decodable<_>>::decode(decoder); - trace!("decoded alloc {:?}", alloc); - decoder.interner().reserve_and_set_memory_alloc(alloc) - } - AllocDiscriminant::Fn => { - trace!("creating fn alloc ID"); - let instance = ty::Instance::decode(decoder); - trace!("decoded fn alloc instance: {:?}", instance); - let unique = bool::decode(decoder); - // Here we cannot call `reserve_and_set_fn_alloc` as that would use a query, which - // is not possible in this context. That's why the allocation stores - // whether it is unique or not. 
- decoder.interner().reserve_and_set_fn_alloc_internal(instance, unique) - } - AllocDiscriminant::VTable => { - trace!("creating vtable alloc ID"); - let ty = <Ty<'_> as Decodable<D>>::decode(decoder); - let poly_trait_ref = - <Option<ty::PolyExistentialTraitRef<'_>> as Decodable<D>>::decode(decoder); - trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}"); - decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref) - } - AllocDiscriminant::Static => { - trace!("creating extern static alloc ID"); - let did = <DefId as Decodable<D>>::decode(decoder); - trace!("decoded static def-ID: {:?}", did); - decoder.interner().reserve_and_set_static_alloc(did) - } + let alloc_id = decoder.with_position(pos, |decoder| match alloc_kind { + AllocDiscriminant::Alloc => { + trace!("creating memory alloc ID"); + let alloc = <ConstAllocation<'tcx> as Decodable<_>>::decode(decoder); + trace!("decoded alloc {:?}", alloc); + decoder.interner().reserve_and_set_memory_alloc(alloc) + } + AllocDiscriminant::Fn => { + trace!("creating fn alloc ID"); + let instance = ty::Instance::decode(decoder); + trace!("decoded fn alloc instance: {:?}", instance); + decoder.interner().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT) + } + AllocDiscriminant::VTable => { + trace!("creating vtable alloc ID"); + let ty = <Ty<'_> as Decodable<D>>::decode(decoder); + let poly_trait_ref = + <Option<ty::PolyExistentialTraitRef<'_>> as Decodable<D>>::decode(decoder); + trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}"); + decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref, CTFE_ALLOC_SALT) + } + AllocDiscriminant::Static => { + trace!("creating extern static alloc ID"); + let did = <DefId as Decodable<D>>::decode(decoder); + trace!("decoded static def-ID: {:?}", did); + decoder.interner().reserve_and_set_static_alloc(did) } }); @@ -265,12 +257,7 @@ impl<'s> AllocDecodingSession<'s> { #[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)] pub enum GlobalAlloc<'tcx> { /// The alloc ID is used as a function pointer. - Function { - instance: Instance<'tcx>, - /// Stores whether this instance is unique, i.e. all pointers to this function use the same - /// alloc ID. - unique: bool, - }, + Function { instance: Instance<'tcx> }, /// This alloc ID points to a symbolic (not-reified) vtable. VTable(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), /// The alloc ID points to a "lazy" static variable that did not get computed (yet). @@ -323,14 +310,17 @@ impl<'tcx> GlobalAlloc<'tcx> { } } +pub const CTFE_ALLOC_SALT: usize = 0; + pub(crate) struct AllocMap<'tcx> { /// Maps `AllocId`s to their corresponding allocations. alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>, - /// Used to ensure that statics and functions only get one associated `AllocId`. - // - // FIXME: Should we just have two separate dedup maps for statics and functions each? - dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>, + /// Used to deduplicate global allocations: functions, vtables, string literals, ... + /// + /// The `usize` is a "salt" used by Miri to make deduplication imperfect, thus better emulating + /// the actual guarantees. + dedup: FxHashMap<(GlobalAlloc<'tcx>, usize), AllocId>, /// The `AllocId` to assign to the next requested ID. /// Always incremented; never gets smaller. @@ -368,74 +358,40 @@ impl<'tcx> TyCtxt<'tcx> { /// Reserves a new ID *if* this allocation has not been dedup-reserved before. /// Should not be used for mutable memory. 
- fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>) -> AllocId { + fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>, salt: usize) -> AllocId { let mut alloc_map = self.alloc_map.lock(); if let GlobalAlloc::Memory(mem) = alloc { if mem.inner().mutability.is_mut() { bug!("trying to dedup-reserve mutable memory"); } } - if let Some(&alloc_id) = alloc_map.dedup.get(&alloc) { + let alloc_salt = (alloc, salt); + if let Some(&alloc_id) = alloc_map.dedup.get(&alloc_salt) { return alloc_id; } let id = alloc_map.reserve(); - debug!("creating alloc {alloc:?} with id {id:?}"); - alloc_map.alloc_map.insert(id, alloc.clone()); - alloc_map.dedup.insert(alloc, id); + debug!("creating alloc {:?} with id {id:?}", alloc_salt.0); + alloc_map.alloc_map.insert(id, alloc_salt.0.clone()); + alloc_map.dedup.insert(alloc_salt, id); id } /// Generates an `AllocId` for a memory allocation. If the exact same memory has been /// allocated before, this will return the same `AllocId`. - pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>) -> AllocId { - self.reserve_and_set_dedup(GlobalAlloc::Memory(mem)) + pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>, salt: usize) -> AllocId { + self.reserve_and_set_dedup(GlobalAlloc::Memory(mem), salt) } /// Generates an `AllocId` for a static or return a cached one in case this function has been /// called on the same static before. pub fn reserve_and_set_static_alloc(self, static_id: DefId) -> AllocId { - self.reserve_and_set_dedup(GlobalAlloc::Static(static_id)) - } - - /// Generates an `AllocId` for a function. The caller must already have decided whether this - /// function obtains a unique AllocId or gets de-duplicated via the cache. - fn reserve_and_set_fn_alloc_internal(self, instance: Instance<'tcx>, unique: bool) -> AllocId { - let alloc = GlobalAlloc::Function { instance, unique }; - if unique { - // Deduplicate. - self.reserve_and_set_dedup(alloc) - } else { - // Get a fresh ID. - let mut alloc_map = self.alloc_map.lock(); - let id = alloc_map.reserve(); - alloc_map.alloc_map.insert(id, alloc); - id - } + let salt = 0; // Statics have a guaranteed unique address, no salt added. + self.reserve_and_set_dedup(GlobalAlloc::Static(static_id), salt) } - /// Generates an `AllocId` for a function. Depending on the function type, - /// this might get deduplicated or assigned a new ID each time. - pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>) -> AllocId { - // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated - // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be - // duplicated across crates. We thus generate a new `AllocId` for every mention of a - // function. This means that `main as fn() == main as fn()` is false, while `let x = main as - // fn(); x == x` is true. However, as a quality-of-life feature it can be useful to identify - // certain functions uniquely, e.g. for backtraces. So we identify whether codegen will - // actually emit duplicate functions. It does that when they have non-lifetime generics, or - // when they can be inlined. All other functions are given a unique address. - // This is not a stable guarantee! The `inline` attribute is a hint and cannot be relied - // upon for anything. But if we don't do this, backtraces look terrible. 
- let is_generic = instance - .args - .into_iter() - .any(|kind| !matches!(kind.unpack(), GenericArgKind::Lifetime(_))); - let can_be_inlined = match self.codegen_fn_attrs(instance.def_id()).inline { - InlineAttr::Never => false, - _ => true, - }; - let unique = !is_generic && !can_be_inlined; - self.reserve_and_set_fn_alloc_internal(instance, unique) + /// Generates an `AllocId` for a function. Will get deduplicated. + pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>, salt: usize) -> AllocId { + self.reserve_and_set_dedup(GlobalAlloc::Function { instance }, salt) } /// Generates an `AllocId` for a (symbolic, not-reified) vtable. Will get deduplicated. @@ -443,8 +399,9 @@ impl<'tcx> TyCtxt<'tcx> { self, ty: Ty<'tcx>, poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>, + salt: usize, ) -> AllocId { - self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, poly_trait_ref)) + self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, poly_trait_ref), salt) } /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical diff --git a/compiler/rustc_middle/src/ty/consts/kind.rs b/compiler/rustc_middle/src/ty/consts/kind.rs index 7f096dd36f8..c7c2e8afa1e 100644 --- a/compiler/rustc_middle/src/ty/consts/kind.rs +++ b/compiler/rustc_middle/src/ty/consts/kind.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_macros::{extension, HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use super::Const; @@ -80,7 +82,7 @@ impl<'tcx> Expr<'tcx> { } pub fn binop_args(self) -> (Ty<'tcx>, Ty<'tcx>, Const<'tcx>, Const<'tcx>) { - assert!(matches!(self.kind, ExprKind::Binop(_))); + assert_matches!(self.kind, ExprKind::Binop(_)); match self.args().as_slice() { [lhs_ty, rhs_ty, lhs_ct, rhs_ct] => ( @@ -101,7 +103,7 @@ impl<'tcx> Expr<'tcx> { } pub fn unop_args(self) -> (Ty<'tcx>, Const<'tcx>) { - assert!(matches!(self.kind, ExprKind::UnOp(_))); + assert_matches!(self.kind, ExprKind::UnOp(_)); match self.args().as_slice() { [ty, ct] => (ty.expect_ty(), ct.expect_const()), @@ -125,7 +127,7 @@ impl<'tcx> Expr<'tcx> { } pub fn call_args(self) -> (Ty<'tcx>, Const<'tcx>, impl Iterator<Item = Const<'tcx>>) { - assert!(matches!(self.kind, ExprKind::FunctionCall)); + assert_matches!(self.kind, ExprKind::FunctionCall); match self.args().as_slice() { [func_ty, func, rest @ ..] 
=> ( @@ -152,7 +154,7 @@ impl<'tcx> Expr<'tcx> { } pub fn cast_args(self) -> (Ty<'tcx>, Const<'tcx>, Ty<'tcx>) { - assert!(matches!(self.kind, ExprKind::Cast(_))); + assert_matches!(self.kind, ExprKind::Cast(_)); match self.args().as_slice() { [value_ty, value, to_ty] => { diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 8f8fd09c9e4..9b39b849704 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -107,8 +107,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.mk_predefined_opaques_in_body(data) } type DefiningOpaqueTypes = &'tcx ty::List<LocalDefId>; - type CanonicalGoalEvaluationStepRef = - &'tcx solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>>; type CanonicalVars = CanonicalVarInfos<'tcx>; fn mk_canonical_var_infos(self, infos: &[ty::CanonicalVarInfo<Self>]) -> Self::CanonicalVars { self.mk_canonical_var_infos(infos) @@ -277,13 +275,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.debug_assert_args_compatible(def_id, args); } - fn intern_canonical_goal_evaluation_step( - self, - step: solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>>, - ) -> &'tcx solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>> { - self.arena.alloc(step) - } - fn mk_type_list_from_iter<I, T>(self, args: I) -> T::Output where I: Iterator<Item = T>, @@ -427,7 +418,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -1438,11 +1429,11 @@ impl<'tcx> TyCtxt<'tcx> { /// Allocates a read-only byte or string literal for `mir::interpret`. /// Returns the same `AllocId` if called again with the same bytes. - pub fn allocate_bytes_dedup(self, bytes: &[u8]) -> interpret::AllocId { + pub fn allocate_bytes_dedup(self, bytes: &[u8], salt: usize) -> interpret::AllocId { // Create an allocation that just contains these bytes. 
let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes); let alloc = self.mk_const_alloc(alloc); - self.reserve_and_set_memory_dedup(alloc) + self.reserve_and_set_memory_dedup(alloc, salt) } /// Returns a range of the start/end indices specified with the @@ -3176,6 +3167,12 @@ impl<'tcx> TyCtxt<'tcx> { pub fn impl_polarity(self, def_id: impl IntoQueryParam<DefId>) -> ty::ImplPolarity { self.impl_trait_header(def_id).map_or(ty::ImplPolarity::Positive, |h| h.polarity) } + + /// Whether this is a trait implementation that has `#[diagnostic::do_not_recommend]` + pub fn do_not_recommend_impl(self, def_id: DefId) -> bool { + matches!(self.def_kind(def_id), DefKind::Impl { of_trait: true }) + && self.impl_trait_header(def_id).is_some_and(|header| header.do_not_recommend) + } } /// Parameter attributes that can only be determined by examining the body of a function instead diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs index 5acc0b7ac7f..c14dadc68c9 100644 --- a/compiler/rustc_middle/src/ty/diagnostics.rs +++ b/compiler/rustc_middle/src/ty/diagnostics.rs @@ -6,10 +6,9 @@ use std::ops::ControlFlow; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{into_diag_arg_using_display, Applicability, Diag, DiagArgValue, IntoDiagArg}; -use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; -use rustc_hir::{PredicateOrigin, WherePredicate}; +use rustc_hir::{self as hir, LangItem, PredicateOrigin, WherePredicate}; use rustc_span::{BytePos, Span}; use rustc_type_ir::TyKind::*; @@ -290,8 +289,9 @@ pub fn suggest_constraining_type_params<'a>( let Some(param) = param else { return false }; { - let mut sized_constraints = - constraints.extract_if(|(_, def_id)| *def_id == tcx.lang_items().sized_trait()); + let mut sized_constraints = constraints.extract_if(|(_, def_id)| { + def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Sized)) + }); if let Some((_, def_id)) = sized_constraints.next() { applicability = Applicability::MaybeIncorrect; diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index 2f9bdb16bb0..d974a86a303 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -130,7 +130,7 @@ impl<'tcx> Ty<'tcx> { DefKind::Ctor(CtorOf::Variant, _) => "enum constructor".into(), _ => "fn item".into(), }, - ty::FnPtr(_) => "fn pointer".into(), + ty::FnPtr(..) => "fn pointer".into(), ty::Dynamic(inner, ..) if let Some(principal) = inner.principal() => { format!("`dyn {}`", tcx.def_path_str(principal.def_id())).into() } @@ -194,7 +194,7 @@ impl<'tcx> Ty<'tcx> { DefKind::Ctor(CtorOf::Variant, _) => "enum constructor".into(), _ => "fn item".into(), }, - ty::FnPtr(_) => "fn pointer".into(), + ty::FnPtr(..) => "fn pointer".into(), ty::Dynamic(..) => "trait object".into(), ty::Closure(..) | ty::CoroutineClosure(..) => "closure".into(), ty::Coroutine(def_id, ..) 
=> { diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index c3430b58406..fc079592583 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -250,9 +250,8 @@ impl FlagComputation { self.add_args(args); } - &ty::FnPtr(fn_sig) => self.bound_computation(fn_sig, |computation, fn_sig| { - computation.add_tys(fn_sig.inputs()); - computation.add_ty(fn_sig.output()); + &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { + computation.add_tys(sig_tys.inputs_and_output); }), } } diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index 0496c571f5e..6f19739de45 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -838,7 +838,7 @@ impl<'tcx> Instance<'tcx> { return None; }; - if tcx.lang_items().get(coroutine_callable_item) == Some(trait_item_id) { + if tcx.is_lang_item(trait_item_id, coroutine_callable_item) { let ty::Coroutine(_, id_args) = *tcx.type_of(coroutine_def_id).skip_binder().kind() else { bug!() diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 9204405d58f..619981bf021 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -362,7 +362,7 @@ impl<'tcx> SizeSkeleton<'tcx> { ty::Ref(_, pointee, _) | ty::RawPtr(pointee, _) => { let non_zero = !ty.is_unsafe_ptr(); - let tail = tcx.struct_tail_with_normalize( + let tail = tcx.struct_tail_raw( pointee, |ty| match tcx.try_normalize_erasing_regions(param_env, ty) { Ok(ty) => ty, @@ -801,7 +801,7 @@ where | ty::Int(_) | ty::Uint(_) | ty::Float(_) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::FnDef(..) | ty::CoroutineWitness(..) @@ -986,9 +986,11 @@ where safe: None, }) } - ty::FnPtr(fn_sig) if offset.bytes() == 0 => { - tcx.layout_of(param_env.and(Ty::new_fn_ptr(tcx, fn_sig))).ok().map(|layout| { - PointeeInfo { size: layout.size, align: layout.align.abi, safe: None } + ty::FnPtr(..) if offset.bytes() == 0 => { + tcx.layout_of(param_env.and(this.ty)).ok().map(|layout| PointeeInfo { + size: layout.size, + align: layout.align.abi, + safe: None, }) } ty::Ref(_, ty, mt) if offset.bytes() == 0 => { diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 9736428e6f7..1e3b5800cba 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -262,6 +262,7 @@ pub struct ImplTraitHeader<'tcx> { pub trait_ref: ty::EarlyBinder<'tcx, ty::TraitRef<'tcx>>, pub polarity: ImplPolarity, pub safety: hir::Safety, + pub do_not_recommend: bool, } #[derive(Copy, Clone, PartialEq, Eq, Debug, TypeFoldable, TypeVisitable)] @@ -2149,6 +2150,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(PredicateKind<'_>, 32); - static_assert_size!(WithCachedTypeInfo<TyKind<'_>>, 56); + static_assert_size!(WithCachedTypeInfo<TyKind<'_>>, 48); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 6cce79dfdc1..cc746746760 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -290,7 +290,7 @@ fn characteristic_def_id_of_type_cached<'a>( | ty::Int(_) | ty::Uint(_) | ty::Str - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Alias(..) | ty::Placeholder(..) 
| ty::Param(_) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 29d72183dd3..56ddf146636 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -696,7 +696,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!(print(sig), " {{", print_value_path(def_id, args), "}}"); } } - ty::FnPtr(ref bare_fn) => p!(print(bare_fn)), + ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))), ty::Infer(infer_ty) => { if self.should_print_verbose() { p!(write("{:?}", ty.kind())); @@ -1145,7 +1145,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let term = if let Some(ty) = term.skip_binder().as_type() && let ty::Alias(ty::Projection, proj) = ty.kind() && let Some(assoc) = tcx.opt_associated_item(proj.def_id) - && assoc.trait_container(tcx) == tcx.lang_items().coroutine_trait() + && assoc + .trait_container(tcx) + .is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Coroutine)) && assoc.name == rustc_span::sym::Return { if let ty::Coroutine(_, args) = args.type_at(0).kind() { @@ -1678,7 +1680,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } } - ty::FnPtr(_) => { + ty::FnPtr(..) => { // FIXME: We should probably have a helper method to share code with the "Byte strings" // printing above (which also has to handle pointers to all sorts of things). if let Some(GlobalAlloc::Function { instance, .. }) = @@ -1741,7 +1743,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!(write("{:?}", char::try_from(int).unwrap())) } // Pointer types - ty::Ref(..) | ty::RawPtr(_, _) | ty::FnPtr(_) => { + ty::Ref(..) | ty::RawPtr(_, _) | ty::FnPtr(..) => { let data = int.to_bits(self.tcx().data_layout.pointer_size); self.typed_value( |this| { diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 8fb44a5f0b1..80b33c2cda9 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -374,7 +374,7 @@ impl<'tcx> TypeSuperFoldable<TyCtxt<'tcx>> for Ty<'tcx> { ), ty::Tuple(ts) => ty::Tuple(ts.try_fold_with(folder)?), ty::FnDef(def_id, args) => ty::FnDef(def_id, args.try_fold_with(folder)?), - ty::FnPtr(f) => ty::FnPtr(f.try_fold_with(folder)?), + ty::FnPtr(sig_tys, hdr) => ty::FnPtr(sig_tys.try_fold_with(folder)?, hdr), ty::Ref(r, ty, mutbl) => { ty::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) } @@ -424,7 +424,7 @@ impl<'tcx> TypeSuperVisitable<TyCtxt<'tcx>> for Ty<'tcx> { } ty::Tuple(ts) => ts.visit_with(visitor), ty::FnDef(_, args) => args.visit_with(visitor), - ty::FnPtr(ref f) => f.visit_with(visitor), + ty::FnPtr(ref sig_tys, _) => sig_tys.visit_with(visitor), ty::Ref(r, ty, _) => { try_visit!(r.visit_with(visitor)); ty.visit_with(visitor) diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 8c97de1c59b..0a277fea49c 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -658,7 +658,8 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn new_fn_ptr(tcx: TyCtxt<'tcx>, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { - Ty::new(tcx, FnPtr(fty)) + let (sig_tys, hdr) = fty.split(); + Ty::new(tcx, FnPtr(sig_tys, hdr)) } #[inline] @@ -1182,7 +1183,7 @@ impl<'tcx> Ty<'tcx> { | Float(_) | Uint(_) | FnDef(..) - | FnPtr(_) + | FnPtr(..) 
| RawPtr(_, _) | Infer(IntVar(_) | FloatVar(_)) ) @@ -1333,7 +1334,7 @@ impl<'tcx> Ty<'tcx> { pub fn fn_sig(self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { match self.kind() { FnDef(def_id, args) => tcx.fn_sig(*def_id).instantiate(tcx, args), - FnPtr(f) => *f, + FnPtr(sig_tys, hdr) => sig_tys.with(*hdr), Error(_) => { // ignore errors (#54954) Binder::dummy(ty::FnSig { @@ -1352,12 +1353,12 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn is_fn(self) -> bool { - matches!(self.kind(), FnDef(..) | FnPtr(_)) + matches!(self.kind(), FnDef(..) | FnPtr(..)) } #[inline] pub fn is_fn_ptr(self) -> bool { - matches!(self.kind(), FnPtr(_)) + matches!(self.kind(), FnPtr(..)) } #[inline] @@ -1590,7 +1591,7 @@ impl<'tcx> Ty<'tcx> { tcx: TyCtxt<'tcx>, normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, ) -> Result<Ty<'tcx>, Ty<'tcx>> { - let tail = tcx.struct_tail_with_normalize(self, normalize, || {}); + let tail = tcx.struct_tail_raw(self, normalize, || {}); match tail.kind() { // Sized types ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) @@ -1599,7 +1600,7 @@ impl<'tcx> Ty<'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -1614,10 +1615,10 @@ impl<'tcx> Ty<'tcx> { | ty::Foreign(..) // `dyn*` has metadata = (). | ty::Dynamic(_, _, ty::DynStar) - // If returned by `struct_tail_with_normalize` this is a unit struct + // If returned by `struct_tail_raw` this is a unit struct // without any fields, or not a struct, and therefore is Sized. | ty::Adt(..) - // If returned by `struct_tail_with_normalize` this is the empty tuple, + // If returned by `struct_tail_raw` this is the empty tuple, // a.k.a. unit type, which is Sized | ty::Tuple(..) => Ok(tcx.types.unit), @@ -1791,7 +1792,7 @@ impl<'tcx> Ty<'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -1915,7 +1916,7 @@ impl<'tcx> Ty<'tcx> { pub fn is_c_void(self, tcx: TyCtxt<'_>) -> bool { match self.kind() { - ty::Adt(adt, _) => tcx.lang_items().get(LangItem::CVoid) == Some(adt.did()), + ty::Adt(adt, _) => tcx.is_lang_item(adt.did(), LangItem::CVoid), _ => false, } } @@ -1941,7 +1942,7 @@ impl<'tcx> Ty<'tcx> { | RawPtr(_, _) | Ref(_, _, _) | FnDef(_, _) - | FnPtr(_) + | FnPtr(..) | Dynamic(_, _, _) | Closure(_, _) | CoroutineClosure(_, _) @@ -1955,9 +1956,12 @@ impl<'tcx> Ty<'tcx> { } impl<'tcx> rustc_type_ir::inherent::Tys<TyCtxt<'tcx>> for &'tcx ty::List<Ty<'tcx>> { - fn split_inputs_and_output(self) -> (&'tcx [Ty<'tcx>], Ty<'tcx>) { - let (output, inputs) = self.split_last().unwrap(); - (inputs, *output) + fn inputs(self) -> &'tcx [Ty<'tcx>] { + self.split_last().unwrap().1 + } + + fn output(self) -> Ty<'tcx> { + *self.split_last().unwrap().0 } } @@ -1969,6 +1973,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(ty::RegionKind<'_>, 24); - static_assert_size!(ty::TyKind<'_>, 32); + static_assert_size!(ty::TyKind<'_>, 24); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index 365f434a264..b9bf17cbb5c 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -171,14 +171,6 @@ impl<'tcx> TyCtxt<'tcx> { } } - /// Attempts to returns the deeply last field of nested structures, but - /// does not apply any normalization in its search. Returns the same type - /// if input `ty` is not a structure at all. 
- pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> { - let tcx = self; - tcx.struct_tail_with_normalize(ty, |ty| ty, || {}) - } - /// Returns the deeply last field of nested structures, or the same type if /// not a structure at all. Corresponds to the only possible unsized field, /// and its type can be used to determine unsizing strategy. @@ -188,7 +180,7 @@ impl<'tcx> TyCtxt<'tcx> { /// normalization attempt may cause compiler bugs. pub fn struct_tail_for_codegen(self, ty: Ty<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Ty<'tcx> { let tcx = self; - tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty), || {}) + tcx.struct_tail_raw(ty, |ty| tcx.normalize_erasing_regions(param_env, ty), || {}) } /// Returns the deeply last field of nested structures, or the same type if @@ -196,12 +188,14 @@ impl<'tcx> TyCtxt<'tcx> { /// and its type can be used to determine unsizing strategy. /// /// This is parameterized over the normalization strategy (i.e. how to - /// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity - /// function to indicate no normalization should take place. + /// handle `<T as Trait>::Assoc` and `impl Trait`). You almost certainly do + /// **NOT** want to pass the identity function here, unless you know what + /// you're doing, or you're within normalization code itself and will handle + /// an unnormalized tail recursively. /// /// See also `struct_tail_for_codegen`, which is suitable for use /// during codegen. - pub fn struct_tail_with_normalize( + pub fn struct_tail_raw( self, mut ty: Ty<'tcx>, mut normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, @@ -281,7 +275,7 @@ impl<'tcx> TyCtxt<'tcx> { param_env: ty::ParamEnv<'tcx>, ) -> (Ty<'tcx>, Ty<'tcx>) { let tcx = self; - tcx.struct_lockstep_tails_with_normalize(source, target, |ty| { + tcx.struct_lockstep_tails_raw(source, target, |ty| { tcx.normalize_erasing_regions(param_env, ty) }) } @@ -294,7 +288,7 @@ impl<'tcx> TyCtxt<'tcx> { /// /// See also `struct_lockstep_tails_for_codegen`, which is suitable for use /// during codegen. - pub fn struct_lockstep_tails_with_normalize( + pub fn struct_lockstep_tails_raw( self, source: Ty<'tcx>, target: Ty<'tcx>, @@ -1278,7 +1272,7 @@ impl<'tcx> Ty<'tcx> { | ty::RawPtr(_, _) | ty::FnDef(..) | ty::Error(_) - | ty::FnPtr(_) => true, + | ty::FnPtr(..) => true, ty::Tuple(fields) => fields.iter().all(Self::is_trivially_freeze), ty::Pat(ty, _) | ty::Slice(ty) | ty::Array(ty, _) => ty.is_trivially_freeze(), ty::Adt(..) @@ -1318,7 +1312,7 @@ impl<'tcx> Ty<'tcx> { | ty::RawPtr(_, _) | ty::FnDef(..) | ty::Error(_) - | ty::FnPtr(_) => true, + | ty::FnPtr(..) => true, ty::Tuple(fields) => fields.iter().all(Self::is_trivially_unpin), ty::Pat(ty, _) | ty::Slice(ty) | ty::Array(ty, _) => ty.is_trivially_unpin(), ty::Adt(..) @@ -1357,7 +1351,7 @@ impl<'tcx> Ty<'tcx> { | ty::Ref(..) | ty::RawPtr(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Infer(ty::FreshIntTy(_)) | ty::Infer(ty::FreshFloatTy(_)) => AsyncDropGlueMorphology::Noop, @@ -1540,7 +1534,7 @@ impl<'tcx> Ty<'tcx> { ty::Pat(..) | ty::Ref(..) | ty::Array(..) | ty::Slice(_) | ty::Tuple(..) => true, // Raw pointers use bitwise comparison. - ty::RawPtr(_, _) | ty::FnPtr(_) => true, + ty::RawPtr(_, _) | ty::FnPtr(..) => true, // Floating point numbers are not `Eq`. ty::Float(_) => false, @@ -1671,7 +1665,7 @@ pub fn needs_drop_components_with_async<'tcx>( | ty::Float(_) | ty::Never | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Char | ty::RawPtr(_, _) | ty::Ref(..) 
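For context on the `struct_tail_raw` rename above: the identity-normalizing wrapper is removed, so callers now pick a normalization strategy explicitly. Below is a minimal sketch of the two call shapes, assuming the usual `tcx`, `param_env`, and `ty` are in scope; the helper names come from this diff, nothing else here is new API.

// Normalizing convenience wrapper, suitable for codegen-time callers.
let tail = tcx.struct_tail_for_codegen(ty, param_env);

// Raw walk with an explicit normalization closure, mirroring the call sites in this
// diff. Passing the identity closure here is what the new doc comment warns against,
// unless the caller is itself normalization code and handles an unnormalized tail.
let raw_tail = tcx.struct_tail_raw(
    ty,
    |ty| tcx.normalize_erasing_regions(param_env, ty),
    || {}, // auxiliary callback; the call sites in this diff pass a no-op
);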
@@ -1738,7 +1732,7 @@ pub fn is_trivially_const_drop(ty: Ty<'_>) -> bool { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Foreign(_) => true, diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs index f38f27b84f0..951112dfe85 100644 --- a/compiler/rustc_middle/src/ty/vtable.rs +++ b/compiler/rustc_middle/src/ty/vtable.rs @@ -3,7 +3,7 @@ use std::fmt; use rustc_ast::Mutability; use rustc_macros::HashStable; -use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar}; +use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar, CTFE_ALLOC_SALT}; use crate::ty::{self, Instance, PolyTraitRef, Ty, TyCtxt}; #[derive(Clone, Copy, PartialEq, HashStable)] @@ -73,6 +73,11 @@ pub(crate) fn vtable_min_entries<'tcx>( /// Retrieves an allocation that represents the contents of a vtable. /// Since this is a query, allocations are cached and not duplicated. +/// +/// This is an "internal" `AllocId` that should never be used as a value in the interpreted program. +/// The interpreter should use `AllocId` that refer to a `GlobalAlloc::VTable` instead. +/// (This is similar to statics, which also have a similar "internal" `AllocId` storing their +/// initial contents.) pub(super) fn vtable_allocation_provider<'tcx>( tcx: TyCtxt<'tcx>, key: (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), @@ -114,7 +119,7 @@ pub(super) fn vtable_allocation_provider<'tcx>( VtblEntry::MetadataDropInPlace => { if ty.needs_drop(tcx, ty::ParamEnv::reveal_all()) { let instance = ty::Instance::resolve_drop_in_place(tcx, ty); - let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance); + let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT); let fn_ptr = Pointer::from(fn_alloc_id); Scalar::from_pointer(fn_ptr, &tcx) } else { @@ -127,7 +132,7 @@ pub(super) fn vtable_allocation_provider<'tcx>( VtblEntry::Method(instance) => { // Prepare the fn ptr we write into the vtable. 
let instance = instance.polymorphize(tcx); - let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance); + let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT); let fn_ptr = Pointer::from(fn_alloc_id); Scalar::from_pointer(fn_ptr, &tcx) } diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs index 2dd7a96f192..abd6df17514 100644 --- a/compiler/rustc_middle/src/ty/walk.rs +++ b/compiler/rustc_middle/src/ty/walk.rs @@ -189,9 +189,10 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) stack.extend(args.iter().rev()); } ty::Tuple(ts) => stack.extend(ts.iter().rev().map(GenericArg::from)), - ty::FnPtr(sig) => { - stack.push(sig.skip_binder().output().into()); - stack.extend(sig.skip_binder().inputs().iter().copied().rev().map(|ty| ty.into())); + ty::FnPtr(sig_tys, _hdr) => { + stack.extend( + sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), + ); } }, GenericArgKind::Lifetime(_) => {} diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl index dda4debecec..7baf0256dd8 100644 --- a/compiler/rustc_mir_build/messages.ftl +++ b/compiler/rustc_mir_build/messages.ftl @@ -30,7 +30,7 @@ mir_build_call_to_deprecated_safe_fn_requires_unsafe = call to deprecated safe function `{$function}` is unsafe and requires unsafe block .note = consult the function's documentation for information on how to avoid undefined behavior .label = call to unsafe function - .suggestion = you can wrap the call in an `unsafe` block if you can guarantee the code is only ever called from single-threaded code + .suggestion = you can wrap the call in an `unsafe` block if you can guarantee {$guarantee} mir_build_call_to_fn_with_requires_unsafe = call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe block diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/build/expr/as_constant.rs index 10cf545f1b7..4430aab73a8 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_constant.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_constant.rs @@ -2,7 +2,9 @@ use rustc_ast as ast; use rustc_hir::LangItem; -use rustc_middle::mir::interpret::{Allocation, LitToConstError, LitToConstInput, Scalar}; +use rustc_middle::mir::interpret::{ + Allocation, LitToConstError, LitToConstInput, Scalar, CTFE_ALLOC_SALT, +}; use rustc_middle::mir::*; use rustc_middle::thir::*; use rustc_middle::ty::{ @@ -140,7 +142,7 @@ fn lit_to_mir_constant<'tcx>( ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() } } (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => { - let id = tcx.allocate_bytes_dedup(data); + let id = tcx.allocate_bytes_dedup(data, CTFE_ALLOC_SALT); ConstValue::Scalar(Scalar::from_pointer(id.into(), &tcx)) } (ast::LitKind::CStr(data, _), ty::Ref(_, inner_ty, _)) if matches!(inner_ty.kind(), ty::Adt(def, _) if tcx.is_lang_item(def.did(), LangItem::CStr)) => diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 54a4204da71..9b85ad0ad08 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -96,9 +96,32 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> { // from an edition before 2024. 
&UnsafeOpKind::CallToUnsafeFunction(Some(id)) if !span.at_least_rust_2024() - && self.tcx.has_attr(id, sym::rustc_deprecated_safe_2024) => + && let Some(attr) = self.tcx.get_attr(id, sym::rustc_deprecated_safe_2024) => { + let suggestion = attr + .meta_item_list() + .unwrap_or_default() + .into_iter() + .find(|item| item.has_name(sym::audit_that)) + .map(|item| { + item.value_str().expect( + "`#[rustc_deprecated_safe_2024(audit_that)]` must have a string value", + ) + }); + let sm = self.tcx.sess.source_map(); + let guarantee = suggestion + .as_ref() + .map(|suggestion| format!("that {}", suggestion)) + .unwrap_or_else(|| String::from("its unsafe preconditions")); + let suggestion = suggestion + .and_then(|suggestion| { + sm.indentation_before(span).map(|indent| { + format!("{}// TODO: Audit that {}.\n", indent, suggestion) // ignore-tidy-todo + }) + }) + .unwrap_or_default(); + self.tcx.emit_node_span_lint( DEPRECATED_SAFE_2024, self.hir_context, @@ -106,8 +129,9 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> { CallToDeprecatedSafeFnRequiresUnsafe { span, function: with_no_trimmed_paths!(self.tcx.def_path_str(id)), + guarantee, sub: CallToDeprecatedSafeFnRequiresUnsafeSub { - indent: sm.indentation_before(span).unwrap_or_default(), + start_of_line_suggestion: suggestion, start_of_line: sm.span_extend_to_line(span).shrink_to_lo(), left: span.shrink_to_lo(), right: span.shrink_to_hi(), diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 42eca71ca3f..34577f102d1 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -28,6 +28,7 @@ pub(crate) struct CallToDeprecatedSafeFnRequiresUnsafe { #[label] pub(crate) span: Span, pub(crate) function: String, + pub(crate) guarantee: String, #[subdiagnostic] pub(crate) sub: CallToDeprecatedSafeFnRequiresUnsafeSub, } @@ -35,10 +36,8 @@ pub(crate) struct CallToDeprecatedSafeFnRequiresUnsafe { #[derive(Subdiagnostic)] #[multipart_suggestion(mir_build_suggestion, applicability = "machine-applicable")] pub(crate) struct CallToDeprecatedSafeFnRequiresUnsafeSub { - pub(crate) indent: String, - #[suggestion_part( - code = "{indent}// TODO: Audit that the environment access only happens in single-threaded code.\n" // ignore-tidy-todo - )] + pub(crate) start_of_line_suggestion: String, + #[suggestion_part(code = "{start_of_line_suggestion}")] pub(crate) start_of_line: Span, #[suggestion_part(code = "unsafe {{ ")] pub(crate) left: Span, diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index 07bf222fcca..85b9dacb129 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -702,10 +702,12 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> { && adt.is_enum() && let Constructor::Variant(variant_index) = witness_1.ctor() { - let variant = adt.variant(*variant_index); - let inhabited = variant.inhabited_predicate(self.tcx, *adt).instantiate(self.tcx, args); - assert!(inhabited.apply(self.tcx, cx.param_env, cx.module)); - !inhabited.apply_ignore_module(self.tcx, cx.param_env) + let variant_inhabited = adt + .variant(*variant_index) + .inhabited_predicate(self.tcx, *adt) + .instantiate(self.tcx, args); + variant_inhabited.apply(self.tcx, cx.param_env, cx.module) + && !variant_inhabited.apply_ignore_module(self.tcx, cx.param_env) } else { false }; diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs 
b/compiler/rustc_mir_dataflow/src/impls/initialized.rs index e9e8ddefa02..7822fb17f72 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_index::bit_set::{BitSet, ChunkedBitSet}; use rustc_index::Idx; use rustc_middle::bug; @@ -496,7 +498,7 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, '_, 'tcx> { }); if self.skip_unreachable_unwind.contains(location.block) { let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() }; - assert!(matches!(unwind, mir::UnwindAction::Cleanup(_))); + assert_matches!(unwind, mir::UnwindAction::Cleanup(_)); TerminatorEdges::Single(target) } else { terminator.edges() diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs index b0808ba2067..8708bebeeb0 100644 --- a/compiler/rustc_mir_dataflow/src/lib.rs +++ b/compiler/rustc_mir_dataflow/src/lib.rs @@ -1,4 +1,5 @@ // tidy-alphabetical-start +#![feature(assert_matches)] #![feature(associated_type_defaults)] #![feature(box_patterns)] #![feature(exact_size_is_empty)] diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index c26a72e4543..86091379f5a 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -158,7 +158,7 @@ impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { | ty::Pat(_, _) | ty::Slice(_) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -201,7 +201,7 @@ impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index ca8a2777045..139fd592f69 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -32,6 +32,7 @@ //! Because of that, we can assume that the only way to change the value behind a tracked place is //! by direct assignment. +use std::assert_matches::assert_matches; use std::fmt::{Debug, Formatter}; use std::ops::Range; @@ -54,7 +55,7 @@ use crate::{Analysis, AnalysisDomain, JoinSemiLattice, SwitchIntEdgeEffects}; pub trait ValueAnalysis<'tcx> { /// For each place of interest, the analysis tracks a value of the given type. - type Value: Clone + JoinSemiLattice + HasBottom + HasTop; + type Value: Clone + JoinSemiLattice + HasBottom + HasTop + Debug; const NAME: &'static str; @@ -344,7 +345,7 @@ impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) { // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥. 
- assert!(matches!(state, State::Unreachable)); + assert_matches!(state, State::Unreachable); *state = State::new_reachable(); for arg in body.args_iter() { state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map()); diff --git a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs index f52a4524d78..edb6bc4fbea 100644 --- a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs +++ b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs @@ -64,7 +64,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls { let ty = func.ty(body, tcx); let sig = ty.fn_sig(tcx); let fn_def_id = match ty.kind() { - ty::FnPtr(_) => None, + ty::FnPtr(..) => None, &ty::FnDef(def_id, _) => Some(def_id), _ => span_bug!(span, "invalid callee of type {:?}", ty), }; diff --git a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs index 4132e604f20..9a2cc057232 100644 --- a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs +++ b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs @@ -57,7 +57,7 @@ fn has_ffi_unwind_calls(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> bool { }; let fn_def_id = match ty.kind() { - ty::FnPtr(_) => None, + ty::FnPtr(..) => None, &ty::FnDef(def_id, _) => { // Rust calls cannot themselves create foreign unwinds (even if they use a non-Rust ABI). // So the leak of the foreign unwind into Rust can only be elsewhere, not here. diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index dc8e50ac8cd..48a3266ae6f 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -468,7 +468,7 @@ impl<'tcx> Validator<'_, 'tcx> { if let ty::RawPtr(_, _) | ty::FnPtr(..) = lhs_ty.kind() { // Raw and fn pointer operations are not allowed inside consts and thus not promotable. - assert!(matches!( + assert_matches!( op, BinOp::Eq | BinOp::Ne @@ -477,7 +477,7 @@ impl<'tcx> Validator<'_, 'tcx> { | BinOp::Ge | BinOp::Gt | BinOp::Offset - )); + ); return Err(Unpromotable); } diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index f41f3ef656c..09b4e5e0711 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -437,7 +437,7 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) - let src = tcx.mk_place_deref(Place::from(Local::new(1 + 0))); match self_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => builder.copy_shim(), + ty::FnDef(..) | ty::FnPtr(..) 
=> builder.copy_shim(), ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()), ty::CoroutineClosure(_, args) => { builder.tuple_like_shim(dest, src, args.as_coroutine_closure().upvar_tys()) @@ -996,7 +996,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> { /// } /// ``` fn build_fn_ptr_addr_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> { - assert!(matches!(self_ty.kind(), ty::FnPtr(..)), "expected fn ptr, found {self_ty}"); + assert_matches!(self_ty.kind(), ty::FnPtr(..), "expected fn ptr, found {self_ty}"); let span = tcx.def_span(def_id); let Some(sig) = tcx.fn_sig(def_id).instantiate(tcx, &[self_ty.into()]).no_bound_vars() else { span_bug!(span, "FnPtr::addr with bound vars for `{self_ty}`"); diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index 65a3d8d1742..f96329a3403 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -648,7 +648,9 @@ fn characteristic_def_id_of_mono_item<'tcx>( if let Some(impl_def_id) = tcx.impl_of_method(def_id) { if tcx.sess.opts.incremental.is_some() - && tcx.trait_id_of_impl(impl_def_id) == tcx.lang_items().drop_trait() + && tcx + .trait_id_of_impl(impl_def_id) + .is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Drop)) { // Put `Drop::drop` into the same cgu as `drop_in_place` // since `drop_in_place` is the only thing that can diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index 82488088e30..394518daa42 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -360,7 +360,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz | ty::Ref(_, _, _) | ty::Pat(_, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) diff --git a/compiler/rustc_next_trait_solver/src/coherence.rs b/compiler/rustc_next_trait_solver/src/coherence.rs index f22ea41c512..2461ef0c0df 100644 --- a/compiler/rustc_next_trait_solver/src/coherence.rs +++ b/compiler/rustc_next_trait_solver/src/coherence.rs @@ -334,7 +334,7 @@ where | ty::Str | ty::FnDef(..) | ty::Pat(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Array(..) | ty::Slice(..) | ty::RawPtr(..) diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 84921e87fb3..bb05eb4c256 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -533,7 +533,7 @@ where | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(..) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -620,7 +620,7 @@ where | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Alias(..) | ty::Closure(..) | ty::CoroutineClosure(..) diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index 00837f7cdd8..a57338acaab 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -31,7 +31,7 @@ where | ty::Bool | ty::Float(_) | ty::FnDef(..) 
- | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Error(_) | ty::Never | ty::Char => Ok(vec![]), @@ -117,7 +117,7 @@ where | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -178,7 +178,7 @@ where { match ty.kind() { // impl Copy/Clone for FnDef, FnPtr - ty::FnDef(..) | ty::FnPtr(_) | ty::Error(_) => Ok(vec![]), + ty::FnDef(..) | ty::FnPtr(..) | ty::Error(_) => Ok(vec![]), // Implementations are provided in core ty::Uint(_) @@ -269,7 +269,8 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_callable<I: Intern } } // keep this in sync with assemble_fn_pointer_candidates until the old solver is removed. - ty::FnPtr(sig) => { + ty::FnPtr(sig_tys, hdr) => { + let sig = sig_tys.with(hdr); if sig.is_fn_trait_compatible() { Ok(Some( sig.map_bound(|sig| (Ty::new_tup(cx, sig.inputs().as_slice()), sig.output())), @@ -460,7 +461,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I: ty::FnDef(def_id, _) => { let sig = self_ty.fn_sig(cx); - if sig.skip_binder().is_fn_trait_compatible() && !cx.has_target_features(def_id) { + if sig.is_fn_trait_compatible() && !cx.has_target_features(def_id) { fn_item_to_async_callable(cx, sig) } else { Err(NoSolution) @@ -468,7 +469,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I: } ty::FnPtr(..) => { let sig = self_ty.fn_sig(cx); - if sig.skip_binder().is_fn_trait_compatible() { + if sig.is_fn_trait_compatible() { fn_item_to_async_callable(cx, sig) } else { Err(NoSolution) diff --git a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs index a3c21666bd6..86fb036cd3d 100644 --- a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs +++ b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs @@ -5,11 +5,10 @@ //! see the comment on [ProofTreeBuilder]. use std::marker::PhantomData; -use std::mem; use derive_where::derive_where; use rustc_type_ir::inherent::*; -use rustc_type_ir::{self as ty, search_graph, Interner}; +use rustc_type_ir::{self as ty, Interner}; use crate::delegate::SolverDelegate; use crate::solve::eval_ctxt::canonical; @@ -94,31 +93,10 @@ impl<I: Interner> WipGoalEvaluation<I> { } } -#[derive_where(PartialEq, Eq; I: Interner)] -pub(in crate::solve) enum WipCanonicalGoalEvaluationKind<I: Interner> { - Overflow, - CycleInStack, - ProvisionalCacheHit, - Interned { final_revision: I::CanonicalGoalEvaluationStepRef }, -} - -impl<I: Interner> std::fmt::Debug for WipCanonicalGoalEvaluationKind<I> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Overflow => write!(f, "Overflow"), - Self::CycleInStack => write!(f, "CycleInStack"), - Self::ProvisionalCacheHit => write!(f, "ProvisionalCacheHit"), - Self::Interned { final_revision: _ } => { - f.debug_struct("Interned").finish_non_exhaustive() - } - } - } -} - #[derive_where(PartialEq, Eq, Debug; I: Interner)] struct WipCanonicalGoalEvaluation<I: Interner> { goal: CanonicalInput<I>, - kind: Option<WipCanonicalGoalEvaluationKind<I>>, + encountered_overflow: bool, /// Only used for uncached goals. After we finished evaluating /// the goal, this is interned and moved into `kind`. 
final_revision: Option<WipCanonicalGoalEvaluationStep<I>>, @@ -127,25 +105,17 @@ struct WipCanonicalGoalEvaluation<I: Interner> { impl<I: Interner> WipCanonicalGoalEvaluation<I> { fn finalize(self) -> inspect::CanonicalGoalEvaluation<I> { - // We've already interned the final revision in - // `fn finalize_canonical_goal_evaluation`. - assert!(self.final_revision.is_none()); - let kind = match self.kind.unwrap() { - WipCanonicalGoalEvaluationKind::Overflow => { + inspect::CanonicalGoalEvaluation { + goal: self.goal, + kind: if self.encountered_overflow { + assert!(self.final_revision.is_none()); inspect::CanonicalGoalEvaluationKind::Overflow - } - WipCanonicalGoalEvaluationKind::CycleInStack => { - inspect::CanonicalGoalEvaluationKind::CycleInStack - } - WipCanonicalGoalEvaluationKind::ProvisionalCacheHit => { - inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit - } - WipCanonicalGoalEvaluationKind::Interned { final_revision } => { + } else { + let final_revision = self.final_revision.unwrap().finalize(); inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision } - } - }; - - inspect::CanonicalGoalEvaluation { goal: self.goal, kind, result: self.result.unwrap() } + }, + result: self.result.unwrap(), + } } } @@ -308,7 +278,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> { ) -> ProofTreeBuilder<D> { self.nested(|| WipCanonicalGoalEvaluation { goal, - kind: None, + encountered_overflow: false, final_revision: None, result: None, }) @@ -329,11 +299,11 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> { } } - pub fn canonical_goal_evaluation_kind(&mut self, kind: WipCanonicalGoalEvaluationKind<I>) { + pub fn canonical_goal_evaluation_overflow(&mut self) { if let Some(this) = self.as_mut() { match this { DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => { - assert_eq!(canonical_goal_evaluation.kind.replace(kind), None); + canonical_goal_evaluation.encountered_overflow = true; } _ => unreachable!(), }; @@ -547,51 +517,3 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> { } } } - -impl<D, I> search_graph::ProofTreeBuilder<I> for ProofTreeBuilder<D> -where - D: SolverDelegate<Interner = I>, - I: Interner, -{ - fn try_apply_proof_tree( - &mut self, - proof_tree: Option<I::CanonicalGoalEvaluationStepRef>, - ) -> bool { - if !self.is_noop() { - if let Some(final_revision) = proof_tree { - let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision }; - self.canonical_goal_evaluation_kind(kind); - true - } else { - false - } - } else { - true - } - } - - fn on_provisional_cache_hit(&mut self) { - self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); - } - - fn on_cycle_in_stack(&mut self) { - self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::CycleInStack); - } - - fn finalize_canonical_goal_evaluation( - &mut self, - tcx: I, - ) -> Option<I::CanonicalGoalEvaluationStepRef> { - self.as_mut().map(|this| match this { - DebugSolver::CanonicalGoalEvaluation(evaluation) => { - let final_revision = mem::take(&mut evaluation.final_revision).unwrap(); - let final_revision = - tcx.intern_canonical_goal_evaluation_step(final_revision.finalize()); - let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision }; - assert_eq!(evaluation.kind.replace(kind), None); - final_revision - } - _ => unreachable!(), - }) - } -} diff --git a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs 
b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs index fe053a506e7..81c89fad8e8 100644 --- a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs +++ b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs @@ -1,12 +1,13 @@ +use std::convert::Infallible; use std::marker::PhantomData; use rustc_type_ir::inherent::*; -use rustc_type_ir::search_graph::{self, CycleKind, UsageKind}; +use rustc_type_ir::search_graph::{self, PathKind}; use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult}; use rustc_type_ir::Interner; -use super::inspect::{self, ProofTreeBuilder}; -use super::FIXPOINT_STEP_LIMIT; +use super::inspect::ProofTreeBuilder; +use super::{has_no_inference_or_external_constraints, FIXPOINT_STEP_LIMIT}; use crate::delegate::SolverDelegate; /// This type is never constructed. We only use it to implement `search_graph::Delegate` @@ -22,43 +23,48 @@ where { type Cx = D::Interner; + const ENABLE_PROVISIONAL_CACHE: bool = true; + type ValidationScope = Infallible; + fn enter_validation_scope( + _cx: Self::Cx, + _input: CanonicalInput<I>, + ) -> Option<Self::ValidationScope> { + None + } + const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT; type ProofTreeBuilder = ProofTreeBuilder<D>; + fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool { + inspect.is_noop() + } + const DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW: usize = 4; fn recursion_limit(cx: I) -> usize { cx.recursion_limit() } fn initial_provisional_result( cx: I, - kind: CycleKind, + kind: PathKind, input: CanonicalInput<I>, ) -> QueryResult<I> { match kind { - CycleKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes), - CycleKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)), + PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes), + PathKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)), } } - fn reached_fixpoint( - cx: I, - kind: UsageKind, + fn is_initial_provisional_result( + cx: Self::Cx, + kind: PathKind, input: CanonicalInput<I>, - provisional_result: Option<QueryResult<I>>, result: QueryResult<I>, ) -> bool { - if let Some(r) = provisional_result { - r == result - } else { - match kind { - UsageKind::Single(CycleKind::Coinductive) => { - response_no_constraints(cx, input, Certainty::Yes) == result - } - UsageKind::Single(CycleKind::Inductive) => { - response_no_constraints(cx, input, Certainty::overflow(false)) == result - } - UsageKind::Mixed => false, + match kind { + PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes) == result, + PathKind::Inductive => { + response_no_constraints(cx, input, Certainty::overflow(false)) == result } } } @@ -68,7 +74,7 @@ where inspect: &mut ProofTreeBuilder<D>, input: CanonicalInput<I>, ) -> QueryResult<I> { - inspect.canonical_goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::Overflow); + inspect.canonical_goal_evaluation_overflow(); response_no_constraints(cx, input, Certainty::overflow(true)) } @@ -76,6 +82,22 @@ where response_no_constraints(cx, input, Certainty::overflow(false)) } + fn is_ambiguous_result(result: QueryResult<I>) -> bool { + result.is_ok_and(|response| { + has_no_inference_or_external_constraints(response) + && matches!(response.value.certainty, Certainty::Maybe(_)) + }) + } + + fn propagate_ambiguity( + cx: I, + for_input: CanonicalInput<I>, + from_result: QueryResult<I>, + ) -> QueryResult<I> { + let certainty = from_result.unwrap().value.certainty; + response_no_constraints(cx, for_input, 
certainty) + } + fn step_is_coinductive(cx: I, input: CanonicalInput<I>) -> bool { input.value.goal.predicate.is_coinductive(cx) } diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index b1dba712f79..1314b7eb6ff 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -1145,7 +1145,7 @@ where | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(_, _) diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index e6b04080c8d..37079271493 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -5,6 +5,7 @@ #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] #![feature(array_windows)] +#![feature(assert_matches)] #![feature(box_patterns)] #![feature(debug_closure_helpers)] #![feature(if_let_guard)] diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 4b8e4c25e16..9b3b6d5f9ad 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -10,6 +10,7 @@ mod path; mod stmt; mod ty; +use std::assert_matches::debug_assert_matches; use std::ops::Range; use std::{fmt, mem, slice}; @@ -1166,10 +1167,12 @@ impl<'a> Parser<'a> { match self.token_cursor.tree_cursor.look_ahead(0) { Some(tree) => { // Indexing stayed within the current token tree. - return match tree { - TokenTree::Token(token, _) => looker(token), - TokenTree::Delimited(dspan, _, delim, _) => { - looker(&Token::new(token::OpenDelim(*delim), dspan.open)) + match tree { + TokenTree::Token(token, _) => return looker(token), + &TokenTree::Delimited(dspan, _, delim, _) => { + if delim != Delimiter::Invisible { + return looker(&Token::new(token::OpenDelim(delim), dspan.open)); + } } }; } @@ -1385,7 +1388,7 @@ impl<'a> Parser<'a> { // can capture these tokens if necessary. self.bump(); if self.token_cursor.stack.len() == target_depth { - debug_assert!(matches!(self.token.kind, token::CloseDelim(_))); + debug_assert_matches!(self.token.kind, token::CloseDelim(_)); break; } } diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 2d82742f66c..cb8e8d30988 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::io::prelude::*; use std::iter::Peekable; use std::path::{Path, PathBuf}; @@ -1747,7 +1748,7 @@ fn out_of_line_mod() { .unwrap(); let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() }; - assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2)); + assert_matches!(mod_kind, ast::ModKind::Loaded(items, ..) 
if items.len() == 2); }); } diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index 59c9d1e49f5..0318d34fb73 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -223,6 +223,9 @@ passes_doc_masked_only_extern_crate = .not_an_extern_crate_label = not an `extern crate` item .note = read <https://doc.rust-lang.org/unstable-book/language-features/doc-masked.html> for more information +passes_doc_rust_logo = + the `#[doc(rust_logo)]` attribute is used for Rust branding + passes_doc_test_literal = `#![doc(test(...)]` does not take a literal passes_doc_test_takes_list = @@ -595,6 +598,9 @@ passes_remove_fields = *[other] fields } +passes_repr_align_function = + `repr(align)` attributes on functions are unstable + passes_repr_conflicting = conflicting representation hints diff --git a/compiler/rustc_passes/src/abi_test.rs b/compiler/rustc_passes/src/abi_test.rs index 839b96fb3de..d0cc123c41a 100644 --- a/compiler/rustc_passes/src/abi_test.rs +++ b/compiler/rustc_passes/src/abi_test.rs @@ -127,14 +127,17 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut for meta_item in meta_items { match meta_item.name_or_empty() { sym::debug => { - let ty::FnPtr(sig) = ty.kind() else { + let ty::FnPtr(sig_tys, hdr) = ty.kind() else { span_bug!( meta_item.span(), "`#[rustc_abi(debug)]` on a type alias requires function pointer type" ); }; let abi = unwrap_fn_abi( - tcx.fn_abi_of_fn_ptr(param_env.and((*sig, /* extra_args */ ty::List::empty()))), + tcx.fn_abi_of_fn_ptr( + param_env + .and((sig_tys.with(*hdr), /* extra_args */ ty::List::empty())), + ), tcx, item_def_id, ); @@ -155,7 +158,7 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut "`#[rustc_abi(assert_eq)]` on a type alias requires pair type" ); }; - let ty::FnPtr(sig1) = field1.kind() else { + let ty::FnPtr(sig_tys1, hdr1) = field1.kind() else { span_bug!( meta_item.span(), "`#[rustc_abi(assert_eq)]` on a type alias requires pair of function pointer types" @@ -163,12 +166,13 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut }; let abi1 = unwrap_fn_abi( tcx.fn_abi_of_fn_ptr( - param_env.and((*sig1, /* extra_args */ ty::List::empty())), + param_env + .and((sig_tys1.with(*hdr1), /* extra_args */ ty::List::empty())), ), tcx, item_def_id, ); - let ty::FnPtr(sig2) = field2.kind() else { + let ty::FnPtr(sig_tys2, hdr2) = field2.kind() else { span_bug!( meta_item.span(), "`#[rustc_abi(assert_eq)]` on a type alias requires pair of function pointer types" @@ -176,7 +180,8 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut }; let abi2 = unwrap_fn_abi( tcx.fn_abi_of_fn_ptr( - param_env.and((*sig2, /* extra_args */ ty::List::empty())), + param_env + .and((sig_tys2.with(*hdr2), /* extra_args */ ty::List::empty())), ), tcx, item_def_id, diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index 2a63ecfe658..bd157d1d619 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -1142,7 +1142,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { /// of one item. Read the documentation of [`check_doc_inline`] for more information. 
/// /// [`check_doc_inline`]: Self::check_doc_inline - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn check_doc_attrs( &self, attr: &Attribute, @@ -1220,7 +1219,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { &self.tcx.sess, sym::rustdoc_internals, meta.span(), - "the `#[doc(rust_logo)]` attribute is used for Rust branding", + fluent::passes_doc_rust_logo, ) .emit(); } @@ -1736,7 +1735,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks if the `#[repr]` attributes on `item` are valid. - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn check_repr( &self, attrs: &[Attribute], @@ -1793,7 +1791,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { &self.tcx.sess, sym::fn_align, hint.span(), - "`repr(align)` attributes on functions are unstable", + fluent::passes_repr_align_function, ) .emit(); } diff --git a/compiler/rustc_passes/src/entry.rs b/compiler/rustc_passes/src/entry.rs index 48f55d4c3a0..ecb345bb51a 100644 --- a/compiler/rustc_passes/src/entry.rs +++ b/compiler/rustc_passes/src/entry.rs @@ -106,7 +106,6 @@ fn check_and_search_item(id: ItemId, ctxt: &mut EntryContext<'_>) { } } -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_>) -> Option<(DefId, EntryFnType)> { if let Some((def_id, _)) = visitor.start_fn { Some((def_id.to_def_id(), EntryFnType::Start)) diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index 10b7968a1a7..bcd3d3092c3 100644 --- a/compiler/rustc_pattern_analysis/src/rustc.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -413,7 +413,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { | ty::Foreign(_) | ty::RawPtr(_, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Pat(_, _) | ty::Dynamic(_, _, _) | ty::Closure(..) 
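The hunks below (rustc_sanitizers, rustc_smir, rustc_symbol_mangling) continue the migration from `ty::FnPtr(sig)` to the two-field `ty::FnPtr(sig_tys, hdr)`, recombining the pieces with `sig_tys.with(*hdr)` only where a full signature is actually needed. A minimal, self-contained sketch of the shape of this change; the types and fields are simplified stand-ins, not the real `rustc_type_ir` items:

// Toy model of splitting a function signature into its types and its header.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FnHeader {
    is_unsafe: bool,
    c_variadic: bool,
}

#[derive(Clone, Debug)]
struct FnSigTys {
    inputs_and_output: Vec<String>,
}

#[derive(Clone, Debug)]
struct FnSig {
    tys: FnSigTys,
    header: FnHeader,
}

impl FnSigTys {
    fn inputs(&self) -> &[String] {
        &self.inputs_and_output[..self.inputs_and_output.len() - 1]
    }

    // Mirrors the `sig_tys.with(*hdr)` calls in the hunks below: reattach the
    // header to recover a full signature where one is required.
    fn with(self, header: FnHeader) -> FnSig {
        FnSig { tys: self, header }
    }
}

enum TyKind {
    FnPtr(FnSigTys, FnHeader),
    Other,
}

fn main() {
    let ty = TyKind::FnPtr(
        FnSigTys { inputs_and_output: vec!["u8".into(), "bool".into()] },
        FnHeader { is_unsafe: false, c_variadic: false },
    );

    match ty {
        // Call sites that only need arity or parameter types can ignore the
        // header entirely, as in the `ty::FnPtr(sig_tys, _)` matches above.
        TyKind::FnPtr(sig_tys, hdr) => {
            println!("{} input(s), unsafe: {}", sig_tys.inputs().len(), hdr.is_unsafe);
            let sig = sig_tys.with(hdr);
            println!("full signature: {sig:?}");
        }
        TyKind::Other => {}
    }
}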
diff --git a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs index 61de338eab1..a4e4f50e8f2 100644 --- a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs +++ b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs @@ -607,10 +607,15 @@ pub fn encode_ty<'tcx>( typeid.push_str(&s); } - ty::FnPtr(fn_sig) => { + ty::FnPtr(sig_tys, hdr) => { // PF<return-type><parameter-type1..parameter-typeN>E let mut s = String::from("P"); - s.push_str(&encode_fnsig(tcx, &fn_sig.skip_binder(), dict, TypeIdOptions::empty())); + s.push_str(&encode_fnsig( + tcx, + &sig_tys.with(*hdr).skip_binder(), + dict, + TypeIdOptions::empty(), + )); compress(dict, DictKey::Ty(ty, TyQ::None), &mut s); typeid.push_str(&s); } diff --git a/compiler/rustc_smir/src/rustc_internal/internal.rs b/compiler/rustc_smir/src/rustc_internal/internal.rs index a4577461094..f52cb010a87 100644 --- a/compiler/rustc_smir/src/rustc_internal/internal.rs +++ b/compiler/rustc_smir/src/rustc_internal/internal.rs @@ -131,7 +131,10 @@ impl RustcInternal for RigidTy { RigidTy::FnDef(def, args) => { rustc_ty::TyKind::FnDef(def.0.internal(tables, tcx), args.internal(tables, tcx)) } - RigidTy::FnPtr(sig) => rustc_ty::TyKind::FnPtr(sig.internal(tables, tcx)), + RigidTy::FnPtr(sig) => { + let (sig_tys, hdr) = sig.internal(tables, tcx).split(); + rustc_ty::TyKind::FnPtr(sig_tys, hdr) + } RigidTy::Closure(def, args) => { rustc_ty::TyKind::Closure(def.0.internal(tables, tcx), args.internal(tables, tcx)) } diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index ef2eb7d52ea..332fe22d869 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -352,7 +352,9 @@ impl<'tcx> Stable<'tcx> for ty::TyKind<'tcx> { ty::FnDef(def_id, generic_args) => { TyKind::RigidTy(RigidTy::FnDef(tables.fn_def(*def_id), generic_args.stable(tables))) } - ty::FnPtr(poly_fn_sig) => TyKind::RigidTy(RigidTy::FnPtr(poly_fn_sig.stable(tables))), + ty::FnPtr(sig_tys, hdr) => { + TyKind::RigidTy(RigidTy::FnPtr(sig_tys.with(*hdr).stable(tables))) + } ty::Dynamic(existential_predicates, region, dyn_kind) => { TyKind::RigidTy(RigidTy::Dynamic( existential_predicates diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 32fca6733bb..a2e94492f8c 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -472,6 +472,7 @@ symbols! { attr, attr_literals, attributes, + audit_that, augmented_assignments, auto_traits, automatically_derived, @@ -1619,6 +1620,7 @@ symbols! 
{ rustc_dirty, rustc_do_not_const_check, rustc_doc_primitive, + rustc_driver, rustc_dummy, rustc_dump_def_parents, rustc_dump_item_bounds, diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index c2451c08d11..3a606f244e3 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -427,7 +427,8 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { self.print_def_path(def_id, &[])?; } - ty::FnPtr(sig) => { + ty::FnPtr(sig_tys, hdr) => { + let sig = sig_tys.with(hdr); self.push("F"); self.in_binder(&sig, |cx, sig| { if sig.safety == hir::Safety::Unsafe { diff --git a/compiler/rustc_target/src/abi/call/powerpc64.rs b/compiler/rustc_target/src/abi/call/powerpc64.rs index 11a6cb52bab..749eea0ef63 100644 --- a/compiler/rustc_target/src/abi/call/powerpc64.rs +++ b/compiler/rustc_target/src/abi/call/powerpc64.rs @@ -41,64 +41,23 @@ where }) } -fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, abi: ABI) +fn classify<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI, is_ret: bool) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout, { - if !ret.layout.is_sized() { + if arg.is_ignore() || !arg.layout.is_sized() { // Not touching this... return; } - if !ret.layout.is_aggregate() { - ret.extend_integer_width_to(64); + if !arg.layout.is_aggregate() { + arg.extend_integer_width_to(64); return; } // The ELFv1 ABI doesn't return aggregates in registers - if abi == ELFv1 { - ret.make_indirect(); - return; - } - - if let Some(uniform) = is_homogeneous_aggregate(cx, ret, abi) { - ret.cast_to(uniform); - return; - } - - let size = ret.layout.size; - let bits = size.bits(); - if bits <= 128 { - let unit = if cx.data_layout().endian == Endian::Big { - Reg { kind: RegKind::Integer, size } - } else if bits <= 8 { - Reg::i8() - } else if bits <= 16 { - Reg::i16() - } else if bits <= 32 { - Reg::i32() - } else { - Reg::i64() - }; - - ret.cast_to(Uniform::new(unit, size)); - return; - } - - ret.make_indirect(); -} - -fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI) -where - Ty: TyAbiInterface<'a, C> + Copy, - C: HasDataLayout, -{ - if !arg.layout.is_sized() { - // Not touching this... - return; - } - if !arg.layout.is_aggregate() { - arg.extend_integer_width_to(64); + if is_ret && abi == ELFv1 { + arg.make_indirect(); return; } @@ -108,7 +67,10 @@ where } let size = arg.layout.size; - if size.bits() <= 64 { + if is_ret && size.bits() > 128 { + // Non-homogeneous aggregates larger than two doublewords are returned indirectly. + arg.make_indirect(); + } else if size.bits() <= 64 { // Aggregates smaller than a doubleword should appear in // the least-significant bits of the parameter doubleword. 
arg.cast_to(Reg { kind: RegKind::Integer, size }) @@ -138,14 +100,9 @@ where } }; - if !fn_abi.ret.is_ignore() { - classify_ret(cx, &mut fn_abi.ret, abi); - } + classify(cx, &mut fn_abi.ret, abi, true); for arg in fn_abi.args.iter_mut() { - if arg.is_ignore() { - continue; - } - classify_arg(cx, arg, abi); + classify(cx, arg, abi, false); } } diff --git a/compiler/rustc_target/src/spec/abi/tests.rs b/compiler/rustc_target/src/spec/abi/tests.rs index 251a12fe7aa..4823058dd69 100644 --- a/compiler/rustc_target/src/spec/abi/tests.rs +++ b/compiler/rustc_target/src/spec/abi/tests.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use super::*; #[allow(non_snake_case)] @@ -16,7 +18,7 @@ fn lookup_cdecl() { #[test] fn lookup_baz() { let abi = lookup("baz"); - assert!(matches!(abi, Err(AbiUnsupported::Unrecognized))) + assert_matches!(abi, Err(AbiUnsupported::Unrecognized)); } #[test] diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 8ce51ba2463..80f89a0ab2b 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -1561,6 +1561,7 @@ supported_targets! { ("powerpc-unknown-linux-gnu", powerpc_unknown_linux_gnu), ("powerpc-unknown-linux-gnuspe", powerpc_unknown_linux_gnuspe), ("powerpc-unknown-linux-musl", powerpc_unknown_linux_musl), + ("powerpc-unknown-linux-muslspe", powerpc_unknown_linux_muslspe), ("powerpc64-ibm-aix", powerpc64_ibm_aix), ("powerpc64-unknown-linux-gnu", powerpc64_unknown_linux_gnu), ("powerpc64-unknown-linux-musl", powerpc64_unknown_linux_musl), diff --git a/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_muslspe.rs b/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_muslspe.rs new file mode 100644 index 00000000000..d19015729ec --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_muslspe.rs @@ -0,0 +1,28 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-muslspe".into(), + metadata: crate::spec::TargetMetadata { + description: Some("PowerPC SPE Linux with musl".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(true), + }, + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { + abi: "spe".into(), + endian: Endian::Big, + mcount: "_mcount".into(), + ..base + }, + } +} diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 8ccb2a8483a..5193333be8e 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -1087,9 +1087,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { values } - (ty::FnDef(did1, args1), ty::FnPtr(sig2)) => { + (ty::FnDef(did1, args1), ty::FnPtr(sig_tys2, hdr2)) => { let sig1 = self.tcx.fn_sig(*did1).instantiate(self.tcx, args1); - let mut values = self.cmp_fn_sig(&sig1, sig2); + let mut values = self.cmp_fn_sig(&sig1, &sig_tys2.with(*hdr2)); values.0.push_highlighted(format!( " {{{}}}", self.tcx.def_path_str_with_args(*did1, args1) @@ -1097,16 +1097,18 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { values } - 
(ty::FnPtr(sig1), ty::FnDef(did2, args2)) => { + (ty::FnPtr(sig_tys1, hdr1), ty::FnDef(did2, args2)) => { let sig2 = self.tcx.fn_sig(*did2).instantiate(self.tcx, args2); - let mut values = self.cmp_fn_sig(sig1, &sig2); + let mut values = self.cmp_fn_sig(&sig_tys1.with(*hdr1), &sig2); values .1 .push_normal(format!(" {{{}}}", self.tcx.def_path_str_with_args(*did2, args2))); values } - (ty::FnPtr(sig1), ty::FnPtr(sig2)) => self.cmp_fn_sig(sig1, sig2), + (ty::FnPtr(sig_tys1, hdr1), ty::FnPtr(sig_tys2, hdr2)) => { + self.cmp_fn_sig(&sig_tys1.with(*hdr1), &sig_tys2.with(*hdr2)) + } _ => { let mut strs = (DiagStyledString::new(), DiagStyledString::new()); diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index 864510bb650..05c79170902 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -4,6 +4,7 @@ use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::error::{ExpectedFound, TypeError}; +use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; use rustc_middle::ty::print::{FmtPrinter, Printer}; use rustc_middle::ty::{self, suggest_constraining_type_param, Ty}; use rustc_span::def_id::DefId; @@ -313,11 +314,15 @@ impl<T> Trait<T> for X { (ty::Dynamic(t, _, ty::DynKind::Dyn), _) if let Some(def_id) = t.principal_def_id() => { - let mut impl_def_ids = vec![]; + let mut has_matching_impl = false; tcx.for_each_relevant_impl(def_id, values.found, |did| { - impl_def_ids.push(did) + if DeepRejectCtxt::new(tcx, TreatParams::ForLookup) + .types_may_unify(values.found, tcx.type_of(did).skip_binder()) + { + has_matching_impl = true; + } }); - if let [_] = &impl_def_ids[..] { + if has_matching_impl { let trait_name = tcx.item_name(def_id); diag.help(format!( "`{}` implements `{trait_name}` so you could box the found value \ @@ -330,11 +335,15 @@ impl<T> Trait<T> for X { (_, ty::Dynamic(t, _, ty::DynKind::Dyn)) if let Some(def_id) = t.principal_def_id() => { - let mut impl_def_ids = vec![]; + let mut has_matching_impl = false; tcx.for_each_relevant_impl(def_id, values.expected, |did| { - impl_def_ids.push(did) + if DeepRejectCtxt::new(tcx, TreatParams::ForLookup) + .types_may_unify(values.expected, tcx.type_of(did).skip_binder()) + { + has_matching_impl = true; + } }); - if let [_] = &impl_def_ids[..] { + if has_matching_impl { let trait_name = tcx.item_name(def_id); diag.help(format!( "`{}` implements `{trait_name}` so you could change the expected \ @@ -346,11 +355,15 @@ impl<T> Trait<T> for X { (ty::Dynamic(t, _, ty::DynKind::DynStar), _) if let Some(def_id) = t.principal_def_id() => { - let mut impl_def_ids = vec![]; + let mut has_matching_impl = false; tcx.for_each_relevant_impl(def_id, values.found, |did| { - impl_def_ids.push(did) + if DeepRejectCtxt::new(tcx, TreatParams::ForLookup) + .types_may_unify(values.found, tcx.type_of(did).skip_binder()) + { + has_matching_impl = true; + } }); - if let [_] = &impl_def_ids[..] 
{ + if has_matching_impl { let trait_name = tcx.item_name(def_id); diag.help(format!( "`{}` implements `{trait_name}`, `#[feature(dyn_star)]` is likely \ @@ -441,9 +454,9 @@ impl<T> Trait<T> for X { } } } - (ty::FnPtr(sig), ty::FnDef(def_id, _)) - | (ty::FnDef(def_id, _), ty::FnPtr(sig)) => { - if tcx.fn_sig(def_id).skip_binder().safety() < sig.safety() { + (ty::FnPtr(_, hdr), ty::FnDef(def_id, _)) + | (ty::FnDef(def_id, _), ty::FnPtr(_, hdr)) => { + if tcx.fn_sig(def_id).skip_binder().safety() < hdr.safety { diag.note( "unsafe functions cannot be coerced into safe function pointers", ); diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs index ee159aa0b77..35f68a56d2d 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs @@ -383,8 +383,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { return; } match (&expected_inner.kind(), &found_inner.kind()) { - (ty::FnPtr(sig), ty::FnDef(did, args)) => { - let expected_sig = &(self.normalize_fn_sig)(*sig); + (ty::FnPtr(sig_tys, hdr), ty::FnDef(did, args)) => { + let sig = sig_tys.with(*hdr); + let expected_sig = &(self.normalize_fn_sig)(sig); let found_sig = &(self.normalize_fn_sig)(self.tcx.fn_sig(*did).instantiate(self.tcx, args)); @@ -402,11 +403,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { (false, true) => FunctionPointerSuggestion::RemoveRef { span, fn_name }, (true, true) => { diag.subdiagnostic(FnItemsAreDistinct); - FunctionPointerSuggestion::CastRef { span, fn_name, sig: *sig } + FunctionPointerSuggestion::CastRef { span, fn_name, sig } } (false, false) => { diag.subdiagnostic(FnItemsAreDistinct); - FunctionPointerSuggestion::Cast { span, fn_name, sig: *sig } + FunctionPointerSuggestion::Cast { span, fn_name, sig } } }; diag.subdiagnostic(sugg); @@ -449,10 +450,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { diag.subdiagnostic(sug); } - (ty::FnDef(did, args), ty::FnPtr(sig)) => { + (ty::FnDef(did, args), ty::FnPtr(sig_tys, hdr)) => { let expected_sig = &(self.normalize_fn_sig)(self.tcx.fn_sig(*did).instantiate(self.tcx, args)); - let found_sig = &(self.normalize_fn_sig)(*sig); + let found_sig = &(self.normalize_fn_sig)(sig_tys.with(*hdr)); if !self.same_type_modulo_infer(*found_sig, *expected_sig) { return; diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 95d4509c100..326a0e4e35c 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -230,8 +230,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { post_message, ); - let (err_msg, safe_transmute_explanation) = if Some(main_trait_ref.def_id()) - == self.tcx.lang_items().transmute_trait() + let (err_msg, safe_transmute_explanation) = if self.tcx.is_lang_item(main_trait_ref.def_id(), LangItem::TransmuteTrait) { // Recompute the safe transmute reason and use that for the error reporting match self.get_safe_transmute_error_and_reason( @@ -375,7 +374,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let impl_candidates = self.find_similar_impl_candidates(leaf_trait_predicate); suggested = if let &[cand] = &impl_candidates[..] 
{ let cand = cand.trait_ref; - if let (ty::FnPtr(_), ty::FnDef(..)) = + if let (ty::FnPtr(..), ty::FnDef(..)) = (cand.self_ty().kind(), main_trait_ref.self_ty().skip_binder().kind()) { err.span_suggestion( @@ -687,10 +686,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let mut applied_do_not_recommend = false; loop { if let ObligationCauseCode::ImplDerived(ref c) = base_cause { - if self.tcx.has_attrs_with_path( - c.impl_or_alias_def_id, - &[sym::diagnostic, sym::do_not_recommend], - ) { + if self.tcx.do_not_recommend_impl(c.impl_or_alias_def_id) { let code = (*c.derived.parent_code).clone(); obligation.cause.map_code(|_| code); obligation.predicate = c.derived.parent_trait_pred.upcast(self.tcx); @@ -793,8 +789,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // is unimplemented is because async closures don't implement `Fn`/`FnMut` // if they have captures. if let Some(by_ref_captures) = by_ref_captures - && let ty::FnPtr(sig) = by_ref_captures.kind() - && !sig.skip_binder().output().is_unit() + && let ty::FnPtr(sig_tys, _) = by_ref_captures.kind() + && !sig_tys.skip_binder().output().is_unit() { let mut err = self.dcx().create_err(AsyncClosureNotFn { span: self.tcx.def_span(closure_def_id), @@ -1060,7 +1056,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { "`{ty}` is forbidden as the type of a const generic parameter", ) } - ty::FnPtr(_) => { + ty::FnPtr(..) => { struct_span_code_err!( self.dcx(), span, @@ -1629,11 +1625,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .tcx .all_impls(def_id) // ignore `do_not_recommend` items - .filter(|def_id| { - !self - .tcx - .has_attrs_with_path(*def_id, &[sym::diagnostic, sym::do_not_recommend]) - }) + .filter(|def_id| !self.tcx.do_not_recommend_impl(*def_id)) // Ignore automatically derived impls and `!Trait` impls. .filter_map(|def_id| self.tcx.impl_trait_header(def_id)) .filter_map(|header| { @@ -1843,10 +1835,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if let &[cand] = &candidates[..] 
{ let (desc, mention_castable) = match (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) { - (ty::FnPtr(_), ty::FnDef(..)) => { + (ty::FnPtr(..), ty::FnDef(..)) => { (" implemented for fn pointer `", ", cast using `as`") } - (ty::FnPtr(_), _) => (" implemented for fn pointer `", ""), + (ty::FnPtr(..), _) => (" implemented for fn pointer `", ""), _ => (" implemented for `", ""), }; err.highlighted_help(vec![ @@ -1903,12 +1895,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let impl_candidates = impl_candidates .into_iter() .cloned() - .filter(|cand| { - !self.tcx.has_attrs_with_path( - cand.impl_def_id, - &[sym::diagnostic, sym::do_not_recommend], - ) - }) + .filter(|cand| !self.tcx.do_not_recommend_impl(cand.impl_def_id)) .collect::<Vec<_>>(); let def_id = trait_ref.def_id(); diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index 9269177eb50..d0f76f0d50e 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -1077,10 +1077,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let Some((def_id_or_name, output, inputs)) = (self.autoderef_steps)(found).into_iter().find_map(|(found, _)| { match *found.kind() { - ty::FnPtr(fn_sig) => Some(( + ty::FnPtr(sig_tys, _) => Some(( DefIdOrName::Name("function pointer"), - fn_sig.output(), - fn_sig.inputs(), + sig_tys.output(), + sig_tys.inputs(), )), ty::FnDef(def_id, _) => { let fn_sig = found.fn_sig(self.tcx); @@ -1977,20 +1977,22 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let ObligationCauseCode::FunctionArg { arg_hir_id, .. } = cause else { return; }; - let ty::FnPtr(expected) = expected.kind() else { + let ty::FnPtr(sig_tys, hdr) = expected.kind() else { return; }; - let ty::FnPtr(found) = found.kind() else { + let expected = sig_tys.with(*hdr); + let ty::FnPtr(sig_tys, hdr) = found.kind() else { return; }; + let found = sig_tys.with(*hdr); let Node::Expr(arg) = self.tcx.hir_node(*arg_hir_id) else { return; }; let hir::ExprKind::Path(path) = arg.kind else { return; }; - let expected_inputs = self.tcx.instantiate_bound_regions_with_erased(*expected).inputs(); - let found_inputs = self.tcx.instantiate_bound_regions_with_erased(*found).inputs(); + let expected_inputs = self.tcx.instantiate_bound_regions_with_erased(expected).inputs(); + let found_inputs = self.tcx.instantiate_bound_regions_with_erased(found).inputs(); let both_tys = expected_inputs.iter().copied().zip(found_inputs.iter().copied()); let arg_expr = |infcx: &InferCtxt<'tcx>, name, expected: Ty<'tcx>, found: Ty<'tcx>| { @@ -2829,7 +2831,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // Do not suggest relaxing if there is an explicit `Sized` obligation. 
&& !bounds.iter() .filter_map(|bound| bound.trait_ref()) - .any(|tr| tr.trait_def_id() == tcx.lang_items().sized_trait()) + .any(|tr| tr.trait_def_id().is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Sized))) { let (span, separator) = if let [.., last] = bounds { (last.span().shrink_to_hi(), " +") @@ -4790,13 +4792,13 @@ fn hint_missing_borrow<'tcx>( } let found_args = match found.kind() { - ty::FnPtr(f) => infcx.enter_forall(*f, |f| f.inputs().iter()), + ty::FnPtr(sig_tys, _) => infcx.enter_forall(*sig_tys, |sig_tys| sig_tys.inputs().iter()), kind => { span_bug!(span, "found was converted to a FnPtr above but is now {:?}", kind) } }; let expected_args = match expected.kind() { - ty::FnPtr(f) => infcx.enter_forall(*f, |f| f.inputs().iter()), + ty::FnPtr(sig_tys, _) => infcx.enter_forall(*sig_tys, |sig_tys| sig_tys.inputs().iter()), kind => { span_bug!(span, "expected was converted to a FnPtr above but is now {:?}", kind) } diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index 49fa775a0a1..de8951ef720 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -13,7 +13,6 @@ use rustc_middle::bug; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::{self, TyCtxt}; use rustc_next_trait_solver::solve::{GenerateProofTree, SolverDelegateEvalExt as _}; -use rustc_span::symbol::sym; use super::delegate::SolverDelegate; use super::inspect::{self, ProofTreeInferCtxtExt, ProofTreeVisitor}; @@ -440,10 +439,7 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { source: CandidateSource::Impl(impl_def_id), result: _, } = candidate.kind() - && goal - .infcx() - .tcx - .has_attrs_with_path(impl_def_id, &[sym::diagnostic, sym::do_not_recommend]) + && goal.infcx().tcx.do_not_recommend_impl(impl_def_id) { return ControlFlow::Break(self.obligation.clone()); } diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index e8de8457440..51dda25d8ad 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -9,6 +9,8 @@ //! coherence right now and was annoying to implement, so I am leaving it //! as is until we start using it for something else. +use std::assert_matches::assert_matches; + use rustc_ast_ir::try_visit; use rustc_ast_ir::visit::VisitorResult; use rustc_infer::infer::{DefineOpaqueTypes, InferCtxt, InferOk}; @@ -273,10 +275,10 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { steps.push(step) } inspect::ProbeStep::MakeCanonicalResponse { shallow_certainty: c } => { - assert!(matches!( + assert_matches!( shallow_certainty.replace(c), None | Some(Certainty::Maybe(MaybeCause::Ambiguity)) - )); + ); } inspect::ProbeStep::NestedProbe(ref probe) => { match probe.kind { @@ -332,13 +334,9 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { pub fn candidates(&'a self) -> Vec<InspectCandidate<'a, 'tcx>> { let mut candidates = vec![]; - let last_eval_step = match self.evaluation_kind { - inspect::CanonicalGoalEvaluationKind::Overflow - | inspect::CanonicalGoalEvaluationKind::CycleInStack - | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => { - warn!("unexpected root evaluation: {:?}", self.evaluation_kind); - return vec![]; - } + let last_eval_step = match &self.evaluation_kind { + // An annoying edge case in case the recursion limit is 0. 
+ inspect::CanonicalGoalEvaluationKind::Overflow => return vec![], inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision } => final_revision, }; diff --git a/compiler/rustc_trait_selection/src/solve/normalize.rs b/compiler/rustc_trait_selection/src/solve/normalize.rs index 419d7e704de..c93c40b4826 100644 --- a/compiler/rustc_trait_selection/src/solve/normalize.rs +++ b/compiler/rustc_trait_selection/src/solve/normalize.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::fmt::Debug; use std::marker::PhantomData; @@ -63,7 +64,7 @@ where E: FromSolverError<'tcx, NextSolverError<'tcx>>, { fn normalize_alias_ty(&mut self, alias_ty: Ty<'tcx>) -> Result<Ty<'tcx>, Vec<E>> { - assert!(matches!(alias_ty.kind(), ty::Alias(..))); + assert_matches!(alias_ty.kind(), ty::Alias(..)); let infcx = self.at.infcx; let tcx = infcx.tcx; diff --git a/compiler/rustc_trait_selection/src/traits/misc.rs b/compiler/rustc_trait_selection/src/traits/misc.rs index 9a127e752a6..3e65194577e 100644 --- a/compiler/rustc_trait_selection/src/traits/misc.rs +++ b/compiler/rustc_trait_selection/src/traits/misc.rs @@ -1,5 +1,7 @@ //! Miscellaneous type-system utilities that are too small to deserve their own modules. +use std::assert_matches::assert_matches; + use hir::LangItem; use rustc_ast::Mutability; use rustc_data_structures::fx::FxIndexSet; @@ -92,7 +94,7 @@ pub fn type_allowed_to_implement_const_param_ty<'tcx>( lang_item: LangItem, parent_cause: ObligationCause<'tcx>, ) -> Result<(), ConstParamTyImplementationError<'tcx>> { - assert!(matches!(lang_item, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy)); + assert_matches!(lang_item, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy); let inner_tys: Vec<_> = match *self_type.kind() { // Trivially okay as these types are all: diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index 4b62a5c59b2..568214fe022 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1110,7 +1110,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Error(_) => false, } } else if tcx.is_lang_item(trait_ref.def_id, LangItem::PointeeTrait) { - let tail = selcx.tcx().struct_tail_with_normalize( + let tail = selcx.tcx().struct_tail_raw( self_ty, |ty| { // We throw away any obligations we get from this, since we normalize @@ -1149,10 +1149,10 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Never // Extern types have unit metadata, according to RFC 2850 | ty::Foreign(_) - // If returned by `struct_tail_without_normalization` this is a unit struct + // If returned by `struct_tail` this is a unit struct // without any fields, or not a struct, and therefore is Sized. | ty::Adt(..) - // If returned by `struct_tail_without_normalization` this is the empty tuple. + // If returned by `struct_tail` this is the empty tuple. | ty::Tuple(..) // Integers and floats are always Sized, and so have unit type metadata. | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true, @@ -1636,7 +1636,7 @@ fn confirm_fn_pointer_candidate<'cx, 'tcx>( .generics_of(def_id) .host_effect_index .map_or(tcx.consts.true_, |idx| args.const_at(idx)), - ty::FnPtr(_) => tcx.consts.true_, + ty::FnPtr(..) 
=> tcx.consts.true_, _ => unreachable!("only expected FnPtr or FnDef in `confirm_fn_pointer_candidate`"), }; diff --git a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs index d3a1ed52d2e..7d30e652449 100644 --- a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs @@ -33,7 +33,7 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { | ty::Float(_) | ty::Never | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Char | ty::CoroutineWitness(..) | ty::RawPtr(_, _) @@ -224,7 +224,7 @@ pub fn dtorck_constraint_for_ty_inner<'tcx>( | ty::RawPtr(..) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::CoroutineWitness(..) => { // these types never have a destructor } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs index 294c6bfc124..d6687c762c3 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs @@ -1,3 +1,4 @@ +use rustc_hir::LangItem; use rustc_infer::traits::Obligation; pub use rustc_middle::traits::query::type_op::ProvePredicate; use rustc_middle::traits::query::NoSolution; @@ -20,8 +21,7 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> { // such cases. if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_ref)) = key.value.predicate.kind().skip_binder() - && let Some(sized_def_id) = tcx.lang_items().sized_trait() - && trait_ref.def_id() == sized_def_id + && tcx.is_lang_item(trait_ref.def_id(), LangItem::Sized) && trait_ref.self_ty().is_trivially_sized(tcx) { return Some(()); diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index 9de62031311..cb8deeaedb6 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -468,8 +468,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates.vec.push(AsyncClosureCandidate); } // Provide an impl, but only for suitable `fn` pointers. - ty::FnPtr(sig) => { - if sig.is_fn_trait_compatible() { + ty::FnPtr(sig_tys, hdr) => { + if sig_tys.with(hdr).is_fn_trait_compatible() { candidates.vec.push(AsyncClosureCandidate); } } @@ -535,8 +535,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates.ambiguous = true; // Could wind up being a fn() type. } // Provide an impl, but only for suitable `fn` pointers. - ty::FnPtr(sig) => { - if sig.is_fn_trait_compatible() { + ty::FnPtr(sig_tys, hdr) => { + if sig_tys.with(hdr).is_fn_trait_compatible() { candidates .vec .push(FnPointerCandidate { fn_host_effect: self.tcx().consts.true_ }); @@ -819,7 +819,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) @@ -1207,7 +1207,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Foreign(_) | ty::Array(..) 
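Several hunks earlier in this change (the parser tests, the ABI tests, and the solver's inspect, normalize, and misc code) replace `assert!(matches!(..))` with the macros from `std::assert_matches`, gated by the newly added `#![feature(assert_matches)]`. A minimal nightly-only sketch of the macro family; the enum here is made up for illustration:

#![feature(assert_matches)]

use std::assert_matches::{assert_matches, debug_assert_matches};

#[derive(Debug)]
enum ModKind {
    Loaded(Vec<&'static str>),
    Unloaded,
}

fn main() {
    let kind = ModKind::Loaded(vec!["a", "b"]);

    // Unlike `assert!(matches!(..))`, a failure prints the actual value next
    // to the expected pattern, and `if` guards are supported as well.
    assert_matches!(kind, ModKind::Loaded(items) if items.len() == 2);

    // The debug-only variant is compiled out in release builds, mirroring the
    // `debug_assert_matches!` use in the parser hunk above.
    debug_assert_matches!(ModKind::Unloaded, ModKind::Unloaded);
}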
@@ -1290,7 +1290,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::Pat(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -1339,7 +1339,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let self_ty = self.infcx.resolve_vars_if_possible(obligation.self_ty()); match self_ty.skip_binder().kind() { - ty::FnPtr(_) => candidates.vec.push(BuiltinCandidate { has_nested: false }), + ty::FnPtr(..) => candidates.vec.push(BuiltinCandidate { has_nested: false }), ty::Bool | ty::Char | ty::Int(_) diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index ddd8b970cc8..0d7ceca4301 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -1398,7 +1398,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Foreign(_) => {} diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 1b2767a6a62..f002fa27db2 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -2113,7 +2113,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -2170,7 +2170,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { use self::BuiltinImplConditions::{Ambiguous, None, Where}; match *self_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) | ty::Error(_) => Where(ty::Binder::dummy(Vec::new())), + ty::FnDef(..) | ty::FnPtr(..) | ty::Error(_) => Where(ty::Binder::dummy(Vec::new())), ty::Uint(_) | ty::Int(_) @@ -2332,7 +2332,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Error(_) | ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) | ty::Never diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs index 7e5fe7e3c94..a3982c3d987 100644 --- a/compiler/rustc_trait_selection/src/traits/wf.rs +++ b/compiler/rustc_trait_selection/src/traits/wf.rs @@ -812,7 +812,7 @@ impl<'a, 'tcx> TypeVisitor<TyCtxt<'tcx>> for WfPredicates<'a, 'tcx> { return upvars.visit_with(self); } - ty::FnPtr(_) => { + ty::FnPtr(..) => { // Let the visitor iterate into the argument/return // types appearing in the fn signature. 
} diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index d90c3bedc70..34c426f2aa6 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -621,7 +621,7 @@ fn fn_abi_new_uncached<'tcx>( let rust_abi = matches!(sig.abi, RustIntrinsic | Rust | RustCall); let is_drop_in_place = - fn_def_id.is_some() && fn_def_id == cx.tcx.lang_items().drop_in_place_fn(); + fn_def_id.is_some_and(|def_id| cx.tcx.is_lang_item(def_id, LangItem::DropInPlace)); let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> { let span = tracing::debug_span!("arg_of"); diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index 43e49138709..c7ed6e6110f 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ -248,7 +248,7 @@ fn resolve_associated_item<'tcx>( if name == sym::clone { let self_ty = trait_ref.self_ty(); match self_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => (), + ty::FnDef(..) | ty::FnPtr(..) => (), ty::Coroutine(..) | ty::CoroutineWitness(..) | ty::Closure(..) diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index 1eb03fc3bd6..234e1a6d55e 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -184,7 +184,7 @@ fn layout_of_uncached<'tcx>( ty::Int(ity) => scalar(Int(Integer::from_int_ty(dl, ity), true)), ty::Uint(ity) => scalar(Int(Integer::from_uint_ty(dl, ity), false)), ty::Float(fty) => scalar(Float(Float::from_float_ty(fty))), - ty::FnPtr(_) => { + ty::FnPtr(..) => { let mut ptr = scalar_unit(Pointer(dl.instruction_address_space)); ptr.valid_range_mut().start = 1; tcx.mk_layout(LayoutS::scalar(cx, ptr)) @@ -219,9 +219,13 @@ fn layout_of_uncached<'tcx>( // its struct tail cannot be normalized either, so try to get a // more descriptive layout error here, which will lead to less confusing // diagnostics. + // + // We use the raw struct tail function here to get the first tail + // that is an alias, which is likely the cause of the normalization + // error. match tcx.try_normalize_erasing_regions( param_env, - tcx.struct_tail_without_normalization(pointee), + tcx.struct_tail_raw(pointee, |ty| ty, || {}), ) { Ok(_) => {} Err(better_err) => { diff --git a/compiler/rustc_ty_utils/src/layout_sanity_check.rs b/compiler/rustc_ty_utils/src/layout_sanity_check.rs index d8e0443c50b..2223aca28d1 100644 --- a/compiler/rustc_ty_utils/src/layout_sanity_check.rs +++ b/compiler/rustc_ty_utils/src/layout_sanity_check.rs @@ -249,7 +249,7 @@ pub(super) fn sanity_check_layout<'tcx>( if let Variants::Multiple { variants, .. } = &layout.variants { for variant in variants.iter() { // No nested "multiple". - assert!(matches!(variant.variants, Variants::Single { .. })); + assert_matches!(variant.variants, Variants::Single { .. }); // Variants should have the same or a smaller size as the full thing, // and same for alignment. 
if variant.size > layout.size { diff --git a/compiler/rustc_type_ir/src/binder.rs b/compiler/rustc_type_ir/src/binder.rs index c1f6fb36324..d42efbc91e1 100644 --- a/compiler/rustc_type_ir/src/binder.rs +++ b/compiler/rustc_type_ir/src/binder.rs @@ -8,7 +8,7 @@ use derive_where::derive_where; use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable}; #[cfg(feature = "nightly")] use rustc_serialize::Decodable; -use tracing::debug; +use tracing::instrument; use crate::data_structures::SsoHashSet; use crate::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable}; @@ -86,6 +86,7 @@ macro_rules! impl_binder_encode_decode { #[cfg(feature = "nightly")] impl_binder_encode_decode! { ty::FnSig<I>, + ty::FnSigTys<I>, ty::TraitPredicate<I>, ty::ExistentialPredicate<I>, ty::TraitRef<I>, @@ -247,21 +248,6 @@ impl<I: Interner, T> Binder<I, T> { // `self.value` is equivalent to `self.skip_binder()` if self.value.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) } } - - /// Splits the contents into two things that share the same binder - /// level as the original, returning two distinct binders. - /// - /// `f` should consider bound regions at depth 1 to be free, and - /// anything it produces with bound regions at depth 1 will be - /// bound in the resulting return values. - pub fn split<U, V, F>(self, f: F) -> (Binder<I, U>, Binder<I, V>) - where - F: FnOnce(T) -> (U, V), - { - let Binder { value, bound_vars } = self; - let (u, v) = f(value); - (Binder { value: u, bound_vars }, Binder { value: v, bound_vars }) - } } impl<I: Interner, T> Binder<I, Option<T>> { @@ -831,28 +817,20 @@ impl<'a, I: Interner> ArgFolder<'a, I> { /// As indicated in the diagram, here the same type `&'a i32` is instantiated once, but in the /// first case we do not increase the De Bruijn index and in the second case we do. The reason /// is that only in the second case have we passed through a fn binder. 
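// The hunk below replaces hand-written `debug!` logging in
// `shift_vars_through_binders` (and, further down, in `shift_vars` in fold.rs)
// with `#[instrument]`. A small self-contained sketch of the attribute using
// the same options (`level`, `skip`, `fields`, `ret`); the function and the
// `tracing`/`tracing-subscriber` setup are illustrative, not part of this change.
use tracing::instrument;

#[instrument(level = "trace", skip(values), fields(len = values.len()), ret)]
fn shift_all(values: &[u32], amount: u32) -> Vec<u32> {
    // The generated span records `amount` and the computed `len` field while
    // skipping `values` itself; `ret` also records the return value on exit.
    values.iter().map(|v| v + amount).collect()
}

fn main() {
    // Print trace-level events so the instrumented span and return value show up.
    tracing_subscriber::fmt().with_max_level(tracing::Level::TRACE).init();
    shift_all(&[1, 2, 3], 2);
}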
+ #[instrument(level = "trace", skip(self), fields(binders_passed = self.binders_passed), ret)] fn shift_vars_through_binders<T: TypeFoldable<I>>(&self, val: T) -> T { - debug!( - "shift_vars(val={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", - val, - self.binders_passed, - val.has_escaping_bound_vars() - ); - if self.binders_passed == 0 || !val.has_escaping_bound_vars() { - return val; + val + } else { + ty::fold::shift_vars(self.cx, val, self.binders_passed) } - - let result = ty::fold::shift_vars(TypeFolder::cx(self), val, self.binders_passed); - debug!("shift_vars: shifted result = {:?}", result); - - result } fn shift_region_through_binders(&self, region: I::Region) -> I::Region { if self.binders_passed == 0 || !region.has_escaping_bound_vars() { - return region; + region + } else { + ty::fold::shift_region(self.cx, region, self.binders_passed) } - ty::fold::shift_region(self.cx, region, self.binders_passed) } } diff --git a/compiler/rustc_type_ir/src/fast_reject.rs b/compiler/rustc_type_ir/src/fast_reject.rs index 456accd1a1b..fab4a099117 100644 --- a/compiler/rustc_type_ir/src/fast_reject.rs +++ b/compiler/rustc_type_ir/src/fast_reject.rs @@ -135,7 +135,9 @@ pub fn simplify_type<I: Interner>( ty::CoroutineWitness(def_id, _) => Some(SimplifiedType::CoroutineWitness(def_id)), ty::Never => Some(SimplifiedType::Never), ty::Tuple(tys) => Some(SimplifiedType::Tuple(tys.len())), - ty::FnPtr(f) => Some(SimplifiedType::Function(f.skip_binder().inputs().len())), + ty::FnPtr(sig_tys, _hdr) => { + Some(SimplifiedType::Function(sig_tys.skip_binder().inputs().len())) + } ty::Placeholder(..) => Some(SimplifiedType::Placeholder), ty::Param(_) => match treat_params { TreatParams::ForLookup => Some(SimplifiedType::Placeholder), @@ -307,17 +309,14 @@ impl<I: Interner> DeepRejectCtxt<I> { obl_preds.principal_def_id() == impl_preds.principal_def_id() ) } - ty::FnPtr(obl_sig) => match k { - ty::FnPtr(impl_sig) => { - let ty::FnSig { inputs_and_output, c_variadic, safety, abi } = - obl_sig.skip_binder(); - let impl_sig = impl_sig.skip_binder(); + ty::FnPtr(obl_sig_tys, obl_hdr) => match k { + ty::FnPtr(impl_sig_tys, impl_hdr) => { + let obl_sig_tys = obl_sig_tys.skip_binder().inputs_and_output; + let impl_sig_tys = impl_sig_tys.skip_binder().inputs_and_output; - abi == impl_sig.abi - && c_variadic == impl_sig.c_variadic - && safety == impl_sig.safety - && inputs_and_output.len() == impl_sig.inputs_and_output.len() - && iter::zip(inputs_and_output.iter(), impl_sig.inputs_and_output.iter()) + obl_hdr == impl_hdr + && obl_sig_tys.len() == impl_sig_tys.len() + && iter::zip(obl_sig_tys.iter(), impl_sig_tys.iter()) .all(|(obl, imp)| self.types_may_unify(obl, imp)) } _ => false, diff --git a/compiler/rustc_type_ir/src/fold.rs b/compiler/rustc_type_ir/src/fold.rs index d37bacc7d35..8e3534b0e9e 100644 --- a/compiler/rustc_type_ir/src/fold.rs +++ b/compiler/rustc_type_ir/src/fold.rs @@ -48,7 +48,7 @@ use std::mem; use rustc_index::{Idx, IndexVec}; -use tracing::debug; +use tracing::instrument; use crate::data_structures::Lrc; use crate::inherent::*; @@ -417,15 +417,14 @@ pub fn shift_region<I: Interner>(cx: I, region: I::Region, amount: u32) -> I::Re } } +#[instrument(level = "trace", skip(cx), ret)] pub fn shift_vars<I: Interner, T>(cx: I, value: T, amount: u32) -> T where T: TypeFoldable<I>, { - debug!("shift_vars(value={:?}, amount={})", value, amount); - if amount == 0 || !value.has_escaping_bound_vars() { - return value; + value + } else { + value.fold_with(&mut Shifter::new(cx, amount)) } - - 
value.fold_with(&mut Shifter::new(cx, amount)) } diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 263ba676427..958360faede 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -133,12 +133,12 @@ pub trait Ty<I: Interner<Ty = Self>>: } fn is_fn_ptr(self) -> bool { - matches!(self.kind(), ty::FnPtr(_)) + matches!(self.kind(), ty::FnPtr(..)) } fn fn_sig(self, interner: I) -> ty::Binder<I, ty::FnSig<I>> { match self.kind() { - ty::FnPtr(sig) => sig, + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), ty::FnDef(def_id, args) => interner.fn_sig(def_id).instantiate(interner, args), ty::Error(_) => { // ignore errors (#54954) @@ -181,7 +181,7 @@ pub trait Ty<I: Interner<Ty = Self>>: | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) @@ -203,7 +203,9 @@ pub trait Ty<I: Interner<Ty = Self>>: pub trait Tys<I: Interner<Tys = Self>>: Copy + Debug + Hash + Eq + SliceLike<Item = I::Ty> + TypeFoldable<I> + Default { - fn split_inputs_and_output(self) -> (I::FnInputTys, I::Ty); + fn inputs(self) -> I::FnInputTys; + + fn output(self) -> I::Ty; } pub trait Abi<I: Interner<Abi = Self>>: Copy + Debug + Hash + Eq + Relate<I> { diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs index c251540c0fc..f2492ede4f5 100644 --- a/compiler/rustc_type_ir/src/interner.rs +++ b/compiler/rustc_type_ir/src/interner.rs @@ -11,7 +11,6 @@ use crate::inherent::*; use crate::ir_print::IrPrint; use crate::lang_items::TraitSolverLangItem; use crate::relate::Relate; -use crate::solve::inspect::CanonicalGoalEvaluationStep; use crate::solve::{ CanonicalInput, ExternalConstraintsData, PredefinedOpaquesData, QueryResult, SolverMode, }; @@ -65,11 +64,6 @@ pub trait Interner: + Eq + TypeVisitable<Self> + SliceLike<Item = Self::LocalDefId>; - type CanonicalGoalEvaluationStepRef: Copy - + Debug - + Hash - + Eq - + Deref<Target = CanonicalGoalEvaluationStep<Self>>; type CanonicalVars: Copy + Debug @@ -177,11 +171,6 @@ pub trait Interner: fn debug_assert_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs); - fn intern_canonical_goal_evaluation_step( - self, - step: CanonicalGoalEvaluationStep<Self>, - ) -> Self::CanonicalGoalEvaluationStepRef; - fn mk_type_list_from_iter<I, T>(self, args: I) -> T::Output where I: Iterator<Item = T>, @@ -390,7 +379,6 @@ impl<T, R, E> CollectAndApply<T, R> for Result<T, E> { } impl<I: Interner> search_graph::Cx for I { - type ProofTree = Option<I::CanonicalGoalEvaluationStepRef>; type Input = CanonicalInput<I>; type Result = QueryResult<I>; diff --git a/compiler/rustc_type_ir/src/outlives.rs b/compiler/rustc_type_ir/src/outlives.rs index 2f26a439183..bfcea6a81d3 100644 --- a/compiler/rustc_type_ir/src/outlives.rs +++ b/compiler/rustc_type_ir/src/outlives.rs @@ -186,7 +186,7 @@ impl<I: Interner> TypeVisitor<I> for OutlivesCollector<'_, I> { | ty::Slice(_) | ty::RawPtr(_, _) | ty::Ref(_, _, _) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Dynamic(_, _, _) | ty::Tuple(_) => { ty.super_visit_with(self); diff --git a/compiler/rustc_type_ir/src/relate.rs b/compiler/rustc_type_ir/src/relate.rs index 9fd3534d1fa..578436b622a 100644 --- a/compiler/rustc_type_ir/src/relate.rs +++ b/compiler/rustc_type_ir/src/relate.rs @@ -524,8 +524,8 @@ pub fn structurally_relate_tys<I: Interner, R: TypeRelation<I>>( Ok(Ty::new_fn_def(cx, a_def_id, args)) } - (ty::FnPtr(a_fty), ty::FnPtr(b_fty)) => { - let fty = relation.relate(a_fty, b_fty)?; + (ty::FnPtr(a_sig_tys, a_hdr), ty::FnPtr(b_sig_tys, b_hdr)) => { + let fty = relation.relate(a_sig_tys.with(a_hdr), b_sig_tys.with(b_hdr))?; Ok(Ty::new_fn_ptr(cx, fty)) } diff --git a/compiler/rustc_type_ir/src/search_graph/global_cache.rs b/compiler/rustc_type_ir/src/search_graph/global_cache.rs index be4f1069cd1..47f7cefac6a 100644 --- a/compiler/rustc_type_ir/src/search_graph/global_cache.rs +++ b/compiler/rustc_type_ir/src/search_graph/global_cache.rs @@ -1,18 +1,17 @@ use derive_where::derive_where; -use rustc_index::IndexVec; -use super::{AvailableDepth, Cx, StackDepth, StackEntry}; -use crate::data_structures::{HashMap, HashSet}; - -#[derive_where(Debug, Clone, Copy; X: Cx)] -struct QueryData<X: Cx> { - result: X::Result, - proof_tree: X::ProofTree, -} +use super::{AvailableDepth, Cx, NestedGoals}; +use crate::data_structures::HashMap; struct Success<X: Cx> { - data: X::Tracked<QueryData<X>>, additional_depth: usize, + nested_goals: NestedGoals<X>, + result: X::Tracked<X::Result>, +} + +struct WithOverflow<X: Cx> { + nested_goals: NestedGoals<X>, + result: X::Tracked<X::Result>, } /// The cache entry for a given input. @@ -23,24 +22,15 @@ struct Success<X: Cx> { #[derive_where(Default; X: Cx)] struct CacheEntry<X: Cx> { success: Option<Success<X>>, - /// We have to be careful when caching roots of cycles. - /// - /// See the doc comment of `StackEntry::cycle_participants` for more - /// details. - nested_goals: HashSet<X::Input>, - with_overflow: HashMap<usize, X::Tracked<QueryData<X>>>, + with_overflow: HashMap<usize, WithOverflow<X>>, } #[derive_where(Debug; X: Cx)] pub(super) struct CacheData<'a, X: Cx> { pub(super) result: X::Result, - pub(super) proof_tree: X::ProofTree, pub(super) additional_depth: usize, pub(super) encountered_overflow: bool, - // FIXME: This is currently unused, but impacts the design - // by requiring a closure for `Cx::with_global_cache`. 
- #[allow(dead_code)] - pub(super) nested_goals: &'a HashSet<X::Input>, + pub(super) nested_goals: &'a NestedGoals<X>, } #[derive_where(Default; X: Cx)] pub struct GlobalCache<X: Cx> { @@ -55,20 +45,21 @@ impl<X: Cx> GlobalCache<X> { input: X::Input, result: X::Result, - proof_tree: X::ProofTree, dep_node: X::DepNodeIndex, additional_depth: usize, encountered_overflow: bool, - nested_goals: &HashSet<X::Input>, + nested_goals: NestedGoals<X>, ) { - let data = cx.mk_tracked(QueryData { result, proof_tree }, dep_node); + let result = cx.mk_tracked(result, dep_node); let entry = self.map.entry(input).or_default(); - entry.nested_goals.extend(nested_goals); if encountered_overflow { - entry.with_overflow.insert(additional_depth, data); + let with_overflow = WithOverflow { nested_goals, result }; + let prev = entry.with_overflow.insert(additional_depth, with_overflow); + assert!(prev.is_none()); } else { - entry.success = Some(Success { data, additional_depth }); + let prev = entry.success.replace(Success { additional_depth, nested_goals, result }); + assert!(prev.is_none()); } } @@ -80,36 +71,37 @@ impl<X: Cx> GlobalCache<X> { &'a self, cx: X, input: X::Input, - stack: &IndexVec<StackDepth, StackEntry<X>>, available_depth: AvailableDepth, + mut candidate_is_applicable: impl FnMut(&NestedGoals<X>) -> bool, ) -> Option<CacheData<'a, X>> { let entry = self.map.get(&input)?; - if stack.iter().any(|e| entry.nested_goals.contains(&e.input)) { - return None; - } - - if let Some(ref success) = entry.success { - if available_depth.cache_entry_is_applicable(success.additional_depth) { - let QueryData { result, proof_tree } = cx.get_tracked(&success.data); + if let Some(Success { additional_depth, ref nested_goals, ref result }) = entry.success { + if available_depth.cache_entry_is_applicable(additional_depth) + && candidate_is_applicable(nested_goals) + { return Some(CacheData { - result, - proof_tree, - additional_depth: success.additional_depth, + result: cx.get_tracked(&result), + additional_depth, encountered_overflow: false, - nested_goals: &entry.nested_goals, + nested_goals, }); } } - entry.with_overflow.get(&available_depth.0).map(|e| { - let QueryData { result, proof_tree } = cx.get_tracked(e); - CacheData { - result, - proof_tree, - additional_depth: available_depth.0, - encountered_overflow: true, - nested_goals: &entry.nested_goals, + let additional_depth = available_depth.0; + if let Some(WithOverflow { nested_goals, result }) = + entry.with_overflow.get(&additional_depth) + { + if candidate_is_applicable(nested_goals) { + return Some(CacheData { + result: cx.get_tracked(result), + additional_depth, + encountered_overflow: true, + nested_goals, + }); } - }) + } + + None } } diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs index 4abf99b1ded..d47c9e725f3 100644 --- a/compiler/rustc_type_ir/src/search_graph/mod.rs +++ b/compiler/rustc_type_ir/src/search_graph/mod.rs @@ -1,19 +1,32 @@ +/// The search graph is responsible for caching and cycle detection in the trait +/// solver. Making sure that caching doesn't result in soundness bugs or unstable +/// query results is very challenging and makes this one of the most-involved +/// self-contained components of the compiler. +/// +/// We added fuzzing support to test its correctness. The fuzzers used to verify +/// the current implementation can be found in https://github.com/lcnr/search_graph_fuzz. 
+/// +/// This is just a quick overview of the general design, please check out the relevant +/// [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html) for +/// more details. Caching is split between a global cache and the per-cycle `provisional_cache`. +/// The global cache has to be completely unobservable, while the per-cycle cache may impact +/// behavior as long as the resulting behavior is still correct. +use std::cmp::Ordering; +use std::collections::BTreeSet; use std::fmt::Debug; use std::hash::Hash; use std::marker::PhantomData; -use std::mem; use derive_where::derive_where; use rustc_index::{Idx, IndexVec}; use tracing::debug; -use crate::data_structures::{HashMap, HashSet}; +use crate::data_structures::HashMap; use crate::solve::SolverMode; mod global_cache; use global_cache::CacheData; pub use global_cache::GlobalCache; -mod validate; /// The search graph does not simply use `Interner` directly /// to enable its fuzzing without having to stub the rest of @@ -22,7 +35,6 @@ mod validate; /// about `Input` and `Result` as they are implementation details /// of the search graph. pub trait Cx: Copy { - type ProofTree: Debug + Copy; type Input: Debug + Eq + Hash + Copy; type Result: Debug + Eq + Hash + Copy; @@ -43,30 +55,41 @@ pub trait Cx: Copy { ) -> R; } -pub trait ProofTreeBuilder<X: Cx> { - fn try_apply_proof_tree(&mut self, proof_tree: X::ProofTree) -> bool; - fn on_provisional_cache_hit(&mut self); - fn on_cycle_in_stack(&mut self); - fn finalize_canonical_goal_evaluation(&mut self, cx: X) -> X::ProofTree; -} - pub trait Delegate { type Cx: Cx; + /// Whether to use the provisional cache. Set to `false` by a fuzzer when + /// validating the search graph. + const ENABLE_PROVISIONAL_CACHE: bool; + type ValidationScope; + /// Returning `Some` disables the global cache for the current goal. + /// + /// The `ValidationScope` is used when fuzzing the search graph to track + /// for which goals the global cache has been disabled. This is necessary + /// as we may otherwise ignore the global cache entry for some goal `G` + /// only to later use it, failing to detect a cycle goal and potentially + /// changing the result. + fn enter_validation_scope( + cx: Self::Cx, + input: <Self::Cx as Cx>::Input, + ) -> Option<Self::ValidationScope>; + const FIXPOINT_STEP_LIMIT: usize; - type ProofTreeBuilder: ProofTreeBuilder<Self::Cx>; + type ProofTreeBuilder; + fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool; + + const DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW: usize; fn recursion_limit(cx: Self::Cx) -> usize; fn initial_provisional_result( cx: Self::Cx, - kind: CycleKind, + kind: PathKind, input: <Self::Cx as Cx>::Input, ) -> <Self::Cx as Cx>::Result; - fn reached_fixpoint( + fn is_initial_provisional_result( cx: Self::Cx, - kind: UsageKind, + kind: PathKind, input: <Self::Cx as Cx>::Input, - provisional_result: Option<<Self::Cx as Cx>::Result>, result: <Self::Cx as Cx>::Result, ) -> bool; fn on_stack_overflow( @@ -79,6 +102,13 @@ pub trait Delegate { input: <Self::Cx as Cx>::Input, ) -> <Self::Cx as Cx>::Result; + fn is_ambiguous_result(result: <Self::Cx as Cx>::Result) -> bool; + fn propagate_ambiguity( + cx: Self::Cx, + for_input: <Self::Cx as Cx>::Input, + from_result: <Self::Cx as Cx>::Result, + ) -> <Self::Cx as Cx>::Result; + fn step_is_coinductive(cx: Self::Cx, input: <Self::Cx as Cx>::Input) -> bool; } @@ -86,19 +116,20 @@ pub trait Delegate { /// result. In the case we return an initial provisional result depending /// on the kind of cycle. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum CycleKind { +pub enum PathKind { Coinductive, Inductive, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum UsageKind { - Single(CycleKind), + Single(PathKind), Mixed, } impl UsageKind { fn merge(self, other: Self) -> Self { match (self, other) { + (UsageKind::Mixed, _) | (_, UsageKind::Mixed) => UsageKind::Mixed, (UsageKind::Single(lhs), UsageKind::Single(rhs)) => { if lhs == rhs { UsageKind::Single(lhs) @@ -106,11 +137,11 @@ impl UsageKind { UsageKind::Mixed } } - (UsageKind::Mixed, UsageKind::Mixed) - | (UsageKind::Mixed, UsageKind::Single(_)) - | (UsageKind::Single(_), UsageKind::Mixed) => UsageKind::Mixed, } } + fn and_merge(&mut self, other: Self) { + *self = self.merge(other); + } } #[derive(Debug, Clone, Copy)] @@ -132,7 +163,7 @@ impl AvailableDepth { } Some(if last.encountered_overflow { - AvailableDepth(last.available_depth.0 / 2) + AvailableDepth(last.available_depth.0 / D::DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW) } else { AvailableDepth(last.available_depth.0 - 1) }) @@ -148,97 +179,181 @@ impl AvailableDepth { } } +/// All cycle heads a given goal depends on, ordered by their stack depth. +/// +/// We therefore pop the cycle heads from highest to lowest. +#[derive(Clone, Debug, PartialEq, Eq, Default)] +struct CycleHeads { + heads: BTreeSet<StackDepth>, +} + +impl CycleHeads { + fn is_empty(&self) -> bool { + self.heads.is_empty() + } + + fn highest_cycle_head(&self) -> StackDepth { + *self.heads.last().unwrap() + } + + fn opt_highest_cycle_head(&self) -> Option<StackDepth> { + self.heads.last().copied() + } + + fn opt_lowest_cycle_head(&self) -> Option<StackDepth> { + self.heads.first().copied() + } + + fn remove_highest_cycle_head(&mut self) { + let last = self.heads.pop_last(); + debug_assert_ne!(last, None); + } + + fn insert(&mut self, head: StackDepth) { + self.heads.insert(head); + } + + fn merge(&mut self, heads: &CycleHeads) { + for &head in heads.heads.iter() { + self.insert(head); + } + } + + /// Update the cycle heads of a goal at depth `this` given the cycle heads + /// of a nested goal. This merges the heads after filtering the parent goal + /// itself. + fn extend_from_child(&mut self, this: StackDepth, child: &CycleHeads) { + for &head in child.heads.iter() { + match head.cmp(&this) { + Ordering::Less => {} + Ordering::Equal => continue, + Ordering::Greater => unreachable!(), + } + + self.insert(head); + } + } +} + +/// The nested goals of each stack entry and the path from the +/// stack entry to that nested goal. +/// +/// We only start tracking nested goals once we've either encountered +/// overflow or a solver cycle. This is a performance optimization to +/// avoid tracking nested goals on the happy path. +/// +/// We use nested goals for two reasons: +/// - when rebasing provisional cache entries +/// - when checking whether we have to ignore a global cache entry as reevaluating +/// it would encounter a cycle or use a provisional cache entry. +/// +/// We need to disable the global cache if using it would hide a cycle, as +/// cycles can impact behavior. The cycle ABA may have different final +/// results from a the cycle BAB depending on the cycle root. 
+#[derive_where(Debug, Default; X: Cx)] +struct NestedGoals<X: Cx> { + nested_goals: HashMap<X::Input, UsageKind>, +} +impl<X: Cx> NestedGoals<X> { + fn is_empty(&self) -> bool { + self.nested_goals.is_empty() + } + + fn insert(&mut self, input: X::Input, path_from_entry: UsageKind) { + self.nested_goals.entry(input).or_insert(path_from_entry).and_merge(path_from_entry); + } + + fn merge(&mut self, nested_goals: &NestedGoals<X>) { + #[allow(rustc::potential_query_instability)] + for (input, path_from_entry) in nested_goals.iter() { + self.insert(input, path_from_entry); + } + } + + /// Adds the nested goals of a nested goal, given that the path `step_kind` from this goal + /// to the parent goal. + /// + /// If the path from this goal to the nested goal is inductive, the paths from this goal + /// to all nested goals of that nested goal are also inductive. Otherwise the paths are + /// the same as for the child. + fn extend_from_child(&mut self, step_kind: PathKind, nested_goals: &NestedGoals<X>) { + #[allow(rustc::potential_query_instability)] + for (input, path_from_entry) in nested_goals.iter() { + let path_from_entry = match step_kind { + PathKind::Coinductive => path_from_entry, + PathKind::Inductive => UsageKind::Single(PathKind::Inductive), + }; + self.insert(input, path_from_entry); + } + } + + #[rustc_lint_query_instability] + #[allow(rustc::potential_query_instability)] + fn iter(&self) -> impl Iterator<Item = (X::Input, UsageKind)> + '_ { + self.nested_goals.iter().map(|(i, p)| (*i, *p)) + } + + fn get(&self, input: X::Input) -> Option<UsageKind> { + self.nested_goals.get(&input).copied() + } + + fn contains(&self, input: X::Input) -> bool { + self.nested_goals.contains_key(&input) + } +} + rustc_index::newtype_index! { #[orderable] #[gate_rustc_only] pub struct StackDepth {} } +/// Stack entries of the evaluation stack. Its fields tend to be lazily +/// when popping a child goal or completely immutable. #[derive_where(Debug; X: Cx)] struct StackEntry<X: Cx> { input: X::Input, + /// The available depth of a given goal, immutable. available_depth: AvailableDepth, /// The maximum depth reached by this stack entry, only up-to date /// for the top of the stack and lazily updated for the rest. reached_depth: StackDepth, - /// Whether this entry is a non-root cycle participant. - /// - /// We must not move the result of non-root cycle participants to the - /// global cache. We store the highest stack depth of a head of a cycle - /// this goal is involved in. This necessary to soundly cache its - /// provisional result. - non_root_cycle_participant: Option<StackDepth>, + /// All cycle heads this goal depends on. Lazily updated and only + /// up-to date for the top of the stack. + heads: CycleHeads, + /// Whether evaluating this goal encountered overflow. Lazily updated. encountered_overflow: bool, + /// Whether this goal has been used as the root of a cycle. This gets + /// eagerly updated when encountering a cycle. has_been_used: Option<UsageKind>, - /// We put only the root goal of a coinductive cycle into the global cache. - /// - /// If we were to use that result when later trying to prove another cycle - /// participant, we can end up with unstable query results. - /// - /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for - /// an example of where this is needed. 
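// Illustrative sketch, not part of this diff: a standalone model of the path
// bookkeeping in `NestedGoals::extend_from_child`. `Paths` below is a flattened
// stand-in for `UsageKind`; goal names are arbitrary strings. The point it
// demonstrates: a single inductive step on the way to a nested goal makes every
// path through it inductive, while a coinductive step keeps the recorded paths.

use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Step {
    Coinductive,
    Inductive,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Paths {
    OnlyCoinductive,
    OnlyInductive,
    Mixed,
}

impl Paths {
    fn merge(self, other: Self) -> Self {
        if self == other { self } else { Paths::Mixed }
    }
}

fn extend_from_child(
    parent: &mut HashMap<&'static str, Paths>,
    step_kind: Step,
    child: &HashMap<&'static str, Paths>,
) {
    for (&goal, &paths) in child {
        let paths = match step_kind {
            Step::Coinductive => paths,
            // One inductive step taints all paths to the child's nested goals.
            Step::Inductive => Paths::OnlyInductive,
        };
        parent.entry(goal).and_modify(|p| *p = p.merge(paths)).or_insert(paths);
    }
}

fn main() {
    let child = HashMap::from([("A", Paths::OnlyCoinductive), ("B", Paths::Mixed)]);
    let mut parent = HashMap::new();
    extend_from_child(&mut parent, Step::Inductive, &child);
    assert_eq!(parent["A"], Paths::OnlyInductive);
    assert_eq!(parent["B"], Paths::OnlyInductive);
}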
- /// - /// There can be multiple roots on the same stack, so we need to track - /// cycle participants per root: - /// ```plain - /// A :- B - /// B :- A, C - /// C :- D - /// D :- C - /// ``` - nested_goals: HashSet<X::Input>, + /// The nested goals of this goal, see the doc comment of the type. + nested_goals: NestedGoals<X>, + /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. provisional_result: Option<X::Result>, } -/// The provisional result for a goal which is not on the stack. -#[derive(Debug)] -struct DetachedEntry<X: Cx> { - /// The head of the smallest non-trivial cycle involving this entry. - /// - /// Given the following rules, when proving `A` the head for - /// the provisional entry of `C` would be `B`. - /// ```plain - /// A :- B - /// B :- C - /// C :- A + B + C - /// ``` - head: StackDepth, - result: X::Result, -} - -/// Stores the stack depth of a currently evaluated goal *and* already -/// computed results for goals which depend on other goals still on the stack. -/// -/// The provisional result may depend on whether the stack above it is inductive -/// or coinductive. Because of this, we store separate provisional results for -/// each case. If an provisional entry is not applicable, it may be the case -/// that we already have provisional result while computing a goal. In this case -/// we prefer the provisional result to potentially avoid fixpoint iterations. -/// See tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs for an example. -/// -/// The provisional cache can theoretically result in changes to the observable behavior, -/// see tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs. -#[derive_where(Default; X: Cx)] +/// A provisional result of an already computed goals which depends on other +/// goals still on the stack. +#[derive_where(Debug; X: Cx)] struct ProvisionalCacheEntry<X: Cx> { - stack_depth: Option<StackDepth>, - with_inductive_stack: Option<DetachedEntry<X>>, - with_coinductive_stack: Option<DetachedEntry<X>>, -} - -impl<X: Cx> ProvisionalCacheEntry<X> { - fn is_empty(&self) -> bool { - self.stack_depth.is_none() - && self.with_inductive_stack.is_none() - && self.with_coinductive_stack.is_none() - } + /// Whether evaluating the goal encountered overflow. This is used to + /// disable the cache entry except if the last goal on the stack is + /// already involved in this cycle. + encountered_overflow: bool, + /// All cycle heads this cache entry depends on. + heads: CycleHeads, + /// The path from the highest cycle head to this goal. + path_from_head: PathKind, + nested_goals: NestedGoals<X>, + result: X::Result, } pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> { @@ -247,7 +362,11 @@ pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> { /// /// An element is *deeper* in the stack if its index is *lower*. stack: IndexVec<StackDepth, StackEntry<X>>, - provisional_cache: HashMap<X::Input, ProvisionalCacheEntry<X>>, + /// The provisional cache contains entries for already computed goals which + /// still depend on goals higher-up in the stack. We don't move them to the + /// global cache and track them locally instead. A provisional cache entry + /// is only valid until the result of one of its cycle heads changes. 
+ provisional_cache: HashMap<X::Input, Vec<ProvisionalCacheEntry<X>>>, _marker: PhantomData<D>, } @@ -266,77 +385,66 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { self.mode } - fn update_parent_goal(&mut self, reached_depth: StackDepth, encountered_overflow: bool) { - if let Some(parent) = self.stack.raw.last_mut() { + /// Lazily update the stack entry for the parent goal. + /// This behavior is shared between actually evaluating goals + /// and using existing global cache entries to make sure they + /// have the same impact on the remaining evaluation. + fn update_parent_goal( + cx: X, + stack: &mut IndexVec<StackDepth, StackEntry<X>>, + reached_depth: StackDepth, + heads: &CycleHeads, + encountered_overflow: bool, + nested_goals: &NestedGoals<X>, + ) { + if let Some(parent_index) = stack.last_index() { + let parent = &mut stack[parent_index]; parent.reached_depth = parent.reached_depth.max(reached_depth); parent.encountered_overflow |= encountered_overflow; + + parent.heads.extend_from_child(parent_index, heads); + let step_kind = Self::step_kind(cx, parent.input); + parent.nested_goals.extend_from_child(step_kind, nested_goals); + // Once we've got goals which encountered overflow or a cycle, + // we track all goals whose behavior may depend depend on these + // goals as this change may cause them to now depend on additional + // goals, resulting in new cycles. See the dev-guide for examples. + if !nested_goals.is_empty() { + parent.nested_goals.insert(parent.input, UsageKind::Single(PathKind::Coinductive)) + } } } pub fn is_empty(&self) -> bool { - self.stack.is_empty() + if self.stack.is_empty() { + debug_assert!(self.provisional_cache.is_empty()); + true + } else { + false + } } - fn stack_coinductive_from( - cx: X, - stack: &IndexVec<StackDepth, StackEntry<X>>, - head: StackDepth, - ) -> bool { - stack.raw[head.index()..].iter().all(|entry| D::step_is_coinductive(cx, entry.input)) - } - - // When encountering a solver cycle, the result of the current goal - // depends on goals lower on the stack. - // - // We have to therefore be careful when caching goals. Only the final result - // of the cycle root, i.e. the lowest goal on the stack involved in this cycle, - // is moved to the global cache while all others are stored in a provisional cache. - // - // We update both the head of this cycle to rerun its evaluation until - // we reach a fixpoint and all other cycle participants to make sure that - // their result does not get moved to the global cache. - fn tag_cycle_participants( - stack: &mut IndexVec<StackDepth, StackEntry<X>>, - usage_kind: Option<UsageKind>, - head: StackDepth, - ) { - if let Some(usage_kind) = usage_kind { - stack[head].has_been_used = - Some(stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind))); - } - debug_assert!(stack[head].has_been_used.is_some()); - - // The current root of these cycles. Note that this may not be the final - // root in case a later goal depends on a goal higher up the stack. - let mut current_root = head; - while let Some(parent) = stack[current_root].non_root_cycle_participant { - current_root = parent; - debug_assert!(stack[current_root].has_been_used.is_some()); - } + /// The number of goals currently in the search graph. This should only be + /// used for debugging purposes. 
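// Illustrative sketch, not part of this diff: one reason `provisional_cache` maps
// each goal to a `Vec` of entries rather than a single entry. The same goal may
// have been computed behind different paths from its highest cycle head, and only
// an entry whose recorded path matches the path on the current stack may be
// reused. The types below are simplified stand-ins, not `ProvisionalCacheEntry`.

use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum PathKind {
    Coinductive,
    Inductive,
}

struct Entry {
    path_from_head: PathKind,
    result: &'static str,
}

fn lookup(
    cache: &HashMap<&'static str, Vec<Entry>>,
    goal: &'static str,
    current_path_from_head: PathKind,
) -> Option<&'static str> {
    cache.get(goal)?.iter().find_map(|entry| {
        (entry.path_from_head == current_path_from_head).then_some(entry.result)
    })
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert(
        "G",
        vec![
            Entry { path_from_head: PathKind::Coinductive, result: "holds" },
            Entry { path_from_head: PathKind::Inductive, result: "overflow" },
        ],
    );
    assert_eq!(lookup(&cache, "G", PathKind::Inductive), Some("overflow"));
    assert_eq!(lookup(&cache, "H", PathKind::Inductive), None);
}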
+ pub fn debug_current_depth(&self) -> usize { + self.stack.len() + } - let (stack, cycle_participants) = stack.raw.split_at_mut(head.index() + 1); - let current_cycle_root = &mut stack[current_root.as_usize()]; - for entry in cycle_participants { - entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head)); - current_cycle_root.nested_goals.insert(entry.input); - current_cycle_root.nested_goals.extend(mem::take(&mut entry.nested_goals)); - } + fn step_kind(cx: X, input: X::Input) -> PathKind { + if D::step_is_coinductive(cx, input) { PathKind::Coinductive } else { PathKind::Inductive } } - fn clear_dependent_provisional_results( - provisional_cache: &mut HashMap<X::Input, ProvisionalCacheEntry<X>>, + /// Whether the path from `head` to the current stack entry is inductive or coinductive. + fn stack_path_kind( + cx: X, + stack: &IndexVec<StackDepth, StackEntry<X>>, head: StackDepth, - ) { - #[allow(rustc::potential_query_instability)] - provisional_cache.retain(|_, entry| { - if entry.with_coinductive_stack.as_ref().is_some_and(|p| p.head == head) { - entry.with_coinductive_stack.take(); - } - if entry.with_inductive_stack.as_ref().is_some_and(|p| p.head == head) { - entry.with_inductive_stack.take(); - } - !entry.is_empty() - }); + ) -> PathKind { + if stack.raw[head.index()..].iter().all(|entry| D::step_is_coinductive(cx, entry.input)) { + PathKind::Coinductive + } else { + PathKind::Inductive + } } /// Probably the most involved method of the whole solver. @@ -348,89 +456,65 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { cx: X, input: X::Input, inspect: &mut D::ProofTreeBuilder, - mut prove_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, + mut evaluate_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, ) -> X::Result { - self.check_invariants(); - // Check for overflow. let Some(available_depth) = AvailableDepth::allowed_depth_for_nested::<D>(cx, &self.stack) else { - if let Some(last) = self.stack.raw.last_mut() { - last.encountered_overflow = true; - } - - debug!("encountered stack overflow"); - return D::on_stack_overflow(cx, inspect, input); + return self.handle_overflow(cx, input, inspect); }; - if let Some(result) = self.lookup_global_cache(cx, input, available_depth, inspect) { + // We check the provisional cache before checking the global cache. This simplifies + // the implementation as we can avoid worrying about cases where both the global and + // provisional cache may apply, e.g. consider the following example + // + // - xxBA overflow + // - A + // - BA cycle + // - CB :x: + if let Some(result) = self.lookup_provisional_cache(cx, input) { return result; } - // Check whether the goal is in the provisional cache. - // The provisional result may rely on the path to its cycle roots, - // so we have to check the path of the current goal matches that of - // the cache entry. - let cache_entry = self.provisional_cache.entry(input).or_default(); - if let Some(entry) = cache_entry - .with_coinductive_stack - .as_ref() - .filter(|p| Self::stack_coinductive_from(cx, &self.stack, p.head)) - .or_else(|| { - cache_entry - .with_inductive_stack - .as_ref() - .filter(|p| !Self::stack_coinductive_from(cx, &self.stack, p.head)) - }) - { - debug!("provisional cache hit"); - // We have a nested goal which is already in the provisional cache, use - // its result. We do not provide any usage kind as that should have been - // already set correctly while computing the cache entry. 
- inspect.on_provisional_cache_hit(); - Self::tag_cycle_participants(&mut self.stack, None, entry.head); - return entry.result; - } else if let Some(stack_depth) = cache_entry.stack_depth { - debug!("encountered cycle with depth {stack_depth:?}"); - // We have a nested goal which directly relies on a goal deeper in the stack. - // - // We start by tagging all cycle participants, as that's necessary for caching. - // - // Finally we can return either the provisional response or the initial response - // in case we're in the first fixpoint iteration for this goal. - inspect.on_cycle_in_stack(); - - let is_coinductive_cycle = Self::stack_coinductive_from(cx, &self.stack, stack_depth); - let cycle_kind = - if is_coinductive_cycle { CycleKind::Coinductive } else { CycleKind::Inductive }; - Self::tag_cycle_participants( - &mut self.stack, - Some(UsageKind::Single(cycle_kind)), - stack_depth, - ); - - // Return the provisional result or, if we're in the first iteration, - // start with no constraints. - return if let Some(result) = self.stack[stack_depth].provisional_result { - result - } else { - D::initial_provisional_result(cx, cycle_kind, input) - }; + // Lookup the global cache unless we're building proof trees or are currently + // fuzzing. + let validate_cache = if !D::inspect_is_noop(inspect) { + None + } else if let Some(scope) = D::enter_validation_scope(cx, input) { + // When validating the global cache we need to track the goals for which the + // global cache has been disabled as it may otherwise change the result for + // cyclic goals. We don't care about goals which are not on the current stack + // so it's fine to drop their scope eagerly. + self.lookup_global_cache_untracked(cx, input, available_depth) + .inspect(|expected| debug!(?expected, "validate cache entry")) + .map(|r| (scope, r)) + } else if let Some(result) = self.lookup_global_cache(cx, input, available_depth) { + return result; } else { - // No entry, we push this goal on the stack and try to prove it. - let depth = self.stack.next_index(); - let entry = StackEntry { - input, - available_depth, - reached_depth: depth, - non_root_cycle_participant: None, - encountered_overflow: false, - has_been_used: None, - nested_goals: Default::default(), - provisional_result: None, - }; - assert_eq!(self.stack.push(entry), depth); - cache_entry.stack_depth = Some(depth); + None + }; + + // Detect cycles on the stack. We do this after the global cache lookup to + // avoid iterating over the stack in case a goal has already been computed. + // This may not have an actual performance impact and we could reorder them + // as it may reduce the number of `nested_goals` we need to track. + if let Some(result) = self.check_cycle_on_stack(cx, input) { + debug_assert!(validate_cache.is_none(), "global cache and cycle on stack"); + return result; + } + + // Unfortunate, it looks like we actually have to compute this goalrar. + let depth = self.stack.next_index(); + let entry = StackEntry { + input, + available_depth, + reached_depth: depth, + heads: Default::default(), + encountered_overflow: false, + has_been_used: None, + nested_goals: Default::default(), + provisional_result: None, }; + assert_eq!(self.stack.push(entry), depth); // This is for global caching, so we properly track query dependencies. // Everything that affects the `result` should be performed within this @@ -439,65 +523,320 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // must not be added to the global cache. 
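// Illustrative sketch, not part of this diff: the order of the checks performed
// before a goal is actually evaluated, written as a standalone skeleton. All
// types and helper bodies below are placeholders mirroring the real methods.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct ToyResult(&'static str);

struct Solver;

impl Solver {
    fn evaluate_goal(&mut self, goal: &str) -> ToyResult {
        // 1. Bail out if there is no recursion depth left.
        if !self.has_available_depth() {
            return self.handle_overflow(goal);
        }
        // 2. Provisional cache first: checking it before the global cache means we
        //    never have to reason about both caches applying at the same time.
        if let Some(result) = self.lookup_provisional_cache(goal) {
            return result;
        }
        // 3. Global cache, unless we're building proof trees or fuzzing the cache.
        if let Some(result) = self.lookup_global_cache(goal) {
            return result;
        }
        // 4. A goal already on the stack means we've found a cycle.
        if let Some(result) = self.check_cycle_on_stack(goal) {
            return result;
        }
        // 5. Otherwise push a fresh stack entry and run the fixpoint loop.
        self.evaluate_goal_in_task(goal)
    }

    fn has_available_depth(&self) -> bool { true }
    fn handle_overflow(&mut self, _: &str) -> ToyResult { ToyResult("overflow") }
    fn lookup_provisional_cache(&mut self, _: &str) -> Option<ToyResult> { None }
    fn lookup_global_cache(&mut self, _: &str) -> Option<ToyResult> { None }
    fn check_cycle_on_stack(&mut self, _: &str) -> Option<ToyResult> { None }
    fn evaluate_goal_in_task(&mut self, _: &str) -> ToyResult { ToyResult("computed") }
}

fn main() {
    let mut solver = Solver;
    assert_eq!(solver.evaluate_goal("G"), ToyResult("computed"));
}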
Notably, this is the case for // trait solver cycles participants. let ((final_entry, result), dep_node) = cx.with_cached_task(|| { - for _ in 0..D::FIXPOINT_STEP_LIMIT { - match self.fixpoint_step_in_task(cx, input, inspect, &mut prove_goal) { - StepResult::Done(final_entry, result) => return (final_entry, result), - StepResult::HasChanged => debug!("fixpoint changed provisional results"), - } + self.evaluate_goal_in_task(cx, input, inspect, &mut evaluate_goal) + }); + + // We've finished computing the goal and have popped it from the stack, + // lazily update its parent goal. + Self::update_parent_goal( + cx, + &mut self.stack, + final_entry.reached_depth, + &final_entry.heads, + final_entry.encountered_overflow, + &final_entry.nested_goals, + ); + + // We're now done with this goal. We only add the root of cycles to the global cache. + // In case this goal is involved in a larger cycle add it to the provisional cache. + if final_entry.heads.is_empty() { + if let Some((_scope, expected)) = validate_cache { + // Do not try to move a goal into the cache again if we're testing + // the global cache. + assert_eq!(result, expected, "input={input:?}"); + } else if D::inspect_is_noop(inspect) { + self.insert_global_cache(cx, input, final_entry, result, dep_node) } + } else if D::ENABLE_PROVISIONAL_CACHE { + debug_assert!(validate_cache.is_none()); + let entry = self.provisional_cache.entry(input).or_default(); + let StackEntry { heads, nested_goals, encountered_overflow, .. } = final_entry; + let path_from_head = Self::stack_path_kind(cx, &self.stack, heads.highest_cycle_head()); + entry.push(ProvisionalCacheEntry { + encountered_overflow, + heads, + path_from_head, + nested_goals, + result, + }); + } else { + debug_assert!(validate_cache.is_none()); + } + + result + } + + fn handle_overflow( + &mut self, + cx: X, + input: X::Input, + inspect: &mut D::ProofTreeBuilder, + ) -> X::Result { + if let Some(last) = self.stack.raw.last_mut() { + last.encountered_overflow = true; + // If computing a goal `B` depends on another goal `A` and + // `A` has a nested goal which overflows, then computing `B` + // at the same depth, but with `A` already on the stack, + // would encounter a solver cycle instead, potentially + // changing the result. + // + // We must therefore not use the global cache entry for `B` in that case. + // See tests/ui/traits/next-solver/cycles/hidden-by-overflow.rs + last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Coinductive)); + } - debug!("canonical cycle overflow"); - let current_entry = self.stack.pop().unwrap(); - debug_assert!(current_entry.has_been_used.is_none()); - let result = D::on_fixpoint_overflow(cx, input); - (current_entry, result) + debug!("encountered stack overflow"); + D::on_stack_overflow(cx, inspect, input) + } + + /// When reevaluating a goal with a changed provisional result, all provisional cache entry + /// which depend on this goal get invalidated. + fn clear_dependent_provisional_results(&mut self) { + let head = self.stack.next_index(); + #[allow(rustc::potential_query_instability)] + self.provisional_cache.retain(|_, entries| { + entries.retain(|entry| entry.heads.highest_cycle_head() != head); + !entries.is_empty() }); + } - let proof_tree = inspect.finalize_canonical_goal_evaluation(cx); + /// A necessary optimization to handle complex solver cycles. A provisional cache entry + /// relies on a set of cycle heads and the path towards these heads. 
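// Illustrative sketch, not part of this diff: a standalone model of
// `clear_dependent_provisional_results`. When a cycle head is rerun with a new
// provisional result, every cached entry whose *highest* cycle head is that goal
// is stale and gets dropped; entries depending only on other heads are kept.
// Stack depths are plain `usize` and the entry type is a toy stand-in.

use std::collections::HashMap;

struct Entry {
    // Stack depths of all cycle heads this entry depends on, highest last.
    heads: Vec<usize>,
    result: &'static str,
}

impl Entry {
    fn highest_cycle_head(&self) -> usize {
        *self.heads.last().unwrap()
    }
}

fn clear_dependent(cache: &mut HashMap<&'static str, Vec<Entry>>, rerun_head: usize) {
    cache.retain(|_, entries| {
        entries.retain(|entry| entry.highest_cycle_head() != rerun_head);
        !entries.is_empty()
    });
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert("G", vec![
        Entry { heads: vec![0, 2], result: "depends on head 2" },
        Entry { heads: vec![0], result: "depends on head 0 only" },
    ]);
    clear_dependent(&mut cache, 2);
    assert_eq!(cache["G"].len(), 1);
    assert_eq!(cache["G"][0].result, "depends on head 0 only");
}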
When popping a cycle + /// head from the stack after we've finished computing it, we can't be sure that the + /// provisional cache entry is still applicable. We need to keep the cache entries to + /// prevent hangs. + /// + /// What we therefore do is check whether the cycle kind of all cycles the goal of a + /// provisional cache entry is involved in would stay the same when computing the + /// goal without its cycle head on the stack. For more details, see the relevant + /// [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html). + /// + /// This can be thought of rotating the sub-tree of this provisional result and changing + /// its entry point while making sure that all paths through this sub-tree stay the same. + /// + /// + /// In case the popped cycle head failed to reach a fixpoint anything which depends on + /// its provisional result is invalid. Actually discarding provisional cache entries in + /// this case would cause hangs, so we instead change the result of dependant provisional + /// cache entries to also be ambiguous. This causes some undesirable ambiguity for nested + /// goals whose result doesn't actually depend on this cycle head, but that's acceptable + /// to me. + fn rebase_provisional_cache_entries( + &mut self, + cx: X, + stack_entry: &StackEntry<X>, + mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result, + ) { + let head = self.stack.next_index(); + #[allow(rustc::potential_query_instability)] + self.provisional_cache.retain(|&input, entries| { + entries.retain_mut(|entry| { + let ProvisionalCacheEntry { + encountered_overflow: _, + heads, + path_from_head, + nested_goals, + result, + } = entry; + if heads.highest_cycle_head() != head { + return true; + } - self.update_parent_goal(final_entry.reached_depth, final_entry.encountered_overflow); + // We don't try rebasing if the path from the current head + // to the cache entry is not coinductive or if the path from + // the cache entry to the current head is not coinductive. + // + // Both of these constraints could be weakened, but by only + // accepting coinductive paths we don't have to worry about + // changing the cycle kind of the remaining cycles. We can + // extend this in the future once there's a known issue + // caused by it. + if *path_from_head != PathKind::Coinductive + || nested_goals.get(stack_entry.input).unwrap() + != UsageKind::Single(PathKind::Coinductive) + { + return false; + } - // We're now done with this goal. In case this goal is involved in a larger cycle - // do not remove it from the provisional cache and update its provisional result. - // We only add the root of cycles to the global cache. - if let Some(head) = final_entry.non_root_cycle_participant { - let coinductive_stack = Self::stack_coinductive_from(cx, &self.stack, head); + // Merge the cycle heads of the provisional cache entry and the + // popped head. If the popped cycle head was a root, discard all + // provisional cache entries which depend on it. 
+ heads.remove_highest_cycle_head(); + heads.merge(&stack_entry.heads); + let Some(head) = heads.opt_highest_cycle_head() else { + return false; + }; - let entry = self.provisional_cache.get_mut(&input).unwrap(); - entry.stack_depth = None; - if coinductive_stack { - entry.with_coinductive_stack = Some(DetachedEntry { head, result }); - } else { - entry.with_inductive_stack = Some(DetachedEntry { head, result }); + // As we've made sure that the path from the new highest cycle + // head to the uses of the popped cycle head are fully coinductive, + // we can be sure that the paths to all nested goals of the popped + // cycle head remain the same. We can simply merge them. + nested_goals.merge(&stack_entry.nested_goals); + // We now care about the path from the next highest cycle head to the + // provisional cache entry. + *path_from_head = Self::stack_path_kind(cx, &self.stack, head); + // Mutate the result of the provisional cache entry in case we did + // not reach a fixpoint. + *result = mutate_result(input, *result); + true + }); + !entries.is_empty() + }); + } + + fn lookup_provisional_cache(&mut self, cx: X, input: X::Input) -> Option<X::Result> { + if !D::ENABLE_PROVISIONAL_CACHE { + return None; + } + + let entries = self.provisional_cache.get(&input)?; + for &ProvisionalCacheEntry { + encountered_overflow, + ref heads, + path_from_head, + ref nested_goals, + result, + } in entries + { + let head = heads.highest_cycle_head(); + if encountered_overflow { + // This check is overly strict and very subtle. We need to make sure that if + // a global cache entry depends on some goal without adding it to its + // `nested_goals`, that goal must never have an applicable provisional + // cache entry to avoid incorrectly applying the cache entry. + // + // As we'd have to otherwise track literally all nested goals, we only + // apply provisional cache entries which encountered overflow once the + // current goal is already part of the same cycle. This check could be + // improved but seems to be good enough for now. + let last = self.stack.raw.last().unwrap(); + if !last.heads.opt_lowest_cycle_head().is_some_and(|lowest| lowest <= head) { + continue; + } } - } else { - // When encountering a cycle, both inductive and coinductive, we only - // move the root into the global cache. We also store all other cycle - // participants involved. - // - // We must not use the global cache entry of a root goal if a cycle - // participant is on the stack. This is necessary to prevent unstable - // results. See the comment of `StackEntry::nested_goals` for - // more details. - self.provisional_cache.remove(&input); - let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len(); - cx.with_global_cache(self.mode, |cache| { - cache.insert( + + // A provisional cache entry is only valid if the current path from its + // highest cycle head to the goal is the same. 
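// Illustrative sketch, not part of this diff: the core decision made when
// rebasing a provisional cache entry after popping its highest cycle head. The
// entry survives only if all involved paths are coinductive; its heads are then
// merged with the heads of the popped goal, and it is dropped entirely if no
// cycle head remains. Recomputing `path_from_head`, merging nested goals and
// mutating the result on a failed fixpoint are omitted here; all names are
// simplified placeholders.

use std::collections::BTreeSet;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum PathKind {
    Coinductive,
    Inductive,
}

struct ToyEntry {
    heads: BTreeSet<usize>,
    path_from_head: PathKind,
    // Path from this entry to the popped cycle head, as recorded in `nested_goals`.
    path_to_popped_head: PathKind,
}

/// Returns `true` if the entry can be kept after popping the head at depth `popped`.
fn rebase(entry: &mut ToyEntry, popped: usize, popped_goal_heads: &BTreeSet<usize>) -> bool {
    if entry.heads.last() != Some(&popped) {
        // Unrelated to the popped head, keep the entry as-is.
        return true;
    }
    // Only rebase fully coinductive cycles; anything else is discarded.
    if entry.path_from_head != PathKind::Coinductive
        || entry.path_to_popped_head != PathKind::Coinductive
    {
        return false;
    }
    entry.heads.pop_last();
    entry.heads.extend(popped_goal_heads);
    // Without any remaining cycle head the entry no longer belongs in the
    // provisional cache, so drop it.
    !entry.heads.is_empty()
}

fn main() {
    let mut entry = ToyEntry {
        heads: BTreeSet::from([1, 3]),
        path_from_head: PathKind::Coinductive,
        path_to_popped_head: PathKind::Coinductive,
    };
    assert!(rebase(&mut entry, 3, &BTreeSet::from([1])));
    assert_eq!(entry.heads, BTreeSet::from([1]));
}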
+ if path_from_head == Self::stack_path_kind(cx, &self.stack, head) { + // While we don't have to track the full depth of the provisional cache entry, + // we do have to increment the required depth by one as we'd have already failed + // with overflow otherwise + let next_index = self.stack.next_index(); + let last = &mut self.stack.raw.last_mut().unwrap(); + let path_from_entry = Self::step_kind(cx, last.input); + last.nested_goals.insert(input, UsageKind::Single(path_from_entry)); + + Self::update_parent_goal( cx, - input, - result, - proof_tree, - dep_node, - additional_depth, - final_entry.encountered_overflow, - &final_entry.nested_goals, - ) - }) + &mut self.stack, + next_index, + heads, + false, + nested_goals, + ); + debug_assert!(self.stack[head].has_been_used.is_some()); + debug!(?head, ?path_from_head, "provisional cache hit"); + return Some(result); + } } - self.check_invariants(); + None + } - result + /// Even if there is a global cache entry for a given goal, we need to make sure + /// evaluating this entry would not have ended up depending on either a goal + /// already on the stack or a provisional cache entry. + fn candidate_is_applicable( + cx: X, + stack: &IndexVec<StackDepth, StackEntry<X>>, + provisional_cache: &HashMap<X::Input, Vec<ProvisionalCacheEntry<X>>>, + nested_goals: &NestedGoals<X>, + ) -> bool { + // If the global cache entry didn't depend on any nested goals, it always + // applies. + if nested_goals.is_empty() { + return true; + } + + // If a nested goal of the global cache entry is on the stack, we would + // definitely encounter a cycle. + if stack.iter().any(|e| nested_goals.contains(e.input)) { + debug!("cache entry not applicable due to stack"); + return false; + } + + // The global cache entry is also invalid if there's a provisional cache entry + // would apply for any of its nested goals. + #[allow(rustc::potential_query_instability)] + for (input, path_from_global_entry) in nested_goals.iter() { + let Some(entries) = provisional_cache.get(&input) else { + continue; + }; + + debug!(?input, ?path_from_global_entry, ?entries, "candidate_is_applicable"); + // A provisional cache entry is applicable if the path to + // its highest cycle head is equal to the expected path. + for &ProvisionalCacheEntry { + encountered_overflow, + ref heads, + path_from_head, + nested_goals: _, + result: _, + } in entries.iter() + { + // We don't have to worry about provisional cache entries which encountered + // overflow, see the relevant comment in `lookup_provisional_cache`. + if encountered_overflow { + continue; + } + + // A provisional cache entry only applies if the path from its highest head + // matches the path when encountering the goal. + let head = heads.highest_cycle_head(); + let full_path = match Self::stack_path_kind(cx, stack, head) { + PathKind::Coinductive => path_from_global_entry, + PathKind::Inductive => UsageKind::Single(PathKind::Inductive), + }; + + match (full_path, path_from_head) { + (UsageKind::Mixed, _) + | (UsageKind::Single(PathKind::Coinductive), PathKind::Coinductive) + | (UsageKind::Single(PathKind::Inductive), PathKind::Inductive) => { + debug!( + ?full_path, + ?path_from_head, + "cache entry not applicable due to matching paths" + ); + return false; + } + _ => debug!(?full_path, ?path_from_head, "paths don't match"), + } + } + } + + true + } + + /// Used when fuzzing the global cache. Accesses the global cache without + /// updating the state of the search graph. 
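// Illustrative sketch, not part of this diff: a simplified version of the
// `candidate_is_applicable` check. A global cache entry may only be reused if
// replaying it could not encounter a cycle (none of its nested goals are on the
// stack) and could not hit the provisional cache instead of recomputing a nested
// goal. The real check additionally compares path kinds before rejecting an
// entry; that refinement is left out here and all types are toy stand-ins.

use std::collections::{HashMap, HashSet};

fn candidate_is_applicable(
    stack: &[&'static str],
    provisional_cache: &HashMap<&'static str, usize>, // goal -> number of entries
    nested_goals: &HashSet<&'static str>,
) -> bool {
    // Entries without nested goals always apply.
    if nested_goals.is_empty() {
        return true;
    }
    // Replaying the entry would encounter a cycle with a goal on the stack.
    if stack.iter().any(|goal| nested_goals.contains(goal)) {
        return false;
    }
    // Replaying the entry could use a provisional cache entry instead of
    // recomputing a nested goal, which may change the result.
    if nested_goals.iter().any(|goal| provisional_cache.contains_key(goal)) {
        return false;
    }
    true
}

fn main() {
    let stack = ["A", "B"];
    let provisional = HashMap::from([("C", 1)]);
    assert!(!candidate_is_applicable(&stack, &provisional, &HashSet::from(["B"])));
    assert!(!candidate_is_applicable(&stack, &provisional, &HashSet::from(["C"])));
    assert!(candidate_is_applicable(&stack, &provisional, &HashSet::from(["D"])));
}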
+ fn lookup_global_cache_untracked( + &self, + cx: X, + input: X::Input, + available_depth: AvailableDepth, + ) -> Option<X::Result> { + cx.with_global_cache(self.mode, |cache| { + cache + .get(cx, input, available_depth, |nested_goals| { + Self::candidate_is_applicable( + cx, + &self.stack, + &self.provisional_cache, + nested_goals, + ) + }) + .map(|c| c.result) + }) } /// Try to fetch a previously computed result from the global cache, @@ -508,97 +847,206 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { cx: X, input: X::Input, available_depth: AvailableDepth, - inspect: &mut D::ProofTreeBuilder, ) -> Option<X::Result> { cx.with_global_cache(self.mode, |cache| { - let CacheData { - result, - proof_tree, - additional_depth, - encountered_overflow, - nested_goals: _, // FIXME: consider nested goals here. - } = cache.get(cx, input, &self.stack, available_depth)?; - - // If we're building a proof tree and the current cache entry does not - // contain a proof tree, we do not use the entry but instead recompute - // the goal. We simply overwrite the existing entry once we're done, - // caching the proof tree. - if !inspect.try_apply_proof_tree(proof_tree) { - return None; - } + let CacheData { result, additional_depth, encountered_overflow, nested_goals } = cache + .get(cx, input, available_depth, |nested_goals| { + Self::candidate_is_applicable( + cx, + &self.stack, + &self.provisional_cache, + nested_goals, + ) + })?; // Update the reached depth of the current goal to make sure // its state is the same regardless of whether we've used the // global cache or not. let reached_depth = self.stack.next_index().plus(additional_depth); - self.update_parent_goal(reached_depth, encountered_overflow); + // We don't move cycle participants to the global cache, so the + // cycle heads are always empty. + let heads = Default::default(); + Self::update_parent_goal( + cx, + &mut self.stack, + reached_depth, + &heads, + encountered_overflow, + nested_goals, + ); - debug!("global cache hit"); + debug!(?additional_depth, "global cache hit"); Some(result) }) } -} -enum StepResult<X: Cx> { - Done(StackEntry<X>, X::Result), - HasChanged, -} + fn check_cycle_on_stack(&mut self, cx: X, input: X::Input) -> Option<X::Result> { + let (head, _stack_entry) = self.stack.iter_enumerated().find(|(_, e)| e.input == input)?; + debug!("encountered cycle with depth {head:?}"); + // We have a nested goal which directly relies on a goal deeper in the stack. + // + // We start by tagging all cycle participants, as that's necessary for caching. + // + // Finally we can return either the provisional response or the initial response + // in case we're in the first fixpoint iteration for this goal. + let path_kind = Self::stack_path_kind(cx, &self.stack, head); + let usage_kind = UsageKind::Single(path_kind); + self.stack[head].has_been_used = + Some(self.stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind))); + + // Subtle: when encountering a cyclic goal, we still first checked for overflow, + // so we have to update the reached depth. 
+ let next_index = self.stack.next_index(); + let last_index = self.stack.last_index().unwrap(); + let last = &mut self.stack[last_index]; + last.reached_depth = last.reached_depth.max(next_index); + + let path_from_entry = Self::step_kind(cx, last.input); + last.nested_goals.insert(input, UsageKind::Single(path_from_entry)); + last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Coinductive)); + if last_index != head { + last.heads.insert(head); + } + + // Return the provisional result or, if we're in the first iteration, + // start with no constraints. + if let Some(result) = self.stack[head].provisional_result { + Some(result) + } else { + Some(D::initial_provisional_result(cx, path_kind, input)) + } + } + + /// Whether we've reached a fixpoint when evaluating a cycle head. + fn reached_fixpoint( + &mut self, + cx: X, + stack_entry: &StackEntry<X>, + usage_kind: UsageKind, + result: X::Result, + ) -> bool { + if let Some(prev) = stack_entry.provisional_result { + prev == result + } else if let UsageKind::Single(kind) = usage_kind { + D::is_initial_provisional_result(cx, kind, stack_entry.input, result) + } else { + false + } + } -impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { /// When we encounter a coinductive cycle, we have to fetch the /// result of that cycle while we are still computing it. Because /// of this we continuously recompute the cycle until the result /// of the previous iteration is equal to the final result, at which /// point we are done. - fn fixpoint_step_in_task<F>( + fn evaluate_goal_in_task( &mut self, cx: X, input: X::Input, inspect: &mut D::ProofTreeBuilder, - prove_goal: &mut F, - ) -> StepResult<X> - where - F: FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, - { - let result = prove_goal(self, inspect); - let stack_entry = self.stack.pop().unwrap(); - debug_assert_eq!(stack_entry.input, input); - - // If the current goal is not the root of a cycle, we are done. - let Some(usage_kind) = stack_entry.has_been_used else { - return StepResult::Done(stack_entry, result); - }; + mut evaluate_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, + ) -> (StackEntry<X>, X::Result) { + let mut i = 0; + loop { + let result = evaluate_goal(self, inspect); + let stack_entry = self.stack.pop().unwrap(); + debug_assert_eq!(stack_entry.input, input); - // If it is a cycle head, we have to keep trying to prove it until - // we reach a fixpoint. We need to do so for all cycle heads, - // not only for the root. - // - // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs - // for an example. - - // Start by clearing all provisional cache entries which depend on this - // the current goal. - Self::clear_dependent_provisional_results( - &mut self.provisional_cache, - self.stack.next_index(), - ); + // If the current goal is not the root of a cycle, we are done. + // + // There are no provisional cache entries which depend on this goal. + let Some(usage_kind) = stack_entry.has_been_used else { + return (stack_entry, result); + }; - // Check whether we reached a fixpoint, either because the final result - // is equal to the provisional result of the previous iteration, or because - // this was only the root of either coinductive or inductive cycles, and the - // final result is equal to the initial response for that case. - // - // If we did not reach a fixpoint, update the provisional result and reevaluate. 
- if D::reached_fixpoint(cx, usage_kind, input, stack_entry.provisional_result, result) { - StepResult::Done(stack_entry, result) - } else { - let depth = self.stack.push(StackEntry { + // If it is a cycle head, we have to keep trying to prove it until + // we reach a fixpoint. We need to do so for all cycle heads, + // not only for the root. + // + // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs + // for an example. + // + // Check whether we reached a fixpoint, either because the final result + // is equal to the provisional result of the previous iteration, or because + // this was only the root of either coinductive or inductive cycles, and the + // final result is equal to the initial response for that case. + if self.reached_fixpoint(cx, &stack_entry, usage_kind, result) { + self.rebase_provisional_cache_entries(cx, &stack_entry, |_, result| result); + return (stack_entry, result); + } + + // If computing this goal results in ambiguity with no constraints, + // we do not rerun it. It's incredibly difficult to get a different + // response in the next iteration in this case. These changes would + // likely either be caused by incompleteness or can change the maybe + // cause from ambiguity to overflow. Returning ambiguity always + // preserves soundness and completeness even if the goal is be known + // to succeed or fail. + // + // This prevents exponential blowup affecting multiple major crates. + // As we only get to this branch if we haven't yet reached a fixpoint, + // we also taint all provisional cache entries which depend on the + // current goal. + if D::is_ambiguous_result(result) { + self.rebase_provisional_cache_entries(cx, &stack_entry, |input, _| { + D::propagate_ambiguity(cx, input, result) + }); + return (stack_entry, result); + }; + + // If we've reached the fixpoint step limit, we bail with overflow and taint all + // provisional cache entries which depend on the current goal. + i += 1; + if i >= D::FIXPOINT_STEP_LIMIT { + debug!("canonical cycle overflow"); + let result = D::on_fixpoint_overflow(cx, input); + self.rebase_provisional_cache_entries(cx, &stack_entry, |input, _| { + D::on_fixpoint_overflow(cx, input) + }); + return (stack_entry, result); + } + + // Clear all provisional cache entries which depend on a previous provisional + // result of this goal and rerun. + self.clear_dependent_provisional_results(); + + debug!(?result, "fixpoint changed provisional results"); + self.stack.push(StackEntry { has_been_used: None, provisional_result: Some(result), ..stack_entry }); - debug_assert_eq!(self.provisional_cache[&input].stack_depth, Some(depth)); - StepResult::HasChanged } } + + /// When encountering a cycle, both inductive and coinductive, we only + /// move the root into the global cache. We also store all other cycle + /// participants involved. + /// + /// We must not use the global cache entry of a root goal if a cycle + /// participant is on the stack. This is necessary to prevent unstable + /// results. See the comment of `StackEntry::nested_goals` for + /// more details. 
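// Illustrative sketch, not part of this diff: the shape of the fixpoint loop in
// `evaluate_goal_in_task`, modeled over a toy result type. The concrete results
// and step limit are placeholders, and the `is_initial_provisional_result`
// shortcut plus the provisional cache bookkeeping are omitted; what remains is
// the order of the exit conditions: fixpoint reached, ambiguity returned as-is,
// step limit hit, otherwise rerun with the new provisional result.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ToyResult {
    Holds(u32),
    Ambiguous,
    Overflow,
}

const FIXPOINT_STEP_LIMIT: usize = 8;

fn fixpoint<F: FnMut(Option<ToyResult>) -> ToyResult>(mut evaluate: F) -> ToyResult {
    let mut provisional_result = None;
    for _ in 0..FIXPOINT_STEP_LIMIT {
        let result = evaluate(provisional_result);
        // Done once the result no longer changes between iterations.
        if provisional_result == Some(result) {
            return result;
        }
        // Ambiguity without constraints is returned as-is instead of rerunning.
        if result == ToyResult::Ambiguous {
            return result;
        }
        provisional_result = Some(result);
    }
    // We failed to reach a fixpoint within the step limit.
    ToyResult::Overflow
}

fn main() {
    // Converges after a few reruns: each iteration sees the previous result.
    let result = fixpoint(|prev| match prev {
        None => ToyResult::Holds(0),
        Some(ToyResult::Holds(n)) if n < 2 => ToyResult::Holds(n + 1),
        Some(other) => other,
    });
    assert_eq!(result, ToyResult::Holds(2));
}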
+ fn insert_global_cache( + &mut self, + cx: X, + input: X::Input, + final_entry: StackEntry<X>, + result: X::Result, + dep_node: X::DepNodeIndex, + ) { + let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len(); + debug!(?final_entry, ?result, "insert global cache"); + cx.with_global_cache(self.mode, |cache| { + cache.insert( + cx, + input, + result, + dep_node, + additional_depth, + final_entry.encountered_overflow, + final_entry.nested_goals, + ) + }) + } } diff --git a/compiler/rustc_type_ir/src/search_graph/validate.rs b/compiler/rustc_type_ir/src/search_graph/validate.rs deleted file mode 100644 index 1ae806834ba..00000000000 --- a/compiler/rustc_type_ir/src/search_graph/validate.rs +++ /dev/null @@ -1,75 +0,0 @@ -use super::*; - -impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { - #[allow(rustc::potential_query_instability)] - pub(super) fn check_invariants(&self) { - if !cfg!(debug_assertions) { - return; - } - - let SearchGraph { mode: _, stack, provisional_cache, _marker } = self; - if stack.is_empty() { - assert!(provisional_cache.is_empty()); - } - - for (depth, entry) in stack.iter_enumerated() { - let StackEntry { - input, - available_depth: _, - reached_depth: _, - non_root_cycle_participant, - encountered_overflow: _, - has_been_used, - ref nested_goals, - provisional_result, - } = *entry; - let cache_entry = provisional_cache.get(&entry.input).unwrap(); - assert_eq!(cache_entry.stack_depth, Some(depth)); - if let Some(head) = non_root_cycle_participant { - assert!(head < depth); - assert!(nested_goals.is_empty()); - assert_ne!(stack[head].has_been_used, None); - - let mut current_root = head; - while let Some(parent) = stack[current_root].non_root_cycle_participant { - current_root = parent; - } - assert!(stack[current_root].nested_goals.contains(&input)); - } - - if !nested_goals.is_empty() { - assert!(provisional_result.is_some() || has_been_used.is_some()); - for entry in stack.iter().take(depth.as_usize()) { - assert_eq!(nested_goals.get(&entry.input), None); - } - } - } - - for (&input, entry) in &self.provisional_cache { - let ProvisionalCacheEntry { stack_depth, with_coinductive_stack, with_inductive_stack } = - entry; - assert!( - stack_depth.is_some() - || with_coinductive_stack.is_some() - || with_inductive_stack.is_some() - ); - - if let &Some(stack_depth) = stack_depth { - assert_eq!(stack[stack_depth].input, input); - } - - let check_detached = |detached_entry: &DetachedEntry<X>| { - let DetachedEntry { head, result: _ } = *detached_entry; - assert_ne!(stack[head].has_been_used, None); - }; - - if let Some(with_coinductive_stack) = with_coinductive_stack { - check_detached(with_coinductive_stack); - } - - if let Some(with_inductive_stack) = with_inductive_stack { - check_detached(with_inductive_stack); - } - } - } -} diff --git a/compiler/rustc_type_ir/src/solve/inspect.rs b/compiler/rustc_type_ir/src/solve/inspect.rs index 47d5e0dace7..099c66f6bdc 100644 --- a/compiler/rustc_type_ir/src/solve/inspect.rs +++ b/compiler/rustc_type_ir/src/solve/inspect.rs @@ -69,9 +69,7 @@ pub struct CanonicalGoalEvaluation<I: Interner> { #[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)] pub enum CanonicalGoalEvaluationKind<I: Interner> { Overflow, - CycleInStack, - ProvisionalCacheHit, - Evaluation { final_revision: I::CanonicalGoalEvaluationStepRef }, + Evaluation { final_revision: CanonicalGoalEvaluationStep<I> }, } #[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)] diff --git a/compiler/rustc_type_ir/src/solve/mod.rs 
b/compiler/rustc_type_ir/src/solve/mod.rs index 444fd01f012..00fc6ba1c5c 100644 --- a/compiler/rustc_type_ir/src/solve/mod.rs +++ b/compiler/rustc_type_ir/src/solve/mod.rs @@ -340,11 +340,3 @@ impl MaybeCause { } } } - -#[derive_where(PartialEq, Eq, Debug; I: Interner)] -pub struct CacheData<I: Interner> { - pub result: QueryResult<I>, - pub proof_tree: Option<I::CanonicalGoalEvaluationStepRef>, - pub additional_depth: usize, - pub encountered_overflow: bool, -} diff --git a/compiler/rustc_type_ir/src/ty_kind.rs b/compiler/rustc_type_ir/src/ty_kind.rs index 7e48f1b20a8..328b6739d97 100644 --- a/compiler/rustc_type_ir/src/ty_kind.rs +++ b/compiler/rustc_type_ir/src/ty_kind.rs @@ -143,7 +143,12 @@ pub enum TyKind<I: Interner> { /// fn foo() -> i32 { 1 } /// let bar: fn() -> i32 = foo; /// ``` - FnPtr(ty::Binder<I, FnSig<I>>), + /// + /// These two fields are equivalent to a `ty::Binder<I, FnSig<I>>`. But by + /// splitting that into two pieces, we get a more compact data layout that + /// reduces the size of `TyKind` by 8 bytes. It is a very hot type, so it's + /// worth the mild inconvenience. + FnPtr(ty::Binder<I, FnSigTys<I>>, FnHeader<I>), /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. Dynamic(I::BoundExistentialPredicates, I::Region, DynKind), @@ -288,7 +293,7 @@ impl<I: Interner> fmt::Debug for TyKind<I> { RawPtr(ty, mutbl) => write!(f, "*{} {:?}", mutbl.ptr_str(), ty), Ref(r, t, m) => write!(f, "&{:?} {}{:?}", r, m.prefix_str(), t), FnDef(d, s) => f.debug_tuple("FnDef").field(d).field(&s).finish(), - FnPtr(s) => write!(f, "{s:?}"), + FnPtr(sig_tys, hdr) => write!(f, "{:?}", sig_tys.with(*hdr)), Dynamic(p, r, repr) => match repr { DynKind::Dyn => write!(f, "dyn {p:?} + {r:?}"), DynKind::DynStar => write!(f, "dyn* {p:?} + {r:?}"), @@ -868,16 +873,12 @@ pub struct FnSig<I: Interner> { } impl<I: Interner> FnSig<I> { - pub fn split_inputs_and_output(self) -> (I::FnInputTys, I::Ty) { - self.inputs_and_output.split_inputs_and_output() - } - pub fn inputs(self) -> I::FnInputTys { - self.split_inputs_and_output().0 + self.inputs_and_output.inputs() } pub fn output(self) -> I::Ty { - self.split_inputs_and_output().1 + self.inputs_and_output.output() } pub fn is_fn_trait_compatible(self) -> bool { @@ -922,6 +923,13 @@ impl<I: Interner> ty::Binder<I, FnSig<I>> { pub fn is_fn_trait_compatible(&self) -> bool { self.skip_binder().is_fn_trait_compatible() } + + // Used to split a single value into the two fields in `TyKind::FnPtr`. + pub fn split(self) -> (ty::Binder<I, FnSigTys<I>>, FnHeader<I>) { + let hdr = + FnHeader { c_variadic: self.c_variadic(), safety: self.safety(), abi: self.abi() }; + (self.map_bound(|sig| FnSigTys { inputs_and_output: sig.inputs_and_output }), hdr) + } } impl<I: Interner> fmt::Debug for FnSig<I> { @@ -935,7 +943,7 @@ impl<I: Interner> fmt::Debug for FnSig<I> { } write!(f, "fn(")?; - let (inputs, output) = sig.split_inputs_and_output(); + let inputs = sig.inputs(); for (i, ty) in inputs.iter().enumerate() { if i > 0 { write!(f, ", ")?; @@ -951,9 +959,69 @@ impl<I: Interner> fmt::Debug for FnSig<I> { } write!(f, ")")?; + let output = sig.output(); match output.kind() { Tuple(list) if list.is_empty() => Ok(()), _ => write!(f, " -> {:?}", sig.output()), } } } + +// This is just a `FnSig` without the `FnHeader` fields. 
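// Illustrative sketch, not part of this diff: the split/recombine pattern used
// for `TyKind::FnPtr`, modeled on plain structs. The binder-dependent types are
// stored separately from the small header so the enum variant only carries two
// slim fields, and `split`/`with` must round-trip. All names and field types
// below are placeholders, not the real `FnSig`, `FnSigTys` or `FnHeader`.

#[derive(Clone, Debug, PartialEq, Eq)]
struct Sig {
    inputs_and_output: Vec<&'static str>,
    c_variadic: bool,
    is_unsafe: bool,
    abi: &'static str,
}

#[derive(Clone, Debug, PartialEq, Eq)]
struct SigTys {
    inputs_and_output: Vec<&'static str>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Header {
    c_variadic: bool,
    is_unsafe: bool,
    abi: &'static str,
}

fn split(sig: Sig) -> (SigTys, Header) {
    let hdr = Header { c_variadic: sig.c_variadic, is_unsafe: sig.is_unsafe, abi: sig.abi };
    (SigTys { inputs_and_output: sig.inputs_and_output }, hdr)
}

fn with(tys: SigTys, hdr: Header) -> Sig {
    Sig {
        inputs_and_output: tys.inputs_and_output,
        c_variadic: hdr.c_variadic,
        is_unsafe: hdr.is_unsafe,
        abi: hdr.abi,
    }
}

fn main() {
    let sig = Sig {
        inputs_and_output: vec!["i32", "bool"],
        c_variadic: false,
        is_unsafe: false,
        abi: "Rust",
    };
    let (tys, hdr) = split(sig.clone());
    assert_eq!(with(tys, hdr), sig);
}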
+#[derive_where(Clone, Copy, Debug, PartialEq, Eq, Hash; I: Interner)] +#[cfg_attr(feature = "nightly", derive(TyEncodable, TyDecodable, HashStable_NoContext))] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +pub struct FnSigTys<I: Interner> { + pub inputs_and_output: I::Tys, +} + +impl<I: Interner> FnSigTys<I> { + pub fn inputs(self) -> I::FnInputTys { + self.inputs_and_output.inputs() + } + + pub fn output(self) -> I::Ty { + self.inputs_and_output.output() + } +} + +impl<I: Interner> ty::Binder<I, FnSigTys<I>> { + // Used to combine the two fields in `TyKind::FnPtr` into a single value. + pub fn with(self, hdr: FnHeader<I>) -> ty::Binder<I, FnSig<I>> { + self.map_bound(|sig_tys| FnSig { + inputs_and_output: sig_tys.inputs_and_output, + c_variadic: hdr.c_variadic, + safety: hdr.safety, + abi: hdr.abi, + }) + } + + #[inline] + pub fn inputs(self) -> ty::Binder<I, I::FnInputTys> { + self.map_bound(|sig_tys| sig_tys.inputs()) + } + + #[inline] + #[track_caller] + pub fn input(self, index: usize) -> ty::Binder<I, I::Ty> { + self.map_bound(|sig_tys| sig_tys.inputs().get(index).unwrap()) + } + + pub fn inputs_and_output(self) -> ty::Binder<I, I::Tys> { + self.map_bound(|sig_tys| sig_tys.inputs_and_output) + } + + #[inline] + pub fn output(self) -> ty::Binder<I, I::Ty> { + self.map_bound(|sig_tys| sig_tys.output()) + } +} + +#[derive_where(Clone, Copy, Debug, PartialEq, Eq, Hash; I: Interner)] +#[cfg_attr(feature = "nightly", derive(TyEncodable, TyDecodable, HashStable_NoContext))] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +pub struct FnHeader<I: Interner> { + pub c_variadic: bool, + pub safety: I::Safety, + pub abi: I::Abi, +} diff --git a/compiler/rustc_type_ir/src/ty_kind/closure.rs b/compiler/rustc_type_ir/src/ty_kind/closure.rs index 81717ce4a22..9d907baeeb3 100644 --- a/compiler/rustc_type_ir/src/ty_kind/closure.rs +++ b/compiler/rustc_type_ir/src/ty_kind/closure.rs @@ -197,7 +197,7 @@ impl<I: Interner> ClosureArgs<I> { /// Extracts the signature from the closure. 
pub fn sig(self) -> ty::Binder<I, ty::FnSig<I>> { match self.sig_as_fn_ptr_ty().kind() { - ty::FnPtr(sig) => sig, + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), ty => panic!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {ty:?}"), } } @@ -292,21 +292,23 @@ impl<I: Interner> CoroutineClosureArgs<I> { pub fn coroutine_closure_sig(self) -> ty::Binder<I, CoroutineClosureSignature<I>> { let interior = self.coroutine_witness_ty(); - let ty::FnPtr(sig) = self.signature_parts_ty().kind() else { panic!() }; - sig.map_bound(|sig| { - let [resume_ty, tupled_inputs_ty] = *sig.inputs().as_slice() else { + let ty::FnPtr(sig_tys, hdr) = self.signature_parts_ty().kind() else { panic!() }; + sig_tys.map_bound(|sig_tys| { + let [resume_ty, tupled_inputs_ty] = *sig_tys.inputs().as_slice() else { panic!(); }; - let [yield_ty, return_ty] = *sig.output().tuple_fields().as_slice() else { panic!() }; + let [yield_ty, return_ty] = *sig_tys.output().tuple_fields().as_slice() else { + panic!() + }; CoroutineClosureSignature { interior, tupled_inputs_ty, resume_ty, yield_ty, return_ty, - c_variadic: sig.c_variadic, - safety: sig.safety, - abi: sig.abi, + c_variadic: hdr.c_variadic, + safety: hdr.safety, + abi: hdr.abi, } }) } @@ -321,7 +323,7 @@ impl<I: Interner> CoroutineClosureArgs<I> { pub fn has_self_borrows(&self) -> bool { match self.coroutine_captures_by_ref_ty().kind() { - ty::FnPtr(sig) => sig + ty::FnPtr(sig_tys, _) => sig_tys .skip_binder() .visit_with(&mut HasRegionsBoundAt { binder: ty::INNERMOST }) .is_break(), @@ -460,11 +462,11 @@ impl<I: Interner> CoroutineClosureSignature<I> { ) -> I::Ty { match kind { ty::ClosureKind::Fn | ty::ClosureKind::FnMut => { - let ty::FnPtr(sig) = coroutine_captures_by_ref_ty.kind() else { + let ty::FnPtr(sig_tys, _) = coroutine_captures_by_ref_ty.kind() else { panic!(); }; let coroutine_captures_by_ref_ty = - sig.output().skip_binder().fold_with(&mut FoldEscapingRegions { + sig_tys.output().skip_binder().fold_with(&mut FoldEscapingRegions { interner: cx, region: env_region, debruijn: ty::INNERMOST, diff --git a/library/Cargo.lock b/library/Cargo.lock index b36399d880e..815f5bb1385 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -339,6 +339,7 @@ dependencies = [ "std_detect", "unwind", "wasi", + "windows-targets 0.0.0", ] [[package]] @@ -421,11 +422,15 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets", + "windows-targets 0.52.5", ] [[package]] name = "windows-targets" +version = "0.0.0" + +[[package]] +name = "windows-targets" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" diff --git a/library/Cargo.toml b/library/Cargo.toml index c4513b4c127..d8ece6b0ebd 100644 --- a/library/Cargo.toml +++ b/library/Cargo.toml @@ -8,6 +8,7 @@ members = [ exclude = [ # stdarch has its own Cargo workspace "stdarch", + "windows_targets" ] [profile.release.package.compiler_builtins] diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs index 4b9b90fc1f1..571fcd177aa 100644 --- a/library/alloc/src/fmt.rs +++ b/library/alloc/src/fmt.rs @@ -581,7 +581,7 @@ pub use core::fmt::Alignment; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::Error; #[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub use core::fmt::FormatterFn; +pub use core::fmt::{from_fn, FromFn}; #[stable(feature = 
"rust1", since = "1.0.0")] pub use core::fmt::{write, Arguments}; #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 5b84df9ecef..9c8fa7ceff4 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,7 +1,7 @@ #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")] -use core::alloc::LayoutError; -use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; +use core::marker::PhantomData; +use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ptr::{self, NonNull, Unique}; use core::{cmp, hint}; @@ -40,6 +40,13 @@ struct Cap(usize); impl Cap { const ZERO: Cap = unsafe { Cap(0) }; + + /// `Cap(cap)`, except if `T` is a ZST then `Cap::ZERO`. + /// + /// # Safety: cap must be <= `isize::MAX`. + unsafe fn new<T>(cap: usize) -> Self { + if T::IS_ZST { Cap::ZERO } else { unsafe { Self(cap) } } + } } /// A low-level utility for more ergonomically allocating, reallocating, and deallocating @@ -66,7 +73,19 @@ impl Cap { /// `Box<[T]>`, since `capacity()` won't yield the length. #[allow(missing_debug_implementations)] pub(crate) struct RawVec<T, A: Allocator = Global> { - ptr: Unique<T>, + inner: RawVecInner<A>, + _marker: PhantomData<T>, +} + +/// Like a `RawVec`, but only generic over the allocator, not the type. +/// +/// As such, all the methods need the layout passed-in as a parameter. +/// +/// Having this separation reduces the amount of code we need to monomorphize, +/// as most operations don't need the actual type, just its layout. +#[allow(missing_debug_implementations)] +struct RawVecInner<A: Allocator = Global> { + ptr: Unique<u8>, /// Never used for ZSTs; it's `capacity()`'s responsibility to return usize::MAX in that case. /// /// # Safety @@ -90,8 +109,9 @@ impl<T> RawVec<T, Global> { /// `RawVec` with capacity `usize::MAX`. Useful for implementing /// delayed allocation. #[must_use] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] pub const fn new() -> Self { - Self::new_in(Global) + Self { inner: RawVecInner::new::<T>(), _marker: PhantomData } } /// Creates a `RawVec` (on the system heap) with exactly the @@ -113,10 +133,7 @@ impl<T> RawVec<T, Global> { #[must_use] #[inline] pub fn with_capacity(capacity: usize) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global) { - Ok(res) => res, - Err(err) => handle_error(err), - } + Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData } } /// Like `with_capacity`, but guarantees the buffer is zeroed. @@ -124,29 +141,56 @@ impl<T> RawVec<T, Global> { #[must_use] #[inline] pub fn with_capacity_zeroed(capacity: usize) -> Self { - Self::with_capacity_zeroed_in(capacity, Global) + Self { + inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT), + _marker: PhantomData, + } } } -impl<T, A: Allocator> RawVec<T, A> { - // Tiny Vecs are dumb. Skip to: - // - 8 if the element size is 1, because any heap allocators is likely - // to round up a request of less than 8 bytes to at least 8 bytes. - // - 4 if elements are moderate-sized (<= 1 KiB). - // - 1 otherwise, to avoid wasting too much space for very short Vecs. 
- pub(crate) const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 { +impl RawVecInner<Global> { + #[must_use] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] + const fn new<T>() -> Self { + Self::new_in(Global, core::mem::align_of::<T>()) + } + + #[cfg(not(any(no_global_oom_handling, test)))] + #[must_use] + #[inline] + fn with_capacity(capacity: usize, elem_layout: Layout) -> Self { + match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) { + Ok(res) => res, + Err(err) => handle_error(err), + } + } +} + +// Tiny Vecs are dumb. Skip to: +// - 8 if the element size is 1, because any heap allocators is likely +// to round up a request of less than 8 bytes to at least 8 bytes. +// - 4 if elements are moderate-sized (<= 1 KiB). +// - 1 otherwise, to avoid wasting too much space for very short Vecs. +const fn min_non_zero_cap(size: usize) -> usize { + if size == 1 { 8 - } else if mem::size_of::<T>() <= 1024 { + } else if size <= 1024 { 4 } else { 1 - }; + } +} + +impl<T, A: Allocator> RawVec<T, A> { + #[cfg(not(no_global_oom_handling))] + pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::<T>()); /// Like `new`, but parameterized over the choice of allocator for /// the returned `RawVec`. + #[inline] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] pub const fn new_in(alloc: A) -> Self { - // `cap: 0` means "unallocated". zero-sized types are ignored. - Self { ptr: Unique::dangling(), cap: Cap::ZERO, alloc } + Self { inner: RawVecInner::new_in(alloc, align_of::<T>()), _marker: PhantomData } } /// Like `with_capacity`, but parameterized over the choice of @@ -154,9 +198,9 @@ impl<T, A: Allocator> RawVec<T, A> { #[cfg(not(no_global_oom_handling))] #[inline] pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc) { - Ok(res) => res, - Err(err) => handle_error(err), + Self { + inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT), + _marker: PhantomData, } } @@ -164,7 +208,10 @@ impl<T, A: Allocator> RawVec<T, A> { /// allocator for the returned `RawVec`. #[inline] pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> { - Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc) + match RawVecInner::try_with_capacity_in(capacity, alloc, T::LAYOUT) { + Ok(inner) => Ok(Self { inner, _marker: PhantomData }), + Err(e) => Err(e), + } } /// Like `with_capacity_zeroed`, but parameterized over the choice @@ -172,9 +219,9 @@ impl<T, A: Allocator> RawVec<T, A> { #[cfg(not(no_global_oom_handling))] #[inline] pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc) { - Ok(res) => res, - Err(err) => handle_error(err), + Self { + inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT), + _marker: PhantomData, } } @@ -200,45 +247,7 @@ impl<T, A: Allocator> RawVec<T, A> { let me = ManuallyDrop::new(self); unsafe { let slice = ptr::slice_from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len); - Box::from_raw_in(slice, ptr::read(&me.alloc)) - } - } - - fn try_allocate_in( - capacity: usize, - init: AllocInit, - alloc: A, - ) -> Result<Self, TryReserveError> { - // Don't allocate here because `Drop` will not deallocate when `capacity` is 0. 
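// Illustrative sketch, not part of this diff: the code-size pattern behind
// `RawVecInner`. The generic wrapper stays thin and forwards to a non-generic
// inner type that only receives the element `Layout`, so the bulk of the growth
// logic is compiled once instead of once per element type. Everything below is
// a toy stand-in, not the real `RawVec`.

use std::alloc::Layout;
use std::marker::PhantomData;

struct Inner {
    cap: usize,
}

impl Inner {
    // Non-generic: monomorphized exactly once, no matter how many element
    // types use the wrapper.
    fn grow_amortized(&mut self, additional: usize, elem_layout: Layout) -> usize {
        let _ = elem_layout; // the real code sizes the allocation from this
        self.cap = (self.cap * 2).max(self.cap + additional).max(4);
        self.cap
    }
}

struct ThinVec<T> {
    inner: Inner,
    _marker: PhantomData<T>,
}

impl<T> ThinVec<T> {
    fn new() -> Self {
        ThinVec { inner: Inner { cap: 0 }, _marker: PhantomData }
    }

    // Generic shim: only computes the layout and forwards, so the per-type
    // code that has to be generated stays tiny.
    fn reserve(&mut self, additional: usize) -> usize {
        self.inner.grow_amortized(additional, Layout::new::<T>())
    }
}

fn main() {
    let mut a = ThinVec::<u64>::new();
    let mut b = ThinVec::<String>::new();
    assert_eq!(a.reserve(1), 4);
    assert_eq!(b.reserve(10), 10);
}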
- - if T::IS_ZST || capacity == 0 { - Ok(Self::new_in(alloc)) - } else { - // We avoid `unwrap_or_else` here because it bloats the amount of - // LLVM IR generated. - let layout = match Layout::array::<T>(capacity) { - Ok(layout) => layout, - Err(_) => return Err(CapacityOverflow.into()), - }; - - if let Err(err) = alloc_guard(layout.size()) { - return Err(err); - } - - let result = match init { - AllocInit::Uninitialized => alloc.allocate(layout), - #[cfg(not(no_global_oom_handling))] - AllocInit::Zeroed => alloc.allocate_zeroed(layout), - }; - let ptr = match result { - Ok(ptr) => ptr, - Err(_) => return Err(AllocError { layout, non_exhaustive: () }.into()), - }; - - // Allocators currently return a `NonNull<[u8]>` whose length - // matches the size requested. If that ever changes, the capacity - // here should change to `ptr.len() / mem::size_of::<T>()`. - Ok(Self { ptr: Unique::from(ptr.cast()), cap: unsafe { Cap(capacity) }, alloc }) + Box::from_raw_in(slice, ptr::read(&me.inner.alloc)) } } @@ -254,8 +263,15 @@ impl<T, A: Allocator> RawVec<T, A> { /// guaranteed. #[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { - let cap = if T::IS_ZST { Cap::ZERO } else { unsafe { Cap(capacity) } }; - Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc } + // SAFETY: Precondition passed to the caller + unsafe { + let ptr = ptr.cast(); + let capacity = Cap::new::<T>(capacity); + Self { + inner: RawVecInner::from_raw_parts_in(ptr, capacity, alloc), + _marker: PhantomData, + } + } } /// A convenience method for hoisting the non-null precondition out of [`RawVec::from_raw_parts_in`]. @@ -264,9 +280,13 @@ impl<T, A: Allocator> RawVec<T, A> { /// /// See [`RawVec::from_raw_parts_in`]. #[inline] - pub(crate) unsafe fn from_nonnull_in(ptr: NonNull<T>, capacity: usize, alloc: A) -> Self { - let cap = if T::IS_ZST { Cap::ZERO } else { unsafe { Cap(capacity) } }; - Self { ptr: Unique::from(ptr), cap, alloc } + pub unsafe fn from_nonnull_in(ptr: NonNull<T>, capacity: usize, alloc: A) -> Self { + // SAFETY: Precondition passed to the caller + unsafe { + let ptr = ptr.cast(); + let capacity = Cap::new::<T>(capacity); + Self { inner: RawVecInner::from_nonnull_in(ptr, capacity, alloc), _marker: PhantomData } + } } /// Gets a raw pointer to the start of the allocation. Note that this is @@ -274,43 +294,26 @@ impl<T, A: Allocator> RawVec<T, A> { /// be careful. #[inline] pub fn ptr(&self) -> *mut T { - self.ptr.as_ptr() + self.inner.ptr() } #[inline] pub fn non_null(&self) -> NonNull<T> { - NonNull::from(self.ptr) + self.inner.non_null() } /// Gets the capacity of the allocation. /// /// This will always be `usize::MAX` if `T` is zero-sized. - #[inline(always)] + #[inline] pub fn capacity(&self) -> usize { - if T::IS_ZST { usize::MAX } else { self.cap.0 } + self.inner.capacity(size_of::<T>()) } /// Returns a shared reference to the allocator backing this `RawVec`. + #[inline] pub fn allocator(&self) -> &A { - &self.alloc - } - - fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> { - if T::IS_ZST || self.cap.0 == 0 { - None - } else { - // We could use Layout::array here which ensures the absence of isize and usize overflows - // and could hypothetically handle differences between stride and size, but this memory - // has already been allocated so we know it can't overflow and currently Rust does not - // support such types. So we can do better by skipping some checks and avoid an unwrap. 
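// The "capacity is always `usize::MAX` for zero-sized `T`" contract noted above is observable
// through `Vec`, which is built on `RawVec`: a vector of a zero-sized type never allocates.
fn main() {
    let v: Vec<()> = Vec::new();
    assert_eq!(v.capacity(), usize::MAX);

    let mut w: Vec<()> = Vec::with_capacity(1_000);
    w.push(());
    assert_eq!(w.capacity(), usize::MAX);
}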
- const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) }; - unsafe { - let align = mem::align_of::<T>(); - let size = mem::size_of::<T>().unchecked_mul(self.cap.0); - let layout = Layout::from_size_align_unchecked(size, align); - Some((self.ptr.cast().into(), layout)) - } - } + self.inner.allocator() } /// Ensures that the buffer contains at least enough space to hold `len + @@ -335,24 +338,7 @@ impl<T, A: Allocator> RawVec<T, A> { #[cfg(not(no_global_oom_handling))] #[inline] pub fn reserve(&mut self, len: usize, additional: usize) { - // Callers expect this function to be very cheap when there is already sufficient capacity. - // Therefore, we move all the resizing and error-handling logic from grow_amortized and - // handle_reserve behind a call, while making sure that this function is likely to be - // inlined as just a comparison and a call if the comparison fails. - #[cold] - fn do_reserve_and_handle<T, A: Allocator>( - slf: &mut RawVec<T, A>, - len: usize, - additional: usize, - ) { - if let Err(err) = slf.grow_amortized(len, additional) { - handle_error(err); - } - } - - if self.needs_to_grow(len, additional) { - do_reserve_and_handle(self, len, additional); - } + self.inner.reserve(len, additional, T::LAYOUT) } /// A specialized version of `self.reserve(len, 1)` which requires the @@ -360,21 +346,12 @@ impl<T, A: Allocator> RawVec<T, A> { #[cfg(not(no_global_oom_handling))] #[inline(never)] pub fn grow_one(&mut self) { - if let Err(err) = self.grow_amortized(self.cap.0, 1) { - handle_error(err); - } + self.inner.grow_one(T::LAYOUT) } /// The same as `reserve`, but returns on errors instead of panicking or aborting. pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { - if self.needs_to_grow(len, additional) { - self.grow_amortized(len, additional)?; - } - unsafe { - // Inform the optimizer that the reservation has succeeded or wasn't needed - hint::assert_unchecked(!self.needs_to_grow(len, additional)); - } - Ok(()) + self.inner.try_reserve(len, additional, T::LAYOUT) } /// Ensures that the buffer contains at least enough space to hold `len + @@ -396,9 +373,7 @@ impl<T, A: Allocator> RawVec<T, A> { /// Aborts on OOM. #[cfg(not(no_global_oom_handling))] pub fn reserve_exact(&mut self, len: usize, additional: usize) { - if let Err(err) = self.try_reserve_exact(len, additional) { - handle_error(err); - } + self.inner.reserve_exact(len, additional, T::LAYOUT) } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. @@ -407,14 +382,7 @@ impl<T, A: Allocator> RawVec<T, A> { len: usize, additional: usize, ) -> Result<(), TryReserveError> { - if self.needs_to_grow(len, additional) { - self.grow_exact(len, additional)?; - } - unsafe { - // Inform the optimizer that the reservation has succeeded or wasn't needed - hint::assert_unchecked(!self.needs_to_grow(len, additional)); - } - Ok(()) + self.inner.try_reserve_exact(len, additional, T::LAYOUT) } /// Shrinks the buffer down to the specified capacity. If the given amount @@ -430,22 +398,230 @@ impl<T, A: Allocator> RawVec<T, A> { #[cfg(not(no_global_oom_handling))] #[inline] pub fn shrink_to_fit(&mut self, cap: usize) { - if let Err(err) = self.shrink(cap) { + self.inner.shrink_to_fit(cap, T::LAYOUT) + } +} + +unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> { + /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. + fn drop(&mut self) { + // SAFETY: We are in a Drop impl, self.inner will not be used again. 
+ unsafe { self.inner.deallocate(T::LAYOUT) } + } +} + +impl<A: Allocator> RawVecInner<A> { + #[inline] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] + const fn new_in(alloc: A, align: usize) -> Self { + let ptr = unsafe { core::mem::transmute(align) }; + // `cap: 0` means "unallocated". zero-sized types are ignored. + Self { ptr, cap: Cap::ZERO, alloc } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self { + match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) { + Ok(this) => { + unsafe { + // Make it more obvious that a subsquent Vec::reserve(capacity) will not allocate. + hint::assert_unchecked(!this.needs_to_grow(0, capacity, elem_layout)); + } + this + } + Err(err) => handle_error(err), + } + } + + #[inline] + fn try_with_capacity_in( + capacity: usize, + alloc: A, + elem_layout: Layout, + ) -> Result<Self, TryReserveError> { + Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self { + match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) { + Ok(res) => res, + Err(err) => handle_error(err), + } + } + + fn try_allocate_in( + capacity: usize, + init: AllocInit, + alloc: A, + elem_layout: Layout, + ) -> Result<Self, TryReserveError> { + // We avoid `unwrap_or_else` here because it bloats the amount of + // LLVM IR generated. + let layout = match layout_array(capacity, elem_layout) { + Ok(layout) => layout, + Err(_) => return Err(CapacityOverflow.into()), + }; + + // Don't allocate here because `Drop` will not deallocate when `capacity` is 0. + if layout.size() == 0 { + return Ok(Self::new_in(alloc, elem_layout.align())); + } + + if let Err(err) = alloc_guard(layout.size()) { + return Err(err); + } + + let result = match init { + AllocInit::Uninitialized => alloc.allocate(layout), + #[cfg(not(no_global_oom_handling))] + AllocInit::Zeroed => alloc.allocate_zeroed(layout), + }; + let ptr = match result { + Ok(ptr) => ptr, + Err(_) => return Err(AllocError { layout, non_exhaustive: () }.into()), + }; + + // Allocators currently return a `NonNull<[u8]>` whose length + // matches the size requested. If that ever changes, the capacity + // here should change to `ptr.len() / mem::size_of::<T>()`. 
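// `new_in` above conjures its "unallocated" pointer straight from the element alignment: any
// power-of-two address is non-null and suitably aligned, and it carries no provenance because it
// is never dereferenced. A sketch of the same trick using the strict-provenance constructor,
// assuming a toolchain recent enough to have it stabilized:
use std::ptr::NonNull;

fn dangling_for_align(align: usize) -> NonNull<u8> {
    assert!(align.is_power_of_two());
    // SAFETY: `align` is a power of two, hence non-zero.
    unsafe { NonNull::new_unchecked(std::ptr::without_provenance_mut(align)) }
}

fn main() {
    let p = dangling_for_align(std::mem::align_of::<u64>());
    assert_eq!(p.as_ptr() as usize % std::mem::align_of::<u64>(), 0);
    assert!(!p.as_ptr().is_null());
}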
+ Ok(Self { ptr: Unique::from(ptr.cast()), cap: unsafe { Cap(capacity) }, alloc }) + } + + #[inline] + unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self { + Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc } + } + + #[inline] + unsafe fn from_nonnull_in(ptr: NonNull<u8>, cap: Cap, alloc: A) -> Self { + Self { ptr: Unique::from(ptr), cap, alloc } + } + + #[inline] + fn ptr<T>(&self) -> *mut T { + self.non_null::<T>().as_ptr() + } + + #[inline] + fn non_null<T>(&self) -> NonNull<T> { + self.ptr.cast().into() + } + + #[inline] + fn capacity(&self, elem_size: usize) -> usize { + if elem_size == 0 { usize::MAX } else { self.cap.0 } + } + + #[inline] + fn allocator(&self) -> &A { + &self.alloc + } + + #[inline] + fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> { + if elem_layout.size() == 0 || self.cap.0 == 0 { + None + } else { + // We could use Layout::array here which ensures the absence of isize and usize overflows + // and could hypothetically handle differences between stride and size, but this memory + // has already been allocated so we know it can't overflow and currently Rust does not + // support such types. So we can do better by skipping some checks and avoid an unwrap. + unsafe { + let alloc_size = elem_layout.size().unchecked_mul(self.cap.0); + let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align()); + Some((self.ptr.into(), layout)) + } + } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) { + // Callers expect this function to be very cheap when there is already sufficient capacity. + // Therefore, we move all the resizing and error-handling logic from grow_amortized and + // handle_reserve behind a call, while making sure that this function is likely to be + // inlined as just a comparison and a call if the comparison fails. + #[cold] + fn do_reserve_and_handle<A: Allocator>( + slf: &mut RawVecInner<A>, + len: usize, + additional: usize, + elem_layout: Layout, + ) { + if let Err(err) = slf.grow_amortized(len, additional, elem_layout) { + handle_error(err); + } + } + + if self.needs_to_grow(len, additional, elem_layout) { + do_reserve_and_handle(self, len, additional, elem_layout); + } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn grow_one(&mut self, elem_layout: Layout) { + if let Err(err) = self.grow_amortized(self.cap.0, 1, elem_layout) { handle_error(err); } } -} -impl<T, A: Allocator> RawVec<T, A> { - /// Returns if the buffer needs to grow to fulfill the needed extra capacity. - /// Mainly used to make inlining reserve-calls possible without inlining `grow`. - fn needs_to_grow(&self, len: usize, additional: usize) -> bool { - additional > self.capacity().wrapping_sub(len) + fn try_reserve( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + if self.needs_to_grow(len, additional, elem_layout) { + self.grow_amortized(len, additional, elem_layout)?; + } + unsafe { + // Inform the optimizer that the reservation has succeeded or wasn't needed + hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout)); + } + Ok(()) } - /// # Safety: - /// - /// `cap` must not exceed `isize::MAX`. 
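// A standalone sketch of the `reserve` shape used above: the cheap capacity check stays in the
// inlinable hot path, while the grow-and-handle-error path sits behind a #[cold] inner function
// so callers compile down to a comparison plus an out-of-line call. The growth policy here is a
// stand-in, not the real one.
struct Lengths {
    len: usize,
    cap: usize,
}

impl Lengths {
    #[inline]
    fn reserve(&mut self, additional: usize) {
        #[cold]
        fn grow_and_handle(this: &mut Lengths, additional: usize) {
            // Placeholder growth policy for the sketch: amortized doubling.
            this.cap = (this.cap * 2).max(this.len + additional);
        }

        if additional > self.cap.wrapping_sub(self.len) {
            grow_and_handle(self, additional);
        }
    }
}

fn main() {
    let mut b = Lengths { len: 3, cap: 4 };
    b.reserve(1); // fits, stays on the hot path
    assert_eq!(b.cap, 4);
    b.reserve(10); // takes the cold path and grows
    assert!(b.cap >= 13);
}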
+ #[cfg(not(no_global_oom_handling))] + fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) { + if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) { + handle_error(err); + } + } + + fn try_reserve_exact( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + if self.needs_to_grow(len, additional, elem_layout) { + self.grow_exact(len, additional, elem_layout)?; + } + unsafe { + // Inform the optimizer that the reservation has succeeded or wasn't needed + hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout)); + } + Ok(()) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) { + if let Err(err) = self.shrink(cap, elem_layout) { + handle_error(err); + } + } + + #[inline] + fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool { + additional > self.capacity(elem_layout.size()).wrapping_sub(len) + } + + #[inline] unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) { // Allocators currently return a `NonNull<[u8]>` whose length matches // the size requested. If that ever changes, the capacity here should @@ -454,18 +630,16 @@ impl<T, A: Allocator> RawVec<T, A> { self.cap = unsafe { Cap(cap) }; } - // This method is usually instantiated many times. So we want it to be as - // small as possible, to improve compile times. But we also want as much of - // its contents to be statically computable as possible, to make the - // generated code run faster. Therefore, this method is carefully written - // so that all of the code that depends on `T` is within it, while as much - // of the code that doesn't depend on `T` as possible is in functions that - // are non-generic over `T`. - fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { + fn grow_amortized( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { // This is ensured by the calling contexts. debug_assert!(additional > 0); - if T::IS_ZST { + if elem_layout.size() == 0 { // Since we return a capacity of `usize::MAX` when `elem_size` is // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow.into()); @@ -477,33 +651,34 @@ impl<T, A: Allocator> RawVec<T, A> { // This guarantees exponential growth. The doubling cannot overflow // because `cap <= isize::MAX` and the type of `cap` is `usize`. let cap = cmp::max(self.cap.0 * 2, required_cap); - let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap); + let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap); - let new_layout = Layout::array::<T>(cap); + let new_layout = layout_array(cap, elem_layout)?; - // `finish_grow` is non-generic over `T`. - let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; + let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?; // SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items + unsafe { self.set_ptr_and_cap(ptr, cap) }; Ok(()) } - // The constraints on this method are much the same as those on - // `grow_amortized`, but this method is usually instantiated less often so - // it's less critical. 
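// The growth rule from `grow_amortized` above, pulled out as a tiny standalone function: take
// the largest of "what the caller needs", "double the current capacity", and the small-vector
// floor (`min_non_zero_cap`), reporting overflow instead of wrapping.
fn next_capacity(cap: usize, len: usize, additional: usize, min_cap: usize) -> Option<usize> {
    let required = len.checked_add(additional)?;
    Some(required.max(cap * 2).max(min_cap))
}

fn main() {
    assert_eq!(next_capacity(4, 4, 1, 4), Some(8));       // doubling dominates
    assert_eq!(next_capacity(4, 4, 100, 4), Some(104));   // the request dominates
    assert_eq!(next_capacity(0, 0, 1, 8), Some(8));       // the floor dominates
    assert_eq!(next_capacity(0, usize::MAX, 1, 8), None); // overflow is reported
}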
- fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { - if T::IS_ZST { + fn grow_exact( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + if elem_layout.size() == 0 { // Since we return a capacity of `usize::MAX` when the type size is // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow.into()); } let cap = len.checked_add(additional).ok_or(CapacityOverflow)?; - let new_layout = Layout::array::<T>(cap); + let new_layout = layout_array(cap, elem_layout)?; - // `finish_grow` is non-generic over `T`. - let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; - // SAFETY: `finish_grow` would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items + let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?; + // SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items unsafe { self.set_ptr_and_cap(ptr, cap); } @@ -512,10 +687,10 @@ impl<T, A: Allocator> RawVec<T, A> { #[cfg(not(no_global_oom_handling))] #[inline] - fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> { - assert!(cap <= self.capacity(), "Tried to shrink to a larger capacity"); + fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> { + assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity"); // SAFETY: Just checked this isn't trying to grow - unsafe { self.shrink_unchecked(cap) } + unsafe { self.shrink_unchecked(cap, elem_layout) } } /// `shrink`, but without the capacity check. @@ -529,23 +704,27 @@ impl<T, A: Allocator> RawVec<T, A> { /// # Safety /// `cap <= self.capacity()` #[cfg(not(no_global_oom_handling))] - unsafe fn shrink_unchecked(&mut self, cap: usize) -> Result<(), TryReserveError> { - let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; - // See current_memory() why this assert is here - const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) }; + unsafe fn shrink_unchecked( + &mut self, + cap: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + let (ptr, layout) = + if let Some(mem) = self.current_memory(elem_layout) { mem } else { return Ok(()) }; // If shrinking to 0, deallocate the buffer. We don't reach this point // for the T::IS_ZST case since current_memory() will have returned // None. if cap == 0 { unsafe { self.alloc.deallocate(ptr, layout) }; - self.ptr = Unique::dangling(); + self.ptr = + unsafe { Unique::new_unchecked(ptr::without_provenance_mut(elem_layout.align())) }; self.cap = Cap::ZERO; } else { let ptr = unsafe { - // `Layout::array` cannot overflow here because it would have + // Layout cannot overflow here because it would have // overflowed earlier when capacity was larger. - let new_size = mem::size_of::<T>().unchecked_mul(cap); + let new_size = elem_layout.size().unchecked_mul(cap); let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); self.alloc .shrink(ptr, layout, new_layout) @@ -558,24 +737,32 @@ impl<T, A: Allocator> RawVec<T, A> { } Ok(()) } + + /// # Safety + /// + /// This function deallocates the owned allocation, but does not update `ptr` or `cap` to + /// prevent double-free or use-after-free. Essentially, do not do anything with the caller + /// after this function returns. 
+ /// Ideally this function would take `self` by move, but it cannot because it exists to be + /// called from a `Drop` impl. + unsafe fn deallocate(&mut self, elem_layout: Layout) { + if let Some((ptr, layout)) = self.current_memory(elem_layout) { + unsafe { + self.alloc.deallocate(ptr, layout); + } + } + } } -// This function is outside `RawVec` to minimize compile times. See the comment -// above `RawVec::grow_amortized` for details. (The `A` parameter isn't -// significant, because the number of different `A` types seen in practice is -// much smaller than the number of `T` types.) #[inline(never)] fn finish_grow<A>( - new_layout: Result<Layout, LayoutError>, + new_layout: Layout, current_memory: Option<(NonNull<u8>, Layout)>, alloc: &mut A, ) -> Result<NonNull<[u8]>, TryReserveError> where A: Allocator, { - // Check for the error here to minimize the size of `RawVec::grow_*`. - let new_layout = new_layout.map_err(|_| CapacityOverflow)?; - alloc_guard(new_layout.size())?; let memory = if let Some((ptr, old_layout)) = current_memory { @@ -592,15 +779,6 @@ where memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) } -unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> { - /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. - fn drop(&mut self) { - if let Some((ptr, layout)) = self.current_memory() { - unsafe { self.alloc.deallocate(ptr, layout) } - } - } -} - // Central function for reserve error handling. #[cfg(not(no_global_oom_handling))] #[cold] @@ -627,3 +805,8 @@ fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> { Ok(()) } } + +#[inline] +fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> { + elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into()) +} diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs index 48c6e5f46f8..d78ded104fb 100644 --- a/library/alloc/src/raw_vec/tests.rs +++ b/library/alloc/src/raw_vec/tests.rs @@ -43,9 +43,9 @@ fn allocator_param() { let a = BoundedAlloc { fuel: Cell::new(500) }; let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a); - assert_eq!(v.alloc.fuel.get(), 450); + assert_eq!(v.inner.alloc.fuel.get(), 450); v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel) - assert_eq!(v.alloc.fuel.get(), 250); + assert_eq!(v.inner.alloc.fuel.get(), 250); } #[test] @@ -86,7 +86,7 @@ struct ZST; fn zst_sanity<T>(v: &RawVec<T>) { assert_eq!(v.capacity(), usize::MAX); assert_eq!(v.ptr(), core::ptr::Unique::<T>::dangling().as_ptr()); - assert_eq!(v.current_memory(), None); + assert_eq!(v.inner.current_memory(T::LAYOUT), None); } #[test] @@ -106,22 +106,11 @@ fn zst() { let v: RawVec<ZST> = RawVec::with_capacity_in(100, Global); zst_sanity(&v); - let v: RawVec<ZST> = RawVec::try_allocate_in(0, AllocInit::Uninitialized, Global).unwrap(); - zst_sanity(&v); - - let v: RawVec<ZST> = RawVec::try_allocate_in(100, AllocInit::Uninitialized, Global).unwrap(); - zst_sanity(&v); - - let mut v: RawVec<ZST> = - RawVec::try_allocate_in(usize::MAX, AllocInit::Uninitialized, Global).unwrap(); + let mut v: RawVec<ZST> = RawVec::with_capacity_in(usize::MAX, Global); zst_sanity(&v); // Check all these operations work as expected with zero-sized elements. 
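// A sanity check for the shape of the new `layout_array` helper above: computing the array
// layout from the element layout agrees with the stable `Layout::array` (the real helper uses
// the unstable `Layout::repeat` and maps failures to `CapacityOverflow`; this stand-in just
// returns `None`).
use std::alloc::Layout;

fn layout_array(cap: usize, elem: Layout) -> Option<Layout> {
    let size = elem.size().checked_mul(cap)?;
    Layout::from_size_align(size, elem.align()).ok()
}

fn main() {
    assert_eq!(layout_array(10, Layout::new::<u64>()), Layout::array::<u64>(10).ok());
    assert_eq!(layout_array(0, Layout::new::<u32>()), Layout::array::<u32>(0).ok());
    assert_eq!(layout_array(usize::MAX, Layout::new::<u16>()), None);
}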
- assert!(!v.needs_to_grow(100, usize::MAX - 100)); - assert!(v.needs_to_grow(101, usize::MAX - 100)); - zst_sanity(&v); - v.reserve(100, usize::MAX - 100); //v.reserve(101, usize::MAX - 100); // panics, in `zst_reserve_panic` below zst_sanity(&v); @@ -138,12 +127,12 @@ fn zst() { assert_eq!(v.try_reserve_exact(101, usize::MAX - 100), cap_err); zst_sanity(&v); - assert_eq!(v.grow_amortized(100, usize::MAX - 100), cap_err); - assert_eq!(v.grow_amortized(101, usize::MAX - 100), cap_err); + assert_eq!(v.inner.grow_amortized(100, usize::MAX - 100, ZST::LAYOUT), cap_err); + assert_eq!(v.inner.grow_amortized(101, usize::MAX - 100, ZST::LAYOUT), cap_err); zst_sanity(&v); - assert_eq!(v.grow_exact(100, usize::MAX - 100), cap_err); - assert_eq!(v.grow_exact(101, usize::MAX - 100), cap_err); + assert_eq!(v.inner.grow_exact(100, usize::MAX - 100, ZST::LAYOUT), cap_err); + assert_eq!(v.inner.grow_exact(101, usize::MAX - 100, ZST::LAYOUT), cap_err); zst_sanity(&v); } diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index 124230812df..e628be1546f 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -2643,14 +2643,54 @@ impl ToString for i8 { } } -#[doc(hidden)] +// Generic/generated code can sometimes have multiple, nested references +// for strings, including `&&&str`s that would never be written +// by hand. This macro generates twelve layers of nested `&`-impl +// for primitive strings. #[cfg(not(no_global_oom_handling))] -#[stable(feature = "str_to_string_specialization", since = "1.9.0")] -impl ToString for str { - #[inline] - fn to_string(&self) -> String { - String::from(self) - } +macro_rules! to_string_str_wrap_in_ref { + {x $($x:ident)*} => { + &to_string_str_wrap_in_ref! { $($x)* } + }; + {} => { str }; +} +#[cfg(not(no_global_oom_handling))] +macro_rules! to_string_expr_wrap_in_deref { + {$self:expr ; x $($x:ident)*} => { + *(to_string_expr_wrap_in_deref! { $self ; $($x)* }) + }; + {$self:expr ;} => { $self }; +} +#[cfg(not(no_global_oom_handling))] +macro_rules! to_string_str { + {$($($x:ident)*),+} => { + $( + #[doc(hidden)] + #[stable(feature = "str_to_string_specialization", since = "1.9.0")] + impl ToString for to_string_str_wrap_in_ref!($($x)*) { + #[inline] + fn to_string(&self) -> String { + String::from(to_string_expr_wrap_in_deref!(self ; $($x)*)) + } + } + )+ + }; +} + +#[cfg(not(no_global_oom_handling))] +to_string_str! 
{ + x x x x x x x x x x x x, + x x x x x x x x x x x, + x x x x x x x x x x, + x x x x x x x x x, + x x x x x x x x, + x x x x x x x, + x x x x x x, + x x x x x, + x x x x, + x x x, + x x, + x, } #[doc(hidden)] diff --git a/library/alloc/tests/task.rs b/library/alloc/tests/task.rs index 034039a1eae..390dec14484 100644 --- a/library/alloc/tests/task.rs +++ b/library/alloc/tests/task.rs @@ -4,7 +4,7 @@ use alloc::task::{LocalWake, Wake}; use core::task::{LocalWaker, Waker}; #[test] -#[cfg_attr(miri, should_panic)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it fails +#[cfg_attr(miri, ignore)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it can fail fn test_waker_will_wake_clone() { struct NoopWaker; @@ -20,7 +20,7 @@ fn test_waker_will_wake_clone() { } #[test] -#[cfg_attr(miri, should_panic)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it fails +#[cfg_attr(miri, ignore)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it can fail fn test_local_waker_will_wake_clone() { struct NoopWaker; diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index 5c826b9993f..61c713c9e81 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -889,6 +889,7 @@ impl<T> Guard<'_, T> { } impl<T> Drop for Guard<'_, T> { + #[inline] fn drop(&mut self) { debug_assert!(self.initialized <= self.array_mut.len()); diff --git a/library/core/src/fmt/builders.rs b/library/core/src/fmt/builders.rs index 794ca1851b1..467fa17a6f3 100644 --- a/library/core/src/fmt/builders.rs +++ b/library/core/src/fmt/builders.rs @@ -1018,7 +1018,8 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { } } -/// Implements [`fmt::Debug`] and [`fmt::Display`] using a function. +/// Creates a type whose [`fmt::Debug`] and [`fmt::Display`] impls are provided with the function +/// `f`. /// /// # Examples /// @@ -1030,17 +1031,25 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { /// assert_eq!(format!("{}", value), "a"); /// assert_eq!(format!("{:?}", value), "'a'"); /// -/// let wrapped = fmt::FormatterFn(|f| write!(f, "{value:?}")); +/// let wrapped = fmt::from_fn(|f| write!(f, "{value:?}")); /// assert_eq!(format!("{}", wrapped), "'a'"); /// assert_eq!(format!("{:?}", wrapped), "'a'"); /// ``` #[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub struct FormatterFn<F>(pub F) +pub fn from_fn<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result>(f: F) -> FromFn<F> { + FromFn(f) +} + +/// Implements [`fmt::Debug`] and [`fmt::Display`] using a function. +/// +/// Created with [`from_fn`]. 
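// The `to_string_str!` impls generated above cover the nested reference types (`&str`, `&&str`,
// and so on) that generic or generated code can produce, presumably so they hit the specialized
// `String::from(&str)` fast path rather than the `Display`-based blanket impl. The observable
// result is unchanged either way, as this small check shows:
fn describe<T: ToString>(value: T) -> String {
    value.to_string()
}

fn main() {
    let s = "nested";
    assert_eq!(describe(s), "nested");   // T = &str
    assert_eq!(describe(&&s), "nested"); // T = &&&str
}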
+#[unstable(feature = "debug_closure_helpers", issue = "117729")] +pub struct FromFn<F>(F) where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result; #[unstable(feature = "debug_closure_helpers", issue = "117729")] -impl<F> fmt::Debug for FormatterFn<F> +impl<F> fmt::Debug for FromFn<F> where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, { @@ -1050,7 +1059,7 @@ where } #[unstable(feature = "debug_closure_helpers", issue = "117729")] -impl<F> fmt::Display for FormatterFn<F> +impl<F> fmt::Display for FromFn<F> where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, { diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index 60c0dc76852..45c2b6a6a0f 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -34,7 +34,7 @@ pub enum Alignment { } #[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub use self::builders::FormatterFn; +pub use self::builders::{from_fn, FromFn}; #[stable(feature = "debug_builders", since = "1.2.0")] pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; @@ -1626,6 +1626,11 @@ impl<'a> Formatter<'a> { self.buf.write_str(data) } + /// Glue for usage of the [`write!`] macro with implementors of this trait. + /// + /// This method should generally not be invoked manually, but rather through + /// the [`write!`] macro itself. + /// /// Writes some formatted information into this instance. /// /// # Examples diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 6f1e0cb7471..bd99e90376a 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -2675,12 +2675,12 @@ extern "rust-intrinsic" { #[rustc_nounwind] pub fn catch_unwind(try_fn: fn(*mut u8), data: *mut u8, catch_fn: fn(*mut u8, *mut u8)) -> i32; - /// Emits a `!nontemporal` store according to LLVM (see their docs). - /// Probably will never become stable. + /// Emits a `nontemporal` store, which gives a hint to the CPU that the data should not be held + /// in cache. Except for performance, this is fully equivalent to `ptr.write(val)`. /// - /// Do NOT use this intrinsic; "nontemporal" operations do not exist in our memory model! - /// It exists to support current stdarch, but the plan is to change stdarch and remove this intrinsic. - /// See <https://github.com/rust-lang/rust/issues/114582> for some more discussion. + /// Not all architectures provide such an operation. For instance, x86 does not: while `MOVNT` + /// exists, that operation is *not* equivalent to `ptr.write(val)` (`MOVNT` writes can be reordered + /// in ways that are not allowed for regular writes). 
#[rustc_nounwind] pub fn nontemporal_store<T>(ptr: *mut T, val: T); diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 07daa32afa8..e3640627c56 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -170,6 +170,7 @@ #![feature(internal_impls_macro)] #![feature(ip)] #![feature(is_ascii_octdigit)] +#![feature(is_val_statically_known)] #![feature(isqrt)] #![feature(link_cfg)] #![feature(offset_of_enum)] @@ -192,12 +193,12 @@ // // Language features: // tidy-alphabetical-start +#![cfg_attr(bootstrap, feature(asm_const))] #![cfg_attr(bootstrap, feature(min_exhaustive_patterns))] #![feature(abi_unadjusted)] #![feature(adt_const_params)] #![feature(allow_internal_unsafe)] #![feature(allow_internal_unstable)] -#![feature(asm_const)] #![feature(auto_traits)] #![feature(cfg_sanitize)] #![feature(cfg_target_has_atomic)] diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index 6a83ec2eb1e..374fa086aec 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -1060,7 +1060,7 @@ pub trait FnPtr: Copy + Clone { } /// Derive macro generating impls of traits related to smart pointers. -#[rustc_builtin_macro] +#[rustc_builtin_macro(SmartPointer, attributes(pointee))] #[allow_internal_unstable(dispatch_from_dyn, coerce_unsized, unsize)] #[unstable(feature = "derive_smart_pointer", issue = "123430")] pub macro SmartPointer($item:item) { diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index ea2dcdce6e8..7a9ca4011be 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -5,6 +5,7 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::alloc::Layout; use crate::marker::DiscriminantKind; use crate::{clone, cmp, fmt, hash, intrinsics, ptr}; @@ -1238,6 +1239,10 @@ pub trait SizedTypeProperties: Sized { #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] const IS_ZST: bool = size_of::<Self>() == 0; + + #[doc(hidden)] + #[unstable(feature = "sized_type_properties", issue = "none")] + const LAYOUT: Layout = Layout::new::<Self>(); } #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] diff --git a/library/core/src/net/ip_addr.rs b/library/core/src/net/ip_addr.rs index 3e036b88128..919f681f911 100644 --- a/library/core/src/net/ip_addr.rs +++ b/library/core/src/net/ip_addr.rs @@ -1,6 +1,7 @@ use super::display_buffer::DisplayBuffer; use crate::cmp::Ordering; use crate::fmt::{self, Write}; +use crate::hash::{Hash, Hasher}; use crate::iter; use crate::mem::transmute; use crate::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, Not}; @@ -67,12 +68,22 @@ pub enum IpAddr { /// assert!("0000000.0.0.0".parse::<Ipv4Addr>().is_err()); // first octet is a zero in octal /// assert!("0xcb.0x0.0x71.0x00".parse::<Ipv4Addr>().is_err()); // all octets are in hex /// ``` -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Ipv4Addr { octets: [u8; 4], } +#[stable(feature = "rust1", since = "1.0.0")] +impl Hash for Ipv4Addr { + fn hash<H: Hasher>(&self, state: &mut H) { + // Hashers are often more efficient at hashing a fixed-width integer + // than a bytestring, so convert before hashing. We don't use to_bits() + // here as that may involve a byteswap which is unnecessary. + u32::from_ne_bytes(self.octets).hash(state); + } +} + /// An IPv6 address. /// /// IPv6 addresses are defined as 128-bit integers in [IETF RFC 4291]. 
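// The `Hash` impl added for `Ipv4Addr` above feeds the four octets to the hasher as a single
// native-endian `u32` rather than as a byte slice, since hashers are typically cheaper on
// fixed-width integers. A standalone illustration of the same idea, using the default hasher:
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_octets(octets: [u8; 4]) -> u64 {
    let mut h = DefaultHasher::new();
    u32::from_ne_bytes(octets).hash(&mut h);
    h.finish()
}

fn main() {
    // Deterministic for a given hasher and input.
    assert_eq!(hash_octets([127, 0, 0, 1]), hash_octets([127, 0, 0, 1]));
    assert_ne!(hash_octets([127, 0, 0, 1]), hash_octets([192, 168, 0, 1]));
}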
@@ -149,12 +160,22 @@ pub struct Ipv4Addr { /// assert_eq!("::1".parse(), Ok(localhost)); /// assert_eq!(localhost.is_loopback(), true); /// ``` -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Ipv6Addr { octets: [u8; 16], } +#[stable(feature = "rust1", since = "1.0.0")] +impl Hash for Ipv6Addr { + fn hash<H: Hasher>(&self, state: &mut H) { + // Hashers are often more efficient at hashing a fixed-width integer + // than a bytestring, so convert before hashing. We don't use to_bits() + // here as that may involve unnecessary byteswaps. + u128::from_ne_bytes(self.octets).hash(state); + } +} + /// Scope of an [IPv6 multicast address] as defined in [IETF RFC 7346 section 2]. /// /// # Stability Guarantees diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index dd88e859b30..17cf2a7b261 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -1496,18 +1496,17 @@ macro_rules! int_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = try_opt!(acc.checked_mul(base)); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return Some(acc); + } } exp /= 2; base = try_opt!(base.checked_mul(base)); } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.checked_mul(base) } /// Strict exponentiation. Computes `self.pow(exp)`, panicking if @@ -1547,18 +1546,17 @@ macro_rules! int_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = acc.strict_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } } exp /= 2; base = base.strict_mul(base); } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.strict_mul(base) } /// Returns the square root of the number, rounded down. @@ -2175,6 +2173,7 @@ macro_rules! int_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn wrapping_pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2182,19 +2181,36 @@ macro_rules! int_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc.wrapping_mul(base); + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + } + exp /= 2; + base = base.wrapping_mul(base); } - exp /= 2; - base = base.wrapping_mul(base); - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.wrapping_mul(base) + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary. + acc.wrapping_mul(base) + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. 
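// The reshaped exponentiation-by-squaring loop used in the `pow` family above, as a standalone
// function: the final multiply happens inside the loop once `exp` reaches 1, with no trailing
// multiply after the loop. Per the comments above, this shape is the faster one when the
// exponent is not a compile-time constant.
fn checked_pow_u32(mut base: u32, mut exp: u32) -> Option<u32> {
    if exp == 0 {
        return Some(1);
    }
    let mut acc: u32 = 1;
    loop {
        if (exp & 1) == 1 {
            acc = acc.checked_mul(base)?;
            // since exp != 0, exp eventually reaches 1 on this branch
            if exp == 1 {
                return Some(acc);
            }
        }
        exp /= 2;
        base = base.checked_mul(base)?;
    }
}

fn main() {
    assert_eq!(checked_pow_u32(3, 5), Some(243));
    assert_eq!(checked_pow_u32(2, 31), Some(1 << 31)); // fits exactly
    assert_eq!(checked_pow_u32(2, 32), None);          // genuinely overflows u32
    assert_eq!(checked_pow_u32(7, 0), Some(1));
}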
+ loop { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base.wrapping_mul(base); + } + } } /// Calculates `self` + `rhs`. @@ -2690,9 +2706,14 @@ macro_rules! int_impl { // Scratch space for storing results of overflowing_mul. let mut r; - while exp > 1 { + loop { if (exp & 1) == 1 { r = acc.overflowing_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + r.1 |= overflown; + return r; + } acc = r.0; overflown |= r.1; } @@ -2701,14 +2722,6 @@ macro_rules! int_impl { base = r.0; overflown |= r.1; } - - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - r = acc.overflowing_mul(base); - r.1 |= overflown; - r } /// Raises self to the power of `exp`, using exponentiation by squaring. @@ -2728,6 +2741,7 @@ macro_rules! int_impl { without modifying the original"] #[inline] #[rustc_inherit_overflow_checks] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2735,19 +2749,37 @@ macro_rules! int_impl { let mut base = self; let mut acc = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc * base; + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc * base; + } + exp /= 2; + base = base * base; } - exp /= 2; - base = base * base; - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc * base + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary and may cause a + // needless overflow. + acc * base + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc * base; + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base * base; + } + } } /// Returns the square root of the number, rounded down. diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index a2e17fae768..719a6a55940 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -1622,20 +1622,17 @@ macro_rules! uint_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = try_opt!(acc.checked_mul(base)); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return Some(acc); + } } exp /= 2; base = try_opt!(base.checked_mul(base)); } - - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - - acc.checked_mul(base) } /// Strict exponentiation. Computes `self.pow(exp)`, panicking if @@ -1675,18 +1672,17 @@ macro_rules! uint_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = acc.strict_mul(base); + // since exp!=0, finally the exp must be 1. 
+ if exp == 1 { + return acc; + } } exp /= 2; base = base.strict_mul(base); } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.strict_mul(base) } /// Saturating integer addition. Computes `self + rhs`, saturating at @@ -2138,6 +2134,7 @@ macro_rules! uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn wrapping_pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2145,19 +2142,36 @@ macro_rules! uint_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc.wrapping_mul(base); + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + } + exp /= 2; + base = base.wrapping_mul(base); } - exp /= 2; - base = base.wrapping_mul(base); - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.wrapping_mul(base) + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary. + acc.wrapping_mul(base) + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base.wrapping_mul(base); + } + } } /// Calculates `self` + `rhs`. @@ -2603,9 +2617,14 @@ macro_rules! uint_impl { // Scratch space for storing results of overflowing_mul. let mut r; - while exp > 1 { + loop { if (exp & 1) == 1 { r = acc.overflowing_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + r.1 |= overflown; + return r; + } acc = r.0; overflown |= r.1; } @@ -2614,15 +2633,6 @@ macro_rules! uint_impl { base = r.0; overflown |= r.1; } - - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - r = acc.overflowing_mul(base); - r.1 |= overflown; - - r } /// Raises self to the power of `exp`, using exponentiation by squaring. @@ -2640,6 +2650,7 @@ macro_rules! uint_impl { without modifying the original"] #[inline] #[rustc_inherit_overflow_checks] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2647,19 +2658,37 @@ macro_rules! uint_impl { let mut base = self; let mut acc = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc * base; + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc * base; + } + exp /= 2; + base = base * base; } - exp /= 2; - base = base * base; - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc * base + // since exp!=0, finally the exp must be 1. 
+ // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary and may cause a + // needless overflow. + acc * base + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc * base; + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base * base; + } + } } /// Returns the square root of the number, rounded down. diff --git a/library/core/src/ops/control_flow.rs b/library/core/src/ops/control_flow.rs index a2709c66b06..ab73dc19fcc 100644 --- a/library/core/src/ops/control_flow.rs +++ b/library/core/src/ops/control_flow.rs @@ -116,7 +116,9 @@ impl<B, C> ops::Try for ControlFlow<B, C> { } #[unstable(feature = "try_trait_v2", issue = "84277")] -impl<B, C> ops::FromResidual for ControlFlow<B, C> { +// Note: manually specifying the residual type instead of using the default to work around +// https://github.com/rust-lang/rust/issues/99940 +impl<B, C> ops::FromResidual<ControlFlow<B, convert::Infallible>> for ControlFlow<B, C> { #[inline] fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self { match residual { diff --git a/library/core/src/ops/coroutine.rs b/library/core/src/ops/coroutine.rs index 13df888d24c..c7d596d74c3 100644 --- a/library/core/src/ops/coroutine.rs +++ b/library/core/src/ops/coroutine.rs @@ -69,6 +69,7 @@ pub enum CoroutineState<Y, R> { #[lang = "coroutine"] #[unstable(feature = "coroutine_trait", issue = "43122")] #[fundamental] +#[must_use = "coroutines are lazy and do nothing unless resumed"] pub trait Coroutine<R = ()> { /// The type of value this coroutine yields. /// diff --git a/library/core/src/option.rs b/library/core/src/option.rs index 6c89c810180..9cec79c17ca 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -2495,7 +2495,9 @@ impl<T> ops::Try for Option<T> { } #[unstable(feature = "try_trait_v2", issue = "84277")] -impl<T> ops::FromResidual for Option<T> { +// Note: manually specifying the residual type instead of using the default to work around +// https://github.com/rust-lang/rust/issues/99940 +impl<T> ops::FromResidual<Option<convert::Infallible>> for Option<T> { #[inline] fn from_residual(residual: Option<convert::Infallible>) -> Self { match residual { diff --git a/library/core/src/panic/location.rs b/library/core/src/panic/location.rs index 8c04994ac0f..930edffd505 100644 --- a/library/core/src/panic/location.rs +++ b/library/core/src/panic/location.rs @@ -44,7 +44,7 @@ impl<'a> Location<'a> { /// /// # Examples /// - /// ``` + /// ```standalone /// use std::panic::Location; /// /// /// Returns the [`Location`] at which it is called. diff --git a/library/core/tests/ops.rs b/library/core/tests/ops.rs index 2ee0abd399b..501e0f33fe4 100644 --- a/library/core/tests/ops.rs +++ b/library/core/tests/ops.rs @@ -1,4 +1,5 @@ mod control_flow; +mod from_residual; use core::ops::{ Bound, Deref, DerefMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, diff --git a/library/core/tests/ops/from_residual.rs b/library/core/tests/ops/from_residual.rs new file mode 100644 index 00000000000..d5c86ccbcd3 --- /dev/null +++ b/library/core/tests/ops/from_residual.rs @@ -0,0 +1,26 @@ +//! Regression test that Option and ControlFlow can have downstream FromResidual impls. +//! 
cc https://github.com/rust-lang/rust/issues/99940, +//! This does NOT test that issue in general; Option and ControlFlow's FromResidual +//! impls in core were changed to not be affected by that issue. + +use core::ops::{ControlFlow, FromResidual}; + +struct Local; + +impl<T> FromResidual<Local> for Option<T> { + fn from_residual(_: Local) -> Option<T> { + unimplemented!() + } +} + +impl<B, C> FromResidual<Local> for ControlFlow<B, C> { + fn from_residual(_: Local) -> ControlFlow<B, C> { + unimplemented!() + } +} + +impl<T, E> FromResidual<Local> for Result<T, E> { + fn from_residual(_: Local) -> Result<T, E> { + unimplemented!() + } +} diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs index bc1940ebf32..78d1b137e63 100644 --- a/library/core/tests/ptr.rs +++ b/library/core/tests/ptr.rs @@ -810,9 +810,12 @@ fn ptr_metadata() { assert_ne!(address_1, address_2); // Different erased type => different vtable pointer assert_ne!(address_2, address_3); - // Same erased type and same trait => same vtable pointer - assert_eq!(address_3, address_4); - assert_eq!(address_3, address_5); + // Same erased type and same trait => same vtable pointer. + // This is *not guaranteed*, so we skip it in Miri. + if !cfg!(miri) { + assert_eq!(address_3, address_4); + assert_eq!(address_3, address_5); + } } } diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index 2ce284c85e2..7f23681ee45 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -57,6 +57,9 @@ object = { version = "0.36.0", default-features = false, optional = true, featur 'archive', ] } +[target.'cfg(windows)'.dependencies.windows-targets] +path = "../windows_targets" + [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } rand_xorshift = "0.3.0" @@ -116,7 +119,7 @@ std_detect_env_override = ["std_detect/std_detect_env_override"] # Enable using raw-dylib for Windows imports. # This will eventually be the default. 
-windows_raw_dylib = [] +windows_raw_dylib = ["windows-targets/windows_raw_dylib"] [package.metadata.fortanix-sgx] # Maximum possible number of threads when testing diff --git a/library/std/src/env.rs b/library/std/src/env.rs index 50ae83090c7..80890e61471 100644 --- a/library/std/src/env.rs +++ b/library/std/src/env.rs @@ -355,7 +355,13 @@ impl Error for VarError { /// } /// assert_eq!(env::var(key), Ok("VALUE".to_string())); /// ``` -#[rustc_deprecated_safe_2024] +#[cfg_attr(bootstrap, rustc_deprecated_safe_2024)] +#[cfg_attr( + not(bootstrap), + rustc_deprecated_safe_2024( + audit_that = "the environment access only happens in single-threaded code" + ) +)] #[stable(feature = "env", since = "1.0.0")] pub unsafe fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(key: K, value: V) { let (key, value) = (key.as_ref(), value.as_ref()); @@ -419,7 +425,13 @@ pub unsafe fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(key: K, value: V) { /// } /// assert!(env::var(key).is_err()); /// ``` -#[rustc_deprecated_safe_2024] +#[cfg_attr(bootstrap, rustc_deprecated_safe_2024)] +#[cfg_attr( + not(bootstrap), + rustc_deprecated_safe_2024( + audit_that = "the environment access only happens in single-threaded code" + ) +)] #[stable(feature = "env", since = "1.0.0")] pub unsafe fn remove_var<K: AsRef<OsStr>>(key: K) { let key = key.as_ref(); diff --git a/library/std/src/os/unix/process.rs b/library/std/src/os/unix/process.rs index c53423675bd..46202441d4e 100644 --- a/library/std/src/os/unix/process.rs +++ b/library/std/src/os/unix/process.rs @@ -109,13 +109,21 @@ pub trait CommandExt: Sealed { /// Schedules a closure to be run just before the `exec` function is /// invoked. /// - /// This method is stable and usable, but it should be unsafe. To fix - /// that, it got deprecated in favor of the unsafe [`pre_exec`]. + /// `before_exec` used to be a safe method, but it needs to be unsafe since the closure may only + /// perform operations that are *async-signal-safe*. Hence it got deprecated in favor of the + /// unsafe [`pre_exec`]. Meanwhile, Rust gained the ability to make an existing safe method + /// fully unsafe in a new edition, which is how `before_exec` became `unsafe`. It still also + /// remains deprecated; `pre_exec` should be used instead. 
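// Following the guidance above, new code reaches for `pre_exec` (which has always been `unsafe`)
// instead of the deprecated `before_exec`; the closure may only do async-signal-safe work. Sketch
// only: it assumes a Unix system with a `true` binary on PATH.
#[cfg(unix)]
fn spawn_with_pre_exec() -> std::io::Result<std::process::ExitStatus> {
    use std::os::unix::process::CommandExt;
    use std::process::Command;

    let mut cmd = Command::new("true");
    // SAFETY: the closure performs no allocation, locking, or other
    // async-signal-unsafe operations.
    unsafe {
        cmd.pre_exec(|| Ok(()));
    }
    cmd.status()
}

fn main() {
    #[cfg(unix)]
    spawn_with_pre_exec().expect("failed to run `true`");
}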
/// /// [`pre_exec`]: CommandExt::pre_exec #[stable(feature = "process_exec", since = "1.15.0")] #[deprecated(since = "1.37.0", note = "should be unsafe, use `pre_exec` instead")] - fn before_exec<F>(&mut self, f: F) -> &mut process::Command + #[cfg_attr(bootstrap, rustc_deprecated_safe_2024)] + #[cfg_attr( + not(bootstrap), + rustc_deprecated_safe_2024(audit_that = "the closure is async-signal-safe") + )] + unsafe fn before_exec<F>(&mut self, f: F) -> &mut process::Command where F: FnMut() -> io::Result<()> + Send + Sync + 'static, { diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 4c496ade81c..6f0952c41ed 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -440,13 +440,12 @@ impl BacktraceStyle { } fn from_u8(s: u8) -> Option<Self> { - Some(match s { - 0 => return None, - 1 => BacktraceStyle::Short, - 2 => BacktraceStyle::Full, - 3 => BacktraceStyle::Off, - _ => unreachable!(), - }) + match s { + 1 => Some(BacktraceStyle::Short), + 2 => Some(BacktraceStyle::Full), + 3 => Some(BacktraceStyle::Off), + _ => None, + } } } @@ -465,7 +464,7 @@ static SHOULD_CAPTURE: AtomicU8 = AtomicU8::new(0); pub fn set_backtrace_style(style: BacktraceStyle) { if cfg!(feature = "backtrace") { // If the `backtrace` feature of this crate is enabled, set the backtrace style. - SHOULD_CAPTURE.store(style.as_u8(), Ordering::Release); + SHOULD_CAPTURE.store(style.as_u8(), Ordering::Relaxed); } } @@ -498,7 +497,9 @@ pub fn get_backtrace_style() -> Option<BacktraceStyle> { // to optimize away callers. return None; } - if let Some(style) = BacktraceStyle::from_u8(SHOULD_CAPTURE.load(Ordering::Acquire)) { + + let current = SHOULD_CAPTURE.load(Ordering::Relaxed); + if let Some(style) = BacktraceStyle::from_u8(current) { return Some(style); } @@ -509,8 +510,11 @@ pub fn get_backtrace_style() -> Option<BacktraceStyle> { None if crate::sys::FULL_BACKTRACE_DEFAULT => BacktraceStyle::Full, None => BacktraceStyle::Off, }; - set_backtrace_style(format); - Some(format) + + match SHOULD_CAPTURE.compare_exchange(0, format.as_u8(), Ordering::Relaxed, Ordering::Relaxed) { + Ok(_) => Some(format), + Err(new) => BacktraceStyle::from_u8(new), + } } #[cfg(test)] diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index bdb83f07857..be13e1ae9b3 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -1552,17 +1552,6 @@ impl fmt::Debug for File { None } - #[cfg(any( - target_os = "linux", - target_os = "freebsd", - target_os = "hurd", - target_os = "netbsd", - target_os = "openbsd", - target_os = "vxworks", - target_os = "solaris", - target_os = "illumos", - target_vendor = "apple", - ))] fn get_mode(fd: c_int) -> Option<(bool, bool)> { let mode = unsafe { libc::fcntl(fd, libc::F_GETFL) }; if mode == -1 { @@ -1576,22 +1565,6 @@ impl fmt::Debug for File { } } - #[cfg(not(any( - target_os = "linux", - target_os = "freebsd", - target_os = "hurd", - target_os = "netbsd", - target_os = "openbsd", - target_os = "vxworks", - target_os = "solaris", - target_os = "illumos", - target_vendor = "apple", - )))] - fn get_mode(_fd: c_int) -> Option<(bool, bool)> { - // FIXME(#24570): implement this for other Unix platforms - None - } - let fd = self.as_raw_fd(); let mut b = f.debug_struct("File"); b.field("fd", &fd); diff --git a/library/std/src/sys/pal/unix/rand.rs b/library/std/src/sys/pal/unix/rand.rs index 8a78ea8e7cc..cc0852aab43 100644 --- a/library/std/src/sys/pal/unix/rand.rs +++ b/library/std/src/sys/pal/unix/rand.rs @@ -2,7 +2,9 @@ 
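// The `get_backtrace_style` rework above replaces the unconditional store with a Relaxed
// compare_exchange: racing threads all try to publish the computed default, and the losers adopt
// whichever value actually won. A standalone sketch of that pattern (the style codes here are
// arbitrary stand-ins):
use std::sync::atomic::{AtomicU8, Ordering};

static STYLE: AtomicU8 = AtomicU8::new(0); // 0 = not yet decided

fn style_or_init(default: u8) -> u8 {
    debug_assert_ne!(default, 0);
    let current = STYLE.load(Ordering::Relaxed);
    if current != 0 {
        return current;
    }
    match STYLE.compare_exchange(0, default, Ordering::Relaxed, Ordering::Relaxed) {
        Ok(_) => default,
        Err(winner) => winner,
    }
}

fn main() {
    assert_eq!(style_or_init(2), 2);
    assert_eq!(style_or_init(3), 2); // the first initializer's value sticks
}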
pub fn hashmap_random_keys() -> (u64, u64) { const KEY_LEN: usize = core::mem::size_of::<u64>(); let mut v = [0u8; KEY_LEN * 2]; - imp::fill_bytes(&mut v); + if let Err(err) = read(&mut v) { + panic!("failed to retrieve random hash map seed: {err}"); + } let key1 = v[0..KEY_LEN].try_into().unwrap(); let key2 = v[KEY_LEN..].try_into().unwrap(); @@ -10,27 +12,78 @@ pub fn hashmap_random_keys() -> (u64, u64) { (u64::from_ne_bytes(key1), u64::from_ne_bytes(key2)) } -#[cfg(all( - unix, - not(target_os = "openbsd"), - not(target_os = "netbsd"), - not(target_os = "fuchsia"), - not(target_os = "redox"), - not(target_os = "vxworks"), - not(target_os = "emscripten"), - not(target_os = "vita"), - not(target_vendor = "apple"), +cfg_if::cfg_if! { + if #[cfg(any( + target_vendor = "apple", + target_os = "openbsd", + target_os = "emscripten", + target_os = "vita", + all(target_os = "netbsd", not(netbsd10)), + target_os = "fuchsia", + target_os = "vxworks", + ))] { + // Some systems have a syscall that directly retrieves random data. + // If that is guaranteed to be available, use it. + use imp::syscall as read; + } else { + // Otherwise, try the syscall to see if it exists only on some systems + // and fall back to reading from the random device otherwise. + fn read(bytes: &mut [u8]) -> crate::io::Result<()> { + use crate::fs::File; + use crate::io::Read; + use crate::sync::OnceLock; + + #[cfg(any( + target_os = "linux", + target_os = "android", + target_os = "espidf", + target_os = "horizon", + target_os = "freebsd", + target_os = "dragonfly", + target_os = "solaris", + target_os = "illumos", + netbsd10, + ))] + if let Some(res) = imp::syscall(bytes) { + return res; + } + + const PATH: &'static str = if cfg!(target_os = "redox") { + "/scheme/rand" + } else { + "/dev/urandom" + }; + + static FILE: OnceLock<File> = OnceLock::new(); + + FILE.get_or_try_init(|| File::open(PATH))?.read_exact(bytes) + } + } +} + +// All these systems a `getrandom` syscall. +// +// It is not guaranteed to be available, so return None to fallback to the file +// implementation. +#[cfg(any( + target_os = "linux", + target_os = "android", + target_os = "espidf", + target_os = "horizon", + target_os = "freebsd", + target_os = "dragonfly", + target_os = "solaris", + target_os = "illumos", + netbsd10, ))] mod imp { - use crate::fs::File; - use crate::io::Read; - #[cfg(any(target_os = "linux", target_os = "android"))] - use crate::sys::weak::syscall; + use crate::io::{Error, Result}; + use crate::sync::atomic::{AtomicBool, Ordering}; + use crate::sys::os::errno; #[cfg(any(target_os = "linux", target_os = "android"))] fn getrandom(buf: &mut [u8]) -> libc::ssize_t { - use crate::sync::atomic::{AtomicBool, Ordering}; - use crate::sys::os::errno; + use crate::sys::weak::syscall; // A weak symbol allows interposition, e.g. for perf measurements that want to // disable randomness for consistency. Otherwise, we'll try a raw syscall. 
@@ -59,6 +112,7 @@ mod imp { } #[cfg(any( + target_os = "dragonfly", target_os = "espidf", target_os = "horizon", target_os = "freebsd", @@ -70,51 +124,11 @@ mod imp { unsafe { libc::getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) } } - #[cfg(target_os = "dragonfly")] - fn getrandom(buf: &mut [u8]) -> libc::ssize_t { - extern "C" { - fn getrandom( - buf: *mut libc::c_void, - buflen: libc::size_t, - flags: libc::c_uint, - ) -> libc::ssize_t; - } - unsafe { getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) } - } - - #[cfg(not(any( - target_os = "linux", - target_os = "android", - target_os = "espidf", - target_os = "horizon", - target_os = "freebsd", - target_os = "dragonfly", - target_os = "solaris", - target_os = "illumos", - netbsd10 - )))] - fn getrandom_fill_bytes(_buf: &mut [u8]) -> bool { - false - } - - #[cfg(any( - target_os = "linux", - target_os = "android", - target_os = "espidf", - target_os = "horizon", - target_os = "freebsd", - target_os = "dragonfly", - target_os = "solaris", - target_os = "illumos", - netbsd10 - ))] - fn getrandom_fill_bytes(v: &mut [u8]) -> bool { - use crate::sync::atomic::{AtomicBool, Ordering}; - use crate::sys::os::errno; - + pub fn syscall(v: &mut [u8]) -> Option<Result<()>> { static GETRANDOM_UNAVAILABLE: AtomicBool = AtomicBool::new(false); + if GETRANDOM_UNAVAILABLE.load(Ordering::Relaxed) { - return false; + return None; } let mut read = 0; @@ -125,8 +139,7 @@ mod imp { if err == libc::EINTR { continue; } else if err == libc::ENOSYS || err == libc::EPERM { - // Fall back to reading /dev/urandom if `getrandom` is not - // supported on the current kernel. + // `getrandom` is not supported on the current system. // // Also fall back in case it is disabled by something like // seccomp or inside of docker. @@ -142,123 +155,83 @@ mod imp { // https://github.com/moby/moby/issues/42680 // GETRANDOM_UNAVAILABLE.store(true, Ordering::Relaxed); - return false; + return None; } else if err == libc::EAGAIN { - return false; + // getrandom has failed because it would have blocked as the + // non-blocking pool (urandom) has not been initialized in + // the kernel yet due to a lack of entropy. Fallback to + // reading from `/dev/urandom` which will return potentially + // insecure random data to avoid blocking applications which + // could depend on this call without ever knowing they do and + // don't have a work around. + return None; } else { - panic!("unexpected getrandom error: {err}"); + return Some(Err(Error::from_raw_os_error(err))); } } else { read += result as usize; } } - true - } - - pub fn fill_bytes(v: &mut [u8]) { - // getrandom_fill_bytes here can fail if getrandom() returns EAGAIN, - // meaning it would have blocked because the non-blocking pool (urandom) - // has not initialized in the kernel yet due to a lack of entropy. The - // fallback we do here is to avoid blocking applications which could - // depend on this call without ever knowing they do and don't have a - // work around. The PRNG of /dev/urandom will still be used but over a - // possibly predictable entropy pool. - if getrandom_fill_bytes(v) { - return; - } - // getrandom failed because it is permanently or temporarily (because - // of missing entropy) unavailable. Open /dev/urandom, read from it, - // and close it again. - let mut file = File::open("/dev/urandom").expect("failed to open /dev/urandom"); - file.read_exact(v).expect("failed to read /dev/urandom") + Some(Ok(())) } } -#[cfg(target_vendor = "apple")] +#[cfg(any( + target_os = "macos", // Supported since macOS 10.12+. 
+ target_os = "openbsd", + target_os = "emscripten", + target_os = "vita", +))] mod imp { - use libc::{c_int, c_void, size_t}; - - use crate::io; - - #[inline(always)] - fn random_failure() -> ! { - panic!("unexpected random generation error: {}", io::Error::last_os_error()); - } - - #[cfg(target_os = "macos")] - fn getentropy_fill_bytes(v: &mut [u8]) { - extern "C" { - fn getentropy(bytes: *mut c_void, count: size_t) -> c_int; - } + use crate::io::{Error, Result}; + pub fn syscall(v: &mut [u8]) -> Result<()> { // getentropy(2) permits a maximum buffer size of 256 bytes for s in v.chunks_mut(256) { - let ret = unsafe { getentropy(s.as_mut_ptr().cast(), s.len()) }; + let ret = unsafe { libc::getentropy(s.as_mut_ptr().cast(), s.len()) }; if ret == -1 { - random_failure() + return Err(Error::last_os_error()); } } - } - #[cfg(not(target_os = "macos"))] - fn ccrandom_fill_bytes(v: &mut [u8]) { - extern "C" { - fn CCRandomGenerateBytes(bytes: *mut c_void, count: size_t) -> c_int; - } - - let ret = unsafe { CCRandomGenerateBytes(v.as_mut_ptr().cast(), v.len()) }; - if ret == -1 { - random_failure() - } - } - - pub fn fill_bytes(v: &mut [u8]) { - // All supported versions of macOS (10.12+) support getentropy. - // - // `getentropy` is measurably faster (via Divan) then the other alternatives so its preferred - // when usable. - #[cfg(target_os = "macos")] - getentropy_fill_bytes(v); - - // On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply - // call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes` - // manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on - // its own thread accessed via GCD. This seems needlessly heavyweight for our purposes - // so we only use it on non-Mac OSes where the better entrypoints are blocked. - // - // `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible - // via `libSystem` (libc) while the other needs to link to `Security.framework`. - // - // Note that while `getentropy` has a available attribute in the macOS headers, the lack - // of a header in the iOS (and others) SDK means that its can cause app store rejections. - // Just use `CCRandomGenerateBytes` instead. - #[cfg(not(target_os = "macos"))] - ccrandom_fill_bytes(v); + Ok(()) } } -#[cfg(any(target_os = "openbsd", target_os = "emscripten", target_os = "vita"))] +// On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply +// call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes` +// manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on +// its own thread accessed via GCD. This seems needlessly heavyweight for our purposes +// so we only use it when `getentropy` is blocked, which appears to be the case +// on all platforms except macOS (see #102643). +// +// `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible +// via `libSystem` (libc) while the other needs to link to `Security.framework`. 
+#[cfg(all(target_vendor = "apple", not(target_os = "macos")))] mod imp { - use crate::sys::os::errno; + use libc::size_t; - pub fn fill_bytes(v: &mut [u8]) { - // getentropy(2) permits a maximum buffer size of 256 bytes - for s in v.chunks_mut(256) { - let ret = unsafe { libc::getentropy(s.as_mut_ptr() as *mut libc::c_void, s.len()) }; - if ret == -1 { - panic!("unexpected getentropy error: {}", errno()); - } + use crate::ffi::{c_int, c_void}; + use crate::io::{Error, Result}; + + pub fn syscall(v: &mut [u8]) -> Result<()> { + extern "C" { + fn CCRandomGenerateBytes(bytes: *mut c_void, count: size_t) -> c_int; } + + let ret = unsafe { CCRandomGenerateBytes(v.as_mut_ptr().cast(), v.len()) }; + if ret != -1 { Ok(()) } else { Err(Error::last_os_error()) } } } // FIXME: once the 10.x release becomes the minimum, this can be dropped for simplification. #[cfg(all(target_os = "netbsd", not(netbsd10)))] mod imp { + use crate::io::{Error, Result}; use crate::ptr; - pub fn fill_bytes(v: &mut [u8]) { + pub fn syscall(v: &mut [u8]) -> Result<()> { let mib = [libc::CTL_KERN, libc::KERN_ARND]; // kern.arandom permits a maximum buffer size of 256 bytes for s in v.chunks_mut(256) { @@ -273,39 +246,30 @@ mod imp { 0, ) }; - if ret == -1 || s_len != s.len() { - panic!( - "kern.arandom sysctl failed! (returned {}, s.len() {}, oldlenp {})", - ret, - s.len(), - s_len - ); + if ret == -1 { + return Err(Error::last_os_error()); + } else if s_len != s.len() { + // FIXME(joboet): this can't actually happen, can it? + panic!("read less bytes than requested from kern.arandom"); } } + + Ok(()) } } #[cfg(target_os = "fuchsia")] mod imp { + use crate::io::Result; + #[link(name = "zircon")] extern "C" { fn zx_cprng_draw(buffer: *mut u8, len: usize); } - pub fn fill_bytes(v: &mut [u8]) { - unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) } - } -} - -#[cfg(target_os = "redox")] -mod imp { - use crate::fs::File; - use crate::io::Read; - - pub fn fill_bytes(v: &mut [u8]) { - // Open rand:, read from it, and close it again. 
- let mut file = File::open("rand:").expect("failed to open rand:"); - file.read_exact(v).expect("failed to read rand:") + pub fn syscall(v: &mut [u8]) -> Result<()> { + unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) }; + Ok(()) } } @@ -314,25 +278,25 @@ mod imp { use core::sync::atomic::AtomicBool; use core::sync::atomic::Ordering::Relaxed; - use crate::io; + use crate::io::{Error, Result}; - pub fn fill_bytes(v: &mut [u8]) { + pub fn syscall(v: &mut [u8]) -> Result<()> { static RNG_INIT: AtomicBool = AtomicBool::new(false); while !RNG_INIT.load(Relaxed) { let ret = unsafe { libc::randSecure() }; if ret < 0 { - panic!("couldn't generate random bytes: {}", io::Error::last_os_error()); + return Err(Error::last_os_error()); } else if ret > 0 { RNG_INIT.store(true, Relaxed); break; } + unsafe { libc::usleep(10) }; } + let ret = unsafe { libc::randABytes(v.as_mut_ptr() as *mut libc::c_uchar, v.len() as libc::c_int) }; - if ret < 0 { - panic!("couldn't generate random bytes: {}", io::Error::last_os_error()); - } + if ret >= 0 { Ok(()) } else { Err(Error::last_os_error()) } } } diff --git a/library/std/src/sys/pal/windows/alloc.rs b/library/std/src/sys/pal/windows/alloc.rs index 92b68b26032..2205885687d 100644 --- a/library/std/src/sys/pal/windows/alloc.rs +++ b/library/std/src/sys/pal/windows/alloc.rs @@ -4,7 +4,7 @@ use crate::alloc::{GlobalAlloc, Layout, System}; use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicPtr, Ordering}; -use crate::sys::c::{self, windows_targets}; +use crate::sys::c; use crate::sys::common::alloc::{realloc_fallback, MIN_ALIGN}; #[cfg(test)] diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 08b75186aef..2f5d75dc4bc 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -8,8 +8,6 @@ use core::ffi::{c_uint, c_ulong, c_ushort, c_void, CStr}; use core::{mem, ptr}; -pub(super) mod windows_targets; - mod windows_sys; pub use windows_sys::*; diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index 9f22f548195..529c96a0e1e 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -3317,4 +3317,3 @@ pub struct WSADATA { #[cfg(target_arch = "arm")] pub enum CONTEXT {} // ignore-tidy-filelength -use super::windows_targets; diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index 59720f77465..88b31cd78a6 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -434,25 +434,24 @@ impl Builder { /// /// [`io::Result`]: crate::io::Result #[unstable(feature = "thread_spawn_unchecked", issue = "55132")] - pub unsafe fn spawn_unchecked<'a, F, T>(self, f: F) -> io::Result<JoinHandle<T>> + pub unsafe fn spawn_unchecked<F, T>(self, f: F) -> io::Result<JoinHandle<T>> where F: FnOnce() -> T, - F: Send + 'a, - T: Send + 'a, + F: Send, + T: Send, { Ok(JoinHandle(unsafe { self.spawn_unchecked_(f, None) }?)) } - unsafe fn spawn_unchecked_<'a, 'scope, F, T>( + unsafe fn spawn_unchecked_<'scope, F, T>( self, f: F, scope_data: Option<Arc<scoped::ScopeData>>, ) -> io::Result<JoinInner<'scope, T>> where F: FnOnce() -> T, - F: Send + 'a, - T: Send + 'a, - 'scope: 'a, + F: Send, + T: Send, { let Builder { name, stack_size } = self; @@ -532,7 +531,7 @@ impl Builder { // will call `decrement_num_running_threads` and therefore signal that this thread is // done. 
drop(their_packet); - // Here, the lifetime `'a` and even `'scope` can end. `main` keeps running for a bit + // Here, the lifetime `'scope` can end. `main` keeps running for a bit // after that before returning itself. }; diff --git a/library/test/src/types.rs b/library/test/src/types.rs index c3be3466cb9..802cab989c6 100644 --- a/library/test/src/types.rs +++ b/library/test/src/types.rs @@ -250,3 +250,37 @@ pub struct TestDescAndFn { pub desc: TestDesc, pub testfn: TestFn, } + +impl TestDescAndFn { + pub const fn new_doctest( + test_name: &'static str, + ignore: bool, + source_file: &'static str, + start_line: usize, + no_run: bool, + should_panic: bool, + testfn: TestFn, + ) -> Self { + Self { + desc: TestDesc { + name: StaticTestName(test_name), + ignore, + ignore_message: None, + source_file, + start_line, + start_col: 0, + end_line: 0, + end_col: 0, + compile_fail: false, + no_run, + should_panic: if should_panic { + options::ShouldPanic::Yes + } else { + options::ShouldPanic::No + }, + test_type: TestType::DocTest, + }, + testfn, + } + } +} diff --git a/library/windows_targets/Cargo.toml b/library/windows_targets/Cargo.toml new file mode 100644 index 00000000000..94d7c821064 --- /dev/null +++ b/library/windows_targets/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "windows-targets" +description = "A drop-in replacement for the real windows-targets crate for use in std only." +version = "0.0.0" +edition = "2021" + +[features] +# Enable using raw-dylib for Windows imports. +# This will eventually be the default. +windows_raw_dylib = [] diff --git a/library/std/src/sys/pal/windows/c/windows_targets.rs b/library/windows_targets/src/lib.rs index 252bceb7094..1965b6cf4ce 100644 --- a/library/std/src/sys/pal/windows/c/windows_targets.rs +++ b/library/windows_targets/src/lib.rs @@ -2,6 +2,10 @@ //! //! This is a simple wrapper around an `extern` block with a `#[link]` attribute. //! It's very roughly equivalent to the windows-targets crate. +#![no_std] +#![no_core] +#![feature(decl_macro)] +#![feature(no_core)] #[cfg(feature = "windows_raw_dylib")] pub macro link { diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index dc8b5487a61..f03f03e2d93 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -11,13 +11,19 @@ use std::str::FromStr; use std::{env, process}; use bootstrap::{ - find_recent_config_change_ids, human_readable_changes, t, Build, Config, Subcommand, + find_recent_config_change_ids, human_readable_changes, t, Build, Config, Flags, Subcommand, CONFIG_CHANGE_HISTORY, }; fn main() { let args = env::args().skip(1).collect::<Vec<_>>(); - let config = Config::parse(&args); + + if Flags::try_parse_verbose_help(&args) { + return; + } + + let flags = Flags::parse(&args); + let config = Config::parse(flags); let mut build_lock; let _build_lock_guard; diff --git a/src/bootstrap/src/bin/rustc.rs b/src/bootstrap/src/bin/rustc.rs index 011c289d932..d04e2fbeb78 100644 --- a/src/bootstrap/src/bin/rustc.rs +++ b/src/bootstrap/src/bin/rustc.rs @@ -89,6 +89,25 @@ fn main() { rustc_real }; + // Get the name of the crate we're compiling, if any. 
+ let crate_name = parse_value_from_args(&orig_args, "--crate-name"); + + // When statically linking `std` into `rustc_driver`, remove `-C prefer-dynamic` + if env::var("RUSTC_LINK_STD_INTO_RUSTC_DRIVER").unwrap() == "1" + && crate_name == Some("rustc_driver") + && stage != "0" + { + if let Some(pos) = args.iter().enumerate().position(|(i, a)| { + a == "-C" && args.get(i + 1).map(|a| a == "prefer-dynamic").unwrap_or(false) + }) { + args.remove(pos); + args.remove(pos); + } + if let Some(pos) = args.iter().position(|a| a == "-Cprefer-dynamic") { + args.remove(pos); + } + } + let mut cmd = match env::var_os("RUSTC_WRAPPER_REAL") { Some(wrapper) if !wrapper.is_empty() => { let mut cmd = Command::new(wrapper); @@ -99,9 +118,6 @@ fn main() { }; cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - // Get the name of the crate we're compiling, if any. - let crate_name = parse_value_from_args(&orig_args, "--crate-name"); - if let Some(crate_name) = crate_name { if let Some(target) = env::var_os("RUSTC_TIME") { if target == "all" diff --git a/src/bootstrap/src/core/build_steps/clean.rs b/src/bootstrap/src/core/build_steps/clean.rs index 49310367182..f608e5d715e 100644 --- a/src/bootstrap/src/core/build_steps/clean.rs +++ b/src/bootstrap/src/core/build_steps/clean.rs @@ -121,7 +121,7 @@ fn clean(build: &Build, all: bool, stage: Option<u32>) { fn clean_specific_stage(build: &Build, stage: u32) { for host in &build.hosts { - let entries = match build.out.join(host.triple).read_dir() { + let entries = match build.out.join(host).read_dir() { Ok(iter) => iter, Err(_) => continue, }; @@ -148,7 +148,7 @@ fn clean_default(build: &Build) { rm_rf(&build.out.join("bootstrap-shims-dump")); rm_rf(&build.out.join("rustfmt.stamp")); - let mut hosts: Vec<_> = build.hosts.iter().map(|t| build.out.join(t.triple)).collect(); + let mut hosts: Vec<_> = build.hosts.iter().map(|t| build.out.join(t)).collect(); // After cross-compilation, artifacts of the host architecture (which may differ from build.host) // might not get removed. // Adding its path (linked one for easier accessibility) will solve this problem. 
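The `.join(host)` and `.join(t)` call sites in clean.rs above, like the many `.triple`-dropping hunks in the files that follow, compile because `Path::join` accepts any `impl AsRef<Path>` and this changeset adds an `AsRef<Path>` impl for `TargetSelection` (see the config.rs hunk further down). A minimal, self-contained sketch of that pattern, using a simplified stand-in for bootstrap's real `TargetSelection` type:

```rust
use std::path::{Path, PathBuf};

// Simplified stand-in for bootstrap's `TargetSelection`; only the field
// needed to demonstrate the `AsRef<Path>` pattern is included.
struct TargetSelection {
    triple: &'static str,
}

// With this impl, `Path::join(target)` works directly, so call sites no
// longer need to spell out `.join(target.triple)`.
impl AsRef<Path> for TargetSelection {
    fn as_ref(&self) -> &Path {
        self.triple.as_ref()
    }
}

fn host_dir(out: &Path, host: &TargetSelection) -> PathBuf {
    // Before the impl, this line would have been `out.join(host.triple)`.
    out.join(host)
}

fn main() {
    let host = TargetSelection { triple: "x86_64-unknown-linux-gnu" };
    let dir = host_dir(Path::new("build"), &host);
    assert_eq!(dir, Path::new("build").join("x86_64-unknown-linux-gnu"));
}
```

The blanket `impl<T: AsRef<U> + ?Sized, U: ?Sized> AsRef<U> for &T` in the standard library is what additionally lets a reference like `&TargetSelection` be passed to `join`, which is why both owned and borrowed targets work at the updated call sites.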
diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index c09180e542f..4353cfadd8d 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -246,7 +246,7 @@ impl Step for Std { .rustc_snapshot_sysroot() .join("lib") .join("rustlib") - .join(compiler.host.triple) + .join(compiler.host) .join("bin"); if src_sysroot_bin.exists() { let target_sysroot_bin = @@ -432,7 +432,7 @@ fn copy_self_contained_objects( DependencyType::TargetSelfContained, ); } - } else if target.ends_with("windows-gnu") { + } else if target.is_windows_gnu() { for obj in ["crt2.o", "dllcrt2.o"].iter() { let src = compiler_file(builder, &builder.cc(target), target, CLang::C, obj); let target = libdir_self_contained.join(obj); @@ -651,8 +651,8 @@ impl Step for StdLink { compiler: self.compiler, force_recompile: self.force_recompile, }); - let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib"); - let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib"); + let libdir = sysroot.join(lib).join("rustlib").join(target).join("lib"); + let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host).join("lib"); (libdir, hostdir) } else { let libdir = builder.sysroot_libdir(target_compiler, target); @@ -670,12 +670,12 @@ impl Step for StdLink { .build .config .initial_rustc - .starts_with(builder.out.join(compiler.host.triple).join("stage0/bin")) + .starts_with(builder.out.join(compiler.host).join("stage0/bin")) { // Copy bin files from stage0/bin to stage0-sysroot/bin - let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot"); + let sysroot = builder.out.join(compiler.host).join("stage0-sysroot"); - let host = compiler.host.triple; + let host = compiler.host; let stage0_bin_dir = builder.out.join(host).join("stage0/bin"); let sysroot_bin_dir = sysroot.join("bin"); t!(fs::create_dir_all(&sysroot_bin_dir)); @@ -793,7 +793,7 @@ impl Step for StartupObjects { fn run(self, builder: &Builder<'_>) -> Vec<(PathBuf, DependencyType)> { let for_compiler = self.compiler; let target = self.target; - if !target.ends_with("windows-gnu") { + if !target.is_windows_gnu() { return vec![]; } @@ -1554,7 +1554,7 @@ impl Step for Sysroot { /// For all other stages, it's the same stage directory that the compiler lives in. 
fn run(self, builder: &Builder<'_>) -> PathBuf { let compiler = self.compiler; - let host_dir = builder.out.join(compiler.host.triple); + let host_dir = builder.out.join(compiler.host); let sysroot_dir = |stage| { if stage == 0 { diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 43306eab1b1..530eb9b446a 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -275,12 +275,8 @@ fn make_win_dist( } //Copy platform tools to platform-specific bin directory - let target_bin_dir = plat_root - .join("lib") - .join("rustlib") - .join(target.triple) - .join("bin") - .join("self-contained"); + let target_bin_dir = + plat_root.join("lib").join("rustlib").join(target).join("bin").join("self-contained"); fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed"); for src in target_tools { builder.copy_link_to_folder(&src, &target_bin_dir); @@ -295,12 +291,8 @@ fn make_win_dist( ); //Copy platform libs to platform-specific lib directory - let target_lib_dir = plat_root - .join("lib") - .join("rustlib") - .join(target.triple) - .join("lib") - .join("self-contained"); + let target_lib_dir = + plat_root.join("lib").join("rustlib").join(target).join("lib").join("self-contained"); fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed"); for src in target_libs { builder.copy_link_to_folder(&src, &target_lib_dir); @@ -450,7 +442,7 @@ impl Step for Rustc { // component for now. maybe_install_llvm_runtime(builder, host, image); - let dst_dir = image.join("lib/rustlib").join(&*host.triple).join("bin"); + let dst_dir = image.join("lib/rustlib").join(host).join("bin"); t!(fs::create_dir_all(&dst_dir)); // Copy over lld if it's there @@ -607,7 +599,7 @@ fn verify_uefi_rlib_format(builder: &Builder<'_>, target: TargetSelection, stamp /// Copy stamped files into an image's `target/lib` directory. 
fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) { - let dst = image.join("lib/rustlib").join(target.triple).join("lib"); + let dst = image.join("lib/rustlib").join(target).join("lib"); let self_contained_dst = dst.join("self-contained"); t!(fs::create_dir_all(&dst)); t!(fs::create_dir_all(&self_contained_dst)); @@ -769,7 +761,7 @@ impl Step for Analysis { let src = builder .stage_out(compiler, Mode::Std) - .join(target.triple) + .join(target) .join(builder.cargo_dir()) .join("deps") .join("save-analysis"); @@ -1509,7 +1501,7 @@ impl Step for Extended { tarballs.push(builder.ensure(Rustc { compiler: builder.compiler(stage, target) })); tarballs.push(builder.ensure(Std { compiler, target }).expect("missing std")); - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { tarballs.push(builder.ensure(Mingw { host: target }).expect("missing mingw")); } @@ -1683,7 +1675,7 @@ impl Step for Extended { prepare(tool); } } - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { prepare("rust-mingw"); } @@ -1830,7 +1822,7 @@ impl Step for Extended { .arg("-t") .arg(etc.join("msi/remove-duplicates.xsl")) .run(builder); - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { command(&heat) .current_dir(&exe) .arg("dir") @@ -1876,7 +1868,7 @@ impl Step for Extended { if built_tools.contains("miri") { cmd.arg("-dMiriDir=miri"); } - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { cmd.arg("-dGccDir=rust-mingw"); } cmd.run(builder); @@ -1901,7 +1893,7 @@ impl Step for Extended { } candle("AnalysisGroup.wxs".as_ref()); - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { candle("GccGroup.wxs".as_ref()); } @@ -1941,7 +1933,7 @@ impl Step for Extended { cmd.arg("DocsGroup.wixobj"); } - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { cmd.arg("GccGroup.wixobj"); } // ICE57 wrongly complains about the shortcuts @@ -1973,7 +1965,7 @@ fn add_env(builder: &Builder<'_>, cmd: &mut BootstrapCommand, target: TargetSele if target.contains("windows-gnullvm") { cmd.env("CFG_MINGW", "1").env("CFG_ABI", "LLVM"); - } else if target.contains("windows-gnu") { + } else if target.is_windows_gnu() { cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU"); } else { cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC"); @@ -2087,7 +2079,7 @@ fn maybe_install_llvm( /// Maybe add libLLVM.so to the target lib-dir for linking. pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { - let dst_libdir = sysroot.join("lib/rustlib").join(&*target.triple).join("lib"); + let dst_libdir = sysroot.join("lib/rustlib").join(target).join("lib"); // We do not need to copy LLVM files into the sysroot if it is not // dynamically linked; it is already included into librustc_llvm // statically. 
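Several hunks above replace `target.ends_with("windows-gnu")` with the new `target.is_windows_gnu()` helper, whose definition appears in the config.rs hunk further down. A minimal sketch (again with a simplified stand-in type, not bootstrap's real one) of why a suffix check is the right shape here: it matches the `*-windows-gnu` triples while leaving the `*-windows-gnullvm` targets to the separate `contains("windows-gnullvm")` branch seen in `add_env`:

```rust
// Simplified stand-in for bootstrap's `TargetSelection`, just enough to
// exercise the suffix check used by `is_windows_gnu`.
struct TargetSelection {
    triple: String,
}

impl TargetSelection {
    fn is_windows_gnu(&self) -> bool {
        self.triple.ends_with("windows-gnu")
    }
}

fn main() {
    let gnu = TargetSelection { triple: "x86_64-pc-windows-gnu".to_string() };
    let gnullvm = TargetSelection { triple: "x86_64-pc-windows-gnullvm".to_string() };

    // The plain MinGW target matches the suffix...
    assert!(gnu.is_windows_gnu());
    // ...but the LLVM-based triple does not end in "windows-gnu", so it is
    // not treated as windows-gnu by this helper.
    assert!(!gnullvm.is_windows_gnu());
}
```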
diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index 2cd5db706c2..301633559fe 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -699,13 +699,12 @@ fn doc_std( let compiler = builder.compiler(stage, builder.config.build); let target_doc_dir_name = if format == DocumentationFormat::Json { "json-doc" } else { "doc" }; - let target_dir = - builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name); + let target_dir = builder.stage_out(compiler, Mode::Std).join(target).join(target_doc_dir_name); // This is directory where the compiler will place the output of the command. // We will then copy the files from this directory into the final `out` directory, the specified // as a function parameter. - let out_dir = target_dir.join(target.triple).join("doc"); + let out_dir = target_dir.join(target).join("doc"); let mut cargo = builder::Cargo::new(builder, compiler, Mode::Std, SourceType::InTree, target, Kind::Doc); @@ -846,7 +845,7 @@ impl Step for Rustc { let mut to_open = None; - let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc"); + let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target).join("doc"); for krate in &*self.crates { // Create all crate output directories first to make sure rustdoc uses // relative links. @@ -992,7 +991,7 @@ macro_rules! tool_doc { // see https://github.com/rust-lang/rust/pull/122066#issuecomment-1983049222 // cargo.rustdocflag("--generate-link-to-definition"); - let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc"); + let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target).join("doc"); $(for krate in $crates { let dir_name = krate.replace("-", "_"); t!(fs::create_dir_all(out_dir.join(&*dir_name))); diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 597d7733abe..cc01afd4c18 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -149,7 +149,7 @@ You can skip linkcheck with --skip src/tools/linkchecker" let _guard = builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host); let _time = helpers::timeit(builder); - linkchecker.delay_failure().arg(builder.out.join(host.triple).join("doc")).run(builder); + linkchecker.delay_failure().arg(builder.out.join(host).join("doc")).run(builder); } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -434,8 +434,8 @@ impl Miri { builder: &Builder<'_>, compiler: Compiler, target: TargetSelection, - ) -> String { - let miri_sysroot = builder.out.join(compiler.host.triple).join("miri-sysroot"); + ) -> PathBuf { + let miri_sysroot = builder.out.join(compiler.host).join("miri-sysroot"); let mut cargo = builder::Cargo::new( builder, compiler, @@ -467,7 +467,7 @@ impl Miri { // Output is "<sysroot>\n". let sysroot = stdout.trim_end(); builder.verbose(|| println!("`cargo miri setup --print-sysroot` said: {sysroot:?}")); - sysroot.to_owned() + PathBuf::from(sysroot) } } @@ -520,12 +520,14 @@ impl Step for Miri { builder.ensure(compile::Std::new(target_compiler, host)); let host_sysroot = builder.sysroot(target_compiler); - // Miri has its own "target dir" for ui test dependencies. Make sure it gets cleared - // properly when rustc changes. Similar to `Builder::cargo`, we skip this in dry runs to - // make sure the relevant compiler has been set up properly. 
+ // Miri has its own "target dir" for ui test dependencies. Make sure it gets cleared when + // the sysroot gets rebuilt, to avoid "found possibly newer version of crate `std`" errors. if !builder.config.dry_run() { let ui_test_dep_dir = builder.stage_out(host_compiler, Mode::ToolStd).join("miri_ui"); - builder.clear_if_dirty(&ui_test_dep_dir, &builder.rustc(host_compiler)); + // The mtime of `miri_sysroot` changes when the sysroot gets rebuilt (also see + // <https://github.com/RalfJung/rustc-build-sysroot/commit/10ebcf60b80fe2c3dc765af0ff19fdc0da4b7466>). + // We can hence use that directly as a signal to clear the ui test dir. + builder.clear_if_dirty(&ui_test_dep_dir, &miri_sysroot); } // Run `cargo test`. @@ -1113,7 +1115,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to } fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf { - builder.out.join(host.triple).join("test") + builder.out.join(host).join("test") } macro_rules! default_test { @@ -1815,7 +1817,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); - flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string())); + flags.extend(builder.config.cmd.compiletest_rustc_args().iter().map(|s| s.to_string())); if suite != "mir-opt" { if let Some(linker) = builder.linker(target) { @@ -2683,7 +2685,7 @@ impl Step for Crate { if builder.download_rustc() && compiler.stage > 0 { let sysroot = builder .out - .join(compiler.host.triple) + .join(compiler.host) .join(format!("stage{}-test-sysroot", compiler.stage)); cargo.env("RUSTC_SYSROOT", sysroot); } diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 84c23c059e9..454cc20d155 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -1106,6 +1106,12 @@ impl<'a> Builder<'a> { StepDescription::run(v, self, paths); } + /// Returns if `std` should be statically linked into `rustc_driver`. + /// It's currently not done on `windows-gnu` due to linker bugs. + pub fn link_std_into_rustc_driver(&self, target: TargetSelection) -> bool { + !target.triple.ends_with("-windows-gnu") + } + /// Obtain a compiler at a given stage and for a given host (i.e., this is the target that the /// compiler will run on, *not* the target it will build code for). Explicitly does not take /// `Compiler` since all `Compiler` instances are meant to be obtained through this function, @@ -1165,7 +1171,7 @@ impl<'a> Builder<'a> { .sysroot(self.compiler) .join(lib) .join("rustlib") - .join(self.target.triple) + .join(self.target) .join("lib"); // Avoid deleting the rustlib/ directory we just copied // (in `impl Step for Sysroot`). @@ -1248,7 +1254,7 @@ impl<'a> Builder<'a> { // Ensure that the downloaded LLVM libraries can be found. 
if self.config.llvm_from_ci { - let ci_llvm_lib = self.out.join(&*compiler.host.triple).join("ci-llvm").join("lib"); + let ci_llvm_lib = self.out.join(compiler.host).join("ci-llvm").join("lib"); dylib_dirs.push(ci_llvm_lib); } @@ -1498,9 +1504,9 @@ impl<'a> Builder<'a> { Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target), Mode::Std => { if self.config.cmd.json() { - out_dir.join(target.triple).join("json-doc") + out_dir.join(target).join("json-doc") } else { - out_dir.join(target.triple).join("doc") + out_dir.join(target).join("doc") } } _ => panic!("doc mode {mode:?} not expected"), @@ -2162,10 +2168,18 @@ impl<'a> Builder<'a> { // When we build Rust dylibs they're all intended for intermediate // usage, so make sure we pass the -Cprefer-dynamic flag instead of // linking all deps statically into the dylib. - if matches!(mode, Mode::Std | Mode::Rustc) { + if matches!(mode, Mode::Std) { + rustflags.arg("-Cprefer-dynamic"); + } + if matches!(mode, Mode::Rustc) && !self.link_std_into_rustc_driver(target) { rustflags.arg("-Cprefer-dynamic"); } + cargo.env( + "RUSTC_LINK_STD_INTO_RUSTC_DRIVER", + if self.link_std_into_rustc_driver(target) { "1" } else { "0" }, + ); + // When building incrementally we default to a lower ThinLTO import limit // (unless explicitly specified otherwise). This will produce a somewhat // slower code but give way better compile times. @@ -2212,11 +2226,6 @@ impl<'a> Builder<'a> { rustdocflags.arg("--cfg=parallel_compiler"); } - // Pass the value of `--rustc-args` from test command. If it's not a test command, this won't set anything. - self.config.cmd.rustc_args().iter().for_each(|v| { - rustflags.arg(v); - }); - Cargo { command: cargo, compiler, diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index f19a4dd6d49..c554684d5a7 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -3,13 +3,14 @@ use std::thread; use super::*; use crate::core::build_steps::doc::DocumentationFormat; use crate::core::config::Config; +use crate::Flags; fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config { configure_with_args(&[cmd.to_owned()], host, target) } fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config { - let mut config = Config::parse(cmd); + let mut config = Config::parse(Flags::parse(cmd)); // don't save toolstates config.save_toolstates = None; config.dry_run = DryRun::SelfCheck; @@ -23,7 +24,7 @@ fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config let submodule_build = Build::new(Config { // don't include LLVM, so CI doesn't require ninja/cmake to be installed rust_codegen_backends: vec![], - ..Config::parse(&["check".to_owned()]) + ..Config::parse(Flags::parse(&["check".to_owned()])) }); submodule_build.require_submodule("src/doc/book", None); config.submodules = Some(false); @@ -632,7 +633,7 @@ mod dist { config.paths = vec!["library/std".into()]; config.cmd = Subcommand::Test { test_args: vec![], - rustc_args: vec![], + compiletest_rustc_args: vec![], no_fail_fast: false, no_doc: true, doc: false, @@ -703,7 +704,7 @@ mod dist { let mut config = configure(&["A-A"], &["A-A"]); config.cmd = Subcommand::Test { test_args: vec![], - rustc_args: vec![], + compiletest_rustc_args: vec![], no_fail_fast: false, doc: true, no_doc: false, diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 915fcb449e8..36de8324f67 100644 --- 
a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -514,6 +514,10 @@ impl TargetSelection { self.contains("windows") } + pub fn is_windows_gnu(&self) -> bool { + self.ends_with("windows-gnu") + } + /// Path to the file defining the custom target, if any. pub fn filepath(&self) -> Option<&Path> { self.file.as_ref().map(Path::new) @@ -542,6 +546,14 @@ impl PartialEq<&str> for TargetSelection { } } +// Targets are often used as directory names throughout bootstrap. +// This impl makes it more ergonomics to use them as such. +impl AsRef<Path> for TargetSelection { + fn as_ref(&self) -> &Path { + self.triple.as_ref() + } +} + /// Per-target configuration stored in the global configuration structure. #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct Target { @@ -1191,7 +1203,7 @@ impl Config { } } - pub fn parse(args: &[String]) -> Config { + pub fn parse(flags: Flags) -> Config { #[cfg(test)] fn get_toml(_: &Path) -> TomlConfig { TomlConfig::default() @@ -1221,11 +1233,10 @@ impl Config { exit!(2); }) } - Self::parse_inner(args, get_toml) + Self::parse_inner(flags, get_toml) } - pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config { - let mut flags = Flags::parse(args); + pub(crate) fn parse_inner(mut flags: Flags, get_toml: impl Fn(&Path) -> TomlConfig) -> Config { let mut config = Config::default_opts(); // Set flags. @@ -1470,7 +1481,7 @@ impl Config { config.download_beta_toolchain(); config .out - .join(config.build.triple) + .join(config.build) .join("stage0") .join("bin") .join(exe("rustc", config.build)) @@ -1485,7 +1496,7 @@ impl Config { config.download_beta_toolchain(); config .out - .join(config.build.triple) + .join(config.build) .join("stage0") .join("bin") .join(exe("cargo", config.build)) @@ -2278,13 +2289,13 @@ impl Config { /// The absolute path to the downloaded LLVM artifacts. pub(crate) fn ci_llvm_root(&self) -> PathBuf { assert!(self.llvm_from_ci); - self.out.join(&*self.build.triple).join("ci-llvm") + self.out.join(self.build).join("ci-llvm") } /// Directory where the extracted `rustc-dev` component is stored. pub(crate) fn ci_rustc_dir(&self) -> PathBuf { assert!(self.download_rustc()); - self.out.join(self.build.triple).join("ci-rustc") + self.out.join(self.build).join("ci-rustc") } /// Determine whether llvm should be linked dynamically. diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index 19f752da81c..c3f17402814 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -183,9 +183,9 @@ pub struct Flags { } impl Flags { - pub fn parse(args: &[String]) -> Self { - let first = String::from("x.py"); - let it = std::iter::once(&first).chain(args.iter()); + /// Check if `<cmd> -h -v` was passed. + /// If yes, print the available paths and return `true`. 
+ pub fn try_parse_verbose_help(args: &[String]) -> bool { // We need to check for `<cmd> -h -v`, in which case we list the paths #[derive(Parser)] #[command(disable_help_flag(true))] @@ -198,10 +198,10 @@ impl Flags { cmd: Kind, } if let Ok(HelpVerboseOnly { help: true, verbose: 1.., cmd: subcommand }) = - HelpVerboseOnly::try_parse_from(it.clone()) + HelpVerboseOnly::try_parse_from(normalize_args(args)) { println!("NOTE: updating submodules before printing available paths"); - let config = Config::parse(&[String::from("build")]); + let config = Config::parse(Self::parse(&[String::from("build")])); let build = Build::new(config); let paths = Builder::get_help(&build, subcommand); if let Some(s) = paths { @@ -209,13 +209,23 @@ impl Flags { } else { panic!("No paths available for subcommand `{}`", subcommand.as_str()); } - crate::exit!(0); + true + } else { + false } + } - Flags::parse_from(it) + pub fn parse(args: &[String]) -> Self { + Flags::parse_from(normalize_args(args)) } } +fn normalize_args(args: &[String]) -> Vec<String> { + let first = String::from("x.py"); + let it = std::iter::once(first).chain(args.iter().cloned()); + it.collect() +} + #[derive(Debug, Clone, Default, clap::Subcommand)] pub enum Subcommand { #[command(aliases = ["b"], long_about = "\n @@ -347,9 +357,9 @@ pub enum Subcommand { /// extra arguments to be passed for the test tool being used /// (e.g. libtest, compiletest or rustdoc) test_args: Vec<String>, - /// extra options to pass the compiler when running tests + /// extra options to pass the compiler when running compiletest tests #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] - rustc_args: Vec<String>, + compiletest_rustc_args: Vec<String>, #[arg(long)] /// do not run doc tests no_doc: bool, @@ -392,9 +402,6 @@ pub enum Subcommand { /// extra arguments to be passed for the test tool being used /// (e.g. libtest, compiletest or rustdoc) test_args: Vec<String>, - /// extra options to pass the compiler when running tests - #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] - rustc_args: Vec<String>, #[arg(long)] /// do not run doc tests no_doc: bool, @@ -499,10 +506,10 @@ impl Subcommand { } } - pub fn rustc_args(&self) -> Vec<&str> { + pub fn compiletest_rustc_args(&self) -> Vec<&str> { match *self { - Subcommand::Test { ref rustc_args, .. } | Subcommand::Miri { ref rustc_args, .. } => { - rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() + Subcommand::Test { ref compiletest_rustc_args, .. 
} => { + compiletest_rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() } _ => vec![], } diff --git a/src/bootstrap/src/core/config/mod.rs b/src/bootstrap/src/core/config/mod.rs index 23556e8bc5d..9f09dd13f29 100644 --- a/src/bootstrap/src/core/config/mod.rs +++ b/src/bootstrap/src/core/config/mod.rs @@ -1,6 +1,6 @@ #[allow(clippy::module_inception)] mod config; -pub(crate) mod flags; +pub mod flags; #[cfg(test)] mod tests; diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs index 6e695d269cf..40f3e5e7222 100644 --- a/src/bootstrap/src/core/config/tests.rs +++ b/src/bootstrap/src/core/config/tests.rs @@ -12,9 +12,10 @@ use crate::core::build_steps::clippy::get_clippy_rules_in_order; use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig}; fn parse(config: &str) -> Config { - Config::parse_inner(&["check".to_string(), "--config=/does/not/exist".to_string()], |&_| { - toml::from_str(&config).unwrap() - }) + Config::parse_inner( + Flags::parse(&["check".to_string(), "--config=/does/not/exist".to_string()]), + |&_| toml::from_str(&config).unwrap(), + ) } #[test] @@ -108,7 +109,7 @@ fn clap_verify() { #[test] fn override_toml() { let config = Config::parse_inner( - &[ + Flags::parse(&[ "check".to_owned(), "--config=/does/not/exist".to_owned(), "--set=change-id=1".to_owned(), @@ -121,7 +122,7 @@ fn override_toml() { "--set=target.x86_64-unknown-linux-gnu.rpath=false".to_owned(), "--set=target.aarch64-unknown-linux-gnu.sanitizers=false".to_owned(), "--set=target.aarch64-apple-darwin.runner=apple".to_owned(), - ], + ]), |&_| { toml::from_str( r#" @@ -201,12 +202,12 @@ runner = "x86_64-runner" #[should_panic] fn override_toml_duplicate() { Config::parse_inner( - &[ + Flags::parse(&[ "check".to_owned(), "--config=/does/not/exist".to_string(), "--set=change-id=1".to_owned(), "--set=change-id=2".to_owned(), - ], + ]), |&_| toml::from_str("change-id = 0").unwrap(), ); } @@ -226,7 +227,7 @@ fn profile_user_dist() { .and_then(|table: toml::Value| TomlConfig::deserialize(table)) .unwrap() } - Config::parse_inner(&["check".to_owned()], get_toml); + Config::parse_inner(Flags::parse(&["check".to_owned()]), get_toml); } #[test] @@ -301,7 +302,7 @@ fn order_of_clippy_rules() { "-Aclippy::foo1".to_string(), "-Aclippy::foo2".to_string(), ]; - let config = Config::parse(&args); + let config = Config::parse(Flags::parse(&args)); let actual = match &config.cmd { crate::Subcommand::Clippy { allow, deny, warn, forbid, .. 
} => { diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index 4d1aea3cd95..fd85650bc56 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -379,7 +379,7 @@ impl Config { let version = &self.stage0_metadata.compiler.version; let host = self.build; - let bin_root = self.out.join(host.triple).join("stage0"); + let bin_root = self.out.join(host).join("stage0"); let clippy_stamp = bin_root.join(".clippy-stamp"); let cargo_clippy = bin_root.join("bin").join(exe("cargo-clippy", host)); if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, date) { @@ -412,7 +412,7 @@ impl Config { let channel = format!("{version}-{date}"); let host = self.build; - let bin_root = self.out.join(host.triple).join("rustfmt"); + let bin_root = self.out.join(host).join("rustfmt"); let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { @@ -592,7 +592,7 @@ impl Config { t!(fs::create_dir_all(&cache_dir)); } - let bin_root = self.out.join(self.build.triple).join(destination); + let bin_root = self.out.join(self.build).join(destination); let tarball = cache_dir.join(&filename); let (base_url, url, should_verify) = match mode { DownloadSource::CI => { diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 2062d435bfc..bfd0e42acfd 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -43,7 +43,7 @@ mod core; mod utils; pub use core::builder::PathSet; -pub use core::config::flags::Subcommand; +pub use core::config::flags::{Flags, Subcommand}; pub use core::config::Config; pub use utils::change_tracker::{ @@ -452,7 +452,7 @@ impl Build { } // Make a symbolic link so we can use a consistent directory in the documentation. - let build_triple = build.out.join(build.build.triple); + let build_triple = build.out.join(build.build); t!(fs::create_dir_all(&build_triple)); let host = build.out.join("host"); if host.is_symlink() { @@ -807,10 +807,7 @@ impl Build { } fn tools_dir(&self, compiler: Compiler) -> PathBuf { - let out = self - .out - .join(&*compiler.host.triple) - .join(format!("stage{}-tools-bin", compiler.stage)); + let out = self.out.join(compiler.host).join(format!("stage{}-tools-bin", compiler.stage)); t!(fs::create_dir_all(&out)); out } @@ -827,14 +824,14 @@ impl Build { Mode::ToolBootstrap => "-bootstrap-tools", Mode::ToolStd | Mode::ToolRustc => "-tools", }; - self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix)) + self.out.join(compiler.host).join(format!("stage{}{}", compiler.stage, suffix)) } /// Returns the root output directory for all Cargo output in a given stage, /// running a particular compiler, whether or not we're building the /// standard library, and targeting the specified architecture. 
fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf { - self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir()) + self.stage_out(compiler, mode).join(target).join(self.cargo_dir()) } /// Root output directory of LLVM for `target` @@ -845,36 +842,36 @@ impl Build { if self.config.llvm_from_ci && self.config.build == target { self.config.ci_llvm_root() } else { - self.out.join(&*target.triple).join("llvm") + self.out.join(target).join("llvm") } } fn lld_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("lld") + self.out.join(target).join("lld") } /// Output directory for all documentation for a target fn doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("doc") + self.out.join(target).join("doc") } /// Output directory for all JSON-formatted documentation for a target fn json_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("json-doc") + self.out.join(target).join("json-doc") } fn test_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("test") + self.out.join(target).join("test") } /// Output directory for all documentation for a target fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("compiler-doc") + self.out.join(target).join("compiler-doc") } /// Output directory for some generated md crate documentation for a target (temporary) fn md_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("md-doc") + self.out.join(target).join("md-doc") } /// Returns `true` if this is an external version of LLVM not managed by bootstrap. @@ -954,7 +951,7 @@ impl Build { /// Directory for libraries built from C/C++ code and shared between stages. fn native_dir(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("native") + self.out.join(target).join("native") } /// Root output directory for rust_test_helpers library compiled for diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index 84dac25188e..c629f04c00e 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -225,4 +225,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Info, summary: "New option `llvm.libzstd` to control whether llvm is built with zstd support.", }, + ChangeInfo { + change_id: 128841, + severity: ChangeSeverity::Warning, + summary: "./x test --rustc-args was renamed to --compiletest-rustc-args as it only applies there. 
./x miri --rustc-args was also removed.", + }, ]; diff --git a/src/bootstrap/src/utils/helpers/tests.rs b/src/bootstrap/src/utils/helpers/tests.rs index f0cb324674f..103c4d26a18 100644 --- a/src/bootstrap/src/utils/helpers/tests.rs +++ b/src/bootstrap/src/utils/helpers/tests.rs @@ -5,7 +5,7 @@ use std::path::PathBuf; use crate::utils::helpers::{ check_cfg_arg, extract_beta_rev, hex_encode, make, program_out_of_date, symlink_dir, }; -use crate::Config; +use crate::{Config, Flags}; #[test] fn test_make() { @@ -58,7 +58,8 @@ fn test_check_cfg_arg() { #[test] fn test_program_out_of_date() { - let config = Config::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()]); + let config = + Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()])); let tempfile = config.tempdir().join(".tmp-stamp-file"); File::create(&tempfile).unwrap().write_all(b"dummy value").unwrap(); assert!(tempfile.exists()); @@ -73,7 +74,8 @@ fn test_program_out_of_date() { #[test] fn test_symlink_dir() { - let config = Config::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()]); + let config = + Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()])); let tempdir = config.tempdir().join(".tmp-dir"); let link_path = config.tempdir().join(".tmp-link"); diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm.sh b/src/ci/docker/scripts/x86_64-gnu-llvm.sh index b3921f11421..98290f5a72c 100755 --- a/src/ci/docker/scripts/x86_64-gnu-llvm.sh +++ b/src/ci/docker/scripts/x86_64-gnu-llvm.sh @@ -18,9 +18,9 @@ if [[ -z "${PR_CI_JOB}" ]]; then # compiler, and is sensitive to the addition of new flags. ../x.py --stage 1 test tests/ui-fulldeps - # The tests are run a second time with the size optimizations enabled. - ../x.py --stage 1 test library/std library/alloc library/core \ - --rustc-args "--cfg feature=\"optimize_for_size\"" + # Rebuild the stdlib with the size optimizations enabled and run tests again. + RUSTFLAGS_NOT_BOOTSTRAP="--cfg feature=\"optimize_for_size\"" ../x.py --stage 1 test \ + library/std library/alloc library/core fi # NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux. diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index 467fd6f43e4..cb0b2e63452 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -61,6 +61,7 @@ - [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md) - [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md) - [powerpc-unknown-openbsd](platform-support/powerpc-unknown-openbsd.md) + - [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md) - [powerpc64-ibm-aix](platform-support/aix.md) - [riscv32im-risc0-zkvm-elf](platform-support/riscv32im-risc0-zkvm-elf.md) - [riscv32imac-unknown-xous-elf](platform-support/riscv32imac-unknown-xous-elf.md) diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index bd12172ecbb..c3f73a8367a 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -332,6 +332,7 @@ target | std | host | notes `msp430-none-elf` | * | | 16-bit MSP430 microcontrollers `powerpc-unknown-linux-gnuspe` | ✓ | | PowerPC SPE Linux `powerpc-unknown-linux-musl` | ? | | PowerPC Linux with musl 1.2.3 +[`powerpc-unknown-linux-muslspe`](platform-support/powerpc-unknown-linux-muslspe.md) | ? 
| | PowerPC SPE Linux [`powerpc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD 32-bit powerpc systems [`powerpc-unknown-openbsd`](platform-support/powerpc-unknown-openbsd.md) | * | | [`powerpc-wrs-vxworks-spe`](platform-support/vxworks.md) | ✓ | | diff --git a/src/doc/rustc/src/platform-support/powerpc-unknown-linux-muslspe.md b/src/doc/rustc/src/platform-support/powerpc-unknown-linux-muslspe.md new file mode 100644 index 00000000000..4c416b51929 --- /dev/null +++ b/src/doc/rustc/src/platform-support/powerpc-unknown-linux-muslspe.md @@ -0,0 +1,32 @@ +# powerpc-unknown-linux-muslspe + +**Tier: 3** + +This target is very similar to already existing ones like `powerpc_unknown_linux_musl` and `powerpc_unknown_linux_gnuspe`. +This one has PowerPC SPE support for musl. Unfortunately, the last supported gcc version with PowerPC SPE is 8.4.0. + +## Target maintainers + +- [@BKPepe](https://github.com/BKPepe) + +## Requirements + +This target is cross-compiled. There is no support for `std`. There is no +default allocator, but it's possible to use `alloc` by supplying an allocator. + +This target generated binaries in the ELF format. + +## Building the target + +This target was tested and used within the `OpenWrt` build system for CZ.NIC Turris 1.x routers using Freescale P2020. + +## Building Rust programs + +Rust does not yet ship pre-compiled artifacts for this target. To compile for +this target, you will either need to build Rust with the target enabled (see +"Building the target" above), or build your own copy of `core` by using +`build-std` or similar. + +## Testing + +This is a cross-compiled target and there is no support to run rustc test suite. diff --git a/src/doc/rustdoc/src/write-documentation/documentation-tests.md b/src/doc/rustdoc/src/write-documentation/documentation-tests.md index 9526f33359e..7ed2e9720fe 100644 --- a/src/doc/rustdoc/src/write-documentation/documentation-tests.md +++ b/src/doc/rustdoc/src/write-documentation/documentation-tests.md @@ -376,6 +376,57 @@ that the code sample should be compiled using the respective edition of Rust. # fn foo() {} ``` +Starting in the 2024 edition[^edition-note], compatible doctests are merged as one before being +run. We combine doctests for performance reasons: the slowest part of doctests is to compile them. +Merging all of them into one file and compiling this new file, then running the doctests is much +faster. Whether doctests are merged or not, they are run in their own process. + +An example of time spent when running doctests: + +[sysinfo crate](https://crates.io/crates/sysinfo): + +```text +wall-time duration: 4.59s +total compile time: 27.067s +total runtime: 3.969s +``` + +Rust core library: + +```text +wall-time duration: 102s +total compile time: 775.204s +total runtime: 15.487s +``` + +[^edition-note]: This is based on the edition of the whole crate, not the edition of the individual +test case that may be specified in its code attribute. + +In some cases, doctests cannot be merged. For example, if you have: + +```rust +//! ``` +//! let location = std::panic::Location::caller(); +//! assert_eq!(location.line(), 4); +//! ``` +``` + +The problem with this code is that, if you change any other doctests, it'll likely break when +runing `rustdoc --test`, making it tricky to maintain. + +This is where the `standalone` attribute comes in: it tells `rustdoc` that a doctest +should not be merged with the others. So the previous code should use it: + +```rust +//! ```standalone +//! 
let location = std::panic::Location::caller(); +//! assert_eq!(location.line(), 4); +//! ``` +``` + +In this case, it means that the line information will not change if you add/remove other +doctests. + ### Custom CSS classes for code blocks ```rust diff --git a/src/doc/unstable-book/src/language-features/asm-const.md b/src/doc/unstable-book/src/language-features/asm-const.md deleted file mode 100644 index 670c4df414f..00000000000 --- a/src/doc/unstable-book/src/language-features/asm-const.md +++ /dev/null @@ -1,11 +0,0 @@ -# `asm_const` - -The tracking issue for this feature is: [#93332] - -[#93332]: https://github.com/rust-lang/rust/issues/93332 - ------------------------- - -This feature adds a `const <expr>` operand type to `asm!` and `global_asm!`. -- `<expr>` must be an integer constant expression. -- The value of the expression is formatted as a string and substituted directly into the asm template string. diff --git a/src/doc/unstable-book/src/language-features/rustc-private.md b/src/doc/unstable-book/src/language-features/rustc-private.md new file mode 100644 index 00000000000..97fce5980e4 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/rustc-private.md @@ -0,0 +1,11 @@ +# `rustc_private` + +The tracking issue for this feature is: [#27812] + +[#27812]: https://github.com/rust-lang/rust/issues/27812 + +------------------------ + +This feature allows access to unstable internal compiler crates. + +Additionally it changes the linking behavior of crates which have this feature enabled. It will prevent linking to a dylib if there's a static variant of it already statically linked into another dylib dependency. This is required to successfully link to `rustc_driver`. diff --git a/src/etc/completions/x.py.fish b/src/etc/completions/x.py.fish index 805fc8aa8cc..297dc11cd61 100644 --- a/src/etc/completions/x.py.fish +++ b/src/etc/completions/x.py.fish @@ -266,7 +266,7 @@ complete -c x.py -n "__fish_seen_subcommand_from doc" -l enable-bolt-settings -d complete -c x.py -n "__fish_seen_subcommand_from doc" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from doc" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)' -r -complete -c x.py -n "__fish_seen_subcommand_from test" -l rustc-args -d 'extra options to pass the compiler when running tests' -r +complete -c x.py -n "__fish_seen_subcommand_from test" -l compiletest-rustc-args -d 'extra options to pass the compiler when running compiletest tests' -r complete -c x.py -n "__fish_seen_subcommand_from test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)' -r complete -c x.py -n "__fish_seen_subcommand_from test" -l compare-mode -d 'mode describing what file the actual ui output will be compared to' -r complete -c x.py -n "__fish_seen_subcommand_from test" -l pass -d 'force {check,build,run}-pass tests to this mode' -r @@ -313,7 +313,6 @@ complete -c x.py -n "__fish_seen_subcommand_from test" -l enable-bolt-settings - complete -c x.py -n "__fish_seen_subcommand_from test" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from miri" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)' -r -complete -c x.py -n "__fish_seen_subcommand_from miri" -l rustc-args -d 'extra options to pass the compiler when running tests' -r complete -c x.py -n "__fish_seen_subcommand_from miri" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from miri" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from miri" -l build -d 'build target of the stage0 compiler' -r -f diff --git a/src/etc/completions/x.py.ps1 b/src/etc/completions/x.py.ps1 index ce590d2fa48..4b424471a26 100644 --- a/src/etc/completions/x.py.ps1 +++ b/src/etc/completions/x.py.ps1 @@ -338,7 +338,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { } 'x.py;test' { [CompletionResult]::new('--test-args', 'test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)') - [CompletionResult]::new('--rustc-args', 'rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running tests') + [CompletionResult]::new('--compiletest-rustc-args', 'compiletest-rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running compiletest tests') [CompletionResult]::new('--extra-checks', 'extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)') [CompletionResult]::new('--compare-mode', 'compare-mode', [CompletionResultType]::ParameterName, 'mode describing what file the actual ui output will be compared to') [CompletionResult]::new('--pass', 'pass', [CompletionResultType]::ParameterName, 'force {check,build,run}-pass tests to this mode') @@ -392,7 +392,6 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { } 'x.py;miri' { [CompletionResult]::new('--test-args', 'test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)') - [CompletionResult]::new('--rustc-args', 'rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running tests') [CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') [CompletionResult]::new('--build-dir', 'build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') [CompletionResult]::new('--build', 'build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') diff --git a/src/etc/completions/x.py.sh b/src/etc/completions/x.py.sh index a4234905476..60ba8d3ba70 100644 --- a/src/etc/completions/x.py.sh +++ b/src/etc/completions/x.py.sh @@ -1300,7 +1300,7 @@ _x.py() { return 0 ;; x.py__miri) - opts="-v -i -j -h --no-fail-fast --test-args --rustc-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --no-fail-fast --test-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1310,10 +1310,6 @@ _x.py() { COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --rustc-args) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; --config) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -1862,7 +1858,7 @@ _x.py() { return 0 ;; x.py__test) - opts="-v -i -j -h --no-fail-fast --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --no-fail-fast --test-args --compiletest-rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1872,7 +1868,7 @@ _x.py() { COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --rustc-args) + --compiletest-rustc-args) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; diff --git a/src/etc/completions/x.py.zsh b/src/etc/completions/x.py.zsh index fc8be4f7881..688f692da24 100644 --- a/src/etc/completions/x.py.zsh +++ b/src/etc/completions/x.py.zsh @@ -337,7 +337,7 @@ _arguments "${_arguments_options[@]}" \ (test) _arguments "${_arguments_options[@]}" \ '*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS: ' \ -'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \ +'*--compiletest-rustc-args=[extra options to pass the compiler when running compiletest tests]:ARGS: ' \ '--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell)]:EXTRA_CHECKS: ' \ '--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE: ' \ '--pass=[force {check,build,run}-pass tests to this mode]:check | build | run: ' \ @@ -393,7 +393,6 @@ _arguments "${_arguments_options[@]}" \ (miri) _arguments "${_arguments_options[@]}" \ '*--test-args=[extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)]:ARGS: ' \ -'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \ '--config=[TOML configuration file for build]:FILE:_files' \ '--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ '--build=[build target of the stage0 compiler]:BUILD:( )' \ diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 227695cdadd..e8f9dee07d3 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -56,7 +56,7 @@ class StdStringProvider(printer_base): self._valobj = valobj vec = valobj["vec"] self._length = int(vec["len"]) - self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["inner"]["ptr"]) def to_string(self): return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) @@ -74,7 +74,7 @@ class StdOsStringProvider(printer_base): vec = buf[ZERO_FIELD] if is_windows else buf self._length = int(vec["len"]) - self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["inner"]["ptr"]) def to_string(self): return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) @@ -96,6 +96,7 @@ class StdStrProvider(printer_base): def display_hint(): return "string" + def _enumerate_array_elements(element_ptrs): for (i, element_ptr) in enumerate(element_ptrs): key = "[{}]".format(i) @@ -112,6 +113,7 @@ def _enumerate_array_elements(element_ptrs): yield key, element + class StdSliceProvider(printer_base): def __init__(self, valobj): self._valobj = valobj @@ -130,11 +132,14 @@ class StdSliceProvider(printer_base): def display_hint(): return "array" + class StdVecProvider(printer_base): def __init__(self, valobj): self._valobj = valobj self._length = int(valobj["len"]) - self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["inner"]["ptr"]) + ptr_ty = gdb.Type.pointer(valobj.type.template_argument(0)) + self._data_ptr = self._data_ptr.reinterpret_cast(ptr_ty) def to_string(self): return "Vec(size={})".format(self._length) @@ -155,11 +160,13 @@ class StdVecDequeProvider(printer_base): self._head = int(valobj["head"]) self._size = int(valobj["len"]) # BACKCOMPAT: rust 1.75 - cap = valobj["buf"]["cap"] + cap = valobj["buf"]["inner"]["cap"] if cap.type.code != gdb.TYPE_CODE_INT: cap = cap[ZERO_FIELD] self._cap = int(cap) - self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["inner"]["ptr"]) + ptr_ty = gdb.Type.pointer(valobj.type.template_argument(0)) + self._data_ptr = self._data_ptr.reinterpret_cast(ptr_ty) def to_string(self): return "VecDeque(size={})".format(self._size) diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index c6330117380..8750d7682d1 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -389,11 +389,11 @@ class StdVecSyntheticProvider: def update(self): # type: () -> None self.length = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned() - self.buf = self.valobj.GetChildMemberWithName("buf") + self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner") self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr")) - self.element_type = self.data_ptr.GetType().GetPointeeType() + self.element_type = self.valobj.GetType().GetTemplateArgumentType(0) self.element_type_size = self.element_type.GetByteSize() def 
has_children(self): @@ -474,7 +474,7 @@ class StdVecDequeSyntheticProvider: # type: () -> None self.head = self.valobj.GetChildMemberWithName("head").GetValueAsUnsigned() self.size = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned() - self.buf = self.valobj.GetChildMemberWithName("buf") + self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner") cap = self.buf.GetChildMemberWithName("cap") if cap.GetType().num_fields == 1: cap = cap.GetChildAtIndex(0) @@ -482,7 +482,7 @@ class StdVecDequeSyntheticProvider: self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr")) - self.element_type = self.data_ptr.GetType().GetPointeeType() + self.element_type = self.valobj.GetType().GetTemplateArgumentType(0) self.element_type_size = self.element_type.GetByteSize() def has_children(self): diff --git a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis index da307809f7b..49d82dfad82 100644 --- a/src/etc/natvis/liballoc.natvis +++ b/src/etc/natvis/liballoc.natvis @@ -4,10 +4,10 @@ <DisplayString>{{ len={len} }}</DisplayString> <Expand> <Item Name="[len]" ExcludeView="simple">len</Item> - <Item Name="[capacity]" ExcludeView="simple">buf.cap.__0</Item> + <Item Name="[capacity]" ExcludeView="simple">buf.inner.cap.__0</Item> <ArrayItems> <Size>len</Size> - <ValuePointer>buf.ptr.pointer.pointer</ValuePointer> + <ValuePointer>($T1*)buf.inner.ptr.pointer.pointer</ValuePointer> </ArrayItems> </Expand> </Type> @@ -15,7 +15,7 @@ <DisplayString>{{ len={len} }}</DisplayString> <Expand> <Item Name="[len]" ExcludeView="simple">len</Item> - <Item Name="[capacity]" ExcludeView="simple">buf.cap.__0</Item> + <Item Name="[capacity]" ExcludeView="simple">buf.inner.cap.__0</Item> <CustomListItems> <Variable Name="i" InitialValue="0" /> <Size>len</Size> @@ -23,7 +23,7 @@ <If Condition="i == len"> <Break/> </If> - <Item>buf.ptr.pointer.pointer[(i + head) % buf.cap.__0]</Item> + <Item>(($T1*)buf.inner.ptr.pointer.pointer)[(i + head) % buf.inner.cap.__0]</Item> <Exec>i = i + 1</Exec> </Loop> </CustomListItems> @@ -41,17 +41,17 @@ </Expand> </Type> <Type Name="alloc::string::String"> - <DisplayString>{(char*)vec.buf.ptr.pointer.pointer,[vec.len]s8}</DisplayString> - <StringView>(char*)vec.buf.ptr.pointer.pointer,[vec.len]s8</StringView> + <DisplayString>{(char*)vec.buf.inner.ptr.pointer.pointer,[vec.len]s8}</DisplayString> + <StringView>(char*)vec.buf.inner.ptr.pointer.pointer,[vec.len]s8</StringView> <Expand> <Item Name="[len]" ExcludeView="simple">vec.len</Item> - <Item Name="[capacity]" ExcludeView="simple">vec.buf.cap.__0</Item> + <Item Name="[capacity]" ExcludeView="simple">vec.buf.inner.cap.__0</Item> <Synthetic Name="[chars]"> - <DisplayString>{(char*)vec.buf.ptr.pointer.pointer,[vec.len]s8}</DisplayString> + <DisplayString>{(char*)vec.buf.inner.ptr.pointer.pointer,[vec.len]s8}</DisplayString> <Expand> <ArrayItems> <Size>vec.len</Size> - <ValuePointer>(char*)vec.buf.ptr.pointer.pointer</ValuePointer> + <ValuePointer>(char*)vec.buf.inner.ptr.pointer.pointer</ValuePointer> </ArrayItems> </Expand> </Synthetic> diff --git a/src/etc/natvis/libstd.natvis b/src/etc/natvis/libstd.natvis index 4371b995318..4719a479c47 100644 --- a/src/etc/natvis/libstd.natvis +++ b/src/etc/natvis/libstd.natvis @@ -104,14 +104,14 @@ </Type> <Type Name="std::ffi::os_str::OsString"> - <DisplayString>{(char*)inner.inner.bytes.buf.ptr.pointer.pointer,[inner.inner.bytes.len]}</DisplayString> + 
<DisplayString>{(char*)inner.inner.bytes.buf.inner.ptr.pointer.pointer,[inner.inner.bytes.len]}</DisplayString> <Expand> <Synthetic Name="[chars]"> - <DisplayString>{(char*)inner.inner.bytes.buf.ptr.pointer.pointer,[inner.inner.bytes.len]}</DisplayString> + <DisplayString>{(char*)inner.inner.bytes.buf.inner.ptr.pointer.pointer,[inner.inner.bytes.len]}</DisplayString> <Expand> <ArrayItems> <Size>inner.inner.bytes.len</Size> - <ValuePointer>(char*)inner.inner.bytes.buf.ptr.pointer.pointer</ValuePointer> + <ValuePointer>(char*)inner.inner.bytes.buf.inner.ptr.pointer.pointer</ValuePointer> </ArrayItems> </Expand> </Synthetic> diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index cffadc7c10a..53757349a9b 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -2069,7 +2069,7 @@ pub(crate) fn clean_middle_ty<'tcx>( Some(ContainerTy::Ref(r)), )), }, - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { // FIXME: should we merge the outer and inner binders somehow? let sig = bound_ty.skip_binder().fn_sig(cx.tcx); let decl = clean_poly_fn_sig(cx, None, sig); diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index cd2892e7a25..9e9d8f02a2e 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -737,9 +737,11 @@ impl Options { let html_no_source = matches.opt_present("html-no-source"); if generate_link_to_definition && (show_coverage || output_format != OutputFormat::Html) { - dcx.fatal( - "--generate-link-to-definition option can only be used with HTML output format", - ); + dcx.struct_warn( + "`--generate-link-to-definition` option can only be used with HTML output format", + ) + .with_note("`--generate-link-to-definition` option will be ignored") + .emit(); } let scrape_examples_options = ScrapeExamplesOptions::new(matches, dcx); diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index 08d6a5a52b2..743c1ed507e 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -1,5 +1,6 @@ mod make; mod markdown; +mod runner; mod rust; use std::fs::File; @@ -10,7 +11,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; use std::{panic, str}; -pub(crate) use make::make_test; +pub(crate) use make::DocTestBuilder; pub(crate) use markdown::test as test_markdown; use rustc_ast as ast; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; @@ -150,8 +151,6 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, options: RustdocOptions) -> Result<() expanded_args: options.expanded_args.clone(), }; - let test_args = options.test_args.clone(); - let nocapture = options.nocapture; let externs = options.externs.clone(); let json_unused_externs = options.json_unused_externs; @@ -164,39 +163,46 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, options: RustdocOptions) -> Result<() let args_path = temp_dir.path().join("rustdoc-cfgs"); crate::wrap_return(dcx, generate_args_file(&args_path, &options))?; - let (tests, unused_extern_reports, compiling_test_count) = - interface::run_compiler(config, |compiler| { - compiler.enter(|queries| { - let collector = queries.global_ctxt()?.enter(|tcx| { - let crate_name = tcx.crate_name(LOCAL_CRATE).to_string(); - let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID); - let opts = scrape_test_config(crate_name, crate_attrs, args_path); - let enable_per_target_ignores = options.enable_per_target_ignores; - - let mut collector = CreateRunnableDoctests::new(options, opts); - let hir_collector = HirCollector::new( - &compiler.sess, - tcx.hir(), - 
ErrorCodes::from(compiler.sess.opts.unstable_features.is_nightly_build()), - enable_per_target_ignores, - tcx, - ); - let tests = hir_collector.collect_crate(); - tests.into_iter().for_each(|t| collector.add_test(t)); - - collector - }); - if compiler.sess.dcx().has_errors().is_some() { - FatalError.raise(); - } + let CreateRunnableDocTests { + standalone_tests, + mergeable_tests, + rustdoc_options, + opts, + unused_extern_reports, + compiling_test_count, + .. + } = interface::run_compiler(config, |compiler| { + compiler.enter(|queries| { + let collector = queries.global_ctxt()?.enter(|tcx| { + let crate_name = tcx.crate_name(LOCAL_CRATE).to_string(); + let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID); + let opts = scrape_test_config(crate_name, crate_attrs, args_path); + let enable_per_target_ignores = options.enable_per_target_ignores; + + let mut collector = CreateRunnableDocTests::new(options, opts); + let hir_collector = HirCollector::new( + &compiler.sess, + tcx.hir(), + ErrorCodes::from(compiler.sess.opts.unstable_features.is_nightly_build()), + enable_per_target_ignores, + tcx, + ); + let tests = hir_collector.collect_crate(); + tests.into_iter().for_each(|t| collector.add_test(t)); + + collector + }); + if compiler.sess.dcx().has_errors().is_some() { + FatalError.raise(); + } + + Ok(collector) + }) + })?; - let unused_extern_reports = collector.unused_extern_reports.clone(); - let compiling_test_count = collector.compiling_test_count.load(Ordering::SeqCst); - Ok((collector.tests, unused_extern_reports, compiling_test_count)) - }) - })?; + run_tests(opts, &rustdoc_options, &unused_extern_reports, standalone_tests, mergeable_tests); - run_tests(test_args, nocapture, tests); + let compiling_test_count = compiling_test_count.load(Ordering::SeqCst); // Collect and warn about unused externs, but only if we've gotten // reports for each doctest @@ -240,16 +246,83 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, options: RustdocOptions) -> Result<() } pub(crate) fn run_tests( - mut test_args: Vec<String>, - nocapture: bool, - mut tests: Vec<test::TestDescAndFn>, + opts: GlobalTestOptions, + rustdoc_options: &Arc<RustdocOptions>, + unused_extern_reports: &Arc<Mutex<Vec<UnusedExterns>>>, + mut standalone_tests: Vec<test::TestDescAndFn>, + mergeable_tests: FxHashMap<Edition, Vec<(DocTestBuilder, ScrapedDocTest)>>, ) { + let mut test_args = Vec::with_capacity(rustdoc_options.test_args.len() + 1); test_args.insert(0, "rustdoctest".to_string()); - if nocapture { + test_args.extend_from_slice(&rustdoc_options.test_args); + if rustdoc_options.nocapture { test_args.push("--nocapture".to_string()); } - tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice())); - test::test_main(&test_args, tests, None); + + let mut nb_errors = 0; + let mut ran_edition_tests = 0; + let target_str = rustdoc_options.target.to_string(); + + for (edition, mut doctests) in mergeable_tests { + if doctests.is_empty() { + continue; + } + doctests.sort_by(|(_, a), (_, b)| a.name.cmp(&b.name)); + + let mut tests_runner = runner::DocTestRunner::new(); + + let rustdoc_test_options = IndividualTestOptions::new( + &rustdoc_options, + &Some(format!("merged_doctest_{edition}")), + PathBuf::from(format!("doctest_{edition}.rs")), + ); + + for (doctest, scraped_test) in &doctests { + tests_runner.add_test(doctest, scraped_test, &target_str); + } + if let Ok(success) = tests_runner.run_merged_tests( + rustdoc_test_options, + edition, + &opts, + &test_args, + rustdoc_options, + ) { + ran_edition_tests += 1; + if !success { + 
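+                // The merged run compiled and executed, but at least one doctest in it reported a failure; record it so we can exit with an error code once every edition has been processed.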
nb_errors += 1; + } + continue; + } + // We failed to compile all compatible tests as one so we push them into the + // `standalone_tests` doctests. + debug!("Failed to compile compatible doctests for edition {} all at once", edition); + for (doctest, scraped_test) in doctests { + doctest.generate_unique_doctest( + &scraped_test.text, + scraped_test.langstr.test_harness, + &opts, + Some(&opts.crate_name), + ); + standalone_tests.push(generate_test_desc_and_fn( + doctest, + scraped_test, + opts.clone(), + Arc::clone(&rustdoc_options), + unused_extern_reports.clone(), + )); + } + } + + // We need to call `test_main` even if there is no doctest to run to get the output + // `running 0 tests...`. + if ran_edition_tests == 0 || !standalone_tests.is_empty() { + standalone_tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice())); + test::test_main(&test_args, standalone_tests, None); + } + if nb_errors != 0 { + // libtest::ERROR_EXIT_CODE is not public but it's the same value. + std::process::exit(101); + } } // Look for `#![doc(test(no_crate_inject))]`, used by crates in the std facade. @@ -330,7 +403,7 @@ impl DirState { // We could unify this struct the one in rustc but they have different // ownership semantics, so doing so would create wasteful allocations. #[derive(serde::Serialize, serde::Deserialize)] -struct UnusedExterns { +pub(crate) struct UnusedExterns { /// Lint level of the unused_crate_dependencies lint lint_level: String, /// List of unused externs by their names. @@ -359,22 +432,41 @@ fn wrapped_rustc_command(rustc_wrappers: &[PathBuf], rustc_binary: &Path) -> Com command } -struct RunnableDoctest { +/// Information needed for running a bundle of doctests. +/// +/// This data structure contains the "full" test code, including the wrappers +/// (if multiple doctests are merged), `main` function, +/// and everything needed to calculate the compiler's command-line arguments. +/// The `# ` prefix on boring lines has also been stripped. +pub(crate) struct RunnableDocTest { full_test_code: String, full_test_line_offset: usize, test_opts: IndividualTestOptions, global_opts: GlobalTestOptions, - scraped_test: ScrapedDoctest, + langstr: LangString, + line: usize, + edition: Edition, + no_run: bool, + is_multiple_tests: bool, } +impl RunnableDocTest { + fn path_for_merged_doctest(&self) -> PathBuf { + self.test_opts.outdir.path().join(&format!("doctest_{}.rs", self.edition)) + } +} + +/// Execute a `RunnableDoctest`. +/// +/// This is the function that calculates the compiler command line, invokes the compiler, then +/// invokes the test or tests in a separate executable (if applicable). fn run_test( - doctest: RunnableDoctest, + doctest: RunnableDocTest, rustdoc_options: &RustdocOptions, supports_color: bool, report_unused_externs: impl Fn(UnusedExterns), ) -> Result<(), TestFailure> { - let scraped_test = &doctest.scraped_test; - let langstr = &scraped_test.langstr; + let langstr = &doctest.langstr; // Make sure we emit well-formed executable names for our target. 
let rust_out = add_exe_suffix("rust_out".to_owned(), &rustdoc_options.target); let output_file = doctest.test_opts.outdir.path().join(rust_out); @@ -391,12 +483,15 @@ fn run_test( compiler.arg(format!("--sysroot={}", sysroot.display())); } - compiler.arg("--edition").arg(&scraped_test.edition(rustdoc_options).to_string()); - compiler.env("UNSTABLE_RUSTDOC_TEST_PATH", &doctest.test_opts.path); - compiler.env( - "UNSTABLE_RUSTDOC_TEST_LINE", - format!("{}", scraped_test.line as isize - doctest.full_test_line_offset as isize), - ); + compiler.arg("--edition").arg(&doctest.edition.to_string()); + if !doctest.is_multiple_tests { + // Setting these environment variables is unneeded if this is a merged doctest. + compiler.env("UNSTABLE_RUSTDOC_TEST_PATH", &doctest.test_opts.path); + compiler.env( + "UNSTABLE_RUSTDOC_TEST_LINE", + format!("{}", doctest.line as isize - doctest.full_test_line_offset as isize), + ); + } compiler.arg("-o").arg(&output_file); if langstr.test_harness { compiler.arg("--test"); @@ -408,10 +503,7 @@ fn run_test( compiler.arg("-Z").arg("unstable-options"); } - if scraped_test.no_run(rustdoc_options) - && !langstr.compile_fail - && rustdoc_options.persist_doctests.is_none() - { + if doctest.no_run && !langstr.compile_fail && rustdoc_options.persist_doctests.is_none() { // FIXME: why does this code check if it *shouldn't* persist doctests // -- shouldn't it be the negation? compiler.arg("--emit=metadata"); @@ -442,18 +534,40 @@ fn run_test( } } - compiler.arg("-"); - compiler.stdin(Stdio::piped()); - compiler.stderr(Stdio::piped()); + // If this is a merged doctest, we need to write it into a file instead of using stdin + // because if the size of the merged doctests is too big, it'll simply break stdin. + if doctest.is_multiple_tests { + // It makes the compilation failure much faster if it is for a combined doctest. + compiler.arg("--error-format=short"); + let input_file = doctest.path_for_merged_doctest(); + if std::fs::write(&input_file, &doctest.full_test_code).is_err() { + // If we cannot write this file for any reason, we leave. All combined tests will be + // tested as standalone tests. + return Err(TestFailure::CompileError); + } + compiler.arg(input_file); + if !rustdoc_options.nocapture { + // If `nocapture` is disabled, then we don't display rustc's output when compiling + // the merged doctests. + compiler.stderr(Stdio::null()); + } + } else { + compiler.arg("-"); + compiler.stdin(Stdio::piped()); + compiler.stderr(Stdio::piped()); + } debug!("compiler invocation for doctest: {compiler:?}"); let mut child = compiler.spawn().expect("Failed to spawn rustc process"); - { + let output = if doctest.is_multiple_tests { + let status = child.wait().expect("Failed to wait"); + process::Output { status, stdout: Vec::new(), stderr: Vec::new() } + } else { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); stdin.write_all(doctest.full_test_code.as_bytes()).expect("could write out test sources"); - } - let output = child.wait_with_output().expect("Failed to read stdout"); + child.wait_with_output().expect("Failed to read stdout") + }; struct Bomb<'a>(&'a str); impl Drop for Bomb<'_> { @@ -492,8 +606,7 @@ fn run_test( // We used to check if the output contained "error[{}]: " but since we added the // colored output, we can't anymore because of the color escape characters before // the ":". 
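    // So the check below only looks for the "error[{code}]" prefix instead.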
- let missing_codes: Vec<String> = scraped_test - .langstr + let missing_codes: Vec<String> = langstr .error_codes .iter() .filter(|err| !out.contains(&format!("error[{err}]"))) @@ -510,7 +623,7 @@ fn run_test( } } - if scraped_test.no_run(rustdoc_options) { + if doctest.no_run { return Ok(()); } @@ -522,15 +635,19 @@ fn run_test( let tool = make_maybe_absolute_path(tool.into()); cmd = Command::new(tool); cmd.args(&rustdoc_options.runtool_args); - cmd.arg(output_file); + cmd.arg(&output_file); } else { - cmd = Command::new(output_file); + cmd = Command::new(&output_file); + if doctest.is_multiple_tests { + cmd.arg("*doctest-bin-path"); + cmd.arg(&output_file); + } } if let Some(run_directory) = &rustdoc_options.test_run_directory { cmd.current_dir(run_directory); } - let result = if rustdoc_options.nocapture { + let result = if doctest.is_multiple_tests || rustdoc_options.nocapture { cmd.status().map(|status| process::Output { status, stdout: Vec::new(), @@ -568,15 +685,14 @@ fn make_maybe_absolute_path(path: PathBuf) -> PathBuf { } struct IndividualTestOptions { outdir: DirState, - test_id: String, path: PathBuf, } impl IndividualTestOptions { - fn new(options: &RustdocOptions, test_id: String, test_path: PathBuf) -> Self { + fn new(options: &RustdocOptions, test_id: &Option<String>, test_path: PathBuf) -> Self { let outdir = if let Some(ref path) = options.persist_doctests { let mut path = path.clone(); - path.push(&test_id); + path.push(&test_id.as_deref().unwrap_or_else(|| "<doctest>")); if let Err(err) = std::fs::create_dir_all(&path) { eprintln!("Couldn't create directory for doctest executables: {err}"); @@ -588,20 +704,45 @@ impl IndividualTestOptions { DirState::Temp(get_doctest_dir().expect("rustdoc needs a tempdir")) }; - Self { outdir, test_id, path: test_path } + Self { outdir, path: test_path } } } /// A doctest scraped from the code, ready to be turned into a runnable test. -struct ScrapedDoctest { +/// +/// The pipeline goes: [`clean`] AST -> `ScrapedDoctest` -> `RunnableDoctest`. +/// [`run_merged_tests`] converts a bunch of scraped doctests to a single runnable doctest, +/// while [`generate_unique_doctest`] does the standalones. 
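+/// Merged doctests are compiled into one binary per edition, while standalone doctests are still handed to libtest as individual tests.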
+/// +/// [`clean`]: crate::clean +/// [`run_merged_tests`]: crate::doctest::runner::DocTestRunner::run_merged_tests +/// [`generate_unique_doctest`]: crate::doctest::make::DocTestBuilder::generate_unique_doctest +pub(crate) struct ScrapedDocTest { filename: FileName, line: usize, - logical_path: Vec<String>, langstr: LangString, text: String, + name: String, } -impl ScrapedDoctest { +impl ScrapedDocTest { + fn new( + filename: FileName, + line: usize, + logical_path: Vec<String>, + langstr: LangString, + text: String, + ) -> Self { + let mut item_path = logical_path.join("::"); + item_path.retain(|c| c != ' '); + if !item_path.is_empty() { + item_path.push(' '); + } + let name = + format!("{} - {item_path}(line {line})", filename.prefer_remapped_unconditionaly()); + + Self { filename, line, langstr, text, name } + } fn edition(&self, opts: &RustdocOptions) -> Edition { self.langstr.edition.unwrap_or(opts.edition) } @@ -609,67 +750,56 @@ impl ScrapedDoctest { fn no_run(&self, opts: &RustdocOptions) -> bool { self.langstr.no_run || opts.no_run } + fn path(&self) -> PathBuf { + match &self.filename { + FileName::Real(path) => { + if let Some(local_path) = path.local_path() { + local_path.to_path_buf() + } else { + // Somehow we got the filename from the metadata of another crate, should never happen + unreachable!("doctest from a different crate"); + } + } + _ => PathBuf::from(r"doctest.rs"), + } + } } -pub(crate) trait DoctestVisitor { +pub(crate) trait DocTestVisitor { fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine); fn visit_header(&mut self, _name: &str, _level: u32) {} } -struct CreateRunnableDoctests { - tests: Vec<test::TestDescAndFn>, +struct CreateRunnableDocTests { + standalone_tests: Vec<test::TestDescAndFn>, + mergeable_tests: FxHashMap<Edition, Vec<(DocTestBuilder, ScrapedDocTest)>>, rustdoc_options: Arc<RustdocOptions>, opts: GlobalTestOptions, visited_tests: FxHashMap<(String, usize), usize>, unused_extern_reports: Arc<Mutex<Vec<UnusedExterns>>>, compiling_test_count: AtomicUsize, + can_merge_doctests: bool, } -impl CreateRunnableDoctests { - fn new(rustdoc_options: RustdocOptions, opts: GlobalTestOptions) -> CreateRunnableDoctests { - CreateRunnableDoctests { - tests: Vec::new(), +impl CreateRunnableDocTests { + fn new(rustdoc_options: RustdocOptions, opts: GlobalTestOptions) -> CreateRunnableDocTests { + let can_merge_doctests = rustdoc_options.edition >= Edition::Edition2024; + CreateRunnableDocTests { + standalone_tests: Vec::new(), + mergeable_tests: FxHashMap::default(), rustdoc_options: Arc::new(rustdoc_options), opts, visited_tests: FxHashMap::default(), unused_extern_reports: Default::default(), compiling_test_count: AtomicUsize::new(0), + can_merge_doctests, } } - fn generate_name(&self, filename: &FileName, line: usize, logical_path: &[String]) -> String { - let mut item_path = logical_path.join("::"); - item_path.retain(|c| c != ' '); - if !item_path.is_empty() { - item_path.push(' '); - } - format!("{} - {item_path}(line {line})", filename.prefer_remapped_unconditionaly()) - } - - fn add_test(&mut self, test: ScrapedDoctest) { - let name = self.generate_name(&test.filename, test.line, &test.logical_path); - let opts = self.opts.clone(); - let target_str = self.rustdoc_options.target.to_string(); - let unused_externs = self.unused_extern_reports.clone(); - if !test.langstr.compile_fail { - self.compiling_test_count.fetch_add(1, Ordering::SeqCst); - } - - let path = match &test.filename { - FileName::Real(path) => { - if let 
Some(local_path) = path.local_path() { - local_path.to_path_buf() - } else { - // Somehow we got the filename from the metadata of another crate, should never happen - unreachable!("doctest from a different crate"); - } - } - _ => PathBuf::from(r"doctest.rs"), - }; - + fn add_test(&mut self, scraped_test: ScrapedDocTest) { // For example `module/file.rs` would become `module_file_rs` - let file = test + let file = scraped_test .filename .prefer_local() .to_string_lossy() @@ -679,75 +809,134 @@ impl CreateRunnableDoctests { let test_id = format!( "{file}_{line}_{number}", file = file, - line = test.line, + line = scraped_test.line, number = { // Increases the current test number, if this file already // exists or it creates a new entry with a test number of 0. self.visited_tests - .entry((file.clone(), test.line)) + .entry((file.clone(), scraped_test.line)) .and_modify(|v| *v += 1) .or_insert(0) }, ); - let rustdoc_options = self.rustdoc_options.clone(); - let rustdoc_test_options = IndividualTestOptions::new(&self.rustdoc_options, test_id, path); - - debug!("creating test {name}: {}", test.text); - self.tests.push(test::TestDescAndFn { - desc: test::TestDesc { - name: test::DynTestName(name), - ignore: match test.langstr.ignore { - Ignore::All => true, - Ignore::None => false, - Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)), - }, - ignore_message: None, - source_file: "", - start_line: 0, - start_col: 0, - end_line: 0, - end_col: 0, - // compiler failures are test failures - should_panic: test::ShouldPanic::No, - compile_fail: test.langstr.compile_fail, - no_run: test.no_run(&rustdoc_options), - test_type: test::TestType::DocTest, + let edition = scraped_test.edition(&self.rustdoc_options); + let doctest = DocTestBuilder::new( + &scraped_test.text, + Some(&self.opts.crate_name), + edition, + self.can_merge_doctests, + Some(test_id), + Some(&scraped_test.langstr), + ); + let is_standalone = !doctest.can_be_merged + || scraped_test.langstr.compile_fail + || scraped_test.langstr.test_harness + || scraped_test.langstr.standalone + || self.rustdoc_options.nocapture + || self.rustdoc_options.test_args.iter().any(|arg| arg == "--show-output"); + if is_standalone { + let test_desc = self.generate_test_desc_and_fn(doctest, scraped_test); + self.standalone_tests.push(test_desc); + } else { + self.mergeable_tests.entry(edition).or_default().push((doctest, scraped_test)); + } + } + + fn generate_test_desc_and_fn( + &mut self, + test: DocTestBuilder, + scraped_test: ScrapedDocTest, + ) -> test::TestDescAndFn { + if !scraped_test.langstr.compile_fail { + self.compiling_test_count.fetch_add(1, Ordering::SeqCst); + } + + generate_test_desc_and_fn( + test, + scraped_test, + self.opts.clone(), + Arc::clone(&self.rustdoc_options), + self.unused_extern_reports.clone(), + ) + } +} + +fn generate_test_desc_and_fn( + test: DocTestBuilder, + scraped_test: ScrapedDocTest, + opts: GlobalTestOptions, + rustdoc_options: Arc<RustdocOptions>, + unused_externs: Arc<Mutex<Vec<UnusedExterns>>>, +) -> test::TestDescAndFn { + let target_str = rustdoc_options.target.to_string(); + let rustdoc_test_options = + IndividualTestOptions::new(&rustdoc_options, &test.test_id, scraped_test.path()); + + debug!("creating test {}: {}", scraped_test.name, scraped_test.text); + test::TestDescAndFn { + desc: test::TestDesc { + name: test::DynTestName(scraped_test.name.clone()), + ignore: match scraped_test.langstr.ignore { + Ignore::All => true, + Ignore::None => false, + Ignore::Some(ref ignores) => 
ignores.iter().any(|s| target_str.contains(s)), }, - testfn: test::DynTestFn(Box::new(move || { - doctest_run_fn(rustdoc_test_options, opts, test, rustdoc_options, unused_externs) - })), - }); + ignore_message: None, + source_file: "", + start_line: 0, + start_col: 0, + end_line: 0, + end_col: 0, + // compiler failures are test failures + should_panic: test::ShouldPanic::No, + compile_fail: scraped_test.langstr.compile_fail, + no_run: scraped_test.no_run(&rustdoc_options), + test_type: test::TestType::DocTest, + }, + testfn: test::DynTestFn(Box::new(move || { + doctest_run_fn( + rustdoc_test_options, + opts, + test, + scraped_test, + rustdoc_options, + unused_externs, + ) + })), } } fn doctest_run_fn( test_opts: IndividualTestOptions, global_opts: GlobalTestOptions, - scraped_test: ScrapedDoctest, + doctest: DocTestBuilder, + scraped_test: ScrapedDocTest, rustdoc_options: Arc<RustdocOptions>, unused_externs: Arc<Mutex<Vec<UnusedExterns>>>, ) -> Result<(), String> { let report_unused_externs = |uext| { unused_externs.lock().unwrap().push(uext); }; - let edition = scraped_test.edition(&rustdoc_options); - let (full_test_code, full_test_line_offset, supports_color) = make_test( + let (full_test_code, full_test_line_offset) = doctest.generate_unique_doctest( &scraped_test.text, - Some(&global_opts.crate_name), scraped_test.langstr.test_harness, &global_opts, - edition, - Some(&test_opts.test_id), + Some(&global_opts.crate_name), ); - let runnable_test = RunnableDoctest { + let runnable_test = RunnableDocTest { full_test_code, full_test_line_offset, test_opts, global_opts, - scraped_test, + langstr: scraped_test.langstr.clone(), + line: scraped_test.line, + edition: scraped_test.edition(&rustdoc_options), + no_run: scraped_test.no_run(&rustdoc_options), + is_multiple_tests: false, }; - let res = run_test(runnable_test, &rustdoc_options, supports_color, report_unused_externs); + let res = + run_test(runnable_test, &rustdoc_options, doctest.supports_color, report_unused_externs); if let Err(err) = res { match err { @@ -804,7 +993,7 @@ fn doctest_run_fn( } #[cfg(test)] // used in tests -impl DoctestVisitor for Vec<usize> { +impl DocTestVisitor for Vec<usize> { fn visit_test(&mut self, _test: String, _config: LangString, rel_line: MdRelLine) { self.push(1 + rel_line.offset()); } diff --git a/src/librustdoc/doctest/make.rs b/src/librustdoc/doctest/make.rs index 74833c11362..aed079e5887 100644 --- a/src/librustdoc/doctest/make.rs +++ b/src/librustdoc/doctest/make.rs @@ -16,250 +16,428 @@ use rustc_span::symbol::sym; use rustc_span::FileName; use super::GlobalTestOptions; +use crate::html::markdown::LangString; + +/// This struct contains information about the doctest itself which is then used to generate +/// doctest source code appropriately. +pub(crate) struct DocTestBuilder { + pub(crate) supports_color: bool, + pub(crate) already_has_extern_crate: bool, + pub(crate) has_main_fn: bool, + pub(crate) crate_attrs: String, + /// If this is a merged doctest, it will be put into `everything_else`, otherwise it will + /// put into `crate_attrs`. + pub(crate) maybe_crate_attrs: String, + pub(crate) crates: String, + pub(crate) everything_else: String, + pub(crate) test_id: Option<String>, + pub(crate) failed_ast: bool, + pub(crate) can_be_merged: bool, +} -/// Transforms a test into code that can be compiled into a Rust binary, and returns the number of -/// lines before the test code begins as well as if the output stream supports colors or not. 
-pub(crate) fn make_test( - s: &str, - crate_name: Option<&str>, - dont_insert_main: bool, - opts: &GlobalTestOptions, - edition: Edition, - test_id: Option<&str>, -) -> (String, usize, bool) { - let (crate_attrs, everything_else, crates) = partition_source(s, edition); - let everything_else = everything_else.trim(); - let mut line_offset = 0; - let mut prog = String::new(); - let mut supports_color = false; - - if opts.attrs.is_empty() { - // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some - // lints that are commonly triggered in doctests. The crate-level test attributes are - // commonly used to make tests fail in case they trigger warnings, so having this there in - // that case may cause some tests to pass when they shouldn't have. - prog.push_str("#![allow(unused)]\n"); - line_offset += 1; +impl DocTestBuilder { + pub(crate) fn new( + source: &str, + crate_name: Option<&str>, + edition: Edition, + can_merge_doctests: bool, + // If `test_id` is `None`, it means we're generating code for a code example "run" link. + test_id: Option<String>, + lang_str: Option<&LangString>, + ) -> Self { + let can_merge_doctests = can_merge_doctests + && lang_str.is_some_and(|lang_str| { + !lang_str.compile_fail && !lang_str.test_harness && !lang_str.standalone + }); + + let SourceInfo { crate_attrs, maybe_crate_attrs, crates, everything_else } = + partition_source(source, edition); + + // Uses librustc_ast to parse the doctest and find if there's a main fn and the extern + // crate already is included. + let Ok(( + ParseSourceInfo { + has_main_fn, + found_extern_crate, + supports_color, + has_global_allocator, + has_macro_def, + .. + }, + failed_ast, + )) = check_for_main_and_extern_crate( + crate_name, + source, + &everything_else, + &crates, + edition, + can_merge_doctests, + ) + else { + // If the parser panicked due to a fatal error, pass the test code through unchanged. + // The error will be reported during compilation. + return Self { + supports_color: false, + has_main_fn: false, + crate_attrs, + maybe_crate_attrs, + crates, + everything_else, + already_has_extern_crate: false, + test_id, + failed_ast: true, + can_be_merged: false, + }; + }; + // If the AST returned an error, we don't want this doctest to be merged with the + // others. Same if it contains `#[feature]` or `#[no_std]`. + let can_be_merged = can_merge_doctests + && !failed_ast + && !has_global_allocator + && crate_attrs.is_empty() + // If this is a merged doctest and a defined macro uses `$crate`, then the path will + // not work, so better not put it into merged doctests. + && !(has_macro_def && everything_else.contains("$crate")); + Self { + supports_color, + has_main_fn, + crate_attrs, + maybe_crate_attrs, + crates, + everything_else, + already_has_extern_crate: found_extern_crate, + test_id, + failed_ast: false, + can_be_merged, + } } - // Next, any attributes that came from the crate root via #![doc(test(attr(...)))]. - for attr in &opts.attrs { - prog.push_str(&format!("#![{attr}]\n")); - line_offset += 1; - } + /// Transforms a test into code that can be compiled into a Rust binary, and returns the number of + /// lines before the test code begins. + pub(crate) fn generate_unique_doctest( + &self, + test_code: &str, + dont_insert_main: bool, + opts: &GlobalTestOptions, + crate_name: Option<&str>, + ) -> (String, usize) { + if self.failed_ast { + // If the AST failed to compile, no need to go generate a complete doctest, the error + // will be better this way. 
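+            // Returning the unmodified test code with a line offset of 0 lets the real compilation report the error directly against the user's snippet.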
+ return (test_code.to_string(), 0); + } + let mut line_offset = 0; + let mut prog = String::new(); + let everything_else = self.everything_else.trim(); + if opts.attrs.is_empty() { + // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some + // lints that are commonly triggered in doctests. The crate-level test attributes are + // commonly used to make tests fail in case they trigger warnings, so having this there in + // that case may cause some tests to pass when they shouldn't have. + prog.push_str("#![allow(unused)]\n"); + line_offset += 1; + } - // Now push any outer attributes from the example, assuming they - // are intended to be crate attributes. - prog.push_str(&crate_attrs); - prog.push_str(&crates); - - // Uses librustc_ast to parse the doctest and find if there's a main fn and the extern - // crate already is included. - let Ok((already_has_main, already_has_extern_crate)) = - check_for_main_and_extern_crate(crate_name, s.to_owned(), edition, &mut supports_color) - else { - // If the parser panicked due to a fatal error, pass the test code through unchanged. - // The error will be reported during compilation. - return (s.to_owned(), 0, false); - }; + // Next, any attributes that came from the crate root via #![doc(test(attr(...)))]. + for attr in &opts.attrs { + prog.push_str(&format!("#![{attr}]\n")); + line_offset += 1; + } - // Don't inject `extern crate std` because it's already injected by the - // compiler. - if !already_has_extern_crate && !opts.no_crate_inject && crate_name != Some("std") { - if let Some(crate_name) = crate_name { + // Now push any outer attributes from the example, assuming they + // are intended to be crate attributes. + prog.push_str(&self.crate_attrs); + prog.push_str(&self.maybe_crate_attrs); + prog.push_str(&self.crates); + + // Don't inject `extern crate std` because it's already injected by the + // compiler. + if !self.already_has_extern_crate && + !opts.no_crate_inject && + let Some(crate_name) = crate_name && + crate_name != "std" && // Don't inject `extern crate` if the crate is never used. // NOTE: this is terribly inaccurate because it doesn't actually // parse the source, but only has false positives, not false // negatives. - if s.contains(crate_name) { - // rustdoc implicitly inserts an `extern crate` item for the own crate - // which may be unused, so we need to allow the lint. - prog.push_str("#[allow(unused_extern_crates)]\n"); - - prog.push_str(&format!("extern crate r#{crate_name};\n")); - line_offset += 1; - } + test_code.contains(crate_name) + { + // rustdoc implicitly inserts an `extern crate` item for the own crate + // which may be unused, so we need to allow the lint. + prog.push_str("#[allow(unused_extern_crates)]\n"); + + prog.push_str(&format!("extern crate r#{crate_name};\n")); + line_offset += 1; } - } - // FIXME: This code cannot yet handle no_std test cases yet - if dont_insert_main || already_has_main || prog.contains("![no_std]") { - prog.push_str(everything_else); - } else { - let returns_result = everything_else.trim_end().ends_with("(())"); - // Give each doctest main function a unique name. - // This is for example needed for the tooling around `-C instrument-coverage`. 
- let inner_fn_name = if let Some(test_id) = test_id { - format!("_doctest_main_{test_id}") + // FIXME: This code cannot yet handle no_std test cases yet + if dont_insert_main || self.has_main_fn || prog.contains("![no_std]") { + prog.push_str(everything_else); } else { - "_inner".into() - }; - let inner_attr = if test_id.is_some() { "#[allow(non_snake_case)] " } else { "" }; - let (main_pre, main_post) = if returns_result { - ( - format!( - "fn main() {{ {inner_attr}fn {inner_fn_name}() -> Result<(), impl core::fmt::Debug> {{\n", - ), - format!("\n}} {inner_fn_name}().unwrap() }}"), - ) - } else if test_id.is_some() { - ( - format!("fn main() {{ {inner_attr}fn {inner_fn_name}() {{\n",), - format!("\n}} {inner_fn_name}() }}"), - ) - } else { - ("fn main() {\n".into(), "\n}".into()) - }; - // Note on newlines: We insert a line/newline *before*, and *after* - // the doctest and adjust the `line_offset` accordingly. - // In the case of `-C instrument-coverage`, this means that the generated - // inner `main` function spans from the doctest opening codeblock to the - // closing one. For example - // /// ``` <- start of the inner main - // /// <- code under doctest - // /// ``` <- end of the inner main - line_offset += 1; - - // add extra 4 spaces for each line to offset the code block - let content = if opts.insert_indent_space { - everything_else - .lines() - .map(|line| format!(" {}", line)) - .collect::<Vec<String>>() - .join("\n") - } else { - everything_else.to_string() - }; - prog.extend([&main_pre, content.as_str(), &main_post].iter().cloned()); - } + let returns_result = everything_else.ends_with("(())"); + // Give each doctest main function a unique name. + // This is for example needed for the tooling around `-C instrument-coverage`. + let inner_fn_name = if let Some(ref test_id) = self.test_id { + format!("_doctest_main_{test_id}") + } else { + "_inner".into() + }; + let inner_attr = if self.test_id.is_some() { "#[allow(non_snake_case)] " } else { "" }; + let (main_pre, main_post) = if returns_result { + ( + format!( + "fn main() {{ {inner_attr}fn {inner_fn_name}() -> Result<(), impl core::fmt::Debug> {{\n", + ), + format!("\n}} {inner_fn_name}().unwrap() }}"), + ) + } else if self.test_id.is_some() { + ( + format!("fn main() {{ {inner_attr}fn {inner_fn_name}() {{\n",), + format!("\n}} {inner_fn_name}() }}"), + ) + } else { + ("fn main() {\n".into(), "\n}".into()) + }; + // Note on newlines: We insert a line/newline *before*, and *after* + // the doctest and adjust the `line_offset` accordingly. + // In the case of `-C instrument-coverage`, this means that the generated + // inner `main` function spans from the doctest opening codeblock to the + // closing one. 
For example + // /// ``` <- start of the inner main + // /// <- code under doctest + // /// ``` <- end of the inner main + line_offset += 1; + + prog.push_str(&main_pre); + + // add extra 4 spaces for each line to offset the code block + if opts.insert_indent_space { + prog.push_str( + &everything_else + .lines() + .map(|line| format!(" {}", line)) + .collect::<Vec<String>>() + .join("\n"), + ); + } else { + prog.push_str(everything_else); + }; + prog.push_str(&main_post); + } - debug!("final doctest:\n{prog}"); + debug!("final doctest:\n{prog}"); - (prog, line_offset, supports_color) + (prog, line_offset) + } } -fn check_for_main_and_extern_crate( - crate_name: Option<&str>, - source: String, - edition: Edition, - supports_color: &mut bool, -) -> Result<(bool, bool), FatalError> { - let result = rustc_driver::catch_fatal_errors(|| { - rustc_span::create_session_if_not_set_then(edition, |_| { - use rustc_errors::emitter::{Emitter, HumanEmitter}; - use rustc_errors::DiagCtxt; - use rustc_parse::parser::ForceCollect; - use rustc_span::source_map::FilePathMapping; - - let filename = FileName::anon_source_code(&source); - - // Any errors in parsing should also appear when the doctest is compiled for real, so just - // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr. - let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let fallback_bundle = rustc_errors::fallback_fluent_bundle( - rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), - false, - ); - *supports_color = - HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle.clone()) - .supports_color(); +#[derive(PartialEq, Eq, Debug)] +enum ParsingResult { + Failed, + AstError, + Ok, +} - let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle); +fn cancel_error_count(psess: &ParseSess) { + // Reset errors so that they won't be reported as compiler bugs when dropping the + // dcx. Any errors in the tests will be reported when the test file is compiled, + // Note that we still need to cancel the errors above otherwise `Diag` will panic on + // drop. + psess.dcx().reset_err_count(); +} - // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser - let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings(); - let psess = ParseSess::with_dcx(dcx, sm); +fn parse_source( + source: String, + info: &mut ParseSourceInfo, + crate_name: &Option<&str>, +) -> ParsingResult { + use rustc_errors::emitter::{Emitter, HumanEmitter}; + use rustc_errors::DiagCtxt; + use rustc_parse::parser::ForceCollect; + use rustc_span::source_map::FilePathMapping; + + let filename = FileName::anon_source_code(&source); + + // Any errors in parsing should also appear when the doctest is compiled for real, so just + // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr. 
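+    // A throwaway stderr emitter is created first solely to find out whether the output stream supports color; the emitter actually installed below writes to a sink.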
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + let fallback_bundle = rustc_errors::fallback_fluent_bundle( + rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), + false, + ); + info.supports_color = + HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle.clone()) + .supports_color(); + + let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle); + + // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser + let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings(); + let psess = ParseSess::with_dcx(dcx, sm); + + let mut parser = match new_parser_from_source_str(&psess, filename, source) { + Ok(p) => p, + Err(errs) => { + errs.into_iter().for_each(|err| err.cancel()); + cancel_error_count(&psess); + return ParsingResult::Failed; + } + }; + let mut parsing_result = ParsingResult::Ok; + + // Recurse through functions body. It is necessary because the doctest source code is + // wrapped in a function to limit the number of AST errors. If we don't recurse into + // functions, we would thing all top-level items (so basically nothing). + fn check_item(item: &ast::Item, info: &mut ParseSourceInfo, crate_name: &Option<&str>) { + if !info.has_global_allocator + && item.attrs.iter().any(|attr| attr.name_or_empty() == sym::global_allocator) + { + info.has_global_allocator = true; + } + match item.kind { + ast::ItemKind::Fn(ref fn_item) if !info.has_main_fn => { + if item.ident.name == sym::main { + info.has_main_fn = true; + } + if let Some(ref body) = fn_item.body { + for stmt in &body.stmts { + match stmt.kind { + ast::StmtKind::Item(ref item) => check_item(item, info, crate_name), + ast::StmtKind::MacCall(..) => info.found_macro = true, + _ => {} + } + } + } + } + ast::ItemKind::ExternCrate(original) => { + if !info.found_extern_crate + && let Some(ref crate_name) = crate_name + { + info.found_extern_crate = match original { + Some(name) => name.as_str() == *crate_name, + None => item.ident.as_str() == *crate_name, + }; + } + } + ast::ItemKind::MacCall(..) => info.found_macro = true, + ast::ItemKind::MacroDef(..) => info.has_macro_def = true, + _ => {} + } + } - let mut found_main = false; - let mut found_extern_crate = crate_name.is_none(); - let mut found_macro = false; + loop { + match parser.parse_item(ForceCollect::No) { + Ok(Some(item)) => { + check_item(&item, info, crate_name); - let mut parser = match new_parser_from_source_str(&psess, filename, source.clone()) { - Ok(p) => p, - Err(errs) => { - errs.into_iter().for_each(|err| err.cancel()); - return (found_main, found_extern_crate, found_macro); + if info.has_main_fn && info.found_extern_crate { + break; } - }; - - loop { - match parser.parse_item(ForceCollect::No) { - Ok(Some(item)) => { - if !found_main - && let ast::ItemKind::Fn(..) = item.kind - && item.ident.name == sym::main - { - found_main = true; - } + } + Ok(None) => break, + Err(e) => { + parsing_result = ParsingResult::AstError; + e.cancel(); + break; + } + } - if !found_extern_crate - && let ast::ItemKind::ExternCrate(original) = item.kind - { - // This code will never be reached if `crate_name` is none because - // `found_extern_crate` is initialized to `true` if it is none. - let crate_name = crate_name.unwrap(); - - match original { - Some(name) => found_extern_crate = name.as_str() == crate_name, - None => found_extern_crate = item.ident.as_str() == crate_name, - } - } + // The supplied item is only used for diagnostics, + // which are swallowed here anyway. 
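+        // Consume any stray semicolon between items so that this recoverable mistake does not stop the loop from parsing the rest of the doctest.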
+ parser.maybe_consume_incorrect_semicolon(None); + } - if !found_macro && let ast::ItemKind::MacCall(..) = item.kind { - found_macro = true; - } + cancel_error_count(&psess); + parsing_result +} - if found_main && found_extern_crate { - break; - } - } - Ok(None) => break, - Err(e) => { - e.cancel(); - break; - } - } +#[derive(Default)] +struct ParseSourceInfo { + has_main_fn: bool, + found_extern_crate: bool, + found_macro: bool, + supports_color: bool, + has_global_allocator: bool, + has_macro_def: bool, +} - // The supplied item is only used for diagnostics, - // which are swallowed here anyway. - parser.maybe_consume_incorrect_semicolon(None); +fn check_for_main_and_extern_crate( + crate_name: Option<&str>, + original_source_code: &str, + everything_else: &str, + crates: &str, + edition: Edition, + can_merge_doctests: bool, +) -> Result<(ParseSourceInfo, bool), FatalError> { + let result = rustc_driver::catch_fatal_errors(|| { + rustc_span::create_session_if_not_set_then(edition, |_| { + let mut info = + ParseSourceInfo { found_extern_crate: crate_name.is_none(), ..Default::default() }; + + let mut parsing_result = + parse_source(format!("{crates}{everything_else}"), &mut info, &crate_name); + // No need to double-check this if the "merged doctests" feature isn't enabled (so + // before the 2024 edition). + if can_merge_doctests && parsing_result != ParsingResult::Ok { + // If we found an AST error, we want to ensure it's because of an expression being + // used outside of a function. + // + // To do so, we wrap in a function in order to make sure that the doctest AST is + // correct. For example, if your doctest is `foo::bar()`, if we don't wrap it in a + // block, it would emit an AST error, which would be problematic for us since we + // want to filter out such errors which aren't "real" errors. + // + // The end goal is to be able to merge as many doctests as possible as one for much + // faster doctests run time. + parsing_result = parse_source( + format!("{crates}\nfn __doctest_wrap(){{{everything_else}\n}}"), + &mut info, + &crate_name, + ); } - // Reset errors so that they won't be reported as compiler bugs when dropping the - // dcx. Any errors in the tests will be reported when the test file is compiled, - // Note that we still need to cancel the errors above otherwise `Diag` will panic on - // drop. - psess.dcx().reset_err_count(); - - (found_main, found_extern_crate, found_macro) + (info, parsing_result) }) }); - let (already_has_main, already_has_extern_crate, found_macro) = result?; + let (mut info, parsing_result) = match result { + Err(..) | Ok((_, ParsingResult::Failed)) => return Err(FatalError), + Ok((info, parsing_result)) => (info, parsing_result), + }; // If a doctest's `fn main` is being masked by a wrapper macro, the parsing loop above won't // see it. In that case, run the old text-based scan to see if they at least have a main // function written inside a macro invocation. 
See // https://github.com/rust-lang/rust/issues/56898 - let already_has_main = if found_macro && !already_has_main { - source + if info.found_macro + && !info.has_main_fn + && original_source_code .lines() .map(|line| { let comment = line.find("//"); if let Some(comment_begins) = comment { &line[0..comment_begins] } else { line } }) .any(|code| code.contains("fn main")) - } else { - already_has_main - }; + { + info.has_main_fn = true; + } - Ok((already_has_main, already_has_extern_crate)) + Ok((info, parsing_result != ParsingResult::Ok)) } -fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { +enum AttrKind { + CrateAttr, + Attr, +} + +/// Returns `Some` if the attribute is complete and `Some(true)` if it is an attribute that can be +/// placed at the crate root. +fn check_if_attr_is_complete(source: &str, edition: Edition) -> Option<AttrKind> { if source.is_empty() { // Empty content so nothing to check in here... - return true; + return None; } + let not_crate_attrs = [sym::forbid, sym::allow, sym::warn, sym::deny]; + rustc_driver::catch_fatal_errors(|| { rustc_span::create_session_if_not_set_then(edition, |_| { use rustc_errors::emitter::HumanEmitter; @@ -285,32 +463,75 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { errs.into_iter().for_each(|err| err.cancel()); // If there is an unclosed delimiter, an error will be returned by the // tokentrees. - return false; + return None; } }; // If a parsing error happened, it's very likely that the attribute is incomplete. - if let Err(e) = parser.parse_attribute(InnerAttrPolicy::Permitted) { - e.cancel(); - return false; - } - true + let ret = match parser.parse_attribute(InnerAttrPolicy::Permitted) { + Ok(attr) => { + let attr_name = attr.name_or_empty(); + + if not_crate_attrs.contains(&attr_name) { + // There is one exception to these attributes: + // `#![allow(internal_features)]`. If this attribute is used, we need to + // consider it only as a crate-level attribute. + if attr_name == sym::allow + && let Some(list) = attr.meta_item_list() + && list.iter().any(|sub_attr| { + sub_attr.name_or_empty().as_str() == "internal_features" + }) + { + Some(AttrKind::CrateAttr) + } else { + Some(AttrKind::Attr) + } + } else { + Some(AttrKind::CrateAttr) + } + } + Err(e) => { + e.cancel(); + None + } + }; + ret }) }) - .unwrap_or(false) + .unwrap_or(None) +} + +fn handle_attr(mod_attr_pending: &mut String, source_info: &mut SourceInfo, edition: Edition) { + if let Some(attr_kind) = check_if_attr_is_complete(mod_attr_pending, edition) { + let push_to = match attr_kind { + AttrKind::CrateAttr => &mut source_info.crate_attrs, + AttrKind::Attr => &mut source_info.maybe_crate_attrs, + }; + push_to.push_str(mod_attr_pending); + push_to.push('\n'); + // If it's complete, then we can clear the pending content. 
+ mod_attr_pending.clear(); + } else if mod_attr_pending.ends_with('\\') { + mod_attr_pending.push('n'); + } +} + +#[derive(Default)] +struct SourceInfo { + crate_attrs: String, + maybe_crate_attrs: String, + crates: String, + everything_else: String, } -fn partition_source(s: &str, edition: Edition) -> (String, String, String) { +fn partition_source(s: &str, edition: Edition) -> SourceInfo { #[derive(Copy, Clone, PartialEq)] enum PartitionState { Attrs, Crates, Other, } + let mut source_info = SourceInfo::default(); let mut state = PartitionState::Attrs; - let mut before = String::new(); - let mut crates = String::new(); - let mut after = String::new(); - let mut mod_attr_pending = String::new(); for line in s.lines() { @@ -321,12 +542,9 @@ fn partition_source(s: &str, edition: Edition) -> (String, String, String) { match state { PartitionState::Attrs => { state = if trimline.starts_with("#![") { - if !check_if_attr_is_complete(line, edition) { - mod_attr_pending = line.to_owned(); - } else { - mod_attr_pending.clear(); - } - PartitionState::Attrs + mod_attr_pending = line.to_owned(); + handle_attr(&mut mod_attr_pending, &mut source_info, edition); + continue; } else if trimline.chars().all(|c| c.is_whitespace()) || (trimline.starts_with("//") && !trimline.starts_with("///")) { @@ -341,15 +559,10 @@ fn partition_source(s: &str, edition: Edition) -> (String, String, String) { // If not, then we append the new line into the pending attribute to check // if this time it's complete... mod_attr_pending.push_str(line); - if !trimline.is_empty() - && check_if_attr_is_complete(&mod_attr_pending, edition) - { - // If it's complete, then we can clear the pending content. - mod_attr_pending.clear(); + if !trimline.is_empty() { + handle_attr(&mut mod_attr_pending, &mut source_info, edition); } - // In any case, this is considered as `PartitionState::Attrs` so it's - // prepended before rustdoc's inserts. - PartitionState::Attrs + continue; } else { PartitionState::Other } @@ -371,23 +584,25 @@ fn partition_source(s: &str, edition: Edition) -> (String, String, String) { match state { PartitionState::Attrs => { - before.push_str(line); - before.push('\n'); + source_info.crate_attrs.push_str(line); + source_info.crate_attrs.push('\n'); } PartitionState::Crates => { - crates.push_str(line); - crates.push('\n'); + source_info.crates.push_str(line); + source_info.crates.push('\n'); } PartitionState::Other => { - after.push_str(line); - after.push('\n'); + source_info.everything_else.push_str(line); + source_info.everything_else.push('\n'); } } } - debug!("before:\n{before}"); - debug!("crates:\n{crates}"); - debug!("after:\n{after}"); + source_info.everything_else = source_info.everything_else.trim().to_string(); + + debug!("crate_attrs:\n{}{}", source_info.crate_attrs, source_info.maybe_crate_attrs); + debug!("crates:\n{}", source_info.crates); + debug!("after:\n{}", source_info.everything_else); - (before, after, crates) + source_info } diff --git a/src/librustdoc/doctest/markdown.rs b/src/librustdoc/doctest/markdown.rs index b8ab7adb36e..4806d865589 100644 --- a/src/librustdoc/doctest/markdown.rs +++ b/src/librustdoc/doctest/markdown.rs @@ -1,34 +1,29 @@ //! Doctest functionality used only for doctests in `.md` Markdown files. 
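To make the new `SourceInfo` buckets produced by `partition_source` above concrete, here is a deliberately simplified, standalone sketch of the partitioning idea. It ignores the attribute-completeness tracking, the `maybe_crate_attrs` bucket, and the comment handling that the real implementation deals with, and the function name is illustrative only:

    fn partition(source: &str) -> (String, String, String) {
        let (mut crate_attrs, mut crates, mut everything_else) =
            (String::new(), String::new(), String::new());
        for line in source.lines() {
            let trimmed = line.trim();
            if trimmed.starts_with("#![") {
                // e.g. `#![feature(...)]`: hoisted in front of rustdoc's inserts
                crate_attrs.push_str(line);
                crate_attrs.push('\n');
            } else if trimmed.starts_with("extern crate")
                || trimmed.starts_with("#[macro_use] extern crate")
            {
                crates.push_str(line);
                crates.push('\n');
            } else {
                everything_else.push_str(line);
                everything_else.push('\n');
            }
        }
        // The patch trims the final bucket before storing it.
        (crate_attrs, crates, everything_else.trim().to_string())
    }

    fn main() {
        let doctest = "#![allow(unused)]\nextern crate foo;\nassert_eq!(2 + 2, 4);";
        let (attrs, crates, body) = partition(doctest);
        assert_eq!(attrs, "#![allow(unused)]\n");
        assert_eq!(crates, "extern crate foo;\n");
        assert_eq!(body, "assert_eq!(2 + 2, 4);");
    }
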
use std::fs::read_to_string; +use std::sync::{Arc, Mutex}; use rustc_span::FileName; use tempfile::tempdir; use super::{ - generate_args_file, CreateRunnableDoctests, DoctestVisitor, GlobalTestOptions, ScrapedDoctest, + generate_args_file, CreateRunnableDocTests, DocTestVisitor, GlobalTestOptions, ScrapedDocTest, }; use crate::config::Options; use crate::html::markdown::{find_testable_code, ErrorCodes, LangString, MdRelLine}; struct MdCollector { - tests: Vec<ScrapedDoctest>, + tests: Vec<ScrapedDocTest>, cur_path: Vec<String>, filename: FileName, } -impl DoctestVisitor for MdCollector { +impl DocTestVisitor for MdCollector { fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine) { let filename = self.filename.clone(); // First line of Markdown is line 1. let line = 1 + rel_line.offset(); - self.tests.push(ScrapedDoctest { - filename, - line, - logical_path: self.cur_path.clone(), - langstr: config, - text: test, - }); + self.tests.push(ScrapedDocTest::new(filename, line, self.cur_path.clone(), config, test)); } fn visit_header(&mut self, name: &str, level: u32) { @@ -118,8 +113,16 @@ pub(crate) fn test(options: Options) -> Result<(), String> { None, ); - let mut collector = CreateRunnableDoctests::new(options.clone(), opts); + let mut collector = CreateRunnableDocTests::new(options.clone(), opts); md_collector.tests.into_iter().for_each(|t| collector.add_test(t)); - crate::doctest::run_tests(options.test_args, options.nocapture, collector.tests); + let CreateRunnableDocTests { opts, rustdoc_options, standalone_tests, mergeable_tests, .. } = + collector; + crate::doctest::run_tests( + opts, + &rustdoc_options, + &Arc::new(Mutex::new(Vec::new())), + standalone_tests, + mergeable_tests, + ); Ok(()) } diff --git a/src/librustdoc/doctest/runner.rs b/src/librustdoc/doctest/runner.rs new file mode 100644 index 00000000000..b91333e5f81 --- /dev/null +++ b/src/librustdoc/doctest/runner.rs @@ -0,0 +1,269 @@ +use std::fmt::Write; + +use rustc_data_structures::fx::FxHashSet; +use rustc_span::edition::Edition; + +use crate::doctest::{ + run_test, DocTestBuilder, GlobalTestOptions, IndividualTestOptions, RunnableDocTest, + RustdocOptions, ScrapedDocTest, TestFailure, UnusedExterns, +}; +use crate::html::markdown::{Ignore, LangString}; + +/// Convenient type to merge compatible doctests into one. 
+pub(crate) struct DocTestRunner { + crate_attrs: FxHashSet<String>, + ids: String, + output: String, + supports_color: bool, + nb_tests: usize, +} + +impl DocTestRunner { + pub(crate) fn new() -> Self { + Self { + crate_attrs: FxHashSet::default(), + ids: String::new(), + output: String::new(), + supports_color: true, + nb_tests: 0, + } + } + + pub(crate) fn add_test( + &mut self, + doctest: &DocTestBuilder, + scraped_test: &ScrapedDocTest, + target_str: &str, + ) { + let ignore = match scraped_test.langstr.ignore { + Ignore::All => true, + Ignore::None => false, + Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)), + }; + if !ignore { + for line in doctest.crate_attrs.split('\n') { + self.crate_attrs.insert(line.to_string()); + } + } + if !self.ids.is_empty() { + self.ids.push(','); + } + self.ids.push_str(&format!( + "{}::TEST", + generate_mergeable_doctest( + doctest, + scraped_test, + ignore, + self.nb_tests, + &mut self.output + ), + )); + self.supports_color &= doctest.supports_color; + self.nb_tests += 1; + } + + pub(crate) fn run_merged_tests( + &mut self, + test_options: IndividualTestOptions, + edition: Edition, + opts: &GlobalTestOptions, + test_args: &[String], + rustdoc_options: &RustdocOptions, + ) -> Result<bool, ()> { + let mut code = "\ +#![allow(unused_extern_crates)] +#![allow(internal_features)] +#![feature(test)] +#![feature(rustc_attrs)] +#![feature(coverage_attribute)] +" + .to_string(); + + for crate_attr in &self.crate_attrs { + code.push_str(crate_attr); + code.push('\n'); + } + + if opts.attrs.is_empty() { + // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some + // lints that are commonly triggered in doctests. The crate-level test attributes are + // commonly used to make tests fail in case they trigger warnings, so having this there in + // that case may cause some tests to pass when they shouldn't have. + code.push_str("#![allow(unused)]\n"); + } + + // Next, any attributes that came from the crate root via #![doc(test(attr(...)))]. 
+ for attr in &opts.attrs { + code.push_str(&format!("#![{attr}]\n")); + } + + code.push_str("extern crate test;\n"); + + let test_args = + test_args.iter().map(|arg| format!("{arg:?}.to_string(),")).collect::<String>(); + write!( + code, + "\ +{output} + +mod __doctest_mod {{ + use std::sync::OnceLock; + use std::path::PathBuf; + + pub static BINARY_PATH: OnceLock<PathBuf> = OnceLock::new(); + pub const RUN_OPTION: &str = \"*doctest-inner-test\"; + pub const BIN_OPTION: &str = \"*doctest-bin-path\"; + + #[allow(unused)] + pub fn doctest_path() -> Option<&'static PathBuf> {{ + self::BINARY_PATH.get() + }} + + #[allow(unused)] + pub fn doctest_runner(bin: &std::path::Path, test_nb: usize) -> Result<(), String> {{ + let out = std::process::Command::new(bin) + .arg(self::RUN_OPTION) + .arg(test_nb.to_string()) + .output() + .expect(\"failed to run command\"); + if !out.status.success() {{ + Err(String::from_utf8_lossy(&out.stderr).to_string()) + }} else {{ + Ok(()) + }} + }} +}} + +#[rustc_main] +#[coverage(off)] +fn main() -> std::process::ExitCode {{ +const TESTS: [test::TestDescAndFn; {nb_tests}] = [{ids}]; +let bin_marker = std::ffi::OsStr::new(__doctest_mod::BIN_OPTION); +let test_marker = std::ffi::OsStr::new(__doctest_mod::RUN_OPTION); +let test_args = &[{test_args}]; + +let mut args = std::env::args_os().skip(1); +while let Some(arg) = args.next() {{ + if arg == bin_marker {{ + let Some(binary) = args.next() else {{ + panic!(\"missing argument after `{{}}`\", __doctest_mod::BIN_OPTION); + }}; + if crate::__doctest_mod::BINARY_PATH.set(binary.into()).is_err() {{ + panic!(\"`{{}}` option was used more than once\", bin_marker.to_string_lossy()); + }} + return std::process::Termination::report(test::test_main(test_args, Vec::from(TESTS), None)); + }} else if arg == test_marker {{ + let Some(nb_test) = args.next() else {{ + panic!(\"missing argument after `{{}}`\", __doctest_mod::RUN_OPTION); + }}; + if let Some(nb_test) = nb_test.to_str().and_then(|nb| nb.parse::<usize>().ok()) {{ + if let Some(test) = TESTS.get(nb_test) {{ + if let test::StaticTestFn(f) = test.testfn {{ + return std::process::Termination::report(f()); + }} + }} + }} + panic!(\"Unexpected value after `{{}}`\", __doctest_mod::RUN_OPTION); + }} +}} + +eprintln!(\"WARNING: No argument provided so doctests will be run in the same process\"); +std::process::Termination::report(test::test_main(test_args, Vec::from(TESTS), None)) +}}", + nb_tests = self.nb_tests, + output = self.output, + ids = self.ids, + ) + .expect("failed to generate test code"); + let runnable_test = RunnableDocTest { + full_test_code: code, + full_test_line_offset: 0, + test_opts: test_options, + global_opts: opts.clone(), + langstr: LangString::default(), + line: 0, + edition, + no_run: false, + is_multiple_tests: true, + }; + let ret = + run_test(runnable_test, rustdoc_options, self.supports_color, |_: UnusedExterns| {}); + if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) } + } +} + +/// Push new doctest content into `output`. Returns the test ID for this doctest. +fn generate_mergeable_doctest( + doctest: &DocTestBuilder, + scraped_test: &ScrapedDocTest, + ignore: bool, + id: usize, + output: &mut String, +) -> String { + let test_id = format!("__doctest_{id}"); + + if ignore { + // We generate nothing else. 
+ writeln!(output, "mod {test_id} {{\n").unwrap(); + } else { + writeln!(output, "mod {test_id} {{\n{}{}", doctest.crates, doctest.maybe_crate_attrs) + .unwrap(); + if scraped_test.langstr.no_run { + // To prevent having warnings about unused items since they're not called. + writeln!(output, "#![allow(unused)]").unwrap(); + } + if doctest.has_main_fn { + output.push_str(&doctest.everything_else); + } else { + let returns_result = if doctest.everything_else.trim_end().ends_with("(())") { + "-> Result<(), impl core::fmt::Debug>" + } else { + "" + }; + write!( + output, + "\ +fn main() {returns_result} {{ +{} +}}", + doctest.everything_else + ) + .unwrap(); + } + } + let not_running = ignore || scraped_test.langstr.no_run; + writeln!( + output, + " +#[rustc_test_marker = {test_name:?}] +pub const TEST: test::TestDescAndFn = test::TestDescAndFn::new_doctest( +{test_name:?}, {ignore}, {file:?}, {line}, {no_run}, {should_panic}, +test::StaticTestFn( + #[coverage(off)] + || {{{runner}}}, +)); +}}", + test_name = scraped_test.name, + file = scraped_test.path(), + line = scraped_test.line, + no_run = scraped_test.langstr.no_run, + should_panic = !scraped_test.langstr.no_run && scraped_test.langstr.should_panic, + // Setting `no_run` to `true` in `TestDesc` still makes the test run, so we simply + // don't give it the function to run. + runner = if not_running { + "test::assert_test_result(Ok::<(), String>(()))".to_string() + } else { + format!( + " +if let Some(bin_path) = crate::__doctest_mod::doctest_path() {{ + test::assert_test_result(crate::__doctest_mod::doctest_runner(bin_path, {id})) +}} else {{ + test::assert_test_result(self::main()) +}} +", + ) + }, + ) + .unwrap(); + test_id +} diff --git a/src/librustdoc/doctest/rust.rs b/src/librustdoc/doctest/rust.rs index f179f3aa1c9..abd66f15dc0 100644 --- a/src/librustdoc/doctest/rust.rs +++ b/src/librustdoc/doctest/rust.rs @@ -14,14 +14,14 @@ use rustc_session::Session; use rustc_span::source_map::SourceMap; use rustc_span::{BytePos, FileName, Pos, Span, DUMMY_SP}; -use super::{DoctestVisitor, ScrapedDoctest}; +use super::{DocTestVisitor, ScrapedDocTest}; use crate::clean::types::AttributesExt; use crate::clean::Attributes; use crate::html::markdown::{self, ErrorCodes, LangString, MdRelLine}; struct RustCollector { source_map: Lrc<SourceMap>, - tests: Vec<ScrapedDoctest>, + tests: Vec<ScrapedDocTest>, cur_path: Vec<String>, position: Span, } @@ -48,16 +48,16 @@ impl RustCollector { } } -impl DoctestVisitor for RustCollector { +impl DocTestVisitor for RustCollector { fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine) { let line = self.get_base_line() + rel_line.offset(); - self.tests.push(ScrapedDoctest { - filename: self.get_filename(), + self.tests.push(ScrapedDocTest::new( + self.get_filename(), line, - logical_path: self.cur_path.clone(), - langstr: config, - text: test, - }); + self.cur_path.clone(), + config, + test, + )); } fn visit_header(&mut self, _name: &str, _level: u32) {} @@ -89,7 +89,7 @@ impl<'a, 'tcx> HirCollector<'a, 'tcx> { Self { sess, map, codes, enable_per_target_ignores, tcx, collector } } - pub fn collect_crate(mut self) -> Vec<ScrapedDoctest> { + pub fn collect_crate(mut self) -> Vec<ScrapedDocTest> { let tcx = self.tcx; self.visit_testable("".to_string(), CRATE_DEF_ID, tcx.hir().span(CRATE_HIR_ID), |this| { tcx.hir().walk_toplevel_module(this) diff --git a/src/librustdoc/doctest/tests.rs b/src/librustdoc/doctest/tests.rs index 0f13ee404c6..160d0f222b4 100644 --- 
a/src/librustdoc/doctest/tests.rs +++ b/src/librustdoc/doctest/tests.rs @@ -2,7 +2,27 @@ use std::path::PathBuf; use rustc_span::edition::DEFAULT_EDITION; -use super::{make_test, GlobalTestOptions}; +use super::{DocTestBuilder, GlobalTestOptions}; + +fn make_test( + test_code: &str, + crate_name: Option<&str>, + dont_insert_main: bool, + opts: &GlobalTestOptions, + test_id: Option<&str>, +) -> (String, usize) { + let doctest = DocTestBuilder::new( + test_code, + crate_name, + DEFAULT_EDITION, + false, + test_id.map(|s| s.to_string()), + None, + ); + let (code, line_offset) = + doctest.generate_unique_doctest(test_code, dont_insert_main, opts, crate_name); + (code, line_offset) +} /// Default [`GlobalTestOptions`] for these unit tests. fn default_global_opts(crate_name: impl Into<String>) -> GlobalTestOptions { @@ -25,7 +45,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -40,7 +60,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -59,7 +79,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 3)); } @@ -76,7 +96,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -94,7 +114,7 @@ use std::*; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("std"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("std"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -113,7 +133,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -130,7 +150,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -150,7 +170,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 3)); // Adding more will also bump the returned line offset. 
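A hedged reading of the `(String, usize)` pair these unit tests destructure: judging by the expected values asserted above, the `usize` is the number of lines rustdoc prepends to the user's code (crate attributes, any injected `extern crate`/`use` line, the synthesized `fn main() {`), presumably so diagnostics can be mapped back to the original doc-comment line. A tiny self-contained check of that reading:

    fn line_offset(prepended_lines: &[&str]) -> usize {
        prepended_lines.len()
    }

    fn main() {
        // Matches the base case in the tests above: a crate attribute plus the
        // synthesized `fn main() {` gives an offset of 2.
        assert_eq!(line_offset(&["#![allow(unused)]", "fn main() {"]), 2);
        // With an injected import the tests expect 3.
        assert_eq!(line_offset(&["#![allow(unused)]", "use asdf::qwop;", "fn main() {"]), 3);
    }
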
@@ -164,7 +184,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 4)); } @@ -181,7 +201,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -197,7 +217,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 1)); } @@ -213,7 +233,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -227,7 +247,7 @@ assert_eq!(2+2, 4);"; //Ceci n'est pas une `fn main` assert_eq!(2+2, 4);" .to_string(); - let (output, len, _) = make_test(input, None, true, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, true, &opts, None); assert_eq!((output, len), (expected, 1)); } @@ -245,7 +265,7 @@ assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -265,7 +285,7 @@ assert_eq!(asdf::foo, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 3)); } @@ -283,7 +303,7 @@ test_wrapper! 
{ }" .to_string(); - let (output, len, _) = make_test(input, Some("my_crate"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("my_crate"), false, &opts, None); assert_eq!((output, len), (expected, 1)); } @@ -303,7 +323,7 @@ io::stdin().read_line(&mut input)?; Ok::<(), io:Error>(()) } _inner().unwrap() }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -317,8 +337,7 @@ fn main() { #[allow(non_snake_case)] fn _doctest_main__some_unique_name() { assert_eq!(2+2, 4); } _doctest_main__some_unique_name() }" .to_string(); - let (output, len, _) = - make_test(input, None, false, &opts, DEFAULT_EDITION, Some("_some_unique_name")); + let (output, len) = make_test(input, None, false, &opts, Some("_some_unique_name")); assert_eq!((output, len), (expected, 2)); } @@ -337,7 +356,7 @@ fn main() { eprintln!(\"hello anan\"); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -357,6 +376,6 @@ fn main() { eprintln!(\"hello anan\"); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 1)); } diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index bb8d39aaf1d..7bfe5d87d39 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -297,14 +297,16 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> { attrs: vec![], args_file: PathBuf::new(), }; - let (test, _, _) = doctest::make_test(&test, krate, false, &opts, edition, None); + let doctest = doctest::DocTestBuilder::new(&test, krate, edition, false, None, None); + let (test, _) = doctest.generate_unique_doctest(&test, false, &opts, krate); let channel = if test.contains("#![feature(") { "&version=nightly" } else { "" }; let test_escaped = small_url_encode(test); Some(format!( "<a class=\"test-arrow\" \ target=\"_blank\" \ - href=\"{url}?code={test_escaped}{channel}&edition={edition}\">Run</a>", + title=\"Run code\" \ + href=\"{url}?code={test_escaped}{channel}&edition={edition}\"></a>", )) }); @@ -736,7 +738,7 @@ impl MdRelLine { } } -pub(crate) fn find_testable_code<T: doctest::DoctestVisitor>( +pub(crate) fn find_testable_code<T: doctest::DocTestVisitor>( doc: &str, tests: &mut T, error_codes: ErrorCodes, @@ -746,7 +748,7 @@ pub(crate) fn find_testable_code<T: doctest::DoctestVisitor>( find_codes(doc, tests, error_codes, enable_per_target_ignores, extra_info, false) } -pub(crate) fn find_codes<T: doctest::DoctestVisitor>( +pub(crate) fn find_codes<T: doctest::DocTestVisitor>( doc: &str, tests: &mut T, error_codes: ErrorCodes, @@ -867,6 +869,7 @@ pub(crate) struct LangString { pub(crate) rust: bool, pub(crate) test_harness: bool, pub(crate) compile_fail: bool, + pub(crate) standalone: bool, pub(crate) error_codes: Vec<String>, pub(crate) edition: Option<Edition>, pub(crate) added_classes: Vec<String>, @@ -1189,6 +1192,7 @@ impl Default for LangString { rust: true, test_harness: false, compile_fail: false, + standalone: false, error_codes: Vec::new(), edition: None, added_classes: Vec::new(), @@ -1258,6 +1262,10 @@ impl LangString { seen_rust_tags = !seen_other_tags || 
seen_rust_tags; data.no_run = true; } + LangStringToken::LangToken("standalone") => { + data.standalone = true; + seen_rust_tags = !seen_other_tags || seen_rust_tags; + } LangStringToken::LangToken(x) if x.starts_with("edition") => { data.edition = x[7..].parse::<Edition>().ok(); } diff --git a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css index 6e10cf21537..86e8edad703 100644 --- a/src/librustdoc/html/static/css/noscript.css +++ b/src/librustdoc/html/static/css/noscript.css @@ -104,10 +104,6 @@ nav.sub { --code-highlight-doc-comment-color: #4d4d4c; --src-line-numbers-span-color: #c67e2d; --src-line-number-highlighted-background-color: #fdffd3; - --test-arrow-color: #f5f5f5; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #f5f5f5; - --test-arrow-hover-background-color: rgb(78, 139, 202); --target-background-color: #fdffd3; --target-border-color: #ad7c37; --kbd-color: #000; @@ -210,10 +206,6 @@ nav.sub { --code-highlight-doc-comment-color: #8ca375; --src-line-numbers-span-color: #3b91e2; --src-line-number-highlighted-background-color: #0a042f; - --test-arrow-color: #dedede; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #dedede; - --test-arrow-hover-background-color: #4e8bca; --target-background-color: #494a3d; --target-border-color: #bb7410; --kbd-color: #000; diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 02a6bb8f548..eb5a5d935e2 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -353,7 +353,7 @@ details:not(.toggle) summary { margin-bottom: .6em; } -code, pre, a.test-arrow, .code-header { +code, pre, .code-header { font-family: "Source Code Pro", monospace; } .docblock code, .docblock-short code { @@ -946,8 +946,8 @@ because of the `[-]` element which would overlap with it. */ .main-heading a:hover, .example-wrap .rust a:hover, .all-items a:hover, -.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover:not(.doc-anchor), -.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover, +.docblock a:not(.scrape-help):not(.tooltip):hover:not(.doc-anchor), +.docblock-short a:not(.scrape-help):not(.tooltip):hover, .item-info a { text-decoration: underline; } @@ -1461,22 +1461,17 @@ documentation. */ z-index: 1; } a.test-arrow { - padding: 5px 7px; - border-radius: var(--button-border-radius); - font-size: 1rem; - color: var(--test-arrow-color); - background-color: var(--test-arrow-background-color); + height: var(--copy-path-height); + padding: 6px 4px 0 11px; } -a.test-arrow:hover { - color: var(--test-arrow-hover-color); - background-color: var(--test-arrow-hover-background-color); +a.test-arrow::before { + content: url('data:image/svg+xml,<svg viewBox="0 0 20 20" width="18" height="20" \ + xmlns="http://www.w3.org/2000/svg"><path d="M0 0l18 10-18 10z"/></svg>'); } .example-wrap .button-holder { display: flex; } -.example-wrap:hover > .test-arrow { - padding: 2px 7px; -} + /* On iPad, the ":hover" state sticks around, making things work not greatly. Do work around it, we move it into this media query. More information can be found at: @@ -1486,29 +1481,34 @@ However, using `@media (hover: hover)` makes this rule never to be applied in GU instead, we check that it's not a "finger" cursor. 
*/ @media not (pointer: coarse) { - .example-wrap:hover > .test-arrow, .example-wrap:hover > .button-holder { + .example-wrap:hover > a.test-arrow, .example-wrap:hover > .button-holder { visibility: visible; } } .example-wrap .button-holder.keep-visible { visibility: visible; } -.example-wrap .button-holder .copy-button { - color: var(--copy-path-button-color); +.example-wrap .button-holder .copy-button, .example-wrap .test-arrow { background: var(--main-background-color); + cursor: pointer; + border-radius: var(--button-border-radius); height: var(--copy-path-height); width: var(--copy-path-width); +} +.example-wrap .button-holder .copy-button { margin-left: var(--button-left-margin); padding: 2px 0 0 4px; border: 0; - cursor: pointer; - border-radius: var(--button-border-radius); } -.example-wrap .button-holder .copy-button::before { +.example-wrap .button-holder .copy-button::before, +.example-wrap .test-arrow::before { filter: var(--copy-path-img-filter); +} +.example-wrap .button-holder .copy-button::before { content: var(--clipboard-image); } -.example-wrap .button-holder .copy-button:hover::before { +.example-wrap .button-holder .copy-button:hover::before, +.example-wrap .test-arrow:hover::before { filter: var(--copy-path-img-hover-filter); } .example-wrap .button-holder .copy-button.clicked::before { @@ -2552,10 +2552,6 @@ by default. --code-highlight-doc-comment-color: #4d4d4c; --src-line-numbers-span-color: #c67e2d; --src-line-number-highlighted-background-color: #fdffd3; - --test-arrow-color: #f5f5f5; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #f5f5f5; - --test-arrow-hover-background-color: rgb(78, 139, 202); --target-background-color: #fdffd3; --target-border-color: #ad7c37; --kbd-color: #000; @@ -2658,10 +2654,6 @@ by default. 
--code-highlight-doc-comment-color: #8ca375; --src-line-numbers-span-color: #3b91e2; --src-line-number-highlighted-background-color: #0a042f; - --test-arrow-color: #dedede; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #dedede; - --test-arrow-hover-background-color: #4e8bca; --target-background-color: #494a3d; --target-border-color: #bb7410; --kbd-color: #000; @@ -2771,10 +2763,6 @@ Original by Dempfi (https://github.com/dempfi/ayu) --code-highlight-doc-comment-color: #a1ac88; --src-line-numbers-span-color: #5c6773; --src-line-number-highlighted-background-color: rgba(255, 236, 164, 0.06); - --test-arrow-color: #788797; - --test-arrow-background-color: rgba(57, 175, 215, 0.09); - --test-arrow-hover-color: #c5c5c5; - --test-arrow-hover-background-color: rgba(57, 175, 215, 0.368); --target-background-color: rgba(255, 236, 164, 0.06); --target-border-color: rgba(255, 180, 76, 0.85); --kbd-color: #c5c5c5; diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 62fbde6f225..75f2a1418cd 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -1835,10 +1835,14 @@ href="https://doc.rust-lang.org/${channel}/rustdoc/read-documentation/search.htm function getExampleWrap(event) { let elem = event.target; while (!hasClass(elem, "example-wrap")) { - elem = elem.parentElement; - if (elem === document.body || hasClass(elem, "docblock")) { + if (elem === document.body || + elem.tagName === "A" || + elem.tagName === "BUTTON" || + hasClass(elem, "docblock") + ) { return null; } + elem = elem.parentElement; } return elem; } diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs index d78afdffc62..5015d665955 100644 --- a/src/librustdoc/passes/check_doc_test_visibility.rs +++ b/src/librustdoc/passes/check_doc_test_visibility.rs @@ -45,7 +45,7 @@ pub(crate) struct Tests { pub(crate) found_tests: usize, } -impl crate::doctest::DoctestVisitor for Tests { +impl crate::doctest::DocTestVisitor for Tests { fn visit_test(&mut self, _: String, config: LangString, _: MdRelLine) { if config.rust && config.ignore == Ignore::None { self.found_tests += 1; diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 9c7eee4040b..5b96529fed7 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -495,7 +495,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { ty::RawPtr(_, _) => Res::Primitive(RawPointer), ty::Ref(..) => Res::Primitive(Reference), ty::FnDef(..) => panic!("type alias to a function definition"), - ty::FnPtr(_) => Res::Primitive(Fn), + ty::FnPtr(..) => Res::Primitive(Fn), ty::Never => Res::Primitive(Never), ty::Adt(ty::AdtDef(Interned(&ty::AdtDefData { did, .. }, _)), _) | ty::Foreign(did) => { Res::from_def_id(self.cx.tcx, did) diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs index f263bec1576..dbe03e4ae80 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs @@ -15,7 +15,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, } match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) 
=> { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx); diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs index 75de53f73ee..5dc6df1e907 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs @@ -14,7 +14,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, _ => { /* continue to checks */ }, } - if let ty::FnDef(..) | ty::FnPtr(_) = cast_from.kind() { + if let ty::FnDef(..) | ty::FnPtr(..) = cast_from.kind() { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "..", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs index 0e11bcfb8ec..dfbae1618ac 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs @@ -14,7 +14,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, _ => return, } match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs index a74b3a8c836..05c3cd3c814 100644 --- a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs +++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs @@ -236,7 +236,7 @@ fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<PolyFnSig<' // We can't use `Ty::fn_sig` because it automatically performs args, this may result in FNs. match node_ty.kind() { ty::FnDef(def_id, _) => Some(cx.tcx.fn_sig(*def_id).instantiate_identity()), - ty::FnPtr(fn_sig) => Some(*fn_sig), + ty::FnPtr(sig_tys, hdr) => Some(sig_tys.with(*hdr)), _ => None, } } diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index d0cb2488468..0e55d3db469 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -872,7 +872,7 @@ impl TyCoercionStability { | ty::Pat(..) | ty::Float(_) | ty::RawPtr(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Str | ty::Slice(..) | ty::Adt(..) 
diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs index 5a7226d590c..a7e831fdc42 100644 --- a/src/tools/clippy/clippy_lints/src/eta_reduction.rs +++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs @@ -158,7 +158,7 @@ fn check_clousure<'tcx>(cx: &LateContext<'tcx>, outer_receiver: Option<&Expr<'tc cx.tcx.fn_sig(def).skip_binder().skip_binder() }, - ty::FnPtr(sig) => sig.skip_binder(), + ty::FnPtr(sig_tys, hdr) => sig_tys.with(*hdr).skip_binder(), ty::Closure(_, subs) => cx .tcx .signature_unclosure(subs.as_closure().sig(), Safety::Safe) diff --git a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs index def8be2ef73..22a03825194 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs @@ -58,7 +58,7 @@ fn try_get_caller_ty_name_and_method_name( fn is_map_to_option(cx: &LateContext<'_>, map_arg: &Expr<'_>) -> bool { let map_closure_ty = cx.typeck_results().expr_ty(map_arg); match map_closure_ty.kind() { - ty::Closure(_, _) | ty::FnDef(_, _) | ty::FnPtr(_) => { + ty::Closure(_, _) | ty::FnDef(_, _) | ty::FnPtr(..) => { let map_closure_sig = match map_closure_ty.kind() { ty::Closure(_, args) => args.as_closure().sig(), _ => map_closure_ty.fn_sig(cx.tcx), diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs index 6964d8c8dbb..0b3769ecb7c 100644 --- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs +++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs @@ -166,7 +166,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DivergenceVisitor<'a, 'tcx> { ExprKind::Call(func, _) => { let typ = self.cx.typeck_results().expr_ty(func); match typ.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { let sig = typ.fn_sig(self.cx.tcx); if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never { self.report_diverging_sub_expr(e); diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs index da74a7c7145..0bde0da3cd8 100644 --- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs +++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs @@ -130,7 +130,7 @@ fn collect_unsafe_exprs<'tcx>( ExprKind::Call(path_expr, _) => { let sig = match *cx.typeck_results().expr_ty(path_expr).kind() { ty::FnDef(id, _) => cx.tcx.fn_sig(id).skip_binder(), - ty::FnPtr(sig) => sig, + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), _ => return Continue(Descend::Yes), }; if sig.safety() == Safety::Unsafe { diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs index 0a3b769c3e6..3c0f06f66d1 100644 --- a/src/tools/clippy/clippy_lints/src/mut_reference.rs +++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs @@ -79,7 +79,7 @@ fn check_arguments<'tcx>( fn_kind: &str, ) { match type_definition.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) 
=> { let parameters = type_definition.fn_sig(cx.tcx).skip_binder().inputs(); for (argument, parameter) in iter::zip(arguments, parameters) { match parameter.kind() { diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs index bd48990aea9..2f6faba073e 100644 --- a/src/tools/clippy/clippy_utils/src/ty.rs +++ b/src/tools/clippy/clippy_utils/src/ty.rs @@ -541,7 +541,7 @@ pub fn peel_mid_ty_refs_is_mutable(ty: Ty<'_>) -> (Ty<'_>, usize, Mutability) { /// Returns `true` if the given type is an `unsafe` function. pub fn type_is_unsafe_function<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { match ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => ty.fn_sig(cx.tcx).safety() == Safety::Unsafe, + ty::FnDef(..) | ty::FnPtr(..) => ty.fn_sig(cx.tcx).safety() == Safety::Unsafe, _ => false, } } @@ -721,7 +721,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t cx.tcx.item_super_predicates(def_id).iter_instantiated(cx.tcx, args), cx.tcx.opt_parent(def_id), ), - ty::FnPtr(sig) => Some(ExprFnSig::Sig(sig, None)), + ty::FnPtr(sig_tys, hdr) => Some(ExprFnSig::Sig(sig_tys.with(hdr), None)), ty::Dynamic(bounds, _, _) => { let lang_items = cx.tcx.lang_items(); match bounds.principal() { diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs index 2a5d3536ff6..e5b6d3965e9 100644 --- a/src/tools/clippy/clippy_utils/src/visitors.rs +++ b/src/tools/clippy/clippy_utils/src/visitors.rs @@ -441,7 +441,7 @@ pub fn is_expr_unsafe<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool { ty::FnDef(id, _) if self.cx.tcx.fn_sig(id).skip_binder().safety() == Safety::Unsafe => { self.is_unsafe = true; }, - ty::FnPtr(sig) if sig.safety() == Safety::Unsafe => self.is_unsafe = true, + ty::FnPtr(_, hdr) if hdr.safety == Safety::Unsafe => self.is_unsafe = true, _ => walk_expr(self, e), }, ExprKind::Path(ref p) diff --git a/src/tools/clippy/src/main.rs b/src/tools/clippy/src/main.rs index c9af2138a72..c9853e53f3b 100644 --- a/src/tools/clippy/src/main.rs +++ b/src/tools/clippy/src/main.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to +// `rustc_driver`. +#![feature(rustc_private)] // warn on lints, that are included in `rust-lang/rust`s bootstrap #![warn(rust_2018_idioms, unused_lifetimes)] diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs index c7080e5dcdc..64253514fbe 100644 --- a/src/tools/clippy/tests/compile-test.rs +++ b/src/tools/clippy/tests/compile-test.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to +// `rustc_driver`. 
+#![feature(rustc_private)] #![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unused_extern_crates)] diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs index 0ea93dd8462..452d1b19813 100644 --- a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs +++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs @@ -10,7 +10,7 @@ use std::sync::Once; const ATOMIC: AtomicUsize = AtomicUsize::new(5); const CELL: Cell<usize> = Cell::new(6); -const ATOMIC_TUPLE: ([AtomicUsize; 1], Vec<AtomicUsize>, u8) = ([ATOMIC], Vec::new(), 7); +const ATOMIC_TUPLE: ([AtomicUsize; 1], Option<Box<AtomicUsize>>, u8) = ([ATOMIC], None, 7); const INTEGER: u8 = 8; const STRING: String = String::new(); const STR: &str = "012345"; @@ -74,7 +74,6 @@ fn main() { let _ = &(&&&&ATOMIC_TUPLE).0; //~ ERROR: interior mutability let _ = &ATOMIC_TUPLE.0[0]; //~ ERROR: interior mutability let _ = ATOMIC_TUPLE.0[0].load(Ordering::SeqCst); //~ ERROR: interior mutability - let _ = &*ATOMIC_TUPLE.1; let _ = &ATOMIC_TUPLE.2; let _ = (&&&&ATOMIC_TUPLE).0; let _ = (&&&&ATOMIC_TUPLE).2; diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr index 33c774667f9..9a9028c8649 100644 --- a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr +++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr @@ -92,7 +92,7 @@ LL | let _ = ATOMIC_TUPLE.0[0].load(Ordering::SeqCst); = help: assign this const to a local or static variable, and use the variable here error: a `const` item with interior mutability should not be borrowed - --> tests/ui/borrow_interior_mutable_const/others.rs:82:13 + --> tests/ui/borrow_interior_mutable_const/others.rs:81:13 | LL | let _ = ATOMIC_TUPLE.0[0]; | ^^^^^^^^^^^^ @@ -100,7 +100,7 @@ LL | let _ = ATOMIC_TUPLE.0[0]; = help: assign this const to a local or static variable, and use the variable here error: a `const` item with interior mutability should not be borrowed - --> tests/ui/borrow_interior_mutable_const/others.rs:87:5 + --> tests/ui/borrow_interior_mutable_const/others.rs:86:5 | LL | CELL.set(2); | ^^^^ @@ -108,7 +108,7 @@ LL | CELL.set(2); = help: assign this const to a local or static variable, and use the variable here error: a `const` item with interior mutability should not be borrowed - --> tests/ui/borrow_interior_mutable_const/others.rs:88:16 + --> tests/ui/borrow_interior_mutable_const/others.rs:87:16 | LL | assert_eq!(CELL.get(), 6); | ^^^^ diff --git a/src/tools/compiletest/src/json.rs b/src/tools/compiletest/src/json.rs index 76b83f02b14..0da93dcafa2 100644 --- a/src/tools/compiletest/src/json.rs +++ b/src/tools/compiletest/src/json.rs @@ -1,5 +1,4 @@ //! These structs are a subset of the ones found in `rustc_errors::json`. -//! They are only used for deserialization of JSON output provided by libtest. use std::path::{Path, PathBuf}; use std::str::FromStr; @@ -127,11 +126,10 @@ pub fn extract_rendered(output: &str) -> String { // Ignore the notification. None } else { - print!( - "failed to decode compiler output as json: line: {}\noutput: {}", - line, output - ); - panic!() + // This function is called for both compiler and non-compiler output, + // so if the line isn't recognized as JSON from the compiler then + // just print it as-is. 
+ Some(format!("{line}\n")) } } else { // preserve non-JSON lines, such as ICEs diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index b1b6d6fc8eb..59fce44d1c7 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -3027,11 +3027,17 @@ impl<'test> TestCx<'test> { const PREFIX: &str = "MONO_ITEM "; const CGU_MARKER: &str = "@@"; + // Some MonoItems can contain {closure@/path/to/checkout/tests/codgen-units/test.rs} + // To prevent the current dir from leaking, we just replace the entire path to the test + // file with TEST_PATH. let actual: Vec<MonoItem> = proc_res .stdout .lines() .filter(|line| line.starts_with(PREFIX)) - .map(|line| str_to_mono_item(line, true)) + .map(|line| { + line.replace(&self.testpaths.file.display().to_string(), "TEST_PATH").to_string() + }) + .map(|line| str_to_mono_item(&line, true)) .collect(); let expected: Vec<MonoItem> = errors::load_errors(&self.testpaths.file, None) @@ -3729,15 +3735,14 @@ impl<'test> TestCx<'test> { } if self.config.bless { - cmd.env("RUSTC_BLESS_TEST", "--bless"); - // Assume this option is active if the environment variable is "defined", with _any_ value. - // As an example, a `Makefile` can use this option by: + // If we're running in `--bless` mode, set an environment variable to tell + // `run_make_support` to bless snapshot files instead of checking them. // - // ifdef RUSTC_BLESS_TEST - // cp "$(TMPDIR)"/actual_something.ext expected_something.ext - // else - // $(DIFF) expected_something.ext "$(TMPDIR)"/actual_something.ext - // endif + // The value is this test's source directory, because the support code + // will need that path in order to bless the _original_ snapshot files, + // not the copies in `rmake_out`. + // (See <https://github.com/rust-lang/rust/issues/129038>.) 
+ cmd.env("RUSTC_BLESS_TEST", &self.testpaths.file); } if self.config.target.contains("msvc") && !self.config.cc.is_empty() { diff --git a/src/tools/generate-windows-sys/src/main.rs b/src/tools/generate-windows-sys/src/main.rs index fe1b1bd5ceb..6dbf29d957f 100644 --- a/src/tools/generate-windows-sys/src/main.rs +++ b/src/tools/generate-windows-sys/src/main.rs @@ -35,7 +35,6 @@ fn main() -> Result<(), Box<dyn Error>> { let mut f = std::fs::File::options().append(true).open("windows_sys.rs")?; f.write_all(ARM32_SHIM.as_bytes())?; writeln!(&mut f, "// ignore-tidy-filelength")?; - writeln!(&mut f, "use super::windows_targets;")?; Ok(()) } diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index 2b3ae6df5de..966d38508f6 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -56,6 +56,7 @@ extern crate either; extern crate tracing; // The rustc crates we need +extern crate rustc_attr; extern crate rustc_apfloat; extern crate rustc_ast; extern crate rustc_const_eval; diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index 94598e7d2e3..754f8cf1381 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -11,6 +11,7 @@ use rand::rngs::StdRng; use rand::Rng; use rand::SeedableRng; +use rustc_attr::InlineAttr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; #[allow(unused)] use rustc_data_structures::static_assert_size; @@ -23,6 +24,7 @@ use rustc_middle::{ Instance, Ty, TyCtxt, }, }; +use rustc_session::config::InliningThreshold; use rustc_span::def_id::{CrateNum, DefId}; use rustc_span::{Span, SpanData, Symbol}; use rustc_target::abi::{Align, Size}; @@ -47,10 +49,10 @@ pub const SIGRTMIN: i32 = 34; /// `SIGRTMAX` - `SIGRTMIN` >= 8 (which is the value of `_POSIX_RTSIG_MAX`) pub const SIGRTMAX: i32 = 42; -/// Each const has multiple addresses, but only this many. Since const allocations are never -/// deallocated, choosing a new [`AllocId`] and thus base address for each evaluation would -/// produce unbounded memory usage. -const ADDRS_PER_CONST: usize = 16; +/// Each anonymous global (constant, vtable, function pointer, ...) has multiple addresses, but only +/// this many. Since const allocations are never deallocated, choosing a new [`AllocId`] and thus +/// base address for each evaluation would produce unbounded memory usage. +const ADDRS_PER_ANON_GLOBAL: usize = 32; /// Extra data stored with each stack frame pub struct FrameExtra<'tcx> { @@ -1372,7 +1374,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { catch_unwind: None, timing, is_user_relevant: ecx.machine.is_user_relevant(&frame), - salt: ecx.machine.rng.borrow_mut().gen::<usize>() % ADDRS_PER_CONST, + salt: ecx.machine.rng.borrow_mut().gen::<usize>() % ADDRS_PER_ANON_GLOBAL, }; Ok(frame.with_extra(extra)) @@ -1518,4 +1520,45 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { Entry::Occupied(oe) => Ok(oe.get().clone()), } } + + fn get_global_alloc_salt( + ecx: &InterpCx<'tcx, Self>, + instance: Option<ty::Instance<'tcx>>, + ) -> usize { + let unique = if let Some(instance) = instance { + // Functions cannot be identified by pointers, as asm-equal functions can get + // deduplicated by the linker (we set the "unnamed_addr" attribute for LLVM) and + // functions can be duplicated across crates. We thus generate a new `AllocId` for every + // mention of a function. This means that `main as fn() == main as fn()` is false, while + // `let x = main as fn(); x == x` is true. 
However, as a quality-of-life feature it can + // be useful to identify certain functions uniquely, e.g. for backtraces. So we identify + // whether codegen will actually emit duplicate functions. It does that when they have + // non-lifetime generics, or when they can be inlined. All other functions are given a + // unique address. This is not a stable guarantee! The `inline` attribute is a hint and + // cannot be relied upon for anything. But if we don't do this, the + // `__rust_begin_short_backtrace`/`__rust_end_short_backtrace` logic breaks and panic + // backtraces look terrible. + let is_generic = instance + .args + .into_iter() + .any(|kind| !matches!(kind.unpack(), ty::GenericArgKind::Lifetime(_))); + let can_be_inlined = matches!( + ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold, + InliningThreshold::Always + ) || !matches!( + ecx.tcx.codegen_fn_attrs(instance.def_id()).inline, + InlineAttr::Never + ); + !is_generic && !can_be_inlined + } else { + // Non-functions are never unique. + false + }; + // Always use the same salt if the allocation is unique. + if unique { + CTFE_ALLOC_SALT + } else { + ecx.machine.rng.borrow_mut().gen::<usize>() % ADDRS_PER_ANON_GLOBAL + } + } } diff --git a/src/tools/miri/tests/pass/dyn-traits.rs b/src/tools/miri/tests/pass/dyn-traits.rs index 908d521a0d8..f6220120968 100644 --- a/src/tools/miri/tests/pass/dyn-traits.rs +++ b/src/tools/miri/tests/pass/dyn-traits.rs @@ -141,7 +141,17 @@ fn unsized_dyn_autoderef() { } */ +fn vtable_ptr_eq() { + use std::{fmt, ptr}; + + // We don't always get the same vtable when casting this to a wide pointer. + let x = &2; + let x_wide = x as &dyn fmt::Display; + assert!((0..256).any(|_| !ptr::eq(x as &dyn fmt::Display, x_wide))); +} + fn main() { ref_box_dyn(); box_box_trait(); + vtable_ptr_eq(); } diff --git a/src/tools/miri/tests/pass/function_pointers.rs b/src/tools/miri/tests/pass/function_pointers.rs index 2aa3ebf2dd0..a5c4bc5e0d9 100644 --- a/src/tools/miri/tests/pass/function_pointers.rs +++ b/src/tools/miri/tests/pass/function_pointers.rs @@ -82,7 +82,8 @@ fn main() { assert!(return_fn_ptr(i) == i); assert!(return_fn_ptr(i) as unsafe fn() -> i32 == i as fn() -> i32 as unsafe fn() -> i32); // Miri gives different addresses to different reifications of a generic function. - assert!(return_fn_ptr(f) != f); + // at least if we try often enough. + assert!((0..256).any(|_| return_fn_ptr(f) != f)); // However, if we only turn `f` into a function pointer and use that pointer, // it is equal to itself. let f2 = f as fn() -> i32; diff --git a/src/tools/miri/tests/pass/rc.rs b/src/tools/miri/tests/pass/rc.rs index 6dd1b3aff9e..b1470dabc26 100644 --- a/src/tools/miri/tests/pass/rc.rs +++ b/src/tools/miri/tests/pass/rc.rs @@ -75,7 +75,8 @@ fn rc_fat_ptr_eq() { let p = Rc::new(1) as Rc<dyn Debug>; let a: *const dyn Debug = &*p; let r = Rc::into_raw(p); - assert!(a == r); + // Only compare the pointer parts, as the vtable might differ. + assert!(a as *const () == r as *const ()); drop(unsafe { Rc::from_raw(r) }); } diff --git a/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs b/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs index 05090d685ab..9debe224d45 100644 --- a/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs +++ b/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs @@ -1,3 +1,5 @@ +// FIXME: This test is broken since https://github.com/rust-lang/rust/pull/126793, +// possibly related to the additional struct between Vec and Unique. 
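The `get_global_alloc_salt` comment above distinguishes two situations: separate reifications of the same function may get different addresses (so `main as fn() == main as fn()` can be false under Miri), while a pointer that is reified once is always equal to itself. A minimal ordinary-Rust restatement of the guaranteed half, runnable with or without Miri:

    fn f() -> i32 {
        42
    }

    fn main() {
        // Reify once, then compare: this is the stable comparison the updated
        // Miri tests rely on.
        let x = f as fn() -> i32;
        assert!(x == x);
        // By contrast, the tests above now only require that *some* of 256
        // fresh reifications of `f` compare unequal, since any single fresh
        // reification may happen to reuse the same address.
    }
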
//@revisions: default uniq // We disable the GC for this test because it would change what is printed. //@compile-flags: -Zmiri-tree-borrows -Zmiri-provenance-gc=0 diff --git a/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr b/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr index 7942e9884f4..e3796a742e9 100644 --- a/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr +++ b/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr @@ -2,7 +2,9 @@ Warning: this tree is indicative only. Some tags may have been hidden. 0.. 2 | Act | └─┬──<TAG=root of the allocation> -|-----| └─┬──<TAG=base.as_ptr(), base.as_ptr()> -|-----| └─┬──<TAG=raw_parts.0> -|-----| └────<TAG=reconstructed.as_ptr(), reconstructed.as_ptr()> +|-----| ├────<TAG=base.as_ptr()> +|-----| ├────<TAG=base.as_ptr()> +|-----| └─┬──<TAG=raw_parts.0> +|-----| ├────<TAG=reconstructed.as_ptr()> +|-----| └────<TAG=reconstructed.as_ptr()> ────────────────────────────────────────────────── diff --git a/src/tools/run-make-support/src/assertion_helpers.rs b/src/tools/run-make-support/src/assertion_helpers.rs index 6d256fc594d..b4da65aff4a 100644 --- a/src/tools/run-make-support/src/assertion_helpers.rs +++ b/src/tools/run-make-support/src/assertion_helpers.rs @@ -77,6 +77,20 @@ pub fn assert_not_contains_regex<H: AsRef<str>, N: AsRef<str>>(haystack: H, need } } +/// Assert that `haystack` contains `needle` a `count` number of times. +#[track_caller] +pub fn assert_count_is<H: AsRef<str>, N: AsRef<str>>(count: usize, haystack: H, needle: N) { + let haystack = haystack.as_ref(); + let needle = needle.as_ref(); + if count != haystack.matches(needle).count() { + eprintln!("=== HAYSTACK ==="); + eprintln!("{}", haystack); + eprintln!("=== NEEDLE ==="); + eprintln!("{}", needle); + panic!("needle did not appear {count} times in haystack"); + } +} + /// Assert that all files in `dir1` exist and have the same content in `dir2` pub fn assert_dirs_are_equal(dir1: impl AsRef<Path>, dir2: impl AsRef<Path>) { let dir2 = dir2.as_ref(); diff --git a/src/tools/run-make-support/src/diff/mod.rs b/src/tools/run-make-support/src/diff/mod.rs index b0ed4d5445c..ee48e373366 100644 --- a/src/tools/run-make-support/src/diff/mod.rs +++ b/src/tools/run-make-support/src/diff/mod.rs @@ -112,14 +112,8 @@ impl Diff { let (expected_name, actual_name, output, actual) = self.run_common(); if !output.is_empty() { - // If we can bless (meaning we have a file to write into and the `RUSTC_BLESS_TEST` - // environment variable set), then we write into the file and return. - if let Some(ref expected_file) = self.expected_file { - if std::env::var("RUSTC_BLESS_TEST").is_ok() { - println!("Blessing `{}`", expected_file.display()); - fs::write(expected_file, actual); - return; - } + if self.maybe_bless_expected_file(&actual) { + return; } panic!( "test failed: `{}` is different from `{}`\n\n{}", @@ -134,14 +128,8 @@ impl Diff { let (expected_name, actual_name, output, actual) = self.run_common(); if output.is_empty() { - // If we can bless (meaning we have a file to write into and the `RUSTC_BLESS_TEST` - // environment variable set), then we write into the file and return. 
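Before the diff continues, a quick usage sketch for the `assert_count_is` helper added above, with hypothetical strings; it panics, after printing the haystack and needle, when the needle count differs from the expected count:

use run_make_support::assert_count_is;

fn main() {
    // Passes: "ab" appears exactly twice in the haystack.
    assert_count_is(2, "xxabyyabzz", "ab");
    // Would panic, since "ab" appears twice, not three times:
    // assert_count_is(3, "xxabyyabzz", "ab");
}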
- if let Some(ref expected_file) = self.expected_file { - if std::env::var("RUSTC_BLESS_TEST").is_ok() { - println!("Blessing `{}`", expected_file.display()); - fs::write(expected_file, actual); - return; - } + if self.maybe_bless_expected_file(&actual) { + return; } panic!( "test failed: `{}` is not different from `{}`\n\n{}", @@ -149,4 +137,24 @@ impl Diff { ) } } + + /// If we have an expected file to write into, and `RUSTC_BLESS_TEST` is + /// set, then write the actual output into the file and return `true`. + /// + /// We assume that `RUSTC_BLESS_TEST` contains the path to the original test's + /// source directory. That lets us bless the original snapshot file in the + /// source tree, not the copy in `rmake_out` that we would normally use. + fn maybe_bless_expected_file(&self, actual: &str) -> bool { + let Some(ref expected_file) = self.expected_file else { + return false; + }; + let Ok(bless_dir) = std::env::var("RUSTC_BLESS_TEST") else { + return false; + }; + + let bless_file = Path::new(&bless_dir).join(expected_file); + println!("Blessing `{}`", bless_file.display()); + fs::write(bless_file, actual); + true + } } diff --git a/src/tools/run-make-support/src/external_deps/llvm.rs b/src/tools/run-make-support/src/external_deps/llvm.rs index dc651fdd820..7af79443aff 100644 --- a/src/tools/run-make-support/src/external_deps/llvm.rs +++ b/src/tools/run-make-support/src/external_deps/llvm.rs @@ -48,6 +48,12 @@ pub fn llvm_bcanalyzer() -> LlvmBcanalyzer { LlvmBcanalyzer::new() } +/// Construct a new `llvm-dwarfdump` invocation. This assumes that `llvm-dwarfdump` is available +/// at `$LLVM_BIN_DIR/llvm-dwarfdump`. +pub fn llvm_dwarfdump() -> LlvmDwarfdump { + LlvmDwarfdump::new() +} + /// A `llvm-readobj` invocation builder. #[derive(Debug)] #[must_use] @@ -97,6 +103,13 @@ pub struct LlvmBcanalyzer { cmd: Command, } +/// A `llvm-dwarfdump` invocation builder. +#[derive(Debug)] +#[must_use] +pub struct LlvmDwarfdump { + cmd: Command, +} + crate::macros::impl_common_helpers!(LlvmReadobj); crate::macros::impl_common_helpers!(LlvmProfdata); crate::macros::impl_common_helpers!(LlvmFilecheck); @@ -104,6 +117,7 @@ crate::macros::impl_common_helpers!(LlvmObjdump); crate::macros::impl_common_helpers!(LlvmAr); crate::macros::impl_common_helpers!(LlvmNm); crate::macros::impl_common_helpers!(LlvmBcanalyzer); +crate::macros::impl_common_helpers!(LlvmDwarfdump); /// Generate the path to the bin directory of LLVM. #[must_use] @@ -317,3 +331,19 @@ impl LlvmBcanalyzer { self } } + +impl LlvmDwarfdump { + /// Construct a new `llvm-dwarfdump` invocation. This assumes that `llvm-dwarfdump` is available + /// at `$LLVM_BIN_DIR/llvm-dwarfdump`. + pub fn new() -> Self { + let llvm_dwarfdump = llvm_bin_dir().join("llvm-dwarfdump"); + let cmd = Command::new(llvm_dwarfdump); + Self { cmd } + } + + /// Provide an input file. 
+ pub fn input<P: AsRef<Path>>(&mut self, path: P) -> &mut Self { + self.cmd.arg(path.as_ref()); + self + } +} diff --git a/src/tools/run-make-support/src/lib.rs b/src/tools/run-make-support/src/lib.rs index a44dd00ad79..4bef4f05007 100644 --- a/src/tools/run-make-support/src/lib.rs +++ b/src/tools/run-make-support/src/lib.rs @@ -49,8 +49,9 @@ pub use c_build::{build_native_dynamic_lib, build_native_static_lib, build_nativ pub use clang::{clang, Clang}; pub use htmldocck::htmldocck; pub use llvm::{ - llvm_ar, llvm_bcanalyzer, llvm_filecheck, llvm_nm, llvm_objdump, llvm_profdata, llvm_readobj, - LlvmAr, LlvmBcanalyzer, LlvmFilecheck, LlvmNm, LlvmObjdump, LlvmProfdata, LlvmReadobj, + llvm_ar, llvm_bcanalyzer, llvm_dwarfdump, llvm_filecheck, llvm_nm, llvm_objdump, llvm_profdata, + llvm_readobj, LlvmAr, LlvmBcanalyzer, LlvmDwarfdump, LlvmFilecheck, LlvmNm, LlvmObjdump, + LlvmProfdata, LlvmReadobj, }; pub use python::python_command; pub use rustc::{aux_build, bare_rustc, rustc, Rustc}; @@ -86,7 +87,7 @@ pub use path_helpers::{ pub use scoped_run::{run_in_tmpdir, test_while_readonly}; pub use assertion_helpers::{ - assert_contains, assert_contains_regex, assert_dirs_are_equal, assert_equals, + assert_contains, assert_contains_regex, assert_count_is, assert_dirs_are_equal, assert_equals, assert_not_contains, assert_not_contains_regex, }; diff --git a/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml b/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml index 34ca53e2e53..f1533bf26e5 100644 --- a/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml +++ b/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml @@ -30,7 +30,6 @@ jobs: run: | git config --global user.email "runner@gha.local" git config --global user.name "GitHub Action" - # Remove r-a crates from the workspaces so we don't auto-publish them as well - sed -i 's/ "crates\/\*"//' ./Cargo.toml - sed -i 's/ "xtask\/"//' ./Cargo.toml + # Only publish the crates under lib/ + sed -i 's|^members = .*$|members = ["lib/*"]|' Cargo.toml cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index b98a1195d8b..41dc4405099 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -52,16 +52,16 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" -version = "0.3.72" +version = "0.3.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.35.0", + "object 0.36.3", "rustc-demangle", ] @@ -92,9 +92,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "byteorder" @@ -136,9 +136,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.98" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" +checksum = 
"e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292" [[package]] name = "cfg" @@ -148,10 +148,10 @@ dependencies = [ "derive_arbitrary", "expect-test", "intern", - "mbe", "oorandom", "rustc-hash", "syntax", + "syntax-bridge", "tt", ] @@ -185,7 +185,7 @@ version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "chalk-derive", ] @@ -226,9 +226,9 @@ checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cov-mark" -version = "2.0.0-pre.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a" +checksum = "0570650661aa447e7335f1d5e4f499d8e58796e617bedc9267d971e51c8b49d4" [[package]] name = "crc32fast" @@ -366,9 +366,9 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "ena" @@ -397,14 +397,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "libredox", + "windows-sys 0.59.0", ] [[package]] @@ -415,32 +415,15 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920" dependencies = [ "crc32fast", "miniz_oxide", ] [[package]] -name = "flycheck" -version = "0.0.0" -dependencies = [ - "cargo_metadata", - "crossbeam-channel", - "paths", - "process-wrap", - "project-model", - "rustc-hash", - "serde", - "serde_json", - "stdx", - "toolchain", - "tracing", -] - -[[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -529,7 +512,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg", "cov-mark", "dashmap", @@ -554,6 +537,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tracing", @@ -582,6 +566,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "tracing", "triomphe", "tt", @@ -593,7 +578,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.5.0", + "bitflags 2.6.0", "chalk-derive", "chalk-ir", "chalk-recursive", @@ -722,7 +707,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.5.0", + "bitflags 2.6.0", "cov-mark", "crossbeam-channel", "either", @@ -803,9 +788,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" dependencies = [ "equivalent", "hashbrown", @@ -895,9 +880,9 @@ checksum = "3752f229dcc5a481d60f385fa479ff46818033d881d2d801aa27dffcfb5e8306" [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" @@ -907,19 +892,19 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libloading" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] name = "libmimalloc-sys" -version = "0.1.38" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6" +checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" dependencies = [ "cc", "libc", @@ -931,8 +916,9 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", + "redox_syscall", ] [[package]] @@ -996,9 +982,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "lsp-server" @@ -1056,6 +1042,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "test-utils", "tracing", "tt", @@ -1063,9 +1050,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" @@ -1087,18 +1074,18 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.42" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176" +checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" dependencies = [ "libmimalloc-sys", ] [[package]] name = "miniz_oxide" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] @@ -1130,7 +1117,7 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if", "cfg_aliases", "libc", @@ -1148,7 +1135,7 @@ version = "6.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "crossbeam-channel", "filetime", "fsevent-sys", @@ -1163,11 +1150,11 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.50.0" +version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14" +checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1197,9 +1184,9 @@ dependencies = [ [[package]] name = "object" -version = "0.35.0" +version = "0.36.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" +checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" dependencies = [ "memchr", ] @@ -1212,9 +1199,9 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "oorandom" -version = "11.1.3" +version = "11.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "option-ext" @@ -1240,9 +1227,9 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.1", + "redox_syscall", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -1268,6 +1255,7 @@ name = "paths" version = "0.0.0" dependencies = [ "camino", + "serde", ] [[package]] @@ -1319,9 +1307,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "proc-macro-api" @@ -1330,14 +1321,12 @@ dependencies = [ "base-db", "indexmap", "intern", - "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", "rustc-hash", "serde", "serde_json", "span", "stdx", - "text-size", "tracing", "tt", ] @@ -1350,7 +1339,6 @@ dependencies = [ "expect-test", "intern", "libloading", - "mbe", "memmap2", "object 0.33.0", "paths", @@ -1360,6 +1348,7 @@ dependencies = [ "snap", "span", "stdx", + "syntax-bridge", "tt", ] @@ -1380,9 +1369,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.85" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -1460,7 +1449,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "memchr", "unicase", ] @@ -1485,20 +1474,20 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46" +checksum = "b011c39d409940a890414e3a7b239762ac16d88029ad71b050a8374831b93790" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "ra-ap-rustc_index", "tracing", ] [[package]] name = "ra-ap-rustc_index" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9" +checksum = "9027acdee649b0b27eb10b7db5be833efee3362d394935c5eed8f0745a9d43ce" dependencies = [ "arrayvec", "ra-ap-rustc_index_macros", @@ -1507,21 +1496,20 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82f3d6dcb30a66905388e14756b8f2216131d9f8004922c07f13335840e058d1" +checksum = "540b86dc0384141ac8e825fc2874cd44bffd4277d99d8ec63ee416f1a98d5997" dependencies = [ "proc-macro2", "quote", "syn", - "synstructure", ] [[package]] name = "ra-ap-rustc_lexer" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbd8a2b0bdcba9892cbce0b25f6c953d31b0febc1f3420fc692884fce5a23ad8" +checksum = "3bdf98bb457b47b9ae4aeebf867d0ca440c86925e0b6381658c4a02589748c9d" dependencies = [ "unicode-properties", "unicode-xid", @@ -1529,9 +1517,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135" +checksum = "e8fe3556ab6311bb775220563a300e2bf62ec56404521fe0c511a583937683d5" dependencies = [ "ra-ap-rustc_index", "ra-ap-rustc_lexer", @@ -1539,9 +1527,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365" +checksum = "1709080fdeb5db630e1c2644026c2962aaa32416cd92f0190c04b0c21e114b91" dependencies = [ "ra-ap-rustc_index", "rustc-hash", @@ -1602,20 +1590,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] @@ -1648,12 +1627,12 @@ version = "0.0.0" dependencies = [ "always-assert", "anyhow", + "cargo_metadata", "cfg", "crossbeam-channel", "dirs", "dissimilar", "expect-test", - "flycheck", "hir", "hir-def", "hir-ty", @@ -1665,7 +1644,6 @@ dependencies = [ "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", - "mbe", "memchr", "mimalloc", "nohash-hasher", @@ -1675,6 +1653,7 @@ dependencies = [ "parser", "paths", "proc-macro-api", + "process-wrap", "profile", "project-model", "rayon", @@ -1685,6 +1664,7 @@ dependencies = [ "serde_json", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tikv-jemallocator", @@ -1716,9 +1696,9 @@ checksum = 
"08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc_apfloat" -version = "0.2.0+llvm-462a31f5a5ab" +version = "0.2.1+llvm-462a31f5a5ab" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465187772033a5ee566f69fe008df03628fce549a0899aae76f0a0c2e34696be" +checksum = "886d94c63c812a8037c4faca2607453a0fa4cf82f734665266876b022244543f" dependencies = [ "bitflags 1.3.2", "smallvec", @@ -1801,18 +1781,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.203" +version = "1.0.206" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.203" +version = "1.0.206" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97" dependencies = [ "proc-macro2", "quote", @@ -1821,12 +1801,13 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d" dependencies = [ "indexmap", "itoa", + "memchr", "ryu", "serde", ] @@ -1844,9 +1825,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -1923,9 +1904,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.66" +version = "2.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" dependencies = [ "proc-macro2", "quote", @@ -1968,6 +1949,21 @@ dependencies = [ ] [[package]] +name = "syntax-bridge" +version = "0.0.0" +dependencies = [ + "intern", + "parser", + "rustc-hash", + "span", + "stdx", + "syntax", + "test-utils", + "tracing", + "tt", +] + +[[package]] name = "test-fixture" version = "0.0.0" dependencies = [ @@ -1987,6 +1983,7 @@ name = "test-utils" version = "0.0.0" dependencies = [ "dissimilar", + "paths", "profile", "rustc-hash", "stdx", @@ -2010,18 +2007,18 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", @@ -2090,9 +2087,9 @@ checksum = 
"ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -2105,9 +2102,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", @@ -2117,18 +2114,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.14" +version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ "indexmap", "serde", @@ -2214,9 +2211,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90" +checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" dependencies = [ "serde", "stable_deref_trait", @@ -2289,9 +2286,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", @@ -2307,14 +2304,15 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vfs" version = "0.0.0" dependencies = [ + "crossbeam-channel", "fst", "indexmap", "nohash-hasher", @@ -2331,6 +2329,8 @@ dependencies = [ "crossbeam-channel", "notify", "paths", + "rayon", + "rustc-hash", "stdx", "tracing", "vfs", @@ -2355,11 +2355,11 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2369,7 +2369,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132" dependencies = [ "windows-core", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2381,7 +2381,7 @@ dependencies = [ "windows-implement", "windows-interface", "windows-result", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2408,11 +2408,11 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749f0da9cc72d82e600d8d2e44cadd0b9eedb9038f71a1c58556ac1c5791813b" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2430,7 +2430,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -2450,18 +2459,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -2472,9 +2481,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -2484,9 +2493,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -2496,15 +2505,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -2514,9 +2523,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -2526,9 +2535,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -2538,9 +2547,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -2550,15 +2559,15 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.11" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c52728401e1dc672a56e81e593e912aa54c78f40246869f78359a2bf24d29d" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] @@ -2619,6 +2628,27 @@ dependencies = [ ] [[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "zip" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index c2f601a91bc..56e80e11e77 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,10 +4,11 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.78" +rust-version = "1.80" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] +repository = "https://github.com/rust-lang/rust-analyzer" [profile.dev] debug = 1 @@ -51,7 +52,6 @@ debug = 2 # local crates 
base-db = { path = "./crates/base-db", version = "0.0.0" } cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] } -flycheck = { path = "./crates/flycheck", version = "0.0.0" } hir = { path = "./crates/hir", version = "0.0.0" } hir-def = { path = "./crates/hir-def", version = "0.0.0" } hir-expand = { path = "./crates/hir-expand", version = "0.0.0" } @@ -77,17 +77,18 @@ salsa = { path = "./crates/salsa", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } +syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.53.0", default-features = false } -ra-ap-rustc_parse_format = { version = "0.53.0", default-features = false } -ra-ap-rustc_index = { version = "0.53.0", default-features = false } -ra-ap-rustc_abi = { version = "0.53.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false } +ra-ap-rustc_lexer = { version = "0.63.0", default-features = false } +ra-ap-rustc_parse_format = { version = "0.63.0", default-features = false } +ra-ap-rustc_index = { version = "0.63.0", default-features = false } +ra-ap-rustc_abi = { version = "0.63.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.63.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. test-fixture = { path = "./crates/test-fixture" } @@ -124,11 +125,11 @@ memmap2 = "0.5.4" nohash-hasher = "0.2.0" oorandom = "11.1.3" object = { version = "0.33.0", default-features = false, features = [ - "std", - "read_core", - "elf", - "macho", - "pe", + "std", + "read_core", + "elf", + "macho", + "pe", ] } process-wrap = { version = "8.0.2", features = ["std"] } pulldown-cmark-to-cmark = "10.0.4" @@ -158,7 +159,6 @@ url = "2.3.1" xshell = "0.2.5" - # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.5.3", features = ["raw-api"] } diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 1b1ee034cac..b17b08a720c 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "base-db" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Basic database traits for rust-analyzer. The concrete DB is defined by `ide` (aka `ra_ap_ide`)." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs index 0fd54e1211c..a9d91d64ceb 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/change.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs @@ -7,7 +7,7 @@ use salsa::Durability; use triomphe::Arc; use vfs::FileId; -use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId}; +use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. 
#[derive(Default)] @@ -50,7 +50,7 @@ impl FileChange { self.crate_graph = Some(graph); } - pub fn apply(self, db: &mut dyn SourceDatabaseExt) { + pub fn apply(self, db: &mut dyn SourceRootDatabase) { let _p = tracing::info_span!("FileChange::apply").entered(); if let Some(roots) = self.roots { for (idx, root) in roots.into_iter().enumerate() { diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 460581f4a6c..3616fa9fd86 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -690,6 +690,14 @@ impl Env { pub fn extend_from_other(&mut self, other: &Env) { self.entries.extend(other.entries.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); } + + pub fn is_empty(&self) -> bool { + self.entries.is_empty() + } + + pub fn insert(&mut self, k: impl Into<String>, v: impl Into<String>) -> Option<String> { + self.entries.insert(k.into(), v.into()) + } } impl From<Env> for Vec<(String, String)> { @@ -700,6 +708,15 @@ impl From<Env> for Vec<(String, String)> { } } +impl<'a> IntoIterator for &'a Env { + type Item = (&'a String, &'a String); + type IntoIter = std::collections::hash_map::Iter<'a, String, String>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.iter() + } +} + #[derive(Debug)] pub struct CyclicDependenciesError { path: Vec<(CrateId, Option<CrateDisplayName>)>, diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index f319f98537b..20ef45d0b37 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -1,5 +1,5 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. - +// FIXME: Rename this crate, base db is non descriptive mod change; mod input; @@ -47,8 +47,6 @@ pub const DEFAULT_PARSE_LRU_CAP: u16 = 128; pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024; pub trait FileLoader { - /// Text of the file. - fn file_text(&self, file_id: FileId) -> Arc<str>; fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>; /// Crates whose root's source root is the same as the source root of `file_id` fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>; @@ -58,6 +56,13 @@ pub trait FileLoader { /// model. Everything else in rust-analyzer is derived from these queries. #[salsa::query_group(SourceDatabaseStorage)] pub trait SourceDatabase: FileLoader + std::fmt::Debug { + #[salsa::input] + fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; + + /// Text of the file. + #[salsa::lru] + fn file_text(&self, file_id: FileId) -> Arc<str>; + /// Parses the file into the syntax tree. #[salsa::lru] fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>; @@ -99,16 +104,18 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc } } +fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> { + let bytes = db.compressed_file_text(file_id); + let bytes = + lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); + let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); + Arc::from(text) +} + /// We don't want to give HIR knowledge of source roots, hence we extract these /// methods into a separate DB. 
-#[salsa::query_group(SourceDatabaseExtStorage)] -pub trait SourceDatabaseExt: SourceDatabase { - #[salsa::input] - fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; - - #[salsa::lru] - fn file_text(&self, file_id: FileId) -> Arc<str>; - +#[salsa::query_group(SourceRootDatabaseStorage)] +pub trait SourceRootDatabase: SourceDatabase { /// Path to a file, relative to the root of its source root. /// Source root of the file. #[salsa::input] @@ -121,15 +128,7 @@ pub trait SourceDatabaseExt: SourceDatabase { fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>; } -fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc<str> { - let bytes = db.compressed_file_text(file_id); - let bytes = - lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); - let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); - Arc::from(text) -} - -pub trait SourceDatabaseExt2 { +pub trait SourceDatabaseFileInputExt { fn set_file_text(&mut self, file_id: FileId, text: &str) { self.set_file_text_with_durability(file_id, text, Durability::LOW); } @@ -142,7 +141,7 @@ pub trait SourceDatabaseExt2 { ); } -impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db { +impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db { fn set_file_text_with_durability( &mut self, file_id: FileId, @@ -159,7 +158,7 @@ impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db { } } -fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> { +fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> { let graph = db.crate_graph(); let mut crates = graph .iter() @@ -173,13 +172,12 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[Crat crates.into_iter().collect() } -/// Silly workaround for cyclic deps between the traits +// FIXME: Would be nice to get rid of this somehow +/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split +/// regarding FileLoader pub struct FileLoaderDelegate<T>(pub T); -impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> { - fn file_text(&self, file_id: FileId) -> Arc<str> { - SourceDatabaseExt::file_text(self.0, file_id) - } +impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> { fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { // FIXME: this *somehow* should be platform agnostic... let source_root = self.0.file_source_root(path.anchor); diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index faf93f62c6a..29b7ad6f8fe 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "cfg" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Conditional compiling options, `cfg` attribute parser and evaluator for rust-analyzer." 
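Stepping back to the `base-db` change above: `file_text` is now a derived query that lz4-decompresses the `compressed_file_text` input. A minimal round-trip sketch assuming the `lz4_flex` crate as a dependency; the compression call shown here is an assumption inferred from the decompression call in the diff:

fn main() {
    let text = "fn main() { println!(\"hello\"); }";
    // Store: compress the UTF-8 bytes, prepending the uncompressed size.
    let compressed = lz4_flex::compress_prepend_size(text.as_bytes());
    // Load: decompress and re-validate as UTF-8, mirroring `file_text` above.
    let bytes = lz4_flex::decompress_size_prepended(&compressed)
        .expect("lz4 decompression should not fail");
    let restored = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
    assert_eq!(restored, text);
}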
authors.workspace = true edition.workspace = true @@ -28,7 +29,7 @@ arbitrary = "1.3.2" derive_arbitrary = "1.3.2" # local deps -mbe.workspace = true +syntax-bridge.workspace = true syntax.workspace = true [lints] diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 6d46dfb9994..e9daaf7de3c 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -108,6 +108,14 @@ impl<'a> IntoIterator for &'a CfgOptions { } } +impl FromIterator<CfgAtom> for CfgOptions { + fn from_iter<T: IntoIterator<Item = CfgAtom>>(iter: T) -> Self { + let mut options = CfgOptions::default(); + options.extend(iter); + options + } +} + #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct CfgDiff { // Invariants: No duplicates, no atom that's both in `enable` and `disable`. diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs index 597023a792b..6d87d83ad93 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs @@ -1,8 +1,11 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use intern::Symbol; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ast, AstNode, Edition}; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml deleted file mode 100644 index bb3a94c8da6..00000000000 --- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "flycheck" -version = "0.0.0" -description = "TBD" - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] -doctest = false - -[dependencies] -cargo_metadata.workspace = true -crossbeam-channel.workspace = true -tracing.workspace = true -rustc-hash.workspace = true -serde_json.workspace = true -serde.workspace = true -process-wrap.workspace = true - -# local deps -paths.workspace = true -stdx.workspace = true -toolchain.workspace = true -project-model.workspace = true - -[lints] -workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index 8ac2d003137..5b9d227e315 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir-def" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "RPC Api for the `proc-macro-srv` crate of rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -52,7 +53,7 @@ expect-test.workspace = true # local deps test-utils.workspace = true test-fixture.workspace = true - +syntax-bridge.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index ba88495e14d..198dc93f6b1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -657,9 +657,9 @@ mod tests { use triomphe::Arc; use hir_expand::span_map::{RealSpanMap, SpanMap}; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode}; use span::FileId; use syntax::{ast, AstNode, TextRange}; + use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use crate::attr::{DocAtom, DocExpr}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index d3c134f3266..a988317e046 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -118,6 +118,7 @@ pub enum BodyDiagnostic { MacroError { node: InFile<AstPtr<ast::MacroCall>>, err: ExpandError }, UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath }, UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, + AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String }, UndeclaredLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, } @@ -157,7 +158,7 @@ impl Body { }), ) }); - is_async_fn = data.has_async_kw(); + is_async_fn = data.is_async(); src.map(|it| it.body().map(ast::Expr::from)) } DefWithBodyId::ConstId(c) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index 9e30aff8fe9..abf78958292 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -72,6 +72,7 @@ pub(super) fn lower( is_lowering_coroutine: false, label_ribs: Vec::new(), current_binding_owner: None, + awaitable_context: None, } .collect(params, body, is_async_fn) } @@ -100,6 +101,8 @@ struct ExprCollector<'a> { // resolution label_ribs: Vec<LabelRib>, current_binding_owner: Option<ExprId>, + + awaitable_context: Option<Awaitable>, } #[derive(Clone, Debug)] @@ -135,6 +138,11 @@ impl RibKind { } } +enum Awaitable { + Yes, + No(&'static str), +} + #[derive(Debug, Default)] struct BindingList { map: FxHashMap<Name, BindingId>, @@ -180,6 +188,18 @@ impl ExprCollector<'_> { body: Option<ast::Expr>, is_async_fn: bool, ) -> (Body, BodySourceMap) { + self.awaitable_context.replace(if is_async_fn { + Awaitable::Yes + } else { + match self.owner { + DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"), + DefWithBodyId::StaticId(..) => Awaitable::No("static"), + DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => { + Awaitable::No("constant") + } + DefWithBodyId::VariantId(..) 
=> Awaitable::No("enum variant"), + } + }); if let Some((param_list, mut attr_enabled)) = param_list { let mut params = vec![]; if let Some(self_param) = @@ -280,31 +300,40 @@ impl ExprCollector<'_> { } Some(ast::BlockModifier::Async(_)) => { self.with_label_rib(RibKind::Closure, |this| { - this.collect_block_(e, |id, statements, tail| Expr::Async { - id, - statements, - tail, + this.with_awaitable_block(Awaitable::Yes, |this| { + this.collect_block_(e, |id, statements, tail| Expr::Async { + id, + statements, + tail, + }) }) }) } Some(ast::BlockModifier::Const(_)) => { self.with_label_rib(RibKind::Constant, |this| { - let (result_expr_id, prev_binding_owner) = - this.initialize_binding_owner(syntax_ptr); - let inner_expr = this.collect_block(e); - let it = this.db.intern_anonymous_const(ConstBlockLoc { - parent: this.owner, - root: inner_expr, - }); - this.body.exprs[result_expr_id] = Expr::Const(it); - this.current_binding_owner = prev_binding_owner; - result_expr_id + this.with_awaitable_block(Awaitable::No("constant block"), |this| { + let (result_expr_id, prev_binding_owner) = + this.initialize_binding_owner(syntax_ptr); + let inner_expr = this.collect_block(e); + let it = this.db.intern_anonymous_const(ConstBlockLoc { + parent: this.owner, + root: inner_expr, + }); + this.body.exprs[result_expr_id] = Expr::Const(it); + this.current_binding_owner = prev_binding_owner; + result_expr_id + }) }) } // FIXME - Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => { - self.collect_block(e) + Some(ast::BlockModifier::AsyncGen(_)) => { + self.with_awaitable_block(Awaitable::Yes, |this| this.collect_block(e)) } + Some(ast::BlockModifier::Gen(_)) => self + .with_awaitable_block(Awaitable::No("non-async gen block"), |this| { + this.collect_block(e) + }), + None => self.collect_block(e), }, ast::Expr::LoopExpr(e) => { let label = e.label().map(|label| self.collect_label(label)); @@ -469,6 +498,12 @@ impl ExprCollector<'_> { } ast::Expr::AwaitExpr(e) => { let expr = self.collect_expr_opt(e.expr()); + if let Awaitable::No(location) = self.is_lowering_awaitable_block() { + self.source_map.diagnostics.push(BodyDiagnostic::AwaitOutsideOfAsync { + node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)), + location: location.to_string(), + }); + } self.alloc_expr(Expr::Await { expr }, syntax_ptr) } ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e), @@ -527,7 +562,13 @@ impl ExprCollector<'_> { let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine); let prev_try_block_label = this.current_try_block_label.take(); - let body = this.collect_expr_opt(e.body()); + let awaitable = if e.async_token().is_some() { + Awaitable::Yes + } else { + Awaitable::No("non-async closure") + }; + let body = + this.with_awaitable_block(awaitable, |this| this.collect_expr_opt(e.body())); let closure_kind = if this.is_lowering_coroutine { let movability = if e.static_token().is_some() { @@ -2082,6 +2123,21 @@ impl ExprCollector<'_> { fn alloc_label_desugared(&mut self, label: Label) -> LabelId { self.body.labels.alloc(label) } + + fn is_lowering_awaitable_block(&self) -> &Awaitable { + self.awaitable_context.as_ref().unwrap_or(&Awaitable::No("unknown")) + } + + fn with_awaitable_block<T>( + &mut self, + awaitable: Awaitable, + f: impl FnOnce(&mut Self) -> T, + ) -> T { + let orig = self.awaitable_context.replace(awaitable); + let res = f(self); + self.awaitable_context = orig; + res + } } fn comma_follows_token(t: Option<syntax::SyntaxToken>) -> bool { 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index c3c2e51fd03..d17ebd7ff92 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -94,6 +94,12 @@ impl FunctionData { .filter(|it| !it.is_empty()) .map(Box::new); let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); + if flags.contains(FnFlags::HAS_UNSAFE_KW) + && !crate_graph[krate].edition.at_least_2024() + && attrs.by_key(&sym::rustc_deprecated_safe_2024).exists() + { + flags.remove(FnFlags::HAS_UNSAFE_KW); + } Arc::new(FunctionData { name: func.name.clone(), @@ -126,19 +132,19 @@ impl FunctionData { self.flags.contains(FnFlags::HAS_SELF_PARAM) } - pub fn has_default_kw(&self) -> bool { + pub fn is_default(&self) -> bool { self.flags.contains(FnFlags::HAS_DEFAULT_KW) } - pub fn has_const_kw(&self) -> bool { + pub fn is_const(&self) -> bool { self.flags.contains(FnFlags::HAS_CONST_KW) } - pub fn has_async_kw(&self) -> bool { + pub fn is_async(&self) -> bool { self.flags.contains(FnFlags::HAS_ASYNC_KW) } - pub fn has_unsafe_kw(&self) -> bool { + pub fn is_unsafe(&self) -> bool { self.flags.contains(FnFlags::HAS_UNSAFE_KW) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 56feb0163e1..b1103d35cab 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -160,7 +160,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba fn const_data(&self, konst: ConstId) -> Arc<ConstData>; #[salsa::invoke(StaticData::static_data_query)] - fn static_data(&self, konst: StaticId) -> Arc<StaticData>; + fn static_data(&self, statik: StaticId) -> Arc<StaticData>; #[salsa::invoke(Macro2Data::macro2_data_query)] fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>; @@ -240,14 +240,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; - fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>; + fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>; } // return: macro call id and include file id fn include_macro_invoc( db: &dyn DefDatabase, krate: CrateId, -) -> Vec<(MacroCallId, EditionedFileId)> { +) -> Arc<[(MacroCallId, EditionedFileId)]> { db.crate_def_map(krate) .modules .values() diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 91594aecd04..5a3a3e91897 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -50,13 +50,13 @@ pub fn find_path( prefix: prefix_kind, cfg, ignore_local_imports, + is_std_item: db.crate_graph()[item_module.krate()].origin.is_lang(), from, from_def_map: &from.def_map(db), fuel: Cell::new(FIND_PATH_FUEL), }, item, MAX_PATH_LEN, - db.crate_graph()[item_module.krate()].origin.is_lang(), ) } @@ -98,20 +98,16 @@ struct FindPathCtx<'db> { prefix: PrefixKind, cfg: ImportPathConfig, ignore_local_imports: bool, + is_std_item: bool, from: ModuleId, from_def_map: &'db DefMap, fuel: Cell<usize>, } /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId -fn find_path_inner( - ctx: &FindPathCtx<'_>, - item: ItemInNs, - max_len: usize, - is_std_item: bool, 
-) -> Option<ModPath> { +fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> { // - if the item is a module, jump straight to module search - if !is_std_item { + if !ctx.is_std_item { if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) .map(|choice| choice.path); @@ -138,12 +134,9 @@ fn find_path_inner( if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { // - if the item is an enum variant, refer to it via the enum - if let Some(mut path) = find_path_inner( - ctx, - ItemInNs::Types(variant.lookup(ctx.db).parent.into()), - max_len, - is_std_item, - ) { + if let Some(mut path) = + find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len) + { path.push_segment(ctx.db.enum_variant_data(variant).name.clone()); return Some(path); } @@ -152,16 +145,6 @@ fn find_path_inner( // variant somewhere } - if is_std_item { - // The item we are searching for comes from the sysroot libraries, so skip prefer looking in - // the sysroot libraries directly. - // We do need to fallback as the item in question could be re-exported by another crate - // while not being a transitive dependency of the current crate. - if let Some(choice) = find_in_sysroot(ctx, &mut FxHashSet::default(), item, max_len) { - return Some(choice.path); - } - } - let mut best_choice = None; calculate_best_path(ctx, &mut FxHashSet::default(), item, max_len, &mut best_choice); best_choice.map(|choice| choice.path) @@ -366,6 +349,12 @@ fn calculate_best_path( // Item was defined in the same crate that wants to import it. It cannot be found in any // dependency in this case. calculate_best_path_local(ctx, visited_modules, item, max_len, best_choice) + } else if ctx.is_std_item { + // The item we are searching for comes from the sysroot libraries, so skip prefer looking in + // the sysroot libraries directly. + // We do need to fallback as the item in question could be re-exported by another crate + // while not being a transitive dependency of the current crate. + find_in_sysroot(ctx, visited_modules, item, max_len, best_choice) } else { // Item was defined in some upstream crate. This means that it must be exported from one, // too (unless we can't name it at all). It could *also* be (re)exported by the same crate @@ -382,10 +371,10 @@ fn find_in_sysroot( visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, item: ItemInNs, max_len: usize, -) -> Option<Choice> { + best_choice: &mut Option<Choice>, +) { let crate_graph = ctx.db.crate_graph(); let dependencies = &crate_graph[ctx.from.krate].dependencies; - let mut best_choice = None; let mut search = |lang, best_choice: &mut _| { if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| { match crate_graph[dep.crate_id].origin { @@ -397,29 +386,31 @@ fn find_in_sysroot( } }; if ctx.cfg.prefer_no_std { - search(LangCrateOrigin::Core, &mut best_choice); + search(LangCrateOrigin::Core, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. })) { - return best_choice; + return; } - search(LangCrateOrigin::Std, &mut best_choice); + search(LangCrateOrigin::Std, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. })) { - return best_choice; + return; } } else { - search(LangCrateOrigin::Std, &mut best_choice); + search(LangCrateOrigin::Std, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. 
})) { - return best_choice; + return; } - search(LangCrateOrigin::Core, &mut best_choice); + search(LangCrateOrigin::Core, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. })) { - return best_choice; + return; } } - let mut best_choice = None; - dependencies.iter().filter(|it| it.is_sysroot()).for_each(|dep| { - find_in_dep(ctx, visited_modules, item, max_len, &mut best_choice, dep.crate_id); - }); - best_choice + dependencies + .iter() + .filter(|it| it.is_sysroot()) + .chain(dependencies.iter().filter(|it| !it.is_sysroot())) + .for_each(|dep| { + find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id); + }); } fn find_in_dep( @@ -491,6 +482,7 @@ fn calculate_best_path_local( ); } +#[derive(Debug)] struct Choice { path: ModPath, /// The length in characters of the path @@ -676,6 +668,7 @@ mod tests { path: &str, prefer_prelude: bool, prefer_absolute: bool, + prefer_no_std: bool, expect: Expect, ) { let (db, pos) = TestDB::with_position(ra_fixture); @@ -717,7 +710,7 @@ mod tests { module, prefix, ignore_local_imports, - ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute }, + ImportPathConfig { prefer_no_std, prefer_prelude, prefer_absolute }, ); format_to!( res, @@ -732,15 +725,19 @@ mod tests { } fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, false, expect); + check_found_path_(ra_fixture, path, false, false, false, expect); } fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, true, false, expect); + check_found_path_(ra_fixture, path, true, false, false, expect); } fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, true, expect); + check_found_path_(ra_fixture, path, false, true, false, expect); + } + + fn check_found_path_prefer_no_std(ra_fixture: &str, path: &str, expect: Expect) { + check_found_path_(ra_fixture, path, false, false, true, expect); } #[test] @@ -1361,9 +1358,66 @@ pub mod sync { "#]], ); } + #[test] + fn prefer_core_paths_over_std_for_mod_reexport() { + check_found_path_prefer_no_std( + r#" +//- /main.rs crate:main deps:core,std + +$0 + +//- /stdlib.rs crate:std deps:core + +pub use core::pin; + +//- /corelib.rs crate:core + +pub mod pin { + pub struct Pin; +} + "#, + "std::pin::Pin", + expect![[r#" + Plain (imports ✔): core::pin::Pin + Plain (imports ✖): core::pin::Pin + ByCrate(imports ✔): core::pin::Pin + ByCrate(imports ✖): core::pin::Pin + BySelf (imports ✔): core::pin::Pin + BySelf (imports ✖): core::pin::Pin + "#]], + ); + } #[test] fn prefer_core_paths_over_std() { + check_found_path_prefer_no_std( + r#" +//- /main.rs crate:main deps:core,std + +$0 + +//- /std.rs crate:std deps:core + +pub mod fmt { + pub use core::fmt::Error; +} + +//- /zzz.rs crate:core + +pub mod fmt { + pub struct Error; +} + "#, + "core::fmt::Error", + expect![[r#" + Plain (imports ✔): core::fmt::Error + Plain (imports ✖): core::fmt::Error + ByCrate(imports ✔): core::fmt::Error + ByCrate(imports ✖): core::fmt::Error + BySelf (imports ✔): core::fmt::Error + BySelf (imports ✖): core::fmt::Error + "#]], + ); check_found_path( r#" //- /main.rs crate:main deps:core,std @@ -1878,10 +1932,9 @@ pub mod ops { #[test] fn respect_unstable_modules() { - check_found_path( + check_found_path_prefer_no_std( r#" //- /main.rs crate:main deps:std,core -#![no_std] extern crate std; $0 //- /longer.rs crate:std deps:core diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 66412b26a00..4ced30c81dc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -105,7 +105,7 @@ use crate::{ type FxIndexMap<K, V> = indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; -/// A wrapper around two booleans, [`ImportPathConfig::prefer_no_std`] and [`ImportPathConfig::prefer_prelude`]. +/// A wrapper around three booleans #[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)] pub struct ImportPathConfig { /// If true, prefer to unconditionally use imports of the `core` and `alloc` crate diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 64b37d2d065..fc1460870ce 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1201,7 +1201,6 @@ macro_rules! m { #[test] fn test_meta_doc_comments() { - cov_mark::check!(test_meta_doc_comments); check( r#" macro_rules! m { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index d34f0afc3ef..b430e2cefb3 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -317,9 +317,9 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { _: Span, _: Span, ) -> Result<Subtree, ProcMacroExpansionError> { - let (parse, _) = ::mbe::token_tree_to_syntax_node( + let (parse, _) = syntax_bridge::token_tree_to_syntax_node( subtree, - ::mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, span::Edition::CURRENT, ); if parse.errors().is_empty() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 483bffc4b29..debc5a44326 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -56,7 +56,6 @@ use crate::{ }; static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); -static EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128); static FIXED_POINT_LIMIT: Limit = Limit::new(8192); pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap { @@ -1440,7 +1439,14 @@ impl DefCollector<'_> { depth: usize, container: ItemContainerId, ) { - if EXPANSION_DEPTH_LIMIT.check(depth).is_err() { + let recursion_limit = self.def_map.recursion_limit() as usize; + let recursion_limit = Limit::new(if cfg!(test) { + // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug + std::cmp::min(32, recursion_limit) + } else { + recursion_limit + }); + if recursion_limit.check(depth).is_err() { cov_mark::hit!(macro_expansion_overflow); tracing::warn!("macro expansion is too deep"); return; @@ -2003,7 +2009,7 @@ impl ModCollector<'_, '_> { Err(cfg) => { self.emit_unconfigured_diagnostic( self.tree_id, - AttrOwner::TopLevel, + AttrOwner::ModItem(module_id.into()), &cfg, ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index e82af318501..d319831867c 100644 --- 
a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,4 +1,4 @@ -use base_db::{SourceDatabase, SourceDatabaseExt2 as _}; +use base_db::{SourceDatabase, SourceDatabaseFileInputExt as _}; use test_fixture::WithFixture; use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs index 7c39773aa68..70918a9358e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs @@ -194,6 +194,11 @@ pub(super) fn lower_generic_args( match generic_arg { ast::GenericArg::TypeArg(type_arg) => { let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty()); + type_ref.walk(&mut |tr| { + if let TypeRef::ImplTrait(bounds) = tr { + lower_ctx.update_impl_traits_bounds(bounds.clone()); + } + }); args.push(GenericArg::Type(type_ref)); } ast::GenericArg::AssocTypeArg(assoc_type_arg) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index f44472eae5b..df9dec69d46 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -19,7 +19,7 @@ use crate::{ }; #[salsa::database( - base_db::SourceDatabaseExtStorage, + base_db::SourceRootDatabaseStorage, base_db::SourceDatabaseStorage, hir_expand::db::ExpandDatabaseStorage, crate::db::InternDatabaseStorage, @@ -69,9 +69,6 @@ impl fmt::Debug for TestDB { impl panic::RefUnwindSafe for TestDB {} impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc<str> { - FileLoaderDelegate(self).file_text(file_id) - } fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { FileLoaderDelegate(self).resolve_path(path) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml index ca05618aecd..03a9d54d2e6 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir-expand" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Macro expansion for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -33,6 +34,7 @@ mbe.workspace = true limit.workspace = true span.workspace = true parser.workspace = true +syntax-bridge.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index 777e4154186..79cfeb4cf18 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -6,14 +6,12 @@ use cfg::CfgExpr; use either::Either; use intern::{sym, Interned, Symbol}; -use mbe::{ - desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode, - Punct, -}; +use mbe::{DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; use span::{Span, SyntaxContextId}; use syntax::unescape; use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode}; +use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode}; use triomphe::ThinArc; use crate::name::Name; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs index f560d3bfd1d..7d3e8deaf08 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs @@ -2,10 +2,10 @@ use intern::sym; use itertools::izip; -use mbe::DocCommentDesugarMode; use rustc_hash::FxHashSet; use span::{MacroCallId, Span}; use stdx::never; +use syntax_bridge::DocCommentDesugarMode; use tracing::debug; use crate::{ @@ -209,9 +209,9 @@ struct BasicAdtInfo { } fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> { - let (parsed, tm) = &mbe::token_tree_to_syntax_node( + let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node( tt, - mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, parser::Edition::CURRENT_FIXME, ); let macro_items = ast::MacroItems::cast(parsed.syntax_node()) @@ -268,7 +268,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -282,7 +282,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr }; let bounds = match ¶m { ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -295,7 +295,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr let ty = param .ty() .map(|ty| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( ty.syntax(), tm, call_site, @@ -316,7 +316,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr let where_clause = if let Some(w) = where_clause { w.predicates() .map(|it| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -353,7 +353,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr param_type_set.contains(&name).then_some(p) }) .map(|it| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs 
b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 7903ac075be..795d9b14df9 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -4,13 +4,14 @@ use base_db::AnchoredPath; use cfg::CfgExpr; use either::Either; use intern::{sym, Symbol}; -use mbe::{parse_exprs_with_sep, parse_to_token_tree, DelimiterKind}; +use mbe::{expect_fragment, DelimiterKind}; use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use stdx::format_to; use syntax::{ format_smolstr, unescape::{unescape_byte, unescape_char, unescape_unicode, Mode}, }; +use syntax_bridge::parse_to_token_tree; use crate::{ builtin::quote::{dollar_crate, quote}, @@ -228,20 +229,22 @@ fn assert_expand( span: Span, ) -> ExpandResult<tt::Subtree> { let call_site_span = span_with_call_site_ctxt(db, span, id); - let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME); + + let mut iter = ::tt::iter::TtIter::new(tt); + + let cond = expect_fragment( + &mut iter, + parser::PrefixEntryPoint::Expr, + db.crate_graph()[id.lookup(db).krate].edition, + tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, + ); + _ = iter.expect_char(','); + let rest = iter.as_slice(); + let dollar_crate = dollar_crate(span); - let expanded = match &*args { - [cond, panic_args @ ..] => { - let comma = tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(call_site_span), - token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - char: ',', - spacing: tt::Spacing::Alone, - span: call_site_span, - }))]), - }; - let cond = cond.clone(); - let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); + let expanded = match cond.value { + Some(cond) => { + let panic_args = rest.iter().cloned(); let mac = if use_panic_2021(db, span) { quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) } } else { @@ -253,10 +256,13 @@ fn assert_expand( } }} } - [] => quote! {call_site_span =>{}}, + None => quote! {call_site_span =>{}}, }; - ExpandResult::ok(expanded) + match cond.err { + Some(err) => ExpandResult::new(expanded, err.into()), + None => ExpandResult::ok(expanded), + } } fn file_expand( diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs index 1a3dd0e7ddb..8b3f69db027 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs @@ -1,7 +1,7 @@ //! Defines a unit of change that can applied to the database to get the next //! state. Changes are transactional. 
use base_db::{ - salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot, + salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase, TargetLayoutLoadResult, Version, }; use la_arena::RawIdx; @@ -23,7 +23,7 @@ impl ChangeWithProcMacros { Self::default() } - pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { + pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) { self.source_change.apply(db); if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 01a35660a90..584f9631e34 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -3,10 +3,11 @@ use base_db::{salsa, CrateId, SourceDatabase}; use either::Either; use limit::Limit; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex}; +use mbe::MatchedArmIndex; use rustc_hash::FxHashSet; use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId}; use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T}; +use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use triomphe::Arc; use crate::{ @@ -165,7 +166,7 @@ pub fn expand_speculative( // Build the subtree and token mapping for the speculative args let (mut tt, undo_info) = match loc.kind { MacroCallKind::FnLike { .. } => ( - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( speculative_args, span_map, span, @@ -178,7 +179,7 @@ pub fn expand_speculative( SyntaxFixupUndoInfo::NONE, ), MacroCallKind::Attr { .. 
} if loc.def.is_attribute_derive() => ( - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( speculative_args, span_map, span, @@ -213,7 +214,7 @@ pub fn expand_speculative( fixups.remove.extend(censor_cfg); ( - mbe::syntax_node_to_token_tree_modified( + syntax_bridge::syntax_node_to_token_tree_modified( speculative_args, span_map, fixups.append, @@ -459,7 +460,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { return dummy_tt(kind); } - let mut tt = mbe::syntax_node_to_token_tree( + let mut tt = syntax_bridge::syntax_node_to_token_tree( tt.syntax(), map.as_ref(), span, @@ -515,7 +516,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { fixups.remove.extend(censor_cfg); ( - mbe::syntax_node_to_token_tree_modified( + syntax_bridge::syntax_node_to_token_tree_modified( syntax, map, fixups.append, @@ -720,13 +721,13 @@ fn token_tree_to_syntax_node( edition: parser::Edition, ) -> (Parse<SyntaxNode>, ExpansionSpanMap) { let entry_point = match expand_to { - ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, - ExpandTo::Items => mbe::TopEntryPoint::MacroItems, - ExpandTo::Pattern => mbe::TopEntryPoint::Pattern, - ExpandTo::Type => mbe::TopEntryPoint::Type, - ExpandTo::Expr => mbe::TopEntryPoint::Expr, + ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts, + ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems, + ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern, + ExpandTo::Type => syntax_bridge::TopEntryPoint::Type, + ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr, }; - mbe::token_tree_to_syntax_node(tt, entry_point, edition) + syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition) } fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index 48851af3fd1..b1a6eed2fbc 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -2,10 +2,10 @@ use base_db::CrateId; use intern::sym; -use mbe::DocCommentDesugarMode; use span::{Edition, MacroCallId, Span, SyntaxContextId}; use stdx::TupleExt; use syntax::{ast, AstNode}; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -112,7 +112,7 @@ impl DeclarativeMacroExpander { ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( + let tt = syntax_bridge::syntax_node_to_token_tree( arg.syntax(), map.as_ref(), map.span_for_range( @@ -135,14 +135,14 @@ impl DeclarativeMacroExpander { let span = map.span_for_range(macro_def.macro_token().unwrap().text_range()); let args = macro_def.args().map(|args| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( args.syntax(), map.as_ref(), span, DocCommentDesugarMode::Mbe, ) }); - let body = mbe::syntax_node_to_token_tree( + let body = syntax_bridge::syntax_node_to_token_tree( body.syntax(), map.as_ref(), span, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 5385b44532b..3528b2dde73 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -19,9 +19,9 @@ //! //! 
See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros> use base_db::CrateId; -use mbe::DocCommentDesugarMode; use span::SyntaxContextId; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -82,7 +82,7 @@ pub fn expand_eager_macro_input( return ExpandResult { value: None, err }; }; - let mut subtree = mbe::syntax_node_to_token_tree( + let mut subtree = syntax_bridge::syntax_node_to_token_tree( &expanded_eager_input, arg_map, span, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 71579d2f87f..b6d5828da96 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -2,7 +2,6 @@ //! fix up syntax errors in the code we're passing to them. use intern::sym; -use mbe::DocCommentDesugarMode; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; use span::{ @@ -14,6 +13,7 @@ use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, }; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use tt::Spacing; @@ -76,7 +76,8 @@ pub(crate) fn fixup_syntax( if can_handle_error(&node) && has_error_to_handle(&node) { remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode); + let original_tree = + syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode); let idx = original.len() as u32; original.push(original_tree); let span = span_map.span_for_range(node_range); @@ -434,9 +435,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use mbe::DocCommentDesugarMode; use span::{Edition, EditionedFileId, FileId}; use syntax::TextRange; + use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -483,7 +484,7 @@ mod tests { span_map.span_for_range(TextRange::empty(0.into())), DocCommentDesugarMode::Mbe, ); - let mut tt = mbe::syntax_node_to_token_tree_modified( + let mut tt = syntax_bridge::syntax_node_to_token_tree_modified( &parsed.syntax_node(), span_map.as_ref(), fixups.append, @@ -498,9 +499,9 @@ mod tests { expect.assert_eq(&actual); // the fixed-up tree should be syntactically valid - let (parse, _) = mbe::token_tree_to_syntax_node( + let (parse, _) = syntax_bridge::token_tree_to_syntax_node( &tt, - ::mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, parser::Edition::CURRENT, ); assert!( @@ -513,7 +514,7 @@ mod tests { // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. 
- let original_as_tt = mbe::syntax_node_to_token_tree( + let original_as_tt = syntax_bridge::syntax_node_to_token_tree( &parsed.syntax_node(), span_map.as_ref(), span_map.span_for_range(TextRange::empty(0.into())), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 18da77d6caa..2bea9026265 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -176,7 +176,12 @@ impl ExpandErrorKind { &ExpandErrorKind::MissingProcMacroExpander(def_crate) => { match db.proc_macros().get_error_for_crate(def_crate) { Some((e, hard_err)) => (e.to_owned(), hard_err), - None => ("missing expander".to_owned(), true), + None => ( + format!( + "internal error: proc-macro map is missing error entry for crate {def_crate:?}" + ), + true, + ), } } ExpandErrorKind::MacroDefinition => { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs index 3be88ee9dae..740c27b89ce 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -28,13 +28,13 @@ pub enum SpanMapRef<'a> { RealSpanMap(&'a RealSpanMap), } -impl mbe::SpanMapper<Span> for SpanMap { +impl syntax_bridge::SpanMapper<Span> for SpanMap { fn span_for(&self, range: TextRange) -> Span { self.span_for_range(range) } } -impl mbe::SpanMapper<Span> for SpanMapRef<'_> { +impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> { fn span_for(&self, range: TextRange) -> Span { self.span_for_range(range) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index b6c33683ff6..b079b5675bd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir-ty" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "The type system for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index d506e00ca12..a151ee01e64 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -275,7 +275,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> { }; chalk_ir::Binders::new(binders, bound) } - crate::ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = self .db .type_alias_impl_traits(alias) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index 5765262b08b..302558162ac 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -276,7 +276,7 @@ impl TyExt for Ty { data.substitute(Interner, &subst).into_value_and_skipped_binders().0 }) } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { db.type_alias_impl_traits(alias).map(|it| { let data = (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); @@ -295,7 +295,7 @@ impl TyExt for Ty { data.substitute(Interner, &opaque_ty.substitution) }) } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { db.type_alias_impl_traits(alias).map(|it| { let data = (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index e52fae06d7f..6e5a7cce9c9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -4,20 +4,25 @@ use std::fmt; +use chalk_solve::rust_ir::AdtKind; use either::Either; -use hir_def::lang_item::LangItem; -use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule}; -use hir_def::{ItemContainerId, Lookup}; +use hir_def::{ + lang_item::LangItem, + resolver::{HasResolver, ValueNs}, + AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup, +}; use intern::sym; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::constructor::Constructor; -use syntax::{ast, AstNode}; +use syntax::{ + ast::{self, UnaryOp}, + AstNode, +}; use tracing::debug; use triomphe::Arc; use typed_arena::Arena; -use crate::Interner; use crate::{ db::HirDatabase, diagnostics::match_check::{ @@ -25,7 +30,7 @@ use crate::{ pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat}, }, display::HirDisplay, - InferenceResult, Ty, TyExt, + Adjust, InferenceResult, Interner, Ty, TyExt, TyKind, }; pub(crate) use hir_def::{ @@ -117,7 +122,7 @@ impl ExprValidator { Expr::If { .. } => { self.check_for_unnecessary_else(id, expr, db); } - Expr::Block { .. } => { + Expr::Block { .. } | Expr::Async { .. } | Expr::Unsafe { .. 
} => { self.validate_block(db, expr); } _ => {} @@ -236,7 +241,12 @@ impl ExprValidator { return; } - let report = match cx.compute_match_usefulness(m_arms.as_slice(), scrut_ty.clone()) { + let known_valid_scrutinee = Some(self.is_known_valid_scrutinee(scrutinee_expr, db)); + let report = match cx.compute_match_usefulness( + m_arms.as_slice(), + scrut_ty.clone(), + known_valid_scrutinee, + ) { Ok(report) => report, Err(()) => return, }; @@ -253,8 +263,52 @@ impl ExprValidator { } } + // [rustc's `is_known_valid_scrutinee`](https://github.com/rust-lang/rust/blob/c9bd03cb724e13cca96ad320733046cbdb16fbbe/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L288) + // + // While the above function in rustc uses thir exprs, r-a doesn't have them. + // So, the logic here is getting same result as "hir lowering + match with lowered thir" + // with "hir only" + fn is_known_valid_scrutinee(&self, scrutinee_expr: ExprId, db: &dyn HirDatabase) -> bool { + if self + .infer + .expr_adjustments + .get(&scrutinee_expr) + .is_some_and(|adjusts| adjusts.iter().any(|a| matches!(a.kind, Adjust::Deref(..)))) + { + return false; + } + + match &self.body[scrutinee_expr] { + Expr::UnaryOp { op: UnaryOp::Deref, .. } => false, + Expr::Path(path) => { + let value_or_partial = self + .owner + .resolver(db.upcast()) + .resolve_path_in_value_ns_fully(db.upcast(), path); + value_or_partial.map_or(true, |v| !matches!(v, ValueNs::StaticId(_))) + } + Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) { + TyKind::Adt(adt, ..) + if db.adt_datum(self.owner.krate(db.upcast()), *adt).kind == AdtKind::Union => + { + false + } + _ => self.is_known_valid_scrutinee(*expr, db), + }, + Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base, db), + Expr::Cast { expr, .. } => self.is_known_valid_scrutinee(*expr, db), + Expr::Missing => false, + _ => true, + } + } + fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) { - let Expr::Block { statements, .. } = expr else { return }; + let (Expr::Block { statements, .. } + | Expr::Async { statements, .. } + | Expr::Unsafe { statements, .. 
}) = expr + else { + return; + }; let pattern_arena = Arena::new(); let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); for stmt in &**statements { @@ -280,7 +334,7 @@ impl ExprValidator { has_guard: false, arm_data: (), }; - let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) { + let report = match cx.compute_match_usefulness(&[match_arm], ty.clone(), None) { Ok(v) => v, Err(e) => { debug!(?e, "match usefulness error"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index a12e201cf3d..7b3abf501d2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -69,22 +69,20 @@ pub(crate) struct MatchCheckCtx<'db> { body: DefWithBodyId, pub(crate) db: &'db dyn HirDatabase, exhaustive_patterns: bool, - min_exhaustive_patterns: bool, } impl<'db> MatchCheckCtx<'db> { pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns); - let min_exhaustive_patterns = - def_map.is_unstable_feature_enabled(&sym::min_exhaustive_patterns); - Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } + Self { module, body, db, exhaustive_patterns } } pub(crate) fn compute_match_usefulness( &self, arms: &[MatchArm<'db>], scrut_ty: Ty, + known_valid_scrutinee: Option<bool>, ) -> Result<UsefulnessReport<'db, Self>, ()> { if scrut_ty.contains_unknown() { return Err(()); @@ -95,8 +93,7 @@ impl<'db> MatchCheckCtx<'db> { } } - // FIXME: Determine place validity correctly. For now, err on the safe side. - let place_validity = PlaceValidity::MaybeInvalid; + let place_validity = PlaceValidity::from_bool(known_valid_scrutinee.unwrap_or(true)); // Measured to take ~100ms on modern hardware. let complexity_limit = Some(500000); compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit) @@ -307,7 +304,8 @@ impl<'db> MatchCheckCtx<'db> { &Str(void) => match void {}, Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild, Never => PatKind::Never, - Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => { + Missing | F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Opaque(..) + | Or => { never!("can't convert to pattern: {:?}", pat.ctor()); PatKind::Wild } @@ -327,9 +325,6 @@ impl<'db> PatCx for MatchCheckCtx<'db> { fn is_exhaustive_patterns_feature_on(&self) -> bool { self.exhaustive_patterns } - fn is_min_exhaustive_patterns_feature_on(&self) -> bool { - self.min_exhaustive_patterns - } fn ctor_arity( &self, @@ -356,8 +351,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> { }, Ref => 1, Slice(..) => unimplemented!(), - Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) - | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, + Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..) + | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited + | Hidden | Missing | Wildcard => 0, Or => { never!("The `Or` constructor doesn't have a fixed arity"); 0 @@ -419,8 +415,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> { } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), - Never | Bool(..) 
| IntRange(..) | F32Range(..) | F64Range(..) | Str(..) - | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => { + Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..) + | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited + | Hidden | Missing | Wildcard => { smallvec![] } Or => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 22aa5c69bb0..3f54cdd20ce 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -17,7 +17,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> { let mut res = Vec::new(); let is_unsafe = match def { - DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(), + DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(), DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index a433ecfd778..7c481958d1a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -1022,16 +1022,16 @@ impl HirDisplay for Ty { // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? if parameters.len() - impl_ > 0 { // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes + let without_impl = self_param as usize + type_ + const_ + lifetime; // parent's params (those from enclosing impl or trait, if any). - let (fn_params, other) = - parameters.split_at(self_param as usize + type_ + const_ + lifetime); - let (_impl, parent_params) = other.split_at(impl_); + let (fn_params, parent_params) = parameters.split_at(without_impl + impl_); debug_assert_eq!(parent_params.len(), parent_len); let parent_params = generic_args_sans_defaults(f, Some(generic_def_id), parent_params); let fn_params = - generic_args_sans_defaults(f, Some(generic_def_id), fn_params); + &generic_args_sans_defaults(f, Some(generic_def_id), fn_params) + [0..without_impl]; write!(f, "<")?; hir_fmt_generic_arguments(f, parent_params, None)?; @@ -1069,6 +1069,7 @@ impl HirDisplay for Ty { module_id, PrefixKind::Plain, false, + // FIXME: no_std Cfg? 
ImportPathConfig { prefer_no_std: false, prefer_prelude: true, @@ -1151,11 +1152,10 @@ impl HirDisplay for Ty { )?; // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = db.type_alias_impl_traits(alias).expect("impl trait id without data"); - let data = - (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); + let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, ¶meters); let krate = alias.krate(db.upcast()); write_bounds_like_dyn_trait_with_prefix( @@ -1338,7 +1338,7 @@ impl HirDisplay for Ty { SizedByDefault::Sized { anchor: krate }, )?; } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = db.type_alias_impl_traits(alias).expect("impl trait id without data"); let data = diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 804bc53905a..45d423d03c0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -36,15 +36,14 @@ use hir_def::{ body::Body, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, - hir::LabelId, - hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId}, + hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId}, lang_item::{LangItem, LangItemTarget}, layout::Integer, path::{ModPath, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::{LifetimeRef, TypeRef}, - AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, Lookup, TraitId, - TupleFieldId, TupleId, TypeAliasId, VariantId, + AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ImplId, ItemContainerId, Lookup, + TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, }; use hir_expand::name::Name; use indexmap::IndexSet; @@ -785,14 +784,19 @@ impl<'a> InferenceContext<'a> { fn collect_const(&mut self, data: &ConstData) { let return_ty = self.make_ty(&data.type_ref); - // Constants might be associated items that define ATPITs. - self.insert_atpit_coercion_table(iter::once(&return_ty)); + // Constants might be defining usage sites of TAITs. + self.make_tait_coercion_table(iter::once(&return_ty)); self.return_ty = return_ty; } fn collect_static(&mut self, data: &StaticData) { - self.return_ty = self.make_ty(&data.type_ref); + let return_ty = self.make_ty(&data.type_ref); + + // Statics might be defining usage sites of TAITs. + self.make_tait_coercion_table(iter::once(&return_ty)); + + self.return_ty = return_ty; } fn collect_fn(&mut self, func: FunctionId) { @@ -857,11 +861,11 @@ impl<'a> InferenceContext<'a> { self.return_ty = self.normalize_associated_types_in(return_ty); self.return_coercion = Some(CoerceMany::new(self.return_ty.clone())); - // Functions might be associated items that define ATPITs. - // To define an ATPITs, that ATPIT must appear in the function's signatures. + // Functions might be defining usage sites of TAITs. + // To define an TAITs, that TAIT must appear in the function's signatures. // So, it suffices to check for params and return types. 
params_and_ret_tys.push(self.return_ty.clone()); - self.insert_atpit_coercion_table(params_and_ret_tys.iter()); + self.make_tait_coercion_table(params_and_ret_tys.iter()); } fn insert_inference_vars_for_impl_trait<T>(&mut self, t: T, placeholders: Substitution) -> T @@ -880,7 +884,7 @@ impl<'a> InferenceContext<'a> { ImplTraitId::ReturnTypeImplTrait(def, idx) => { (self.db.return_type_impl_traits(def), idx) } - ImplTraitId::AssociatedTypeImplTrait(def, idx) => { + ImplTraitId::TypeAliasImplTrait(def, idx) => { (self.db.type_alias_impl_traits(def), idx) } _ => unreachable!(), @@ -909,23 +913,25 @@ impl<'a> InferenceContext<'a> { } /// The coercion of a non-inference var into an opaque type should fail, - /// but not in the defining sites of the ATPITs. - /// In such cases, we insert an proxy inference var for each ATPIT, - /// and coerce into it instead of ATPIT itself. + /// but not in the defining sites of the TAITs. + /// In such cases, we insert an proxy inference var for each TAIT, + /// and coerce into it instead of TAIT itself. /// /// The inference var stretagy is effective because; /// - /// - It can still unify types that coerced into ATPIT + /// - It can still unify types that coerced into TAITs /// - We are pushing `impl Trait` bounds into it /// /// This function inserts a map that maps the opaque type to that proxy inference var. - fn insert_atpit_coercion_table<'b>(&mut self, tys: impl Iterator<Item = &'b Ty>) { - struct OpaqueTyCollector<'a, 'b> { + fn make_tait_coercion_table<'b>(&mut self, tait_candidates: impl Iterator<Item = &'b Ty>) { + struct TypeAliasImplTraitCollector<'a, 'b> { + db: &'b dyn HirDatabase, table: &'b mut InferenceTable<'a>, - opaque_tys: FxHashMap<OpaqueTyId, Ty>, + assocs: FxHashMap<OpaqueTyId, (ImplId, Ty)>, + non_assocs: FxHashMap<OpaqueTyId, Ty>, } - impl<'a, 'b> TypeVisitor<Interner> for OpaqueTyCollector<'a, 'b> { + impl<'a, 'b> TypeVisitor<Interner> for TypeAliasImplTraitCollector<'a, 'b> { type BreakTy = (); fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> { @@ -944,59 +950,105 @@ impl<'a> InferenceContext<'a> { let ty = self.table.resolve_ty_shallow(ty); if let TyKind::OpaqueType(id, _) = ty.kind(Interner) { - self.opaque_tys.insert(*id, ty.clone()); + if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = + self.db.lookup_intern_impl_trait_id((*id).into()) + { + let loc = self.db.lookup_intern_type_alias(alias_id); + match loc.container { + ItemContainerId::ImplId(impl_id) => { + self.assocs.insert(*id, (impl_id, ty.clone())); + } + ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) 
=> { + self.non_assocs.insert(*id, ty.clone()); + } + _ => {} + } + } } ty.super_visit_with(self, outer_binder) } } - // Early return if this is not happening inside the impl block - let impl_id = if let Some(impl_id) = self.resolver.impl_def() { - impl_id - } else { - return; + let mut collector = TypeAliasImplTraitCollector { + db: self.db, + table: &mut self.table, + assocs: FxHashMap::default(), + non_assocs: FxHashMap::default(), }; - - let assoc_tys: FxHashSet<_> = self - .db - .impl_data(impl_id) - .items - .iter() - .filter_map(|item| match item { - AssocItemId::TypeAliasId(alias) => Some(*alias), - _ => None, - }) - .collect(); - if assoc_tys.is_empty() { - return; + for ty in tait_candidates { + ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); } - let mut collector = - OpaqueTyCollector { table: &mut self.table, opaque_tys: FxHashMap::default() }; - for ty in tys { - ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); + // Non-assoc TAITs can be define-used everywhere as long as they are + // in function signatures or const types, etc + let mut taits = collector.non_assocs; + + // assoc TAITs(ATPITs) can be only define-used inside their impl block. + // They cannot be define-used in inner items like in the following; + // + // ``` + // impl Trait for Struct { + // type Assoc = impl Default; + // + // fn assoc_fn() -> Self::Assoc { + // let foo: Self::Assoc = true; // Allowed here + // + // fn inner() -> Self::Assoc { + // false // Not allowed here + // } + // + // foo + // } + // } + // ``` + let impl_id = match self.owner { + DefWithBodyId::FunctionId(it) => { + let loc = self.db.lookup_intern_function(it); + if let ItemContainerId::ImplId(impl_id) = loc.container { + Some(impl_id) + } else { + None + } + } + DefWithBodyId::ConstId(it) => { + let loc = self.db.lookup_intern_const(it); + if let ItemContainerId::ImplId(impl_id) = loc.container { + Some(impl_id) + } else { + None + } + } + _ => None, + }; + + if let Some(impl_id) = impl_id { + taits.extend(collector.assocs.into_iter().filter_map(|(id, (impl_, ty))| { + if impl_ == impl_id { + Some((id, ty)) + } else { + None + } + })); } - let atpit_coercion_table: FxHashMap<_, _> = collector - .opaque_tys + + let tait_coercion_table: FxHashMap<_, _> = taits .into_iter() - .filter_map(|(opaque_ty_id, ty)| { - if let ImplTraitId::AssociatedTypeImplTrait(alias_id, _) = - self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) + .filter_map(|(id, ty)| { + if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = + self.db.lookup_intern_impl_trait_id(id.into()) { - if assoc_tys.contains(&alias_id) { - let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id); - let ty = self.insert_inference_vars_for_impl_trait(ty, alias_placeholders); - return Some((opaque_ty_id, ty)); - } + let subst = TyBuilder::placeholder_subst(self.db, alias_id); + let ty = self.insert_inference_vars_for_impl_trait(ty, subst); + Some((id, ty)) + } else { + None } - - None }) .collect(); - if !atpit_coercion_table.is_empty() { - self.table.atpit_coercion_table = Some(atpit_coercion_table); + if !tait_coercion_table.is_empty() { + self.table.tait_coercion_table = Some(tait_coercion_table); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 72928851f12..6f85a4a4247 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -276,16 +276,16 @@ impl InferenceTable<'_> { 
return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]); } - // If we are coercing into an ATPIT, coerce into its proxy inference var, instead. + // If we are coercing into a TAIT, coerce into its proxy inference var, instead. let mut to_ty = to_ty; let _to; - if let Some(atpit_table) = &self.atpit_coercion_table { + if let Some(tait_table) = &self.tait_coercion_table { if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) { if !matches!( from_ty.kind(Interner), TyKind::InferenceVar(..) | TyKind::OpaqueType(..) ) { - if let Some(ty) = atpit_table.get(opaque_ty_id) { + if let Some(ty) = tait_table.get(opaque_ty_id) { _to = ty.clone(); to_ty = &_to; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 3d762b174ac..f5eb37f4278 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -1759,13 +1759,14 @@ impl InferenceContext<'_> { skip_indices: &[u32], is_varargs: bool, ) { - if args.len() != param_tys.len() + skip_indices.len() && !is_varargs { + let arg_count_mismatch = args.len() != param_tys.len() + skip_indices.len() && !is_varargs; + if arg_count_mismatch { self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount { call_expr: expr, expected: param_tys.len() + skip_indices.len(), found: args.len(), }); - } + }; // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- // We do this in a pretty awful way: first we type-check any arguments @@ -1819,7 +1820,7 @@ impl InferenceContext<'_> { // The function signature may contain some unknown types, so we need to insert // type vars here to avoid type mismatch false positive. 
let coercion_target = self.insert_type_vars(coercion_target); - if self.coerce(Some(arg), &ty, &coercion_target).is_err() { + if self.coerce(Some(arg), &ty, &coercion_target).is_err() && !arg_count_mismatch { self.result.type_mismatches.insert( arg.into(), TypeMismatch { expected: coercion_target, actual: ty.clone() }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index 7ee63af1c22..3e3578b9f9b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -224,7 +224,7 @@ type ChalkInferenceTable = chalk_solve::infer::InferenceTable<Interner>; pub(crate) struct InferenceTable<'a> { pub(crate) db: &'a dyn HirDatabase, pub(crate) trait_env: Arc<TraitEnvironment>, - pub(crate) atpit_coercion_table: Option<FxHashMap<OpaqueTyId, Ty>>, + pub(crate) tait_coercion_table: Option<FxHashMap<OpaqueTyId, Ty>>, var_unification_table: ChalkInferenceTable, type_variable_table: SmallVec<[TypeVariableFlags; 16]>, pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>, @@ -244,7 +244,7 @@ impl<'a> InferenceTable<'a> { InferenceTable { db, trait_env, - atpit_coercion_table: None, + tait_coercion_table: None, var_unification_table: ChalkInferenceTable::new(), type_variable_table: SmallVec::new(), pending_obligations: Vec::new(), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 034b9c773c2..47cc2a2f1e6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -391,7 +391,7 @@ pub fn layout_of_ty_query( let infer = db.infer(func.into()); return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env); } - crate::ImplTraitId::AssociatedTypeImplTrait(..) => { + crate::ImplTraitId::TypeAliasImplTrait(..) 
=> { return Err(LayoutError::NotImplemented); } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 2f93ce31816..4c9e0a1e118 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -595,7 +595,7 @@ impl TypeFoldable<Interner> for CallableSig { #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum ImplTraitId { ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx), - AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx), + TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), } impl InternValueTrivial for ImplTraitId {} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 444628ff521..67cdb99744a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -341,7 +341,7 @@ impl<'a> TyLoweringContext<'a> { let impl_trait_id = origin.either( |f| ImplTraitId::ReturnTypeImplTrait(f, idx), - |a| ImplTraitId::AssociatedTypeImplTrait(a, idx), + |a| ImplTraitId::TypeAliasImplTrait(a, idx), ); let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); let generics = @@ -1857,7 +1857,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { params, ret, data.is_varargs(), - if data.has_unsafe_kw() { Safety::Unsafe } else { Safety::Safe }, + if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), ); make_binders(db, &generics, sig) @@ -2131,7 +2131,6 @@ pub(crate) fn type_alias_impl_traits( if let Some(type_ref) = &data.type_ref { let _ty = ctx.lower_ty(type_ref); } - let generics = generics(db.upcast(), def.into()); let type_alias_impl_traits = ImplTraits { impl_traits: match ctx.impl_trait_mode { ImplTraitLoweringState::Opaque(x) => x.into_inner(), @@ -2141,6 +2140,7 @@ pub(crate) fn type_alias_impl_traits( if type_alias_impl_traits.impl_traits.is_empty() { None } else { + let generics = generics(db.upcast(), def.into()); Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits))) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index 172dea02e61..8f6582b7f80 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -82,8 +82,8 @@ impl FallibleTypeFolder<Interner> for Filler<'_> { }; filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder) } - crate::ImplTraitId::AssociatedTypeImplTrait(..) => { - not_supported!("associated type impl trait"); + crate::ImplTraitId::TypeAliasImplTrait(..) 
=> { + not_supported!("type alias impl trait"); } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { not_supported!("async block impl trait"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index 108ae198d50..0efb9c52fb6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -15,7 +15,7 @@ use test_utils::extract_annotations; use triomphe::Arc; #[salsa::database( - base_db::SourceDatabaseExtStorage, + base_db::SourceRootDatabaseStorage, base_db::SourceDatabaseStorage, hir_expand::db::ExpandDatabaseStorage, hir_def::db::InternDatabaseStorage, @@ -75,9 +75,6 @@ impl salsa::ParallelDatabase for TestDB { impl panic::RefUnwindSafe for TestDB {} impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc<str> { - FileLoaderDelegate(self).file_text(file_id) - } fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { FileLoaderDelegate(self).resolve_path(path) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index e67124d57a2..0fcd789f761 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -9,10 +9,11 @@ mod patterns; mod regression; mod simple; mod traits; +mod type_alias_impl_traits; use std::env; -use base_db::SourceDatabaseExt2 as _; +use base_db::SourceDatabaseFileInputExt as _; use expect_test::Expect; use hir_def::{ body::{Body, BodySourceMap, SyntheticSyntax}, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index e9c62d34169..0a24eeb1fe8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -1,4 +1,4 @@ -use base_db::SourceDatabaseExt2 as _; +use base_db::SourceDatabaseFileInputExt as _; use test_fixture::WithFixture; use crate::{db::HirDatabase, test_db::TestDB}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index ac2dfea1010..28198386120 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2122,3 +2122,22 @@ fn test() { "#, ) } + +#[test] +fn issue_17191() { + check_types( + r#" +trait A { + type Item; +} + +trait B<T> {} + +fn foo<T: B<impl A>>() {} + +fn test() { + let f = foo; + //^ fn foo<{unknown}>() +}"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index fb07e718d10..a98cff2a08b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -4692,119 +4692,6 @@ fn f<T: Send, U>() { } #[test] -fn associated_type_impl_trait() { - check_types( - r#" -trait Foo {} -struct S1; -impl Foo for S1 {} - -trait Bar { - type Item; - fn bar(&self) -> Self::Item; -} -struct S2; -impl Bar for S2 { - type Item = impl Foo; - fn bar(&self) -> Self::Item { - S1 - } -} - -fn test() { - let x = S2.bar(); - //^ impl Foo + ?Sized -} - "#, - ); -} - -#[test] -fn associated_type_impl_traits_complex() { - check_types( - r#" -struct Unary<T>(T); -struct Binary<T, U>(T, U); - -trait Foo {} -struct S1; -impl Foo for S1 {} - -trait Bar { - type Item; - fn 
bar(&self) -> Unary<Self::Item>; -} -struct S2; -impl Bar for S2 { - type Item = Unary<impl Foo>; - fn bar(&self) -> Unary<<Self as Bar>::Item> { - Unary(Unary(S1)) - } -} - -trait Baz { - type Target1; - type Target2; - fn baz(&self) -> Binary<Self::Target1, Self::Target2>; -} -struct S3; -impl Baz for S3 { - type Target1 = impl Foo; - type Target2 = Unary<impl Bar>; - fn baz(&self) -> Binary<Self::Target1, Self::Target2> { - Binary(S1, Unary(S2)) - } -} - -fn test() { - let x = S3.baz(); - //^ Binary<impl Foo + ?Sized, Unary<impl Bar + ?Sized>> - let y = x.1.0.bar(); - //^ Unary<Bar::Item<impl Bar + ?Sized>> -} - "#, - ); -} - -#[test] -fn associated_type_with_impl_trait_in_tuple() { - check_no_mismatches( - r#" -pub trait Iterator { - type Item; -} - -pub trait Value {} - -fn bar<I: Iterator<Item = (usize, impl Value)>>() {} - -fn foo() { - bar(); -} -"#, - ); -} - -#[test] -fn associated_type_with_impl_trait_in_nested_tuple() { - check_no_mismatches( - r#" -pub trait Iterator { - type Item; -} - -pub trait Value {} - -fn bar<I: Iterator<Item = ((impl Value, usize), u32)>>() {} - -fn foo() { - bar(); -} -"#, - ); -} - -#[test] fn dyn_trait_with_lifetime_in_rpit() { check_types( r#" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs new file mode 100644 index 00000000000..e2b7bf379cc --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs @@ -0,0 +1,161 @@ +use expect_test::expect; + +use super::{check_infer_with_mismatches, check_no_mismatches, check_types}; + +#[test] +fn associated_type_impl_trait() { + check_types( + r#" +trait Foo {} +struct S1; +impl Foo for S1 {} + +trait Bar { + type Item; + fn bar(&self) -> Self::Item; +} +struct S2; +impl Bar for S2 { + type Item = impl Foo; + fn bar(&self) -> Self::Item { + S1 + } +} + +fn test() { + let x = S2.bar(); + //^ impl Foo + ?Sized +} + "#, + ); +} + +#[test] +fn associated_type_impl_traits_complex() { + check_types( + r#" +struct Unary<T>(T); +struct Binary<T, U>(T, U); + +trait Foo {} +struct S1; +impl Foo for S1 {} + +trait Bar { + type Item; + fn bar(&self) -> Unary<Self::Item>; +} +struct S2; +impl Bar for S2 { + type Item = Unary<impl Foo>; + fn bar(&self) -> Unary<<Self as Bar>::Item> { + Unary(Unary(S1)) + } +} + +trait Baz { + type Target1; + type Target2; + fn baz(&self) -> Binary<Self::Target1, Self::Target2>; +} +struct S3; +impl Baz for S3 { + type Target1 = impl Foo; + type Target2 = Unary<impl Bar>; + fn baz(&self) -> Binary<Self::Target1, Self::Target2> { + Binary(S1, Unary(S2)) + } +} + +fn test() { + let x = S3.baz(); + //^ Binary<impl Foo + ?Sized, Unary<impl Bar + ?Sized>> + let y = x.1.0.bar(); + //^ Unary<Bar::Item<impl Bar + ?Sized>> +} + "#, + ); +} + +#[test] +fn associated_type_with_impl_trait_in_tuple() { + check_no_mismatches( + r#" +pub trait Iterator { + type Item; +} + +pub trait Value {} + +fn bar<I: Iterator<Item = (usize, impl Value)>>() {} + +fn foo() { + bar(); +} +"#, + ); +} + +#[test] +fn associated_type_with_impl_trait_in_nested_tuple() { + check_no_mismatches( + r#" +pub trait Iterator { + type Item; +} + +pub trait Value {} + +fn bar<I: Iterator<Item = ((impl Value, usize), u32)>>() {} + +fn foo() { + bar(); +} +"#, + ); +} + +#[test] +fn type_alias_impl_trait_simple() { + check_no_mismatches( + r#" +trait Trait {} + +struct Struct; + +impl Trait for Struct {} + +type AliasTy = impl Trait; + +static ALIAS: AliasTy = { + let res: AliasTy = 
Struct; + res +}; +"#, + ); + + check_infer_with_mismatches( + r#" +trait Trait {} + +struct Struct; + +impl Trait for Struct {} + +type AliasTy = impl Trait; + +static ALIAS: i32 = { + // TATIs cannot be define-used if not in signature or type annotations + let _a: AliasTy = Struct; + 5 +}; +"#, + expect![[r#" + 106..220 '{ ... 5 }': i32 + 191..193 '_a': impl Trait + ?Sized + 205..211 'Struct': Struct + 217..218 '5': i32 + 205..211: expected impl Trait + ?Sized, got Struct + "#]], + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index fbec332885d..d1ce68da6d6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -253,12 +253,7 @@ impl<'a> ClosureSubst<'a> { pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { let data = db.function_data(func); - if data.has_unsafe_kw() { - // Functions that are `#[rustc_deprecated_safe_2024]` are safe to call before 2024. - if db.attrs(func.into()).by_key(&sym::rustc_deprecated_safe_2024).exists() { - // FIXME: Properly check the caller span and mark it as unsafe after 2024. - return false; - } + if data.is_unsafe() { return true; } diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index edf26a07a74..324fa1c6a85 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A high-level object-oriented access to Rust code for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 4bb8c140a1f..ffb972475f8 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -48,6 +48,7 @@ macro_rules! diagnostics { // ] diagnostics![ + AwaitOutsideOfAsync, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, @@ -135,6 +136,12 @@ pub struct UnreachableLabel { pub name: Name, } +#[derive(Debug)] +pub struct AwaitOutsideOfAsync { + pub node: InFile<AstPtr<ast::AwaitExpr>>, + pub location: String, +} + #[derive(Debug, Clone, Eq, PartialEq)] pub struct UndeclaredLabel { pub node: InFile<AstPtr<ast::Lifetime>>, diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 7def828e95f..12dd8b5bf4f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -69,13 +69,13 @@ impl HirDisplay for Function { write_visibility(module_id, self.visibility(db), f)?; - if data.has_default_kw() { + if data.is_default() { f.write_str("default ")?; } - if data.has_const_kw() { + if data.is_const() { f.write_str("const ")?; } - if data.has_async_kw() { + if data.is_async() { f.write_str("async ")?; } if self.is_unsafe_to_call(db) { @@ -125,7 +125,7 @@ impl HirDisplay for Function { // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns. // Use ugly pattern match to strip the Future trait. // Better way? 
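// For context (not part of the patch): the desugaring the comment above refers to.
// An async fn such as
//
//     async fn fetch() -> u8 { 0 }
//
// is recorded in `FunctionData::ret_type` as roughly
// `impl ::core::future::Future<Output = u8>` rather than `u8`, which is why the
// display code below pattern-matches the wrapper away before rendering the signature.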
- let ret_type = if !data.has_async_kw() { + let ret_type = if !data.is_async() { &data.ret_type } else { match &*data.ret_type { diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 266ef2a55c5..1a3becdf50e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1,4 +1,4 @@ -//! HIR (previously known as descriptors) provides a high-level object oriented +//! HIR (previously known as descriptors) provides a high-level object-oriented //! access to Rust code. //! //! The principal difference between HIR and syntax trees is that HIR is bound @@ -1828,6 +1828,9 @@ impl DefWithBody { is_bang: true, } .into(), + BodyDiagnostic::AwaitOutsideOfAsync { node, location } => { + AwaitOutsideOfAsync { node: *node, location: location.clone() }.into() + } BodyDiagnostic::UnreachableLabel { node, name } => { UnreachableLabel { node: *node, name: name.clone() }.into() } @@ -2186,11 +2189,11 @@ impl Function { } pub fn is_const(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_const_kw() + db.function_data(self.id).is_const() } pub fn is_async(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_async_kw() + db.function_data(self.id).is_async() } /// Does this function have `#[test]` attribute? diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 29f98972dcd..a377163162c 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -28,7 +28,7 @@ use hir_expand::{ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; -use span::{EditionedFileId, FileId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; +use span::{EditionedFileId, FileId}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, @@ -757,81 +757,9 @@ impl<'db> SemanticsImpl<'db> { res } - // return: - // SourceAnalyzer(file_id that original call include!) - // macro file id - // token in include! macro mapped from token in params - // span for the mapped token - fn is_from_include_file( - &self, - token: SyntaxToken, - ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> { - let parent = token.parent()?; - let file_id = self.find_file(&parent).file_id.file_id()?; - - // iterate related crates and find all include! invocations that include_file_id matches - for (invoc, _) in self - .db - .relevant_crates(file_id.file_id()) - .iter() - .flat_map(|krate| self.db.include_macro_invoc(*krate)) - .filter(|&(_, include_file_id)| include_file_id == file_id) - { - let macro_file = invoc.as_macro_file(); - let expansion_info = { - self.with_ctx(|ctx| { - ctx.cache - .expansion_info_cache - .entry(macro_file) - .or_insert_with(|| { - let exp_info = macro_file.expansion_info(self.db.upcast()); - - let InMacroFile { file_id, value } = exp_info.expanded(); - if let InFile { file_id, value: Some(value) } = exp_info.arg() { - self.cache(value.ancestors().last().unwrap(), file_id); - } - self.cache(value, file_id.into()); - - exp_info - }) - .clone() - }) - }; - - // FIXME: uncached parse - // Create the source analyzer for the macro call scope - let Some(sa) = expansion_info - .arg() - .value - .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap())) - else { - continue; - }; - - // get mapped token in the include! 
macro file - let span = span::Span { - range: token.text_range(), - anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - ctx: SyntaxContextId::ROOT, - }; - let Some(InMacroFile { file_id, value: mut mapped_tokens }) = - expansion_info.map_range_down_exact(span) - else { - continue; - }; - - // if we find one, then return - if let Some(t) = mapped_tokens.next() { - return Some((sa, file_id.into(), t, span)); - } - } - - None - } - fn descend_into_macros_impl( &self, - mut token: SyntaxToken, + token: SyntaxToken, f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>, ) { let _p = tracing::info_span!("descend_into_macros_impl").entered(); @@ -848,17 +776,7 @@ impl<'db> SemanticsImpl<'db> { return; } }, - None => { - // if we cannot find a source analyzer for this token, then we try to find out - // whether this file is an included file and treat that as the include input - let Some((it, macro_file_id, mapped_token, s)) = - self.is_from_include_file(token) - else { - return; - }; - token = mapped_token; - (it, s, macro_file_id) - } + None => return, }; let mut m_cache = self.macro_call_cache.borrow_mut(); diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 1376dddf671..09df639d4a8 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -94,8 +94,9 @@ use hir_def::{ }, hir::{BindingId, LabelId}, AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, - FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, - StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, + FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId, + ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, + VariantId, }; use hir_expand::{ attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId, @@ -131,11 +132,30 @@ impl SourceToDefCtx<'_, '_> { for &crate_id in self.db.relevant_crates(file).iter() { // Note: `mod` declarations in block modules cannot be supported here let crate_def_map = self.db.crate_def_map(crate_id); - mods.extend( + let n_mods = mods.len(); + let modules = |file| { crate_def_map .modules_for_file(file) - .map(|local_id| crate_def_map.module_id(local_id)), - ) + .map(|local_id| crate_def_map.module_id(local_id)) + }; + mods.extend(modules(file)); + if mods.len() == n_mods { + mods.extend( + self.db + .include_macro_invoc(crate_id) + .iter() + .filter(|&&(_, file_id)| file_id == file) + .flat_map(|(call, _)| { + modules( + call.lookup(self.db.upcast()) + .kind + .file_id() + .original_file(self.db.upcast()) + .file_id(), + ) + }), + ); + } } if mods.is_empty() { // FIXME: detached file diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml index b1e7609afef..df52562b6a1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-assists" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Code assists for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index ebfbb83bb91..4cd15f1c755 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -2280,4 +2280,29 @@ impl b::LocalTrait for B { "#, ) } + + #[test] + fn impl_with_type_param_with_former_param_as_default() { + check_assist( + add_missing_impl_members, + r#" +pub trait Test<'a, T, U = T> { + fn test(item: &'a T) -> U; +} +impl<'a> Test<'a, i32> for bool { + $0 +} +"#, + r#" +pub trait Test<'a, T, U = T> { + fn test(item: &'a T) -> U; +} +impl<'a> Test<'a, i32> for bool { + fn test(item: &'a i32) -> i32 { + ${0:todo!()} + } +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs index 839ffa2614b..8f053f4df94 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs @@ -43,7 +43,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O acc.add( AssistId("bind_unused_param", AssistKind::QuickFix), - &format!("Bind as `let _ = {ident_pat};`"), + format!("Bind as `let _ = {ident_pat};`"), param.syntax().text_range(), |builder| { let line_index = ctx.db().line_index(ctx.file_id().into()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 44f31dcb849..c72bd411d64 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -2,7 +2,7 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ ast::{self, AstNode, HasGenericParams, HasVisibility}, - match_ast, SyntaxNode, + match_ast, SyntaxKind, SyntaxNode, }; use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; @@ -100,7 +100,9 @@ fn edit_struct_def( ast::make::tokens::single_newline().text(), ); edit.insert(tuple_fields_text_range.start(), w.syntax().text()); - edit.insert(tuple_fields_text_range.start(), ","); + if !w.syntax().last_token().is_some_and(|t| t.kind() == SyntaxKind::COMMA) { + edit.insert(tuple_fields_text_range.start(), ","); + } edit.insert( tuple_fields_text_range.start(), ast::make::tokens::single_newline().text(), @@ -882,4 +884,27 @@ fn f() { "#, ); } + + #[test] + fn where_clause_with_trailing_comma() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +trait Foo {} + +struct Bar$0<T>(pub T) +where + T: Foo,; +"#, + r#" +trait Foo {} + +struct Bar<T> +where + T: Foo, +{ pub field1: T } + +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index e42be636d71..6469957fe16 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -3,7 +3,7 @@ mod generated; use expect_test::expect; use hir::{FileRange, Semantics}; use ide_db::{ - 
base_db::SourceDatabaseExt, + base_db::{SourceDatabase, SourceRootDatabase}, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, EditionedFileId, RootDatabase, SnippetCap, diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 6a4c70d460f..035b2fc0db0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-completion" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Utilities for generating completions of user input for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 414b096ad47..58e9b724df2 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -24,7 +24,7 @@ pub(crate) mod vis; use std::iter; -use hir::{sym, HasAttrs, ImportPathConfig, Name, ScopeDef, Variant}; +use hir::{sym, HasAttrs, Name, ScopeDef, Variant}; use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind}; use syntax::{ast, SmolStr, ToSmolStr}; @@ -645,11 +645,7 @@ fn enum_variants_with_paths( if let Some(path) = ctx.module.find_path( ctx.db, hir::ModuleDef::from(variant), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) { // Variants with trivial paths are already added by the existing completion logic, // so we should avoid adding these twice diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index 71ff6b5aea3..ff2c8da4213 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -1,6 +1,6 @@ //! Completion of names from the current scope in expression position. -use hir::{sym, ImportPathConfig, Name, ScopeDef}; +use hir::{sym, Name, ScopeDef}; use syntax::ast; use crate::{ @@ -174,11 +174,7 @@ pub(crate) fn complete_expr_path( .find_path( ctx.db, hir::ModuleDef::from(strukt), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) .filter(|it| it.len() > 1); @@ -200,11 +196,7 @@ pub(crate) fn complete_expr_path( .find_path( ctx.db, hir::ModuleDef::from(un), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) .filter(|it| it.len() > 1); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index e803072fa8f..fdce7c547a4 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -1,5 +1,5 @@ //! See [`import_on_the_fly`]. 
-use hir::{ImportPathConfig, ItemInNs, ModuleDef}; +use hir::{ItemInNs, ModuleDef}; use ide_db::imports::{ import_assets::{ImportAssets, LocatedImport}, insert_use::ImportScope, @@ -256,11 +256,7 @@ fn import_on_the_fly( }; let user_input_lowercased = potential_import_name.to_lowercase(); - let import_cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let import_cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, import_cfg, ctx.config.insert_use.prefix_kind) @@ -306,12 +302,7 @@ fn import_on_the_fly_pat_( ItemInNs::Values(def) => matches!(def, hir::ModuleDef::Const(_)), }; let user_input_lowercased = potential_import_name.to_lowercase(); - - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) @@ -353,11 +344,7 @@ fn import_on_the_fly_method( let user_input_lowercased = potential_import_name.to_lowercase(); - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index 713968c1caf..d9a10893bf5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -4,7 +4,7 @@ use std::iter; use hir::{HirFileIdExt, Module}; use ide_db::{ - base_db::{SourceDatabaseExt, VfsPath}, + base_db::{SourceRootDatabase, VfsPath}, FxHashSet, RootDatabase, SymbolKind, }; use stdx::IsNoneOr; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index d919609237a..977e0d80a4d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -2,7 +2,7 @@ mod format_like; -use hir::{ImportPathConfig, ItemInNs}; +use hir::ItemInNs; use ide_db::{ documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, @@ -60,11 +60,7 @@ pub(crate) fn complete_postfix( None => return, }; - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index 7d062cb23e5..d885b82ec90 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -4,6 +4,7 @@ //! module, and we use to statically check that we only produce snippet //! completions if we are allowed to. 
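// For context (not part of the patch): the completion hunks above all make the same
// mechanical change. Call sites used to assemble the struct by hand,
//
//     let cfg = ImportPathConfig {
//         prefer_no_std: ctx.config.prefer_no_std,
//         prefer_prelude: ctx.config.prefer_prelude,
//         prefer_absolute: ctx.config.prefer_absolute,
//     };
//
// and now call the `import_path_config()` helper added to `CompletionConfig` in the
// hunk just below:
//
//     let cfg = ctx.config.import_path_config();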
+use hir::ImportPathConfig; use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap}; use crate::snippet::Snippet; @@ -45,4 +46,12 @@ impl CompletionConfig { .iter() .flat_map(|snip| snip.prefix_triggers.iter().map(move |trigger| (&**trigger, snip))) } + + pub fn import_path_config(&self) -> ImportPathConfig { + ImportPathConfig { + prefer_no_std: self.prefer_no_std, + prefer_prelude: self.prefer_prelude, + prefer_absolute: self.prefer_absolute, + } + } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 424f94457e3..90c1728074d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -10,7 +10,6 @@ mod snippet; #[cfg(test)] mod tests; -use hir::ImportPathConfig; use ide_db::{ helpers::mod_path_to_ast, imports::{ @@ -249,11 +248,7 @@ pub fn resolve_completion_edits( let new_ast = scope.clone_for_update(); let mut import_insert = TextEdit::builder(); - let cfg = ImportPathConfig { - prefer_no_std: config.prefer_no_std, - prefer_prelude: config.prefer_prelude, - prefer_absolute: config.prefer_absolute, - }; + let cfg = config.import_path_config(); imports.into_iter().for_each(|(full_import_path, imported_name)| { let items_with_name = items_locator::items_with_name( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index abcff62341b..02d667c5205 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -10,7 +10,7 @@ pub(crate) mod type_alias; pub(crate) mod union_literal; pub(crate) mod variant; -use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type}; +use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; use ide_db::{ documentation::{Documentation, HasDocs}, helpers::item_name, @@ -294,11 +294,7 @@ pub(crate) fn render_expr( .unwrap_or_else(|| String::from("...")) }; - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 1eb8c574bd1..5265aa8515b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -100,7 +100,6 @@ // } // ---- -use hir::ImportPathConfig; use ide_db::imports::import_assets::LocatedImport; use itertools::Itertools; use syntax::{ast, AstNode, GreenNode, SyntaxNode}; @@ -169,11 +168,7 @@ impl Snippet { } fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> { - let import_cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let import_cfg = ctx.config.import_path_config(); let resolve = |import: &GreenNode| { let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index f6274cf5376..415f2afeebb 100644 --- 
a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -23,10 +23,10 @@ mod type_pos; mod use_tree; mod visibility; +use base_db::SourceDatabase; use expect_test::Expect; use hir::PrefixKind; use ide_db::{ - base_db::FileLoader, imports::insert_use::{ImportGranularity, InsertUseConfig}, FilePosition, RootDatabase, SnippetCap, }; diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index 9a6826a5c42..8f3cae2fa17 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-db" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Core data structure representing IDE state for rust-analyzer." authors.workspace = true edition.workspace = true @@ -13,7 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true tracing.workspace = true rayon.workspace = true fst = { version = "0.4.7", default-features = false } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index f6a781907db..e6638dde5d4 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -2,7 +2,7 @@ use std::collections::VecDeque; -use base_db::SourceDatabaseExt; +use base_db::SourceRootDatabase; use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; use span::FileId; use syntax::{ diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 4c52ba39dec..3cf29987fa1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -1,4 +1,4 @@ -//! This crate defines the core datastructure representing IDE state -- `RootDatabase`. +//! This crate defines the core data structure representing IDE state -- `RootDatabase`. //! //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. 
@@ -74,7 +74,7 @@ pub type FilePosition = FilePositionWrapper<FileId>; pub type FileRange = FileRangeWrapper<FileId>; #[salsa::database( - base_db::SourceDatabaseExtStorage, + base_db::SourceRootDatabaseStorage, base_db::SourceDatabaseStorage, hir::db::ExpandDatabaseStorage, hir::db::DefDatabaseStorage, @@ -125,9 +125,6 @@ impl Upcast<dyn HirDatabase> for RootDatabase { } impl FileLoader for RootDatabase { - fn file_text(&self, file_id: FileId) -> Arc<str> { - FileLoaderDelegate(self).file_text(file_id) - } fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { FileLoaderDelegate(self).resolve_path(path) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 84a388a460b..0afa9163e31 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -7,7 +7,7 @@ use itertools::Itertools; use rustc_hash::FxHashMap; use syntax::{ ast::{self, make, AstNode, HasGenericArgs}, - ted, SyntaxNode, + ted, NodeOrToken, SyntaxNode, }; #[derive(Default)] @@ -328,10 +328,26 @@ impl Ctx<'_> { let qualified = make::path_from_segments(std::iter::once(segment), false); ted::replace(path.syntax(), qualified.clone_for_update().syntax()); } else if let Some(path_ty) = ast::PathType::cast(parent) { - ted::replace( - path_ty.syntax(), - subst.clone_subtree().clone_for_update().syntax(), - ); + let old = path_ty.syntax(); + + if old.parent().is_some() { + ted::replace(old, subst.clone_subtree().clone_for_update().syntax()); + } else { + // Some `path_ty` has no parent, especially ones made for default value + // of type parameters. + // In this case, `ted` cannot replace `path_ty` with `subst` directly. + // So, just replace its children as long as the `subst` is the same type. 
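// For context (not part of the patch): a concrete case that hits this parentless
// branch is a default type parameter, as in the `add_missing_impl_members` test
// added earlier in this diff:
//
//     pub trait Test<'a, T, U = T> {
//         fn test(item: &'a T) -> U;
//     }
//
// The `PathType` node built for the `U = T` default is never attached to a parent
// tree, so `ted::replace` cannot swap the node itself and its children are replaced
// instead.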
+ let new = subst.clone_subtree().clone_for_update(); + if !matches!(new, ast::Type::PathType(..)) { + return None; + } + let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?; + let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?; + ted::replace_all( + start..=end, + new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(), + ); + } } else { ted::replace( path.syntax(), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs index 62104fb7dce..bb121f4a80a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs @@ -11,7 +11,7 @@ use hir::db::DefDatabase; use crate::{ base_db::{ salsa::{Database, ParallelDatabase, Snapshot}, - Cancelled, CrateId, SourceDatabase, SourceDatabaseExt, + Cancelled, CrateId, SourceDatabase, SourceRootDatabase, }, FxIndexMap, RootDatabase, }; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 05b32e2a854..9e01a6d4408 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -6,7 +6,7 @@ use std::mem; -use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt}; +use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase}; use hir::{ sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility, @@ -663,9 +663,16 @@ impl<'a> FindUsages<'a> { name_ref: &ast::NameRef, sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { + // See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845 + let ty_eq = |ty: hir::Type| match (ty.as_adt(), self_ty.as_adt()) { + (Some(ty), Some(self_ty)) => ty == self_ty, + (None, None) => ty == *self_ty, + _ => false, + }; + match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(Definition::SelfType(impl_))) - if impl_.self_ty(self.sema.db).as_adt() == self_ty.as_adt() => + if ty_eq(impl_.self_ty(self.sema.db)) => { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index c70aed4c43b..209b1477bac 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -29,7 +29,7 @@ use std::{ use base_db::{ salsa::{self, ParallelDatabase}, - SourceDatabaseExt, SourceRootId, Upcast, + SourceRootDatabase, SourceRootId, Upcast, }; use fst::{raw::IndexedValue, Automaton, Streamer}; use hir::{ @@ -100,7 +100,7 @@ impl Query { } #[salsa::query_group(SymbolsDatabaseStorage)] -pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> { +pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirDatabase> { /// The symbol index for a given module. These modules should only be in source roots that /// are inside local_roots. 
fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index edd05009332..9c3a279a945 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-diagnostics" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Diagnostics rendering and fixits for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs new file mode 100644 index 00000000000..92b6e748ca5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs @@ -0,0 +1,101 @@ +use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext}; + +// Diagnostic: await-outside-of-async +// +// This diagnostic is triggered if the `await` keyword is used outside of an async function or block +pub(crate) fn await_outside_of_async( + ctx: &DiagnosticsContext<'_>, + d: &hir::AwaitOutsideOfAsync, +) -> Diagnostic { + let display_range = + adjusted_display_range(ctx, d.node, &|node| Some(node.await_token()?.text_range())); + Diagnostic::new( + crate::DiagnosticCode::RustcHardError("E0728"), + format!("`await` is used inside {}, which is not an `async` context", d.location), + display_range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn await_inside_non_async_fn() { + check_diagnostics( + r#" +async fn foo() {} + +fn bar() { + foo().await; + //^^^^^ error: `await` is used inside non-async function, which is not an `async` context +} +"#, + ); + } + + #[test] + fn await_inside_async_fn() { + check_diagnostics( + r#" +async fn foo() {} + +async fn bar() { + foo().await; +} +"#, + ); + } + + #[test] + fn await_inside_closure() { + check_diagnostics( + r#" +async fn foo() {} + +async fn bar() { + let _a = || { foo().await }; + //^^^^^ error: `await` is used inside non-async closure, which is not an `async` context +} +"#, + ); + } + + #[test] + fn await_inside_async_block() { + check_diagnostics( + r#" +async fn foo() {} + +fn bar() { + let _a = async { foo().await }; +} +"#, + ); + } + + #[test] + fn await_in_complex_context() { + check_diagnostics( + r#" +async fn foo() {} + +fn bar() { + async fn baz() { + let a = foo().await; + } + + let x = || { + let y = async { + baz().await; + let z = || { + baz().await; + //^^^^^ error: `await` is used inside non-async closure, which is not an `async` context + }; + }; + }; +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index acff8116961..1f8f805a1e2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -42,7 +42,10 @@ mod tests { use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig}; pub(crate) fn check(ra_fixture: &str) { - let config = DiagnosticsConfig::test_sample(); + let config = DiagnosticsConfig { + disabled: std::iter::once("unlinked-file".to_owned()).collect(), + ..DiagnosticsConfig::test_sample() + }; check_diagnostics_with_config(config, ra_fixture) } @@ 
-171,4 +174,25 @@ union FooBar { "#, ); } + + #[test] + fn modules() { + check( + r#" +//- /main.rs + #[cfg(outline)] mod outline; +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: outline is disabled + + mod outline_inner; +//^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: outline_inner is disabled + + #[cfg(inline)] mod inline {} +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: inline is disabled + +//- /outline_inner.rs +#![cfg(outline_inner)] +//- /outline.rs +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index e59b63f288d..6a976697c80 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -273,11 +273,7 @@ fn f() { #[test] fn include_does_not_break_diagnostics() { - let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("inactive-code".to_owned()); - config.disabled.insert("unlinked-file".to_owned()); - check_diagnostics_with_config( - config, + check_diagnostics( r#" //- minicore: include //- /lib.rs crate:lib diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 56ec45c8984..7126617cdee 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -472,4 +472,18 @@ fn f( "#, ) } + + #[test] + fn no_type_mismatches_when_arg_count_mismatch() { + check_diagnostics( + r#" +fn foo((): (), (): ()) { + foo(1, 2, 3); + // ^^ error: expected 2 arguments, found 3 + foo(1); + // ^ error: expected 2 arguments, found 1 +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 97296278c3a..f39738f2fb8 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -1032,6 +1032,44 @@ fn f() { check_diagnostics_no_bails(&code); } + #[test] + fn min_exhaustive() { + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result<i32, !>) { + match x { + Ok(_y) => {} + } +} +"#, + ); + check_diagnostics( + r#" +//- minicore: result +fn test(ptr: *const Result<i32, !>) { + unsafe { + match *ptr { + //^^^^ error: missing match arm: `Err(!)` not covered + Ok(_x) => {} + } + } +} +"#, + ); + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result<i32, &'static !>) { + match x { + //^ error: missing match arm: `Err(_)` not covered + Ok(_y) => {} + } +} +"#, + ); + } + mod rust_unstable { use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 30dd26a118d..af8ac6005d7 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -487,4 +487,28 @@ fn main() { "#, ) } + + #[test] + fn rustc_deprecated_safe_2024() { + check_diagnostics( + r#" +//- /ed2021.rs crate:ed2021 
edition:2021 +#[rustc_deprecated_safe_2024] +unsafe fn safe() -> u8 { + 0 +} +//- /ed2024.rs crate:ed2024 edition:2024 +#[rustc_deprecated_safe_2024] +unsafe fn not_safe() -> u8 { + 0 +} +//- /main.rs crate:main deps:ed2021,ed2024 +fn main() { + ed2021::safe(); + ed2024::not_safe(); + //^^^^^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block +} + "#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs index 1a4d2877ef2..ff1eeb0516a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs @@ -44,4 +44,65 @@ fn main() { "#, ); } + + #[test] + fn option_nonexhaustive_inside_blocks() { + check_diagnostics( + r#" +//- minicore: option +fn main() { + '_a: { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered + } +} +"#, + ); + + check_diagnostics( + r#" +//- minicore: future, option +fn main() { + let _ = async { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered + }; +} +"#, + ); + + check_diagnostics( + r#" +//- minicore: option +fn main() { + unsafe { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered + } +} +"#, + ); + } + + #[test] + fn min_exhaustive() { + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result<i32, !>) { + let Ok(_y) = x; +} +"#, + ); + + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result<i32, &'static !>) { + let Ok(_y) = x; + //^^^^^^ error: non-exhaustive pattern: `Err(_)` not covered +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 4e52d28051b..6f5c68d4b5c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -2,8 +2,12 @@ use either::Either; use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ - ast::{self, BlockExpr, ExprStmt}, - AstNode, AstPtr, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + BlockExpr, Expr, ExprStmt, + }, + AstNode, AstPtr, TextSize, }; use text_edit::TextEdit; @@ -119,6 +123,38 @@ fn add_missing_ok_or_some( return None; } + if d.actual.is_unit() { + if let Expr::BlockExpr(block) = &expr { + if block.tail_expr().is_none() { + let mut builder = TextEdit::builder(); + let block_indent = block.indent_level(); + + if block.statements().count() == 0 { + // Empty block + let indent = block_indent + 1; + builder.insert( + block.syntax().text_range().start() + TextSize::from(1), + format!("\n{indent}{variant_name}(())\n{block_indent}"), + ); + } else { + let indent = IndentLevel::from(1); + builder.insert( + block.syntax().text_range().end() - TextSize::from(1), + format!("{indent}{variant_name}(())\n{block_indent}"), + ); + } + + let source_change = SourceChange::from_text_edit( + expr_ptr.file_id.original_file(ctx.sema.db), + builder.finish(), + ); + let name = format!("Insert {variant_name}(()) as the tail of this block"); + acc.push(fix("insert_wrapped_unit", &name, source_change, expr_range)); + } + return Some(()); + } + } + let mut builder = 
TextEdit::builder(); builder.insert(expr.syntax().text_range().start(), format!("{variant_name}(")); builder.insert(expr.syntax().text_range().end(), ")".to_owned()); @@ -534,6 +570,36 @@ fn div(x: i32, y: i32) -> MyResult<i32> { } #[test] + fn test_wrapped_unit_as_block_tail_expr() { + check_fix( + r#" +//- minicore: result +fn foo() -> Result<(), ()> { + foo(); +}$0 + "#, + r#" +fn foo() -> Result<(), ()> { + foo(); + Ok(()) +} + "#, + ); + + check_fix( + r#" +//- minicore: result +fn foo() -> Result<(), ()> {}$0 + "#, + r#" +fn foo() -> Result<(), ()> { + Ok(()) +} + "#, + ); + } + + #[test] fn test_in_const_and_static() { check_fix( r#" diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index 1b71a3a3e69..a1573bab8ae 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -4,7 +4,7 @@ use std::iter; use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; use ide_db::{ - base_db::{FileLoader, SourceDatabaseExt}, + base_db::{FileLoader, SourceDatabase, SourceRootDatabase}, source_change::SourceChange, FileId, FileRange, LineIndexDatabase, }; @@ -47,7 +47,7 @@ pub(crate) fn unlinked_file( // // Only show this diagnostic on the first three characters of // the file, to avoid overwhelming the user during startup. - range = FileLoader::file_text(ctx.sema.db, file_id) + range = SourceDatabase::file_text(ctx.sema.db, file_id) .char_indices() .take(3) .last() @@ -502,4 +502,16 @@ mod bar { "#, ); } + + #[test] + fn include_macro_works() { + check_diagnostics( + r#" +//- minicore: include +//- /main.rs +include!("bar/foo/mod.rs"); +//- /bar/foo/mod.rs +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 263ab747559..a61c5f0cd4d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -24,6 +24,7 @@ //! don't yet have a great pattern for how to do them properly. mod handlers { + pub(crate) mod await_outside_of_async; pub(crate) mod break_outside_of_loop; pub(crate) mod expected_function; pub(crate) mod inactive_code; @@ -96,6 +97,7 @@ use syntax::{ #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum DiagnosticCode { RustcHardError(&'static str), + SyntaxError, RustcLint(&'static str), Clippy(&'static str), Ra(&'static str, Severity), @@ -107,6 +109,9 @@ impl DiagnosticCode { DiagnosticCode::RustcHardError(e) => { format!("https://doc.rust-lang.org/stable/error_codes/{e}.html") } + DiagnosticCode::SyntaxError => { + String::from("https://doc.rust-lang.org/stable/reference/") + } DiagnosticCode::RustcLint(e) => { format!("https://doc.rust-lang.org/rustc/?search={e}") } @@ -125,6 +130,7 @@ impl DiagnosticCode { | DiagnosticCode::RustcLint(r) | DiagnosticCode::Clippy(r) | DiagnosticCode::Ra(r, _) => r, + DiagnosticCode::SyntaxError => "syntax-error", } } } @@ -154,7 +160,7 @@ impl Diagnostic { message, range: range.into(), severity: match code { - DiagnosticCode::RustcHardError(_) => Severity::Error, + DiagnosticCode::RustcHardError(_) | DiagnosticCode::SyntaxError => Severity::Error, // FIXME: Rustc lints are not always warning, but the ones that are currently implemented are all warnings. 
DiagnosticCode::RustcLint(_) => Severity::Warning, // FIXME: We can make this configurable, and if the user uses `cargo clippy` on flycheck, we can @@ -297,31 +303,54 @@ impl DiagnosticsContext<'_> { } } -/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files +/// Request parser level diagnostics for the given [`FileId`]. +pub fn syntax_diagnostics( + db: &RootDatabase, + config: &DiagnosticsConfig, + file_id: FileId, +) -> Vec<Diagnostic> { + let _p = tracing::info_span!("syntax_diagnostics").entered(); + + if config.disabled.contains("syntax-error") { + return Vec::new(); + } + + let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + + // [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. + db.parse_errors(file_id) + .as_deref() + .into_iter() + .flatten() + .take(128) + .map(|err| { + Diagnostic::new( + DiagnosticCode::SyntaxError, + format!("Syntax Error: {err}"), + FileRange { file_id: file_id.into(), range: err.range() }, + ) + }) + .collect() +} + +/// Request semantic diagnostics for the given [`FileId`]. The produced diagnostics may point to other files /// due to macros. -pub fn diagnostics( +pub fn semantic_diagnostics( db: &RootDatabase, config: &DiagnosticsConfig, resolve: &AssistResolveStrategy, file_id: FileId, ) -> Vec<Diagnostic> { - let _p = tracing::info_span!("diagnostics").entered(); + let _p = tracing::info_span!("semantic_diagnostics").entered(); let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let mut res = Vec::new(); - // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. - res.extend(db.parse_errors(file_id).as_deref().into_iter().flatten().take(128).map(|err| { - Diagnostic::new( - DiagnosticCode::RustcHardError("syntax-error"), - format!("Syntax Error: {err}"), - FileRange { file_id: file_id.into(), range: err.range() }, - ) - })); - let parse_errors = res.len(); - let parse = sema.parse(file_id); // FIXME: This iterates the entire file which is a rather expensive operation. @@ -341,13 +370,17 @@ pub fn diagnostics( match module { // A bunch of parse errors in a file indicate some bigger structural parse changes in the // file, so we skip semantic diagnostics so we can show these faster. 
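// A caller-side usage sketch of the API split introduced above; the snippet is an
// assumption about downstream usage, only the signatures come from this patch. The
// former single `diagnostics()` entry point becomes a cheap parser-only pass plus a
// semantic pass (with `full_diagnostics` keeping the combined behaviour):
//
//     let mut diags = syntax_diagnostics(&db, &config, file_id);
//     diags.extend(semantic_diagnostics(&db, &config, &AssistResolveStrategy::None, file_id));
//     // equivalent to: full_diagnostics(&db, &config, &AssistResolveStrategy::None, file_id)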
- Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints), - Some(_) => (), + Some(m) => { + if !db.parse_errors(file_id).as_deref().is_some_and(|es| es.len() >= 16) { + m.diagnostics(db, &mut diags, config.style_lints); + } + } None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()), } for diag in diags { let d = match diag { + AnyDiagnostic::AwaitOutsideOfAsync(d) => handlers::await_outside_of_async::await_outside_of_async(&ctx, &d), AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d), AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) { Some(it) => it, @@ -363,7 +396,7 @@ pub fn diagnostics( res.extend(d.errors.iter().take(16).map(|err| { { Diagnostic::new( - DiagnosticCode::RustcHardError("syntax-error"), + DiagnosticCode::SyntaxError, format!("Syntax Error in Expansion: {err}"), ctx.resolve_precise_location(&d.node.clone(), d.precise_location), ) @@ -464,6 +497,19 @@ pub fn diagnostics( res } +/// Request both syntax and semantic diagnostics for the given [`FileId`]. +pub fn full_diagnostics( + db: &RootDatabase, + config: &DiagnosticsConfig, + resolve: &AssistResolveStrategy, + file_id: FileId, +) -> Vec<Diagnostic> { + let mut res = syntax_diagnostics(db, config, file_id); + let sema = semantic_diagnostics(db, config, resolve, file_id); + res.extend(sema); + res +} + // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros static RUSTC_LINT_GROUPS_DICT: Lazy<FxHashMap<&str, Vec<&str>>> = diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index e56fca1e500..ec74640a54d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -1,7 +1,7 @@ #![allow(clippy::print_stderr)] use ide_db::{ - assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase, + assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase, }; use itertools::Itertools; use stdx::trim_indent; @@ -59,10 +59,14 @@ fn check_nth_fix_with_config( let after = trim_indent(ra_fixture_after); let (db, file_position) = RootDatabase::with_position(ra_fixture_before); - let diagnostic = - super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id.into()) - .pop() - .expect("no diagnostics"); + let diagnostic = super::full_diagnostics( + &db, + &config, + &AssistResolveStrategy::All, + file_position.file_id.into(), + ) + .pop() + .expect("no diagnostics"); let fix = &diagnostic .fixes .unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth]; @@ -102,37 +106,39 @@ pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture_before); let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; - let fix = - super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into()) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let source_change = fix.source_change.as_ref().unwrap(); - let file_id = - *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let fix = 
super::full_diagnostics( + &db, + &conf, + &AssistResolveStrategy::All, + file_position.file_id.into(), + ) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); - for (edit, snippet_edit) in source_change.source_file_edits.values() - { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); - } - } - actual - }; - after == actual - }) - }) - .is_some() - }); + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); assert!(fix.is_some(), "no diagnostic with desired fix"); } @@ -144,38 +150,40 @@ pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &s let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; let mut n_fixes = 0; - let fix = - super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into()) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - n_fixes += fixes.len(); - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let source_change = fix.source_change.as_ref().unwrap(); - let file_id = - *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let fix = super::full_diagnostics( + &db, + &conf, + &AssistResolveStrategy::All, + file_position.file_id.into(), + ) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + n_fixes += fixes.len(); + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); - for (edit, snippet_edit) in source_change.source_file_edits.values() - { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); - } - } - actual - }; - after == actual - }) - }) - .is_some() - }); + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); assert!(fix.is_some(), "no diagnostic with desired fix"); assert!(n_fixes == 1, "Too many fixes suggested"); } @@ -183,7 +191,7 @@ pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &s /// Checks that there's a diagnostic *without* fix at `$0`. 
pub(crate) fn check_no_fix(ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); - let diagnostic = super::diagnostics( + let diagnostic = super::full_diagnostics( &db, &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, @@ -215,7 +223,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur .iter() .copied() .flat_map(|file_id| { - super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into()) + super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into()) .into_iter() .map(|d| { let mut annotation = String::new(); @@ -243,6 +251,12 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur let mut actual = annotations.remove(&file_id).unwrap_or_default(); let expected = extract_annotations(&db.file_text(file_id)); actual.sort_by_key(|(range, _)| range.start()); + // FIXME: We should panic on duplicates instead, but includes currently cause us to report + // diagnostics twice for the calling module when both files are queried. + actual.dedup(); + // actual.iter().duplicates().for_each(|(range, msg)| { + // panic!("duplicate diagnostic at {:?}: {msg:?}", line_index.line_col(range.start())) + // }); if expected.is_empty() { // makes minicore smoke test debuggable for (e, _) in &actual { @@ -277,10 +291,10 @@ fn test_disabled_diagnostics() { let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); let file_id = file_id.into(); - let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); + let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); assert!(diagnostics.is_empty()); - let diagnostics = super::diagnostics( + let diagnostics = super::full_diagnostics( &db, &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml index 57b1f9465ad..fad62fa3b96 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "ide-ssr" version = "0.0.0" -description = "Structural search and replace of Rust code" -repository = "https://github.com/rust-lang/rust-analyzer" +repository.workspace = true +description = "Structural search and replace of Rust code for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index e62ef604336..54236ea8bc4 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -84,7 +84,7 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc use crate::{errors::bail, matching::MatchFailureReason}; use hir::{FileRange, Semantics}; -use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase}; +use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase}; use resolving::ResolvedRule; use syntax::{ast, AstNode, SyntaxNode, TextRange}; use text_edit::TextEdit; @@ -141,7 +141,7 @@ impl<'db> MatchFinder<'db> { /// Constructs an instance using the start of the first file in `db` as the lookup context. 
pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> { - use ide_db::base_db::SourceDatabaseExt; + use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; if let Some(first_file_id) = db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next()) @@ -172,7 +172,6 @@ impl<'db> MatchFinder<'db> { /// Finds matches for all added rules and returns edits for all found matches. pub fn edits(&self) -> FxHashMap<FileId, TextEdit> { - use ide_db::base_db::SourceDatabaseExt; let mut matches_by_file = FxHashMap::default(); for m in self.matches().matches { matches_by_file @@ -228,7 +227,6 @@ impl<'db> MatchFinder<'db> { file_id: EditionedFileId, snippet: &str, ) -> Vec<MatchDebugInfo> { - use ide_db::base_db::SourceDatabaseExt; let file = self.sema.parse(file_id); let mut res = Vec::new(); let file_text = self.sema.db.file_text(file_id.into()); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index 832386685d7..241de10b44d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -156,7 +156,7 @@ impl MatchFinder<'_> { fn search_files_do(&self, mut callback: impl FnMut(FileId)) { if self.restrict_ranges.is_empty() { // Unrestricted search. - use ide_db::base_db::SourceDatabaseExt; + use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; for &root in self.sema.db.local_roots().iter() { let sr = self.sema.db.source_root(root); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs index 4477a268b29..42930889d75 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs @@ -1,7 +1,7 @@ use expect_test::{expect, Expect}; use hir::{FilePosition, FileRange}; use ide_db::{ - base_db::{salsa::Durability, SourceDatabaseExt}, + base_db::{salsa::Durability, SourceDatabase}, EditionedFileId, FxHashSet, }; use test_utils::RangeOrOffset; diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index 9d8400ba3ad..d976d604f1a 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "IDE-centric APIs for rust-analyzer." 
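A minimal sketch (not part of the patch) of the `SourceDatabaseExt` split applied in the hunks above: file text moves to `SourceDatabase`, while source-root queries stay behind `SourceRootDatabase` (with `SymbolsDatabase::local_roots`, exactly as in the SSR hunk). The helper names `text_of` and `first_local_file` are illustrative only.

    use ide_db::base_db::{SourceDatabase, SourceRootDatabase};
    use ide_db::symbol_index::SymbolsDatabase;
    use ide_db::{FileId, RootDatabase};

    // File contents are now queried through `SourceDatabase`...
    fn text_of(db: &RootDatabase, file_id: FileId) -> String {
        SourceDatabase::file_text(db, file_id).to_string()
    }

    // ...while source roots are reached via `SourceRootDatabase`, mirroring
    // the `MatchFinder::at_first_file` hunk above.
    fn first_local_file(db: &RootDatabase) -> Option<FileId> {
        db.local_roots()
            .iter()
            .next()
            .and_then(|root| db.source_root(*root).iter().next())
    }
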
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index d0701a45b10..8a8bc07945f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -10,7 +10,7 @@ use hir::{ Semantics, }; use ide_db::{ - base_db::{AnchoredPath, FileLoader}, + base_db::{AnchoredPath, FileLoader, SourceDatabase}, defs::{Definition, IdentClass}, helpers::pick_best_token, RootDatabase, SymbolKind, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 3257305184e..516e32ef917 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -1,5 +1,5 @@ use expect_test::{expect, Expect}; -use ide_db::{base_db::FileLoader, FileRange}; +use ide_db::{base_db::SourceDatabase, FileRange}; use syntax::TextRange; use crate::{ @@ -8579,3 +8579,26 @@ fn main(a$0: T) {} "#]], ); } + +#[test] +fn hover_fn_with_impl_trait_arg() { + check( + r#" +trait Foo {} +impl Foo for bool {} +fn bar<const WIDTH: u8>(_: impl Foo) {} +fn test() { + let f = bar::<3>; + f$0(true); +} +"#, + expect![[r#" + *f* + + ```rust + // size = 0, align = 1 + let f: fn bar<3>(bool) + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 5775abaeb18..7310852b8ed 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -1147,4 +1147,19 @@ async fn main() { }"#, ); } + + #[test] + fn works_in_included_file() { + check_types( + r#" +//- minicore: include +//- /main.rs +include!("foo.rs"); +//- /foo.rs +fn main() { + let _x = 42; + //^^ i32 +}"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index ea96c9504e5..8f2777f3928 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -7,7 +7,7 @@ use hir::{HirDisplay, Semantics}; use ide_db::{FileRange, RootDatabase}; use span::EditionedFileId; use syntax::{ - ast::{self, AstNode, HasName}, + ast::{self, AstNode, HasLoopBody, HasName}, match_ast, SyntaxKind, SyntaxNode, T, }; @@ -57,9 +57,24 @@ pub(super) fn hints( // the actual number of lines in this case should be the line count of the parent BlockExpr, // which the `min_lines` config cares about node = node.parent()?; - let block = label.syntax().parent().and_then(ast::BlockExpr::cast)?; - closing_token = block.stmt_list()?.r_curly_token()?; + + let parent = label.syntax().parent()?; + let block; + match_ast! 
{ + match parent { + ast::BlockExpr(block_expr) => { + block = block_expr.stmt_list()?; + }, + ast::AnyHasLoopBody(loop_expr) => { + block = loop_expr.loop_body()?.stmt_list()?; + }, + _ => return None, + } + } + closing_token = block.r_curly_token()?; + let lifetime = label.lifetime().map_or_else(String::new, |it| it.to_string()); + (lifetime, Some(label.syntax().text_range())) } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { closing_token = block.stmt_list()?.r_curly_token()?; @@ -219,6 +234,19 @@ fn test() { //^ 'do_a } //^ 'end + + 'a: loop { + 'b: for i in 0..5 { + 'c: while true { + + + } + //^ 'c + } + //^ 'b + } + //^ 'a + } //^ fn test "#, diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs index aeb3c8c1ee5..ff1317d135c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs @@ -1,5 +1,5 @@ use hir::Semantics; -use ide_db::{base_db::SourceDatabaseExt, FilePosition, LineIndexDatabase, RootDatabase}; +use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase}; use std::{fmt::Write, time::Instant}; use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange}; diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 8cb81a9cc45..eff4bc3d376 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -65,7 +65,7 @@ use hir::{sym, ChangeWithProcMacros}; use ide_db::{ base_db::{ salsa::{self, ParallelDatabase}, - CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath, + CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceRootDatabase, VfsPath, }, prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase, }; @@ -286,7 +286,7 @@ impl Analysis { /// Gets the text of the source file. pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> { - self.with_db(|db| SourceDatabaseExt::file_text(db, file_id)) + self.with_db(|db| SourceDatabase::file_text(db, file_id)) } /// Gets the syntax tree of the file. @@ -672,14 +672,33 @@ impl Analysis { .unwrap_or_default()) } - /// Computes the set of diagnostics for the given file. - pub fn diagnostics( + /// Computes the set of parser level diagnostics for the given file. + pub fn syntax_diagnostics( + &self, + config: &DiagnosticsConfig, + file_id: FileId, + ) -> Cancellable<Vec<Diagnostic>> { + self.with_db(|db| ide_diagnostics::syntax_diagnostics(db, config, file_id)) + } + + /// Computes the set of semantic diagnostics for the given file. + pub fn semantic_diagnostics( + &self, + config: &DiagnosticsConfig, + resolve: AssistResolveStrategy, + file_id: FileId, + ) -> Cancellable<Vec<Diagnostic>> { + self.with_db(|db| ide_diagnostics::semantic_diagnostics(db, config, &resolve, file_id)) + } + + /// Computes the set of both syntax and semantic diagnostics for the given file. 
+ pub fn full_diagnostics( &self, config: &DiagnosticsConfig, resolve: AssistResolveStrategy, file_id: FileId, ) -> Cancellable<Vec<Diagnostic>> { - self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id)) + self.with_db(|db| ide_diagnostics::full_diagnostics(db, config, &resolve, file_id)) } /// Convenience function to return assists + quick fixes for diagnostics @@ -697,7 +716,7 @@ impl Analysis { self.with_db(|db| { let diagnostic_assists = if diagnostics_config.enabled && include_fixes { - ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id) + ide_diagnostics::full_diagnostics(db, diagnostics_config, &resolve, frange.file_id) .into_iter() .flat_map(|it| it.fixes.unwrap_or_default()) .filter(|it| it.target.intersect(frange.range).is_some()) diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 46c2d47ee82..64b82b31c74 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -60,7 +60,6 @@ pub(crate) fn find_all_refs( move |def: Definition| { let mut usages = def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all(); - if literal_search { retain_adt_literal_usages(&mut usages, def, sema); } @@ -818,6 +817,30 @@ impl<T> S<T> { } #[test] + fn test_self_inside_not_adt_impl() { + check( + r#" +pub trait TestTrait { + type Assoc; + fn stuff() -> Self; +} +impl TestTrait for () { + type Assoc$0 = u8; + fn stuff() -> Self { + let me: Self = (); + me + } +} +"#, + expect![[r#" + Assoc TypeAlias FileId(0) 92..108 97..102 + + FileId(0) 31..36 + "#]], + ) + } + + #[test] fn test_find_all_refs_two_modules() { check( r#" diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index cd9b7ae2f62..eaccee08e8c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -3,9 +3,12 @@ use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ - base_db::SourceDatabaseExt, defs::Definition, documentation::Documentation, - famous_defs::FamousDefs, helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet, - RootDatabase, + base_db::{SourceRootDatabase, VfsPath}, + defs::Definition, + documentation::Documentation, + famous_defs::FamousDefs, + helpers::get_definition, + FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, }; use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; @@ -227,13 +230,16 @@ impl StaticIndex<'_> { self.files.push(result); } - pub fn compute(analysis: &Analysis) -> StaticIndex<'_> { + pub fn compute<'a>(analysis: &'a Analysis, workspace_root: &VfsPath) -> StaticIndex<'a> { let db = &*analysis.db; let work = all_modules(db).into_iter().filter(|module| { let file_id = module.definition_source_file_id(db).original_file(db); let source_root = db.file_source_root(file_id.into()); let source_root = db.source_root(source_root); - !source_root.is_library + let is_vendored = source_root + .path_for_file(&file_id.into()) + .is_some_and(|module_path| module_path.starts_with(workspace_root)); + !source_root.is_library || is_vendored }); let mut this = StaticIndex { files: vec![], @@ -259,12 +265,13 @@ impl StaticIndex<'_> { #[cfg(test)] mod tests { use crate::{fixture, StaticIndex}; - use ide_db::{FileRange, FxHashSet}; + use ide_db::{base_db::VfsPath, FileRange, FxHashSet}; use syntax::TextSize; fn 
check_all_ranges(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); - let s = StaticIndex::compute(&analysis); + let s = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for f in s.files { for (range, _) in f.tokens { @@ -283,7 +290,8 @@ mod tests { #[track_caller] fn check_definitions(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); - let s = StaticIndex::compute(&analysis); + let s = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for (_, t) in s.tokens.iter() { if let Some(t) = t.definition { @@ -326,7 +334,7 @@ enum E { X(Foo) } fn multi_crate() { check_definitions( r#" -//- /main.rs crate:main deps:foo +//- /workspace/main.rs crate:main deps:foo use foo::func; @@ -335,7 +343,7 @@ fn main() { //^^^^ func(); } -//- /foo/lib.rs crate:foo +//- /workspace/foo/lib.rs crate:foo pub func() { @@ -345,6 +353,24 @@ pub func() { } #[test] + fn vendored_crate() { + check_all_ranges( + r#" +//- /workspace/main.rs crate:main deps:external,vendored +struct Main(i32); + //^^^^ ^^^ + +//- /external/lib.rs new_source_root:library crate:external@0.1.0,https://a.b/foo.git library +struct ExternalLibrary(i32); + +//- /workspace/vendored/lib.rs new_source_root:library crate:vendored@0.1.0,https://a.b/bar.git library +struct VendoredLibrary(i32); + //^^^^^^^^^^^^^^^ ^^^ +"#, + ); + } + + #[test] fn derives() { check_all_ranges( r#" diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs index 727012112eb..9ff099f479e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs @@ -1,6 +1,6 @@ use dot::{Id, LabelText}; use ide_db::{ - base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt}, + base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase}, FxHashSet, RootDatabase, }; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index c08ecb5c307..5e7ee54c6af 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "intern" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Global `Arc`-based object interning infrastructure for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/limit/Cargo.toml b/src/tools/rust-analyzer/crates/limit/Cargo.toml index c1a768833b9..30666f5219a 100644 --- a/src/tools/rust-analyzer/crates/limit/Cargo.toml +++ b/src/tools/rust-analyzer/crates/limit/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "limit" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A struct to enforce limits for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml index 64ed93bbb16..23fd50a0564 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml +++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "load-cargo" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Loads a Cargo project into a static instance of rust-analyzer for analysis." rust-version.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 8737f2246be..abad7e9f7d2 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -16,11 +16,16 @@ use ide_db::{ use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{ - CargoConfig, ManifestPath, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind, + CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind, }; use span::Span; -use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; +use vfs::{ + file_set::FileSetConfig, + loader::{Handle, LoadingProgress}, + AbsPath, AbsPathBuf, VfsPath, +}; +#[derive(Debug)] pub struct LoadCargoConfig { pub load_out_dirs_from_check: bool, pub with_proc_macro_server: ProcMacroServerChoice, @@ -60,11 +65,11 @@ pub fn load_workspace( let (sender, receiver) = unbounded(); let mut vfs = vfs::Vfs::default(); let mut loader = { - let loader = - vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); + let loader = vfs_notify::NotifyHandle::spawn(sender); Box::new(loader) }; + tracing::debug!(?load_config, "LoadCargoConfig"); let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() @@ -77,6 +82,14 @@ pub fn load_workspace( Err((anyhow::format_err!("proc macro server disabled"), false)) } }; + match &proc_macro_server { + Ok(server) => { + tracing::info!(path=%server.path(), "Proc-macro server started") + } + Err((e, _)) => { + tracing::info!(%e, "Failed to start proc-macro server") + } + } let (crate_graph, proc_macros) = ws.to_crate_graph( &mut |path: &AbsPath| { @@ -247,7 +260,7 @@ impl ProjectFolders { let mut file_set_roots: Vec<VfsPath> = vec![]; let mut entries = vec![]; - if let Some(manifest) = ws.manifest().map(ManifestPath::as_ref) { + if let Some(manifest) = ws.manifest().map(|it| it.to_path_buf()) { file_set_roots.push(VfsPath::from(manifest.to_owned())); entries.push(manifest.to_owned()); } @@ -409,8 +422,8 @@ fn load_crate_graph( // wait until Vfs has loaded all roots for task in receiver { match task { - vfs::loader::Message::Progress { n_done, n_total, .. } => { - if n_done == Some(n_total) { + vfs::loader::Message::Progress { n_done, .. } => { + if n_done == LoadingProgress::Finished { break; } } diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml index 57834623e84..756d42ef573 100644 --- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml +++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "mbe" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Handling of `macro_rules` macros for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -25,6 +26,7 @@ tt.workspace = true stdx.workspace = true span.workspace = true intern.workspace = true +syntax-bridge.workspace = true [dev-dependencies] test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index b6db4d2e76c..43604eb232d 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -7,11 +7,15 @@ use syntax::{ ast::{self, HasName}, AstNode, }; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY, + DeclarativeMacro, }; #[test] diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index 568490d5734..88785537c7d 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -8,13 +8,12 @@ mod expander; mod parser; -mod syntax_bridge; -mod to_parser_input; #[cfg(test)] mod benchmark; use span::{Edition, Span, SyntaxContextId}; +use syntax_bridge::to_parser_input; use tt::iter::TtIter; use tt::DelimSpan; @@ -23,18 +22,8 @@ use std::sync::Arc; use crate::parser::{MetaTemplate, MetaVarKind, Op}; -// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces -pub use ::parser::TopEntryPoint; pub use tt::{Delimiter, DelimiterKind, Punct}; -pub use crate::syntax_bridge::{ - desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree, - parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified, - token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper, -}; - -pub use crate::syntax_bridge::dummy_test_span_utils::*; - #[derive(Debug, PartialEq, Eq, Clone)] pub enum ParseError { UnexpectedToken(Box<str>), @@ -361,7 +350,7 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> { } } -fn expect_fragment( +pub fn expect_fragment( tt_iter: &mut TtIter<'_, Span>, entry_point: ::parser::PrefixEntryPoint, edition: ::parser::Edition, @@ -369,7 +358,7 @@ fn expect_fragment( ) -> ExpandResult<Option<tt::TokenTree<Span>>> { use ::parser; let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice()); - let parser_input = to_parser_input::to_parser_input(edition, &buffer); + let parser_input = to_parser_input(edition, &buffer); let tree_traversal = entry_point.parse(&parser_input, edition); let mut cursor = buffer.begin(); let mut error = false; diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml index 54b57c201be..d5255665b46 100644 --- a/src/tools/rust-analyzer/crates/parser/Cargo.toml +++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "parser" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "The Rust parser for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml index 59a4ad9a255..d4b0a54ed64 100644 --- a/src/tools/rust-analyzer/crates/paths/Cargo.toml +++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "paths" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Path wrappers for absolute and relative paths rust-analyzer." authors.workspace = true edition.workspace = true @@ -13,13 +14,10 @@ doctest = false [dependencies] camino.workspace = true -# Adding this dep sadly puts a lot of rust-analyzer crates after the -# serde-derive crate. Even though we don't activate the derive feature here, -# someone else in the crate graph certainly does! -# serde.workspace = true +serde = { workspace = true, optional = true } [features] -serde1 = ["camino/serde1"] +serde1 = ["camino/serde1", "dep:serde"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs index 885f071889e..00842441616 100644 --- a/src/tools/rust-analyzer/crates/paths/src/lib.rs +++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs @@ -1,4 +1,4 @@ -//! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and +//! Thin wrappers around [`camino::path`], distinguishing between absolute and //! relative paths. use std::{ @@ -8,9 +8,9 @@ use std::{ path::{Path, PathBuf}, }; -pub use camino::*; +pub use camino::{Utf8Component, Utf8Components, Utf8Path, Utf8PathBuf, Utf8Prefix}; -/// Wrapper around an absolute [`Utf8PathBuf`]. +/// A [`Utf8PathBuf`] that is guaranteed to be absolute. #[derive(Debug, Clone, Ord, PartialOrd, Eq, Hash)] pub struct AbsPathBuf(Utf8PathBuf); @@ -73,16 +73,6 @@ impl TryFrom<Utf8PathBuf> for AbsPathBuf { } } -impl TryFrom<PathBuf> for AbsPathBuf { - type Error = PathBuf; - fn try_from(path_buf: PathBuf) -> Result<AbsPathBuf, PathBuf> { - if !path_buf.is_absolute() { - return Err(path_buf); - } - Ok(AbsPathBuf(Utf8PathBuf::from_path_buf(path_buf)?)) - } -} - impl TryFrom<&str> for AbsPathBuf { type Error = Utf8PathBuf; fn try_from(path: &str) -> Result<AbsPathBuf, Utf8PathBuf> { @@ -151,6 +141,10 @@ impl AbsPathBuf { pub fn push<P: AsRef<Utf8Path>>(&mut self, suffix: P) { self.0.push(suffix) } + + pub fn join(&self, path: impl AsRef<Utf8Path>) -> Self { + Self(self.0.join(path)) + } } impl fmt::Display for AbsPathBuf { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index 345fb9f8ae9..84b877f026b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "proc-macro-api" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "RPC Api for the `proc-macro-srv` crate of rust-analyzer." 
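A minimal sketch (not part of the patch) of the `paths` change above: with the `TryFrom<PathBuf>` impl removed, callers convert through `Utf8PathBuf` first, and the newly added `AbsPathBuf::join` keeps results absolute. Only APIs visible in this diff are used; the helper names are illustrative.

    use paths::{AbsPathBuf, Utf8PathBuf};
    use std::path::PathBuf;

    // Convert via `Utf8PathBuf`: fails on non-UTF-8 paths, then on relative ones.
    fn to_abs(path: PathBuf) -> Option<AbsPathBuf> {
        let utf8 = Utf8PathBuf::from_path_buf(path).ok()?;
        AbsPathBuf::try_from(utf8).ok()
    }

    // The new `join` stays in the `AbsPathBuf` domain.
    fn manifest_of(workspace_root: &AbsPathBuf) -> AbsPathBuf {
        workspace_root.join("Cargo.toml")
    }
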
authors.workspace = true edition.workspace = true @@ -22,12 +23,11 @@ indexmap.workspace = true paths = { workspace = true, features = ["serde1"] } tt.workspace = true stdx.workspace = true -text-size.workspace = true -span.workspace = true # Ideally this crate would not depend on salsa things, but we need span information here which wraps # InternIds for the syntax context +span.workspace = true +# only here due to the `Env` newtype :/ base-db.workspace = true -la-arena.workspace = true intern.workspace = true [lints] diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index 6a99b5ed1cc..883528558d9 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -158,9 +158,7 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) #[cfg(test)] mod tests { use intern::{sym, Symbol}; - use la_arena::RawIdx; - use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; - use text_size::{TextRange, TextSize}; + use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize}; use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; use super::*; @@ -171,7 +169,7 @@ mod tests { span::FileId::from_raw(0xe4e4e), span::Edition::CURRENT, ), - ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), + ast_id: ErasedFileAstId::from_raw(0), }; let token_trees = Box::new([ diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index a8661f59b28..88256e98b58 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -38,11 +38,9 @@ use std::collections::VecDeque; use intern::Symbol; -use la_arena::RawIdx; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; -use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; -use text_size::TextRange; +use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange}; use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; @@ -54,7 +52,7 @@ pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> { .flat_map(|span| { [ span.anchor.file_id.as_u32(), - span.anchor.ast_id.into_raw().into_u32(), + span.anchor.ast_id.into_raw(), span.range.start().into(), span.range.end().into(), span.ctx.into_u32(), @@ -71,7 +69,7 @@ pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { Span { anchor: SpanAnchor { file_id: EditionedFileId::from_raw(file_id), - ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)), + ast_id: ErasedFileAstId::from_raw(ast_id), }, range: TextRange::new(start.into(), end.into()), ctx: SyntaxContextId::from_u32(e), diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index a559ba01755..1c394513c45 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "proc-macro-srv-cli" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A standalone binary for the `proc-macro-srv` crate of rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 673b5bd78a8..e8d9677c928 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "proc-macro-srv" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Proc-macro server for rust-analyzer." authors.workspace = true edition.workspace = true @@ -19,23 +20,24 @@ snap.workspace = true stdx.workspace = true tt.workspace = true -mbe.workspace = true +syntax-bridge.workspace = true paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true -ra-ap-rustc_lexer.workspace = true intern.workspace = true +ra-ap-rustc_lexer.workspace = true + [dev-dependencies] -expect-test = "1.4.0" +expect-test.workspace = true # used as proc macro test targets proc-macro-test.path = "./proc-macro-test" [features] sysroot-abi = [] -in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"] +in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 8b9eb3beb6e..552d99f51ba 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -479,7 +479,7 @@ mod tests { range: TextRange::empty(TextSize::new(0)), anchor: span::SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(0)), - ast_id: span::ErasedFileAstId::from_raw(0.into()), + ast_id: span::ErasedFileAstId::from_raw(0), }, ctx: SyntaxContextId::ROOT, }; @@ -515,7 +515,7 @@ mod tests { range: TextRange::empty(TextSize::new(0)), anchor: span::SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(0)), - ast_id: span::ErasedFileAstId::from_raw(0.into()), + ast_id: span::ErasedFileAstId::from_raw(0), }, ctx: SyntaxContextId::ROOT, }; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs index cdf93fa4251..4d8d496418b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs @@ -126,9 +126,12 @@ pub(super) mod token_stream { /// change these errors into `LexError`s later. 
impl<S: Copy + fmt::Debug> TokenStream<S> { pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> { - let subtree = - mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src) - .ok_or("lexing error")?; + let subtree = syntax_bridge::parse_to_token_tree_static_span( + span::Edition::CURRENT_FIXME, + call_site, + src, + ) + .ok_or("lexing error")?; Ok(TokenStream::with_subtree(subtree)) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 70eff51cade..4f1a18c03fc 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -9,7 +9,8 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(), + syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src) + .unwrap(), ) } @@ -19,7 +20,7 @@ fn parse_string_spanned( src: &str, ) -> crate::server_impl::TokenStream<Span> { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), + syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), ) } @@ -69,7 +70,7 @@ fn assert_expand_impl( range: TextRange::new(0.into(), 150.into()), anchor: SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(41)), - ast_id: ErasedFileAstId::from_raw(From::from(1)), + ast_id: ErasedFileAstId::from_raw(1), }, ctx: SyntaxContextId::ROOT, }; @@ -77,7 +78,7 @@ fn assert_expand_impl( range: TextRange::new(0.into(), 100.into()), anchor: SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(42)), - ast_id: ErasedFileAstId::from_raw(From::from(2)), + ast_id: ErasedFileAstId::from_raw(2), }, ctx: SyntaxContextId::ROOT, }; diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 5989dc6c962..2e3413f339b 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "profile" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A collection of tools for profiling rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 8b34bd3fad1..68e0e1ba554 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "project-model" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A representation for a Cargo project for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index 839d8e569fe..e7a4b8f39f7 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -1,28 +1,25 @@ -//! 
Workspace information we get from cargo consists of two pieces. The first is -//! the output of `cargo metadata`. The second is the output of running -//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc -//! macro. +//! Logic to invoke `cargo` for building build-dependencies (build scripts and proc-macros) as well as +//! executing the build scripts to fetch required dependency information (`OUT_DIR` env var, extra +//! cfg flags, etc). //! -//! This module implements this second part. We use "build script" terminology -//! here, but it covers procedural macros as well. - -use std::{ - cell::RefCell, - io, mem, - path::{self, PathBuf}, - process::Command, -}; +//! In essence this just invokes `cargo` with the appropriate output format which we consume, +//! but if enabled we will also use `RUSTC_WRAPPER` to only compile the build scripts and +//! proc-macros and skip everything else. + +use std::{cell::RefCell, io, mem, process::Command}; +use base_db::Env; use cargo_metadata::{camino::Utf8Path, Message}; +use cfg::CfgAtom; use itertools::Itertools; use la_arena::ArenaMap; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize; use toolchain::Tool; use crate::{ - cfg::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, + utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, InvocationStrategy, ManifestPath, Package, Sysroot, TargetKind, }; @@ -37,12 +34,12 @@ pub struct WorkspaceBuildScripts { #[derive(Debug, Clone, Default, PartialEq, Eq)] pub(crate) struct BuildScriptOutput { /// List of config flags defined by this package's build script. - pub(crate) cfgs: Vec<CfgFlag>, + pub(crate) cfgs: Vec<CfgAtom>, /// List of cargo-related environment variables with their value. /// /// If the package has a build script which defines environment variables, /// they can also be found here. - pub(crate) envs: Vec<(String, String)>, + pub(crate) envs: Env, /// Directory where a build script might place its output. pub(crate) out_dir: Option<AbsPathBuf>, /// Path to the proc-macro library file if this package exposes proc-macros. @@ -50,7 +47,7 @@ pub(crate) struct BuildScriptOutput { } impl BuildScriptOutput { - fn is_unchanged(&self) -> bool { + fn is_empty(&self) -> bool { self.cfgs.is_empty() && self.envs.is_empty() && self.out_dir.is_none() @@ -59,85 +56,6 @@ impl BuildScriptOutput { } impl WorkspaceBuildScripts { - fn build_command( - config: &CargoConfig, - allowed_features: &FxHashSet<String>, - manifest_path: &ManifestPath, - sysroot: &Sysroot, - ) -> io::Result<Command> { - let mut cmd = match config.run_build_script_command.as_deref() { - Some([program, args @ ..]) => { - let mut cmd = Command::new(program); - cmd.args(args); - cmd - } - _ => { - let mut cmd = sysroot.tool(Tool::Cargo); - - cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); - cmd.args(&config.extra_args); - - cmd.arg("--manifest-path"); - cmd.arg(manifest_path.as_ref()); - - if let Some(target_dir) = &config.target_dir { - cmd.arg("--target-dir").arg(target_dir); - } - - // --all-targets includes tests, benches and examples in addition to the - // default lib and bins. This is an independent concept from the --target - // flag below. 
- if config.all_targets { - cmd.arg("--all-targets"); - } - - if let Some(target) = &config.target { - cmd.args(["--target", target]); - } - - match &config.features { - CargoFeatures::All => { - cmd.arg("--all-features"); - } - CargoFeatures::Selected { features, no_default_features } => { - if *no_default_features { - cmd.arg("--no-default-features"); - } - if !features.is_empty() { - cmd.arg("--features"); - cmd.arg( - features - .iter() - .filter(|&feat| allowed_features.contains(feat)) - .join(","), - ); - } - } - } - - if manifest_path.is_rust_manifest() { - cmd.arg("-Zscript"); - } - - cmd.arg("--keep-going"); - - cmd - } - }; - - cmd.envs(&config.extra_env); - if config.wrap_rustc_in_build_scripts { - // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use - // that to compile only proc macros and build scripts during the initial - // `cargo check`. - let myself = std::env::current_exe()?; - cmd.env("RUSTC_WRAPPER", myself); - cmd.env("RA_RUSTC_WRAPPER", "1"); - } - - Ok(cmd) - } - /// Runs the build scripts for the given workspace pub(crate) fn run_for_workspace( config: &CargoConfig, @@ -146,17 +64,19 @@ impl WorkspaceBuildScripts { sysroot: &Sysroot, ) -> io::Result<WorkspaceBuildScripts> { let current_dir = match &config.invocation_location { - InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { - root.as_path() - } + InvocationLocation::Root(root) if config.run_build_script_command.is_some() => root, _ => workspace.workspace_root(), - } - .as_ref(); + }; let allowed_features = workspace.workspace_features(); - let cmd = - Self::build_command(config, &allowed_features, workspace.manifest_path(), sysroot)?; - Self::run_per_ws(cmd, workspace, current_dir, progress) + let cmd = Self::build_command( + config, + &allowed_features, + workspace.manifest_path(), + current_dir, + sysroot, + )?; + Self::run_per_ws(cmd, workspace, progress) } /// Runs the build scripts by invoking the configured command *once*. @@ -183,6 +103,7 @@ impl WorkspaceBuildScripts { &Default::default(), // This is not gonna be used anyways, so just construct a dummy here &ManifestPath::try_from(workspace_root.clone()).unwrap(), + current_dir, &Sysroot::empty(), )?; // NB: Cargo.toml could have been modified between `cargo metadata` and @@ -211,7 +132,6 @@ impl WorkspaceBuildScripts { let errors = Self::run_command( cmd, - current_dir.as_path().as_ref(), |package, cb| { if let Some(&(package, workspace)) = by_id.get(package) { cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]); @@ -230,7 +150,7 @@ impl WorkspaceBuildScripts { for (idx, workspace) in workspaces.iter().enumerate() { for package in workspace.packages() { let package_build_data = &mut res[idx].outputs[package]; - if !package_build_data.is_unchanged() { + if !package_build_data.is_empty() { tracing::info!( "{}: {package_build_data:?}", workspace[package].manifest.parent(), @@ -243,10 +163,100 @@ impl WorkspaceBuildScripts { Ok(res) } + pub fn error(&self) -> Option<&str> { + self.error.as_deref() + } + + pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { + self.outputs.get(idx) + } + + /// Assembles build script outputs for the rustc crates via `--print target-libdir`. 
+ pub(crate) fn rustc_crates( + rustc: &CargoWorkspace, + current_dir: &AbsPath, + extra_env: &FxHashMap<String, String>, + sysroot: &Sysroot, + ) -> Self { + let mut bs = WorkspaceBuildScripts::default(); + for p in rustc.packages() { + bs.outputs.insert(p, BuildScriptOutput::default()); + } + let res = (|| { + let target_libdir = (|| { + let mut cargo_config = sysroot.tool(Tool::Cargo); + cargo_config.envs(extra_env); + cargo_config + .current_dir(current_dir) + .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Ok(it) = utf8_stdout(cargo_config) { + return Ok(it); + } + let mut cmd = sysroot.tool(Tool::Rustc); + cmd.envs(extra_env); + cmd.args(["--print", "target-libdir"]); + utf8_stdout(cmd) + })()?; + + let target_libdir = AbsPathBuf::try_from(Utf8PathBuf::from(target_libdir)) + .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?; + tracing::info!("Loading rustc proc-macro paths from {target_libdir}"); + + let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)? + .filter_map(|entry| { + let dir_entry = entry.ok()?; + if dir_entry.file_type().ok()?.is_file() { + let path = dir_entry.path(); + let extension = path.extension()?; + if extension == std::env::consts::DLL_EXTENSION { + let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned(); + let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?) + .ok()?; + return Some((name, path)); + } + } + None + }) + .collect(); + for p in rustc.packages() { + let package = &rustc[p]; + if package + .targets + .iter() + .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) + { + if let Some((_, path)) = proc_macro_dylibs + .iter() + .find(|(name, _)| *name.trim_start_matches("lib") == package.name) + { + bs.outputs[p].proc_macro_dylib_path = Some(path.clone()); + } + } + } + + if tracing::enabled!(tracing::Level::INFO) { + for package in rustc.packages() { + let package_build_data = &bs.outputs[package]; + if !package_build_data.is_empty() { + tracing::info!( + "{}: {package_build_data:?}", + rustc[package].manifest.parent(), + ); + } + } + } + Ok(()) + })(); + if let Err::<_, anyhow::Error>(e) = res { + bs.error = Some(e.to_string()); + } + bs + } + fn run_per_ws( cmd: Command, workspace: &CargoWorkspace, - current_dir: &path::Path, progress: &dyn Fn(String), ) -> io::Result<WorkspaceBuildScripts> { let mut res = WorkspaceBuildScripts::default(); @@ -262,7 +272,6 @@ impl WorkspaceBuildScripts { res.error = Self::run_command( cmd, - current_dir, |package, cb| { if let Some(&package) = by_id.get(package) { cb(&workspace[package].name, &mut outputs[package]); @@ -274,7 +283,7 @@ impl WorkspaceBuildScripts { if tracing::enabled!(tracing::Level::INFO) { for package in workspace.packages() { let package_build_data = &outputs[package]; - if !package_build_data.is_unchanged() { + if !package_build_data.is_empty() { tracing::info!( "{}: {package_build_data:?}", workspace[package].manifest.parent(), @@ -287,8 +296,7 @@ impl WorkspaceBuildScripts { } fn run_command( - mut cmd: Command, - current_dir: &path::Path, + cmd: Command, // ideally this would be something like: // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)), // but owned trait objects aren't a thing @@ -302,8 +310,7 @@ impl WorkspaceBuildScripts { e.push('\n'); }; - tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd); - cmd.current_dir(current_dir); + tracing::info!("Running build 
scripts: {:?}", cmd); let output = stdx::process::spawn_with_streaming_output( cmd, &mut |line| { @@ -321,7 +328,7 @@ impl WorkspaceBuildScripts { let cfgs = { let mut acc = Vec::new(); for cfg in &message.cfgs { - match cfg.parse::<CfgFlag>() { + match crate::parse_cfg(cfg) { Ok(it) => acc.push(it), Err(err) => { push_err(&format!( @@ -333,16 +340,14 @@ impl WorkspaceBuildScripts { } acc }; - if !message.env.is_empty() { - data.envs = mem::take(&mut message.env); - } + data.envs.extend(message.env.drain(..)); // cargo_metadata crate returns default (empty) path for // older cargos, which is not absolute, so work around that. let out_dir = mem::take(&mut message.out_dir); if !out_dir.as_str().is_empty() { let out_dir = AbsPathBuf::assert(out_dir); // inject_cargo_env(package, package_build_data); - data.envs.push(("OUT_DIR".to_owned(), out_dir.as_str().to_owned())); + data.envs.insert("OUT_DIR", out_dir.as_str()); data.out_dir = Some(out_dir); data.cfgs = cfgs; } @@ -354,7 +359,7 @@ impl WorkspaceBuildScripts { if message.target.kind.iter().any(|k| k == "proc-macro") { // Skip rmeta file if let Some(filename) = - message.filenames.iter().find(|name| is_dylib(name)) + message.filenames.iter().find(|file| is_dylib(file)) { let filename = AbsPath::assert(filename); data.proc_macro_dylib_path = Some(filename.to_owned()); @@ -388,93 +393,85 @@ impl WorkspaceBuildScripts { Ok(errors) } - pub fn error(&self) -> Option<&str> { - self.error.as_deref() - } - - pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { - self.outputs.get(idx) - } - - pub(crate) fn rustc_crates( - rustc: &CargoWorkspace, + fn build_command( + config: &CargoConfig, + allowed_features: &FxHashSet<String>, + manifest_path: &ManifestPath, current_dir: &AbsPath, - extra_env: &FxHashMap<String, String>, sysroot: &Sysroot, - ) -> Self { - let mut bs = WorkspaceBuildScripts::default(); - for p in rustc.packages() { - bs.outputs.insert(p, BuildScriptOutput::default()); - } - let res = (|| { - let target_libdir = (|| { - let mut cargo_config = sysroot.tool(Tool::Cargo); - cargo_config.envs(extra_env); - cargo_config - .current_dir(current_dir) - .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"]) - .env("RUSTC_BOOTSTRAP", "1"); - if let Ok(it) = utf8_stdout(cargo_config) { - return Ok(it); + ) -> io::Result<Command> { + let mut cmd = match config.run_build_script_command.as_deref() { + Some([program, args @ ..]) => { + let mut cmd = Command::new(program); + cmd.args(args); + cmd + } + _ => { + let mut cmd = sysroot.tool(Tool::Cargo); + + cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); + cmd.args(&config.extra_args); + + cmd.arg("--manifest-path"); + cmd.arg(manifest_path); + + if let Some(target_dir) = &config.target_dir { + cmd.arg("--target-dir").arg(target_dir); } - let mut cmd = sysroot.tool(Tool::Rustc); - cmd.envs(extra_env); - cmd.args(["--print", "target-libdir"]); - utf8_stdout(cmd) - })()?; - let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir)) - .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?; - tracing::info!("Loading rustc proc-macro paths from {target_libdir}"); + // --all-targets includes tests, benches and examples in addition to the + // default lib and bins. This is an independent concept from the --target + // flag below. + if config.all_targets { + cmd.arg("--all-targets"); + } - let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)? 
- .filter_map(|entry| { - let dir_entry = entry.ok()?; - if dir_entry.file_type().ok()?.is_file() { - let path = dir_entry.path(); - let extension = path.extension()?; - if extension == std::env::consts::DLL_EXTENSION { - let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned(); - let path = AbsPathBuf::try_from(path).ok()?; - return Some((name, path)); - } + if let Some(target) = &config.target { + cmd.args(["--target", target]); + } + + match &config.features { + CargoFeatures::All => { + cmd.arg("--all-features"); } - None - }) - .collect(); - for p in rustc.packages() { - let package = &rustc[p]; - if package - .targets - .iter() - .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) - { - if let Some((_, path)) = proc_macro_dylibs - .iter() - .find(|(name, _)| *name.trim_start_matches("lib") == package.name) - { - bs.outputs[p].proc_macro_dylib_path = Some(path.clone()); + CargoFeatures::Selected { features, no_default_features } => { + if *no_default_features { + cmd.arg("--no-default-features"); + } + if !features.is_empty() { + cmd.arg("--features"); + cmd.arg( + features + .iter() + .filter(|&feat| allowed_features.contains(feat)) + .join(","), + ); + } } } - } - if tracing::enabled!(tracing::Level::INFO) { - for package in rustc.packages() { - let package_build_data = &bs.outputs[package]; - if !package_build_data.is_unchanged() { - tracing::info!( - "{}: {package_build_data:?}", - rustc[package].manifest.parent(), - ); - } + if manifest_path.is_rust_manifest() { + cmd.arg("-Zscript"); } + + cmd.arg("--keep-going"); + + cmd } - Ok(()) - })(); - if let Err::<_, anyhow::Error>(e) = res { - bs.error = Some(e.to_string()); + }; + + cmd.current_dir(current_dir); + cmd.envs(&config.extra_env); + if config.wrap_rustc_in_build_scripts { + // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use + // that to compile only proc macros and build scripts during the initial + // `cargo check`. + let myself = std::env::current_exe()?; + cmd.env("RUSTC_WRAPPER", myself); + cmd.env("RA_RUSTC_WRAPPER", "1"); } - bs + + Ok(cmd) } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 632ba1cacf2..38eeedec621 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -85,8 +85,6 @@ pub struct CargoConfig { pub target: Option<String>, /// Sysroot loading behavior pub sysroot: Option<RustLibSource>, - /// Whether to invoke `cargo metadata` on the sysroot crate. - pub sysroot_query_metadata: bool, pub sysroot_src: Option<AbsPathBuf>, /// rustc private crate source pub rustc_source: Option<RustLibSource>, @@ -259,6 +257,7 @@ impl CargoWorkspace { current_dir: &AbsPath, config: &CargoConfig, sysroot: &Sysroot, + locked: bool, progress: &dyn Fn(String), ) -> anyhow::Result<cargo_metadata::Metadata> { let targets = find_list_of_build_targets(config, cargo_toml, sysroot); @@ -312,6 +311,9 @@ impl CargoWorkspace { // opt into it themselves. 
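For context on the `RUSTC_WRAPPER`/`RA_RUSTC_WRAPPER` setup in the `build_command` hunk above: cargo invokes the wrapper as `<wrapper> <real-rustc> <args...>`, and rust-analyzer uses that hook to skip most compilation during the initial `cargo check`, only building proc macros and build scripts. A minimal, illustrative wrapper following that contract (the actual filtering logic is not reproduced here):

use std::process::{Command, ExitCode};

fn main() -> ExitCode {
    // Cargo passes the path to the real rustc as the first argument.
    let mut args = std::env::args_os().skip(1);
    let rustc = args.next().expect("cargo always passes the real rustc first");
    // A real wrapper would inspect the remaining arguments and exit early for
    // crates it does not need to compile; this sketch forwards everything.
    let status = Command::new(rustc).args(args).status().expect("failed to spawn rustc");
    ExitCode::from(status.code().unwrap_or(1) as u8)
}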
other_options.push("-Zscript".to_owned()); } + if locked { + other_options.push("--locked".to_owned()); + } meta.other_options(other_options); // FIXME: Fetching metadata is a slow process, as it might require diff --git a/src/tools/rust-analyzer/crates/project-model/src/cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/cfg.rs deleted file mode 100644 index e921e3de722..00000000000 --- a/src/tools/rust-analyzer/crates/project-model/src/cfg.rs +++ /dev/null @@ -1,100 +0,0 @@ -//! Parsing of CfgFlags as command line arguments, as in -//! -//! rustc main.rs --cfg foo --cfg 'feature="bar"' -use std::{fmt, str::FromStr}; - -use cfg::{CfgDiff, CfgOptions}; -use intern::Symbol; -use rustc_hash::FxHashMap; -use serde::Serialize; - -#[derive(Clone, Eq, PartialEq, Debug, Serialize)] -pub enum CfgFlag { - Atom(String), - KeyValue { key: String, value: String }, -} - -impl FromStr for CfgFlag { - type Err = String; - fn from_str(s: &str) -> Result<Self, Self::Err> { - let res = match s.split_once('=') { - Some((key, value)) => { - if !(value.starts_with('"') && value.ends_with('"')) { - return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); - } - let key = key.to_owned(); - let value = value[1..value.len() - 1].to_string(); - CfgFlag::KeyValue { key, value } - } - None => CfgFlag::Atom(s.into()), - }; - Ok(res) - } -} - -impl<'de> serde::Deserialize<'de> for CfgFlag { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'de>, - { - String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) - } -} - -impl Extend<CfgFlag> for CfgOptions { - fn extend<T: IntoIterator<Item = CfgFlag>>(&mut self, iter: T) { - for cfg_flag in iter { - match cfg_flag { - CfgFlag::Atom(it) => self.insert_atom(Symbol::intern(&it)), - CfgFlag::KeyValue { key, value } => { - self.insert_key_value(Symbol::intern(&key), Symbol::intern(&value)) - } - } - } - } -} - -impl FromIterator<CfgFlag> for CfgOptions { - fn from_iter<T: IntoIterator<Item = CfgFlag>>(iter: T) -> Self { - let mut this = CfgOptions::default(); - this.extend(iter); - this - } -} - -impl fmt::Display for CfgFlag { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - CfgFlag::Atom(atom) => f.write_str(atom), - CfgFlag::KeyValue { key, value } => { - f.write_str(key)?; - f.write_str("=")?; - f.write_str(value) - } - } - } -} - -/// A set of cfg-overrides per crate. -#[derive(Default, Debug, Clone, Eq, PartialEq)] -pub struct CfgOverrides { - /// A global set of overrides matching all crates. - pub global: CfgDiff, - /// A set of overrides matching specific crates. 
- pub selective: FxHashMap<String, CfgDiff>, -} - -impl CfgOverrides { - pub fn len(&self) -> usize { - self.global.len() + self.selective.values().map(|it| it.len()).sum::<usize>() - } - - pub fn apply(&self, cfg_options: &mut CfgOptions, name: &str) { - if !self.global.is_empty() { - cfg_options.apply_diff(self.global.clone()); - }; - if let Some(diff) = self.selective.get(name) { - cfg_options.apply_diff(diff.clone()); - }; - } -} diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index 049acc290bb..ac7246acc59 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -90,10 +90,13 @@ fn parse_output_cargo_config_env(stdout: String) -> FxHashMap<String, String> { stdout .lines() .filter_map(|l| l.strip_prefix("env.")) - .filter_map(|l| { - l.split_once(" = ") - // cargo used to report it with this, keep it for a couple releases around - .or_else(|| l.split_once(".value = ")) + .filter_map(|l| l.split_once(" = ")) + .filter_map(|(k, v)| { + if k.contains('.') { + k.strip_suffix(".value").zip(Some(v)) + } else { + Some((k, v)) + } }) .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned())) .collect() diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 92bf6a08f87..4fa70508bbd 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -15,9 +15,8 @@ //! procedural macros). //! * Lowering of concrete model to a [`base_db::CrateGraph`] -mod build_scripts; +mod build_dependencies; mod cargo_workspace; -mod cfg; mod env; mod manifest_path; pub mod project_json; @@ -37,16 +36,15 @@ use std::{ }; use anyhow::{bail, format_err, Context}; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashSet; pub use crate::{ - build_scripts::WorkspaceBuildScripts, + build_dependencies::WorkspaceBuildScripts, cargo_workspace::{ CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency, RustLibSource, Target, TargetData, TargetKind, }, - cfg::CfgOverrides, manifest_path::ManifestPath, project_json::{ProjectJson, ProjectJsonData}, sysroot::Sysroot, @@ -68,6 +66,9 @@ impl ProjectManifest { if path.file_name().unwrap_or_default() == "rust-project.json" { return Ok(ProjectManifest::ProjectJson(path)); } + if path.file_name().unwrap_or_default() == ".rust-project.json" { + return Ok(ProjectManifest::ProjectJson(path)); + } if path.file_name().unwrap_or_default() == "Cargo.toml" { return Ok(ProjectManifest::CargoToml(path)); } @@ -94,6 +95,9 @@ impl ProjectManifest { if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") { return Ok(vec![ProjectManifest::ProjectJson(project_json)]); } + if let Some(project_json) = find_in_parent_dirs(path, ".rust-project.json") { + return Ok(vec![ProjectManifest::ProjectJson(project_json)]); + } return find_cargo_toml(path) .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect()); @@ -132,8 +136,11 @@ impl ProjectManifest { .filter_map(Result::ok) .map(|it| it.path().join("Cargo.toml")) .filter(|it| it.exists()) + .map(Utf8PathBuf::from_path_buf) + .filter_map(Result::ok) .map(AbsPathBuf::try_from) - .filter_map(|it| it.ok()?.try_into().ok()) + .filter_map(Result::ok) + .filter_map(|it| it.try_into().ok()) .collect() } } @@ -192,3 +199,42 @@ pub enum 
InvocationLocation { #[default] Workspace, } + +/// A set of cfg-overrides per crate. +#[derive(Default, Debug, Clone, Eq, PartialEq)] +pub struct CfgOverrides { + /// A global set of overrides matching all crates. + pub global: cfg::CfgDiff, + /// A set of overrides matching specific crates. + pub selective: rustc_hash::FxHashMap<String, cfg::CfgDiff>, +} + +impl CfgOverrides { + pub fn len(&self) -> usize { + self.global.len() + self.selective.values().map(|it| it.len()).sum::<usize>() + } + + pub fn apply(&self, cfg_options: &mut cfg::CfgOptions, name: &str) { + if !self.global.is_empty() { + cfg_options.apply_diff(self.global.clone()); + }; + if let Some(diff) = self.selective.get(name) { + cfg_options.apply_diff(diff.clone()); + }; + } +} + +fn parse_cfg(s: &str) -> Result<cfg::CfgAtom, String> { + let res = match s.split_once('=') { + Some((key, value)) => { + if !(value.starts_with('"') && value.ends_with('"')) { + return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); + } + let key = intern::Symbol::intern(key); + let value = intern::Symbol::intern(&value[1..value.len() - 1]); + cfg::CfgAtom::KeyValue { key, value } + } + None => cfg::CfgAtom::Flag(intern::Symbol::intern(s)), + }; + Ok(res) +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs index 2331c0c36c3..a8be5dff7b6 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs @@ -64,6 +64,18 @@ impl AsRef<AbsPath> for ManifestPath { } } +impl AsRef<std::path::Path> for ManifestPath { + fn as_ref(&self) -> &std::path::Path { + self.file.as_ref() + } +} + +impl AsRef<std::ffi::OsStr> for ManifestPath { + fn as_ref(&self) -> &std::ffi::OsStr { + self.file.as_ref() + } +} + impl Borrow<AbsPath> for ManifestPath { fn borrow(&self) -> &AbsPath { self.file.borrow() diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index cf0a6ad4025..7dea0c38398 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -50,12 +50,13 @@ //! rust-project.json over time via configuration request!) use base_db::{CrateDisplayName, CrateName}; +use cfg::CfgAtom; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::{de, Deserialize, Serialize}; use span::Edition; -use crate::{cfg::CfgFlag, ManifestPath, TargetKind}; +use crate::{ManifestPath, TargetKind}; /// Roots and crates that compose this Rust project. #[derive(Clone, Debug, Eq, PartialEq)] @@ -73,106 +74,6 @@ pub struct ProjectJson { runnables: Vec<Runnable>, } -/// A crate points to the root module of a crate and lists the dependencies of the crate. This is -/// useful in creating the crate graph. 
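A short sketch of the expected behavior of the `parse_cfg` helper added above (it replaces the deleted `CfgFlag::from_str`); the crate-internal `cfg::CfgAtom` type is the one used throughout this diff and is assumed to be in scope:

fn demo_parse_cfg() {
    // A bare identifier becomes a flag atom.
    assert!(matches!(parse_cfg("unix"), Ok(cfg::CfgAtom::Flag(_))));
    // `key="value"` becomes a key/value atom with the quotes stripped.
    assert!(matches!(parse_cfg(r#"feature="serde""#), Ok(cfg::CfgAtom::KeyValue { .. })));
    // Unquoted values are rejected, mirroring `rustc --cfg` command-line syntax.
    assert!(parse_cfg("feature=serde").is_err());
}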
-#[derive(Clone, Debug, Eq, PartialEq)] -pub struct Crate { - pub(crate) display_name: Option<CrateDisplayName>, - pub root_module: AbsPathBuf, - pub(crate) edition: Edition, - pub(crate) version: Option<String>, - pub(crate) deps: Vec<Dep>, - pub(crate) cfg: Vec<CfgFlag>, - pub(crate) target: Option<String>, - pub(crate) env: FxHashMap<String, String>, - pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>, - pub(crate) is_workspace_member: bool, - pub(crate) include: Vec<AbsPathBuf>, - pub(crate) exclude: Vec<AbsPathBuf>, - pub(crate) is_proc_macro: bool, - pub(crate) repository: Option<String>, - pub build: Option<Build>, -} - -/// Additional, build-specific data about a crate. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct Build { - /// The name associated with this crate. - /// - /// This is determined by the build system that produced - /// the `rust-project.json` in question. For instance, if buck were used, - /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`. - /// - /// Do not attempt to parse the contents of this string; it is a build system-specific - /// identifier similar to [`Crate::display_name`]. - pub label: String, - /// Path corresponding to the build system-specific file defining the crate. - /// - /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with - /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not be - /// be in the `rust-project.json`. - pub build_file: Utf8PathBuf, - /// The kind of target. - /// - /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`], - /// and [`TargetKind::Test`]. This information is used to determine what sort - /// of runnable codelens to provide, if any. - pub target_kind: TargetKind, -} - -/// A template-like structure for describing runnables. -/// -/// These are used for running and debugging binaries and tests without encoding -/// build system-specific knowledge into rust-analyzer. -/// -/// # Example -/// -/// Below is an example of a test runnable. `{label}` and `{test_id}` -/// are explained in [`Runnable::args`]'s documentation. -/// -/// ```json -/// { -/// "program": "buck", -/// "args": [ -/// "test", -/// "{label}", -/// "--", -/// "{test_id}", -/// "--print-passing-details" -/// ], -/// "cwd": "/home/user/repo-root/", -/// "kind": "testOne" -/// } -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Runnable { - /// The program invoked by the runnable. - /// - /// For example, this might be `cargo`, `buck`, or `bazel`. - pub program: String, - /// The arguments passed to [`Runnable::program`]. - /// - /// The args can contain two template strings: `{label}` and `{test_id}`. - /// rust-analyzer will find and replace `{label}` with [`Build::label`] and - /// `{test_id}` with the test name. - pub args: Vec<String>, - /// The current working directory of the runnable. - pub cwd: Utf8PathBuf, - pub kind: RunnableKind, -} - -/// The kind of runnable. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum RunnableKind { - Check, - - /// Can run a binary. - Run, - - /// Run a single test. - TestOne, -} - impl ProjectJson { /// Create a new ProjectJson instance. /// @@ -301,6 +202,106 @@ impl ProjectJson { } } +/// A crate points to the root module of a crate and lists the dependencies of the crate. This is +/// useful in creating the crate graph. 
+#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Crate { + pub(crate) display_name: Option<CrateDisplayName>, + pub root_module: AbsPathBuf, + pub(crate) edition: Edition, + pub(crate) version: Option<String>, + pub(crate) deps: Vec<Dep>, + pub(crate) cfg: Vec<CfgAtom>, + pub(crate) target: Option<String>, + pub(crate) env: FxHashMap<String, String>, + pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>, + pub(crate) is_workspace_member: bool, + pub(crate) include: Vec<AbsPathBuf>, + pub(crate) exclude: Vec<AbsPathBuf>, + pub(crate) is_proc_macro: bool, + pub(crate) repository: Option<String>, + pub build: Option<Build>, +} + +/// Additional, build-specific data about a crate. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Build { + /// The name associated with this crate. + /// + /// This is determined by the build system that produced + /// the `rust-project.json` in question. For instance, if buck were used, + /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`. + /// + /// Do not attempt to parse the contents of this string; it is a build system-specific + /// identifier similar to [`Crate::display_name`]. + pub label: String, + /// Path corresponding to the build system-specific file defining the crate. + /// + /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with + /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not be + /// be in the `rust-project.json`. + pub build_file: Utf8PathBuf, + /// The kind of target. + /// + /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`], + /// and [`TargetKind::Test`]. This information is used to determine what sort + /// of runnable codelens to provide, if any. + pub target_kind: TargetKind, +} + +/// A template-like structure for describing runnables. +/// +/// These are used for running and debugging binaries and tests without encoding +/// build system-specific knowledge into rust-analyzer. +/// +/// # Example +/// +/// Below is an example of a test runnable. `{label}` and `{test_id}` +/// are explained in [`Runnable::args`]'s documentation. +/// +/// ```json +/// { +/// "program": "buck", +/// "args": [ +/// "test", +/// "{label}", +/// "--", +/// "{test_id}", +/// "--print-passing-details" +/// ], +/// "cwd": "/home/user/repo-root/", +/// "kind": "testOne" +/// } +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Runnable { + /// The program invoked by the runnable. + /// + /// For example, this might be `cargo`, `buck`, or `bazel`. + pub program: String, + /// The arguments passed to [`Runnable::program`]. + /// + /// The args can contain two template strings: `{label}` and `{test_id}`. + /// rust-analyzer will find and replace `{label}` with [`Build::label`] and + /// `{test_id}` with the test name. + pub args: Vec<String>, + /// The current working directory of the runnable. + pub cwd: Utf8PathBuf, + pub kind: RunnableKind, +} + +/// The kind of runnable. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RunnableKind { + Check, + + /// Can run a binary. + Run, + + /// Run a single test. 
+ TestOne, +} + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] pub struct ProjectJsonData { sysroot: Option<Utf8PathBuf>, @@ -319,7 +320,8 @@ struct CrateData { version: Option<semver::Version>, deps: Vec<Dep>, #[serde(default)] - cfg: Vec<CfgFlag>, + #[serde(with = "cfg_")] + cfg: Vec<CfgAtom>, target: Option<String>, #[serde(default)] env: FxHashMap<String, String>, @@ -334,6 +336,33 @@ struct CrateData { build: Option<BuildData>, } +mod cfg_ { + use cfg::CfgAtom; + use serde::{Deserialize, Serialize}; + + pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<CfgAtom>, D::Error> + where + D: serde::Deserializer<'de>, + { + let cfg: Vec<String> = Vec::deserialize(deserializer)?; + cfg.into_iter().map(|it| crate::parse_cfg(&it).map_err(serde::de::Error::custom)).collect() + } + pub(super) fn serialize<S>(cfg: &[CfgAtom], serializer: S) -> Result<S::Ok, S::Error> + where + S: serde::Serializer, + { + cfg.iter() + .map(|cfg| match cfg { + CfgAtom::Flag(flag) => flag.as_str().to_owned(), + CfgAtom::KeyValue { key, value } => { + format!("{}=\"{}\"", key.as_str(), value.as_str()) + } + }) + .collect::<Vec<String>>() + .serialize(serializer) + } +} + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] #[serde(rename = "edition")] enum EditionData { @@ -378,6 +407,29 @@ pub enum TargetKindData { Lib, Test, } +/// Identifies a crate by position in the crates array. +/// +/// This will differ from `CrateId` when multiple `ProjectJson` +/// workspaces are loaded. +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] +#[serde(transparent)] +pub struct CrateArrayIdx(pub usize); + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +pub(crate) struct Dep { + /// Identifies a crate by position in the crates array. + #[serde(rename = "crate")] + pub(crate) krate: CrateArrayIdx, + #[serde(serialize_with = "serialize_crate_name")] + #[serde(deserialize_with = "deserialize_crate_name")] + pub(crate) name: CrateName, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +struct CrateSource { + include_dirs: Vec<Utf8PathBuf>, + exclude_dirs: Vec<Utf8PathBuf>, +} impl From<TargetKindData> for TargetKind { fn from(data: TargetKindData) -> Self { @@ -416,30 +468,6 @@ impl From<RunnableKindData> for RunnableKind { } } -/// Identifies a crate by position in the crates array. -/// -/// This will differ from `CrateId` when multiple `ProjectJson` -/// workspaces are loaded. -#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] -#[serde(transparent)] -pub struct CrateArrayIdx(pub usize); - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub(crate) struct Dep { - /// Identifies a crate by position in the crates array. 
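The `cfg_` serde module above keeps the on-disk `rust-project.json` format unchanged: `cfg` entries remain plain strings and are parsed through `crate::parse_cfg`. A hedged round-trip sketch, assuming it sits next to `CrateData` in `project_json.rs` and that `serde_json` is available:

#[derive(serde::Serialize, serde::Deserialize)]
struct CfgOnly {
    #[serde(with = "cfg_")]
    cfg: Vec<cfg::CfgAtom>,
}

fn demo_cfg_round_trip() {
    let json = r#"{ "cfg": ["debug_assertions", "feature=\"serde\""] }"#;
    let parsed: CfgOnly = serde_json::from_str(json).unwrap();
    assert_eq!(parsed.cfg.len(), 2);
    // Serializing produces the same quoted string form again.
    assert!(serde_json::to_string(&parsed).unwrap().contains(r#"feature=\"serde\""#));
}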
- #[serde(rename = "crate")] - pub(crate) krate: CrateArrayIdx, - #[serde(serialize_with = "serialize_crate_name")] - #[serde(deserialize_with = "deserialize_crate_name")] - pub(crate) name: CrateName, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -struct CrateSource { - include_dirs: Vec<Utf8PathBuf>, - exclude_dirs: Vec<Utf8PathBuf>, -} - fn deserialize_crate_name<'de, D>(de: D) -> std::result::Result<CrateName, D::Error> where D: de::Deserializer<'de>, diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs index 599897f84a0..aa73ff89100 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs @@ -1,10 +1,12 @@ //! Runs `rustc --print cfg` to get built-in cfg flags. use anyhow::Context; +use cfg::CfgAtom; +use intern::Symbol; use rustc_hash::FxHashMap; use toolchain::Tool; -use crate::{cfg::CfgFlag, utf8_stdout, ManifestPath, Sysroot}; +use crate::{utf8_stdout, ManifestPath, Sysroot}; /// Determines how `rustc --print cfg` is discovered and invoked. pub(crate) enum RustcCfgConfig<'a> { @@ -20,15 +22,15 @@ pub(crate) fn get( target: Option<&str>, extra_env: &FxHashMap<String, String>, config: RustcCfgConfig<'_>, -) -> Vec<CfgFlag> { +) -> Vec<CfgAtom> { let _p = tracing::info_span!("rustc_cfg::get").entered(); - let mut res = Vec::with_capacity(6 * 2 + 1); + let mut res: Vec<_> = Vec::with_capacity(6 * 2 + 1); // Some nightly-only cfgs, which are required for stdlib - res.push(CfgFlag::Atom("target_thread_local".into())); + res.push(CfgAtom::Flag(Symbol::intern("target_thread_local"))); for ty in ["8", "16", "32", "64", "cas", "ptr"] { for key in ["target_has_atomic", "target_has_atomic_load_store"] { - res.push(CfgFlag::KeyValue { key: key.to_owned(), value: ty.into() }); + res.push(CfgAtom::KeyValue { key: Symbol::intern(key), value: Symbol::intern(ty) }); } } @@ -42,8 +44,7 @@ pub(crate) fn get( } }; - let rustc_cfgs = - rustc_cfgs.lines().map(|it| it.parse::<CfgFlag>()).collect::<Result<Vec<_>, _>>(); + let rustc_cfgs = rustc_cfgs.lines().map(crate::parse_cfg).collect::<Result<Vec<_>, _>>(); match rustc_cfgs { Ok(rustc_cfgs) => { diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 1eeec4cedeb..419fac3f41f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -123,32 +123,27 @@ impl Sysroot { // FIXME: Expose a builder api as loading the sysroot got way too modular and complicated. impl Sysroot { /// Attempts to discover the toolchain's sysroot from the given `dir`. 
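For reference, the lines that `rustc_cfg::get` above feeds through `crate::parse_cfg` are the raw `rustc --print cfg` output; a small sketch of that mapping:

fn demo_rustc_print_cfg() {
    // Typical `rustc --print cfg` output: one atom or key="value" pair per line.
    let stdout = "debug_assertions\npanic=\"unwind\"\ntarget_os=\"linux\"\n";
    let atoms: Result<Vec<_>, String> = stdout.lines().map(crate::parse_cfg).collect();
    assert_eq!(atoms.unwrap().len(), 3);
}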
- pub fn discover( - dir: &AbsPath, - extra_env: &FxHashMap<String, String>, - metadata: bool, - ) -> Sysroot { + pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot { let sysroot_dir = discover_sysroot_dir(dir, extra_env); let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| { discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env) }); - Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir, metadata) + Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir) } pub fn discover_with_src_override( current_dir: &AbsPath, extra_env: &FxHashMap<String, String>, sysroot_src_dir: AbsPathBuf, - metadata: bool, ) -> Sysroot { let sysroot_dir = discover_sysroot_dir(current_dir, extra_env); - Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir)), metadata) + Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir))) } - pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Sysroot { + pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot { let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir) .ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}")); - Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir), metadata) + Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir)) } pub fn discover_rustc_src(&self) -> Option<ManifestPath> { @@ -191,20 +186,15 @@ impl Sysroot { }) } - pub fn load( - sysroot_dir: Option<AbsPathBuf>, - sysroot_src_dir: Option<AbsPathBuf>, - metadata: bool, - ) -> Sysroot { - Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok), metadata) + pub fn load(sysroot_dir: Option<AbsPathBuf>, sysroot_src_dir: Option<AbsPathBuf>) -> Sysroot { + Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok)) } fn load_core_check( sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>, sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>, - metadata: bool, ) -> Sysroot { - let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir, metadata); + let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir); if sysroot.error.is_none() { if let Some(src_root) = &sysroot.src_root { let has_core = match &sysroot.mode { @@ -230,7 +220,6 @@ impl Sysroot { fn load_( sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>, sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>, - metadata: bool, ) -> Sysroot { let sysroot_dir = match sysroot_dir { Some(Ok(sysroot_dir)) => Some(sysroot_dir), @@ -263,119 +252,16 @@ impl Sysroot { } } }; - if metadata { - let sysroot: Option<_> = (|| { - let sysroot_cargo_toml = ManifestPath::try_from( - AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot/Cargo.toml")).ok()?, - ) - .ok()?; - let current_dir = - AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?; - - let mut cargo_config = CargoConfig::default(); - // the sysroot uses `public-dependency`, so we make cargo think it's a nightly - cargo_config.extra_env.insert( - "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), - "nightly".to_owned(), - ); - - let res = CargoWorkspace::fetch_metadata( - &sysroot_cargo_toml, - ¤t_dir, - &cargo_config, - &Sysroot::empty(), - &|_| (), - ) - .map_err(|e| { - tracing::error!( - "failed to load sysroot `{sysroot_src_dir}/sysroot/Cargo.toml`: {}", - e - ); - e - }); - if let Err(e) = - std::fs::remove_file(format!("{sysroot_src_dir}/sysroot/Cargo.lock")) - { - tracing::error!( - "failed to remove sysroot 
`{sysroot_src_dir}/sysroot/Cargo.lock`: {}", - e - ) - } - let mut res = res.ok()?; - - // Patch out `rustc-std-workspace-*` crates to point to the real crates. - // This is done prior to `CrateGraph` construction to avoid having duplicate `std` targets. - - let mut fake_core = None; - let mut fake_alloc = None; - let mut fake_std = None; - let mut real_core = None; - let mut real_alloc = None; - let mut real_std = None; - res.packages.iter().enumerate().for_each(|(idx, package)| { - match package.name.strip_prefix("rustc-std-workspace-") { - Some("core") => fake_core = Some((idx, package.id.clone())), - Some("alloc") => fake_alloc = Some((idx, package.id.clone())), - Some("std") => fake_std = Some((idx, package.id.clone())), - Some(_) => { - tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name) - } - None => match &*package.name { - "core" => real_core = Some(package.id.clone()), - "alloc" => real_alloc = Some(package.id.clone()), - "std" => real_std = Some(package.id.clone()), - _ => (), - }, - } - }); - - let patches = - [fake_core.zip(real_core), fake_alloc.zip(real_alloc), fake_std.zip(real_std)] - .into_iter() - .flatten(); - - let resolve = res.resolve.as_mut().expect("metadata executed with deps"); - let mut remove_nodes = vec![]; - for (idx, node) in resolve.nodes.iter_mut().enumerate() { - // Replace them in the dependency list - node.deps.iter_mut().for_each(|dep| { - if let Some((_, real)) = - patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg) - { - dep.pkg = real; - } - }); - if patches.clone().any(|((_, fake), _)| fake == node.id) { - remove_nodes.push(idx); - } - } - // Remove the fake ones from the resolve data - remove_nodes.into_iter().rev().for_each(|r| { - resolve.nodes.remove(r); - }); - // Remove the fake ones from the packages - patches.map(|((r, _), _)| r).sorted().rev().for_each(|r| { - res.packages.remove(r); - }); - - res.workspace_members = res - .packages - .iter() - .filter(|&package| RELEVANT_SYSROOT_CRATES.contains(&&*package.name)) - .map(|package| package.id.clone()) - .collect(); - let cargo_workspace = CargoWorkspace::new(res, sysroot_cargo_toml); - Some(Sysroot { - root: sysroot_dir.clone(), - src_root: Some(sysroot_src_dir.clone()), - mode: SysrootMode::Workspace(cargo_workspace), - error: None, - }) - })(); - if let Some(sysroot) = sysroot { + let library_manifest = ManifestPath::try_from(sysroot_src_dir.join("Cargo.toml")).unwrap(); + if fs::metadata(&library_manifest).is_ok() { + if let Some(sysroot) = + Self::load_library_via_cargo(library_manifest, &sysroot_dir, &sysroot_src_dir) + { return sysroot; } } + tracing::debug!("Stitching sysroot library: {sysroot_src_dir}"); + let mut stitched = Stitched { crates: Arena::default() }; for path in SYSROOT_CRATES.trim().lines() { @@ -384,7 +270,7 @@ impl Sysroot { .into_iter() .map(|it| sysroot_src_dir.join(it)) .filter_map(|it| ManifestPath::try_from(it).ok()) - .find(|it| fs::metadata(it.as_ref()).is_ok()); + .find(|it| fs::metadata(it).is_ok()); if let Some(root) = root { stitched.crates.alloc(SysrootCrateData { @@ -425,6 +311,92 @@ impl Sysroot { error: None, } } + + fn load_library_via_cargo( + library_manifest: ManifestPath, + sysroot_dir: &Option<AbsPathBuf>, + sysroot_src_dir: &AbsPathBuf, + ) -> Option<Sysroot> { + tracing::debug!("Loading library metadata: {library_manifest}"); + let mut cargo_config = CargoConfig::default(); + // the sysroot uses `public-dependency`, so we make cargo think it's a nightly + cargo_config.extra_env.insert( + 
"__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), + "nightly".to_owned(), + ); + + let mut res = match CargoWorkspace::fetch_metadata( + &library_manifest, + sysroot_src_dir, + &cargo_config, + &Sysroot::empty(), + // Make sure we never attempt to write to the sysroot + true, + &|_| (), + ) { + Ok(it) => it, + Err(e) => { + tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}"); + return None; + } + }; + + // Patch out `rustc-std-workspace-*` crates to point to the real crates. + // This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing. + let patches = { + let mut fake_core = None; + let mut fake_alloc = None; + let mut fake_std = None; + let mut real_core = None; + let mut real_alloc = None; + let mut real_std = None; + res.packages.iter().enumerate().for_each(|(idx, package)| { + match package.name.strip_prefix("rustc-std-workspace-") { + Some("core") => fake_core = Some((idx, package.id.clone())), + Some("alloc") => fake_alloc = Some((idx, package.id.clone())), + Some("std") => fake_std = Some((idx, package.id.clone())), + Some(_) => { + tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name) + } + None => match &*package.name { + "core" => real_core = Some(package.id.clone()), + "alloc" => real_alloc = Some(package.id.clone()), + "std" => real_std = Some(package.id.clone()), + _ => (), + }, + } + }); + + [fake_core.zip(real_core), fake_alloc.zip(real_alloc), fake_std.zip(real_std)] + .into_iter() + .flatten() + }; + + let resolve = res.resolve.as_mut().expect("metadata executed with deps"); + resolve.nodes.retain_mut(|node| { + // Replace `rustc-std-workspace` crate with the actual one in the dependency list + node.deps.iter_mut().for_each(|dep| { + let real_pkg = patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg); + if let Some((_, real)) = real_pkg { + dep.pkg = real; + } + }); + // Remove this node if it's a fake one + !patches.clone().any(|((_, fake), _)| fake == node.id) + }); + // Remove the fake ones from the package list + patches.map(|((idx, _), _)| idx).sorted().rev().for_each(|idx| { + res.packages.remove(idx); + }); + + let cargo_workspace = CargoWorkspace::new(res, library_manifest); + Some(Sysroot { + root: sysroot_dir.clone(), + src_root: Some(sysroot_src_dir.clone()), + mode: SysrootMode::Workspace(cargo_workspace), + error: None, + }) + } } fn discover_sysroot_dir( @@ -471,13 +443,13 @@ fn discover_sysroot_src_dir_or_add_component( get_rust_src(sysroot_path) }) .ok_or_else(|| { - format_err!( - "\ + let error = "\ can't load standard library from sysroot {sysroot_path} (discovered via `rustc --print sysroot`) -try installing the Rust source the same way you installed rustc", - ) +try installing the Rust source the same way you installed rustc"; + tracing::error!(error); + format_err!(error) }) } @@ -485,7 +457,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml"); let rustc_src = ManifestPath::try_from(rustc_src).ok()?; tracing::debug!("checking for rustc source code: {rustc_src}"); - if fs::metadata(rustc_src.as_ref()).is_ok() { + if fs::metadata(&rustc_src).is_ok() { Some(rustc_src) } else { None @@ -531,5 +503,3 @@ test"; const PROC_MACRO_DEPS: &str = " std core"; - -const RELEVANT_SYSROOT_CRATES: &[&str] = &["core", "alloc", "std", "test", "proc_macro"]; diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs 
b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 8f5457bf99a..e3bc81e1963 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -12,8 +12,8 @@ use span::FileId; use triomphe::Arc; use crate::{ - workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, ManifestPath, ProjectJson, - ProjectJsonData, ProjectWorkspace, Sysroot, WorkspaceBuildScripts, + sysroot::SysrootMode, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, + ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, WorkspaceBuildScripts, }; fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) { @@ -146,7 +146,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false) + Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir)) } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { @@ -274,10 +274,9 @@ fn crate_graph_dedup() { } #[test] +// FIXME Remove the ignore +#[ignore = "requires nightly until the sysroot ships a cargo workspace for library on stable"] fn smoke_test_real_sysroot_cargo() { - if std::env::var("SYSROOT_CARGO_METADATA").is_err() { - return; - } let file_map = &mut FxHashMap::<AbsPathBuf, FileId>::default(); let meta: Metadata = get_test_json_file("hello-world-metadata.json"); let manifest_path = @@ -286,8 +285,8 @@ fn smoke_test_real_sysroot_cargo() { let sysroot = Sysroot::discover( AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))), &Default::default(), - true, ); + assert!(matches!(sysroot.mode(), SysrootMode::Workspace(_))); let project_workspace = ProjectWorkspace { kind: ProjectWorkspaceKind::Cargo { diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 31d1c77fd07..5620dfade2d 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -20,16 +20,15 @@ use tracing::instrument; use triomphe::Arc; use crate::{ - build_scripts::BuildScriptOutput, + build_dependencies::BuildScriptOutput, cargo_workspace::{DepKind, PackageData, RustLibSource}, - cfg::{CfgFlag, CfgOverrides}, env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, project_json::{Crate, CrateArrayIdx}, rustc_cfg::{self, RustcCfgConfig}, sysroot::{SysrootCrate, SysrootMode}, target_data_layout::{self, RustcDataLayoutConfig}, - utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, - ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, + utf8_stdout, CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, + Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, }; use tracing::{debug, error, info}; @@ -55,7 +54,7 @@ pub struct ProjectWorkspace { /// `rustc --print cfg`. // FIXME: make this a per-crate map, as, eg, build.rs might have a // different target. - pub rustc_cfg: Vec<CfgFlag>, + pub rustc_cfg: Vec<CfgAtom>, /// The toolchain version used by this workspace. pub toolchain: Option<Version>, /// The target data layout queried for workspace. 
@@ -194,7 +193,7 @@ impl ProjectWorkspace { ) -> anyhow::Result<ProjectWorkspace> { let res = match manifest { ProjectManifest::ProjectJson(project_json) => { - let file = fs::read_to_string(project_json.as_ref()) + let file = fs::read_to_string(project_json) .with_context(|| format!("Failed to read json file {project_json}"))?; let data = serde_json::from_str(&file) .with_context(|| format!("Failed to deserialize json file {project_json}"))?; @@ -213,28 +212,22 @@ impl ProjectWorkspace { } ProjectManifest::CargoToml(cargo_toml) => { let sysroot = match (&config.sysroot, &config.sysroot_src) { - (Some(RustLibSource::Discover), None) => Sysroot::discover( - cargo_toml.parent(), - &config.extra_env, - config.sysroot_query_metadata, - ), + (Some(RustLibSource::Discover), None) => { + Sysroot::discover(cargo_toml.parent(), &config.extra_env) + } (Some(RustLibSource::Discover), Some(sysroot_src)) => { Sysroot::discover_with_src_override( cargo_toml.parent(), &config.extra_env, sysroot_src.clone(), - config.sysroot_query_metadata, ) } - (Some(RustLibSource::Path(path)), None) => Sysroot::discover_sysroot_src_dir( - path.clone(), - config.sysroot_query_metadata, - ), - (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => Sysroot::load( - Some(sysroot.clone()), - Some(sysroot_src.clone()), - config.sysroot_query_metadata, - ), + (Some(RustLibSource::Path(path)), None) => { + Sysroot::discover_sysroot_src_dir(path.clone()) + } + (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { + Sysroot::load(Some(sysroot.clone()), Some(sysroot_src.clone())) + } (None, _) => Sysroot::empty(), }; tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot"); @@ -260,6 +253,7 @@ impl ProjectWorkspace { ..config.clone() }, &sysroot, + false, progress, ) { Ok(meta) => { @@ -312,7 +306,8 @@ impl ProjectWorkspace { cargo_toml.parent(), config, &sysroot, - progress, + false, + progress, ) .with_context(|| { format!( @@ -350,8 +345,7 @@ impl ProjectWorkspace { extra_env: &FxHashMap<String, String>, cfg_overrides: &CfgOverrides, ) -> ProjectWorkspace { - let sysroot = - Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone(), false); + let sysroot = Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone()); let cfg_config = RustcCfgConfig::Rustc(&sysroot); let data_layout_config = RustcDataLayoutConfig::Rustc(&sysroot); let toolchain = match get_toolchain_version( @@ -386,12 +380,8 @@ impl ProjectWorkspace { ) -> anyhow::Result<ProjectWorkspace> { let dir = detached_file.parent(); let sysroot = match &config.sysroot { - Some(RustLibSource::Path(path)) => { - Sysroot::discover_sysroot_src_dir(path.clone(), config.sysroot_query_metadata) - } - Some(RustLibSource::Discover) => { - Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata) - } + Some(RustLibSource::Path(path)) => Sysroot::discover_sysroot_src_dir(path.clone()), + Some(RustLibSource::Discover) => Sysroot::discover(dir, &config.extra_env), None => Sysroot::empty(), }; @@ -412,14 +402,14 @@ impl ProjectWorkspace { ); let cargo_script = - CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, &|_| ()).ok().map( - |ws| { + CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, false, &|_| ()) + .ok() + .map(|ws| { ( CargoWorkspace::new(ws, detached_file.clone()), WorkspaceBuildScripts::default(), ) - }, - ); + }); let cargo_config_extra_env = cargo_config_env(detached_file, &config.extra_env, &sysroot); 
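A minimal sketch of calling `CargoWorkspace::fetch_metadata` with the `locked` flag introduced in this diff: regular workspaces and detached files (as in the hunks above) pass `false`, while the sysroot `library` workspace passes `true` so `cargo metadata` runs with `--locked` and never rewrites the sysroot lockfile:

fn demo_fetch_metadata(
    manifest: &ManifestPath,
    current_dir: &AbsPath,
    config: &CargoConfig,
    sysroot: &Sysroot,
) -> anyhow::Result<cargo_metadata::Metadata> {
    CargoWorkspace::fetch_metadata(manifest, current_dir, config, sysroot, /* locked */ false, &|_| ())
}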
Ok(ProjectWorkspace { @@ -651,7 +641,7 @@ impl ProjectWorkspace { ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => { iter::once(PackageRoot { is_local: true, - include: vec![file.as_ref().to_owned()], + include: vec![file.to_path_buf()], exclude: Vec::new(), }) .chain(cargo_script.iter().flat_map(|(cargo, build_scripts)| { @@ -851,7 +841,7 @@ impl ProjectWorkspace { #[instrument(skip_all)] fn project_json_to_crate_graph( - rustc_cfg: Vec<CfgFlag>, + rustc_cfg: Vec<CfgAtom>, load: FileLoader<'_>, project: &ProjectJson, sysroot: &Sysroot, @@ -863,8 +853,8 @@ fn project_json_to_crate_graph( let (public_deps, libproc_macro) = sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load); - let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned()); - let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default(); + let r_a_cfg_flag = CfgAtom::Flag(sym::rust_analyzer.clone()); + let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default(); let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project .crates() @@ -971,7 +961,7 @@ fn cargo_to_crate_graph( rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>, cargo: &CargoWorkspace, sysroot: &Sysroot, - rustc_cfg: Vec<CfgFlag>, + rustc_cfg: Vec<CfgAtom>, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, ) -> (CrateGraph, ProcMacroPaths) { @@ -1154,7 +1144,7 @@ fn cargo_to_crate_graph( } fn detached_file_to_crate_graph( - rustc_cfg: Vec<CfgFlag>, + rustc_cfg: Vec<CfgAtom>, load: FileLoader<'_>, detached_file: &ManifestPath, sysroot: &Sysroot, @@ -1317,11 +1307,10 @@ fn add_target_crate_root( None } else { let mut potential_cfg_options = cfg_options.clone(); - potential_cfg_options.extend( - pkg.features - .iter() - .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }), - ); + potential_cfg_options.extend(pkg.features.iter().map(|feat| CfgAtom::KeyValue { + key: sym::feature.clone(), + value: Symbol::intern(feat.0), + })); Some(potential_cfg_options) }; let cfg_options = { @@ -1358,12 +1347,13 @@ fn add_target_crate_root( ); if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { - Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))), - None => Some(Err("proc-macro crate is missing its build data".to_owned())), + Some(it) => match it { + Some(path) => Ok((cargo_name.to_owned(), path.clone())), + None => Err("proc-macro crate build data is missing dylib path".to_owned()), + }, + None => Err("proc-macro crate is missing its build data".to_owned()), }; - if let Some(proc_macro) = proc_macro { - proc_macros.insert(crate_id, proc_macro); - } + proc_macros.insert(crate_id, proc_macro); } crate_id @@ -1386,7 +1376,7 @@ impl SysrootPublicDeps { fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, - rustc_cfg: Vec<CfgFlag>, + rustc_cfg: Vec<CfgAtom>, load: FileLoader<'_>, ) -> (SysrootPublicDeps, Option<CrateId>) { let _p = tracing::info_span!("sysroot_to_crate_graph").entered(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index bc1b13a6497..eb95f42d755 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -2,7 +2,7 @@ name = "rust-analyzer" version = "0.0.0" homepage = "https://rust-analyzer.github.io/" -repository = "https://github.com/rust-analyzer/rust-analyzer" 
+repository.workspace = true description = "A language server for the Rust programming language" documentation = "https://rust-analyzer.github.io/manual.html" autobins = false @@ -21,7 +21,7 @@ path = "src/bin/main.rs" [dependencies] anyhow.workspace = true -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true dirs = "5.0.1" dissimilar.workspace = true itertools.workspace = true @@ -47,9 +47,10 @@ always-assert = "0.2.0" walkdir = "2.3.2" semver.workspace = true memchr = "2.7.1" +cargo_metadata.workspace = true +process-wrap.workspace = true cfg.workspace = true -flycheck.workspace = true hir-def.workspace = true hir-ty.workspace = true hir.workspace = true @@ -82,20 +83,20 @@ xshell.workspace = true test-utils.workspace = true test-fixture.workspace = true -mbe.workspace = true +syntax-bridge.workspace = true [features] jemalloc = ["jemallocator", "profile/jemalloc"] force-always-assert = ["always-assert/force"] sysroot-abi = [] in-rust-tree = [ - "sysroot-abi", - "syntax/in-rust-tree", - "parser/in-rust-tree", - "hir/in-rust-tree", - "hir-def/in-rust-tree", - "hir-ty/in-rust-tree", - "load-cargo/in-rust-tree", + "sysroot-abi", + "syntax/in-rust-tree", + "parser/in-rust-tree", + "hir/in-rust-tree", + "hir-def/in-rust-tree", + "hir-ty/in-rust-tree", + "load-cargo/in-rust-tree", ] [lints] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 6a980a153c9..42953d3b833 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -14,6 +14,7 @@ use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc}; use anyhow::Context; use lsp_server::Connection; +use paths::Utf8PathBuf; use rust_analyzer::{ cli::flags, config::{Config, ConfigChange, ConfigErrors}, @@ -189,6 +190,7 @@ fn run_server() -> anyhow::Result<()> { let root_path = match root_uri .and_then(|it| it.to_file_path().ok()) .map(patch_path_prefix) + .and_then(|it| Utf8PathBuf::from_path_buf(it).ok()) .and_then(|it| AbsPathBuf::try_from(it).ok()) { Some(it) => it, @@ -218,6 +220,7 @@ fn run_server() -> anyhow::Result<()> { .into_iter() .filter_map(|it| it.uri.to_file_path().ok()) .map(patch_path_prefix) + .filter_map(|it| Utf8PathBuf::from_path_buf(it).ok()) .filter_map(|it| AbsPathBuf::try_from(it).ok()) .collect::<Vec<_>>() }) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 380105d2c21..06f4ba815d0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -23,7 +23,7 @@ use ide::{ use ide_db::{ base_db::{ salsa::{self, debug::DebugQueryTable, ParallelDatabase}, - SourceDatabase, SourceDatabaseExt, + SourceDatabase, SourceRootDatabase, }, EditionedFileId, LineIndexDatabase, SnippetCap, }; @@ -64,7 +64,6 @@ impl flags::AnalysisStats { true => None, false => Some(RustLibSource::Discover), }, - sysroot_query_metadata: self.query_sysroot_metadata, ..Default::default() }; let no_progress = &|_| (); @@ -977,7 +976,7 @@ impl flags::AnalysisStats { let mut sw = self.stop_watch(); for &file_id in &file_ids { - _ = analysis.diagnostics( + _ = analysis.full_diagnostics( &DiagnosticsConfig { enabled: true, proc_macros_enabled: true, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index 4ddeb4ab1b0..cdac0e5ef5c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashSet; use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity}; -use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase}; +use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase}; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use crate::cli::flags; @@ -63,7 +63,7 @@ impl flags::Diagnostics { _vfs.file_path(file_id.into()) ); for diagnostic in analysis - .diagnostics( + .full_diagnostics( &DiagnosticsConfig::test_sample(), AssistResolveStrategy::None, file_id.into(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index b3b8ab9a404..2a3e74c680b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -71,9 +71,6 @@ xflags::xflags! { optional --with-deps /// Don't load sysroot crates (`std`, `core` & friends). optional --no-sysroot - /// Run cargo metadata on the sysroot to analyze its third-party dependencies. - /// Requires --no-sysroot to not be set. - optional --query-sysroot-metadata /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis. optional --disable-build-scripts @@ -214,7 +211,6 @@ pub struct AnalysisStats { pub only: Option<String>, pub with_deps: bool, pub no_sysroot: bool, - pub query_sysroot_metadata: bool, pub disable_build_scripts: bool, pub disable_proc_macros: bool, pub skip_lowering: bool, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 3ff9be7102f..016f0edc2dd 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -276,7 +276,7 @@ impl flags::Lsif { eprintln!("Generating LSIF started..."); let now = Instant::now(); let cargo_config = - CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; + &CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let no_progress = &|_| (); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, @@ -284,9 +284,11 @@ impl flags::Lsif { prefill_caches: false, }; let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path)); - let manifest = ProjectManifest::discover_single(&path)?; + let root = ProjectManifest::discover_single(&path)?; + let mut workspace = ProjectWorkspace::load(root, cargo_config, no_progress)?; - let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?; + let build_scripts = workspace.run_build_scripts(cargo_config, no_progress)?; + workspace.set_build_scripts(build_scripts); let (db, vfs, _proc_macro) = load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; @@ -294,7 +296,7 @@ impl flags::Lsif { let db = host.raw_database(); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = StaticIndex::compute(&analysis, &path.clone().into()); let mut lsif = LsifManager::new(&analysis, db, &vfs); lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData { diff --git 
a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs index 10cb2d5ad6e..157ef43dd0a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs @@ -2,7 +2,7 @@ use hir::{Crate, Module}; use hir_ty::db::HirDatabase; -use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase}; +use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase}; use profile::StopWatch; use project_model::{CargoConfig, RustLibSource}; use syntax::TextRange; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 31565878d84..75efdfd7dd8 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -8,6 +8,7 @@ use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::Path use hir::{ChangeWithProcMacros, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use itertools::Either; +use paths::Utf8PathBuf; use profile::StopWatch; use project_model::target_data_layout::RustcDataLayoutConfig; use project_model::{ @@ -64,12 +65,12 @@ impl Tester { fn new() -> Result<Self> { let mut path = std::env::temp_dir(); path.push("ra-rustc-test.rs"); - let tmp_file = AbsPathBuf::try_from(path).unwrap(); + let tmp_file = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap(); std::fs::write(&tmp_file, "")?; let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; - let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false); + let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env); let data_layout = target_data_layout::get( RustcDataLayoutConfig::Rustc(&sysroot), None, @@ -154,7 +155,7 @@ impl Tester { let root_file = self.root_file; move || { let res = std::panic::catch_unwind(move || { - analysis.diagnostics( + analysis.full_diagnostics( diagnostic_config, ide::AssistResolveStrategy::None, root_file, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index ee134b6c507..2fc0ef8d4da 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -63,7 +63,7 @@ impl flags::Scip { let db = host.raw_database(); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = StaticIndex::compute(&analysis, &root.clone().into()); let metadata = scip_types::Metadata { version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(), @@ -334,6 +334,7 @@ mod test { use ide::{FilePosition, TextSize}; use scip::symbol::format_symbol; use test_fixture::ChangeFixture; + use vfs::VfsPath; fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) { let mut host = AnalysisHost::default(); @@ -351,7 +352,8 @@ mod test { let (host, position) = position(ra_fixture); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let FilePosition { file_id, offset } = position; @@ -384,7 +386,7 @@ mod test { fn basic() { check_symbol( r#" -//- /lib.rs crate:main deps:foo +//- /workspace/lib.rs crate:main deps:foo use 
foo::example_mod::func; fn main() { func$0(); @@ -485,7 +487,7 @@ pub mod module { fn symbol_for_field() { check_symbol( r#" - //- /lib.rs crate:main deps:foo + //- /workspace/lib.rs crate:main deps:foo use foo::St; fn main() { let x = St { a$0: 2 }; @@ -503,7 +505,7 @@ pub mod module { fn symbol_for_param() { check_symbol( r#" -//- /lib.rs crate:main deps:foo +//- /workspace/lib.rs crate:main deps:foo use foo::example_mod::func; fn main() { func(42); @@ -521,7 +523,7 @@ pub mod example_mod { fn symbol_for_closure_param() { check_symbol( r#" -//- /lib.rs crate:main deps:foo +//- /workspace/lib.rs crate:main deps:foo use foo::example_mod::func; fn main() { func(); @@ -541,7 +543,7 @@ pub mod example_mod { fn local_symbol_for_local() { check_symbol( r#" - //- /lib.rs crate:main deps:foo + //- /workspace/lib.rs crate:main deps:foo use foo::module::func; fn main() { func(); @@ -561,13 +563,13 @@ pub mod example_mod { fn global_symbol_for_pub_struct() { check_symbol( r#" - //- /lib.rs crate:main + //- /workspace/lib.rs crate:main mod foo; fn main() { let _bar = foo::Bar { i: 0 }; } - //- /foo.rs + //- /workspace/foo.rs pub struct Bar$0 { pub i: i32, } @@ -580,13 +582,13 @@ pub mod example_mod { fn global_symbol_for_pub_struct_reference() { check_symbol( r#" - //- /lib.rs crate:main + //- /workspace/lib.rs crate:main mod foo; fn main() { let _bar = foo::Bar$0 { i: 0 }; } - //- /foo.rs + //- /workspace/foo.rs pub struct Bar { pub i: i32, } @@ -599,7 +601,7 @@ pub mod example_mod { fn symbol_for_for_type_alias() { check_symbol( r#" - //- /lib.rs crate:main + //- /workspace/lib.rs crate:main pub type MyTypeAlias$0 = u8; "#, "rust-analyzer cargo main . MyTypeAlias#", @@ -615,7 +617,8 @@ pub mod example_mod { host.raw_database_mut().apply_change(change_fixture.change); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let file = si.files.first().unwrap(); let (_, token_id) = file.tokens.first().unwrap(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs index 7f24fa2835e..3caa4879887 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs @@ -1,7 +1,7 @@ //! Applies structured search replace rules from the command line. use anyhow::Context; -use ide_db::EditionedFileId; +use ide_db::{base_db::SourceDatabase, EditionedFileId}; use ide_ssr::MatchFinder; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use project_model::{CargoConfig, RustLibSource}; @@ -10,7 +10,6 @@ use crate::cli::flags; impl flags::Ssr { pub fn run(self) -> anyhow::Result<()> { - use ide_db::base_db::SourceDatabaseExt; let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let load_cargo_config = LoadCargoConfig { @@ -46,7 +45,7 @@ impl flags::Search { /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful /// for much else. 
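The `rustc_tests.rs` and `bin/main.rs` hunks above both use the same two-step path conversion this diff standardizes on; a hedged helper sketch using the `paths` re-exports as imported in this diff:

fn demo_to_abs_path(path: std::path::PathBuf) -> Option<paths::AbsPathBuf> {
    // `from_path_buf` fails on non-UTF-8 paths, `try_from` on relative ones.
    let utf8 = paths::Utf8PathBuf::from_path_buf(path).ok()?;
    paths::AbsPathBuf::try_from(utf8).ok()
}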
pub fn run(self) -> anyhow::Result<()> { - use ide_db::base_db::SourceDatabaseExt; + use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { diff --git a/src/tools/rust-analyzer/crates/flycheck/src/command.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs index 38c7c81f57a..f1009eb4660 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/command.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs @@ -1,5 +1,5 @@ -//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread and -//! parse its stdout/stderr. +//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread +//! and parse its stdout/stderr. use std::{ ffi::OsString, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index b9b8cfdfc9e..02f5d75136e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -7,7 +7,6 @@ use std::{fmt, iter, ops::Not, sync::OnceLock}; use cfg::{CfgAtom, CfgDiff}; use dirs::config_dir; -use flycheck::{CargoOptions, FlycheckConfig}; use hir::Symbol; use ide::{ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, @@ -35,8 +34,9 @@ use triomphe::Arc; use vfs::{AbsPath, AbsPathBuf, VfsPath}; use crate::{ - capabilities::ClientCapabilities, diagnostics::DiagnosticsMapConfig, + flycheck::{CargoOptions, FlycheckConfig}, + lsp::capabilities::ClientCapabilities, lsp_ext::{WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope}, }; @@ -143,13 +143,6 @@ config_data! { /// /// This option does not take effect until rust-analyzer is restarted. cargo_sysroot: Option<String> = Some("discover".to_owned()), - /// Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze - /// third-party dependencies of the standard libraries. - /// - /// This will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer - /// will attempt to clean up afterwards, but nevertheless requires the location to be - /// writable to. - cargo_sysrootQueryMetadata: bool = false, /// Relative path to the sysroot library sources. If left unset, this will default to /// `{cargo.sysroot}/lib/rustlib/src/rust/library`. 
/// @@ -1839,7 +1832,6 @@ impl Config { }); let sysroot_src = self.cargo_sysrootSrc(None).as_ref().map(|sysroot| self.root_path.join(sysroot)); - let sysroot_query_metadata = self.cargo_sysrootQueryMetadata(None); CargoConfig { all_targets: *self.cargo_allTargets(None), @@ -1852,7 +1844,6 @@ impl Config { }, target: self.cargo_target(None).clone(), sysroot, - sysroot_query_metadata: *sysroot_query_metadata, sysroot_src, rustc_source, cfg_overrides: project_model::CfgOverrides { @@ -1908,7 +1899,7 @@ impl Config { *self.check_workspace(None) } - pub fn cargo_test_options(&self) -> CargoOptions { + pub(crate) fn cargo_test_options(&self) -> CargoOptions { CargoOptions { target_triples: self.cargo_target(None).clone().into_iter().collect(), all_targets: false, @@ -1924,7 +1915,7 @@ impl Config { } } - pub fn flycheck(&self) -> FlycheckConfig { + pub(crate) fn flycheck(&self) -> FlycheckConfig { match &self.check_overrideCommand(None) { Some(args) if !args.is_empty() => { let mut args = args.clone(); @@ -1934,16 +1925,18 @@ impl Config { args, extra_env: self.check_extra_env(), invocation_strategy: match self.check_invocationStrategy(None) { - InvocationStrategy::Once => flycheck::InvocationStrategy::Once, + InvocationStrategy::Once => crate::flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { - flycheck::InvocationStrategy::PerWorkspace + crate::flycheck::InvocationStrategy::PerWorkspace } }, invocation_location: match self.check_invocationLocation(None) { InvocationLocation::Root => { - flycheck::InvocationLocation::Root(self.root_path.clone()) + crate::flycheck::InvocationLocation::Root(self.root_path.clone()) + } + InvocationLocation::Workspace => { + crate::flycheck::InvocationLocation::Workspace } - InvocationLocation::Workspace => flycheck::InvocationLocation::Workspace, }, } } @@ -3450,7 +3443,7 @@ mod tests { let s = remove_ws(&schema); if !p.contains(&s) { package_json.replace_range(start..end, &schema); - ensure_file_contents(&package_json_path, &package_json) + ensure_file_contents(package_json_path.as_std_path(), &package_json) } } @@ -3458,7 +3451,7 @@ mod tests { fn generate_config_documentation() { let docs_path = project_root().join("docs/user/generated_config.adoc"); let expected = FullConfigInput::manual(); - ensure_file_contents(&docs_path, &expected); + ensure_file_contents(docs_path.as_std_path(), &expected); } fn remove_ws(text: &str) -> String { @@ -3467,13 +3460,8 @@ mod tests { #[test] fn proc_macro_srv_null() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3487,32 +3475,22 @@ mod tests { #[test] fn proc_macro_srv_abs() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ "procMacro" : { - "server": project_root().display().to_string(), + "server": project_root().to_string(), }})); (config, _, _) = config.apply_change(change); - assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap())); + assert_eq!(config.proc_macro_srv(), 
Some(AbsPathBuf::assert(project_root()))); } #[test] fn proc_macro_srv_rel() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); @@ -3531,13 +3509,8 @@ mod tests { #[test] fn cargo_target_dir_unset() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); @@ -3554,13 +3527,8 @@ mod tests { #[test] fn cargo_target_dir_subdir() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3577,13 +3545,8 @@ mod tests { #[test] fn cargo_target_dir_relative_dir() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3603,13 +3566,8 @@ mod tests { #[test] fn toml_unknown_key() { - let config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index b23e7b7e98c..034c49c3d5c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -11,7 +11,7 @@ use rustc_hash::FxHashSet; use stdx::iter_eq_by; use triomphe::Arc; -use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext}; +use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind}; pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>; @@ -28,7 +28,8 @@ pub(crate) type DiagnosticsGeneration = usize; #[derive(Debug, Default, Clone)] pub(crate) struct DiagnosticCollection { // FIXME: should be IntMap<FileId, Vec<ra_id::Diagnostic>> - pub(crate) native: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, + pub(crate) native_syntax: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, + pub(crate) native_semantic: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, // FIXME: should be Vec<flycheck::Diagnostic> pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>, pub(crate) check_fixes: CheckFixes, @@ -64,7 +65,8 @@ impl DiagnosticCollection { } pub(crate) fn clear_native_for(&mut self, file_id: FileId) { - self.native.remove(&file_id); + self.native_syntax.remove(&file_id); + self.native_semantic.remove(&file_id); self.changes.insert(file_id); } @@ -88,43 +90,51 @@ impl DiagnosticCollection { self.changes.insert(file_id); } - pub(crate) fn set_native_diagnostics( - &mut 
self, - generation: DiagnosticsGeneration, - file_id: FileId, - mut diagnostics: Vec<lsp_types::Diagnostic>, - ) { - diagnostics.sort_by_key(|it| (it.range.start, it.range.end)); - if let Some((old_gen, existing_diagnostics)) = self.native.get_mut(&file_id) { - if existing_diagnostics.len() == diagnostics.len() - && iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| { - are_diagnostics_equal(new, existing) - }) - { - // don't signal an update if the diagnostics are the same - return; + pub(crate) fn set_native_diagnostics(&mut self, kind: DiagnosticsTaskKind) { + let (generation, diagnostics, target) = match kind { + DiagnosticsTaskKind::Syntax(generation, diagnostics) => { + (generation, diagnostics, &mut self.native_syntax) + } + DiagnosticsTaskKind::Semantic(generation, diagnostics) => { + (generation, diagnostics, &mut self.native_semantic) } - if *old_gen < generation || generation == 0 { - self.native.insert(file_id, (generation, diagnostics)); + }; + + for (file_id, mut diagnostics) in diagnostics { + diagnostics.sort_by_key(|it| (it.range.start, it.range.end)); + + if let Some((old_gen, existing_diagnostics)) = target.get_mut(&file_id) { + if existing_diagnostics.len() == diagnostics.len() + && iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| { + are_diagnostics_equal(new, existing) + }) + { + // don't signal an update if the diagnostics are the same + continue; + } + if *old_gen < generation || generation == 0 { + target.insert(file_id, (generation, diagnostics)); + } else { + existing_diagnostics.extend(diagnostics); + // FIXME: Doing the merge step of a merge sort here would be a bit more performant + // but eh + existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end)) + } } else { - existing_diagnostics.extend(diagnostics); - // FIXME: Doing the merge step of a merge sort here would be a bit more performant - // but eh - existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end)) + target.insert(file_id, (generation, diagnostics)); } - } else { - self.native.insert(file_id, (generation, diagnostics)); + self.changes.insert(file_id); } - self.changes.insert(file_id); } pub(crate) fn diagnostics_for( &self, file_id: FileId, ) -> impl Iterator<Item = &lsp_types::Diagnostic> { - let native = self.native.get(&file_id).into_iter().flat_map(|(_, d)| d); + let native_syntax = self.native_syntax.get(&file_id).into_iter().flat_map(|(_, d)| d); + let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d); let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten(); - native.chain(check) + native_syntax.chain(native_semantic).chain(check) } pub(crate) fn take_changes(&mut self) -> Option<IntSet<FileId>> { @@ -147,10 +157,16 @@ fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagno && left.message == right.message } +pub(crate) enum NativeDiagnosticsFetchKind { + Syntax, + Semantic, +} + pub(crate) fn fetch_native_diagnostics( - snapshot: GlobalStateSnapshot, + snapshot: &GlobalStateSnapshot, subscriptions: std::sync::Arc<[FileId]>, slice: std::ops::Range<usize>, + kind: NativeDiagnosticsFetchKind, ) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> { let _p = tracing::info_span!("fetch_native_diagnostics").entered(); let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned()); @@ -180,14 +196,17 @@ pub(crate) fn fetch_native_diagnostics( let line_index = snapshot.file_line_index(file_id).ok()?; let source_root = 
snapshot.analysis.source_root_id(file_id).ok()?; - let diagnostics = snapshot - .analysis - .diagnostics( - &snapshot.config.diagnostics(Some(source_root)), - ide::AssistResolveStrategy::None, - file_id, - ) - .ok()? + let config = &snapshot.config.diagnostics(Some(source_root)); + let diagnostics = match kind { + NativeDiagnosticsFetchKind::Syntax => { + snapshot.analysis.syntax_diagnostics(config, file_id).ok()? + } + NativeDiagnosticsFetchKind::Semantic => snapshot + .analysis + .semantic_diagnostics(config, ide::AssistResolveStrategy::None, file_id) + .ok()?, + }; + let diagnostics = diagnostics .into_iter() .filter_map(|d| { if d.range.file_id == file_id { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index defa464f2ba..208a70bc02a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -1,7 +1,7 @@ //! This module provides the functionality needed to convert diagnostics from //! `cargo check` json format to the LSP diagnostic format. -use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan}; +use crate::flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan}; use itertools::Itertools; use rustc_hash::FxHashMap; use stdx::format_to; @@ -17,8 +17,8 @@ use super::{DiagnosticsMapConfig, Fix}; /// Determines the LSP severity from a diagnostic fn diagnostic_severity( config: &DiagnosticsMapConfig, - level: flycheck::DiagnosticLevel, - code: Option<flycheck::DiagnosticCode>, + level: crate::flycheck::DiagnosticLevel, + code: Option<crate::flycheck::DiagnosticCode>, ) -> Option<lsp_types::DiagnosticSeverity> { let res = match level { DiagnosticLevel::Ice => lsp_types::DiagnosticSeverity::ERROR, @@ -181,7 +181,7 @@ enum MappedRustChildDiagnostic { fn map_rust_child_diagnostic( config: &DiagnosticsMapConfig, workspace_root: &AbsPath, - rd: &flycheck::Diagnostic, + rd: &crate::flycheck::Diagnostic, snap: &GlobalStateSnapshot, ) -> MappedRustChildDiagnostic { let spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect(); @@ -284,7 +284,7 @@ pub(crate) struct MappedRustDiagnostic { /// If the diagnostic has no primary span this will return `None` pub(crate) fn map_rust_diagnostic_to_lsp( config: &DiagnosticsMapConfig, - rd: &flycheck::Diagnostic, + rd: &crate::flycheck::Diagnostic, workspace_root: &AbsPath, snap: &GlobalStateSnapshot, ) -> Vec<MappedRustDiagnostic> { @@ -537,7 +537,8 @@ mod tests { } fn check_with_config(config: DiagnosticsMapConfig, diagnostics_json: &str, expect: ExpectFile) { - let diagnostic: flycheck::Diagnostic = serde_json::from_str(diagnostics_json).unwrap(); + let diagnostic: crate::flycheck::Diagnostic = + serde_json::from_str(diagnostics_json).unwrap(); let workspace_root: &AbsPath = Utf8Path::new("/test/").try_into().unwrap(); let (sender, _) = crossbeam_channel::unbounded(); let state = GlobalState::new( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs deleted file mode 100644 index 3fcfb4a1b08..00000000000 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs +++ /dev/null @@ -1,53 +0,0 @@ -//! 
Generate minimal `TextEdit`s from different text versions -use dissimilar::Chunk; -use ide::{TextEdit, TextRange, TextSize}; - -pub(crate) fn diff(left: &str, right: &str) -> TextEdit { - let chunks = dissimilar::diff(left, right); - textedit_from_chunks(chunks) -} - -fn textedit_from_chunks(chunks: Vec<dissimilar::Chunk<'_>>) -> TextEdit { - let mut builder = TextEdit::builder(); - let mut pos = TextSize::default(); - - let mut chunks = chunks.into_iter().peekable(); - while let Some(chunk) = chunks.next() { - if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) { - chunks.next().unwrap(); - let deleted_len = TextSize::of(deleted); - builder.replace(TextRange::at(pos, deleted_len), inserted.into()); - pos += deleted_len; - continue; - } - - match chunk { - Chunk::Equal(text) => { - pos += TextSize::of(text); - } - Chunk::Delete(deleted) => { - let deleted_len = TextSize::of(deleted); - builder.delete(TextRange::at(pos, deleted_len)); - pos += deleted_len; - } - Chunk::Insert(inserted) => { - builder.insert(pos, inserted.into()); - } - } - } - builder.finish() -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn diff_applies() { - let mut original = String::from("fn foo(a:u32){\n}"); - let result = "fn foo(a: u32) {}"; - let edit = diff(&original, result); - edit.apply(&mut original); - assert_eq!(original, result); - } -} diff --git a/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs index b6e4495bc6d..7e9162eee6e 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs @@ -1,4 +1,5 @@ -//! A `cargo-metadata`-equivalent for non-Cargo build systems. +//! Infrastructure for lazy project discovery. Currently only support rust-project.json discovery +//! via a custom discover command. use std::{io, process::Command}; use crossbeam_channel::Sender; @@ -9,19 +10,19 @@ use serde_json::Value; use crate::command::{CommandHandle, ParseFromLine}; -pub const ARG_PLACEHOLDER: &str = "{arg}"; +pub(crate) const ARG_PLACEHOLDER: &str = "{arg}"; /// A command wrapper for getting a `rust-project.json`. /// -/// This is analogous to `cargo-metadata`, but for non-Cargo build systems. -pub struct Discover { +/// This is analogous to discovering a cargo project + running `cargo-metadata` on it, but for non-Cargo build systems. +pub(crate) struct DiscoverCommand { command: Vec<String>, sender: Sender<DiscoverProjectMessage>, } #[derive(PartialEq, Clone, Debug, Serialize)] #[serde(rename_all = "camelCase")] -pub enum DiscoverArgument { +pub(crate) enum DiscoverArgument { Path(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), Buildfile(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), } @@ -34,14 +35,14 @@ where se.serialize_str(path.as_str()) } -impl Discover { - /// Create a new [Discover]. - pub fn new(sender: Sender<DiscoverProjectMessage>, command: Vec<String>) -> Self { +impl DiscoverCommand { + /// Create a new [DiscoverCommand]. + pub(crate) fn new(sender: Sender<DiscoverProjectMessage>, command: Vec<String>) -> Self { Self { sender, command } } /// Spawn the command inside [Discover] and report progress, if any. 
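// `DiscoverCommand` stores the user-configured discover command as a `Vec<String>` whose
// elements may contain the literal placeholder "{arg}" (see `ARG_PLACEHOLDER` above).
// Below is a rough, self-contained sketch of how such a template can be expanded before
// spawning; the real `spawn` presumably substitutes the serialized `DiscoverArgument`,
// which is assumed here and stood in for by a plain JSON string, and the command name is
// made up for the example.
fn expand_discover_command(template: &[String], arg_json: &str) -> (String, Vec<String>) {
    let program = template[0].clone();
    let args = template[1..]
        .iter()
        .map(|a| if a == "{arg}" { arg_json.to_owned() } else { a.clone() })
        .collect();
    (program, args)
}

fn main() {
    let template: Vec<String> =
        ["my-discover-tool", "rust-project", "{arg}"].map(str::to_owned).into();
    let (program, args) = expand_discover_command(&template, r#"{"path":"/w/Cargo.toml"}"#);
    assert_eq!(program, "my-discover-tool");
    assert_eq!(args, ["rust-project", r#"{"path":"/w/Cargo.toml"}"#]);
}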
- pub fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result<DiscoverHandle> { + pub(crate) fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result<DiscoverHandle> { let command = &self.command[0]; let args = &self.command[1..]; @@ -65,7 +66,7 @@ impl Discover { /// A handle to a spawned [Discover]. #[derive(Debug)] -pub struct DiscoverHandle { +pub(crate) struct DiscoverHandle { _handle: CommandHandle<DiscoverProjectMessage>, } @@ -81,7 +82,7 @@ enum DiscoverProjectData { } #[derive(Debug, PartialEq, Clone)] -pub enum DiscoverProjectMessage { +pub(crate) enum DiscoverProjectMessage { Finished { project: ProjectJsonData, buildfile: AbsPathBuf }, Error { error: String, source: Option<String> }, Progress { message: String }, diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 3dd2a91d8fd..8f2e7d1ca26 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -1,59 +1,48 @@ -//! Flycheck provides the functionality needed to run `cargo check` or -//! another compatible command (f.x. clippy) in a background thread and provide +//! Flycheck provides the functionality needed to run `cargo check` to provide //! LSP diagnostics based on the output of the command. -// FIXME: This crate now handles running `cargo test` needed in the test explorer in -// addition to `cargo check`. Either split it into 3 crates (one for test, one for check -// and one common utilities) or change its name and docs to reflect the current state. - use std::{fmt, io, process::Command, time::Duration}; -use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; +use crossbeam_channel::{select_biased, unbounded, Receiver, Sender}; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize; -pub use cargo_metadata::diagnostic::{ +pub(crate) use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, - DiagnosticSpanMacroExpansion, }; use toolchain::Tool; -mod command; -pub mod project_json; -mod test_runner; - -use command::{CommandHandle, ParseFromLine}; -pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState, TestTarget}; +use crate::command::{CommandHandle, ParseFromLine}; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] -pub enum InvocationStrategy { +pub(crate) enum InvocationStrategy { Once, #[default] PerWorkspace, } #[derive(Clone, Debug, Default, PartialEq, Eq)] -pub enum InvocationLocation { +pub(crate) enum InvocationLocation { Root(AbsPathBuf), #[default] Workspace, } #[derive(Clone, Debug, PartialEq, Eq)] -pub struct CargoOptions { - pub target_triples: Vec<String>, - pub all_targets: bool, - pub no_default_features: bool, - pub all_features: bool, - pub features: Vec<String>, - pub extra_args: Vec<String>, - pub extra_env: FxHashMap<String, String>, - pub target_dir: Option<Utf8PathBuf>, +pub(crate) struct CargoOptions { + pub(crate) target_triples: Vec<String>, + pub(crate) all_targets: bool, + pub(crate) no_default_features: bool, + pub(crate) all_features: bool, + pub(crate) features: Vec<String>, + pub(crate) extra_args: Vec<String>, + pub(crate) extra_env: FxHashMap<String, String>, + pub(crate) target_dir: Option<Utf8PathBuf>, } impl CargoOptions { - fn apply_on_command(&self, cmd: &mut Command) { + pub(crate) fn apply_on_command(&self, cmd: &mut Command) { for target in &self.target_triples { cmd.args(["--target", 
target.as_str()]); } @@ -79,7 +68,7 @@ impl CargoOptions { } #[derive(Clone, Debug, PartialEq, Eq)] -pub enum FlycheckConfig { +pub(crate) enum FlycheckConfig { CargoCommand { command: String, options: CargoOptions, @@ -110,7 +99,7 @@ impl fmt::Display for FlycheckConfig { /// diagnostics based on the output. /// The spawned thread is shut down when this struct is dropped. #[derive(Debug)] -pub struct FlycheckHandle { +pub(crate) struct FlycheckHandle { // XXX: drop order is significant sender: Sender<StateChange>, _thread: stdx::thread::JoinHandle, @@ -118,9 +107,9 @@ pub struct FlycheckHandle { } impl FlycheckHandle { - pub fn spawn( + pub(crate) fn spawn( id: usize, - sender: Box<dyn Fn(Message) + Send>, + sender: Sender<FlycheckMessage>, config: FlycheckConfig, sysroot_root: Option<AbsPathBuf>, workspace_root: AbsPathBuf, @@ -137,28 +126,28 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check. - pub fn restart_workspace(&self, saved_file: Option<AbsPathBuf>) { + pub(crate) fn restart_workspace(&self, saved_file: Option<AbsPathBuf>) { self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. - pub fn restart_for_package(&self, package: String) { + pub(crate) fn restart_for_package(&self, package: String) { self.sender .send(StateChange::Restart { package: Some(package), saved_file: None }) .unwrap(); } /// Stop this cargo check worker. - pub fn cancel(&self) { + pub(crate) fn cancel(&self) { self.sender.send(StateChange::Cancel).unwrap(); } - pub fn id(&self) -> usize { + pub(crate) fn id(&self) -> usize { self.id } } -pub enum Message { +pub(crate) enum FlycheckMessage { /// Request adding a diagnostic with fixes included to a file AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic }, @@ -173,19 +162,19 @@ pub enum Message { }, } -impl fmt::Debug for Message { +impl fmt::Debug for FlycheckMessage { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Message::AddDiagnostic { id, workspace_root, diagnostic } => f + FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => f .debug_struct("AddDiagnostic") .field("id", id) .field("workspace_root", workspace_root) .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code)) .finish(), - Message::ClearDiagnostics { id } => { + FlycheckMessage::ClearDiagnostics { id } => { f.debug_struct("ClearDiagnostics").field("id", id).finish() } - Message::Progress { id, progress } => { + FlycheckMessage::Progress { id, progress } => { f.debug_struct("Progress").field("id", id).field("progress", progress).finish() } } @@ -193,7 +182,7 @@ impl fmt::Debug for Message { } #[derive(Debug)] -pub enum Progress { +pub(crate) enum Progress { DidStart, DidCheckCrate(String), DidFinish(io::Result<()>), @@ -210,7 +199,7 @@ enum StateChange { struct FlycheckActor { /// The workspace id of this flycheck instance. 
id: usize, - sender: Box<dyn Fn(Message) + Send>, + sender: Sender<FlycheckMessage>, config: FlycheckConfig, manifest_path: Option<AbsPathBuf>, /// Either the workspace root of the workspace we are flychecking, @@ -241,12 +230,12 @@ enum FlycheckStatus { Finished, } -pub const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; +pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; impl FlycheckActor { fn new( id: usize, - sender: Box<dyn Fn(Message) + Send>, + sender: Sender<FlycheckMessage>, config: FlycheckConfig, sysroot_root: Option<AbsPathBuf>, workspace_root: AbsPathBuf, @@ -267,17 +256,18 @@ impl FlycheckActor { } fn report_progress(&self, progress: Progress) { - self.send(Message::Progress { id: self.id, progress }); + self.send(FlycheckMessage::Progress { id: self.id, progress }); } fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> { - if let Ok(msg) = inbox.try_recv() { - // give restarts a preference so check outputs don't block a restart or stop - return Some(Event::RequestStateChange(msg)); - } - select! { + let Some(command_receiver) = &self.command_receiver else { + return inbox.recv().ok().map(Event::RequestStateChange); + }; + + // Biased to give restarts a preference so check outputs don't block a restart or stop + select_biased! { recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange), - recv(self.command_receiver.as_ref().unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), + recv(command_receiver) -> msg => Some(Event::CheckEvent(msg.ok())), } } @@ -340,7 +330,7 @@ impl FlycheckActor { ); } if self.status == FlycheckStatus::Started { - self.send(Message::ClearDiagnostics { id: self.id }); + self.send(FlycheckMessage::ClearDiagnostics { id: self.id }); } self.report_progress(Progress::DidFinish(res)); self.status = FlycheckStatus::Finished; @@ -362,9 +352,9 @@ impl FlycheckActor { "diagnostic received" ); if self.status == FlycheckStatus::Started { - self.send(Message::ClearDiagnostics { id: self.id }); + self.send(FlycheckMessage::ClearDiagnostics { id: self.id }); } - self.send(Message::AddDiagnostic { + self.send(FlycheckMessage::AddDiagnostic { id: self.id, workspace_root: self.root.clone(), diagnostic: msg, @@ -399,7 +389,7 @@ impl FlycheckActor { package: Option<&str>, saved_file: Option<&AbsPath>, ) -> Option<Command> { - let (mut cmd, args) = match &self.config { + match &self.config { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { let mut cmd = Command::new(Tool::Cargo.path()); if let Some(sysroot_root) = &self.sysroot_root { @@ -430,7 +420,8 @@ impl FlycheckActor { cmd.arg("--keep-going"); options.apply_on_command(&mut cmd); - (cmd, options.extra_args.clone()) + cmd.args(&options.extra_args); + Some(cmd) } FlycheckConfig::CustomCommand { command, @@ -459,38 +450,36 @@ impl FlycheckActor { } } - if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) { - // If the custom command has a $saved_file placeholder, and - // we're saving a file, replace the placeholder in the arguments. - if let Some(saved_file) = saved_file { - let args = args - .iter() - .map(|arg| { - if arg == SAVED_FILE_PLACEHOLDER { - saved_file.to_string() - } else { - arg.clone() - } - }) - .collect(); - (cmd, args) - } else { - // The custom command has a $saved_file placeholder, - // but we had an IDE event that wasn't a file save. Do nothing. - return None; + // If the custom command has a $saved_file placeholder, and + // we're saving a file, replace the placeholder in the arguments. 
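// A self-contained illustration of the behavior the comment above describes and the
// rewritten loop below implements: "$saved_file" in a custom check command is replaced
// by the just-saved path, and if the placeholder is present but nothing was saved, no
// command is run at all. The function name and shape are illustrative, not the actual
// FlycheckActor internals.
fn expand_custom_check(args: &[String], saved_file: Option<&str>) -> Option<Vec<String>> {
    let mut out = Vec::with_capacity(args.len());
    for arg in args {
        if arg == "$saved_file" {
            // Placeholder present but no file was saved: skip this flycheck run entirely.
            out.push(saved_file?.to_owned());
        } else {
            out.push(arg.clone());
        }
    }
    Some(out)
}

fn main() {
    let args: Vec<String> = ["check", "--file", "$saved_file"].map(str::to_owned).into();
    assert_eq!(
        expand_custom_check(&args, Some("src/lib.rs")).unwrap(),
        ["check", "--file", "src/lib.rs"]
    );
    assert!(expand_custom_check(&args, None).is_none());
}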
+ if let Some(saved_file) = saved_file { + for arg in args { + if arg == SAVED_FILE_PLACEHOLDER { + cmd.arg(saved_file); + } else { + cmd.arg(arg); + } } } else { - (cmd, args.clone()) + for arg in args { + if arg == SAVED_FILE_PLACEHOLDER { + // The custom command has a $saved_file placeholder, + // but we had an IDE event that wasn't a file save. Do nothing. + return None; + } + + cmd.arg(arg); + } } - } - }; - cmd.args(args); - Some(cmd) + Some(cmd) + } + } } - fn send(&self, check_task: Message) { - (self.sender)(check_task); + #[track_caller] + fn send(&self, check_task: FlycheckMessage) { + self.sender.send(check_task).unwrap(); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index f1dde104fce..7a7ec1d77e0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -6,10 +6,9 @@ use std::{ops::Not as _, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; -use flycheck::{project_json, FlycheckHandle}; use hir::ChangeWithProcMacros; use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; -use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt}; +use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase}; use itertools::Itertools; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; @@ -28,15 +27,18 @@ use vfs::{AbsPathBuf, AnchoredPathBuf, ChangeKind, Vfs, VfsPath}; use crate::{ config::{Config, ConfigChange, ConfigErrors, RatomlFileKind}, diagnostics::{CheckFixes, DiagnosticCollection}, + discover, + flycheck::{FlycheckHandle, FlycheckMessage}, line_index::{LineEndings, LineIndex}, lsp::{from_proto, to_proto::url_from_abs_path}, lsp_ext, main_loop::Task, mem_docs::MemDocs, - op_queue::OpQueue, + op_queue::{Cause, OpQueue}, reload, target_spec::{CargoTargetSpec, ProjectJsonTargetSpec, TargetSpec}, task_pool::{TaskPool, TaskQueue}, + test_runner::{CargoTestHandle, CargoTestMessage}, }; pub(crate) struct FetchWorkspaceRequest { @@ -88,28 +90,28 @@ pub(crate) struct GlobalState { // Flycheck pub(crate) flycheck: Arc<[FlycheckHandle]>, - pub(crate) flycheck_sender: Sender<flycheck::Message>, - pub(crate) flycheck_receiver: Receiver<flycheck::Message>, + pub(crate) flycheck_sender: Sender<FlycheckMessage>, + pub(crate) flycheck_receiver: Receiver<FlycheckMessage>, pub(crate) last_flycheck_error: Option<String>, // Test explorer - pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>, - pub(crate) test_run_sender: Sender<flycheck::CargoTestMessage>, - pub(crate) test_run_receiver: Receiver<flycheck::CargoTestMessage>, + pub(crate) test_run_session: Option<Vec<CargoTestHandle>>, + pub(crate) test_run_sender: Sender<CargoTestMessage>, + pub(crate) test_run_receiver: Receiver<CargoTestMessage>, pub(crate) test_run_remaining_jobs: usize, // Project loading - pub(crate) discover_handle: Option<project_json::DiscoverHandle>, - pub(crate) discover_sender: Sender<project_json::DiscoverProjectMessage>, - pub(crate) discover_receiver: Receiver<project_json::DiscoverProjectMessage>, + pub(crate) discover_handle: Option<discover::DiscoverHandle>, + pub(crate) discover_sender: Sender<discover::DiscoverProjectMessage>, + pub(crate) discover_receiver: Receiver<discover::DiscoverProjectMessage>, // VFS pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>, pub(crate) vfs: 
Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>, pub(crate) vfs_config_version: u32, pub(crate) vfs_progress_config_version: u32, - pub(crate) vfs_progress_n_total: usize, - pub(crate) vfs_progress_n_done: usize, + pub(crate) vfs_done: bool, + pub(crate) wants_to_switch: Option<Cause>, /// `workspaces` field stores the data we actually use, while the `OpQueue` /// stores the result of the last fetch. @@ -183,8 +185,7 @@ impl GlobalState { pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState { let loader = { let (sender, receiver) = unbounded::<vfs::loader::Message>(); - let handle: vfs_notify::NotifyHandle = - vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); + let handle: vfs_notify::NotifyHandle = vfs::loader::Handle::spawn(sender); let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>; Handle { handle, receiver } }; @@ -252,8 +253,8 @@ impl GlobalState { vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))), vfs_config_version: 0, vfs_progress_config_version: 0, - vfs_progress_n_total: 0, - vfs_progress_n_done: 0, + vfs_done: true, + wants_to_switch: None, workspaces: Arc::from(Vec::new()), crate_graph_file_dependencies: FxHashSet::default(), @@ -458,6 +459,11 @@ impl GlobalState { } } + // FIXME: `workspace_structure_change` is computed from `should_refresh_for_change` which is + // path syntax based. That is not sufficient for all cases so we should lift that check out + // into a `QueuedTask`, see `handle_did_save_text_document`. + // Or maybe instead of replacing that check, kick off a semantic one if the syntactic one + // didn't find anything (to make up for the lack of precision). { if !matches!(&workspace_structure_change, Some((.., true))) { _ = self @@ -557,8 +563,52 @@ impl GlobalState { self.req_queue.incoming.is_completed(&request.id) } + #[track_caller] fn send(&self, message: lsp_server::Message) { - self.sender.send(message).unwrap() + self.sender.send(message).unwrap(); + } + + pub(crate) fn publish_diagnostics( + &mut self, + uri: Url, + version: Option<i32>, + mut diagnostics: Vec<lsp_types::Diagnostic>, + ) { + // We put this on a separate thread to avoid blocking the main thread with serialization work + self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, { + let sender = self.sender.clone(); + move |_| { + // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch + // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether + // diagnostic messages are actually allowed to be empty or not and patching this + // in the VSCode client does not work as the assertion happens in the protocol + // conversion. So this hack is here to stay, and will be considered a hack + // until the LSP decides to state that empty messages are allowed. 
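// A tiny, runnable demo of the workaround described above: empty diagnostic messages are
// padded to a single space so the VSCode client accepts them. `lsp_types::Diagnostic` is
// replaced by a plain `String` here; the closure a few lines below does the same thing
// in place on the real diagnostics.
fn patch_empty(message: &mut String) {
    if message.is_empty() {
        " ".clone_into(message);
    }
}

fn main() {
    let mut message = String::new();
    patch_empty(&mut message);
    assert_eq!(message, " ");

    let mut message = "expected `u32`, found `&str`".to_owned();
    patch_empty(&mut message);
    // Non-empty messages are left untouched.
    assert_eq!(message, "expected `u32`, found `&str`");
}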
+ + // See https://github.com/rust-lang/rust-analyzer/issues/11404 + // See https://github.com/rust-lang/rust-analyzer/issues/13130 + let patch_empty = |message: &mut String| { + if message.is_empty() { + " ".clone_into(message); + } + }; + + for d in &mut diagnostics { + patch_empty(&mut d.message); + if let Some(dri) = &mut d.related_information { + for dri in dri { + patch_empty(&mut dri.message); + } + } + } + + let not = lsp_server::Notification::new( + <lsp_types::notification::PublishDiagnostics as lsp_types::notification::Notification>::METHOD.to_owned(), + lsp_types::PublishDiagnosticsParams { uri, diagnostics, version }, + ); + _ = sender.send(not.into()); + } + }); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index ebdc196a658..a105ec63820 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -97,16 +97,45 @@ impl RequestDispatcher<'_> { self } - /// Dispatches a non-latency-sensitive request onto the thread pool. + /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not + /// ready this will return a default constructed [`R::Result`]. pub(crate) fn on<const ALLOW_RETRYING: bool, R>( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, - R::Result: Serialize, + R: lsp_types::request::Request< + Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, + Result: Serialize + Default, + > + 'static, + { + if !self.global_state.vfs_done { + if let Some(lsp_server::Request { id, .. }) = + self.req.take_if(|it| it.method == R::METHOD) + { + self.global_state.respond(lsp_server::Response::new_ok(id, R::Result::default())); + } + return self; + } + self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f) + } + + /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not + /// ready this will return the parameter as is. 
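// Both dispatch paths short-circuit while the VFS is still loading: `on` (above) answers
// with `R::Result::default()`, and `on_identity` (below) echoes the request parameters
// back unchanged, so clients get a well-formed, if empty, response instead of blocking on
// state that is not built yet. A stripped-down, std-only sketch of that gating idea; the
// signatures are stand-ins, not the real LSP request machinery:
fn on<P, R: Default>(vfs_done: bool, params: P, handler: impl FnOnce(P) -> R) -> R {
    if !vfs_done {
        return R::default(); // not ready: a default-constructed result
    }
    handler(params)
}

fn on_identity<P>(vfs_done: bool, params: P, handler: impl FnOnce(P) -> P) -> P {
    if !vfs_done {
        return params; // not ready: hand the parameter straight back
    }
    handler(params)
}

fn main() {
    assert_eq!(on(false, 3u32, |n| n + 1), 0);
    assert_eq!(on(true, 3u32, |n| n + 1), 4);
    let reverse = |mut v: Vec<i32>| { v.reverse(); v };
    assert_eq!(on_identity(false, vec![1, 2, 3], reverse), [1, 2, 3]);
    assert_eq!(on_identity(true, vec![1, 2, 3], reverse), [3, 2, 1]);
}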
+ pub(crate) fn on_identity<const ALLOW_RETRYING: bool, R, Params>( + &mut self, + f: fn(GlobalStateSnapshot, Params) -> anyhow::Result<R::Result>, + ) -> &mut Self + where + R: lsp_types::request::Request<Params = Params, Result = Params> + 'static, + Params: Serialize + DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, { + if !self.global_state.vfs_done { + if let Some((request, params, _)) = self.parse::<R>() { + self.global_state.respond(lsp_server::Response::new_ok(request.id, ¶ms)) + } + return self; + } self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f) } @@ -198,11 +227,7 @@ impl RequestDispatcher<'_> { R: lsp_types::request::Request, R::Params: DeserializeOwned + fmt::Debug, { - let req = match &self.req { - Some(req) if req.method == R::METHOD => self.req.take()?, - _ => return None, - }; - + let req = self.req.take_if(|it| it.method == R::METHOD)?; let res = crate::from_json(R::METHOD, &req.params); match res { Ok(params) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 4b14dcfc372..de5d1f23136 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -9,6 +9,7 @@ use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, WorkDoneProgressCancelParams, }; +use paths::Utf8PathBuf; use triomphe::Arc; use vfs::{AbsPathBuf, ChangeKind, VfsPath}; @@ -157,6 +158,8 @@ pub(crate) fn handle_did_save_text_document( .map(|cfg| cfg.files_to_watch.iter().map(String::as_str).collect::<Vec<&str>>()) .unwrap_or_default(); + // FIXME: We should move this check into a QueuedTask and do semantic resolution of + // the files. There is only so much we can tell syntactically from the path. 
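// The FIXME above boils down to: `should_refresh_for_change` can only look at the path
// text, not at project semantics. Roughly the kind of purely syntactic test it refers to,
// as an illustrative approximation (the real function lives in reload.rs and covers more
// cases than this):
fn looks_like_workspace_structure_change(path: &str, additional_files: &[&str]) -> bool {
    let file_name = path.rsplit('/').next().unwrap_or(path);
    matches!(file_name, "Cargo.toml" | "Cargo.lock" | "rust-project.json" | "build.rs")
        || additional_files.contains(&file_name)
}

fn main() {
    assert!(looks_like_workspace_structure_change("/w/Cargo.toml", &[]));
    // Extra names come from the `files_to_watch` configuration collected above;
    // "BUILD.custom" is only an example.
    assert!(looks_like_workspace_structure_change("/w/BUILD.custom", &["BUILD.custom"]));
    // A plain source edit does not force a workspace reload.
    assert!(!looks_like_workspace_structure_change("/w/src/lib.rs", &[]));
}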
if reload::should_refresh_for_change(path, ChangeKind::Modify, additional_files) { state.fetch_workspaces_queue.request_op( format!("workspace vfs file change saved {path}"), @@ -240,6 +243,7 @@ pub(crate) fn handle_did_change_workspace_folders( for workspace in params.event.removed { let Ok(path) = workspace.uri.to_file_path() else { continue }; + let Ok(path) = Utf8PathBuf::from_path_buf(path) else { continue }; let Ok(path) = AbsPathBuf::try_from(path) else { continue }; config.remove_workspace(&path); } @@ -249,6 +253,7 @@ pub(crate) fn handle_did_change_workspace_folders( .added .into_iter() .filter_map(|it| it.uri.to_file_path().ok()) + .filter_map(|it| Utf8PathBuf::from_path_buf(it).ok()) .filter_map(|it| AbsPathBuf::try_from(it).ok()); config.add_workspaces(added); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index eca139d79ae..34325ac7a93 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -36,7 +36,6 @@ use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath}; use crate::{ config::{Config, RustfmtConfig, WorkspaceSymbolConfig}, - diff::diff, global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot}, hack_recover_crate_name, line_index::LineEndings, @@ -51,6 +50,7 @@ use crate::{ FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams, }, target_spec::{CargoTargetSpec, TargetSpec}, + test_runner::{CargoTestHandle, TestTarget}, }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { @@ -246,15 +246,15 @@ pub(crate) fn handle_run_test( if let ProjectWorkspaceKind::Cargo { cargo, .. 
} = &ws.kind { let test_target = if let Some(namespace_root) = namespace_root { if let Some(package_name) = find_package_name(namespace_root, cargo) { - flycheck::TestTarget::Package(package_name) + TestTarget::Package(package_name) } else { - flycheck::TestTarget::Workspace + TestTarget::Workspace } } else { - flycheck::TestTarget::Workspace + TestTarget::Workspace }; - let handle = flycheck::CargoTestHandle::new( + let handle = CargoTestHandle::new( test_path, state.config.cargo_test_options(), cargo.workspace_root(), @@ -781,9 +781,12 @@ pub(crate) fn handle_parent_module( if let Ok(file_path) = ¶ms.text_document.uri.to_file_path() { if file_path.file_name().unwrap_or_default() == "Cargo.toml" { // search workspaces for parent packages or fallback to workspace root - let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() { - Some(abs_path_buf) => abs_path_buf, - None => return Ok(None), + let abs_path_buf = match Utf8PathBuf::from_path_buf(file_path.to_path_buf()) + .ok() + .map(AbsPathBuf::try_from) + { + Some(Ok(abs_path_buf)) => abs_path_buf, + _ => return Ok(None), }; let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() { @@ -2366,3 +2369,47 @@ fn resolve_resource_op(op: &ResourceOp) -> ResourceOperationKind { ResourceOp::Delete(_) => ResourceOperationKind::Delete, } } + +pub(crate) fn diff(left: &str, right: &str) -> TextEdit { + use dissimilar::Chunk; + + let chunks = dissimilar::diff(left, right); + + let mut builder = TextEdit::builder(); + let mut pos = TextSize::default(); + + let mut chunks = chunks.into_iter().peekable(); + while let Some(chunk) = chunks.next() { + if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) { + chunks.next().unwrap(); + let deleted_len = TextSize::of(deleted); + builder.replace(TextRange::at(pos, deleted_len), inserted.into()); + pos += deleted_len; + continue; + } + + match chunk { + Chunk::Equal(text) => { + pos += TextSize::of(text); + } + Chunk::Delete(deleted) => { + let deleted_len = TextSize::of(deleted); + builder.delete(TextRange::at(pos, deleted_len)); + pos += deleted_len; + } + Chunk::Insert(inserted) => { + builder.insert(pos, inserted.into()); + } + } + } + builder.finish() +} + +#[test] +fn diff_smoke_test() { + let mut original = String::from("fn foo(a:u32){\n}"); + let result = "fn foo(a: u32) {}"; + let edit = diff(&original, result); + edit.apply(&mut original); + assert_eq!(original, result); +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index ff8eb6c8612..28f4b809d6c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -46,13 +46,19 @@ fn integrated_highlighting_benchmark() { let (db, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); - load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() + load_workspace_at( + workspace_to_load.as_std_path(), + &cargo_config, + &load_cargo_config, + &|_| {}, + ) + .unwrap() }; let mut host = AnalysisHost::with_database(db); let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); + let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -106,13 +112,19 @@ fn integrated_completion_benchmark() { 
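// Several hunks above and below are mechanical fallout of the path-type cleanup in this
// change: std `PathBuf`s are converted to a UTF-8 path (`Utf8PathBuf::from_path_buf`)
// before becoming an `AbsPathBuf`, and APIs that still expect `&std::path::Path` get one
// via `as_std_path()`. A minimal sketch of that round-trip using `camino::Utf8PathBuf`
// (the `paths::Utf8PathBuf` imported elsewhere in this diff appears to be a re-export of
// it); the `AbsPathBuf` step itself is rust-analyzer-internal and only mentioned in the
// comments:
use std::path::{Path, PathBuf};

use camino::Utf8PathBuf;

fn main() {
    let std_path = PathBuf::from("/workspace/Cargo.toml");
    // Hands the original `PathBuf` back as the error value if the path is not valid UTF-8.
    let utf8: Utf8PathBuf = Utf8PathBuf::from_path_buf(std_path).expect("non-UTF-8 path");
    // Inside rust-analyzer the next step would be `AbsPathBuf::try_from(utf8)` or, where
    // absoluteness is already known, `AbsPathBuf::assert(utf8)`.
    let back: &Path = utf8.as_std_path();
    assert_eq!(back, Path::new("/workspace/Cargo.toml"));
}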
let (db, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); - load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() + load_workspace_at( + workspace_to_load.as_std_path(), + &cargo_config, + &load_cargo_config, + &|_| {}, + ) + .unwrap() }; let mut host = AnalysisHost::with_database(db); let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); + let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -274,13 +286,19 @@ fn integrated_diagnostics_benchmark() { let (db, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); - load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() + load_workspace_at( + workspace_to_load.as_std_path(), + &cargo_config, + &load_cargo_config, + &|_| {}, + ) + .unwrap() }; let mut host = AnalysisHost::with_database(db); let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); + let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -307,7 +325,7 @@ fn integrated_diagnostics_benchmark() { term_search_borrowck: true, }; host.analysis() - .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) + .full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) .unwrap(); let _g = crate::tracing::hprof::init("*"); @@ -325,7 +343,7 @@ fn integrated_diagnostics_benchmark() { let _p = tracing::info_span!("diagnostics").entered(); let _span = profile::cpu_span(); host.analysis() - .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) + .full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) .unwrap(); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index 174979edede..714991e8116 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -11,10 +11,10 @@ pub mod cli; -mod capabilities; +mod command; mod diagnostics; -mod diff; -mod dispatch; +mod discover; +mod flycheck; mod hack_recover_crate_name; mod line_index; mod main_loop; @@ -23,9 +23,11 @@ mod op_queue; mod reload; mod target_spec; mod task_pool; +mod test_runner; mod version; mod handlers { + pub(crate) mod dispatch; pub(crate) mod notification; pub(crate) mod request; } @@ -47,7 +49,7 @@ mod integrated_benchmarks; use serde::de::DeserializeOwned; pub use crate::{ - capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, + lsp::capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version, }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs index 9e0d42faed4..122ad20d65e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs @@ -3,6 +3,8 @@ use core::fmt; pub mod ext; + +pub(crate) mod capabilities; pub(crate) mod from_proto; pub(crate) mod semantic_tokens; pub(crate) mod to_proto; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs index 9610808c27e..9610808c27e 100644 --- 
a/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index 1fcb636f856..8d1a686dc4d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -61,7 +61,7 @@ impl Request for FetchDependencyList { #[serde(rename_all = "camelCase")] pub struct FetchDependencyListParams {} -#[derive(Deserialize, Serialize, Debug)] +#[derive(Deserialize, Serialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct FetchDependencyListResult { pub crates: Vec<CrateInfoResult>, @@ -194,7 +194,7 @@ pub struct TestItem { pub runnable: Option<Runnable>, } -#[derive(Deserialize, Serialize, Debug)] +#[derive(Deserialize, Serialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct DiscoverTestResults { pub tests: Vec<TestItem>, @@ -690,6 +690,12 @@ pub enum ExternalDocsResponse { WithLocal(ExternalDocsPair), } +impl Default for ExternalDocsResponse { + fn default() -> Self { + ExternalDocsResponse::Simple(None) + } +} + #[derive(Debug, Default, PartialEq, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct ExternalDocsPair { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs index aea424298f8..1f4544887f0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs @@ -2,6 +2,7 @@ use anyhow::format_err; use ide::{Annotation, AnnotationKind, AssistKind, LineCol}; use ide_db::{line_index::WideLineCol, FileId, FilePosition, FileRange}; +use paths::Utf8PathBuf; use syntax::{TextRange, TextSize}; use vfs::AbsPathBuf; @@ -13,7 +14,7 @@ use crate::{ pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> { let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?; - Ok(AbsPathBuf::try_from(path).unwrap()) + Ok(AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap()) } pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result<vfs::VfsPath> { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 9c820749ece..1d4ee71e5c1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -9,26 +9,28 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; -use flycheck::project_json; -use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath}; use lsp_server::{Connection, Notification, Request}; use lsp_types::{notification::Notification as _, TextDocumentIdentifier}; use stdx::thread::ThreadIntent; use tracing::{error, span, Level}; -use vfs::{AbsPathBuf, FileId}; +use vfs::{loader::LoadingProgress, AbsPathBuf, FileId}; use crate::{ config::Config, - diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration}, - dispatch::{NotificationDispatcher, RequestDispatcher}, + diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind}, + discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage}, + flycheck::{self, 
FlycheckMessage}, global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState}, hack_recover_crate_name, + handlers::dispatch::{NotificationDispatcher, RequestDispatcher}, lsp::{ from_proto, to_proto, utils::{notification_is, Progress}, }, lsp_ext, reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress}, + test_runner::{CargoTestMessage, TestState}, }; pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> { @@ -61,9 +63,9 @@ enum Event { Task(Task), QueuedTask(QueuedTask), Vfs(vfs::loader::Message), - Flycheck(flycheck::Message), - TestResult(flycheck::CargoTestMessage), - DiscoverProject(project_json::DiscoverProjectMessage), + Flycheck(FlycheckMessage), + TestResult(CargoTestMessage), + DiscoverProject(DiscoverProjectMessage), } impl fmt::Display for Event { @@ -87,16 +89,23 @@ pub(crate) enum QueuedTask { } #[derive(Debug)] +pub(crate) enum DiagnosticsTaskKind { + Syntax(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>), + Semantic(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>), +} + +#[derive(Debug)] pub(crate) enum Task { Response(lsp_server::Response), DiscoverLinkedProjects(DiscoverProjectParam), Retry(lsp_server::Request), - Diagnostics(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>), + Diagnostics(DiagnosticsTaskKind), DiscoverTest(lsp_ext::DiscoverTestResults), PrimeCaches(PrimeCachesProgress), FetchWorkspace(ProjectWorkspaceProgress), FetchBuildData(BuildDataProgress), LoadProcMacros(ProcMacroProgress), + // FIXME: Remove this in favor of a more general QueuedTask, see `handle_did_save_text_document` BuildDepsHaveChanged, } @@ -164,8 +173,10 @@ impl GlobalState { } if self.config.discover_workspace_config().is_none() { - let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }; - self.fetch_workspaces_queue.request_op("startup".to_owned(), req); + self.fetch_workspaces_queue.request_op( + "startup".to_owned(), + FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }, + ); if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = self.fetch_workspaces_queue.should_start_op() { @@ -173,7 +184,10 @@ impl GlobalState { } } - while let Some(event) = self.next_event(&inbox) { + while let Ok(event) = self.next_event(&inbox) { + let Some(event) = event else { + anyhow::bail!("client exited without proper shutdown sequence"); + }; if matches!( &event, Event::Lsp(lsp_server::Message::Notification(Notification { method, .. })) @@ -184,7 +198,7 @@ impl GlobalState { self.handle_event(event)?; } - anyhow::bail!("client exited without proper shutdown sequence") + Err(anyhow::anyhow!("A receiver has been dropped, something panicked!")) } fn register_did_save_capability(&mut self, additional_patterns: impl Iterator<Item = String>) { @@ -231,37 +245,40 @@ impl GlobalState { ); } - fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> { + fn next_event( + &self, + inbox: &Receiver<lsp_server::Message>, + ) -> Result<Option<Event>, crossbeam_channel::RecvError> { select! 
{ recv(inbox) -> msg => - msg.ok().map(Event::Lsp), + return Ok(msg.ok().map(Event::Lsp)), recv(self.task_pool.receiver) -> task => - Some(Event::Task(task.unwrap())), + task.map(Event::Task), recv(self.deferred_task_queue.receiver) -> task => - Some(Event::QueuedTask(task.unwrap())), + task.map(Event::QueuedTask), recv(self.fmt_pool.receiver) -> task => - Some(Event::Task(task.unwrap())), + task.map(Event::Task), recv(self.loader.receiver) -> task => - Some(Event::Vfs(task.unwrap())), + task.map(Event::Vfs), recv(self.flycheck_receiver) -> task => - Some(Event::Flycheck(task.unwrap())), + task.map(Event::Flycheck), recv(self.test_run_receiver) -> task => - Some(Event::TestResult(task.unwrap())), + task.map(Event::TestResult), recv(self.discover_receiver) -> task => - Some(Event::DiscoverProject(task.unwrap())), + task.map(Event::DiscoverProject), } + .map(Some) } fn handle_event(&mut self, event: Event) -> anyhow::Result<()> { let loop_start = Instant::now(); - // NOTE: don't count blocking select! call as a loop-turn time let _p = tracing::info_span!("GlobalState::handle_event", event = %event).entered(); let event_dbg_msg = format!("{event:?}"); @@ -375,9 +392,14 @@ impl GlobalState { } } let event_handling_duration = loop_start.elapsed(); - - let state_changed = self.process_changes(); - let memdocs_added_or_removed = self.mem_docs.take_changes(); + let (state_changed, memdocs_added_or_removed) = if self.vfs_done { + if let Some(cause) = self.wants_to_switch.take() { + self.switch_workspaces(cause); + } + (self.process_changes(), self.mem_docs.take_changes()) + } else { + (false, false) + }; if self.is_quiescent() { let became_quiescent = !was_quiescent; @@ -423,40 +445,13 @@ impl GlobalState { if let Some(diagnostic_changes) = self.diagnostics.take_changes() { for file_id in diagnostic_changes { let uri = file_id_to_url(&self.vfs.read().0, file_id); - let mut diagnostics = - self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>(); - - // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch - // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether - // diagnostic messages are actually allowed to be empty or not and patching this - // in the VSCode client does not work as the assertion happens in the protocol - // conversion. So this hack is here to stay, and will be considered a hack - // until the LSP decides to state that empty messages are allowed. - - // See https://github.com/rust-lang/rust-analyzer/issues/11404 - // See https://github.com/rust-lang/rust-analyzer/issues/13130 - let patch_empty = |message: &mut String| { - if message.is_empty() { - " ".clone_into(message); - } - }; - - for d in &mut diagnostics { - patch_empty(&mut d.message); - if let Some(dri) = &mut d.related_information { - for dri in dri { - patch_empty(&mut dri.message); - } - } - } - let version = from_proto::vfs_path(&uri) - .map(|path| self.mem_docs.get(&path).map(|it| it.version)) - .unwrap_or_default(); + .ok() + .and_then(|path| self.mem_docs.get(&path).map(|it| it.version)); - self.send_notification::<lsp_types::notification::PublishDiagnostics>( - lsp_types::PublishDiagnosticsParams { uri, diagnostics, version }, - ); + let diagnostics = + self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>(); + self.publish_diagnostics(uri, version, diagnostics); } } @@ -549,14 +544,37 @@ impl GlobalState { } // Diagnostics are triggered by the user typing // so we run them on a latency sensitive thread. 
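// The rewritten spawn below splits native diagnostics into two phases: cheap syntax-only
// diagnostics are computed and sent first, and the more expensive semantic pass follows,
// skipped entirely until the workspace has actually loaded. A std-only sketch of that
// "report twice over one channel" shape, with strings standing in for the real
// Task/diagnostic types:
use std::sync::mpsc;
use std::thread;

fn main() {
    let (sender, receiver) = mpsc::channel();
    let fetch_semantic = true; // in rust-analyzer: vfs_done && a workspace has been fetched

    thread::spawn(move || {
        // Phase 1: fast, purely syntactic results go out immediately.
        sender.send("syntax diagnostics".to_owned()).unwrap();
        if fetch_semantic {
            // Phase 2: slower semantic results arrive later on the same channel.
            sender.send("semantic diagnostics".to_owned()).unwrap();
        }
    });

    // The consumer treats each message as an independent diagnostics update.
    for task in receiver {
        println!("publishing: {task}");
    }
}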
- self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, { - let snapshot = self.snapshot(); + let snapshot = self.snapshot(); + self.task_pool.handle.spawn_with_sender(ThreadIntent::LatencySensitive, { let subscriptions = subscriptions.clone(); - move || { - Task::Diagnostics( - generation, - fetch_native_diagnostics(snapshot, subscriptions, slice), - ) + // Do not fetch semantic diagnostics (and populate query results) if we haven't even + // loaded the initial workspace yet. + let fetch_semantic = + self.vfs_done && self.fetch_workspaces_queue.last_op_result().is_some(); + move |sender| { + let diags = fetch_native_diagnostics( + &snapshot, + subscriptions.clone(), + slice.clone(), + NativeDiagnosticsFetchKind::Syntax, + ); + sender + .send(Task::Diagnostics(DiagnosticsTaskKind::Syntax(generation, diags))) + .unwrap(); + + if fetch_semantic { + let diags = fetch_native_diagnostics( + &snapshot, + subscriptions, + slice, + NativeDiagnosticsFetchKind::Semantic, + ); + sender + .send(Task::Diagnostics(DiagnosticsTaskKind::Semantic( + generation, diags, + ))) + .unwrap(); + } } }); start = end; @@ -564,6 +582,9 @@ impl GlobalState { } fn update_tests(&mut self) { + if !self.vfs_done { + return; + } let db = self.analysis_host.raw_database(); let subscriptions = self .mem_docs @@ -644,10 +665,8 @@ impl GlobalState { // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work. Task::Retry(req) if !self.is_completed(&req) => self.on_request(req), Task::Retry(_) => (), - Task::Diagnostics(generation, diagnostics_per_file) => { - for (file_id, diagnostics) in diagnostics_per_file { - self.diagnostics.set_native_diagnostics(generation, file_id, diagnostics) - } + Task::Diagnostics(kind) => { + self.diagnostics.set_native_diagnostics(kind); } Task::PrimeCaches(progress) => match progress { PrimeCachesProgress::Begin => prime_caches_progress.push(progress), @@ -672,7 +691,7 @@ impl GlobalState { if let Err(e) = self.fetch_workspace_error() { error!("FetchWorkspaceError:\n{e}"); } - self.switch_workspaces("fetched workspace".to_owned()); + self.wants_to_switch = Some("fetched workspace".to_owned()); (Progress::End, None) } }; @@ -686,8 +705,7 @@ impl GlobalState { // `self.report_progress` is called later let title = &cfg.progress_label.clone(); let command = cfg.command.clone(); - let discover = - project_json::Discover::new(self.discover_sender.clone(), command); + let discover = DiscoverCommand::new(self.discover_sender.clone(), command); self.report_progress(title, Progress::Begin, None, None, None); self.discover_workspace_queue @@ -695,12 +713,8 @@ impl GlobalState { let _ = self.discover_workspace_queue.should_start_op(); let arg = match arg { - DiscoverProjectParam::Buildfile(it) => { - project_json::DiscoverArgument::Buildfile(it) - } - DiscoverProjectParam::Path(it) => { - project_json::DiscoverArgument::Path(it) - } + DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it), + DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it), }; let handle = discover.spawn(arg).unwrap(); @@ -718,8 +732,9 @@ impl GlobalState { error!("FetchBuildDataError:\n{e}"); } - self.switch_workspaces("fetched build data".to_owned()); - + if self.wants_to_switch.is_none() { + self.wants_to_switch = Some("fetched build data".to_owned()); + } (Some(Progress::End), None) } }; @@ -772,16 +787,14 @@ impl GlobalState { let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered(); always!(config_version <= self.vfs_config_version); - let state = match 
n_done { - None => Progress::Begin, - Some(done) if done == n_total => Progress::End, - Some(_) => Progress::Report, + let (n_done, state) = match n_done { + LoadingProgress::Started => (0, Progress::Begin), + LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report), + LoadingProgress::Finished => (n_total, Progress::End), }; - let n_done = n_done.unwrap_or_default(); self.vfs_progress_config_version = config_version; - self.vfs_progress_n_total = n_total; - self.vfs_progress_n_done = n_done; + self.vfs_done = state == Progress::End; let mut message = format!("{n_done}/{n_total}"); if let Some(dir) = dir { @@ -850,14 +863,14 @@ impl GlobalState { } } - fn handle_discover_msg(&mut self, message: project_json::DiscoverProjectMessage) { + fn handle_discover_msg(&mut self, message: DiscoverProjectMessage) { let title = self .config .discover_workspace_config() .map(|cfg| cfg.progress_label.clone()) .expect("No title could be found; this is a bug"); match message { - project_json::DiscoverProjectMessage::Finished { project, buildfile } => { + DiscoverProjectMessage::Finished { project, buildfile } => { self.report_progress(&title, Progress::End, None, None, None); self.discover_workspace_queue.op_completed(()); @@ -865,10 +878,10 @@ impl GlobalState { config.add_linked_projects(project, buildfile); self.update_configuration(config); } - project_json::DiscoverProjectMessage::Progress { message } => { + DiscoverProjectMessage::Progress { message } => { self.report_progress(&title, Progress::Report, Some(message), None, None) } - project_json::DiscoverProjectMessage::Error { error, source } => { + DiscoverProjectMessage::Error { error, source } => { let message = format!("Project discovery failed: {error}"); self.discover_workspace_queue.op_completed(()); self.show_and_log_error(message.clone(), source); @@ -877,16 +890,14 @@ impl GlobalState { } } - fn handle_cargo_test_msg(&mut self, message: flycheck::CargoTestMessage) { + fn handle_cargo_test_msg(&mut self, message: CargoTestMessage) { match message { - flycheck::CargoTestMessage::Test { name, state } => { + CargoTestMessage::Test { name, state } => { let state = match state { - flycheck::TestState::Started => lsp_ext::TestState::Started, - flycheck::TestState::Ignored => lsp_ext::TestState::Skipped, - flycheck::TestState::Ok => lsp_ext::TestState::Passed, - flycheck::TestState::Failed { stdout } => { - lsp_ext::TestState::Failed { message: stdout } - } + TestState::Started => lsp_ext::TestState::Started, + TestState::Ignored => lsp_ext::TestState::Skipped, + TestState::Ok => lsp_ext::TestState::Passed, + TestState::Failed { stdout } => lsp_ext::TestState::Failed { message: stdout }, }; let Some(test_id) = hack_recover_crate_name::lookup_name(name) else { return; @@ -895,23 +906,23 @@ impl GlobalState { lsp_ext::ChangeTestStateParams { test_id, state }, ); } - flycheck::CargoTestMessage::Suite => (), - flycheck::CargoTestMessage::Finished => { + CargoTestMessage::Suite => (), + CargoTestMessage::Finished => { self.test_run_remaining_jobs = self.test_run_remaining_jobs.saturating_sub(1); if self.test_run_remaining_jobs == 0 { self.send_notification::<lsp_ext::EndRunTest>(()); self.test_run_session = None; } } - flycheck::CargoTestMessage::Custom { text } => { + CargoTestMessage::Custom { text } => { self.send_notification::<lsp_ext::AppendOutputToRunTest>(text); } } } - fn handle_flycheck_msg(&mut self, message: flycheck::Message) { + fn handle_flycheck_msg(&mut self, message: FlycheckMessage) { match message { - 
flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => { + FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => { let snap = self.snapshot(); let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( &self.config.diagnostics_map(), @@ -937,9 +948,9 @@ impl GlobalState { } } - flycheck::Message::ClearDiagnostics { id } => self.diagnostics.clear_check(id), + FlycheckMessage::ClearDiagnostics { id } => self.diagnostics.clear_check(id), - flycheck::Message::Progress { id, progress } => { + FlycheckMessage::Progress { id, progress } => { let (state, message) = match progress { flycheck::Progress::DidStart => (Progress::Begin, None), flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), @@ -1054,9 +1065,9 @@ impl GlobalState { .on::<NO_RETRY, lsp_request::GotoImplementation>(handlers::handle_goto_implementation) .on::<NO_RETRY, lsp_request::GotoTypeDefinition>(handlers::handle_goto_type_definition) .on::<NO_RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints) - .on::<NO_RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve) + .on_identity::<NO_RETRY, lsp_request::InlayHintResolveRequest, _>(handlers::handle_inlay_hints_resolve) .on::<NO_RETRY, lsp_request::CodeLensRequest>(handlers::handle_code_lens) - .on::<NO_RETRY, lsp_request::CodeLensResolve>(handlers::handle_code_lens_resolve) + .on_identity::<NO_RETRY, lsp_request::CodeLensResolve, _>(handlers::handle_code_lens_resolve) .on::<NO_RETRY, lsp_request::PrepareRenameRequest>(handlers::handle_prepare_rename) .on::<NO_RETRY, lsp_request::Rename>(handlers::handle_rename) .on::<NO_RETRY, lsp_request::References>(handlers::handle_references) @@ -1083,7 +1094,7 @@ impl GlobalState { .on::<NO_RETRY, lsp_ext::Runnables>(handlers::handle_runnables) .on::<NO_RETRY, lsp_ext::RelatedTests>(handlers::handle_related_tests) .on::<NO_RETRY, lsp_ext::CodeActionRequest>(handlers::handle_code_action) - .on::<RETRY, lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve) + .on_identity::<RETRY, lsp_ext::CodeActionResolveRequest, _>(handlers::handle_code_action_resolve) .on::<NO_RETRY, lsp_ext::HoverRequest>(handlers::handle_hover) .on::<NO_RETRY, lsp_ext::ExternalDocs>(handlers::handle_open_docs) .on::<NO_RETRY, lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 5c95ccd4b82..dee34b1b393 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -15,7 +15,6 @@ // FIXME: This is a mess that needs some untangling work use std::{iter, mem}; -use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder}; use ide_db::{ base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version}, @@ -32,6 +31,7 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, + flycheck::{FlycheckConfig, FlycheckHandle}, global_state::{FetchWorkspaceRequest, GlobalState}, lsp_ext, main_loop::{DiscoverProjectParam, Task}, @@ -62,13 +62,13 @@ pub(crate) enum ProcMacroProgress { impl GlobalState { pub(crate) fn is_quiescent(&self) -> bool { - !(self.last_reported_status.is_none() - || self.fetch_workspaces_queue.op_in_progress() - || self.fetch_build_data_queue.op_in_progress() - || 
self.fetch_proc_macros_queue.op_in_progress() - || self.discover_workspace_queue.op_in_progress() - || self.vfs_progress_config_version < self.vfs_config_version - || self.vfs_progress_n_done < self.vfs_progress_n_total) + self.vfs_done + && self.last_reported_status.is_some() + && !self.fetch_workspaces_queue.op_in_progress() + && !self.fetch_build_data_queue.op_in_progress() + && !self.fetch_proc_macros_queue.op_in_progress() + && !self.discover_workspace_queue.op_in_progress() + && self.vfs_progress_config_version >= self.vfs_config_version } pub(crate) fn update_configuration(&mut self, config: Config) { @@ -102,15 +102,13 @@ impl GlobalState { } pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams { - let mut status = lsp_ext::ServerStatusParams { - health: lsp_ext::Health::Ok, - quiescent: self.is_quiescent(), - message: None, - }; + let quiescent = self.is_quiescent(); + let mut status = + lsp_ext::ServerStatusParams { health: lsp_ext::Health::Ok, quiescent, message: None }; let mut message = String::new(); if !self.config.cargo_autoreload(None) - && self.is_quiescent() + && quiescent && self.fetch_workspaces_queue.op_requested() && self.config.discover_workspace_config().is_none() { @@ -242,7 +240,7 @@ impl GlobalState { let discover_command = self.config.discover_workspace_config().cloned(); let is_quiescent = !(self.discover_workspace_queue.op_in_progress() || self.vfs_progress_config_version < self.vfs_config_version - || self.vfs_progress_n_done < self.vfs_progress_n_total); + || !self.vfs_done); move |sender| { let progress = { @@ -751,20 +749,22 @@ impl GlobalState { let config = self.config.flycheck(); let sender = self.flycheck_sender.clone(); let invocation_strategy = match config { - FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace, + FlycheckConfig::CargoCommand { .. } => { + crate::flycheck::InvocationStrategy::PerWorkspace + } FlycheckConfig::CustomCommand { invocation_strategy, .. 
} => invocation_strategy, }; self.flycheck = match invocation_strategy { - flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( + crate::flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( 0, - Box::new(move |msg| sender.send(msg).unwrap()), + sender, config, None, self.config.root_path().clone(), None, )], - flycheck::InvocationStrategy::PerWorkspace => { + crate::flycheck::InvocationStrategy::PerWorkspace => { self.workspaces .iter() .enumerate() @@ -793,10 +793,9 @@ impl GlobalState { )) }) .map(|(id, (root, manifest_path), sysroot_root)| { - let sender = sender.clone(); FlycheckHandle::spawn( id, - Box::new(move |msg| sender.send(msg).unwrap()), + sender.clone(), config.clone(), sysroot_root, root.to_path_buf(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index 67e1bad5281..965fd415e99 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -263,11 +263,14 @@ mod tests { use super::*; use ide::Edition; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, SmolStr, }; + use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, + }; fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { diff --git a/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs index 74ebca34103..293cff47433 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs @@ -10,12 +10,12 @@ use toolchain::Tool; use crate::{ command::{CommandHandle, ParseFromLine}, - CargoOptions, + flycheck::CargoOptions, }; #[derive(Debug, Deserialize)] #[serde(tag = "event", rename_all = "camelCase")] -pub enum TestState { +pub(crate) enum TestState { Started, Ok, Ignored, @@ -24,7 +24,7 @@ pub enum TestState { #[derive(Debug, Deserialize)] #[serde(tag = "type", rename_all = "camelCase")] -pub enum CargoTestMessage { +pub(crate) enum CargoTestMessage { Test { name: String, #[serde(flatten)] @@ -54,7 +54,7 @@ impl ParseFromLine for CargoTestMessage { } #[derive(Debug)] -pub struct CargoTestHandle { +pub(crate) struct CargoTestHandle { _handle: CommandHandle<CargoTestMessage>, } @@ -64,13 +64,13 @@ pub struct CargoTestHandle { // cargo test --package my-package --no-fail-fast -- module::func -Z unstable-options --format=json #[derive(Debug)] -pub enum TestTarget { +pub(crate) enum TestTarget { Workspace, Package(String), } impl CargoTestHandle { - pub fn new( + pub(crate) fn new( path: Option<&str>, options: CargoOptions, root: &AbsPath, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs index 66481d3d7f5..b8a82fd6a72 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs @@ -69,7 +69,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false) + Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir)) } #[test] diff --git 
a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index b1ef4837717..54cd27f4b3b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -750,7 +750,7 @@ fn test_missing_module_code_action_in_json_project() { let code = format!( r#" -//- /rust-project.json +//- /.rust-project.json {project} //- /src/lib.rs @@ -909,7 +909,7 @@ version = \"0.0.0\" fn out_dirs_check_impl(root_contains_symlink: bool) { if skip_slow_tests() { - // return; + return; } let mut server = Project::with_fixture( @@ -1084,7 +1084,6 @@ fn resolve_proc_macro() { let sysroot = project_model::Sysroot::discover( &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()), &Default::default(), - false, ); let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); @@ -1125,7 +1124,6 @@ edition = "2021" proc-macro = true //- /bar/src/lib.rs -extern crate proc_macro; use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; macro_rules! t { ($n:literal) => { diff --git a/src/tools/rust-analyzer/crates/span/Cargo.toml b/src/tools/rust-analyzer/crates/span/Cargo.toml index 9f85f0107cc..3381dac0b42 100644 --- a/src/tools/rust-analyzer/crates/span/Cargo.toml +++ b/src/tools/rust-analyzer/crates/span/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "span" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "File and span related types for rust-analyzer." rust-version.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index b61baa22446..0ebd72e1514 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -18,7 +18,28 @@ use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; /// See crates\hir-expand\src\ast_id_map.rs /// This is a type erased FileAstId. -pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>; +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ErasedFileAstId(u32); + +impl ErasedFileAstId { + pub const fn into_raw(self) -> u32 { + self.0 + } + pub const fn from_raw(u32: u32) -> Self { + Self(u32) + } +} + +impl fmt::Display for ErasedFileAstId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} +impl fmt::Debug for ErasedFileAstId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} /// `AstId` points to an AST node in a specific file. 
pub struct FileAstId<N: AstIdNode> { @@ -47,7 +68,7 @@ impl<N: AstIdNode> Hash for FileAstId<N> { impl<N: AstIdNode> fmt::Debug for FileAstId<N> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw()) + write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw) } } @@ -176,7 +197,10 @@ impl AstIdMap { let ptr = ptr.syntax_node_ptr(); let hash = hash_ptr(&ptr); match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { - Some((&raw, &())) => FileAstId { raw, covariant: PhantomData }, + Some((&raw, &())) => FileAstId { + raw: ErasedFileAstId(raw.into_raw().into_u32()), + covariant: PhantomData, + }, None => panic!( "Can't find {:?} in AstIdMap:\n{:?}", ptr, @@ -186,18 +210,19 @@ impl AstIdMap { } pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> { - AstPtr::try_from_raw(self.arena[id.raw]).unwrap() + AstPtr::try_from_raw(self.arena[Idx::from_raw(RawIdx::from_u32(id.raw.into_raw()))]) + .unwrap() } pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr { - self.arena[id] + self.arena[Idx::from_raw(RawIdx::from_u32(id.into_raw()))] } fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { let ptr = SyntaxNodePtr::new(item); let hash = hash_ptr(&ptr); match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { - Some((&idx, &())) => idx, + Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()), None => panic!( "Can't find {:?} in AstIdMap:\n{:?}", item, @@ -207,7 +232,7 @@ impl AstIdMap { } fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId { - self.arena.alloc(SyntaxNodePtr::new(item)) + ErasedFileAstId(self.arena.alloc(SyntaxNodePtr::new(item)).into_raw().into_u32()) } } diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs index e8c558355c9..874480c59fb 100644 --- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs @@ -79,6 +79,10 @@ impl SyntaxContextId { #[derive(Copy, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion. + // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of + // MacroCallId is reserved anyways so we can do bit tagging here just fine. + // The bigger issue is that that will cause interning to now create completely separate chains + // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent. pub outer_expn: Option<MacroCallId>, pub outer_transparency: Transparency, pub parent: SyntaxContextId, diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index b4e21d64f81..61e4c98128a 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -21,15 +21,14 @@ pub use vfs::FileId; /// The root ast id always points to the encompassing file, using this in spans is discouraged as /// any range relative to it will be effectively absolute, ruining the entire point of anchored /// relative text ranges. -pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = - la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0)); +pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(0); /// FileId used as the span for syntax node fixups. Any Span containing this file id is to be /// considered fake. 
pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = - // we pick the second to last for this in case we every consider making this a NonMaxU32, this + // we pick the second to last for this in case we ever consider making this a NonMaxU32, this // is required to be stable for the proc-macro-server - la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1)); + ErasedFileAstId::from_raw(!0 - 1); pub type Span = SpanData<SyntaxContextId>; diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs index 6269f4c30c7..c539754979d 100644 --- a/src/tools/rust-analyzer/crates/span/src/map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -119,7 +119,7 @@ impl fmt::Display for RealSpanMap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "RealSpanMap({:?}):", self.file_id)?; for span in self.pairs.iter() { - writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw().into_u32())?; + writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw())?; } Ok(()) } diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml index 99824df1f69..bf0d6df9ad8 100644 --- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml +++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "stdx" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Missing batteries for standard libraries for rust-analyzer." authors.workspace = true edition.workspace = true @@ -16,7 +17,7 @@ backtrace = { version = "0.3.67", optional = true } always-assert = { version = "0.2.0", features = ["tracing"] } jod-thread = "0.1.2" libc.workspace = true -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true itertools.workspace = true # Think twice before adding anything here diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml new file mode 100644 index 00000000000..e995ff3b55b --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "syntax-bridge" +version = "0.0.0" +repository.workspace = true +description = "Conversions between syntax nodes and token trees for rust-analyzer." 
+ +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true + +[lib] +doctest = false + +[dependencies] +rustc-hash.workspace = true +tracing.workspace = true + +# local deps +syntax.workspace = true +parser.workspace = true +tt.workspace = true +stdx.workspace = true +span.workspace = true +intern.workspace = true + +[dev-dependencies] +test-utils.workspace = true + +[features] +in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"] + +[lints] +workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index a29efdd4ef7..b0afd245c52 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -14,11 +14,13 @@ use syntax::{ }; use tt::{ buffer::{Cursor, TokenBuffer}, - iter::TtIter, token_to_literal, }; -use crate::to_parser_input::to_parser_input; +mod to_parser_input; +pub use to_parser_input::to_parser_input; +// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces +pub use ::parser::TopEntryPoint; #[cfg(test)] mod tests; @@ -43,7 +45,7 @@ impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM { } /// Dummy things for testing where spans don't matter. -pub(crate) mod dummy_test_span_utils { +pub mod dummy_test_span_utils { use span::{Span, SyntaxContextId}; @@ -211,50 +213,6 @@ where Some(convert_tokens(&mut conv)) } -/// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep( - tt: &tt::Subtree<span::Span>, - sep: char, - span: span::Span, - edition: Edition, -) -> Vec<tt::Subtree<span::Span>> { - if tt.token_trees.is_empty() { - return Vec::new(); - } - - let mut iter = TtIter::new(tt); - let mut res = Vec::new(); - - while iter.peek_n(0).is_some() { - let expanded = crate::expect_fragment( - &mut iter, - parser::PrefixEntryPoint::Expr, - edition, - tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, - ); - - res.push(match expanded.value { - None => break, - Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }), - }); - - let mut fork = iter.clone(); - if fork.expect_char(sep).is_err() { - break; - } - iter = fork; - } - - if iter.peek_n(0).is_some() { - res.push(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(span), - token_trees: iter.cloned().collect(), - }); - } - - res -} - fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S> where C: TokenConverter<S>, @@ -479,7 +437,6 @@ fn convert_doc_comment<S: Copy>( span: S, mode: DocCommentDesugarMode, ) -> Option<Vec<tt::TokenTree<S>>> { - cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs new file mode 100644 index 00000000000..7b8e3f2b49c --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs @@ -0,0 +1,104 @@ +use rustc_hash::FxHashMap; +use span::Span; +use syntax::{ast, AstNode}; +use test_utils::extract_annotations; +use tt::{ + buffer::{TokenBuffer, TokenTreeRef}, + Leaf, Punct, Spacing, +}; + +use crate::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; + +fn check_punct_spacing(fixture: &str) { + let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap(); + let subtree = 
syntax_node_to_token_tree( + source_file.syntax(), + DummyTestSpanMap, + DUMMY, + DocCommentDesugarMode::Mbe, + ); + let mut annotations: FxHashMap<_, _> = extract_annotations(fixture) + .into_iter() + .map(|(range, annotation)| { + let spacing = match annotation.as_str() { + "Alone" => Spacing::Alone, + "Joint" => Spacing::Joint, + a => panic!("unknown annotation: {a}"), + }; + (range, spacing) + }) + .collect(); + + let buf = TokenBuffer::from_subtree(&subtree); + let mut cursor = buf.begin(); + while !cursor.eof() { + while let Some(token_tree) = cursor.token_tree() { + if let TokenTreeRef::Leaf( + Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }), + _, + ) = token_tree + { + if let Some(expected) = annotations.remove(range) { + assert_eq!(expected, *spacing); + } + } + cursor = cursor.bump_subtree(); + } + cursor = cursor.bump(); + } + + assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}"); +} + +#[test] +fn punct_spacing() { + check_punct_spacing( + r#" +fn main() { + 0+0; + //^ Alone + 0+(0); + //^ Alone + 0<=0; + //^ Joint + // ^ Alone + 0<=(0); + // ^ Alone + a=0; + //^ Alone + a=(0); + //^ Alone + a+=0; + //^ Joint + // ^ Alone + a+=(0); + // ^ Alone + a&&b; + //^ Joint + // ^ Alone + a&&(b); + // ^ Alone + foo::bar; + // ^ Joint + // ^ Alone + use foo::{bar,baz,}; + // ^ Alone + // ^ Alone + // ^ Alone + struct Struct<'a> {}; + // ^ Joint + // ^ Joint + Struct::<0>; + // ^ Alone + Struct::<{0}>; + // ^ Alone + ;; + //^ Joint + // ^ Alone +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs index c35b28527a0..2c548992688 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs @@ -8,7 +8,7 @@ use syntax::{SyntaxKind, SyntaxKind::*, T}; use tt::buffer::TokenBuffer; -pub(crate) fn to_parser_input<S: Copy + fmt::Debug>( +pub fn to_parser_input<S: Copy + fmt::Debug>( edition: Edition, buffer: &TokenBuffer<'_, S>, ) -> parser::Input { diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index b371ec6ebd5..994c21469ff 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "syntax" version = "0.0.0" -description = "Comment and whitespace preserving parser for the Rust language" -repository = "https://github.com/rust-lang/rust-analyzer" +repository.workspace = true +description = "Concrete syntax tree definitions for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs index f0d58efc01e..b50489c6f0f 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/tests.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs @@ -79,7 +79,7 @@ fn self_hosting_parsing() { let crates_dir = project_root().join("crates"); let mut files = Vec::new(); - let mut work = vec![crates_dir.to_path_buf()]; + let mut work = vec![crates_dir.into_std_path_buf()]; while let Some(dir) = work.pop() { for entry in dir.read_dir().unwrap() { let entry = entry.unwrap(); @@ -127,7 +127,7 @@ fn self_hosting_parsing() { } fn test_data_dir() -> PathBuf { - project_root().join("crates/syntax/test_data") + project_root().into_std_path_buf().join("crates/syntax/test_data") } fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index e1f40f5da01..03e85a898ab 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -3,7 +3,7 @@ use std::{iter, mem, str::FromStr, sync}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange, - FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath, + FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath, }; use cfg::CfgOptions; use hir_expand::{ @@ -26,7 +26,7 @@ use tt::{Leaf, Subtree, TokenTree}; pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0); -pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { +pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static { #[track_caller] fn with_single_file(ra_fixture: &str) -> (Self, EditionedFileId) { let fixture = ChangeFixture::parse(ra_fixture); @@ -101,7 +101,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { } } -impl<DB: ExpandDatabase + SourceDatabaseExt + Default + 'static> WithFixture for DB {} +impl<DB: ExpandDatabase + SourceRootDatabase + Default + 'static> WithFixture for DB {} pub struct ChangeFixture { pub file_position: Option<(EditionedFileId, RangeOrOffset)>, diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml index 2ff1fad6c29..b1457722a92 100644 --- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml +++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "test-utils" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Assorted testing utilities for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -18,6 +19,7 @@ text-size.workspace = true tracing.workspace = true rustc-hash.workspace = true +paths.workspace = true stdx.workspace = true profile.workspace = true diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs index 088817b8357..36be9937d3f 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs @@ -18,6 +18,7 @@ use std::{ path::{Path, PathBuf}, }; +use paths::Utf8PathBuf; use profile::StopWatch; use stdx::is_ci; use text_size::{TextRange, TextSize}; @@ -402,9 +403,10 @@ pub fn skip_slow_tests() -> bool { } /// Returns the path to the root directory of `rust-analyzer` project. -pub fn project_root() -> PathBuf { +pub fn project_root() -> Utf8PathBuf { let dir = env!("CARGO_MANIFEST_DIR"); - PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned() + Utf8PathBuf::from_path_buf(PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()) + .unwrap() } pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String { diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml index f745674794c..dc6b3d31a09 100644 --- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml +++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "text-edit" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Representation of a `TextEdit` for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml index c85efd432b0..87e8efb20fc 100644 --- a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml +++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "toolchain" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Discovery of `cargo` & `rustc` executables for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml index cea1519c2dd..82e7c24668f 100644 --- a/src/tools/rust-analyzer/crates/tt/Cargo.toml +++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "tt" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A `TokenTree` data structure for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml index a6d5027c3a6..09296dc6dd5 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "vfs-notify" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Implementation of `loader::Handle` for rust-analyzer." 
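The `project_root` change earlier in this hunk returns a UTF-8 path type built via `Utf8PathBuf::from_path_buf`. rust-analyzer's `paths` crate appears to build on the camino crate, so the sketch below uses camino directly; that crate choice is an assumption, only the `from_path_buf` shape is taken from the diff:

    use std::path::PathBuf;

    use camino::Utf8PathBuf;

    fn main() {
        let std_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        // from_path_buf fails (handing back the original PathBuf) if the
        // path is not valid UTF-8, hence the unwrap/expect in the hunk above.
        let utf8: Utf8PathBuf = Utf8PathBuf::from_path_buf(std_path)
            .expect("manifest dir should be valid UTF-8");
        // Converting back to a std path is lossless.
        let _back: PathBuf = utf8.clone().into_std_path_buf();
        println!("project manifest dir: {utf8}");
    }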
authors.workspace = true edition.workspace = true @@ -14,12 +15,14 @@ doctest = false [dependencies] tracing.workspace = true walkdir = "2.3.2" -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true notify = "6.1.1" +rayon = "1.10.0" stdx.workspace = true vfs.workspace = true paths.workspace = true +rustc-hash.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs index 7e0f9af7af8..0ae8b7baf46 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs @@ -10,12 +10,15 @@ use std::{ fs, path::{Component, Path}, + sync::atomic::AtomicUsize, }; -use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; -use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher}; -use paths::{AbsPath, AbsPathBuf}; -use vfs::loader; +use crossbeam_channel::{select, unbounded, Receiver, Sender}; +use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; +use rayon::iter::{IndexedParallelIterator as _, IntoParallelIterator as _, ParallelIterator}; +use rustc_hash::FxHashSet; +use vfs::loader::{self, LoadingProgress}; use walkdir::WalkDir; #[derive(Debug)] @@ -59,7 +62,8 @@ type NotifyEvent = notify::Result<notify::Event>; struct NotifyActor { sender: loader::Sender, - watched_entries: Vec<loader::Entry>, + watched_file_entries: FxHashSet<AbsPathBuf>, + watched_dir_entries: Vec<loader::Directories>, // Drop order is significant. watcher: Option<(RecommendedWatcher, Receiver<NotifyEvent>)>, } @@ -72,14 +76,22 @@ enum Event { impl NotifyActor { fn new(sender: loader::Sender) -> NotifyActor { - NotifyActor { sender, watched_entries: Vec::new(), watcher: None } + NotifyActor { + sender, + watched_dir_entries: Vec::new(), + watched_file_entries: FxHashSet::default(), + watcher: None, + } } fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> { - let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver); + let Some((_, watcher_receiver)) = &self.watcher else { + return receiver.recv().ok().map(Event::Message); + }; + select! { recv(receiver) -> it => it.ok().map(Event::Message), - recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())), + recv(watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())), } } @@ -94,7 +106,10 @@ impl NotifyActor { let (watcher_sender, watcher_receiver) = unbounded(); let watcher = log_notify_error(RecommendedWatcher::new( move |event| { - watcher_sender.send(event).unwrap(); + // we don't care about the error. If sending fails that usually + // means we were dropped, so unwrapping will just add to the + // panic noise. 
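The `NotifyActor::next_event` change above drops the `unwrap_or(&never())` trick in favour of an early return when no file watcher is attached yet. A minimal sketch of that optional-receiver pattern, with hypothetical names:

    use crossbeam_channel::{select, unbounded, Receiver};

    // If the watcher channel does not exist yet, fall back to a plain
    // blocking recv instead of selecting over a dummy `never()` channel.
    fn next_message(
        control: &Receiver<&'static str>,
        watcher: Option<&Receiver<&'static str>>,
    ) -> Option<&'static str> {
        let Some(watcher) = watcher else {
            return control.recv().ok();
        };
        select! {
            recv(control) -> msg => msg.ok(),
            recv(watcher) -> msg => msg.ok(),
        }
    }

    fn main() {
        let (tx, rx) = unbounded();
        tx.send("reload config").unwrap();
        assert_eq!(next_message(&rx, None), Some("reload config"));
    }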
+ _ = watcher_sender.send(event); }, Config::default(), )); @@ -104,35 +119,74 @@ impl NotifyActor { let config_version = config.version; let n_total = config.load.len(); + self.watched_dir_entries.clear(); + self.watched_file_entries.clear(); + self.send(loader::Message::Progress { n_total, - n_done: None, + n_done: LoadingProgress::Started, config_version, dir: None, }); - self.watched_entries.clear(); + let (entry_tx, entry_rx) = unbounded(); + let (watch_tx, watch_rx) = unbounded(); + let processed = AtomicUsize::new(0); - for (i, entry) in config.load.into_iter().enumerate() { - let watch = config.watch.contains(&i); - if watch { - self.watched_entries.push(entry.clone()); + config.load.into_par_iter().enumerate().for_each(|(i, entry)| { + let do_watch = config.watch.contains(&i); + if do_watch { + _ = entry_tx.send(entry.clone()); } - let files = - self.load_entry(entry, watch, |file| loader::Message::Progress { - n_total, - n_done: Some(i), - dir: Some(file), - config_version, - }); + let files = Self::load_entry( + |f| _ = watch_tx.send(f.to_owned()), + entry, + do_watch, + |file| { + self.send(loader::Message::Progress { + n_total, + n_done: LoadingProgress::Progress( + processed.load(std::sync::atomic::Ordering::Relaxed), + ), + dir: Some(file), + config_version, + }); + }, + ); self.send(loader::Message::Loaded { files }); self.send(loader::Message::Progress { n_total, - n_done: Some(i + 1), + n_done: LoadingProgress::Progress( + processed.fetch_add(1, std::sync::atomic::Ordering::AcqRel) + 1, + ), config_version, dir: None, }); + }); + + drop(watch_tx); + for path in watch_rx { + self.watch(&path); } + + drop(entry_tx); + for entry in entry_rx { + match entry { + loader::Entry::Files(files) => { + self.watched_file_entries.extend(files) + } + loader::Entry::Directories(dir) => { + self.watched_dir_entries.push(dir) + } + } + } + + self.send(loader::Message::Progress { + n_total, + n_done: LoadingProgress::Finished, + config_version, + dir: None, + }); } Message::Invalidate(path) => { let contents = read(path.as_path()); @@ -142,55 +196,69 @@ impl NotifyActor { }, Event::NotifyEvent(event) => { if let Some(event) = log_notify_error(event) { - let files = event - .paths - .into_iter() - .map(|path| AbsPathBuf::try_from(path).unwrap()) - .filter_map(|path| { - let meta = fs::metadata(&path).ok()?; - if meta.file_type().is_dir() - && self - .watched_entries - .iter() - .any(|entry| entry.contains_dir(&path)) - { - self.watch(path); - return None; - } + if let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) = + event.kind + { + let files = event + .paths + .into_iter() + .filter_map(|path| { + Some( + AbsPathBuf::try_from( + Utf8PathBuf::from_path_buf(path).ok()?, + ) + .expect("path is absolute"), + ) + }) + .filter_map(|path| -> Option<(AbsPathBuf, Option<Vec<u8>>)> { + let meta = fs::metadata(&path).ok()?; + if meta.file_type().is_dir() + && self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_dir(&path)) + { + self.watch(path.as_ref()); + return None; + } - if !meta.file_type().is_file() { - return None; - } - if !self - .watched_entries - .iter() - .any(|entry| entry.contains_file(&path)) - { - return None; - } + if !meta.file_type().is_file() { + return None; + } - let contents = read(&path); - Some((path, contents)) - }) - .collect(); - self.send(loader::Message::Changed { files }); + if !(self.watched_file_entries.contains(&path) + || self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_file(&path))) + { + return None; + } + + let 
contents = read(&path); + Some((path, contents)) + }) + .collect(); + self.send(loader::Message::Changed { files }); + } } } } } } + fn load_entry( - &mut self, + mut watch: impl FnMut(&Path), entry: loader::Entry, - watch: bool, - make_message: impl Fn(AbsPathBuf) -> loader::Message, + do_watch: bool, + send_message: impl Fn(AbsPathBuf), ) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> { match entry { loader::Entry::Files(files) => files .into_iter() .map(|file| { - if watch { - self.watch(file.clone()); + if do_watch { + watch(file.as_ref()); } let contents = read(file.as_path()); (file, contents) @@ -200,7 +268,7 @@ impl NotifyActor { let mut res = Vec::new(); for root in &dirs.include { - self.send(make_message(root.clone())); + send_message(root.clone()); let walkdir = WalkDir::new(root).follow_links(true).into_iter().filter_entry(|entry| { if !entry.file_type().is_dir() { @@ -208,7 +276,7 @@ impl NotifyActor { } let path = entry.path(); - if path_is_parent_symlink(path) { + if path_might_be_cyclic(path) { return false; } @@ -220,12 +288,15 @@ impl NotifyActor { let depth = entry.depth(); let is_dir = entry.file_type().is_dir(); let is_file = entry.file_type().is_file(); - let abs_path = AbsPathBuf::try_from(entry.into_path()).ok()?; + let abs_path = AbsPathBuf::try_from( + Utf8PathBuf::from_path_buf(entry.into_path()).ok()?, + ) + .ok()?; if depth < 2 && is_dir { - self.send(make_message(abs_path.clone())); + send_message(abs_path.clone()); } - if is_dir && watch { - self.watch(abs_path.clone()); + if is_dir && do_watch { + watch(abs_path.as_ref()); } if !is_file { return None; @@ -247,13 +318,15 @@ impl NotifyActor { } } - fn watch(&mut self, path: AbsPathBuf) { + fn watch(&mut self, path: &Path) { if let Some((watcher, _)) = &mut self.watcher { - log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive)); + log_notify_error(watcher.watch(path, RecursiveMode::NonRecursive)); } } - fn send(&mut self, msg: loader::Message) { - (self.sender)(msg); + + #[track_caller] + fn send(&self, msg: loader::Message) { + self.sender.send(msg).unwrap(); } } @@ -271,7 +344,7 @@ fn log_notify_error<T>(res: notify::Result<T>) -> Option<T> { /// heuristic is not sufficient to catch all symlink cycles (it's /// possible to construct cycle using two or more symlinks), but it /// catches common cases. -fn path_is_parent_symlink(path: &Path) -> bool { +fn path_might_be_cyclic(path: &Path) -> bool { let Ok(destination) = std::fs::read_link(path) else { return false; }; diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml index 84f2110ebad..e8a6195036e 100644 --- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "vfs" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A virtual file system for rust-analyzer." 
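The loader rewrite above fans the `config.load` entries out with rayon, counts progress through an `AtomicUsize`, and funnels loaded files and watch requests back over unbounded channels. A stripped-down sketch of that shape, with fake loading in place of real file I/O and hypothetical names:

    use std::sync::atomic::{AtomicUsize, Ordering};

    use crossbeam_channel::unbounded;
    use rayon::prelude::*;

    fn main() {
        let entries: Vec<String> = (0..8).map(|i| format!("crate-{i}")).collect();
        let n_total = entries.len();
        let processed = AtomicUsize::new(0);
        let (loaded_tx, loaded_rx) = unbounded();

        entries.into_par_iter().enumerate().for_each(|(i, entry)| {
            // Pretend to load the entry, then hand the result back to the
            // single consumer; the sender can be used from many threads.
            loaded_tx.send((i, format!("contents of {entry}"))).unwrap();
            let n_done = processed.fetch_add(1, Ordering::AcqRel) + 1;
            println!("progress {n_done}/{n_total}");
        });

        drop(loaded_tx);
        let mut files: Vec<_> = loaded_rx.into_iter().collect();
        files.sort_by_key(|&(i, _)| i);
        assert_eq!(files.len(), n_total);
    }

Draining the receiver only finishes once every sender is gone, which is why the hunk above drops `watch_tx` and `entry_tx` before iterating over their receivers.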
authors.workspace = true edition.workspace = true @@ -17,6 +18,7 @@ tracing.workspace = true fst = "0.4.7" indexmap.workspace = true nohash-hasher.workspace = true +crossbeam-channel.workspace = true paths.workspace = true stdx.workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs index 77f890fd7e0..bc40e03c5a2 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs @@ -201,8 +201,8 @@ impl Vfs { pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool { let _p = span!(Level::INFO, "Vfs::set_file_contents").entered(); let file_id = self.alloc_file_id(path); - let state = self.get(file_id); - let change_kind = match (state, contents) { + let state: FileState = self.get(file_id); + let change = match (state, contents) { (FileState::Deleted, None) => return false, (FileState::Deleted, Some(v)) => { let hash = hash_once::<FxHasher>(&*v); @@ -225,7 +225,7 @@ impl Vfs { }; }; - let changed_file = ChangedFile { file_id, change: change_kind }; + let changed_file = ChangedFile { file_id, change }; match self.changes.entry(file_id) { // two changes to the same file in one cycle, merge them appropriately Entry::Occupied(mut o) => { diff --git a/src/tools/rust-analyzer/crates/vfs/src/loader.rs b/src/tools/rust-analyzer/crates/vfs/src/loader.rs index 3af91b1af81..f24354cb493 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/loader.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/loader.rs @@ -43,6 +43,13 @@ pub struct Config { pub watch: Vec<usize>, } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum LoadingProgress { + Started, + Progress(usize), + Finished, +} + /// Message about an action taken by a [`Handle`]. pub enum Message { /// Indicate a gradual progress. @@ -52,7 +59,7 @@ pub enum Message { /// The total files to be loaded. n_total: usize, /// The files that have been loaded successfully. - n_done: Option<usize>, + n_done: LoadingProgress, /// The dir being loaded, `None` if its for a file. dir: Option<AbsPathBuf>, /// The [`Config`] version. @@ -65,7 +72,7 @@ pub enum Message { } /// Type that will receive [`Messages`](Message) from a [`Handle`]. -pub type Sender = Box<dyn Fn(Message) + Send>; +pub type Sender = crossbeam_channel::Sender<Message>; /// Interface for reading and watching files. pub trait Handle: fmt::Debug { diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index e559f88e233..4786bd54d59 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp/ext.rs hash: e92e1f12229b0071 +lsp/ext.rs hash: 3429c08745984b3d If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index ac95767ea5b..2be338dd4d1 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -144,16 +144,6 @@ Unsetting this disables sysroot loading. This option does not take effect until rust-analyzer is restarted. 
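With `loader::Sender` now a `crossbeam_channel::Sender<Message>` instead of a boxed callback, and progress expressed by the three-state `LoadingProgress` enum shown above, the consuming side becomes an ordinary channel drain. A rough sketch with simplified message types (the real `Message` variants carry more fields):

    use crossbeam_channel::{unbounded, Sender};

    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    enum LoadingProgress {
        Started,
        Progress(usize),
        Finished,
    }

    #[derive(Debug)]
    enum Message {
        Progress { n_total: usize, n_done: LoadingProgress },
        Loaded { files: usize },
    }

    // A loader thread only needs the Sender half; unlike a boxed
    // `dyn Fn(Message)` callback it is Clone and Send, and the receiving
    // side can participate in the main loop's `select!`.
    fn fake_loader(sender: Sender<Message>) {
        let n_total = 3;
        sender.send(Message::Progress { n_total, n_done: LoadingProgress::Started }).unwrap();
        for i in 0..n_total {
            sender.send(Message::Loaded { files: 1 }).unwrap();
            sender
                .send(Message::Progress { n_total, n_done: LoadingProgress::Progress(i + 1) })
                .unwrap();
        }
        sender.send(Message::Progress { n_total, n_done: LoadingProgress::Finished }).unwrap();
    }

    fn main() {
        let (tx, rx) = unbounded();
        std::thread::spawn(move || fake_loader(tx));
        for msg in rx {
            println!("{msg:?}");
        }
    }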
-- -[[rust-analyzer.cargo.sysrootQueryMetadata]]rust-analyzer.cargo.sysrootQueryMetadata (default: `false`):: -+ --- -Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze -third-party dependencies of the standard libraries. - -This will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer -will attempt to clean up afterwards, but nevertheless requires the location to be -writable to. --- [[rust-analyzer.cargo.sysrootSrc]]rust-analyzer.cargo.sysrootSrc (default: `null`):: + -- diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc index a1777209087..703ec66921c 100644 --- a/src/tools/rust-analyzer/docs/user/manual.adoc +++ b/src/tools/rust-analyzer/docs/user/manual.adoc @@ -203,6 +203,12 @@ The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew]. $ brew install rust-analyzer ---- +==== Windows + +It is recommended to install the latest Microsoft Visual C++ Redistributable prior to installation. +Download links can be found +https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist[here]. + === VS Code or VSCodium in Flatpak Setting up `rust-analyzer` with a Flatpak version of Code is not trivial because of the Flatpak sandbox. diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 4b594129a36..bf9c4a366d4 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -71,7 +71,9 @@ "workspaceContains:Cargo.toml", "workspaceContains:*/Cargo.toml", "workspaceContains:rust-project.json", - "workspaceContains:*/rust-project.json" + "workspaceContains:*/rust-project.json", + "workspaceContains:.rust-project.json", + "workspaceContains:*/.rust-project.json" ], "main": "./out/main", "contributes": { @@ -829,16 +831,6 @@ { "title": "cargo", "properties": { - "rust-analyzer.cargo.sysrootQueryMetadata": { - "markdownDescription": "Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze\nthird-party dependencies of the standard libraries.\n\nThis will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer\nwill attempt to clean up afterwards, but nevertheless requires the location to be\nwritable to.", - "default": false, - "type": "boolean" - } - } - }, - { - "title": "cargo", - "properties": { "rust-analyzer.cargo.sysrootSrc": { "markdownDescription": "Relative path to the sysroot library sources. 
If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.", "default": null, @@ -3221,6 +3213,10 @@ { "fileMatch": "rust-project.json", "url": "https://json.schemastore.org/rust-project.json" + }, + { + "fileMatch": ".rust-project.json", + "url": "https://json.schemastore.org/rust-project.json" } ], "walkthroughs": [ diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts index 42dd0760d62..daead47e942 100644 --- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts +++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts @@ -4,6 +4,7 @@ import type { Config } from "./config"; import { type Env, log } from "./util"; import type { PersistentState } from "./persistent_state"; import { exec, spawnSync } from "child_process"; +import { TextDecoder } from "node:util"; export async function bootstrap( context: vscode.ExtensionContext, @@ -50,26 +51,35 @@ async function getServer( } return explicitPath; } - if (packageJson.releaseTag === null) return "rust-analyzer"; - if (vscode.workspace.workspaceFolders?.length === 1) { - // otherwise check if there is a toolchain override for the current vscode workspace - // and if the toolchain of this override has a rust-analyzer component - // if so, use the rust-analyzer component - const toolchainTomlExists = await fileExists( - vscode.Uri.joinPath(vscode.workspace.workspaceFolders[0]!.uri, "rust-toolchain.toml"), - ); - if (toolchainTomlExists) { - const res = spawnSync("rustup", ["which", "rust-analyzer"], { - encoding: "utf8", - env: { ...process.env }, - cwd: vscode.workspace.workspaceFolders[0]!.uri.fsPath, - }); - if (!res.error && res.status === 0) { - return res.stdout.trim(); + let toolchainServerPath = undefined; + if (vscode.workspace.workspaceFolders) { + for (const workspaceFolder of vscode.workspace.workspaceFolders) { + // otherwise check if there is a toolchain override for the current vscode workspace + // and if the toolchain of this override has a rust-analyzer component + // if so, use the rust-analyzer component + const toolchainUri = vscode.Uri.joinPath(workspaceFolder.uri, "rust-toolchain.toml"); + if (await hasToolchainFileWithRaDeclared(toolchainUri)) { + const res = spawnSync("rustup", ["which", "rust-analyzer"], { + encoding: "utf8", + env: { ...process.env }, + cwd: workspaceFolder.uri.fsPath, + }); + if (!res.error && res.status === 0) { + toolchainServerPath = earliestToolchainPath( + toolchainServerPath, + res.stdout.trim(), + raVersionResolver, + ); + } } } } + if (toolchainServerPath) { + return toolchainServerPath; + } + + if (packageJson.releaseTag === null) return "rust-analyzer"; // finally, use the bundled one const ext = process.platform === "win32" ? ".exe" : ""; @@ -102,6 +112,57 @@ async function getServer( return undefined; } +// Given a path to a rust-analyzer executable, resolve its version and return it. +function raVersionResolver(path: string): string | undefined { + const res = spawnSync(path, ["--version"], { + encoding: "utf8", + }); + if (!res.error && res.status === 0) { + return res.stdout; + } else { + return undefined; + } +} + +// Given a path to two rust-analyzer executables, return the earliest one by date. 
+function earliestToolchainPath( + path0: string | undefined, + path1: string, + raVersionResolver: (path: string) => string | undefined, +): string { + if (path0) { + if (orderFromPath(path0, raVersionResolver) < orderFromPath(path1, raVersionResolver)) { + return path0; + } else { + return path1; + } + } else { + return path1; + } +} + +// Further to extracting a date for comparison, determine the order of a toolchain as follows: +// Highest - nightly +// Medium - versioned +// Lowest - stable +// Example paths: +// nightly - /Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer +// versioned - /Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer +// stable - /Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer +function orderFromPath( + path: string, + raVersionResolver: (path: string) => string | undefined, +): string { + const raVersion = raVersionResolver(path); + const raDate = raVersion?.match(/^rust-analyzer .*\(.* (\d{4}-\d{2}-\d{2})\)$/); + if (raDate?.length === 2) { + const precedence = path.includes("nightly-") ? "0" : "1"; + return "0-" + raDate[1] + "/" + precedence; + } else { + return "2"; + } +} + async function fileExists(uri: vscode.Uri) { return await vscode.workspace.fs.stat(uri).then( () => true, @@ -109,6 +170,19 @@ async function fileExists(uri: vscode.Uri) { ); } +async function hasToolchainFileWithRaDeclared(uri: vscode.Uri): Promise<boolean> { + try { + const toolchainFileContents = new TextDecoder().decode( + await vscode.workspace.fs.readFile(uri), + ); + return ( + toolchainFileContents.match(/components\s*=\s*\[.*\"rust-analyzer\".*\]/g)?.length === 1 + ); + } catch (e) { + return false; + } +} + export function isValidExecutable(path: string, extraEnv: Env): boolean { log.debug("Checking availability of a binary at", path); @@ -207,3 +281,8 @@ async function patchelf(dest: vscode.Uri): Promise<void> { }, ); } + +export const _private = { + earliestToolchainPath, + orderFromPath, +}; diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index d9622b4a0d2..3aae0f9ce6e 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -7,21 +7,15 @@ import { Cargo } from "./toolchain"; import type { Ctx } from "./ctx"; import { prepareEnv } from "./run"; import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util"; +import type { Config } from "./config"; const debugOutput = vscode.window.createOutputChannel("Debug"); -type DebugConfigProvider = ( - runnable: ra.Runnable, - runnableArgs: ra.CargoRunnableArgs, - executable: string, - env: Record<string, string>, - sourceFileMap?: Record<string, string>, -) => vscode.DebugConfiguration; export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> { const scope = ctx.activeRustEditor?.document.uri; if (!scope) return; - const debugConfig = await getDebugConfiguration(ctx, runnable); + const debugConfig = await getDebugConfiguration(ctx.config, runnable, false); if (!debugConfig) return; const wsLaunchSection = vscode.workspace.getConfiguration("launch", scope); @@ -57,7 +51,7 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis message = " (from launch.json)"; debugOutput.clear(); } else { - debugConfig = await getDebugConfiguration(ctx, runnable); + debugConfig = await getDebugConfiguration(ctx.config, runnable); } if (!debugConfig) return 
false; @@ -74,35 +68,35 @@ function createCommandLink(extensionId: string): string { } async function getDebugConfiguration( - ctx: Ctx, + config: Config, runnable: ra.Runnable, + inheritEnv: boolean = true, ): Promise<vscode.DebugConfiguration | undefined> { if (!isCargoRunnableArgs(runnable.args)) { return; } const runnableArgs: ra.CargoRunnableArgs = runnable.args; - const editor = ctx.activeRustEditor; - if (!editor) return; + const debugOptions = config.debug; - const knownEngines: Record<string, DebugConfigProvider> = { - "vadimcn.vscode-lldb": getCodeLldbDebugConfig, - "ms-vscode.cpptools": getCCppDebugConfig, - "webfreak.debug": getNativeDebugConfig, - }; - const debugOptions = ctx.config.debug; + let provider: null | KnownEnginesType = null; - let debugEngine = null; if (debugOptions.engine === "auto") { - for (var engineId in knownEngines) { - debugEngine = vscode.extensions.getExtension(engineId); - if (debugEngine) break; + for (const engineId in knownEngines) { + const debugEngine = vscode.extensions.getExtension(engineId); + if (debugEngine) { + provider = knownEngines[engineId as keyof typeof knownEngines]; + break; + } } } else if (debugOptions.engine) { - debugEngine = vscode.extensions.getExtension(debugOptions.engine); + const debugEngine = vscode.extensions.getExtension(debugOptions.engine); + if (debugEngine && Object.keys(knownEngines).includes(debugOptions.engine)) { + provider = knownEngines[debugOptions.engine as keyof typeof knownEngines]; + } } - if (!debugEngine) { + if (!provider) { const commandCCpp: string = createCommandLink("ms-vscode.cpptools"); const commandCodeLLDB: string = createCommandLink("vadimcn.vscode-lldb"); const commandNativeDebug: string = createCommandLink("webfreak.debug"); @@ -116,7 +110,7 @@ async function getDebugConfiguration( } debugOutput.clear(); - if (ctx.config.debug.openDebugPane) { + if (config.debug.openDebugPane) { debugOutput.show(true); } // folder exists or RA is not active. @@ -131,37 +125,36 @@ async function getDebugConfiguration( firstWorkspace; const workspace = unwrapUndefinable(maybeWorkspace); - const wsFolder = path.normalize(workspace.uri.fsPath); + let wsFolder = path.normalize(workspace.uri.fsPath); + if (os.platform() === "win32") { + // in windows, the drive letter can vary in casing for VSCode, so we gotta normalize that first + wsFolder = wsFolder.replace(/^[a-z]:\\/, (c) => c.toUpperCase()); + } + const workspaceQualifier = isMultiFolderWorkspace ? 
`:${workspace.name}` : ""; function simplifyPath(p: string): string { + // in windows, the drive letter can vary in casing for VSCode, so we gotta normalize that first + if (os.platform() === "win32") { + p = p.replace(/^[a-z]:\\/, (c) => c.toUpperCase()); + } // see https://github.com/rust-lang/rust-analyzer/pull/5513#issuecomment-663458818 for why this is needed return path.normalize(p).replace(wsFolder, `\${workspaceFolder${workspaceQualifier}}`); } - const env = prepareEnv(runnable.label, runnableArgs, ctx.config.runnablesExtraEnv); + const env = prepareEnv(inheritEnv, runnable.label, runnableArgs, config.runnablesExtraEnv); const executable = await getDebugExecutable(runnableArgs, env); let sourceFileMap = debugOptions.sourceFileMap; if (sourceFileMap === "auto") { sourceFileMap = {}; - const sysroot = env["RUSTC_TOOLCHAIN"]; - if (sysroot) { - // let's try to use the default toolchain - const data = await execute(`rustc -V -v`, { cwd: wsFolder, env }); - const rx = /commit-hash:\s(.*)$/m; - - const commitHash = rx.exec(data)?.[1]; - if (commitHash) { - const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust"); - sourceFileMap[`/rustc/${commitHash}/`] = rustlib; - } - } + await discoverSourceFileMap(sourceFileMap, env, wsFolder); } - const provider = unwrapUndefinable(knownEngines[debugEngine.id]); - const debugConfig = provider( + const debugConfig = getDebugConfig( + provider, + simplifyPath, runnable, runnableArgs, - simplifyPath(executable), + executable, env, sourceFileMap, ); @@ -186,6 +179,92 @@ async function getDebugConfiguration( return debugConfig; } +async function discoverSourceFileMap( + sourceFileMap: Record<string, string>, + env: Record<string, string>, + cwd: string, +) { + const sysroot = env["RUSTC_TOOLCHAIN"]; + if (sysroot) { + // let's try to use the default toolchain + const data = await execute(`rustc -V -v`, { cwd, env }); + const rx = /commit-hash:\s(.*)$/m; + + const commitHash = rx.exec(data)?.[1]; + if (commitHash) { + const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust"); + sourceFileMap[`/rustc/${commitHash}/`] = rustlib; + } + } +} + +type PropertyFetcher<Config, Input, Key extends keyof Config> = ( + input: Input, +) => [Key, Config[Key]]; + +type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfig<Type>> = { + executableProperty: keyof DebugConfig; + environmentProperty: PropertyFetcher<DebugConfig, Record<string, string>, keyof DebugConfig>; + runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>; + sourceFileMapProperty?: keyof DebugConfig; + type: Type; + additional?: Record<string, unknown>; +}; + +type KnownEnginesType = (typeof knownEngines)[keyof typeof knownEngines]; +const knownEngines: { + "vadimcn.vscode-lldb": DebugConfigProvider<"lldb", CodeLldbDebugConfig>; + "ms-vscode.cpptools": DebugConfigProvider<"cppvsdbg" | "cppdbg", CCppDebugConfig>; + "webfreak.debug": DebugConfigProvider<"gdb", NativeDebugConfig>; +} = { + "vadimcn.vscode-lldb": { + type: "lldb", + executableProperty: "program", + environmentProperty: (env) => ["env", env], + runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [ + "args", + runnableArgs.executableArgs, + ], + sourceFileMapProperty: "sourceMap", + additional: { + sourceLanguages: ["rust"], + }, + }, + "ms-vscode.cpptools": { + type: os.platform() === "win32" ? 
"cppvsdbg" : "cppdbg", + executableProperty: "program", + environmentProperty: (env) => [ + "environment", + Object.entries(env).map((entry) => ({ + name: entry[0], + value: entry[1], + })), + ], + runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [ + "args", + runnableArgs.executableArgs, + ], + sourceFileMapProperty: "sourceFileMap", + additional: { + osx: { + MIMode: "lldb", + }, + }, + }, + "webfreak.debug": { + type: "gdb", + executableProperty: "target", + runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [ + "arguments", + quote(runnableArgs.executableArgs), + ], + environmentProperty: (env) => ["env", env], + additional: { + valuesFormatting: "prettyPrinters", + }, + }, +}; + async function getDebugExecutable( runnableArgs: ra.CargoRunnableArgs, env: Record<string, string>, @@ -197,71 +276,74 @@ async function getDebugExecutable( return executable; } -function getCCppDebugConfig( - runnable: ra.Runnable, - runnableArgs: ra.CargoRunnableArgs, - executable: string, - env: Record<string, string>, - sourceFileMap?: Record<string, string>, -): vscode.DebugConfiguration { - return { - type: os.platform() === "win32" ? "cppvsdbg" : "cppdbg", - request: "launch", - name: runnable.label, - program: executable, - args: runnableArgs.executableArgs, - cwd: runnable.args.cwd || runnableArgs.workspaceRoot || ".", - sourceFileMap, - environment: Object.entries(env).map((entry) => ({ - name: entry[0], - value: entry[1], - })), - // See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941 - osx: { - MIMode: "lldb", - }, - }; -} +type BaseDebugConfig<type extends string> = { + type: type; + request: "launch"; + name: string; + cwd: string; +}; -function getCodeLldbDebugConfig( +function getDebugConfig( + provider: KnownEnginesType, + simplifyPath: (p: string) => string, runnable: ra.Runnable, runnableArgs: ra.CargoRunnableArgs, executable: string, env: Record<string, string>, sourceFileMap?: Record<string, string>, ): vscode.DebugConfiguration { + const { + environmentProperty, + executableProperty, + runnableArgsProperty, + type, + additional, + sourceFileMapProperty, + } = provider; + const [envProperty, envValue] = environmentProperty(env); + const [argsProperty, argsValue] = runnableArgsProperty(runnableArgs); return { - type: "lldb", + type, request: "launch", name: runnable.label, - program: executable, - args: runnableArgs.executableArgs, - cwd: runnable.args.cwd || runnableArgs.workspaceRoot || ".", - sourceMap: sourceFileMap, - sourceLanguages: ["rust"], - env, + cwd: simplifyPath(runnable.args.cwd || runnableArgs.workspaceRoot || "."), + [executableProperty]: simplifyPath(executable), + [envProperty]: envValue, + [argsProperty]: argsValue, + ...(sourceFileMapProperty ? 
{ [sourceFileMapProperty]: sourceFileMap } : {}), + ...additional, }; } -function getNativeDebugConfig( - runnable: ra.Runnable, - runnableArgs: ra.CargoRunnableArgs, - executable: string, - env: Record<string, string>, - _sourceFileMap?: Record<string, string>, -): vscode.DebugConfiguration { - return { - type: "gdb", - request: "launch", - name: runnable.label, - target: executable, - // See https://github.com/WebFreak001/code-debug/issues/359 - arguments: quote(runnableArgs.executableArgs), - cwd: runnable.args.cwd || runnableArgs.workspaceRoot || ".", - env, - valuesFormatting: "prettyPrinters", +type CCppDebugConfig = { + program: string; + args: string[]; + sourceFileMap: Record<string, string> | undefined; + environment: { + name: string; + value: string; + }[]; + // See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941 + osx: { + MIMode: "lldb"; }; -} +} & BaseDebugConfig<"cppvsdbg" | "cppdbg">; + +type CodeLldbDebugConfig = { + program: string; + args: string[]; + sourceMap: Record<string, string> | undefined; + sourceLanguages: ["rust"]; + env: Record<string, string>; +} & BaseDebugConfig<"lldb">; + +type NativeDebugConfig = { + target: string; + // See https://github.com/WebFreak001/code-debug/issues/359 + arguments: string; + env: Record<string, string>; + valuesFormatting: "prettyPrinters"; +} & BaseDebugConfig<"gdb">; // Based on https://github.com/ljharb/shell-quote/blob/main/quote.js function quote(xs: string[]) { diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 7179eb37447..dd0da6b62c8 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -65,9 +65,14 @@ export class RunnableQuickPick implements vscode.QuickPickItem { } } -export function prepareBaseEnv(base?: Record<string, string>): Record<string, string> { +export function prepareBaseEnv( + inheritEnv: boolean, + base?: Record<string, string>, +): Record<string, string> { const env: Record<string, string> = { RUST_BACKTRACE: "short" }; - Object.assign(env, process.env); + if (inheritEnv) { + Object.assign(env, process.env); + } if (base) { Object.assign(env, base); } @@ -75,11 +80,12 @@ export function prepareBaseEnv(base?: Record<string, string>): Record<string, st } export function prepareEnv( + inheritEnv: boolean, label: string, runnableArgs: ra.CargoRunnableArgs, runnableEnvCfg?: RunnableEnvCfg, ): Record<string, string> { - const env = prepareBaseEnv(runnableArgs.environment); + const env = prepareBaseEnv(inheritEnv, runnableArgs.environment); const platform = process.platform; const checkPlatform = (it: RunnableEnvCfgItem) => { @@ -134,7 +140,7 @@ export async function createTaskFromRunnable( }; options = { cwd: runnableArgs.workspaceRoot || ".", - env: prepareEnv(runnable.label, runnableArgs, config.runnablesExtraEnv), + env: prepareEnv(true, runnable.label, runnableArgs, config.runnablesExtraEnv), }; } else { const runnableArgs = runnable.args; @@ -145,7 +151,7 @@ export async function createTaskFromRunnable( }; options = { cwd: runnableArgs.cwd, - env: prepareBaseEnv(), + env: prepareBaseEnv(true), }; } diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/bootstrap.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/bootstrap.test.ts new file mode 100644 index 00000000000..8aeb72180a0 --- /dev/null +++ b/src/tools/rust-analyzer/editors/code/tests/unit/bootstrap.test.ts @@ -0,0 +1,96 @@ +import * as assert from "assert"; +import { 
_private } from "../../src/bootstrap"; +import type { Context } from "."; + +export async function getTests(ctx: Context) { + await ctx.suite("Bootstrap/Select toolchain RA", (suite) => { + suite.addTest("Order of nightly RA", async () => { + assert.deepStrictEqual( + _private.orderFromPath( + "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer", + function (path: string) { + assert.deepStrictEqual( + path, + "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer", + ); + return "rust-analyzer 1.67.0-nightly (b7bc90fe 2022-11-21)"; + }, + ), + "0-2022-11-21/0", + ); + }); + + suite.addTest("Order of versioned RA", async () => { + assert.deepStrictEqual( + _private.orderFromPath( + "/Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer", + function (path: string) { + assert.deepStrictEqual( + path, + "/Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer", + ); + return "rust-analyzer 1.72.1 (d5c2e9c3 2023-09-13)"; + }, + ), + "0-2023-09-13/1", + ); + }); + + suite.addTest("Order of versioned RA when unable to obtain version date", async () => { + assert.deepStrictEqual( + _private.orderFromPath( + "/Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer", + function () { + return "rust-analyzer 1.72.1"; + }, + ), + "2", + ); + }); + + suite.addTest("Order of stable RA", async () => { + assert.deepStrictEqual( + _private.orderFromPath( + "/Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer", + function (path: string) { + assert.deepStrictEqual( + path, + "/Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer", + ); + return "rust-analyzer 1.79.0 (129f3b99 2024-06-10)"; + }, + ), + "0-2024-06-10/1", + ); + }); + + suite.addTest("Order with invalid path to RA", async () => { + assert.deepStrictEqual( + _private.orderFromPath("some-weird-path", function () { + return undefined; + }), + "2", + ); + }); + + suite.addTest("Earliest RA between nightly and stable", async () => { + assert.deepStrictEqual( + _private.earliestToolchainPath( + "/Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer", + "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer", + function (path: string) { + if ( + path === + "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer" + ) { + return "rust-analyzer 1.67.0-nightly (b7bc90fe 2022-11-21)"; + } else { + return "rust-analyzer 1.79.0 (129f3b99 2024-06-10)"; + } + }, + ), + "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer", + ); + }); + }); +} diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts index 81850e03f1c..f0a62a3cce2 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts @@ -19,7 +19,7 @@ function makeRunnable(label: string): ra.Runnable { function fakePrepareEnv(runnableName: string, config?: RunnableEnvCfg): Record<string, string> { const runnable = makeRunnable(runnableName); const runnableArgs = runnable.args as ra.CargoRunnableArgs; - return prepareEnv(runnable.label, runnableArgs, config); + return prepareEnv(false, runnable.label, runnableArgs, config); } export async function getTests(ctx: Context) { diff --git 
a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml index a89eb4b144c..fb3411c8ab4 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml +++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml @@ -10,11 +10,11 @@ edition = "2021" log = "0.4.17" serde_json = "1.0.108" serde = { version = "1.0.192", features = ["derive"] } -crossbeam-channel = "0.5.8" +crossbeam-channel.workspace = true [dev-dependencies] lsp-types = "=0.95" ctrlc = "3.4.1" [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 001b900b207..d4f1703d850 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -1b51d80027919563004918eaadfa0d890ac0eb93 +80eb5a8e910e5185d47cdefe3732d839c78a5e7e diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs index 21001c28da6..6555f225415 100644 --- a/src/tools/rust-analyzer/xtask/src/metrics.rs +++ b/src/tools/rust-analyzer/xtask/src/metrics.rs @@ -117,11 +117,7 @@ impl Metrics { ) -> anyhow::Result<()> { assert!(Path::new(path).exists(), "unable to find bench in {path}"); eprintln!("\nMeasuring analysis-stats/{name}"); - let output = cmd!( - sh, - "./target/release/rust-analyzer -q analysis-stats {path} --query-sysroot-metadata" - ) - .read()?; + let output = cmd!(sh, "./target/release/rust-analyzer -q analysis-stats {path}").read()?; for (metric, value, unit) in parse_metrics(&output) { self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into()); } diff --git a/src/tools/rust-analyzer/xtask/src/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs index e85f5182865..ea51d33ed9c 100644 --- a/src/tools/rust-analyzer/xtask/src/tidy.rs +++ b/src/tools/rust-analyzer/xtask/src/tidy.rs @@ -134,6 +134,7 @@ Apache-2.0 OR MIT Apache-2.0 WITH LLVM-exception Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT Apache-2.0/MIT +BSD-2-Clause OR Apache-2.0 OR MIT BSD-3-Clause CC0-1.0 ISC diff --git a/src/tools/rustc-perf b/src/tools/rustc-perf -Subproject c64bb60dd1636922b1ccbb82867bed934a99dbc +Subproject d5055e78042c739deeb3fe0bef83fde0e5cc259 diff --git a/src/tools/rustc-perf-wrapper/src/main.rs b/src/tools/rustc-perf-wrapper/src/main.rs index 991f4ea15ed..951d36b788b 100644 --- a/src/tools/rustc-perf-wrapper/src/main.rs +++ b/src/tools/rustc-perf-wrapper/src/main.rs @@ -1,3 +1,4 @@ +use std::fs::create_dir_all; use std::path::PathBuf; use std::process::Command; @@ -18,9 +19,6 @@ pub struct Args { cmd: PerfCommand, #[clap(flatten)] - opts: SharedOpts, - - #[clap(flatten)] ctx: BuildContext, } @@ -28,22 +26,37 @@ pub struct Args { enum PerfCommand { /// Run `profile_local eprintln`. /// This executes the compiler on the given benchmarks and stores its stderr output. - Eprintln, + Eprintln { + #[clap(flatten)] + opts: SharedOpts, + }, /// Run `profile_local samply` /// This executes the compiler on the given benchmarks and profiles it with `samply`. /// You need to install `samply`, e.g. using `cargo install samply`. - Samply, + Samply { + #[clap(flatten)] + opts: SharedOpts, + }, /// Run `profile_local cachegrind`. /// This executes the compiler on the given benchmarks under `Cachegrind`. 
- Cachegrind, -} - -impl PerfCommand { - fn is_profiling(&self) -> bool { - match self { - PerfCommand::Eprintln | PerfCommand::Samply | PerfCommand::Cachegrind => true, - } - } + Cachegrind { + #[clap(flatten)] + opts: SharedOpts, + }, + Benchmark { + /// Identifier to associate benchmark results with + id: String, + + #[clap(flatten)] + opts: SharedOpts, + }, + Compare { + /// The name of the base artifact to be compared. + base: String, + + /// The name of the modified artifact to be compared. + modified: String, + }, } #[derive(Debug, clap::Parser)] @@ -52,6 +65,11 @@ struct SharedOpts { /// If unspecified, all benchmarks will be executed. #[clap(long, global = true, value_delimiter = ',')] include: Vec<String>, + + /// Select the benchmarks matching a prefix in this comma-separated list that you don't want to run. + #[clap(long, global = true, value_delimiter = ',')] + exclude: Vec<String>, + /// Select the scenarios that should be benchmarked. #[clap( long, @@ -87,35 +105,67 @@ fn main() { fn run(args: Args) { let mut cmd = Command::new(args.ctx.collector); + let db_path = args.ctx.results_dir.join("results.db"); + match &args.cmd { - PerfCommand::Eprintln => { - cmd.arg("profile_local").arg("eprintln"); + PerfCommand::Eprintln { opts } + | PerfCommand::Samply { opts } + | PerfCommand::Cachegrind { opts } => { + cmd.arg("profile_local"); + cmd.arg(match &args.cmd { + PerfCommand::Eprintln { .. } => "eprintln", + PerfCommand::Samply { .. } => "samply", + PerfCommand::Cachegrind { .. } => "cachegrind", + _ => unreachable!(), + }); + + cmd.arg("--out-dir").arg(&args.ctx.results_dir); + + apply_shared_opts(&mut cmd, opts); + execute_benchmark(&mut cmd, &args.ctx.compiler); + + println!("You can find the results at `{}`", args.ctx.results_dir.display()); } - PerfCommand::Samply => { - cmd.arg("profile_local").arg("samply"); + PerfCommand::Benchmark { id, opts } => { + cmd.arg("bench_local"); + cmd.arg("--db").arg(&db_path); + cmd.arg("--id").arg(id); + + apply_shared_opts(&mut cmd, opts); + create_dir_all(&args.ctx.results_dir).unwrap(); + execute_benchmark(&mut cmd, &args.ctx.compiler); } - PerfCommand::Cachegrind => { - cmd.arg("profile_local").arg("cachegrind"); + PerfCommand::Compare { base, modified } => { + cmd.arg("bench_cmp"); + cmd.arg("--db").arg(&db_path); + cmd.arg(base).arg(modified); + + create_dir_all(&args.ctx.results_dir).unwrap(); + cmd.status().expect("error while running rustc-perf bench_cmp"); } } - if args.cmd.is_profiling() { - cmd.arg("--out-dir").arg(&args.ctx.results_dir); - } +} - if !args.opts.include.is_empty() { - cmd.arg("--include").arg(args.opts.include.join(",")); +fn apply_shared_opts(cmd: &mut Command, opts: &SharedOpts) { + if !opts.include.is_empty() { + cmd.arg("--include").arg(opts.include.join(",")); } - if !args.opts.profiles.is_empty() { + if !opts.exclude.is_empty() { + cmd.arg("--exclude").arg(opts.exclude.join(",")); + } + if !opts.profiles.is_empty() { cmd.arg("--profiles") - .arg(args.opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(",")); + .arg(opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(",")); } - if !args.opts.scenarios.is_empty() { + if !opts.scenarios.is_empty() { cmd.arg("--scenarios") - .arg(args.opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(",")); + .arg(opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(",")); } - cmd.arg(&args.ctx.compiler); +} - println!("Running `rustc-perf` using `{}`", args.ctx.compiler.display()); +fn 
execute_benchmark(cmd: &mut Command, compiler: &PathBuf) { + cmd.arg(compiler); + println!("Running `rustc-perf` using `{}`", compiler.display()); const MANIFEST_DIR: &str = env!("CARGO_MANIFEST_DIR"); @@ -125,8 +175,4 @@ fn run(args: Args) { // with compile-time benchmarks. let cmd = cmd.current_dir(rustc_perf_dir); cmd.status().expect("error while running rustc-perf collector"); - - if args.cmd.is_profiling() { - println!("You can find the results at `{}`", args.ctx.results_dir.display()); - } } diff --git a/src/tools/rustdoc/main.rs b/src/tools/rustdoc/main.rs index 5b499a1fa1f..d4099cafe5d 100644 --- a/src/tools/rustdoc/main.rs +++ b/src/tools/rustdoc/main.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to `rustc_driver`. +#![feature(rustc_private)] + fn main() { rustdoc::main() } diff --git a/src/tools/rustfmt/src/git-rustfmt/main.rs b/src/tools/rustfmt/src/git-rustfmt/main.rs index 3059d917c6b..5674f40bef9 100644 --- a/src/tools/rustfmt/src/git-rustfmt/main.rs +++ b/src/tools/rustfmt/src/git-rustfmt/main.rs @@ -1,3 +1,7 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to +// `rustc_driver`. +#![feature(rustc_private)] + #[macro_use] extern crate tracing; diff --git a/src/tools/tidy/src/allowed_run_make_makefiles.txt b/src/tools/tidy/src/allowed_run_make_makefiles.txt index 14f0a9cd23d..bc446555773 100644 --- a/src/tools/tidy/src/allowed_run_make_makefiles.txt +++ b/src/tools/tidy/src/allowed_run_make_makefiles.txt @@ -13,14 +13,10 @@ run-make/libtest-json/Makefile run-make/libtest-junit/Makefile run-make/libtest-thread-limit/Makefile run-make/macos-deployment-target/Makefile -run-make/min-global-align/Makefile run-make/native-link-modifier-bundle/Makefile -run-make/no-alloc-shim/Makefile -run-make/remap-path-prefix-dwarf/Makefile run-make/reproducible-build/Makefile run-make/rlib-format-packed-bundled-libs/Makefile run-make/split-debuginfo/Makefile run-make/symbol-mangling-hashed/Makefile -run-make/sysroot-crates-are-unstable/Makefile run-make/translation/Makefile run-make/x86_64-fortanix-unknown-sgx-lvi/Makefile diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs index b7ddf47186d..c650fd0eec6 100644 --- a/src/tools/tidy/src/pal.rs +++ b/src/tools/tidy/src/pal.rs @@ -36,6 +36,7 @@ use crate::walk::{filter_dirs, walk}; // Paths that may contain platform-specific code. 
const EXCEPTION_PATHS: &[&str] = &[ + "library/windows_targets", "library/panic_abort", "library/panic_unwind", "library/unwind", diff --git a/tests/assembly/asm/global_asm.rs b/tests/assembly/asm/global_asm.rs index 22cf4bdb15b..8a4bf98c745 100644 --- a/tests/assembly/asm/global_asm.rs +++ b/tests/assembly/asm/global_asm.rs @@ -4,7 +4,6 @@ //@ compile-flags: -C llvm-args=--x86-asm-syntax=intel //@ compile-flags: -C symbol-mangling-version=v0 -#![feature(asm_const)] #![crate_type = "rlib"] use std::arch::global_asm; diff --git a/tests/assembly/asm/msp430-types.rs b/tests/assembly/asm/msp430-types.rs index 4f51d4020a6..ae09b8b070d 100644 --- a/tests/assembly/asm/msp430-types.rs +++ b/tests/assembly/asm/msp430-types.rs @@ -2,7 +2,7 @@ //@ compile-flags: --target msp430-none-elf //@ needs-llvm-components: msp430 -#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch, asm_const)] +#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)] #![crate_type = "rlib"] #![no_core] #![allow(non_camel_case_types)] diff --git a/tests/assembly/powerpc64-struct-abi.rs b/tests/assembly/powerpc64-struct-abi.rs new file mode 100644 index 00000000000..9a3540d8b41 --- /dev/null +++ b/tests/assembly/powerpc64-struct-abi.rs @@ -0,0 +1,132 @@ +//@ revisions: elfv1-be elfv2-be elfv2-le +//@ assembly-output: emit-asm +//@ compile-flags: -O +//@[elfv1-be] compile-flags: --target powerpc64-unknown-linux-gnu +//@[elfv1-be] needs-llvm-components: powerpc +//@[elfv2-be] compile-flags: --target powerpc64-unknown-linux-musl +//@[elfv2-be] needs-llvm-components: powerpc +//@[elfv2-le] compile-flags: --target powerpc64le-unknown-linux-gnu +//@[elfv2-le] needs-llvm-components: powerpc +//@[elfv1-be] filecheck-flags: --check-prefix be +//@[elfv2-be] filecheck-flags: --check-prefix be + +#![feature(no_core, lang_items)] +#![no_std] +#![no_core] +#![crate_type = "lib"] + +#[lang = "sized"] +trait Sized {} + +#[lang = "copy"] +trait Copy {} + +#[lang = "freeze"] +trait Freeze {} + +#[lang = "unpin"] +trait Unpin {} + +impl Copy for u8 {} +impl Copy for u16 {} +impl Copy for u32 {} +impl Copy for FiveU32s {} +impl Copy for FiveU16s {} +impl Copy for ThreeU8s {} + +#[repr(C)] +struct FiveU32s(u32, u32, u32, u32, u32); + +#[repr(C)] +struct FiveU16s(u16, u16, u16, u16, u16); + +#[repr(C)] +struct ThreeU8s(u8, u8, u8); + +// CHECK-LABEL: read_large +// be: lwz [[REG1:.*]], 16(4) +// be-NEXT: stw [[REG1]], 16(3) +// be-NEXT: ld [[REG2:.*]], 8(4) +// be-NEXT: ld [[REG3:.*]], 0(4) +// be-NEXT: std [[REG2]], 8(3) +// be-NEXT: std [[REG3]], 0(3) +// elfv2-le: lxvd2x [[REG1:.*]], 0, 4 +// elfv2-le-NEXT: lwz [[REG2:.*]], 16(4) +// elfv2-le-NEXT: stw [[REG2]], 16(3) +// elfv2-le-NEXT: stxvd2x [[REG1]], 0, 3 +// CHECK-NEXT: blr +#[no_mangle] +extern "C" fn read_large(x: &FiveU32s) -> FiveU32s { + *x +} + +// CHECK-LABEL: read_medium +// elfv1-be: lhz [[REG1:.*]], 8(4) +// elfv1-be-NEXT: ld [[REG2:.*]], 0(4) +// elfv1-be-NEXT: sth [[REG1]], 8(3) +// elfv1-be-NEXT: std [[REG2]], 0(3) +// elfv2-be: lhz [[REG1:.*]], 8(3) +// elfv2-be-NEXT: ld 3, 0(3) +// elfv2-be-NEXT: sldi 4, [[REG1]], 48 +// elfv2-le: ld [[REG1:.*]], 0(3) +// elfv2-le-NEXT: lhz 4, 8(3) +// elfv2-le-NEXT: mr 3, [[REG1]] +// CHECK-NEXT: blr +#[no_mangle] +extern "C" fn read_medium(x: &FiveU16s) -> FiveU16s { + *x +} + +// CHECK-LABEL: read_small +// elfv1-be: lbz [[REG1:.*]], 2(4) +// elfv1-be-NEXT: lhz [[REG2:.*]], 0(4) +// elfv1-be-NEXT: stb [[REG1]], 2(3) +// elfv1-be-NEXT: sth [[REG2]], 0(3) +// elfv2-be: lhz [[REG1:.*]], 0(3) +// elfv2-be-NEXT: lbz 3, 
2(3) +// elfv2-be-NEXT: rldimi 3, [[REG1]], 8, 0 +// elfv2-le: lbz [[REG1:.*]], 2(3) +// elfv2-le-NEXT: lhz 3, 0(3) +// elfv2-le-NEXT: rldimi 3, [[REG1]], 16, 0 +// CHECK-NEXT: blr +#[no_mangle] +extern "C" fn read_small(x: &ThreeU8s) -> ThreeU8s { + *x +} + +// CHECK-LABEL: write_large +// CHECK: std 3, 0(6) +// be-NEXT: rldicl [[REG1:.*]], 5, 32, 32 +// CHECK-NEXT: std 4, 8(6) +// be-NEXT: stw [[REG1]], 16(6) +// elfv2-le-NEXT: stw 5, 16(6) +// CHECK-NEXT: blr +#[no_mangle] +extern "C" fn write_large(x: FiveU32s, dest: &mut FiveU32s) { + *dest = x; +} + +// CHECK-LABEL: write_medium +// CHECK: std 3, 0(5) +// be-NEXT: rldicl [[REG1:.*]], 4, 16, 48 +// be-NEXT: sth [[REG1]], 8(5) +// elfv2-le-NEXT: sth 4, 8(5) +// CHECK-NEXT: blr +#[no_mangle] +extern "C" fn write_medium(x: FiveU16s, dest: &mut FiveU16s) { + *dest = x; +} + +// CHECK-LABEL: write_small +// be: stb 3, 2(4) +// be-NEXT: srwi [[REG1:.*]], 3, 8 +// be-NEXT: sth [[REG1]], 0(4) +// The order these instructions are emitted in changed in LLVM 18. +// elfv2-le-DAG: sth 3, 0(4) +// elfv2-le-DAG: srwi [[REG1:.*]], 3, 16 +// elfv2-le-NEXT: stb [[REG1]], 2(4) +// CHECK-NEXT: blr +#[no_mangle] +extern "C" fn write_small(x: ThreeU8s, dest: &mut ThreeU8s) { + *dest = x; +} diff --git a/tests/assembly/targets/targets-elf.rs b/tests/assembly/targets/targets-elf.rs index 762df40a44b..c3a083321e2 100644 --- a/tests/assembly/targets/targets-elf.rs +++ b/tests/assembly/targets/targets-elf.rs @@ -345,6 +345,9 @@ //@ revisions: powerpc_unknown_linux_musl //@ [powerpc_unknown_linux_musl] compile-flags: --target powerpc-unknown-linux-musl //@ [powerpc_unknown_linux_musl] needs-llvm-components: powerpc +//@ revisions: powerpc_unknown_linux_muslspe +//@ [powerpc_unknown_linux_muslspe] compile-flags: --target powerpc-unknown-linux-muslspe +//@ [powerpc_unknown_linux_muslspe] needs-llvm-components: powerpc //@ revisions: powerpc_unknown_netbsd //@ [powerpc_unknown_netbsd] compile-flags: --target powerpc-unknown-netbsd //@ [powerpc_unknown_netbsd] needs-llvm-components: powerpc diff --git a/tests/codegen-units/item-collection/auxiliary/cgu_extern_closures.rs b/tests/codegen-units/item-collection/auxiliary/cgu_extern_closures.rs index 0192a3d4188..33a32d7fea9 100644 --- a/tests/codegen-units/item-collection/auxiliary/cgu_extern_closures.rs +++ b/tests/codegen-units/item-collection/auxiliary/cgu_extern_closures.rs @@ -1,3 +1,5 @@ +//@ compile-flags: -Zinline-mir=no + #![crate_type = "lib"] #[inline] diff --git a/tests/codegen-units/item-collection/cross-crate-closures.rs b/tests/codegen-units/item-collection/cross-crate-closures.rs index 2dab19401ed..cb86cf18c0c 100644 --- a/tests/codegen-units/item-collection/cross-crate-closures.rs +++ b/tests/codegen-units/item-collection/cross-crate-closures.rs @@ -1,9 +1,6 @@ -// In the current version of the collector that still has to support -// legacy-codegen, closures do not generate their own MonoItems, so we are -// ignoring this test until MIR codegen has taken over completely -//@ ignore-test - -//@ compile-flags:-Zprint-mono-items=eager +// We need to disable MIR inlining in both this and its aux-build crate. The MIR inliner +// will just inline everything into our start function if we let it. As it should. 
+//@ compile-flags:-Zprint-mono-items=eager -Zinline-mir=no #![deny(dead_code)] #![feature(start)] @@ -11,15 +8,15 @@ //@ aux-build:cgu_extern_closures.rs extern crate cgu_extern_closures; -//~ MONO_ITEM fn cross_crate_closures::start[0] +//~ MONO_ITEM fn start @@ cross_crate_closures-cgu.0[Internal] #[start] fn start(_: isize, _: *const *const u8) -> isize { - //~ MONO_ITEM fn cgu_extern_closures::inlined_fn[0] - //~ MONO_ITEM fn cgu_extern_closures::inlined_fn[0]::{{closure}}[0] + //~ MONO_ITEM fn cgu_extern_closures::inlined_fn @@ cross_crate_closures-cgu.0[Internal] + //~ MONO_ITEM fn cgu_extern_closures::inlined_fn::{closure#0} @@ cross_crate_closures-cgu.0[Internal] let _ = cgu_extern_closures::inlined_fn(1, 2); - //~ MONO_ITEM fn cgu_extern_closures::inlined_fn_generic[0]<i32> - //~ MONO_ITEM fn cgu_extern_closures::inlined_fn_generic[0]::{{closure}}[0]<i32> + //~ MONO_ITEM fn cgu_extern_closures::inlined_fn_generic::<i32> @@ cross_crate_closures-cgu.0[Internal] + //~ MONO_ITEM fn cgu_extern_closures::inlined_fn_generic::<i32>::{closure#0} @@ cross_crate_closures-cgu.0[Internal] let _ = cgu_extern_closures::inlined_fn_generic(3, 4, 5i32); // Nothing should be generated for this call, we just link to the instance @@ -28,5 +25,3 @@ fn start(_: isize, _: *const *const u8) -> isize { 0 } - -//~ MONO_ITEM drop-glue i8 diff --git a/tests/codegen-units/item-collection/non-generic-closures.rs b/tests/codegen-units/item-collection/non-generic-closures.rs index 105348e9d09..dc0846f2cd3 100644 --- a/tests/codegen-units/item-collection/non-generic-closures.rs +++ b/tests/codegen-units/item-collection/non-generic-closures.rs @@ -1,49 +1,45 @@ -// In the current version of the collector that still has to support -// legacy-codegen, closures do not generate their own MonoItems, so we are -// ignoring this test until MIR codegen has taken over completely -//@ ignore-test - -// -//@ compile-flags:-Zprint-mono-items=eager +//@ compile-flags:-Zprint-mono-items=eager -Zinline-mir=no #![deny(dead_code)] #![feature(start)] -//~ MONO_ITEM fn non_generic_closures::temporary[0] +//~ MONO_ITEM fn temporary @@ non_generic_closures-cgu.0[Internal] fn temporary() { - //~ MONO_ITEM fn non_generic_closures::temporary[0]::{{closure}}[0] + //~ MONO_ITEM fn temporary::{closure#0} @@ non_generic_closures-cgu.0[Internal] (|a: u32| { let _ = a; })(4); } -//~ MONO_ITEM fn non_generic_closures::assigned_to_variable_but_not_executed[0] +//~ MONO_ITEM fn assigned_to_variable_but_not_executed @@ non_generic_closures-cgu.0[Internal] fn assigned_to_variable_but_not_executed() { - //~ MONO_ITEM fn non_generic_closures::assigned_to_variable_but_not_executed[0]::{{closure}}[0] let _x = |a: i16| { let _ = a + 1; }; } -//~ MONO_ITEM fn non_generic_closures::assigned_to_variable_executed_directly[0] +//~ MONO_ITEM fn assigned_to_variable_executed_indirectly @@ non_generic_closures-cgu.0[Internal] fn assigned_to_variable_executed_indirectly() { - //~ MONO_ITEM fn non_generic_closures::assigned_to_variable_executed_directly[0]::{{closure}}[0] + //~ MONO_ITEM fn assigned_to_variable_executed_indirectly::{closure#0} @@ non_generic_closures-cgu.0[Internal] + //~ MONO_ITEM fn <{closure@TEST_PATH:27:13: 27:21} as std::ops::FnOnce<(i32,)>>::call_once - shim @@ non_generic_closures-cgu.0[Internal] + //~ MONO_ITEM fn <{closure@TEST_PATH:27:13: 27:21} as std::ops::FnOnce<(i32,)>>::call_once - shim(vtable) @@ non_generic_closures-cgu.0[Internal] + //~ MONO_ITEM fn std::ptr::drop_in_place::<{closure@TEST_PATH:27:13: 27:21}> - shim(None) @@ 
non_generic_closures-cgu.0[Internal] let f = |a: i32| { let _ = a + 2; }; run_closure(&f); } -//~ MONO_ITEM fn non_generic_closures::assigned_to_variable_executed_indirectly[0] +//~ MONO_ITEM fn assigned_to_variable_executed_directly @@ non_generic_closures-cgu.0[Internal] fn assigned_to_variable_executed_directly() { - //~ MONO_ITEM fn non_generic_closures::assigned_to_variable_executed_indirectly[0]::{{closure}}[0] + //~ MONO_ITEM fn assigned_to_variable_executed_directly::{closure#0} @@ non_generic_closures-cgu.0[Internal] let f = |a: i64| { let _ = a + 3; }; f(4); } -//~ MONO_ITEM fn non_generic_closures::start[0] +//~ MONO_ITEM fn start @@ non_generic_closures-cgu.0[Internal] #[start] fn start(_: isize, _: *const *const u8) -> isize { temporary(); @@ -54,7 +50,7 @@ fn start(_: isize, _: *const *const u8) -> isize { 0 } -//~ MONO_ITEM fn non_generic_closures::run_closure[0] +//~ MONO_ITEM fn run_closure @@ non_generic_closures-cgu.0[Internal] fn run_closure(f: &Fn(i32)) { f(3); } diff --git a/tests/codegen-units/partitioning/methods-are-with-self-type.rs b/tests/codegen-units/partitioning/methods-are-with-self-type.rs index 901e7507d73..7c9045e8f1a 100644 --- a/tests/codegen-units/partitioning/methods-are-with-self-type.rs +++ b/tests/codegen-units/partitioning/methods-are-with-self-type.rs @@ -1,30 +1,23 @@ -// Currently, all generic functions are instantiated in each codegen unit that -// uses them, even those not marked with #[inline], so this test does not make -// much sense at the moment. -//@ ignore-test - // We specify incremental here because we want to test the partitioning for incremental compilation //@ incremental //@ compile-flags:-Zprint-mono-items=lazy -#![allow(dead_code)] -#![feature(start)] +#![crate_type = "lib"] -struct SomeType; +pub struct SomeType; struct SomeGenericType<T1, T2>(T1, T2); -mod mod1 { +pub mod mod1 { use super::{SomeGenericType, SomeType}; // Even though the impl is in `mod1`, the methods should end up in the // parent module, since that is where their self-type is. 
impl SomeType { - //~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[External] - fn method(&self) {} - - //~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[External] - fn associated_fn() {} + //~ MONO_ITEM fn mod1::<impl SomeType>::method @@ methods_are_with_self_type[External] + pub fn method(&self) {} + //~ MONO_ITEM fn mod1::<impl SomeType>::associated_fn @@ methods_are_with_self_type[External] + pub fn associated_fn() {} } impl<T1, T2> SomeGenericType<T1, T2> { @@ -52,25 +45,20 @@ mod type2 { pub struct Struct; } -//~ MONO_ITEM fn methods_are_with_self_type::start[0] -#[start] -fn start(_: isize, _: *const *const u8) -> isize { - //~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::method[0]<u32, u64> @@ methods_are_with_self_type.volatile[WeakODR] +//~ MONO_ITEM fn start @@ methods_are_with_self_type[External] +pub fn start() { + //~ MONO_ITEM fn mod1::<impl SomeGenericType<u32, u64>>::method @@ methods_are_with_self_type.volatile[External] SomeGenericType(0u32, 0u64).method(); - //~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::associated_fn[0]<char, &str> @@ methods_are_with_self_type.volatile[WeakODR] + //~ MONO_ITEM fn mod1::<impl SomeGenericType<char, &str>>::associated_fn @@ methods_are_with_self_type.volatile[External] SomeGenericType::associated_fn('c', "&str"); - //~ MONO_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR] + //~ MONO_ITEM fn <type1::Struct as Trait>::foo @@ methods_are_with_self_type-type1.volatile[External] type1::Struct.foo(); - //~ MONO_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR] + //~ MONO_ITEM fn <type2::Struct as Trait>::foo @@ methods_are_with_self_type-type2.volatile[External] type2::Struct.foo(); - //~ MONO_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR] + //~ MONO_ITEM fn <type1::Struct as Trait>::default @@ methods_are_with_self_type-type1.volatile[External] type1::Struct.default(); - //~ MONO_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR] + //~ MONO_ITEM fn <type2::Struct as Trait>::default @@ methods_are_with_self_type-type2.volatile[External] type2::Struct.default(); - - 0 } - -//~ MONO_ITEM drop-glue i8 diff --git a/tests/codegen/array-from_fn.rs b/tests/codegen/array-from_fn.rs new file mode 100644 index 00000000000..7202d0c67e6 --- /dev/null +++ b/tests/codegen/array-from_fn.rs @@ -0,0 +1,13 @@ +//@ revisions: NORMAL OPT +//@ [NORMAL] compile-flags: -C opt-level=0 -C debuginfo=2 +//@ [OPT] compile-flags: -C opt-level=s -C debuginfo=0 + +#![crate_type = "lib"] +#![feature(array_from_fn)] + +#[no_mangle] +pub fn iota() -> [u8; 16] { + // OPT-NOT: core..array..Guard + // NORMAL: core..array..Guard + std::array::from_fn(|i| i as _) +} diff --git a/tests/codegen/call-metadata.rs b/tests/codegen/call-metadata.rs index b2168990ff8..73c4b33e2cf 100644 --- a/tests/codegen/call-metadata.rs +++ b/tests/codegen/call-metadata.rs @@ -2,6 +2,7 @@ // scalar value. 
//@ compile-flags: -O -C no-prepopulate-passes +//@ ignore-llvm-version: 19 - 99 #![crate_type = "lib"] diff --git a/tests/codegen/cast-optimized.rs b/tests/codegen/cast-optimized.rs index 313b2b4f0d6..59cf40935cd 100644 --- a/tests/codegen/cast-optimized.rs +++ b/tests/codegen/cast-optimized.rs @@ -20,8 +20,6 @@ pub fn u32_index(c: u32) -> [bool; 22] { // CHECK-LABEL: @char_as_u32_index #[no_mangle] pub fn char_as_u32_index(c: char) -> [bool; 22] { - // CHECK: %[[B:.+]] = icmp ult i32 %c, 1114112 - // CHECK: call void @llvm.assume(i1 %[[B]]) let c = c as u32; let mut array = [false; 22]; diff --git a/tests/codegen/common_prim_int_ptr.rs b/tests/codegen/common_prim_int_ptr.rs index 87fa89abb86..aa7ebb4c911 100644 --- a/tests/codegen/common_prim_int_ptr.rs +++ b/tests/codegen/common_prim_int_ptr.rs @@ -28,7 +28,7 @@ pub fn insert_box(x: Box<()>) -> Result<usize, Box<()>> { // CHECK-LABEL: @extract_int // CHECK-NOT: nonnull -// CHECK-SAME: (i{{[0-9]+}} {{[^,]+}} [[DISCRIMINANT:%[0-9]+]], ptr {{[^,]+}} [[PAYLOAD:%[0-9]+]]) +// CHECK-SAME: (i{{[0-9]+}} {{[^%]+}} [[DISCRIMINANT:%[0-9]+]], ptr {{[^,]+}} [[PAYLOAD:%[0-9]+]]) #[no_mangle] pub unsafe fn extract_int(x: Result<usize, Box<()>>) -> usize { // CHECK: [[TEMP:%.+]] = ptrtoint ptr [[PAYLOAD]] to [[USIZE:i[0-9]+]] @@ -40,7 +40,7 @@ pub unsafe fn extract_int(x: Result<usize, Box<()>>) -> usize { } // CHECK-LABEL: @extract_box -// CHECK-SAME: (i{{[0-9]+}} {{[^,]+}} [[DISCRIMINANT:%[0-9]+]], ptr {{[^,]+}} [[PAYLOAD:%[0-9]+]]) +// CHECK-SAME: (i{{[0-9]+}} {{[^%]+}} [[DISCRIMINANT:%[0-9]+]], ptr {{[^,]+}} [[PAYLOAD:%[0-9]+]]) #[no_mangle] pub unsafe fn extract_box(x: Result<usize, Box<i32>>) -> Box<i32> { // CHECK: ret ptr [[PAYLOAD]] diff --git a/tests/codegen/const-vector.rs b/tests/codegen/const-vector.rs new file mode 100644 index 00000000000..d368838201e --- /dev/null +++ b/tests/codegen/const-vector.rs @@ -0,0 +1,107 @@ +//@ compile-flags: -C no-prepopulate-passes -Copt-level=0 + +// This test checks that constants of SIMD type are passed as immediate vectors. +// We ensure that both vector representations (struct with fields and struct wrapping array) work. 
+#![crate_type = "lib"] +#![feature(abi_unadjusted)] +#![feature(const_trait_impl)] +#![feature(repr_simd)] +#![feature(rustc_attrs)] +#![feature(simd_ffi)] +#![allow(non_camel_case_types)] + +// Setting up structs that can be used as const vectors +#[repr(simd)] +#[derive(Clone)] +pub struct i8x2(i8, i8); + +#[repr(simd)] +#[derive(Clone)] +pub struct i8x2_arr([i8; 2]); + +#[repr(simd)] +#[derive(Clone)] +pub struct f32x2(f32, f32); + +#[repr(simd)] +#[derive(Clone)] +pub struct f32x2_arr([f32; 2]); + +#[repr(simd, packed)] +#[derive(Copy, Clone)] +pub struct Simd<T, const N: usize>([T; N]); + +// The following functions are required for the tests to ensure +// that they are called with a const vector + +extern "unadjusted" { + #[no_mangle] + fn test_i8x2(a: i8x2); +} + +extern "unadjusted" { + #[no_mangle] + fn test_i8x2_two_args(a: i8x2, b: i8x2); +} + +extern "unadjusted" { + #[no_mangle] + fn test_i8x2_mixed_args(a: i8x2, c: i32, b: i8x2); +} + +extern "unadjusted" { + #[no_mangle] + fn test_i8x2_arr(a: i8x2_arr); +} + +extern "unadjusted" { + #[no_mangle] + fn test_f32x2(a: f32x2); +} + +extern "unadjusted" { + #[no_mangle] + fn test_f32x2_arr(a: f32x2_arr); +} + +extern "unadjusted" { + #[no_mangle] + fn test_simd(a: Simd<i32, 4>); +} + +extern "unadjusted" { + #[no_mangle] + fn test_simd_unaligned(a: Simd<i32, 3>); +} + +// Ensure the packed variant of the simd struct does not become a const vector +// if the size is not a power of 2 +// CHECK: %"Simd<i32, 3>" = type { [3 x i32] } + +pub fn do_call() { + unsafe { + // CHECK: call void @test_i8x2(<2 x i8> <i8 32, i8 64> + test_i8x2(const { i8x2(32, 64) }); + + // CHECK: call void @test_i8x2_two_args(<2 x i8> <i8 32, i8 64>, <2 x i8> <i8 8, i8 16> + test_i8x2_two_args(const { i8x2(32, 64) }, const { i8x2(8, 16) }); + + // CHECK: call void @test_i8x2_mixed_args(<2 x i8> <i8 32, i8 64>, i32 43, <2 x i8> <i8 8, i8 16> + test_i8x2_mixed_args(const { i8x2(32, 64) }, 43, const { i8x2(8, 16) }); + + // CHECK: call void @test_i8x2_arr(<2 x i8> <i8 32, i8 64> + test_i8x2_arr(const { i8x2_arr([32, 64]) }); + + // CHECK: call void @test_f32x2(<2 x float> <float 0x3FD47AE140000000, float 0x3FE47AE140000000> + test_f32x2(const { f32x2(0.32, 0.64) }); + + // CHECK: void @test_f32x2_arr(<2 x float> <float 0x3FD47AE140000000, float 0x3FE47AE140000000> + test_f32x2_arr(const { f32x2_arr([0.32, 0.64]) }); + + // CHECK: call void @test_simd(<4 x i32> <i32 2, i32 4, i32 6, i32 8> + test_simd(const { Simd::<i32, 4>([2, 4, 6, 8]) }); + + // CHECK: call void @test_simd_unaligned(%"Simd<i32, 3>" %1 + test_simd_unaligned(const { Simd::<i32, 3>([2, 4, 6]) }); + } +} diff --git a/tests/codegen/enum/enum-match.rs b/tests/codegen/enum/enum-match.rs index 8da5de63e67..a24b98050d2 100644 --- a/tests/codegen/enum/enum-match.rs +++ b/tests/codegen/enum/enum-match.rs @@ -34,7 +34,7 @@ pub enum Enum1 { // CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match1{{.*}} // CHECK-NEXT: start: -// CHECK-NEXT: %1 = add i8 %0, -2 +// CHECK-NEXT: %1 = add{{( nsw)?}} i8 %0, -2 // CHECK-NEXT: %2 = zext i8 %1 to i64 // CHECK-NEXT: %3 = icmp ult i8 %1, 2 // CHECK-NEXT: %4 = add nuw nsw i64 %2, 1 diff --git a/tests/codegen/function-arguments.rs b/tests/codegen/function-arguments.rs index 56504df4034..bf9f405192b 100644 --- a/tests/codegen/function-arguments.rs +++ b/tests/codegen/function-arguments.rs @@ -50,7 +50,7 @@ pub fn maybeuninit_enum_bool(x: MaybeUninit<MyBool>) -> MaybeUninit<MyBool> { x } -// CHECK: noundef i32 @char(i32 noundef %x) +// CHECK: noundef{{( 
range\(i32 0, 1114112\))?}} i32 @char(i32 noundef{{( range\(i32 0, 1114112\))?}} %x) #[no_mangle] pub fn char(x: char) -> char { x @@ -68,7 +68,7 @@ pub fn int(x: u64) -> u64 { x } -// CHECK: noundef i64 @nonzero_int(i64 noundef %x) +// CHECK: noundef{{( range\(i64 1, 0\))?}} i64 @nonzero_int(i64 noundef{{( range\(i64 1, 0\))?}} %x) #[no_mangle] pub fn nonzero_int(x: NonZero<u64>) -> NonZero<u64> { x @@ -250,7 +250,7 @@ pub fn return_slice(x: &[u16]) -> &[u16] { x } -// CHECK: { i16, i16 } @enum_id_1(i16 noundef %x.0, i16 %x.1) +// CHECK: { i16, i16 } @enum_id_1(i16 noundef{{( range\(i16 0, 3\))?}} %x.0, i16 %x.1) #[no_mangle] pub fn enum_id_1(x: Option<Result<u16, u16>>) -> Option<Result<u16, u16>> { x diff --git a/tests/codegen/intrinsics/nontemporal.rs b/tests/codegen/intrinsics/nontemporal.rs index 076d6d6d9da..ff2d6296066 100644 --- a/tests/codegen/intrinsics/nontemporal.rs +++ b/tests/codegen/intrinsics/nontemporal.rs @@ -1,13 +1,37 @@ //@ compile-flags: -O +//@revisions: with_nontemporal without_nontemporal +//@[with_nontemporal] compile-flags: --target aarch64-unknown-linux-gnu +//@[with_nontemporal] needs-llvm-components: aarch64 +//@[without_nontemporal] compile-flags: --target x86_64-unknown-linux-gnu +//@[without_nontemporal] needs-llvm-components: x86 -#![feature(core_intrinsics)] +// Ensure that we *do* emit the `!nontemporal` flag on architectures where it +// is well-behaved, but do *not* emit it on architectures where it is ill-behaved. +// For more context, see <https://github.com/rust-lang/rust/issues/114582> and +// <https://github.com/llvm/llvm-project/issues/64521>. + +#![feature(no_core, lang_items, intrinsics)] +#![no_core] #![crate_type = "lib"] +#[lang = "sized"] +pub trait Sized {} +#[lang = "copy"] +pub trait Copy {} + +impl Copy for u32 {} +impl<T> Copy for *mut T {} + +extern "rust-intrinsic" { + pub fn nontemporal_store<T>(ptr: *mut T, val: T); +} + #[no_mangle] pub fn a(a: &mut u32, b: u32) { // CHECK-LABEL: define{{.*}}void @a - // CHECK: store i32 %b, ptr %a, align 4, !nontemporal + // with_nontemporal: store i32 %b, ptr %a, align 4, !nontemporal + // without_nontemporal-NOT: nontemporal unsafe { - std::intrinsics::nontemporal_store(a, b); + nontemporal_store(a, b); } } diff --git a/tests/codegen/issues/issue-68667-unwrap-combinators.rs b/tests/codegen/issues/issue-68667-unwrap-combinators.rs index 6bd4c566a0c..21a5a5bf4ee 100644 --- a/tests/codegen/issues/issue-68667-unwrap-combinators.rs +++ b/tests/codegen/issues/issue-68667-unwrap-combinators.rs @@ -5,7 +5,7 @@ // MIR inlining now optimizes this code. // CHECK-LABEL: @unwrap_combinators -// CHECK: icmp +// CHECK: {{icmp|trunc}} // CHECK-NEXT: icmp // CHECK-NEXT: select i1 // CHECK-NEXT: ret i1 diff --git a/tests/codegen/issues/str-to-string-128690.rs b/tests/codegen/issues/str-to-string-128690.rs new file mode 100644 index 00000000000..8b416306ba6 --- /dev/null +++ b/tests/codegen/issues/str-to-string-128690.rs @@ -0,0 +1,36 @@ +//@ compile-flags: -C opt-level=3 -Z merge-functions=disabled +#![crate_type = "lib"] + +//! Make sure str::to_string is specialized not to use fmt machinery. 
+ +// CHECK-LABEL: define {{(dso_local )?}}void @one_ref +#[no_mangle] +pub fn one_ref(input: &str) -> String { + // CHECK-NOT: {{(call|invoke).*}}fmt + input.to_string() +} + +// CHECK-LABEL: define {{(dso_local )?}}void @two_ref +#[no_mangle] +pub fn two_ref(input: &&str) -> String { + // CHECK-NOT: {{(call|invoke).*}}fmt + input.to_string() +} + +// CHECK-LABEL: define {{(dso_local )?}}void @thirteen_ref +#[no_mangle] +pub fn thirteen_ref(input: &&&&&&&&&&&&&str) -> String { + // CHECK-NOT: {{(call|invoke).*}}fmt + input.to_string() +} + +// This is a known performance cliff because of the macro-generated +// specialized impl. If this test suddenly starts failing, +// consider removing the `to_string_str!` macro in `alloc/str/string.rs`. +// +// CHECK-LABEL: define {{(dso_local )?}}void @fourteen_ref +#[no_mangle] +pub fn fourteen_ref(input: &&&&&&&&&&&&&&str) -> String { + // CHECK: {{(call|invoke).*}}fmt + input.to_string() +} diff --git a/tests/codegen/range-attribute.rs b/tests/codegen/range-attribute.rs new file mode 100644 index 00000000000..bb19bec0fb9 --- /dev/null +++ b/tests/codegen/range-attribute.rs @@ -0,0 +1,68 @@ +// Checks that range metadata gets emitted on functions result and arguments +// with scalar value. + +//@ compile-flags: -O -C no-prepopulate-passes +//@ min-llvm-version: 19 + +#![crate_type = "lib"] + +use std::num::NonZero; + +// Hack to get the correct size for usize +// CHECK: @helper([[USIZE:i[0-9]+]] noundef %_1) +#[no_mangle] +pub fn helper(_: usize) {} + +// CHECK: noundef range(i128 1, 0) i128 @nonzero_int(i128 noundef range(i128 1, 0) %x) +#[no_mangle] +pub fn nonzero_int(x: NonZero<u128>) -> NonZero<u128> { + x +} + +// CHECK: noundef range(i8 0, 3) i8 @optional_bool(i8 noundef range(i8 0, 3) %x) +#[no_mangle] +pub fn optional_bool(x: Option<bool>) -> Option<bool> { + x +} + +pub enum Enum0 { + A(bool), + B, + C, +} + +// CHECK: noundef range(i8 0, 4) i8 @enum0_value(i8 noundef range(i8 0, 4) %x) +#[no_mangle] +pub fn enum0_value(x: Enum0) -> Enum0 { + x +} + +pub enum Enum1 { + A(u64), + B(u64), + C(u64), +} + +// CHECK: { [[ENUM1_TYP:i[0-9]+]], i64 } @enum1_value([[ENUM1_TYP]] noundef range([[ENUM1_TYP]] 0, 3) %x.0, i64 noundef %x.1) +#[no_mangle] +pub fn enum1_value(x: Enum1) -> Enum1 { + x +} + +pub enum Enum2 { + A(Enum0), + B(Enum0), + C(Enum0), +} + +// CHECK: { i8, i8 } @enum2_value(i8 noundef range(i8 0, 3) %x.0, i8 noundef %x.1) +#[no_mangle] +pub fn enum2_value(x: Enum2) -> Enum2 { + x +} + +// CHECK: noundef [[USIZE]] @takes_slice(ptr noalias noundef nonnull readonly align 4 %x.0, [[USIZE]] noundef %x.1) +#[no_mangle] +pub fn takes_slice(x: &[i32]) -> usize { + x.len() +} diff --git a/tests/codegen/repr/transparent.rs b/tests/codegen/repr/transparent.rs index 4b41332db45..9140b8542ec 100644 --- a/tests/codegen/repr/transparent.rs +++ b/tests/codegen/repr/transparent.rs @@ -74,7 +74,7 @@ pub enum Bool { FileNotFound, } -// CHECK: define{{( dso_local)?}} noundef{{( zeroext)?}} i8 @test_Gpz(i8 noundef{{( zeroext)?}} %_1) +// CHECK: define{{( dso_local)?}} noundef{{( zeroext)?( range\(i8 0, 3\))?}} i8 @test_Gpz(i8 noundef{{( zeroext)?( range\(i8 0, 3\))?}} %_1) #[no_mangle] pub extern "C" fn test_Gpz(_: GenericPlusZst<Bool>) -> GenericPlusZst<Bool> { loop {} diff --git a/tests/debuginfo/pretty-std.rs b/tests/debuginfo/pretty-std.rs index 70827d551ca..45c6dbf3439 100644 --- a/tests/debuginfo/pretty-std.rs +++ b/tests/debuginfo/pretty-std.rs @@ -81,10 +81,10 @@ // cdb-check:vec,d [...] 
: { len=4 } [Type: [...]::Vec<u64,alloc::alloc::Global>] // cdb-check: [len] : 4 [Type: [...]] // cdb-check: [capacity] : [...] [Type: [...]] -// cdb-check: [0] : 4 [Type: unsigned __int64] -// cdb-check: [1] : 5 [Type: unsigned __int64] -// cdb-check: [2] : 6 [Type: unsigned __int64] -// cdb-check: [3] : 7 [Type: unsigned __int64] +// cdb-check: [0] : 4 [Type: u64] +// cdb-check: [1] : 5 [Type: u64] +// cdb-check: [2] : 6 [Type: u64] +// cdb-check: [3] : 7 [Type: u64] // cdb-command: dx str_slice // cdb-check:str_slice : "IAMA string slice!" [Type: ref$<str$>] @@ -141,8 +141,8 @@ // cdb-check: [<Raw View>] [Type: alloc::collections::vec_deque::VecDeque<i32,alloc::alloc::Global>] // cdb-check: [len] : 0x2 [Type: unsigned [...]] // cdb-check: [capacity] : 0x8 [Type: unsigned [...]] -// cdb-check: [0x0] : 90 [Type: int] -// cdb-check: [0x1] : 20 [Type: int] +// cdb-check: [0x0] : 90 [Type: i32] +// cdb-check: [0x1] : 20 [Type: i32] #![allow(unused_variables)] use std::collections::{LinkedList, VecDeque}; diff --git a/tests/debuginfo/strings-and-strs.rs b/tests/debuginfo/strings-and-strs.rs index 4a6adc2fc53..2d29ac12bd8 100644 --- a/tests/debuginfo/strings-and-strs.rs +++ b/tests/debuginfo/strings-and-strs.rs @@ -7,7 +7,7 @@ // gdb-command:run // gdb-command:print plain_string -// gdbr-check:$1 = alloc::string::String {vec: alloc::vec::Vec<u8, alloc::alloc::Global> {buf: alloc::raw_vec::RawVec<u8, alloc::alloc::Global> {ptr: core::ptr::unique::Unique<u8> {pointer: core::ptr::non_null::NonNull<u8> {pointer: 0x[...]}, _marker: core::marker::PhantomData<u8>}, cap: alloc::raw_vec::Cap (5), alloc: alloc::alloc::Global}, len: 5}} +// gdbr-check:$1 = alloc::string::String {vec: alloc::vec::Vec<u8, alloc::alloc::Global> {buf: alloc::raw_vec::RawVec<u8, alloc::alloc::Global> {inner: alloc::raw_vec::RawVecInner<alloc::alloc::Global> {ptr: core::ptr::unique::Unique<u8> {pointer: core::ptr::non_null::NonNull<u8> {pointer: 0x[...]}, _marker: core::marker::PhantomData<u8>}, cap: alloc::raw_vec::Cap (5), alloc: alloc::alloc::Global}, _marker: core::marker::PhantomData<u8>}, len: 5}} // gdb-command:print plain_str // gdbr-check:$2 = "Hello" diff --git a/tests/mir-opt/build_correct_coerce.main.built.after.mir b/tests/mir-opt/build_correct_coerce.main.built.after.mir new file mode 100644 index 00000000000..061174d69bb --- /dev/null +++ b/tests/mir-opt/build_correct_coerce.main.built.after.mir @@ -0,0 +1,18 @@ +// MIR for `main` after built + +fn main() -> () { + let mut _0: (); + let _1: for<'a> fn(&'a (), &'a ()); + scope 1 { + debug x => _1; + } + + bb0: { + StorageLive(_1); + _1 = foo as for<'a> fn(&'a (), &'a ()) (PointerCoercion(ReifyFnPointer)); + FakeRead(ForLet(None), _1); + _0 = const (); + StorageDead(_1); + return; + } +} diff --git a/tests/mir-opt/build_correct_coerce.rs b/tests/mir-opt/build_correct_coerce.rs new file mode 100644 index 00000000000..b6c861636dc --- /dev/null +++ b/tests/mir-opt/build_correct_coerce.rs @@ -0,0 +1,12 @@ +// skip-filecheck + +// Validate that we record the target for the `as` coercion as `for<'a> fn(&'a (), &'a ())`, +// and not `for<'a, 'b>(&'a (), &'b ())`. We previously did the latter due to a bug in +// the code that records adjustments in HIR typeck. 
+ +fn foo<'a, 'b>(_: &'a (), _: &'b ()) {} + +// EMIT_MIR build_correct_coerce.main.built.after.mir +fn main() { + let x = foo as for<'a> fn(&'a (), &'a ()); +} diff --git a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir index 14ad951a476..0fe4fd37072 100644 --- a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir @@ -5,63 +5,93 @@ fn vec_deref_to_slice(_1: &Vec<u8>) -> &[u8] { let mut _0: &[u8]; scope 1 (inlined <Vec<u8> as Deref>::deref) { debug self => _1; - let mut _4: *const u8; - let mut _5: usize; + let mut _7: usize; scope 2 (inlined Vec::<u8>::as_ptr) { debug self => _1; let mut _2: &alloc::raw_vec::RawVec<u8>; scope 3 (inlined alloc::raw_vec::RawVec::<u8>::ptr) { debug self => _2; - let mut _3: std::ptr::NonNull<u8>; - scope 4 (inlined Unique::<u8>::as_ptr) { - debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _3; - debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; - scope 5 (inlined NonNull::<u8>::as_ptr) { + let mut _3: &alloc::raw_vec::RawVecInner; + scope 4 (inlined alloc::raw_vec::RawVecInner::ptr::<u8>) { + debug self => _3; + let mut _6: std::ptr::NonNull<u8>; + scope 5 (inlined alloc::raw_vec::RawVecInner::non_null::<u8>) { debug self => _3; + let mut _4: std::ptr::NonNull<u8>; + scope 6 (inlined Unique::<u8>::cast::<u8>) { + debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _4; + debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + scope 7 (inlined NonNull::<u8>::cast::<u8>) { + debug self => _4; + scope 8 (inlined NonNull::<u8>::as_ptr) { + debug self => _4; + let mut _5: *const u8; + } + } + } + scope 9 (inlined #[track_caller] <Unique<u8> as Into<NonNull<u8>>>::into) { + debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _6; + debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + scope 10 (inlined <NonNull<u8> as From<Unique<u8>>>::from) { + debug ((unique: Unique<u8>).0: std::ptr::NonNull<u8>) => _6; + debug ((unique: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + scope 11 (inlined Unique::<u8>::as_non_null_ptr) { + debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _6; + debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + } + } + } + } + scope 12 (inlined NonNull::<u8>::as_ptr) { + debug self => _6; } } } } - scope 6 (inlined std::slice::from_raw_parts::<'_, u8>) { - debug data => _4; - debug len => _5; - let _6: *const [u8]; - scope 7 (inlined core::ub_checks::check_language_ub) { - scope 8 (inlined core::ub_checks::check_language_ub::runtime) { + scope 13 (inlined std::slice::from_raw_parts::<'_, u8>) { + debug data => _5; + debug len => _7; + let _8: *const [u8]; + scope 14 (inlined core::ub_checks::check_language_ub) { + scope 15 (inlined core::ub_checks::check_language_ub::runtime) { } } - scope 9 (inlined std::mem::size_of::<u8>) { + scope 16 (inlined std::mem::size_of::<u8>) { } - scope 10 (inlined align_of::<u8>) { + scope 17 (inlined align_of::<u8>) { } - scope 11 (inlined slice_from_raw_parts::<u8>) { - debug data => _4; - debug len => _5; - scope 12 (inlined std::ptr::from_raw_parts::<[u8], u8>) { - debug data_pointer => _4; - debug metadata => _5; + scope 18 (inlined slice_from_raw_parts::<u8>) { + debug data 
=> _5; + debug len => _7; + scope 19 (inlined std::ptr::from_raw_parts::<[u8], u8>) { + debug data_pointer => _5; + debug metadata => _7; } } } } bb0: { - StorageLive(_4); StorageLive(_2); _2 = &((*_1).0: alloc::raw_vec::RawVec<u8>); StorageLive(_3); - _3 = ((((*_1).0: alloc::raw_vec::RawVec<u8>).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>); - _4 = (_3.0: *const u8); - StorageDead(_3); - StorageDead(_2); - StorageLive(_5); - _5 = ((*_1).1: usize); + _3 = &(((*_1).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner); StorageLive(_6); - _6 = *const [u8] from (_4, _5); - _0 = &(*_6); - StorageDead(_6); - StorageDead(_5); + StorageLive(_4); + _4 = (((((*_1).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>); + _5 = (_4.0: *const u8); + _6 = NonNull::<u8> { pointer: _5 }; StorageDead(_4); + StorageDead(_6); + StorageDead(_3); + StorageDead(_2); + StorageLive(_7); + _7 = ((*_1).1: usize); + StorageLive(_8); + _8 = *const [u8] from (_5, _7); + _0 = &(*_8); + StorageDead(_8); + StorageDead(_7); return; } } diff --git a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir index 14ad951a476..0fe4fd37072 100644 --- a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir @@ -5,63 +5,93 @@ fn vec_deref_to_slice(_1: &Vec<u8>) -> &[u8] { let mut _0: &[u8]; scope 1 (inlined <Vec<u8> as Deref>::deref) { debug self => _1; - let mut _4: *const u8; - let mut _5: usize; + let mut _7: usize; scope 2 (inlined Vec::<u8>::as_ptr) { debug self => _1; let mut _2: &alloc::raw_vec::RawVec<u8>; scope 3 (inlined alloc::raw_vec::RawVec::<u8>::ptr) { debug self => _2; - let mut _3: std::ptr::NonNull<u8>; - scope 4 (inlined Unique::<u8>::as_ptr) { - debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _3; - debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; - scope 5 (inlined NonNull::<u8>::as_ptr) { + let mut _3: &alloc::raw_vec::RawVecInner; + scope 4 (inlined alloc::raw_vec::RawVecInner::ptr::<u8>) { + debug self => _3; + let mut _6: std::ptr::NonNull<u8>; + scope 5 (inlined alloc::raw_vec::RawVecInner::non_null::<u8>) { debug self => _3; + let mut _4: std::ptr::NonNull<u8>; + scope 6 (inlined Unique::<u8>::cast::<u8>) { + debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _4; + debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + scope 7 (inlined NonNull::<u8>::cast::<u8>) { + debug self => _4; + scope 8 (inlined NonNull::<u8>::as_ptr) { + debug self => _4; + let mut _5: *const u8; + } + } + } + scope 9 (inlined #[track_caller] <Unique<u8> as Into<NonNull<u8>>>::into) { + debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _6; + debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + scope 10 (inlined <NonNull<u8> as From<Unique<u8>>>::from) { + debug ((unique: Unique<u8>).0: std::ptr::NonNull<u8>) => _6; + debug ((unique: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + scope 11 (inlined Unique::<u8>::as_non_null_ptr) { + debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _6; + debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>; + } + } + } + } + scope 12 (inlined NonNull::<u8>::as_ptr) { + debug self => _6; } } 
} } - scope 6 (inlined std::slice::from_raw_parts::<'_, u8>) { - debug data => _4; - debug len => _5; - let _6: *const [u8]; - scope 7 (inlined core::ub_checks::check_language_ub) { - scope 8 (inlined core::ub_checks::check_language_ub::runtime) { + scope 13 (inlined std::slice::from_raw_parts::<'_, u8>) { + debug data => _5; + debug len => _7; + let _8: *const [u8]; + scope 14 (inlined core::ub_checks::check_language_ub) { + scope 15 (inlined core::ub_checks::check_language_ub::runtime) { } } - scope 9 (inlined std::mem::size_of::<u8>) { + scope 16 (inlined std::mem::size_of::<u8>) { } - scope 10 (inlined align_of::<u8>) { + scope 17 (inlined align_of::<u8>) { } - scope 11 (inlined slice_from_raw_parts::<u8>) { - debug data => _4; - debug len => _5; - scope 12 (inlined std::ptr::from_raw_parts::<[u8], u8>) { - debug data_pointer => _4; - debug metadata => _5; + scope 18 (inlined slice_from_raw_parts::<u8>) { + debug data => _5; + debug len => _7; + scope 19 (inlined std::ptr::from_raw_parts::<[u8], u8>) { + debug data_pointer => _5; + debug metadata => _7; } } } } bb0: { - StorageLive(_4); StorageLive(_2); _2 = &((*_1).0: alloc::raw_vec::RawVec<u8>); StorageLive(_3); - _3 = ((((*_1).0: alloc::raw_vec::RawVec<u8>).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>); - _4 = (_3.0: *const u8); - StorageDead(_3); - StorageDead(_2); - StorageLive(_5); - _5 = ((*_1).1: usize); + _3 = &(((*_1).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner); StorageLive(_6); - _6 = *const [u8] from (_4, _5); - _0 = &(*_6); - StorageDead(_6); - StorageDead(_5); + StorageLive(_4); + _4 = (((((*_1).0: alloc::raw_vec::RawVec<u8>).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>); + _5 = (_4.0: *const u8); + _6 = NonNull::<u8> { pointer: _5 }; StorageDead(_4); + StorageDead(_6); + StorageDead(_3); + StorageDead(_2); + StorageLive(_7); + _7 = ((*_1).1: usize); + StorageLive(_8); + _8 = *const [u8] from (_5, _7); + _0 = &(*_8); + StorageDead(_8); + StorageDead(_7); return; } } diff --git a/tests/run-make/CURRENT_RUSTC_VERSION/rmake.rs b/tests/run-make/CURRENT_RUSTC_VERSION/rmake.rs index fec4a4bb90b..024ce975cd6 100644 --- a/tests/run-make/CURRENT_RUSTC_VERSION/rmake.rs +++ b/tests/run-make/CURRENT_RUSTC_VERSION/rmake.rs @@ -3,8 +3,6 @@ // Check that the `CURRENT_RUSTC_VERSION` placeholder is correctly replaced by the current // `rustc` version and the `since` property in feature stability gating is properly respected. 
-use std::path::PathBuf; - use run_make_support::{aux_build, rfs, rustc, source_root}; fn main() { diff --git a/tests/run-make/arguments-non-c-like-enum/rmake.rs b/tests/run-make/arguments-non-c-like-enum/rmake.rs index 036691e8509..f8e077e0a79 100644 --- a/tests/run-make/arguments-non-c-like-enum/rmake.rs +++ b/tests/run-make/arguments-non-c-like-enum/rmake.rs @@ -4,8 +4,6 @@ use run_make_support::{cc, extra_c_flags, extra_cxx_flags, run, rustc, static_lib_name}; pub fn main() { - use std::path::Path; - rustc().input("nonclike.rs").crate_type("staticlib").run(); cc().input("test.c") .input(static_lib_name("nonclike")) diff --git a/tests/run-make/c-link-to-rust-staticlib/rmake.rs b/tests/run-make/c-link-to-rust-staticlib/rmake.rs index e21b976035b..6d045012e95 100644 --- a/tests/run-make/c-link-to-rust-staticlib/rmake.rs +++ b/tests/run-make/c-link-to-rust-staticlib/rmake.rs @@ -3,8 +3,6 @@ //@ ignore-cross-compile -use std::fs; - use run_make_support::rfs::remove_file; use run_make_support::{cc, extra_c_flags, run, rustc, static_lib_name}; diff --git a/tests/run-make/comment-section/rmake.rs b/tests/run-make/comment-section/rmake.rs index ecee441b35f..1557f50dbd0 100644 --- a/tests/run-make/comment-section/rmake.rs +++ b/tests/run-make/comment-section/rmake.rs @@ -7,9 +7,7 @@ // FIXME(jieyouxu): check cross-compile setup //@ ignore-cross-compile -use std::path::PathBuf; - -use run_make_support::{cwd, env_var, llvm_readobj, rfs, run_in_tmpdir, rustc}; +use run_make_support::{cwd, env_var, llvm_readobj, rfs, rustc}; fn main() { let target = env_var("TARGET"); diff --git a/tests/run-make/compressed-debuginfo/rmake.rs b/tests/run-make/compressed-debuginfo/rmake.rs index 3a656f28c22..5ba1a1852d5 100644 --- a/tests/run-make/compressed-debuginfo/rmake.rs +++ b/tests/run-make/compressed-debuginfo/rmake.rs @@ -5,7 +5,7 @@ // FIXME: This test isn't comprehensive and isn't covering all possible combinations. 
-use run_make_support::{assert_contains, cmd, llvm_readobj, run_in_tmpdir, rustc}; +use run_make_support::{assert_contains, llvm_readobj, run_in_tmpdir, rustc}; fn check_compression(compression: &str, to_find: &str) { run_in_tmpdir(|| { diff --git a/tests/run-make/const_fn_mir/rmake.rs b/tests/run-make/const_fn_mir/rmake.rs index 1ba93421855..0c33b1ce34e 100644 --- a/tests/run-make/const_fn_mir/rmake.rs +++ b/tests/run-make/const_fn_mir/rmake.rs @@ -2,7 +2,7 @@ //@ needs-unwind -use run_make_support::{cwd, diff, rustc}; +use run_make_support::{diff, rustc}; fn main() { rustc().input("main.rs").emit("mir").output("dump-actual.mir").run(); diff --git a/tests/run-make/crate-loading/rmake.rs b/tests/run-make/crate-loading/rmake.rs index fd5b66ae879..d7abd5872c9 100644 --- a/tests/run-make/crate-loading/rmake.rs +++ b/tests/run-make/crate-loading/rmake.rs @@ -2,14 +2,13 @@ //@ ignore-wasm32 //@ ignore-wasm64 -use run_make_support::rfs::copy; -use run_make_support::{assert_contains, rust_lib_name, rustc}; +use run_make_support::{rust_lib_name, rustc}; fn main() { rustc().input("multiple-dep-versions-1.rs").run(); rustc().input("multiple-dep-versions-2.rs").extra_filename("2").metadata("2").run(); - let out = rustc() + rustc() .input("multiple-dep-versions.rs") .extern_("dependency", rust_lib_name("dependency")) .extern_("dep_2_reexport", rust_lib_name("dependency2")) diff --git a/tests/run-make/doctests-keep-binaries-2024/rmake.rs b/tests/run-make/doctests-keep-binaries-2024/rmake.rs new file mode 100644 index 00000000000..3e8ffcbf244 --- /dev/null +++ b/tests/run-make/doctests-keep-binaries-2024/rmake.rs @@ -0,0 +1,67 @@ +// Check that valid binaries are persisted by running them, regardless of whether the +// --run or --no-run option is used. + +//@ ignore-cross-compile + +use std::path::Path; + +use run_make_support::{rfs, run, rustc, rustdoc}; + +fn setup_test_env<F: FnOnce(&Path, &Path)>(callback: F) { + let out_dir = Path::new("doctests"); + rfs::create_dir(&out_dir); + rustc().input("t.rs").crate_type("rlib").run(); + callback(&out_dir, Path::new("libt.rlib")); + rfs::remove_dir_all(out_dir); +} + +fn check_generated_binaries() { + run("doctests/merged_doctest_2024/rust_out"); +} + +fn main() { + setup_test_env(|out_dir, extern_path| { + rustdoc() + .input("t.rs") + .arg("-Zunstable-options") + .arg("--test") + .arg("--persist-doctests") + .arg(out_dir) + .extern_("t", extern_path) + .edition("2024") + .run(); + check_generated_binaries(); + }); + setup_test_env(|out_dir, extern_path| { + rustdoc() + .input("t.rs") + .arg("-Zunstable-options") + .arg("--test") + .arg("--persist-doctests") + .arg(out_dir) + .extern_("t", extern_path) + .arg("--no-run") + .edition("2024") + .run(); + check_generated_binaries(); + }); + // Behavior with --test-run-directory with relative paths. + setup_test_env(|_, _| { + let run_dir_path = Path::new("rundir"); + rfs::create_dir(&run_dir_path); + + rustdoc() + .input("t.rs") + .arg("-Zunstable-options") + .arg("--test") + .arg("--persist-doctests") + .arg("doctests") + .arg("--test-run-directory") + .arg(run_dir_path) + .extern_("t", "libt.rlib") + .edition("2024") + .run(); + + rfs::remove_dir_all(run_dir_path); + }); +} diff --git a/tests/run-make/doctests-keep-binaries-2024/t.rs b/tests/run-make/doctests-keep-binaries-2024/t.rs new file mode 100644 index 00000000000..c38cf0a0b25 --- /dev/null +++ b/tests/run-make/doctests-keep-binaries-2024/t.rs @@ -0,0 +1,11 @@ +/// Fungle the foople. 
+/// ``` +/// t::foople(); +/// ``` +pub fn foople() {} + +/// Flomble the florp +/// ``` +/// t::florp(); +/// ``` +pub fn florp() {} diff --git a/tests/run-make/doctests-merge/doctest-2021.stdout b/tests/run-make/doctests-merge/doctest-2021.stdout new file mode 100644 index 00000000000..7da08d68faa --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-2021.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test doctest.rs - (line 4) ... ok +test doctest.rs - init (line 8) ... ok + +test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/run-make/doctests-merge/doctest-2024.stdout b/tests/run-make/doctests-merge/doctest-2024.stdout new file mode 100644 index 00000000000..7da08d68faa --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-2024.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test doctest.rs - (line 4) ... ok +test doctest.rs - init (line 8) ... ok + +test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/run-make/doctests-merge/doctest-standalone.rs b/tests/run-make/doctests-merge/doctest-standalone.rs new file mode 100644 index 00000000000..134ffb58285 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-standalone.rs @@ -0,0 +1,18 @@ +#![crate_name = "foo"] +#![crate_type = "lib"] + +//! ```standalone +//! foo::init(); +//! ``` + +/// ```standalone +/// foo::init(); +/// ``` +pub fn init() { + static mut IS_INIT: bool = false; + + unsafe { + assert!(!IS_INIT); + IS_INIT = true; + } +} diff --git a/tests/run-make/doctests-merge/doctest-standalone.stdout b/tests/run-make/doctests-merge/doctest-standalone.stdout new file mode 100644 index 00000000000..ee9f62326ab --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-standalone.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test doctest-standalone.rs - (line 4) ... ok +test doctest-standalone.rs - init (line 8) ... ok + +test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/run-make/doctests-merge/doctest.rs b/tests/run-make/doctests-merge/doctest.rs new file mode 100644 index 00000000000..66a5d88db67 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest.rs @@ -0,0 +1,18 @@ +#![crate_name = "foo"] +#![crate_type = "lib"] + +//! ``` +//! foo::init(); +//! ``` + +/// ``` +/// foo::init(); +/// ``` +pub fn init() { + static mut IS_INIT: bool = false; + + unsafe { + assert!(!IS_INIT); + IS_INIT = true; + } +} diff --git a/tests/run-make/doctests-merge/rmake.rs b/tests/run-make/doctests-merge/rmake.rs new file mode 100644 index 00000000000..a25da7403e2 --- /dev/null +++ b/tests/run-make/doctests-merge/rmake.rs @@ -0,0 +1,39 @@ +use std::path::Path; + +use run_make_support::{cwd, diff, rustc, rustdoc}; + +fn test_and_compare(input_file: &str, stdout_file: &str, edition: &str, dep: &Path) { + let mut cmd = rustdoc(); + + let output = cmd + .input(input_file) + .arg("--test") + .arg("-Zunstable-options") + .edition(edition) + .arg("--test-args=--test-threads=1") + .extern_("foo", dep.display().to_string()) + .env("RUST_BACKTRACE", "short") + .run(); + + diff() + .expected_file(stdout_file) + .actual_text("output", output.stdout_utf8()) + .normalize(r#"finished in \d+\.\d+s"#, "finished in $$TIME") + .run(); +} + +fn main() { + let out_file = cwd().join("libfoo.rlib"); + + rustc().input("doctest.rs").crate_type("rlib").output(&out_file).run(); + + // First we ensure that running with the 2024 edition will not fail at runtime. 
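+    // (Both doctests call `foo::init()`, which asserts that the `IS_INIT` static has not been
+    // set yet; so if the merged 2024 doctest runner ran the two doctests in one process without
+    // isolating them, the second call would presumably trip that assertion. "Does not fail at
+    // runtime" is therefore the interesting property being checked here.)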
+    test_and_compare("doctest.rs", "doctest-2024.stdout", "2024", &out_file);
+
+    // Then we ensure that running with an edition < 2024 will not fail at runtime.
+    test_and_compare("doctest.rs", "doctest-2021.stdout", "2021", &out_file);
+
+    // Now we check with the standalone attribute which should succeed in all cases.
+    test_and_compare("doctest-standalone.rs", "doctest-standalone.stdout", "2024", &out_file);
+    test_and_compare("doctest-standalone.rs", "doctest-standalone.stdout", "2021", &out_file);
+}
diff --git a/tests/run-make/dump-ice-to-disk/rmake.rs b/tests/run-make/dump-ice-to-disk/rmake.rs
index 260abf3aa6f..48b4071e065 100644
--- a/tests/run-make/dump-ice-to-disk/rmake.rs
+++ b/tests/run-make/dump-ice-to-disk/rmake.rs
@@ -1,137 +1,197 @@
-// This test checks if internal compilation error (ICE) log files work as expected.
-// - Get the number of lines from the log files without any configuration options,
-// then check that the line count doesn't change if the backtrace gets configured to be short
-// or full.
-// - Check that disabling ICE logging results in zero files created.
-// - Check that the ICE files contain some of the expected strings.
-// - exercise the -Zmetrics-dir nightly flag
-// - verify what happens when both the nightly flag and env variable are set
-// - test the RUST_BACKTRACE=0 behavior against the file creation
+//! This test checks that internal compiler error (ICE) dump files `rustc-ice*.txt` work as
+//! expected.
+!//!
+//! - Basic sanity checks on a default ICE dump.
+//! - Get the number of lines from the dump files without any `RUST_BACKTRACE` options, then check
+//!   that the ICE dump file (line count) is not affected by `RUST_BACKTRACE` settings.
+//! - Check that disabling ICE dumping results in zero dump files created.
+//! - Check that the ICE dump contains some of the expected strings.
+//! - Check that `RUST_BACKTRACE=0` does not prevent the ICE dump from being created.
+//! - Exercise the `-Zmetrics-dir` nightly flag (#128914):
+//!   - When `-Zmetrics-dir=PATH` is present but `RUSTC_ICE` is not set, check that the ICE dump
+//!     is placed under `PATH`.
+//!   - When `RUSTC_ICE=RUSTC_ICE_PATH` and `-Zmetrics-dir=METRICS_PATH` are both provided, check
+//!     that `RUSTC_ICE_PATH` takes precedence and no ICE dump is emitted under `METRICS_PATH`.
+//!
+//! See <https://github.com/rust-lang/rust/pull/108714>.
-// See https://github.com/rust-lang/rust/pull/108714
+//@ ignore-windows
+// FIXME(#128911): @jieyouxu: This test is sometimes, for whatever forsaken reason, flaky on
+// `i686-mingw`, and I cannot reproduce it locally. The error messages upon assertion failure in
+// this test are intentionally extremely verbose to aid debugging that issue.
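For reference, the core interaction that the rewritten test below drives can be boiled down to the following sketch. It only uses `run_make_support` helpers that already appear in this file; `lib.rs` is assumed to be any source file that fails to compile (its contents are not part of this diff), and the helper name is made up for illustration:

use run_make_support::{cwd, has_extension, has_prefix, rfs, rustc, shallow_find_files};

// Illustrative sketch, not part of the test: force an ICE and read back the dump it produced.
fn trigger_ice_and_read_dump() -> String {
    // `-Ztreat-err-as-bug=1` turns the first reported error into an ICE, and `RUSTC_ICE`
    // selects the directory that receives the `rustc-ice*.txt` dump file.
    rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail();
    let dumps = shallow_find_files(cwd(), |path| {
        has_prefix(path, "rustc-ice") && has_extension(path, "txt")
    });
    assert_eq!(dumps.len(), 1); // exactly one dump is expected per failed invocation
    rfs::read_to_string(dumps.first().unwrap())
}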
-use run_make_support::{cwd, has_extension, has_prefix, rfs, rustc, shallow_find_files}; +use std::cell::OnceCell; +use std::path::{Path, PathBuf}; -fn main() { - rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); - let default = get_text_from_ice(".").lines().count(); - - clear_ice_files(); - rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); - let ice_text = get_text_from_ice(cwd()); - let default_set = ice_text.lines().count(); - let content = ice_text; - let ice_files = shallow_find_files(cwd(), |path| { - has_prefix(path, "rustc-ice") && has_extension(path, "txt") - }); +use run_make_support::{ + cwd, filename_contains, has_extension, has_prefix, rfs, run_in_tmpdir, rustc, + shallow_find_files, +}; + +#[derive(Debug)] +struct IceDump { + name: &'static str, + path: PathBuf, + message: String, +} + +impl IceDump { + fn lines_count(&self) -> usize { + self.message.lines().count() + } +} + +#[track_caller] +fn assert_ice_len_equals(left: &IceDump, right: &IceDump) { + let left_len = left.lines_count(); + let right_len = right.lines_count(); + + if left_len != right_len { + eprintln!("=== {} ICE MESSAGE ({} lines) ====", left.name, left_len); + eprintln!("{}", left.message); + + eprintln!("=== {} ICE MESSAGE ({} lines) ====", right.name, right_len); + eprintln!("{}", right.message); + + eprintln!("===================================="); + panic!( + "ICE message length mismatch: {} has {} lines but {} has {} lines", + left.name, left_len, right.name, right_len + ); + } +} + +fn find_ice_dumps_in_dir<P: AsRef<Path>>(dir: P) -> Vec<PathBuf> { + shallow_find_files(dir, |path| has_prefix(path, "rustc-ice") && has_extension(path, "txt")) +} + +// Assert only one `rustc-ice*.txt` ICE file exists, and extract the ICE message from the ICE file. +#[track_caller] +fn extract_exactly_one_ice_file<P: AsRef<Path>>(name: &'static str, dir: P) -> IceDump { + let ice_files = find_ice_dumps_in_dir(dir); assert_eq!(ice_files.len(), 1); // There should only be 1 ICE file. - let ice_file_name = - ice_files.first().and_then(|f| f.file_name()).and_then(|n| n.to_str()).unwrap(); - // Ensure that the ICE dump path doesn't contain `:`, because they cause problems on Windows. - assert!(!ice_file_name.contains(":"), "{ice_file_name}"); - assert_eq!(default, default_set); - assert!(default > 0); - // Some of the expected strings in an ICE file should appear. - assert!(content.contains("thread 'rustc' panicked at")); - assert!(content.contains("stack backtrace:")); - - test_backtrace_short(default); - test_backtrace_full(default); - test_backtrace_disabled(default); - - clear_ice_files(); - // The ICE dump is explicitly disabled. Therefore, this should produce no files. - rustc().env("RUSTC_ICE", "0").input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); - let ice_files = shallow_find_files(cwd(), |path| { - has_prefix(path, "rustc-ice") && has_extension(path, "txt") + let path = ice_files.get(0).unwrap(); + let message = rfs::read_to_string(path); + IceDump { name, path: path.to_path_buf(), message } +} + +fn main() { + // Establish baseline ICE message. + let mut default_ice_dump = OnceCell::new(); + run_in_tmpdir(|| { + rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); + let dump = extract_exactly_one_ice_file("baseline", cwd()); + // Ensure that the ICE dump path doesn't contain `:`, because they cause problems on + // Windows. 
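+        // (`:` is not a legal character in Windows file names, since it doubles as the drive
+        // separator, which is why the dump file name has to avoid it.)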
+ assert!(!filename_contains(&dump.path, ":"), "{} contains `:`", dump.path.display()); + // Some of the expected strings in an ICE file should appear. + assert!(dump.message.contains("thread 'rustc' panicked at")); + assert!(dump.message.contains("stack backtrace:")); + default_ice_dump.set(dump).unwrap(); }); - assert!(ice_files.is_empty()); // There should be 0 ICE files. + let default_ice_dump = default_ice_dump.get().unwrap(); - metrics_dir(default); -} + test_backtrace_short(default_ice_dump); + test_backtrace_full(default_ice_dump); + test_backtrace_disabled(default_ice_dump); + test_ice_dump_disabled(); -fn test_backtrace_short(baseline: usize) { - clear_ice_files(); - rustc() - .env("RUSTC_ICE", cwd()) - .input("lib.rs") - .env("RUST_BACKTRACE", "short") - .arg("-Ztreat-err-as-bug=1") - .run_fail(); - let short = get_text_from_ice(cwd()).lines().count(); - // backtrace length in dump shouldn't be changed by RUST_BACKTRACE - assert_eq!(short, baseline); + test_metrics_dir(default_ice_dump); } -fn test_backtrace_full(baseline: usize) { - clear_ice_files(); - rustc() - .env("RUSTC_ICE", cwd()) - .input("lib.rs") - .env("RUST_BACKTRACE", "full") - .arg("-Ztreat-err-as-bug=1") - .run_fail(); - let full = get_text_from_ice(cwd()).lines().count(); - // backtrace length in dump shouldn't be changed by RUST_BACKTRACE - assert_eq!(full, baseline); +#[track_caller] +fn test_backtrace_short(baseline: &IceDump) { + run_in_tmpdir(|| { + rustc() + .env("RUSTC_ICE", cwd()) + .input("lib.rs") + .env("RUST_BACKTRACE", "short") + .arg("-Ztreat-err-as-bug=1") + .run_fail(); + let dump = extract_exactly_one_ice_file("RUST_BACKTRACE=short", cwd()); + // Backtrace length in dump shouldn't be changed by `RUST_BACKTRACE`. + assert_ice_len_equals(baseline, &dump); + }); } -fn test_backtrace_disabled(baseline: usize) { - clear_ice_files(); - rustc() - .env("RUSTC_ICE", cwd()) - .input("lib.rs") - .env("RUST_BACKTRACE", "0") - .arg("-Ztreat-err-as-bug=1") - .run_fail(); - let disabled = get_text_from_ice(cwd()).lines().count(); - // backtrace length in dump shouldn't be changed by RUST_BACKTRACE - assert_eq!(disabled, baseline); +#[track_caller] +fn test_backtrace_full(baseline: &IceDump) { + run_in_tmpdir(|| { + rustc() + .env("RUSTC_ICE", cwd()) + .input("lib.rs") + .env("RUST_BACKTRACE", "full") + .arg("-Ztreat-err-as-bug=1") + .run_fail(); + let dump = extract_exactly_one_ice_file("RUST_BACKTRACE=full", cwd()); + // Backtrace length in dump shouldn't be changed by `RUST_BACKTRACE`. + assert_ice_len_equals(baseline, &dump); + }); } -fn metrics_dir(baseline: usize) { - test_flag_only(baseline); - test_flag_and_env(baseline); +#[track_caller] +fn test_backtrace_disabled(baseline: &IceDump) { + run_in_tmpdir(|| { + rustc() + .env("RUSTC_ICE", cwd()) + .input("lib.rs") + .env("RUST_BACKTRACE", "0") + .arg("-Ztreat-err-as-bug=1") + .run_fail(); + let dump = extract_exactly_one_ice_file("RUST_BACKTRACE=disabled", cwd()); + // Backtrace length in dump shouldn't be changed by `RUST_BACKTRACE`. + assert_ice_len_equals(baseline, &dump); + }); } -fn test_flag_only(baseline: usize) { - clear_ice_files(); - let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); - rustc().input("lib.rs").arg("-Ztreat-err-as-bug=1").arg(metrics_arg).run_fail(); - let output = get_text_from_ice(cwd()).lines().count(); - assert_eq!(output, baseline); +#[track_caller] +fn test_ice_dump_disabled() { + // The ICE dump is explicitly disabled. Therefore, this should produce no files. 
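+    // (As used throughout this test, `RUSTC_ICE=<dir>` selects the dump directory, while
+    // `RUSTC_ICE=0` opts out of dumping altogether.)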
+ run_in_tmpdir(|| { + rustc().env("RUSTC_ICE", "0").input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); + let ice_files = find_ice_dumps_in_dir(cwd()); + assert!(ice_files.is_empty(), "there should be no ICE files if `RUSTC_ICE=0` is set"); + }); } -fn test_flag_and_env(baseline: usize) { - clear_ice_files(); - let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); - let real_dir = cwd().join("actually_put_ice_here"); - rfs::create_dir(real_dir.clone()); - rustc() - .input("lib.rs") - .env("RUSTC_ICE", real_dir.clone()) - .arg("-Ztreat-err-as-bug=1") - .arg(metrics_arg) - .run_fail(); - let output = get_text_from_ice(real_dir).lines().count(); - assert_eq!(output, baseline); +#[track_caller] +fn test_metrics_dir(baseline: &IceDump) { + test_flag_only(baseline); + test_flag_and_env(baseline); } -fn clear_ice_files() { - let ice_files = shallow_find_files(cwd(), |path| { - has_prefix(path, "rustc-ice") && has_extension(path, "txt") +#[track_caller] +fn test_flag_only(baseline: &IceDump) { + run_in_tmpdir(|| { + let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); + rustc() + .env_remove("RUSTC_ICE") // prevent interference from environment + .input("lib.rs") + .arg("-Ztreat-err-as-bug=1") + .arg(metrics_arg) + .run_fail(); + let dump = extract_exactly_one_ice_file("-Zmetrics-dir only", cwd()); + assert_ice_len_equals(baseline, &dump); }); - for file in ice_files { - rfs::remove_file(file); - } } #[track_caller] -fn get_text_from_ice(dir: impl AsRef<std::path::Path>) -> String { - let ice_files = - shallow_find_files(dir, |path| has_prefix(path, "rustc-ice") && has_extension(path, "txt")); - assert_eq!(ice_files.len(), 1); // There should only be 1 ICE file. - let ice_file = ice_files.get(0).unwrap(); - let output = rfs::read_to_string(ice_file); - output +fn test_flag_and_env(baseline: &IceDump) { + run_in_tmpdir(|| { + let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); + let real_dir = cwd().join("actually_put_ice_here"); + rfs::create_dir(&real_dir); + rustc() + .input("lib.rs") + .env("RUSTC_ICE", &real_dir) + .arg("-Ztreat-err-as-bug=1") + .arg(metrics_arg) + .run_fail(); + + let cwd_ice_files = find_ice_dumps_in_dir(cwd()); + assert!(cwd_ice_files.is_empty(), "RUSTC_ICE should override -Zmetrics-dir"); + + let dump = extract_exactly_one_ice_file("RUSTC_ICE overrides -Zmetrics-dir", real_dir); + assert_ice_len_equals(baseline, &dump); + }); } diff --git a/tests/run-make/dylib-soname/rmake.rs b/tests/run-make/dylib-soname/rmake.rs index a0215a6906e..cec0d463842 100644 --- a/tests/run-make/dylib-soname/rmake.rs +++ b/tests/run-make/dylib-soname/rmake.rs @@ -4,7 +4,6 @@ //@ only-linux //@ ignore-cross-compile -use run_make_support::regex::Regex; use run_make_support::{cmd, run_in_tmpdir, rustc}; fn main() { diff --git a/tests/run-make/extern-flag-disambiguates/rmake.rs b/tests/run-make/extern-flag-disambiguates/rmake.rs index 2d7d7f69f66..89cda02eaea 100644 --- a/tests/run-make/extern-flag-disambiguates/rmake.rs +++ b/tests/run-make/extern-flag-disambiguates/rmake.rs @@ -1,6 +1,6 @@ //@ ignore-cross-compile -use run_make_support::{cwd, run, rustc}; +use run_make_support::{run, rustc}; // Attempt to build this dependency tree: // diff --git a/tests/run-make/git_clone_sha1.sh b/tests/run-make/git_clone_sha1.sh deleted file mode 100644 index 626e4e42761..00000000000 --- a/tests/run-make/git_clone_sha1.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -x - -# Usage: $0 project_name url sha1 -# Get the crate with the specified sha1. 
-# -# all arguments are required. -# -# See below link for git usage: -# https://stackoverflow.com/questions/3489173#14091182 - -# Mandatory arguments: -PROJECT_NAME=$1 -URL=$2 -SHA1=$3 - -function err_exit() { - echo "ERROR:" $* - exit 1 -} - -git clone $URL $PROJECT_NAME || err_exit -cd $PROJECT_NAME || err_exit -git reset --hard $SHA1 || err_exit diff --git a/tests/run-make/ice-dep-cannot-find-dep/rmake.rs b/tests/run-make/ice-dep-cannot-find-dep/rmake.rs index 8ba3a0366b0..1c136773f01 100644 --- a/tests/run-make/ice-dep-cannot-find-dep/rmake.rs +++ b/tests/run-make/ice-dep-cannot-find-dep/rmake.rs @@ -16,7 +16,7 @@ // If we used `rustc` the additional '-L rmake_out' option would allow rustc to // actually find the crate. -use run_make_support::{bare_rustc, rfs, rust_lib_name, rustc}; +use run_make_support::{bare_rustc, rust_lib_name, rustc}; fn main() { rustc().crate_name("a").crate_type("rlib").input("a.rs").arg("--verbose").run(); diff --git a/tests/run-make/incr-test-moved-file/rmake.rs b/tests/run-make/incr-test-moved-file/rmake.rs index f314e110a8c..0ba1b0d58fe 100644 --- a/tests/run-make/incr-test-moved-file/rmake.rs +++ b/tests/run-make/incr-test-moved-file/rmake.rs @@ -14,7 +14,7 @@ //@ ignore-nvptx64-nvidia-cuda // FIXME: can't find crate for 'std' -use run_make_support::{rfs, rust_lib_name, rustc}; +use run_make_support::{rfs, rustc}; fn main() { rfs::create_dir("incr"); diff --git a/tests/run-make/incremental-debugger-visualizer/rmake.rs b/tests/run-make/incremental-debugger-visualizer/rmake.rs index 58a7e415bc5..07c920cc04a 100644 --- a/tests/run-make/incremental-debugger-visualizer/rmake.rs +++ b/tests/run-make/incremental-debugger-visualizer/rmake.rs @@ -2,8 +2,6 @@ // (in this case, foo.py and foo.natvis) are picked up when compiling incrementally. // See https://github.com/rust-lang/rust/pull/111641 -use std::io::Read; - use run_make_support::{invalid_utf8_contains, invalid_utf8_not_contains, rfs, rustc}; fn main() { diff --git a/tests/run-make/lto-readonly-lib/rmake.rs b/tests/run-make/lto-readonly-lib/rmake.rs index b9f0dc21daf..78f2b580e0d 100644 --- a/tests/run-make/lto-readonly-lib/rmake.rs +++ b/tests/run-make/lto-readonly-lib/rmake.rs @@ -7,7 +7,7 @@ //@ ignore-cross-compile -use run_make_support::{rfs, run, rust_lib_name, rustc, test_while_readonly}; +use run_make_support::{run, rust_lib_name, rustc, test_while_readonly}; fn main() { rustc().input("lib.rs").run(); diff --git a/tests/run-make/min-global-align/Makefile b/tests/run-make/min-global-align/Makefile deleted file mode 100644 index 82f38749e00..00000000000 --- a/tests/run-make/min-global-align/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -include ../tools.mk - -# only-linux - -# This tests ensure that global variables respect the target minimum alignment. -# The three bools `STATIC_BOOL`, `STATIC_MUT_BOOL`, and `CONST_BOOL` all have -# type-alignment of 1, but some targets require greater global alignment. - -SRC = min_global_align.rs -LL = $(TMPDIR)/min_global_align.ll - -all: -# Most targets are happy with default alignment -- take i686 for example. -ifeq ($(filter x86,$(LLVM_COMPONENTS)),x86) - $(RUSTC) --target=i686-unknown-linux-gnu --emit=llvm-ir $(SRC) - [ "$$(grep -c 'align 1' "$(LL)")" -eq "3" ] -endif -# SystemZ requires even alignment for PC-relative addressing. 
-ifeq ($(filter systemz,$(LLVM_COMPONENTS)),systemz) - $(RUSTC) --target=s390x-unknown-linux-gnu --emit=llvm-ir $(SRC) - [ "$$(grep -c 'align 2' "$(LL)")" -eq "3" ] -endif diff --git a/tests/run-make/min-global-align/rmake.rs b/tests/run-make/min-global-align/rmake.rs new file mode 100644 index 00000000000..2adaaf172f4 --- /dev/null +++ b/tests/run-make/min-global-align/rmake.rs @@ -0,0 +1,27 @@ +// This test checks that global variables respect the target minimum alignment. +// The three bools `STATIC_BOOL`, `STATIC_MUT_BOOL`, and `CONST_BOOL` all have +// type-alignment of 1, but some targets require greater global alignment. +// See https://github.com/rust-lang/rust/pull/44440 + +//@ only-linux +// Reason: this test is specific to linux, considering compilation is targeted +// towards linux architectures only. + +use run_make_support::{assert_count_is, llvm_components_contain, rfs, rustc}; + +fn main() { + // Most targets are happy with default alignment -- take i686 for example. + if llvm_components_contain("x86") { + rustc().target("i686-unknown-linux-gnu").emit("llvm-ir").input("min_global_align.rs").run(); + assert_count_is(3, rfs::read_to_string("min_global_align.ll"), "align 1"); + } + // SystemZ requires even alignment for PC-relative addressing. + if llvm_components_contain("systemz") { + rustc() + .target("s390x-unknown-linux-gnu") + .emit("llvm-ir") + .input("min_global_align.rs") + .run(); + assert_count_is(3, rfs::read_to_string("min_global_align.ll"), "align 2"); + } +} diff --git a/tests/run-make/multiple-emits/rmake.rs b/tests/run-make/multiple-emits/rmake.rs index 67c0ebb9864..8a5eb1d9d85 100644 --- a/tests/run-make/multiple-emits/rmake.rs +++ b/tests/run-make/multiple-emits/rmake.rs @@ -1,4 +1,4 @@ -use run_make_support::{cwd, path, rustc}; +use run_make_support::{path, rustc}; fn main() { rustc().input("foo.rs").emit("asm,llvm-ir").output("out").run(); diff --git a/tests/run-make/naked-symbol-visibility/rmake.rs b/tests/run-make/naked-symbol-visibility/rmake.rs index a32e62326eb..07ff253788d 100644 --- a/tests/run-make/naked-symbol-visibility/rmake.rs +++ b/tests/run-make/naked-symbol-visibility/rmake.rs @@ -3,7 +3,7 @@ use run_make_support::object::read::{File, Object, Symbol}; use run_make_support::object::ObjectSymbol; use run_make_support::targets::is_windows; -use run_make_support::{dynamic_lib_name, env_var, rfs, rustc}; +use run_make_support::{dynamic_lib_name, rfs, rustc}; fn main() { let rdylib_name = dynamic_lib_name("a_rust_dylib"); diff --git a/tests/run-make/no-alloc-shim/Makefile b/tests/run-make/no-alloc-shim/Makefile deleted file mode 100644 index 568e3f9ba1d..00000000000 --- a/tests/run-make/no-alloc-shim/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -include ../tools.mk - -# ignore-cross-compile -# ignore-msvc FIXME(bjorn3) can't figure out how to link with the MSVC toolchain - -TARGET_LIBDIR = $$($(RUSTC) --print target-libdir) - -all: - $(RUSTC) foo.rs --crate-type bin --emit obj -Cpanic=abort -ifdef IS_MSVC - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(call OUT_EXE,foo) /link $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib - $(call OUT_EXE,foo) -else - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib -o $(call RUN_BINFILE,foo) - $(call RUN_BINFILE,foo) -endif - - # Check that linking without __rust_no_alloc_shim_is_unstable defined fails - $(RUSTC) foo.rs --crate-type bin --emit obj -Cpanic=abort --cfg 
check_feature_gate -ifdef IS_MSVC - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(call OUT_EXE,foo) /link $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib || exit 0 && exit 1 -else - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib -o $(call RUN_BINFILE,foo) || exit 0 && exit 1 -endif diff --git a/tests/run-make/no-alloc-shim/rmake.rs b/tests/run-make/no-alloc-shim/rmake.rs new file mode 100644 index 00000000000..c398a3177df --- /dev/null +++ b/tests/run-make/no-alloc-shim/rmake.rs @@ -0,0 +1,55 @@ +// This test checks the compatibility of the interaction between `--emit obj` and +// `#[global_allocator]`, as it is now possible to invoke the latter without the +// allocator shim since #86844. As this feature is unstable, it should fail if +// --cfg check_feature_gate is passed. +// See https://github.com/rust-lang/rust/pull/86844 + +//@ ignore-cross-compile +// Reason: the compiled binary is executed + +//@ ignore-msvc +//FIXME(Oneirical): Getting this to work on MSVC requires passing libcmt.lib to CC, +// which is not trivial to do. +// Tracking issue: https://github.com/rust-lang/rust/issues/128602 +// Discussion: https://github.com/rust-lang/rust/pull/128407#discussion_r1702439172 + +use run_make_support::{cc, cwd, has_extension, has_prefix, run, rustc, shallow_find_files}; + +fn main() { + rustc().input("foo.rs").crate_type("bin").emit("obj").panic("abort").run(); + let libdir = rustc().print("target-libdir").run().stdout_utf8(); + let libdir = libdir.trim(); + + let alloc_libs = shallow_find_files(&libdir, |path| { + has_prefix(path, "liballoc-") && has_extension(path, "rlib") + }); + let core_libs = shallow_find_files(&libdir, |path| { + has_prefix(path, "libcore-") && has_extension(path, "rlib") + }); + let compiler_builtins_libs = shallow_find_files(libdir, |path| { + has_prefix(path, "libcompiler_builtins") && has_extension(path, "rlib") + }); + + cc().input("foo.o") + .out_exe("foo") + .args(&alloc_libs) + .args(&core_libs) + .args(&compiler_builtins_libs) + .run(); + run("foo"); + + // Check that linking without __rust_no_alloc_shim_is_unstable defined fails + rustc() + .input("foo.rs") + .crate_type("bin") + .emit("obj") + .panic("abort") + .cfg("check_feature_gate") + .run(); + cc().input("foo.o") + .out_exe("foo") + .args(&alloc_libs) + .args(&core_libs) + .args(&compiler_builtins_libs) + .run_fail(); +} diff --git a/tests/run-make/non-unicode-in-incremental-dir/rmake.rs b/tests/run-make/non-unicode-in-incremental-dir/rmake.rs index ef316f11036..42bd4b1b799 100644 --- a/tests/run-make/non-unicode-in-incremental-dir/rmake.rs +++ b/tests/run-make/non-unicode-in-incremental-dir/rmake.rs @@ -8,7 +8,7 @@ fn main() { match std::fs::create_dir(&non_unicode) { // If an error occurs, check if creating a directory with a valid Unicode name would // succeed. - Err(e) if std::fs::create_dir("valid_unicode").is_ok() => { + Err(_) if std::fs::create_dir("valid_unicode").is_ok() => { // Filesystem doesn't appear support non-Unicode paths. 
return; } diff --git a/tests/run-make/output-type-permutations/rmake.rs b/tests/run-make/output-type-permutations/rmake.rs index 76fd8c1c260..c0569af6e84 100644 --- a/tests/run-make/output-type-permutations/rmake.rs +++ b/tests/run-make/output-type-permutations/rmake.rs @@ -113,7 +113,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "asm-emit".to_string(), }, || { @@ -123,7 +123,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "asm-emit2".to_string(), }, || { @@ -133,7 +133,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "asm-emit3".to_string(), }, || { @@ -144,7 +144,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-ir-emit".to_string(), }, || { @@ -154,7 +154,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-ir-emit2".to_string(), }, || { @@ -164,7 +164,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-ir-emit3".to_string(), }, || { @@ -175,7 +175,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-bc-emit".to_string(), }, || { @@ -185,7 +185,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-bc-emit2".to_string(), }, || { @@ -195,7 +195,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-bc-emit3".to_string(), }, || { @@ -206,7 +206,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "obj-emit".to_string(), }, || { @@ -216,7 +216,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "obj-emit2".to_string(), }, || { @@ -226,7 +226,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "obj-emit3".to_string(), }, || { @@ -268,7 +268,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib".to_string(), }, || { @@ -278,7 +278,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib2".to_string(), }, || { @@ -288,7 +288,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib3".to_string(), }, || { @@ -375,7 +375,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib".to_string(), }, || { @@ -385,7 +385,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], 
test_dir: "staticlib2".to_string(), }, || { @@ -395,7 +395,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib3".to_string(), }, || { @@ -449,7 +449,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["ir", rust_lib_name("bar")], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib-ir".to_string(), }, || { @@ -466,7 +466,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["ir", "asm", "bc", "obj", "link"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib-all".to_string(), }, || { @@ -484,7 +484,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["ir", "asm", "bc", "obj", "link"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib-all2".to_string(), }, || { @@ -523,7 +523,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["bar.bc", rust_lib_name("bar"), "foo.bc"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib-emits".to_string(), }, || { diff --git a/tests/run-make/print-check-cfg/rmake.rs b/tests/run-make/print-check-cfg/rmake.rs index b10336f88c6..f6f7f7cece6 100644 --- a/tests/run-make/print-check-cfg/rmake.rs +++ b/tests/run-make/print-check-cfg/rmake.rs @@ -4,7 +4,6 @@ extern crate run_make_support; use std::collections::HashSet; use std::iter::FromIterator; -use std::ops::Deref; use run_make_support::rustc; diff --git a/tests/run-make/print-native-static-libs/rmake.rs b/tests/run-make/print-native-static-libs/rmake.rs index b4e7c0e17ff..a51ac934c72 100644 --- a/tests/run-make/print-native-static-libs/rmake.rs +++ b/tests/run-make/print-native-static-libs/rmake.rs @@ -12,8 +12,6 @@ //@ ignore-cross-compile //@ ignore-wasm -use std::io::BufRead; - use run_make_support::{is_msvc, rustc}; fn main() { diff --git a/tests/run-make/redundant-libs/rmake.rs b/tests/run-make/redundant-libs/rmake.rs index 43bb30bde70..e984dee8df5 100644 --- a/tests/run-make/redundant-libs/rmake.rs +++ b/tests/run-make/redundant-libs/rmake.rs @@ -11,9 +11,7 @@ //@ ignore-cross-compile // Reason: the compiled binary is executed -use run_make_support::{ - build_native_dynamic_lib, build_native_static_lib, cwd, is_msvc, rfs, run, rustc, -}; +use run_make_support::{build_native_dynamic_lib, build_native_static_lib, run, rustc}; fn main() { build_native_dynamic_lib("foo"); diff --git a/tests/run-make/remap-path-prefix-dwarf/Makefile b/tests/run-make/remap-path-prefix-dwarf/Makefile deleted file mode 100644 index 8905a00ea28..00000000000 --- a/tests/run-make/remap-path-prefix-dwarf/Makefile +++ /dev/null @@ -1,112 +0,0 @@ -# This test makes sure that --remap-path-prefix has the expected effects on paths in debuginfo. -# It tests several cases, each of them has a detailed description attached to it. - -# ignore-windows - -include ../tools.mk - -SRC_DIR := $(abspath .) -SRC_DIR_PARENT := $(abspath ..) 
- -ifeq ($(UNAME),Darwin) - DEBUGINFOOPTS := -Csplit-debuginfo=off -else - DEBUGINFOOPTS := -endif - -all: \ - abs_input_outside_working_dir \ - rel_input_remap_working_dir \ - rel_input_remap_working_dir_scope \ - rel_input_remap_working_dir_parent \ - rel_input_remap_working_dir_child \ - rel_input_remap_working_dir_diagnostics \ - abs_input_inside_working_dir \ - abs_input_inside_working_dir_scope \ - abs_input_outside_working_dir - -# The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path *is* within -# the working directory of the compiler. We are remapping the path that contains `src`. -abs_input_inside_working_dir: - # We explicitly switch to a directory that *is* a prefix of the directory our - # source code is contained in. - cd $(SRC_DIR) && $(RUSTC) $(SRC_DIR)/src/quux.rs -o "$(TMPDIR)/abs_input_inside_working_dir.rlib" -Cdebuginfo=2 --remap-path-prefix $(SRC_DIR)=REMAPPED - # We expect the path to the main source file to be remapped. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir.rlib | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir.rlib | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path *is* within -# the working directory of the compiler. We are remapping the path that contains `src`. -abs_input_inside_working_dir_scope: - # We explicitly switch to a directory that *is* a prefix of the directory our - # source code is contained in. - cd $(SRC_DIR) && $(RUSTC) $(SRC_DIR)/src/quux.rs -o "$(TMPDIR)/abs_input_inside_working_dir_scope.rlib" -Cdebuginfo=2 --remap-path-prefix $(SRC_DIR)=REMAPPED -Zremap-path-scope=object $(DEBUGINFOOPTS) - # We expect the path to the main source file to be remapped. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir_scope.rlib | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir_scope.rlib | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path is *not* within -# the working directory of the compiler. We are remapping both the path that contains `src` and -# the working directory to the same thing. This setup corresponds to a workaround that is needed -# when trying to remap everything to something that looks like a local path. -# Relative paths are interpreted as relative to the compiler's working directory (e.g. in -# debuginfo). If we also remap the working directory, the compiler strip it from other paths so -# that the final outcome is the desired one again. -abs_input_outside_working_dir: - # We explicitly switch to a directory that is *not* a prefix of the directory our - # source code is contained in. - cd $(TMPDIR) && $(RUSTC) $(SRC_DIR)/src/quux.rs -o "$(TMPDIR)/abs_input_outside_working_dir.rlib" -Cdebuginfo=2 --remap-path-prefix $(SRC_DIR)=REMAPPED --remap-path-prefix $(TMPDIR)=REMAPPED - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_outside_working_dir.rlib | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_outside_working_dir.rlib | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. 
We are remapping the working directory of -# the compiler, which naturally is an implicit prefix of our relative input path. Debuginfo will -# expand the relative path to an absolute path and we expect the working directory to be remapped -# in that expansion. -rel_input_remap_working_dir: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)=REMAPPED" - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir.rlib" | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. We are remapping the working directory of -# the compiler, which naturally is an implicit prefix of our relative input path. Debuginfo will -# expand the relative path to an absolute path and we expect the working directory to be remapped -# in that expansion. -rel_input_remap_working_dir_scope: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)=REMAPPED" -Zremap-path-scope=object $(DEBUGINFOOPTS) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -rel_input_remap_working_dir_diagnostics: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)=REMAPPED" -Zremap-path-scope=diagnostics $(DEBUGINFOOPTS) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) -v "REMAPPED/src/quux.rs" - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. We are remapping a *SUB-DIRECTORY* of the -# compiler's working directory. This test makes sure that that directory is remapped even though it -# won't actually show up in this form in the compiler's SourceMap and instead is only constructed -# on demand during debuginfo generation. -rel_input_remap_working_dir_child: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)/src=REMAPPED" - # We expect `src/quux.rs` to have been remapped to `REMAPPED/quux.rs`. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" | $(CGREP) "REMAPPED/quux.rs" - # We don't want to find the path that we just remapped anywhere in the DWARF - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" | $(CGREP) -v "$(SRC_DIR)/src" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. We are remapping a *PARENT DIRECTORY* of -# the compiler's working directory. 
-rel_input_remap_working_dir_parent: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR_PARENT)=REMAPPED" - # We expect `src/quux.rs` to have been remapped to `REMAPPED/remap-path-prefix-dwarf/src/quux.rs`. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" | $(CGREP) "REMAPPED/remap-path-prefix-dwarf/src/quux.rs" - # We don't want to find the path that we just remapped anywhere in the DWARF - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" | $(CGREP) -v "$(SRC_DIR_PARENT)" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" diff --git a/tests/run-make/remap-path-prefix-dwarf/rmake.rs b/tests/run-make/remap-path-prefix-dwarf/rmake.rs new file mode 100644 index 00000000000..ede1d615742 --- /dev/null +++ b/tests/run-make/remap-path-prefix-dwarf/rmake.rs @@ -0,0 +1,202 @@ +// This test makes sure that --remap-path-prefix has the expected effects on paths in debuginfo. +// We explicitly switch to a directory that *is* a prefix of the directory our +// source code is contained in. +// It tests several cases, each of them has a detailed description attached to it. +// See https://github.com/rust-lang/rust/pull/96867 + +//@ ignore-windows +// Reason: the remap path prefix is not printed in the dwarf dump. + +use run_make_support::{cwd, is_darwin, llvm_dwarfdump, rust_lib_name, rustc}; + +fn main() { + // The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path *is* within + // the working directory of the compiler. We are remapping the path that contains `src`. + check_dwarf(DwarfTest { + lib_name: "abs_input_inside_working_dir", + input_path: PathType::Absolute, + scope: None, + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + check_dwarf(DwarfTest { + lib_name: "abs_input_inside_working_dir_scope", + input_path: PathType::Absolute, + scope: Some(ScopeType::Object), + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + // The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path is *not* + // within the working directory of the compiler. We are remapping both the path that contains + // `src` and the working directory to the same thing. This setup corresponds to a workaround + // that is needed when trying to remap everything to something that looks like a local + // path. Relative paths are interpreted as relative to the compiler's working directory (e.g. + // in debuginfo). If we also remap the working directory, the compiler strip it from other + // paths so that the final outcome is the desired one again. + check_dwarf(DwarfTest { + lib_name: "abs_input_outside_working_dir", + input_path: PathType::Absolute, + scope: None, + remap_path_prefix: PrefixType::Dual(( + format!("{}=REMAPPED", cwd().display()), + "rmake_out=REMAPPED".to_owned(), + )), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + // The compiler is called with a *RELATIVE PATH* as input. We are remapping the working + // directory of the compiler, which naturally is an implicit prefix of our relative input path. 
+ // Debuginfo will expand the relative path to an absolute path and we expect the working + // directory to be remapped in that expansion. + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir", + input_path: PathType::Relative, + scope: None, + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_scope", + input_path: PathType::Relative, + scope: Some(ScopeType::Object), + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_scope", + input_path: PathType::Relative, + scope: Some(ScopeType::Diagnostics), + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::AvoidSrcPath, + }); + // The compiler is called with a *RELATIVE PATH* as input. We are remapping a *SUB-DIRECTORY* + // of the compiler's working directory. This test makes sure that that directory is remapped + // even though it won't actually show up in this form in the compiler's SourceMap and instead + // is only constructed on demand during debuginfo generation. + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_child", + input_path: PathType::Relative, + scope: None, + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().join("src").display())), + dwarf_test: DwarfDump::ChildTest, + }); + // The compiler is called with a *RELATIVE PATH* as input. We are remapping a + // *PARENT DIRECTORY* of the compiler's working directory. + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_parent", + input_path: PathType::Relative, + scope: None, + remap_path_prefix: PrefixType::Regular(format!( + "{}=REMAPPED", + cwd().parent().unwrap().display() + )), + dwarf_test: DwarfDump::ParentTest, + }); +} + +#[track_caller] +fn check_dwarf(test: DwarfTest) { + let mut rustc = rustc(); + match test.input_path { + PathType::Absolute => rustc.input(cwd().join("src/quux.rs")), + PathType::Relative => rustc.input("src/quux.rs"), + }; + rustc.output(rust_lib_name(test.lib_name)); + rustc.arg("-Cdebuginfo=2"); + if let Some(scope) = test.scope { + match scope { + ScopeType::Object => rustc.arg("-Zremap-path-scope=object"), + ScopeType::Diagnostics => rustc.arg("-Zremap-path-scope=diagnostics"), + }; + if is_darwin() { + rustc.arg("-Csplit-debuginfo=off"); + } + } + match test.remap_path_prefix { + PrefixType::Regular(prefix) => { + // We explicitly switch to a directory that *is* a prefix of the directory our + // source code is contained in. + rustc.arg("--remap-path-prefix"); + rustc.arg(prefix); + } + PrefixType::Dual((prefix1, prefix2)) => { + // We explicitly switch to a directory that is *not* a prefix of the directory our + // source code is contained in. + rustc.arg("--remap-path-prefix"); + rustc.arg(prefix1); + rustc.arg("--remap-path-prefix"); + rustc.arg(prefix2); + } + } + rustc.run(); + match test.dwarf_test { + DwarfDump::ContainsSrcPath => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + // We expect the path to the main source file to be remapped. 
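+                // (e.g. with `--remap-path-prefix <working dir>=REMAPPED`, a DWARF path that
+                // would otherwise read `<working dir>/src/quux.rs` should now appear as
+                // `REMAPPED/src/quux.rs`)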
+ .assert_stdout_contains("REMAPPED/src/quux.rs") + // No weird duplication of remapped components (see #78479) + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + DwarfDump::AvoidSrcPath => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + .assert_stdout_not_contains("REMAPPED/src/quux.rs") + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + DwarfDump::ChildTest => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + // We expect `src/quux.rs` to have been remapped to `REMAPPED/quux.rs`. + .assert_stdout_contains("REMAPPED/quux.rs") + // We don't want to find the path that we just remapped anywhere in the DWARF + .assert_stdout_not_contains(cwd().join("src").to_str().unwrap()) + // No weird duplication of remapped components (see #78479) + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + DwarfDump::ParentTest => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + // We expect `src/quux.rs` to have been remapped to + // `REMAPPED/remap-path-prefix-dwarf/src/quux.rs`. + .assert_stdout_contains("REMAPPED/rmake_out/src/quux.rs") + // We don't want to find the path that we just remapped anywhere in the DWARF + .assert_stdout_not_contains(cwd().parent().unwrap().to_str().unwrap()) + // No weird duplication of remapped components (see #78479) + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + }; +} + +struct DwarfTest { + lib_name: &'static str, + input_path: PathType, + scope: Option<ScopeType>, + remap_path_prefix: PrefixType, + dwarf_test: DwarfDump, +} + +enum PathType { + Absolute, + Relative, +} + +enum ScopeType { + Object, + Diagnostics, +} + +enum DwarfDump { + ContainsSrcPath, + AvoidSrcPath, + ChildTest, + ParentTest, +} + +enum PrefixType { + Regular(String), + Dual((String, String)), +} diff --git a/tests/run-make/reproducible-build-2/rmake.rs b/tests/run-make/reproducible-build-2/rmake.rs index c500c4238b0..8b5825cad30 100644 --- a/tests/run-make/reproducible-build-2/rmake.rs +++ b/tests/run-make/reproducible-build-2/rmake.rs @@ -13,7 +13,7 @@ // 2. When the sysroot gets copied, some symlinks must be re-created, // which is a privileged action on Windows. 
-use run_make_support::{bin_name, rfs, rust_lib_name, rustc}; +use run_make_support::{rfs, rust_lib_name, rustc}; fn main() { // test 1: fat lto diff --git a/tests/run-make/reset-codegen-1/rmake.rs b/tests/run-make/reset-codegen-1/rmake.rs index d19907a675e..118b3a666ad 100644 --- a/tests/run-make/reset-codegen-1/rmake.rs +++ b/tests/run-make/reset-codegen-1/rmake.rs @@ -9,7 +9,7 @@ use std::path::Path; -use run_make_support::{bin_name, rfs, rustc}; +use run_make_support::{bin_name, rustc}; fn compile(output_file: &str, emit: Option<&str>) { let mut rustc = rustc(); diff --git a/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs b/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs index d152047600f..67e839bec70 100644 --- a/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs +++ b/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs @@ -6,7 +6,7 @@ // See https://github.com/rust-lang/rust/pull/105601 use run_make_support::{ - build_native_static_lib, is_msvc, llvm_ar, regex, rfs, rust_lib_name, rustc, static_lib_name, + build_native_static_lib, llvm_ar, regex, rfs, rust_lib_name, rustc, static_lib_name, }; //@ ignore-cross-compile diff --git a/tests/run-make/run-in-tmpdir-self-test/rmake.rs b/tests/run-make/run-in-tmpdir-self-test/rmake.rs index 83b99bfe863..dba036f8866 100644 --- a/tests/run-make/run-in-tmpdir-self-test/rmake.rs +++ b/tests/run-make/run-in-tmpdir-self-test/rmake.rs @@ -3,7 +3,7 @@ //! when returning from the closure. use std::fs; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use run_make_support::{cwd, run_in_tmpdir}; diff --git a/tests/run-make/rust-lld-by-default-nightly/rmake.rs b/tests/run-make/rust-lld-by-default-nightly/rmake.rs index 79424984c21..02bbe8227f0 100644 --- a/tests/run-make/rust-lld-by-default-nightly/rmake.rs +++ b/tests/run-make/rust-lld-by-default-nightly/rmake.rs @@ -6,8 +6,6 @@ //@ ignore-stable //@ only-x86_64-unknown-linux-gnu -use std::process::Output; - use run_make_support::regex::Regex; use run_make_support::rustc; diff --git a/tests/run-make/rust-lld-compress-debug-sections/rmake.rs b/tests/run-make/rust-lld-compress-debug-sections/rmake.rs index df9691ccbcf..ea4997fab80 100644 --- a/tests/run-make/rust-lld-compress-debug-sections/rmake.rs +++ b/tests/run-make/rust-lld-compress-debug-sections/rmake.rs @@ -6,7 +6,7 @@ // FIXME: This test isn't comprehensive and isn't covering all possible combinations. 
-use run_make_support::{assert_contains, cmd, llvm_readobj, run_in_tmpdir, rustc}; +use run_make_support::{assert_contains, llvm_readobj, run_in_tmpdir, rustc}; fn check_compression(compression: &str, to_find: &str) { run_in_tmpdir(|| { diff --git a/tests/run-make/rust-lld-custom-target/rmake.rs b/tests/run-make/rust-lld-custom-target/rmake.rs index 17247125521..a6f7c33793a 100644 --- a/tests/run-make/rust-lld-custom-target/rmake.rs +++ b/tests/run-make/rust-lld-custom-target/rmake.rs @@ -8,8 +8,6 @@ //@ needs-rust-lld //@ only-x86_64-unknown-linux-gnu -use std::process::Output; - use run_make_support::regex::Regex; use run_make_support::rustc; diff --git a/tests/run-make/rust-lld/rmake.rs b/tests/run-make/rust-lld/rmake.rs index d0bc19130d7..1f311af1ed5 100644 --- a/tests/run-make/rust-lld/rmake.rs +++ b/tests/run-make/rust-lld/rmake.rs @@ -4,8 +4,6 @@ //@ needs-rust-lld //@ ignore-s390x lld does not yet support s390x as target -use std::process::Output; - use run_make_support::regex::Regex; use run_make_support::{is_msvc, rustc}; diff --git a/tests/run-make/rustdoc-io-error/rmake.rs b/tests/run-make/rustdoc-io-error/rmake.rs index d64c5106ffb..a5fae36e733 100644 --- a/tests/run-make/rustdoc-io-error/rmake.rs +++ b/tests/run-make/rustdoc-io-error/rmake.rs @@ -14,22 +14,20 @@ // `mkfs.ext4 -d`, as well as mounting a loop device for the rootfs. //@ ignore-windows - the `set_readonly` functions doesn't work on folders. -use std::fs; - -use run_make_support::{path, rustdoc}; +use run_make_support::{path, rfs, rustdoc}; fn main() { let out_dir = path("rustdoc-io-error"); - let output = fs::create_dir(&out_dir).unwrap(); - let mut permissions = fs::metadata(&out_dir).unwrap().permissions(); + rfs::create_dir(&out_dir); + let mut permissions = rfs::metadata(&out_dir).permissions(); let original_permissions = permissions.clone(); permissions.set_readonly(true); - fs::set_permissions(&out_dir, permissions).unwrap(); + rfs::set_permissions(&out_dir, permissions); let output = rustdoc().input("foo.rs").output(&out_dir).env("RUST_BACKTRACE", "1").run_fail(); - fs::set_permissions(&out_dir, original_permissions).unwrap(); + rfs::set_permissions(&out_dir, original_permissions); output .assert_exit_code(1) diff --git a/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs b/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs index 4badc68845e..eca07043b55 100644 --- a/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs +++ b/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs @@ -1,6 +1,6 @@ use std::path::Path; -use run_make_support::{htmldocck, rfs, rustc, rustdoc, source_root}; +use run_make_support::{htmldocck, rfs, rustc, rustdoc}; pub fn scrape(extra_args: &[&str]) { let out_dir = Path::new("rustdoc"); diff --git a/tests/run-make/sepcomp-cci-copies/rmake.rs b/tests/run-make/sepcomp-cci-copies/rmake.rs index e244f546580..a66cc2872b4 100644 --- a/tests/run-make/sepcomp-cci-copies/rmake.rs +++ b/tests/run-make/sepcomp-cci-copies/rmake.rs @@ -4,10 +4,7 @@ // created for each source module (see `rustc_const_eval::monomorphize::partitioning`). 
// See https://github.com/rust-lang/rust/pull/16367 -use run_make_support::{ - count_regex_matches_in_files_with_extension, cwd, has_extension, regex, rfs, rustc, - shallow_find_files, -}; +use run_make_support::{count_regex_matches_in_files_with_extension, regex, rustc}; fn main() { rustc().input("cci_lib.rs").run(); diff --git a/tests/run-make/sepcomp-inlining/rmake.rs b/tests/run-make/sepcomp-inlining/rmake.rs index b7a6bed05b8..ea4a4d210cc 100644 --- a/tests/run-make/sepcomp-inlining/rmake.rs +++ b/tests/run-make/sepcomp-inlining/rmake.rs @@ -5,10 +5,7 @@ // in only one compilation unit. // See https://github.com/rust-lang/rust/pull/16367 -use run_make_support::{ - count_regex_matches_in_files_with_extension, cwd, has_extension, regex, rfs, rustc, - shallow_find_files, -}; +use run_make_support::{count_regex_matches_in_files_with_extension, regex, rustc}; fn main() { rustc().input("foo.rs").emit("llvm-ir").codegen_units(3).arg("-Zinline-in-all-cgus").run(); diff --git a/tests/run-make/sepcomp-separate/rmake.rs b/tests/run-make/sepcomp-separate/rmake.rs index 90017572c4c..49958044a61 100644 --- a/tests/run-make/sepcomp-separate/rmake.rs +++ b/tests/run-make/sepcomp-separate/rmake.rs @@ -3,10 +3,7 @@ // wind up in three different compilation units. // See https://github.com/rust-lang/rust/pull/16367 -use run_make_support::{ - count_regex_matches_in_files_with_extension, cwd, has_extension, regex, rfs, rustc, - shallow_find_files, -}; +use run_make_support::{count_regex_matches_in_files_with_extension, regex, rustc}; fn main() { rustc().input("foo.rs").emit("llvm-ir").codegen_units(3).run(); diff --git a/tests/run-make/sysroot-crates-are-unstable/Makefile b/tests/run-make/sysroot-crates-are-unstable/Makefile deleted file mode 100644 index 1e267fb9576..00000000000 --- a/tests/run-make/sysroot-crates-are-unstable/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -all: - '$(PYTHON)' test.py diff --git a/tests/run-make/sysroot-crates-are-unstable/rmake.rs b/tests/run-make/sysroot-crates-are-unstable/rmake.rs new file mode 100644 index 00000000000..24da387eb80 --- /dev/null +++ b/tests/run-make/sysroot-crates-are-unstable/rmake.rs @@ -0,0 +1,5 @@ +use run_make_support::python_command; + +fn main() { + python_command().arg("test.py").run(); +} diff --git a/tests/rustdoc-gui/code-example-buttons.goml b/tests/rustdoc-gui/code-example-buttons.goml index 57ea2970072..4f037ec79f5 100644 --- a/tests/rustdoc-gui/code-example-buttons.goml +++ b/tests/rustdoc-gui/code-example-buttons.goml @@ -1,5 +1,6 @@ // This test ensures that code blocks buttons are displayed on hover and when you click on them. go-to: "file://" + |DOC_PATH| + "/test_docs/fn.foo.html" +include: "utils.goml" // First we check we "hover". move-cursor-to: ".example-wrap" @@ -19,3 +20,77 @@ click: ".example-wrap" move-cursor-to: ".search-input" assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0) assert-css: (".example-wrap .copy-button", { "visibility": "hidden" }) + +// Clicking on the "copy code" button shouldn't make the buttons stick. 
+click: ".example-wrap .copy-button" +move-cursor-to: ".search-input" +assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0) +assert-css: (".example-wrap .copy-button", { "visibility": "hidden" }) + +define-function: ( + "check-buttons", + [theme, background, filter, filter_hover], + block { + call-function: ("switch-theme", {"theme": |theme|}) + + assert-css: (".example-wrap .test-arrow", {"visibility": "hidden"}) + assert-css: (".example-wrap .copy-button", {"visibility": "hidden"}) + + move-cursor-to: ".example-wrap" + assert-css: (".example-wrap .test-arrow", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .test-arrow::before", { + "filter": |filter|, + }) + assert-css: (".example-wrap .copy-button", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .copy-button::before", { + "filter": |filter|, + }) + + move-cursor-to: ".example-wrap .test-arrow" + assert-css: (".example-wrap .test-arrow:hover", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .test-arrow:hover::before", { + "filter": |filter_hover|, + }) + + move-cursor-to: ".example-wrap .copy-button" + assert-css: (".example-wrap .copy-button:hover", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .copy-button:hover::before", { + "filter": |filter_hover|, + }) + }, +) + +call-function: ("check-buttons",{ + "theme": "ayu", + "background": "#0f1419", + "filter": "invert(0.7)", + "filter_hover": "invert(1)", +}) +call-function: ("check-buttons",{ + "theme": "dark", + "background": "#353535", + "filter": "invert(0.5)", + "filter_hover": "invert(0.65)", +}) +call-function: ("check-buttons",{ + "theme": "light", + "background": "#fff", + "filter": "invert(0.5)", + "filter_hover": "invert(0.35)", +}) diff --git a/tests/rustdoc-gui/copy-code.goml b/tests/rustdoc-gui/copy-code.goml index 72a5bece175..9cc717bc67a 100644 --- a/tests/rustdoc-gui/copy-code.goml +++ b/tests/rustdoc-gui/copy-code.goml @@ -24,11 +24,11 @@ define-function: ( ) call-function: ("check-copy-button", {}) -// Checking that the run button and the copy button have the same height. +// Checking that the run button and the copy button have the same height and same width. compare-elements-size: ( ".example-wrap:nth-of-type(1) .test-arrow", ".example-wrap:nth-of-type(1) .copy-button", - ["height"], + ["height", "width"], ) // ... and the same y position. compare-elements-position: ( diff --git a/tests/rustdoc-gui/run-on-hover.goml b/tests/rustdoc-gui/run-on-hover.goml deleted file mode 100644 index b62da79b780..00000000000 --- a/tests/rustdoc-gui/run-on-hover.goml +++ /dev/null @@ -1,54 +0,0 @@ -// Example code blocks sometimes have a "Run" button to run them on the -// Playground. That button is hidden until the user hovers over the code block. -// This test checks that it is hidden, and that it shows on hover. It also -// checks for its color. 
-include: "utils.goml" -go-to: "file://" + |DOC_PATH| + "/test_docs/fn.foo.html" -show-text: true - -define-function: ( - "check-run-button", - [theme, color, background, hover_color, hover_background], - block { - call-function: ("switch-theme", {"theme": |theme|}) - assert-css: (".test-arrow", {"visibility": "hidden"}) - move-cursor-to: ".example-wrap" - assert-css: (".test-arrow", { - "visibility": "visible", - "color": |color|, - "background-color": |background|, - "font-size": "16px", - "border-radius": "2px", - }) - move-cursor-to: ".test-arrow" - assert-css: (".test-arrow:hover", { - "visibility": "visible", - "color": |hover_color|, - "background-color": |hover_background|, - "font-size": "16px", - "border-radius": "2px", - }) - }, -) - -call-function: ("check-run-button", { - "theme": "ayu", - "color": "#788797", - "background": "rgba(57, 175, 215, 0.09)", - "hover_color": "#c5c5c5", - "hover_background": "rgba(57, 175, 215, 0.37)", -}) -call-function: ("check-run-button", { - "theme": "dark", - "color": "#dedede", - "background": "rgba(78, 139, 202, 0.2)", - "hover_color": "#dedede", - "hover_background": "rgb(78, 139, 202)", -}) -call-function: ("check-run-button", { - "theme": "light", - "color": "#f5f5f5", - "background": "rgba(78, 139, 202, 0.2)", - "hover_color": "#f5f5f5", - "hover_background": "rgb(78, 139, 202)", -}) diff --git a/tests/rustdoc-ui/2024-doctests-checks.rs b/tests/rustdoc-ui/2024-doctests-checks.rs new file mode 100644 index 00000000000..464cf5b200d --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-checks.rs @@ -0,0 +1,27 @@ +//@ check-pass +//@ compile-flags: --test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ normalize-stdout-test: ".rs:\d+:\d+" -> ".rs:$$LINE:$$COL" + +/// ``` +/// let x = 12; +/// ``` +/// +/// This one should not be a merged doctest (because of `$crate`). The output +/// will confirm it by displaying both merged and standalone doctest passes. +/// +/// ``` +/// macro_rules! bla { +/// () => {{ +/// $crate::foo(); +/// }} +/// } +/// +/// fn foo() {} +/// +/// fn main() { +/// bla!(); +/// } +/// ``` +pub struct Foo; diff --git a/tests/rustdoc-ui/2024-doctests-checks.stdout b/tests/rustdoc-ui/2024-doctests-checks.stdout new file mode 100644 index 00000000000..d1064084a85 --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-checks.stdout @@ -0,0 +1,12 @@ + +running 1 test +test $DIR/2024-doctests-checks.rs - Foo (line 7) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + +running 1 test +test $DIR/2024-doctests-checks.rs - Foo (line 14) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/2024-doctests-crate-attribute.rs b/tests/rustdoc-ui/2024-doctests-crate-attribute.rs new file mode 100644 index 00000000000..4984fdfe194 --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-crate-attribute.rs @@ -0,0 +1,22 @@ +//@ check-pass +//@ compile-flags: --test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ normalize-stdout-test: ".rs:\d+:\d+" -> ".rs:$$LINE:$$COL" + +/// This doctest is used to ensure that if a crate attribute is present, +/// it will not be part of the merged doctests. 
+///
+/// ```
+/// #![doc(html_playground_url = "foo")]
+///
+/// pub struct Bar;
+/// ```
+///
+/// This one will allow us to confirm that the doctest above will be a
+/// standalone one (there will be two separate doctest passes).
+///
+/// ```
+/// let x = 12;
+/// ```
+pub struct Foo;
diff --git a/tests/rustdoc-ui/2024-doctests-crate-attribute.stdout b/tests/rustdoc-ui/2024-doctests-crate-attribute.stdout
new file mode 100644
index 00000000000..29702ce8929
--- /dev/null
+++ b/tests/rustdoc-ui/2024-doctests-crate-attribute.stdout
@@ -0,0 +1,12 @@
+
+running 1 test
+test $DIR/2024-doctests-crate-attribute.rs - Foo (line 19) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME
+
+
+running 1 test
+test $DIR/2024-doctests-crate-attribute.rs - Foo (line 10) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME
+
diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.rs b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.rs
new file mode 100644
index 00000000000..4fe513b4066
--- /dev/null
+++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.rs
@@ -0,0 +1,12 @@
+// FIXME: if/when the output of the test harness can be tested on its own, this test should be
+// adapted to use that, and that normalize line can go away
+
+//@ compile-flags:--test --edition 2021
+//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR"
+//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME"
+//@ failure-status: 101
+
+/// ```should_panic
+/// println!("Hello, world!");
+/// ```
+pub struct Foo;
diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.stdout b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.stdout
new file mode 100644
index 00000000000..63d987de8a9
--- /dev/null
+++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.stdout
@@ -0,0 +1,14 @@
+
+running 1 test
+test $DIR/failed-doctest-should-panic-2021.rs - Foo (line 9) ... FAILED
+
+failures:
+
+---- $DIR/failed-doctest-should-panic-2021.rs - Foo (line 9) stdout ----
+Test executable succeeded, but it's marked `should_panic`.
+
+failures:
+    $DIR/failed-doctest-should-panic-2021.rs - Foo (line 9)
+
+test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME
+
diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs
index b24687993e5..4018e37105f 100644
--- a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs
+++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs
@@ -1,7 +1,7 @@
 // FIXME: if/when the output of the test harness can be tested on its own, this test should be
 // adapted to use that, and that normalize line can go away
 
-//@ compile-flags:--test
+//@ compile-flags:--test -Z unstable-options --edition 2024
 //@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR"
 //@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME"
 //@ failure-status: 101
diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout
index 57a20092a5d..cb3456e087e 100644
--- a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout
+++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout
@@ -1,11 +1,11 @@
 
 running 1 test
-test $DIR/failed-doctest-should-panic.rs - Foo (line 9) ...
FAILED +test $DIR/failed-doctest-should-panic.rs - Foo (line 9) - should panic ... FAILED failures: ---- $DIR/failed-doctest-should-panic.rs - Foo (line 9) stdout ---- -Test executable succeeded, but it's marked `should_panic`. +note: test did not panic as expected failures: $DIR/failed-doctest-should-panic.rs - Foo (line 9) diff --git a/tests/rustdoc-ui/doctest/merged-ignore-no_run.rs b/tests/rustdoc-ui/doctest/merged-ignore-no_run.rs new file mode 100644 index 00000000000..4c21d542951 --- /dev/null +++ b/tests/rustdoc-ui/doctest/merged-ignore-no_run.rs @@ -0,0 +1,14 @@ +//@ compile-flags:--test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ check-pass + +/// ```ignore (test) +/// let x = 12; +/// ``` +pub fn ignored() {} + +/// ```no_run +/// panic!("blob"); +/// ``` +pub fn no_run() {} diff --git a/tests/rustdoc-ui/doctest/merged-ignore-no_run.stdout b/tests/rustdoc-ui/doctest/merged-ignore-no_run.stdout new file mode 100644 index 00000000000..f2cb1e7e72f --- /dev/null +++ b/tests/rustdoc-ui/doctest/merged-ignore-no_run.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test $DIR/merged-ignore-no_run.rs - ignored (line 6) ... ignored +test $DIR/merged-ignore-no_run.rs - no_run (line 11) - compile ... ok + +test result: ok. 1 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/doctest/wrong-ast-2024.rs b/tests/rustdoc-ui/doctest/wrong-ast-2024.rs new file mode 100644 index 00000000000..7b4fa8fd2c9 --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast-2024.rs @@ -0,0 +1,20 @@ +//@ compile-flags:--test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ normalize-stdout-test: ".rs:\d+:\d+" -> ".rs:$$LINE:$$COL" +//@ failure-status: 101 + +/// ``` +/// /* plop +/// ``` +pub fn one() {} + +/// ``` +/// } mod __doctest_1 { fn main() { +/// ``` +pub fn two() {} + +/// ```should_panic +/// panic!() +/// ``` +pub fn three() {} diff --git a/tests/rustdoc-ui/doctest/wrong-ast-2024.stdout b/tests/rustdoc-ui/doctest/wrong-ast-2024.stdout new file mode 100644 index 00000000000..22c8ce468fd --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast-2024.stdout @@ -0,0 +1,41 @@ + +running 1 test +test $DIR/wrong-ast-2024.rs - three (line 17) - should panic ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + +running 2 tests +test $DIR/wrong-ast-2024.rs - one (line 7) ... FAILED +test $DIR/wrong-ast-2024.rs - two (line 12) ... FAILED + +failures: + +---- $DIR/wrong-ast-2024.rs - one (line 7) stdout ---- +error[E0758]: unterminated block comment + --> $DIR/wrong-ast-2024.rs:$LINE:$COL + | +LL | /* plop + | ^^^^^^^ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0758`. +Couldn't compile the test. +---- $DIR/wrong-ast-2024.rs - two (line 12) stdout ---- +error: unexpected closing delimiter: `}` + --> $DIR/wrong-ast-2024.rs:$LINE:$COL + | +LL | } mod __doctest_1 { fn main() { + | ^ unexpected closing delimiter + +error: aborting due to 1 previous error + +Couldn't compile the test. + +failures: + $DIR/wrong-ast-2024.rs - one (line 7) + $DIR/wrong-ast-2024.rs - two (line 12) + +test result: FAILED. 
0 passed; 2 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/doctest/wrong-ast.rs b/tests/rustdoc-ui/doctest/wrong-ast.rs new file mode 100644 index 00000000000..92286b33dcf --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast.rs @@ -0,0 +1,19 @@ +//@ compile-flags:--test --test-args=--test-threads=1 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ failure-status: 101 + +/// ``` +/// /* plop +/// ``` +pub fn one() {} + +/// ``` +/// } mod __doctest_1 { fn main() { +/// ``` +pub fn two() {} + +/// ```should_panic +/// panic!() +/// ``` +pub fn three() {} diff --git a/tests/rustdoc-ui/doctest/wrong-ast.stdout b/tests/rustdoc-ui/doctest/wrong-ast.stdout new file mode 100644 index 00000000000..15494706c16 --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast.stdout @@ -0,0 +1,36 @@ + +running 3 tests +test $DIR/wrong-ast.rs - one (line 6) ... FAILED +test $DIR/wrong-ast.rs - three (line 16) ... ok +test $DIR/wrong-ast.rs - two (line 11) ... FAILED + +failures: + +---- $DIR/wrong-ast.rs - one (line 6) stdout ---- +error[E0758]: unterminated block comment + --> $DIR/wrong-ast.rs:7:1 + | +LL | /* plop + | ^^^^^^^ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0758`. +Couldn't compile the test. +---- $DIR/wrong-ast.rs - two (line 11) stdout ---- +error: unexpected closing delimiter: `}` + --> $DIR/wrong-ast.rs:12:1 + | +LL | } mod __doctest_1 { fn main() { + | ^ unexpected closing delimiter + +error: aborting due to 1 previous error + +Couldn't compile the test. + +failures: + $DIR/wrong-ast.rs - one (line 6) + $DIR/wrong-ast.rs - two (line 11) + +test result: FAILED. 1 passed; 2 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs index f11b94bb036..babdbd0a692 100644 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs +++ b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs @@ -2,5 +2,6 @@ // option can only be used with HTML generation. 
//@ compile-flags: -Zunstable-options --generate-link-to-definition --output-format json +//@ check-pass pub fn f() {} diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr index 4c8c607e7da..62b0e3ce408 100644 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr +++ b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr @@ -1,2 +1,4 @@ -error: --generate-link-to-definition option can only be used with HTML output format +warning: `--generate-link-to-definition` option can only be used with HTML output format + | + = note: `--generate-link-to-definition` option will be ignored diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.rs b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.rs deleted file mode 100644 index 71852205979..00000000000 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.rs +++ /dev/null @@ -1,6 +0,0 @@ -// This test purpose is to check that the "--generate-link-to-definition" -// option can only be used with HTML generation. - -//@ compile-flags: -Zunstable-options --generate-link-to-definition --show-coverage - -pub fn f() {} diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.stderr b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.stderr deleted file mode 100644 index 4c8c607e7da..00000000000 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.stderr +++ /dev/null @@ -1,2 +0,0 @@ -error: --generate-link-to-definition option can only be used with HTML output format - diff --git a/tests/rustdoc/playground-arg.rs b/tests/rustdoc/playground-arg.rs index 5875451a859..e10a31017ef 100644 --- a/tests/rustdoc/playground-arg.rs +++ b/tests/rustdoc/playground-arg.rs @@ -10,4 +10,4 @@ pub fn dummy() {} // ensure that `extern crate foo;` was inserted into code snips automatically: -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://example.com/?code=%23!%5Ballow(unused)%5D%0A%23%5Ballow(unused_extern_crates)%5D%0Aextern+crate+r%23foo;%0Afn+main()+%7B%0A++++use+foo::dummy;%0A++++dummy();%0A%7D&edition=2015"]' "Run" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://example.com/?code=%23!%5Ballow(unused)%5D%0A%23%5Ballow(unused_extern_crates)%5D%0Aextern+crate+r%23foo;%0Afn+main()+%7B%0A++++use+foo::dummy;%0A++++dummy();%0A%7D&edition=2015"]' "" diff --git a/tests/rustdoc/playground-syntax-error.rs b/tests/rustdoc/playground-syntax-error.rs index f5067145e02..d989570bf58 100644 --- a/tests/rustdoc/playground-syntax-error.rs +++ b/tests/rustdoc/playground-syntax-error.rs @@ -17,5 +17,5 @@ pub fn bar() {} //@ has foo/fn.bar.html -//@ has - '//a[@class="test-arrow"]' "Run" +//@ has - '//a[@class="test-arrow"]' "" //@ has - '//*[@class="docblock"]' 'foo_recursive' diff --git a/tests/rustdoc/playground.rs b/tests/rustdoc/playground.rs index 7880f779067..db2d1669df6 100644 --- a/tests/rustdoc/playground.rs +++ b/tests/rustdoc/playground.rs @@ -22,6 +22,6 @@ //! } //! 
``` -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "Run" -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "Run" -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0A%23!%5Bfeature(something)%5D%0A%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&version=nightly&edition=2015"]' "Run" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0A%23!%5Bfeature(something)%5D%0A%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&version=nightly&edition=2015"]' "" diff --git a/tests/ui/asm/aarch64/bad-reg.rs b/tests/ui/asm/aarch64/bad-reg.rs index 1e54b6505db..b99e5fe4b9e 100644 --- a/tests/ui/asm/aarch64/bad-reg.rs +++ b/tests/ui/asm/aarch64/bad-reg.rs @@ -1,8 +1,6 @@ //@ only-aarch64 //@ compile-flags: -C target-feature=+neon -#![feature(asm_const)] - use std::arch::asm; fn main() { diff --git a/tests/ui/asm/aarch64/bad-reg.stderr b/tests/ui/asm/aarch64/bad-reg.stderr index 717a788caf6..370752ad0f1 100644 --- a/tests/ui/asm/aarch64/bad-reg.stderr +++ b/tests/ui/asm/aarch64/bad-reg.stderr @@ -1,17 +1,17 @@ error: invalid register class `foo`: unknown register class - --> $DIR/bad-reg.rs:14:20 + --> $DIR/bad-reg.rs:12:20 | LL | asm!("{}", in(foo) foo); | ^^^^^^^^^^^ error: invalid register `foo`: unknown register - --> $DIR/bad-reg.rs:16:18 + --> $DIR/bad-reg.rs:14:18 | LL | asm!("", in("foo") foo); | ^^^^^^^^^^^^^ error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:18:15 + --> $DIR/bad-reg.rs:16:15 | LL | asm!("{:z}", in(reg) foo); | ^^^^ ----------- argument @@ -21,7 +21,7 @@ LL | asm!("{:z}", in(reg) foo); = note: the `reg` register class supports the following template modifiers: `w`, `x` error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:20:15 + --> $DIR/bad-reg.rs:18:15 | LL | asm!("{:r}", in(vreg) foo); | ^^^^ ------------ argument @@ -31,7 +31,7 @@ LL | asm!("{:r}", in(vreg) foo); = note: the `vreg` register class supports the following template modifiers: `b`, `h`, `s`, `d`, `q`, `v` error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:22:15 + --> $DIR/bad-reg.rs:20:15 | LL | asm!("{:r}", in(vreg_low16) foo); | ^^^^ ------------------ argument @@ -41,7 +41,7 @@ LL | asm!("{:r}", in(vreg_low16) foo); = note: the `vreg_low16` register class supports the following template modifiers: `b`, `h`, `s`, `d`, `q`, `v` error: asm template modifiers are not allowed for `const` arguments - --> $DIR/bad-reg.rs:24:15 + --> $DIR/bad-reg.rs:22:15 | LL | asm!("{:a}", const 0); | ^^^^ ------- argument @@ -49,7 +49,7 @@ LL | asm!("{:a}", const 0); | template modifier error: asm template modifiers are not allowed for `sym` arguments - --> $DIR/bad-reg.rs:26:15 + --> $DIR/bad-reg.rs:24:15 | LL | asm!("{:a}", sym main); | 
^^^^ -------- argument @@ -57,49 +57,49 @@ LL | asm!("{:a}", sym main); | template modifier error: invalid register `x29`: the frame pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:28:18 + --> $DIR/bad-reg.rs:26:18 | LL | asm!("", in("x29") foo); | ^^^^^^^^^^^^^ error: invalid register `sp`: the stack pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:30:18 + --> $DIR/bad-reg.rs:28:18 | LL | asm!("", in("sp") foo); | ^^^^^^^^^^^^ error: invalid register `xzr`: the zero register cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:32:18 + --> $DIR/bad-reg.rs:30:18 | LL | asm!("", in("xzr") foo); | ^^^^^^^^^^^^^ error: invalid register `x19`: x19 is used internally by LLVM and cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:34:18 + --> $DIR/bad-reg.rs:32:18 | LL | asm!("", in("x19") foo); | ^^^^^^^^^^^^^ error: register class `preg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:37:18 + --> $DIR/bad-reg.rs:35:18 | LL | asm!("", in("p0") foo); | ^^^^^^^^^^^^ error: register class `preg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:41:20 + --> $DIR/bad-reg.rs:39:20 | LL | asm!("{}", in(preg) foo); | ^^^^^^^^^^^^ error: register class `preg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:44:20 + --> $DIR/bad-reg.rs:42:20 | LL | asm!("{}", out(preg) _); | ^^^^^^^^^^^ error: register `w0` conflicts with register `x0` - --> $DIR/bad-reg.rs:50:32 + --> $DIR/bad-reg.rs:48:32 | LL | asm!("", in("x0") foo, in("w0") bar); | ------------ ^^^^^^^^^^^^ register `w0` @@ -107,7 +107,7 @@ LL | asm!("", in("x0") foo, in("w0") bar); | register `x0` error: register `x0` conflicts with register `x0` - --> $DIR/bad-reg.rs:52:32 + --> $DIR/bad-reg.rs:50:32 | LL | asm!("", in("x0") foo, out("x0") bar); | ------------ ^^^^^^^^^^^^^ register `x0` @@ -115,13 +115,13 @@ LL | asm!("", in("x0") foo, out("x0") bar); | register `x0` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:52:18 + --> $DIR/bad-reg.rs:50:18 | LL | asm!("", in("x0") foo, out("x0") bar); | ^^^^^^^^^^^^ error: register `q0` conflicts with register `v0` - --> $DIR/bad-reg.rs:55:32 + --> $DIR/bad-reg.rs:53:32 | LL | asm!("", in("v0") foo, in("q0") bar); | ------------ ^^^^^^^^^^^^ register `q0` @@ -129,7 +129,7 @@ LL | asm!("", in("v0") foo, in("q0") bar); | register `v0` error: register `q0` conflicts with register `v0` - --> $DIR/bad-reg.rs:57:32 + --> $DIR/bad-reg.rs:55:32 | LL | asm!("", in("v0") foo, out("q0") bar); | ------------ ^^^^^^^^^^^^^ register `q0` @@ -137,13 +137,13 @@ LL | asm!("", in("v0") foo, out("q0") bar); | register `v0` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:57:18 + --> $DIR/bad-reg.rs:55:18 | LL | asm!("", in("v0") foo, out("q0") bar); | ^^^^^^^^^^^^ error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:37:27 + --> $DIR/bad-reg.rs:35:27 | LL | asm!("", in("p0") foo); | ^^^ @@ -151,7 +151,7 @@ LL | asm!("", in("p0") foo); = note: register class `preg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:41:29 + --> $DIR/bad-reg.rs:39:29 | LL | asm!("{}", in(preg) foo); | ^^^ diff --git a/tests/ui/asm/aarch64/const.rs b/tests/ui/asm/aarch64/const.rs index a1fadb2115b..3eab5138d7d 100644 --- a/tests/ui/asm/aarch64/const.rs +++ b/tests/ui/asm/aarch64/const.rs @@ -2,8 +2,6 @@ //@ run-pass //@ needs-asm-support 
-#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn const_generic<const X: usize>() -> usize { diff --git a/tests/ui/asm/aarch64/parse-error.rs b/tests/ui/asm/aarch64/parse-error.rs index ac73bbf99c9..aa731c35dda 100644 --- a/tests/ui/asm/aarch64/parse-error.rs +++ b/tests/ui/asm/aarch64/parse-error.rs @@ -1,7 +1,5 @@ //@ only-aarch64 -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn main() { diff --git a/tests/ui/asm/aarch64/parse-error.stderr b/tests/ui/asm/aarch64/parse-error.stderr index e2c798c798e..7b273282ee6 100644 --- a/tests/ui/asm/aarch64/parse-error.stderr +++ b/tests/ui/asm/aarch64/parse-error.stderr @@ -1,107 +1,107 @@ error: requires at least a template string argument - --> $DIR/parse-error.rs:11:9 + --> $DIR/parse-error.rs:9:9 | LL | asm!(); | ^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:13:14 + --> $DIR/parse-error.rs:11:14 | LL | asm!(foo); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:15:19 + --> $DIR/parse-error.rs:13:19 | LL | asm!("{}" foo); | ^^^ expected `,` error: expected operand, clobber_abi, options, or additional template string - --> $DIR/parse-error.rs:17:20 + --> $DIR/parse-error.rs:15:20 | LL | asm!("{}", foo); | ^^^ expected operand, clobber_abi, options, or additional template string error: expected `(`, found `foo` - --> $DIR/parse-error.rs:19:23 + --> $DIR/parse-error.rs:17:23 | LL | asm!("{}", in foo); | ^^^ expected `(` error: expected `)`, found `foo` - --> $DIR/parse-error.rs:21:27 + --> $DIR/parse-error.rs:19:27 | LL | asm!("{}", in(reg foo)); | ^^^ expected `)` error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:23:27 + --> $DIR/parse-error.rs:21:27 | LL | asm!("{}", in(reg)); | ^ expected expression error: expected register class or explicit register - --> $DIR/parse-error.rs:25:26 + --> $DIR/parse-error.rs:23:26 | LL | asm!("{}", inout(=) foo => bar); | ^ error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:27:37 + --> $DIR/parse-error.rs:25:37 | LL | asm!("{}", inout(reg) foo =>); | ^ expected expression error: expected one of `!`, `,`, `.`, `::`, `?`, `{`, or an operator, found `=>` - --> $DIR/parse-error.rs:29:32 + --> $DIR/parse-error.rs:27:32 | LL | asm!("{}", in(reg) foo => bar); | ^^ expected one of 7 possible tokens error: expected a path for argument to `sym` - --> $DIR/parse-error.rs:31:24 + --> $DIR/parse-error.rs:29:24 | LL | asm!("{}", sym foo + bar); | ^^^^^^^^^ error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:33:26 + --> $DIR/parse-error.rs:31:26 | LL | asm!("", options(foo)); | ^^^ expected one of 10 possible tokens error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:35:32 + --> $DIR/parse-error.rs:33:32 | LL | asm!("", options(nomem foo)); | ^^^ expected one of `)` or `,` error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:37:33 + --> $DIR/parse-error.rs:35:33 | LL | asm!("", options(nomem, foo)); | ^^^ expected one of 10 possible tokens error: expected string literal - --> $DIR/parse-error.rs:41:30 + --> $DIR/parse-error.rs:39:30 | LL | asm!("", clobber_abi(foo)); | ^^^ not a string literal error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:43:34 + --> $DIR/parse-error.rs:41:34 | LL | asm!("", clobber_abi("C" 
foo)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:45:35 + --> $DIR/parse-error.rs:43:35 | LL | asm!("", clobber_abi("C", foo)); | ^^^ not a string literal error: duplicate argument named `a` - --> $DIR/parse-error.rs:52:36 + --> $DIR/parse-error.rs:50:36 | LL | asm!("{a}", a = const foo, a = const bar); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -109,7 +109,7 @@ LL | asm!("{a}", a = const foo, a = const bar); | previously here error: argument never used - --> $DIR/parse-error.rs:52:36 + --> $DIR/parse-error.rs:50:36 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^^^^^^^^^^^ argument never used @@ -117,13 +117,13 @@ LL | asm!("{a}", a = const foo, a = const bar); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: explicit register arguments cannot have names - --> $DIR/parse-error.rs:57:18 + --> $DIR/parse-error.rs:55:18 | LL | asm!("", a = in("x0") foo); | ^^^^^^^^^^^^^^^^ error: positional arguments cannot follow named arguments or explicit register arguments - --> $DIR/parse-error.rs:63:35 + --> $DIR/parse-error.rs:61:35 | LL | asm!("{1}", in("x0") foo, const bar); | ------------ ^^^^^^^^^ positional argument @@ -131,19 +131,19 @@ LL | asm!("{1}", in("x0") foo, const bar); | explicit register argument error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `""` - --> $DIR/parse-error.rs:66:29 + --> $DIR/parse-error.rs:64:29 | LL | asm!("", options(), ""); | ^^ expected one of 10 possible tokens error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:68:33 + --> $DIR/parse-error.rs:66:33 | LL | asm!("{}", in(reg) foo, "{}", out(reg) foo); | ^^^^ expected one of 10 possible tokens error: asm template must be a string literal - --> $DIR/parse-error.rs:70:14 + --> $DIR/parse-error.rs:68:14 | LL | asm!(format!("{{{}}}", 0), in(reg) foo); | ^^^^^^^^^^^^^^^^^^^^ @@ -151,7 +151,7 @@ LL | asm!(format!("{{{}}}", 0), in(reg) foo); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:72:21 + --> $DIR/parse-error.rs:70:21 | LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); | ^^^^^^^^^^^^^^^^^^^^ @@ -159,127 +159,127 @@ LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: _ cannot be used for input operands - --> $DIR/parse-error.rs:74:28 + --> $DIR/parse-error.rs:72:28 | LL | asm!("{}", in(reg) _); | ^ error: _ cannot be used for input operands - --> $DIR/parse-error.rs:76:31 + --> $DIR/parse-error.rs:74:31 | LL | asm!("{}", inout(reg) _); | ^ error: _ cannot be used for input operands - --> $DIR/parse-error.rs:78:35 + --> $DIR/parse-error.rs:76:35 | LL | asm!("{}", inlateout(reg) _); | ^ error: requires at least a template string argument - --> $DIR/parse-error.rs:85:1 + --> $DIR/parse-error.rs:83:1 | LL | global_asm!(); | ^^^^^^^^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:87:13 + --> $DIR/parse-error.rs:85:13 | LL | global_asm!(FOO); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:89:18 + --> $DIR/parse-error.rs:87:18 | LL | global_asm!("{}" FOO); | ^^^ expected `,` error: 
expected operand, options, or additional template string - --> $DIR/parse-error.rs:91:19 + --> $DIR/parse-error.rs:89:19 | LL | global_asm!("{}", FOO); | ^^^ expected operand, options, or additional template string error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:93:24 + --> $DIR/parse-error.rs:91:24 | LL | global_asm!("{}", const); | ^ expected expression error: expected one of `,`, `.`, `?`, or an operator, found `FOO` - --> $DIR/parse-error.rs:95:30 + --> $DIR/parse-error.rs:93:30 | LL | global_asm!("{}", const(reg) FOO); | ^^^ expected one of `,`, `.`, `?`, or an operator error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:97:25 + --> $DIR/parse-error.rs:95:25 | LL | global_asm!("", options(FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:99:25 + --> $DIR/parse-error.rs:97:25 | LL | global_asm!("", options(nomem FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:99:31 + --> $DIR/parse-error.rs:97:31 | LL | global_asm!("", options(nomem FOO)); | ^^^ expected one of `)` or `,` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:102:25 + --> $DIR/parse-error.rs:100:25 | LL | global_asm!("", options(nomem, FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:102:32 + --> $DIR/parse-error.rs:100:32 | LL | global_asm!("", options(nomem, FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: expected string literal - --> $DIR/parse-error.rs:106:29 + --> $DIR/parse-error.rs:104:29 | LL | global_asm!("", clobber_abi(FOO)); | ^^^ not a string literal error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:108:33 + --> $DIR/parse-error.rs:106:33 | LL | global_asm!("", clobber_abi("C" FOO)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:110:34 + --> $DIR/parse-error.rs:108:34 | LL | global_asm!("", clobber_abi("C", FOO)); | ^^^ not a string literal error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:112:19 + --> $DIR/parse-error.rs:110:19 | LL | global_asm!("{}", clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:114:28 + --> $DIR/parse-error.rs:112:28 | LL | global_asm!("", options(), clobber_abi("C")); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:116:30 + --> $DIR/parse-error.rs:114:30 | LL | global_asm!("{}", options(), clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: duplicate argument named `a` - --> $DIR/parse-error.rs:118:35 + --> $DIR/parse-error.rs:116:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -287,7 +287,7 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); | previously here error: argument never used - --> $DIR/parse-error.rs:118:35 + --> $DIR/parse-error.rs:116:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ^^^^^^^^^^^^^ argument never used @@ -295,19 +295,19 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: expected one of 
`clobber_abi`, `const`, `options`, or `sym`, found `""` - --> $DIR/parse-error.rs:121:28 + --> $DIR/parse-error.rs:119:28 | LL | global_asm!("", options(), ""); | ^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:123:30 + --> $DIR/parse-error.rs:121:30 | LL | global_asm!("{}", const FOO, "{}", const FOO); | ^^^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: asm template must be a string literal - --> $DIR/parse-error.rs:125:13 + --> $DIR/parse-error.rs:123:13 | LL | global_asm!(format!("{{{}}}", 0), const FOO); | ^^^^^^^^^^^^^^^^^^^^ @@ -315,7 +315,7 @@ LL | global_asm!(format!("{{{}}}", 0), const FOO); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:127:20 + --> $DIR/parse-error.rs:125:20 | LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); | ^^^^^^^^^^^^^^^^^^^^ @@ -323,7 +323,7 @@ LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:39:37 + --> $DIR/parse-error.rs:37:37 | LL | asm!("{}", options(), const foo); | ^^^ non-constant value @@ -334,7 +334,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:47:44 + --> $DIR/parse-error.rs:45:44 | LL | asm!("{}", clobber_abi("C"), const foo); | ^^^ non-constant value @@ -345,7 +345,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:50:55 + --> $DIR/parse-error.rs:48:55 | LL | asm!("{}", options(), clobber_abi("C"), const foo); | ^^^ non-constant value @@ -356,7 +356,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:52:31 + --> $DIR/parse-error.rs:50:31 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value @@ -367,7 +367,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:52:46 + --> $DIR/parse-error.rs:50:46 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value @@ -378,7 +378,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:59:45 + --> $DIR/parse-error.rs:57:45 | LL | asm!("{a}", in("x0") foo, a = const bar); | ^^^ non-constant value @@ -389,7 +389,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:61:45 + --> $DIR/parse-error.rs:59:45 | LL | asm!("{a}", in("x0") foo, a = const bar); | ^^^ non-constant value @@ -400,7 +400,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:63:41 + --> $DIR/parse-error.rs:61:41 | LL | asm!("{1}", in("x0") foo, const bar); | ^^^ non-constant value diff --git a/tests/ui/asm/aarch64/type-check-3.rs b/tests/ui/asm/aarch64/type-check-3.rs index 3fc8e506069..b64473f98c0 100644 --- 
a/tests/ui/asm/aarch64/type-check-3.rs +++ b/tests/ui/asm/aarch64/type-check-3.rs @@ -1,7 +1,7 @@ //@ only-aarch64 //@ compile-flags: -C target-feature=+neon -#![feature(repr_simd, asm_const)] +#![feature(repr_simd)] use std::arch::aarch64::float64x2_t; use std::arch::{asm, global_asm}; diff --git a/tests/ui/asm/aarch64/type-check-4.rs b/tests/ui/asm/aarch64/type-check-4.rs index f00b4d4c46f..41eb9de5669 100644 --- a/tests/ui/asm/aarch64/type-check-4.rs +++ b/tests/ui/asm/aarch64/type-check-4.rs @@ -1,7 +1,7 @@ //@ only-aarch64 //@ compile-flags: -C target-feature=+neon -#![feature(repr_simd, asm_const)] +#![feature(repr_simd)] use std::arch::aarch64::float64x2_t; use std::arch::{asm, global_asm}; @@ -10,8 +10,7 @@ use std::arch::{asm, global_asm}; #[derive(Copy, Clone)] struct Simd256bit(f64, f64, f64, f64); -fn main() { -} +fn main() {} // Constants must be... constant diff --git a/tests/ui/asm/aarch64/type-check-4.stderr b/tests/ui/asm/aarch64/type-check-4.stderr index 3e675f69e84..89eb8467cde 100644 --- a/tests/ui/asm/aarch64/type-check-4.stderr +++ b/tests/ui/asm/aarch64/type-check-4.stderr @@ -1,5 +1,5 @@ error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:25:25 + --> $DIR/type-check-4.rs:24:25 | LL | global_asm!("{}", const S); | ^ @@ -11,7 +11,7 @@ LL | global_asm!("{}", const S); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:28:35 + --> $DIR/type-check-4.rs:27:35 | LL | global_asm!("{}", const const_foo(S)); | ^ @@ -23,7 +23,7 @@ LL | global_asm!("{}", const const_foo(S)); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:31:35 + --> $DIR/type-check-4.rs:30:35 | LL | global_asm!("{}", const const_bar(S)); | ^ diff --git a/tests/ui/asm/bad-template.rs b/tests/ui/asm/bad-template.rs index 41a906e32a4..6b00905a393 100644 --- a/tests/ui/asm/bad-template.rs +++ b/tests/ui/asm/bad-template.rs @@ -6,7 +6,7 @@ //@ [x86_64] needs-llvm-components: x86 //@ [aarch64] needs-llvm-components: aarch64 -#![feature(no_core, lang_items, rustc_attrs, asm_const)] +#![feature(no_core, lang_items, rustc_attrs)] #![no_core] #[rustc_builtin_macro] diff --git a/tests/ui/asm/const-error.rs b/tests/ui/asm/const-error.rs index f2cead399b6..40d0590c33e 100644 --- a/tests/ui/asm/const-error.rs +++ b/tests/ui/asm/const-error.rs @@ -1,15 +1,15 @@ //@ only-x86_64 //@ needs-asm-support -#![feature(asm_const)] - // Test to make sure that we emit const errors eagerly for inline asm use std::arch::asm; fn test<T>() { - unsafe { asm!("/* {} */", const 1 / 0); } - //~^ ERROR evaluation of + unsafe { + asm!("/* {} */", const 1 / 0); + //~^ ERROR evaluation of + } } fn main() {} diff --git a/tests/ui/asm/const-error.stderr b/tests/ui/asm/const-error.stderr index fe311832177..02e54457e89 100644 --- a/tests/ui/asm/const-error.stderr +++ b/tests/ui/asm/const-error.stderr @@ -1,8 +1,8 @@ error[E0080]: evaluation of `test::<T>::{constant#0}` failed - --> $DIR/const-error.rs:11:37 + --> $DIR/const-error.rs:10:32 | -LL | unsafe { asm!("/* {} */", const 1 / 0); } - | ^^^^^ attempt to divide `1_i32` by zero +LL | asm!("/* {} */", const 1 / 0); + | ^^^^^ attempt to divide `1_i32` by zero error: aborting due to 1 previous error diff --git a/tests/ui/asm/fail-const-eval-issue-121099.rs b/tests/ui/asm/fail-const-eval-issue-121099.rs index bed6fc9b39f..36d00b1e5d2 100644 --- 
a/tests/ui/asm/fail-const-eval-issue-121099.rs +++ b/tests/ui/asm/fail-const-eval-issue-121099.rs @@ -1,6 +1,5 @@ //@ build-fail //@ needs-asm-support -#![feature(asm_const)] use std::arch::global_asm; diff --git a/tests/ui/asm/fail-const-eval-issue-121099.stderr b/tests/ui/asm/fail-const-eval-issue-121099.stderr index 51d283218d2..5d86c3a5f7b 100644 --- a/tests/ui/asm/fail-const-eval-issue-121099.stderr +++ b/tests/ui/asm/fail-const-eval-issue-121099.stderr @@ -1,11 +1,11 @@ error[E0080]: evaluation of constant value failed - --> $DIR/fail-const-eval-issue-121099.rs:9:31 + --> $DIR/fail-const-eval-issue-121099.rs:8:31 | LL | global_asm!("/* {} */", const 1 << 500); | ^^^^^^^^ attempt to shift left by `500_i32`, which would overflow error[E0080]: evaluation of constant value failed - --> $DIR/fail-const-eval-issue-121099.rs:11:31 + --> $DIR/fail-const-eval-issue-121099.rs:10:31 | LL | global_asm!("/* {} */", const 1 / 0); | ^^^^^ attempt to divide `1_i32` by zero diff --git a/tests/ui/asm/generic-const.rs b/tests/ui/asm/generic-const.rs index 133d093d200..3b69a4e86e3 100644 --- a/tests/ui/asm/generic-const.rs +++ b/tests/ui/asm/generic-const.rs @@ -1,8 +1,6 @@ //@ needs-asm-support //@ build-pass -#![feature(asm_const)] - use std::arch::asm; fn foofoo<const N: usize>() {} diff --git a/tests/ui/asm/invalid-const-operand.rs b/tests/ui/asm/invalid-const-operand.rs index eff335ff6aa..a688f5042db 100644 --- a/tests/ui/asm/invalid-const-operand.rs +++ b/tests/ui/asm/invalid-const-operand.rs @@ -2,8 +2,6 @@ //@ ignore-nvptx64 //@ ignore-spirv -#![feature(asm_const)] - use std::arch::{asm, global_asm}; // Const operands must be integers and must be constants. diff --git a/tests/ui/asm/invalid-const-operand.stderr b/tests/ui/asm/invalid-const-operand.stderr index a6d742b53c2..bda4b0355b7 100644 --- a/tests/ui/asm/invalid-const-operand.stderr +++ b/tests/ui/asm/invalid-const-operand.stderr @@ -1,5 +1,5 @@ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:42:26 + --> $DIR/invalid-const-operand.rs:40:26 | LL | asm!("{}", const x); | ^ non-constant value @@ -10,7 +10,7 @@ LL | const x: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:45:36 + --> $DIR/invalid-const-operand.rs:43:36 | LL | asm!("{}", const const_foo(x)); | ^ non-constant value @@ -21,7 +21,7 @@ LL | const x: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:48:36 + --> $DIR/invalid-const-operand.rs:46:36 | LL | asm!("{}", const const_bar(x)); | ^ non-constant value @@ -32,7 +32,7 @@ LL | const x: /* Type */ = 0; | ~~~~~ ++++++++++++ error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:14:19 + --> $DIR/invalid-const-operand.rs:12:19 | LL | global_asm!("{}", const 0f32); | ^^^^^^---- @@ -42,7 +42,7 @@ LL | global_asm!("{}", const 0f32); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:16:19 + --> $DIR/invalid-const-operand.rs:14:19 | LL | global_asm!("{}", const 0 as *mut u8); | ^^^^^^------------ @@ -52,7 +52,7 @@ LL | global_asm!("{}", const 0 as *mut u8); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:26:20 + --> $DIR/invalid-const-operand.rs:24:20 | LL | asm!("{}", const 0f32); | ^^^^^^---- @@ -62,7 +62,7 @@ LL | asm!("{}", const 
0f32); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:28:20 + --> $DIR/invalid-const-operand.rs:26:20 | LL | asm!("{}", const 0 as *mut u8); | ^^^^^^------------ @@ -72,7 +72,7 @@ LL | asm!("{}", const 0 as *mut u8); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:30:20 + --> $DIR/invalid-const-operand.rs:28:20 | LL | asm!("{}", const &0); | ^^^^^^-- diff --git a/tests/ui/asm/naked-functions.rs b/tests/ui/asm/naked-functions.rs index cb1e5c325c2..116a84506c5 100644 --- a/tests/ui/asm/naked-functions.rs +++ b/tests/ui/asm/naked-functions.rs @@ -3,7 +3,7 @@ //@ ignore-spirv #![feature(naked_functions)] -#![feature(asm_const, asm_unwind, linkage)] +#![feature(asm_unwind, linkage)] #![crate_type = "lib"] use std::arch::asm; diff --git a/tests/ui/asm/named-asm-labels.rs b/tests/ui/asm/named-asm-labels.rs index d2ca6fe8808..043aab9029d 100644 --- a/tests/ui/asm/named-asm-labels.rs +++ b/tests/ui/asm/named-asm-labels.rs @@ -10,7 +10,7 @@ // which causes less readable LLVM errors and in the worst cases causes ICEs // or segfaults based on system dependent behavior and codegen flags. -#![feature(naked_functions, asm_const)] +#![feature(naked_functions)] use std::arch::{asm, global_asm}; @@ -128,6 +128,7 @@ fn main() { // Tests usage of colons in non-label positions asm!(":lo12:FOO"); // this is apparently valid aarch64 + // is there an example that is valid x86 for this test? asm!(":bbb nop"); @@ -176,7 +177,8 @@ fn main() { // label or LTO can cause labels to break #[naked] pub extern "C" fn foo() -> i32 { - unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } //~ ERROR avoid using named labels + unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } + //~^ ERROR avoid using named labels } // Make sure that non-naked attributes *do* still let the lint happen diff --git a/tests/ui/asm/named-asm-labels.stderr b/tests/ui/asm/named-asm-labels.stderr index 20b7d64f9e7..e5e177fb8b8 100644 --- a/tests/ui/asm/named-asm-labels.stderr +++ b/tests/ui/asm/named-asm-labels.stderr @@ -328,7 +328,7 @@ LL | ab: nop // ab: does foo = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:143:19 + --> $DIR/named-asm-labels.rs:144:19 | LL | asm!("test_{}: nop", in(reg) 10); | ^^^^^^^ @@ -338,7 +338,7 @@ LL | asm!("test_{}: nop", in(reg) 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:145:15 + --> $DIR/named-asm-labels.rs:146:15 | LL | asm!("test_{}: nop", const 10); | ^^^^^^^ @@ -348,7 +348,7 @@ LL | asm!("test_{}: nop", const 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:146:15 + --> $DIR/named-asm-labels.rs:147:15 | LL | asm!("test_{}: nop", sym main); | ^^^^^^^ @@ -358,7 +358,7 @@ LL | asm!("test_{}: nop", sym main); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named 
labels in inline assembly - --> $DIR/named-asm-labels.rs:147:15 + --> $DIR/named-asm-labels.rs:148:15 | LL | asm!("{}_test: nop", const 10); | ^^^^^^^ @@ -368,7 +368,7 @@ LL | asm!("{}_test: nop", const 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:148:15 + --> $DIR/named-asm-labels.rs:149:15 | LL | asm!("test_{}_test: nop", const 10); | ^^^^^^^^^^^^ @@ -378,7 +378,7 @@ LL | asm!("test_{}_test: nop", const 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:149:15 + --> $DIR/named-asm-labels.rs:150:15 | LL | asm!("{}: nop", const 10); | ^^ @@ -388,7 +388,7 @@ LL | asm!("{}: nop", const 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:151:15 + --> $DIR/named-asm-labels.rs:152:15 | LL | asm!("{uwu}: nop", uwu = const 10); | ^^^^^ @@ -398,7 +398,7 @@ LL | asm!("{uwu}: nop", uwu = const 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:152:15 + --> $DIR/named-asm-labels.rs:153:15 | LL | asm!("{0}: nop", const 10); | ^^^ @@ -408,7 +408,7 @@ LL | asm!("{0}: nop", const 10); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:153:15 + --> $DIR/named-asm-labels.rs:154:15 | LL | asm!("{1}: nop", "/* {0} */", const 10, const 20); | ^^^ @@ -418,7 +418,7 @@ LL | asm!("{1}: nop", "/* {0} */", const 10, const 20); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -428,7 +428,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: the label may be declared in the expansion of a macro error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -439,7 +439,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -450,7 +450,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -461,7 +461,7 @@ LL | 
asm!(include_str!("named-asm-labels.s")); = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` warning: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:170:19 + --> $DIR/named-asm-labels.rs:171:19 | LL | asm!("warned: nop"); | ^^^^^^ @@ -469,13 +469,13 @@ LL | asm!("warned: nop"); = help: only local labels of the form `<number>:` should be used in inline asm = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information note: the lint level is defined here - --> $DIR/named-asm-labels.rs:168:16 + --> $DIR/named-asm-labels.rs:169:16 | LL | #[warn(named_asm_labels)] | ^^^^^^^^^^^^^^^^ error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:179:20 + --> $DIR/named-asm-labels.rs:180:20 | LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } | ^^^^^ @@ -484,7 +484,7 @@ LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noret = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:185:20 + --> $DIR/named-asm-labels.rs:187:20 | LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } | ^^^^^ @@ -493,7 +493,7 @@ LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noret = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:193:20 + --> $DIR/named-asm-labels.rs:195:20 | LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) } | ^^^^^ @@ -502,7 +502,7 @@ LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) } = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:203:24 + --> $DIR/named-asm-labels.rs:205:24 | LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) } | ^^^^^ @@ -511,7 +511,7 @@ LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) } = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:212:15 + --> $DIR/named-asm-labels.rs:214:15 | LL | asm!("closure1: nop"); | ^^^^^^^^ @@ -520,7 +520,7 @@ LL | asm!("closure1: nop"); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:216:15 + --> $DIR/named-asm-labels.rs:218:15 | LL | asm!("closure2: nop"); | ^^^^^^^^ @@ -529,7 +529,7 @@ LL | asm!("closure2: nop"); = note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:226:19 + --> $DIR/named-asm-labels.rs:228:19 | LL | asm!("closure3: nop"); | ^^^^^^^^ diff --git a/tests/ui/asm/parse-error.rs b/tests/ui/asm/parse-error.rs index 16ae0282864..4d7b522f5fc 100644 --- a/tests/ui/asm/parse-error.rs +++ b/tests/ui/asm/parse-error.rs @@ -1,7 
+1,5 @@ //@ needs-asm-support -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn main() { diff --git a/tests/ui/asm/parse-error.stderr b/tests/ui/asm/parse-error.stderr index f5f8d537d86..6d0e629b937 100644 --- a/tests/ui/asm/parse-error.stderr +++ b/tests/ui/asm/parse-error.stderr @@ -1,167 +1,167 @@ error: requires at least a template string argument - --> $DIR/parse-error.rs:11:9 + --> $DIR/parse-error.rs:9:9 | LL | asm!(); | ^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:13:14 + --> $DIR/parse-error.rs:11:14 | LL | asm!(foo); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:15:19 + --> $DIR/parse-error.rs:13:19 | LL | asm!("{}" foo); | ^^^ expected `,` error: expected operand, clobber_abi, options, or additional template string - --> $DIR/parse-error.rs:17:20 + --> $DIR/parse-error.rs:15:20 | LL | asm!("{}", foo); | ^^^ expected operand, clobber_abi, options, or additional template string error: expected `(`, found `foo` - --> $DIR/parse-error.rs:19:23 + --> $DIR/parse-error.rs:17:23 | LL | asm!("{}", in foo); | ^^^ expected `(` error: expected `)`, found `foo` - --> $DIR/parse-error.rs:21:27 + --> $DIR/parse-error.rs:19:27 | LL | asm!("{}", in(reg foo)); | ^^^ expected `)` error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:23:27 + --> $DIR/parse-error.rs:21:27 | LL | asm!("{}", in(reg)); | ^ expected expression error: expected register class or explicit register - --> $DIR/parse-error.rs:25:26 + --> $DIR/parse-error.rs:23:26 | LL | asm!("{}", inout(=) foo => bar); | ^ error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:27:37 + --> $DIR/parse-error.rs:25:37 | LL | asm!("{}", inout(reg) foo =>); | ^ expected expression error: expected one of `!`, `,`, `.`, `::`, `?`, `{`, or an operator, found `=>` - --> $DIR/parse-error.rs:29:32 + --> $DIR/parse-error.rs:27:32 | LL | asm!("{}", in(reg) foo => bar); | ^^ expected one of 7 possible tokens error: expected a path for argument to `sym` - --> $DIR/parse-error.rs:31:24 + --> $DIR/parse-error.rs:29:24 | LL | asm!("{}", sym foo + bar); | ^^^^^^^^^ error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:33:26 + --> $DIR/parse-error.rs:31:26 | LL | asm!("", options(foo)); | ^^^ expected one of 10 possible tokens error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:35:32 + --> $DIR/parse-error.rs:33:32 | LL | asm!("", options(nomem foo)); | ^^^ expected one of `)` or `,` error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:37:33 + --> $DIR/parse-error.rs:35:33 | LL | asm!("", options(nomem, foo)); | ^^^ expected one of 10 possible tokens error: at least one abi must be provided as an argument to `clobber_abi` - --> $DIR/parse-error.rs:44:30 + --> $DIR/parse-error.rs:42:30 | LL | asm!("", clobber_abi()); | ^ error: expected string literal - --> $DIR/parse-error.rs:46:30 + --> $DIR/parse-error.rs:44:30 | LL | asm!("", clobber_abi(foo)); | ^^^ not a string literal error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:48:34 + --> $DIR/parse-error.rs:46:34 | LL | asm!("", clobber_abi("C" foo)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:50:35 + --> $DIR/parse-error.rs:48:35 | LL | asm!("", clobber_abi("C", foo)); | 
^^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:52:30 + --> $DIR/parse-error.rs:50:30 | LL | asm!("", clobber_abi(1)); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:54:30 + --> $DIR/parse-error.rs:52:30 | LL | asm!("", clobber_abi(())); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:56:30 + --> $DIR/parse-error.rs:54:30 | LL | asm!("", clobber_abi(uwu)); | ^^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:58:30 + --> $DIR/parse-error.rs:56:30 | LL | asm!("", clobber_abi({})); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:60:30 + --> $DIR/parse-error.rs:58:30 | LL | asm!("", clobber_abi(loop {})); | ^^^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:62:30 + --> $DIR/parse-error.rs:60:30 | LL | asm!("", clobber_abi(if)); | ^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:64:30 + --> $DIR/parse-error.rs:62:30 | LL | asm!("", clobber_abi(do)); | ^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:66:30 + --> $DIR/parse-error.rs:64:30 | LL | asm!("", clobber_abi(<)); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:68:30 + --> $DIR/parse-error.rs:66:30 | LL | asm!("", clobber_abi(.)); | ^ not a string literal error: duplicate argument named `a` - --> $DIR/parse-error.rs:76:36 + --> $DIR/parse-error.rs:74:36 | LL | asm!("{a}", a = const foo, a = const bar); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -169,7 +169,7 @@ LL | asm!("{a}", a = const foo, a = const bar); | previously here error: argument never used - --> $DIR/parse-error.rs:76:36 + --> $DIR/parse-error.rs:74:36 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^^^^^^^^^^^ argument never used @@ -177,19 +177,19 @@ LL | asm!("{a}", a = const foo, a = const bar); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `""` - --> $DIR/parse-error.rs:82:29 + --> $DIR/parse-error.rs:80:29 | LL | asm!("", options(), ""); | ^^ expected one of 10 possible tokens error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:84:33 + --> $DIR/parse-error.rs:82:33 | LL | asm!("{}", in(reg) foo, "{}", out(reg) foo); | ^^^^ expected one of 10 possible tokens error: asm template must be a string literal - --> $DIR/parse-error.rs:86:14 + --> $DIR/parse-error.rs:84:14 | LL | asm!(format!("{{{}}}", 0), in(reg) foo); | ^^^^^^^^^^^^^^^^^^^^ @@ -197,7 +197,7 @@ LL | asm!(format!("{{{}}}", 0), in(reg) foo); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:88:21 + --> $DIR/parse-error.rs:86:21 | LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); | ^^^^^^^^^^^^^^^^^^^^ @@ -205,139 +205,139 @@ LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: _ cannot be used for input operands - --> $DIR/parse-error.rs:90:28 + --> $DIR/parse-error.rs:88:28 | LL | asm!("{}", in(reg) _); | ^ error: _ cannot be used for 
input operands - --> $DIR/parse-error.rs:92:31 + --> $DIR/parse-error.rs:90:31 | LL | asm!("{}", inout(reg) _); | ^ error: _ cannot be used for input operands - --> $DIR/parse-error.rs:94:35 + --> $DIR/parse-error.rs:92:35 | LL | asm!("{}", inlateout(reg) _); | ^ error: requires at least a template string argument - --> $DIR/parse-error.rs:101:1 + --> $DIR/parse-error.rs:99:1 | LL | global_asm!(); | ^^^^^^^^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:103:13 + --> $DIR/parse-error.rs:101:13 | LL | global_asm!(FOO); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:105:18 + --> $DIR/parse-error.rs:103:18 | LL | global_asm!("{}" FOO); | ^^^ expected `,` error: expected operand, options, or additional template string - --> $DIR/parse-error.rs:107:19 + --> $DIR/parse-error.rs:105:19 | LL | global_asm!("{}", FOO); | ^^^ expected operand, options, or additional template string error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:109:24 + --> $DIR/parse-error.rs:107:24 | LL | global_asm!("{}", const); | ^ expected expression error: expected one of `,`, `.`, `?`, or an operator, found `FOO` - --> $DIR/parse-error.rs:111:30 + --> $DIR/parse-error.rs:109:30 | LL | global_asm!("{}", const(reg) FOO); | ^^^ expected one of `,`, `.`, `?`, or an operator error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:113:25 + --> $DIR/parse-error.rs:111:25 | LL | global_asm!("", options(FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:115:25 + --> $DIR/parse-error.rs:113:25 | LL | global_asm!("", options(FOO,)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:117:25 + --> $DIR/parse-error.rs:115:25 | LL | global_asm!("", options(nomem FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:117:31 + --> $DIR/parse-error.rs:115:31 | LL | global_asm!("", options(nomem FOO)); | ^^^ expected one of `)` or `,` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:120:25 + --> $DIR/parse-error.rs:118:25 | LL | global_asm!("", options(nomem, FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:120:32 + --> $DIR/parse-error.rs:118:32 | LL | global_asm!("", options(nomem, FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: expected string literal - --> $DIR/parse-error.rs:124:29 + --> $DIR/parse-error.rs:122:29 | LL | global_asm!("", clobber_abi(FOO)); | ^^^ not a string literal error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:126:33 + --> $DIR/parse-error.rs:124:33 | LL | global_asm!("", clobber_abi("C" FOO)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:128:34 + --> $DIR/parse-error.rs:126:34 | LL | global_asm!("", clobber_abi("C", FOO)); | ^^^ not a string literal error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:130:19 + --> $DIR/parse-error.rs:128:19 | LL | global_asm!("{}", clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:132:28 + --> $DIR/parse-error.rs:130:28 | LL | global_asm!("", 
options(), clobber_abi("C")); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:134:30 + --> $DIR/parse-error.rs:132:30 | LL | global_asm!("{}", options(), clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:136:17 + --> $DIR/parse-error.rs:134:17 | LL | global_asm!("", clobber_abi("C"), clobber_abi("C")); | ^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^ error: duplicate argument named `a` - --> $DIR/parse-error.rs:138:35 + --> $DIR/parse-error.rs:136:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -345,7 +345,7 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); | previously here error: argument never used - --> $DIR/parse-error.rs:138:35 + --> $DIR/parse-error.rs:136:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ^^^^^^^^^^^^^ argument never used @@ -353,19 +353,19 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `""` - --> $DIR/parse-error.rs:141:28 + --> $DIR/parse-error.rs:139:28 | LL | global_asm!("", options(), ""); | ^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:143:30 + --> $DIR/parse-error.rs:141:30 | LL | global_asm!("{}", const FOO, "{}", const FOO); | ^^^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: asm template must be a string literal - --> $DIR/parse-error.rs:145:13 + --> $DIR/parse-error.rs:143:13 | LL | global_asm!(format!("{{{}}}", 0), const FOO); | ^^^^^^^^^^^^^^^^^^^^ @@ -373,7 +373,7 @@ LL | global_asm!(format!("{{{}}}", 0), const FOO); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:147:20 + --> $DIR/parse-error.rs:145:20 | LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); | ^^^^^^^^^^^^^^^^^^^^ @@ -381,43 +381,43 @@ LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: the `in` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:150:19 + --> $DIR/parse-error.rs:148:19 | LL | global_asm!("{}", in(reg)); | ^^ the `in` operand is not meaningful for global-scoped inline assembly, remove it error: the `out` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:152:19 + --> $DIR/parse-error.rs:150:19 | LL | global_asm!("{}", out(reg)); | ^^^ the `out` operand is not meaningful for global-scoped inline assembly, remove it error: the `lateout` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:154:19 + --> $DIR/parse-error.rs:152:19 | LL | global_asm!("{}", lateout(reg)); | ^^^^^^^ the `lateout` operand is not meaningful for global-scoped inline assembly, remove it error: the `inout` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:156:19 + --> $DIR/parse-error.rs:154:19 | LL | global_asm!("{}", inout(reg)); | ^^^^^ the `inout` operand is not meaningful for global-scoped inline assembly, remove it error: the `inlateout` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:158:19 + --> 
$DIR/parse-error.rs:156:19 | LL | global_asm!("{}", inlateout(reg)); | ^^^^^^^^^ the `inlateout` operand is not meaningful for global-scoped inline assembly, remove it error: the `label` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:160:19 + --> $DIR/parse-error.rs:158:19 | LL | global_asm!("{}", label(reg)); | ^^^^^ the `label` operand is not meaningful for global-scoped inline assembly, remove it error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:39:37 + --> $DIR/parse-error.rs:37:37 | LL | asm!("{}", options(), const foo); | ^^^ non-constant value @@ -428,7 +428,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:71:44 + --> $DIR/parse-error.rs:69:44 | LL | asm!("{}", clobber_abi("C"), const foo); | ^^^ non-constant value @@ -439,7 +439,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:74:55 + --> $DIR/parse-error.rs:72:55 | LL | asm!("{}", options(), clobber_abi("C"), const foo); | ^^^ non-constant value @@ -450,7 +450,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:76:31 + --> $DIR/parse-error.rs:74:31 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value @@ -461,7 +461,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:76:46 + --> $DIR/parse-error.rs:74:46 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value diff --git a/tests/ui/asm/type-check-1.rs b/tests/ui/asm/type-check-1.rs index 22669dce280..4dc30fb5838 100644 --- a/tests/ui/asm/type-check-1.rs +++ b/tests/ui/asm/type-check-1.rs @@ -2,8 +2,6 @@ //@ ignore-nvptx64 //@ ignore-spirv -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn main() { diff --git a/tests/ui/asm/type-check-1.stderr b/tests/ui/asm/type-check-1.stderr index d47e6ae1d2a..aa9eed2fce6 100644 --- a/tests/ui/asm/type-check-1.stderr +++ b/tests/ui/asm/type-check-1.stderr @@ -1,17 +1,17 @@ error: invalid asm output - --> $DIR/type-check-1.rs:14:29 + --> $DIR/type-check-1.rs:12:29 | LL | asm!("{}", out(reg) 1 + 2); | ^^^^^ cannot assign to this expression error: invalid asm output - --> $DIR/type-check-1.rs:16:31 + --> $DIR/type-check-1.rs:14:31 | LL | asm!("{}", inout(reg) 1 + 2); | ^^^^^ cannot assign to this expression error[E0277]: the size for values of type `[u64]` cannot be known at compilation time - --> $DIR/type-check-1.rs:22:28 + --> $DIR/type-check-1.rs:20:28 | LL | asm!("{}", in(reg) v[..]); | ^^^^^ doesn't have a size known at compile-time @@ -20,7 +20,7 @@ LL | asm!("{}", in(reg) v[..]); = note: all inline asm arguments must have a statically known size error[E0277]: the size for values of type `[u64]` cannot be known at compilation time - --> $DIR/type-check-1.rs:25:29 + --> $DIR/type-check-1.rs:23:29 | LL | asm!("{}", out(reg) v[..]); | ^^^^^ doesn't have a size known at compile-time @@ -29,7 +29,7 @@ LL | asm!("{}", out(reg) v[..]); = note: all inline asm arguments must have a statically known size error[E0277]: the size for values of type `[u64]` cannot be known at compilation time - --> $DIR/type-check-1.rs:28:31 + --> $DIR/type-check-1.rs:26:31 | LL | asm!("{}", inout(reg) v[..]); | ^^^^^ doesn't have a size known at compile-time @@ -38,7 +38,7 @@ LL 
| asm!("{}", inout(reg) v[..]); = note: all inline asm arguments must have a statically known size error: cannot use value of type `[u64]` for inline assembly - --> $DIR/type-check-1.rs:22:28 + --> $DIR/type-check-1.rs:20:28 | LL | asm!("{}", in(reg) v[..]); | ^^^^^ @@ -46,7 +46,7 @@ LL | asm!("{}", in(reg) v[..]); = note: only integers, floats, SIMD vectors, pointers and function pointers can be used as arguments for inline assembly error: cannot use value of type `[u64]` for inline assembly - --> $DIR/type-check-1.rs:25:29 + --> $DIR/type-check-1.rs:23:29 | LL | asm!("{}", out(reg) v[..]); | ^^^^^ @@ -54,7 +54,7 @@ LL | asm!("{}", out(reg) v[..]); = note: only integers, floats, SIMD vectors, pointers and function pointers can be used as arguments for inline assembly error: cannot use value of type `[u64]` for inline assembly - --> $DIR/type-check-1.rs:28:31 + --> $DIR/type-check-1.rs:26:31 | LL | asm!("{}", inout(reg) v[..]); | ^^^^^ diff --git a/tests/ui/asm/x86_64/bad-reg.rs b/tests/ui/asm/x86_64/bad-reg.rs index d41c46d57bb..2a189a91c5a 100644 --- a/tests/ui/asm/x86_64/bad-reg.rs +++ b/tests/ui/asm/x86_64/bad-reg.rs @@ -1,8 +1,6 @@ //@ only-x86_64 //@ compile-flags: -C target-feature=+avx2 -#![feature(asm_const)] - use std::arch::asm; fn main() { diff --git a/tests/ui/asm/x86_64/bad-reg.stderr b/tests/ui/asm/x86_64/bad-reg.stderr index 8017008e97d..3df1f7b2208 100644 --- a/tests/ui/asm/x86_64/bad-reg.stderr +++ b/tests/ui/asm/x86_64/bad-reg.stderr @@ -1,17 +1,17 @@ error: invalid register class `foo`: unknown register class - --> $DIR/bad-reg.rs:14:20 + --> $DIR/bad-reg.rs:12:20 | LL | asm!("{}", in(foo) foo); | ^^^^^^^^^^^ error: invalid register `foo`: unknown register - --> $DIR/bad-reg.rs:16:18 + --> $DIR/bad-reg.rs:14:18 | LL | asm!("", in("foo") foo); | ^^^^^^^^^^^^^ error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:18:15 + --> $DIR/bad-reg.rs:16:15 | LL | asm!("{:z}", in(reg) foo); | ^^^^ ----------- argument @@ -21,7 +21,7 @@ LL | asm!("{:z}", in(reg) foo); = note: the `reg` register class supports the following template modifiers: `l`, `x`, `e`, `r` error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:20:15 + --> $DIR/bad-reg.rs:18:15 | LL | asm!("{:r}", in(xmm_reg) foo); | ^^^^ --------------- argument @@ -31,7 +31,7 @@ LL | asm!("{:r}", in(xmm_reg) foo); = note: the `xmm_reg` register class supports the following template modifiers: `x`, `y`, `z` error: asm template modifiers are not allowed for `const` arguments - --> $DIR/bad-reg.rs:22:15 + --> $DIR/bad-reg.rs:20:15 | LL | asm!("{:a}", const 0); | ^^^^ ------- argument @@ -39,7 +39,7 @@ LL | asm!("{:a}", const 0); | template modifier error: asm template modifiers are not allowed for `sym` arguments - --> $DIR/bad-reg.rs:24:15 + --> $DIR/bad-reg.rs:22:15 | LL | asm!("{:a}", sym main); | ^^^^ -------- argument @@ -47,67 +47,67 @@ LL | asm!("{:a}", sym main); | template modifier error: invalid register `ebp`: the frame pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:26:18 + --> $DIR/bad-reg.rs:24:18 | LL | asm!("", in("ebp") foo); | ^^^^^^^^^^^^^ error: invalid register `rsp`: the stack pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:28:18 + --> $DIR/bad-reg.rs:26:18 | LL | asm!("", in("rsp") foo); | ^^^^^^^^^^^^^ error: invalid register `ip`: the instruction pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:30:18 + --> $DIR/bad-reg.rs:28:18 | LL | asm!("", in("ip") foo); | ^^^^^^^^^^^^ 
error: register class `x87_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:33:18 + --> $DIR/bad-reg.rs:31:18 | LL | asm!("", in("st(2)") foo); | ^^^^^^^^^^^^^^^ error: register class `mmx_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:36:18 + --> $DIR/bad-reg.rs:34:18 | LL | asm!("", in("mm0") foo); | ^^^^^^^^^^^^^ error: register class `kreg0` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:39:18 + --> $DIR/bad-reg.rs:37:18 | LL | asm!("", in("k0") foo); | ^^^^^^^^^^^^ error: register class `x87_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:44:20 + --> $DIR/bad-reg.rs:42:20 | LL | asm!("{}", in(x87_reg) foo); | ^^^^^^^^^^^^^^^ error: register class `mmx_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:47:20 + --> $DIR/bad-reg.rs:45:20 | LL | asm!("{}", in(mmx_reg) foo); | ^^^^^^^^^^^^^^^ error: register class `x87_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:50:20 + --> $DIR/bad-reg.rs:48:20 | LL | asm!("{}", out(x87_reg) _); | ^^^^^^^^^^^^^^ error: register class `mmx_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:52:20 + --> $DIR/bad-reg.rs:50:20 | LL | asm!("{}", out(mmx_reg) _); | ^^^^^^^^^^^^^^ error: register `al` conflicts with register `eax` - --> $DIR/bad-reg.rs:58:33 + --> $DIR/bad-reg.rs:56:33 | LL | asm!("", in("eax") foo, in("al") bar); | ------------- ^^^^^^^^^^^^ register `al` @@ -115,7 +115,7 @@ LL | asm!("", in("eax") foo, in("al") bar); | register `eax` error: register `rax` conflicts with register `rax` - --> $DIR/bad-reg.rs:61:33 + --> $DIR/bad-reg.rs:59:33 | LL | asm!("", in("rax") foo, out("rax") bar); | ------------- ^^^^^^^^^^^^^^ register `rax` @@ -123,13 +123,13 @@ LL | asm!("", in("rax") foo, out("rax") bar); | register `rax` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:61:18 + --> $DIR/bad-reg.rs:59:18 | LL | asm!("", in("rax") foo, out("rax") bar); | ^^^^^^^^^^^^^ error: register `ymm0` conflicts with register `xmm0` - --> $DIR/bad-reg.rs:66:34 + --> $DIR/bad-reg.rs:64:34 | LL | asm!("", in("xmm0") foo, in("ymm0") bar); | -------------- ^^^^^^^^^^^^^^ register `ymm0` @@ -137,7 +137,7 @@ LL | asm!("", in("xmm0") foo, in("ymm0") bar); | register `xmm0` error: register `ymm0` conflicts with register `xmm0` - --> $DIR/bad-reg.rs:68:34 + --> $DIR/bad-reg.rs:66:34 | LL | asm!("", in("xmm0") foo, out("ymm0") bar); | -------------- ^^^^^^^^^^^^^^^ register `ymm0` @@ -145,13 +145,13 @@ LL | asm!("", in("xmm0") foo, out("ymm0") bar); | register `xmm0` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:68:18 + --> $DIR/bad-reg.rs:66:18 | LL | asm!("", in("xmm0") foo, out("ymm0") bar); | ^^^^^^^^^^^^^^ error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:33:30 + --> $DIR/bad-reg.rs:31:30 | LL | asm!("", in("st(2)") foo); | ^^^ @@ -159,7 +159,7 @@ LL | asm!("", in("st(2)") foo); = note: register class `x87_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:36:28 + --> $DIR/bad-reg.rs:34:28 | LL | asm!("", in("mm0") foo); | ^^^ @@ -167,7 +167,7 @@ LL | asm!("", in("mm0") foo); = note: register class `mmx_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:39:27 + --> $DIR/bad-reg.rs:37:27 | LL | asm!("", in("k0") foo); | ^^^ @@ 
-175,7 +175,7 @@ LL | asm!("", in("k0") foo); = note: register class `kreg0` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:44:32 + --> $DIR/bad-reg.rs:42:32 | LL | asm!("{}", in(x87_reg) foo); | ^^^ @@ -183,7 +183,7 @@ LL | asm!("{}", in(x87_reg) foo); = note: register class `x87_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:47:32 + --> $DIR/bad-reg.rs:45:32 | LL | asm!("{}", in(mmx_reg) foo); | ^^^ @@ -191,7 +191,7 @@ LL | asm!("{}", in(mmx_reg) foo); = note: register class `mmx_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:58:42 + --> $DIR/bad-reg.rs:56:42 | LL | asm!("", in("eax") foo, in("al") bar); | ^^^ @@ -199,7 +199,7 @@ LL | asm!("", in("eax") foo, in("al") bar); = note: register class `reg_byte` supports these types: i8 error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:63:27 + --> $DIR/bad-reg.rs:61:27 | LL | asm!("", in("al") foo, lateout("al") bar); | ^^^ @@ -207,7 +207,7 @@ LL | asm!("", in("al") foo, lateout("al") bar); = note: register class `reg_byte` supports these types: i8 error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:63:46 + --> $DIR/bad-reg.rs:61:46 | LL | asm!("", in("al") foo, lateout("al") bar); | ^^^ diff --git a/tests/ui/asm/x86_64/const.rs b/tests/ui/asm/x86_64/const.rs index 817a338a5b9..eaaaf92e823 100644 --- a/tests/ui/asm/x86_64/const.rs +++ b/tests/ui/asm/x86_64/const.rs @@ -2,8 +2,6 @@ //@ run-pass //@ needs-asm-support -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn const_generic<const X: usize>() -> usize { diff --git a/tests/ui/asm/x86_64/type-check-3.rs b/tests/ui/asm/x86_64/type-check-3.rs index bd242af3dbc..bfb795d2624 100644 --- a/tests/ui/asm/x86_64/type-check-3.rs +++ b/tests/ui/asm/x86_64/type-check-3.rs @@ -1,8 +1,6 @@ //@ only-x86_64 //@ compile-flags: -C target-feature=+avx512f -#![feature(asm_const)] - use std::arch::{asm, global_asm}; use std::arch::x86_64::{_mm256_setzero_ps, _mm_setzero_ps}; diff --git a/tests/ui/asm/x86_64/type-check-3.stderr b/tests/ui/asm/x86_64/type-check-3.stderr index 202b97ca5c0..5a7b349413e 100644 --- a/tests/ui/asm/x86_64/type-check-3.stderr +++ b/tests/ui/asm/x86_64/type-check-3.stderr @@ -1,5 +1,5 @@ error: type `i128` cannot be used with this register class - --> $DIR/type-check-3.rs:14:28 + --> $DIR/type-check-3.rs:12:28 | LL | asm!("{}", in(reg) 0i128); | ^^^^^ @@ -7,7 +7,7 @@ LL | asm!("{}", in(reg) 0i128); = note: register class `reg` supports these types: i16, i32, i64, f16, f32, f64 error: type `__m128` cannot be used with this register class - --> $DIR/type-check-3.rs:16:28 + --> $DIR/type-check-3.rs:14:28 | LL | asm!("{}", in(reg) _mm_setzero_ps()); | ^^^^^^^^^^^^^^^^ @@ -15,7 +15,7 @@ LL | asm!("{}", in(reg) _mm_setzero_ps()); = note: register class `reg` supports these types: i16, i32, i64, f16, f32, f64 error: type `__m256` cannot be used with this register class - --> $DIR/type-check-3.rs:18:28 + --> $DIR/type-check-3.rs:16:28 | LL | asm!("{}", in(reg) _mm256_setzero_ps()); | ^^^^^^^^^^^^^^^^^^^ @@ -23,7 +23,7 @@ LL | asm!("{}", in(reg) _mm256_setzero_ps()); = note: register class `reg` supports these types: i16, i32, i64, f16, f32, f64 error: type `u8` cannot be used with this register class - --> $DIR/type-check-3.rs:20:32 + --> $DIR/type-check-3.rs:18:32 | LL | asm!("{}", in(xmm_reg) 0u8); | ^^^ @@ -31,7 +31,7 @@ LL | asm!("{}", in(xmm_reg) 
0u8); = note: register class `xmm_reg` supports these types: i32, i64, f16, f32, f64, f128, i8x16, i16x8, i32x4, i64x2, f16x8, f32x4, f64x2 error: `avx512bw` target feature is not enabled - --> $DIR/type-check-3.rs:29:29 + --> $DIR/type-check-3.rs:27:29 | LL | asm!("{}", in(kreg) 0u64); | ^^^^ @@ -39,7 +39,7 @@ LL | asm!("{}", in(kreg) 0u64); = note: this is required to use type `u64` with register class `kreg` warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:34:15 + --> $DIR/type-check-3.rs:32:15 | LL | asm!("{0} {0}", in(reg) 0i16); | ^^^ ^^^ ---- for this argument @@ -49,7 +49,7 @@ LL | asm!("{0} {0}", in(reg) 0i16); = note: `#[warn(asm_sub_register)]` on by default warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:36:15 + --> $DIR/type-check-3.rs:34:15 | LL | asm!("{0} {0:x}", in(reg) 0i16); | ^^^ ---- for this argument @@ -58,7 +58,7 @@ LL | asm!("{0} {0:x}", in(reg) 0i16); = help: or use `{0:r}` to keep the default formatting of `rax` (for 64-bit values) warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:38:15 + --> $DIR/type-check-3.rs:36:15 | LL | asm!("{}", in(reg) 0i32); | ^^ ---- for this argument @@ -67,7 +67,7 @@ LL | asm!("{}", in(reg) 0i32); = help: or use `{0:r}` to keep the default formatting of `rax` (for 64-bit values) warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:41:15 + --> $DIR/type-check-3.rs:39:15 | LL | asm!("{}", in(ymm_reg) 0i64); | ^^ ---- for this argument @@ -76,7 +76,7 @@ LL | asm!("{}", in(ymm_reg) 0i64); = help: or use `{0:y}` to keep the default formatting of `ymm0` (for 256-bit values) error: type `i8` cannot be used with this register class - --> $DIR/type-check-3.rs:52:28 + --> $DIR/type-check-3.rs:50:28 | LL | asm!("{}", in(reg) 0i8); | ^^^ @@ -85,7 +85,7 @@ LL | asm!("{}", in(reg) 0i8); = help: consider using the `reg_byte` register class instead error: incompatible types for asm inout argument - --> $DIR/type-check-3.rs:64:33 + --> $DIR/type-check-3.rs:62:33 | LL | asm!("{:r}", inout(reg) 0u32 => val_f32); | ^^^^ ^^^^^^^ type `f32` @@ -95,7 +95,7 @@ LL | asm!("{:r}", inout(reg) 0u32 => val_f32); = note: asm inout arguments must have the same type, unless they are both pointers or integers of the same size error: incompatible types for asm inout argument - --> $DIR/type-check-3.rs:66:33 + --> $DIR/type-check-3.rs:64:33 | LL | asm!("{:r}", inout(reg) 0u32 => val_ptr); | ^^^^ ^^^^^^^ type `*mut u8` @@ -105,7 +105,7 @@ LL | asm!("{:r}", inout(reg) 0u32 => val_ptr); = note: asm inout arguments must have the same type, unless they are both pointers or integers of the same size error: incompatible types for asm inout argument - --> $DIR/type-check-3.rs:68:33 + --> $DIR/type-check-3.rs:66:33 | LL | asm!("{:r}", inout(reg) main => val_u32); | ^^^^ ^^^^^^^ type `u32` diff --git a/tests/ui/asm/x86_64/type-check-4.rs b/tests/ui/asm/x86_64/type-check-4.rs index f7bf60d04df..9503cd6d8ab 100644 --- a/tests/ui/asm/x86_64/type-check-4.rs +++ b/tests/ui/asm/x86_64/type-check-4.rs @@ -1,8 +1,6 @@ //@ only-x86_64 //@ compile-flags: -C target-feature=+avx512f -#![feature(asm_const)] - use std::arch::{asm, global_asm}; use std::arch::x86_64::{_mm256_setzero_ps, _mm_setzero_ps}; diff --git a/tests/ui/asm/x86_64/type-check-4.stderr b/tests/ui/asm/x86_64/type-check-4.stderr index cbdc051b343..f1bbc9e7d33 100644 --- a/tests/ui/asm/x86_64/type-check-4.stderr +++ b/tests/ui/asm/x86_64/type-check-4.stderr 
@@ -1,5 +1,5 @@ error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:21:25 + --> $DIR/type-check-4.rs:19:25 | LL | global_asm!("{}", const S); | ^ @@ -11,7 +11,7 @@ LL | global_asm!("{}", const S); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:24:35 + --> $DIR/type-check-4.rs:22:35 | LL | global_asm!("{}", const const_foo(S)); | ^ @@ -23,7 +23,7 @@ LL | global_asm!("{}", const const_foo(S)); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:27:35 + --> $DIR/type-check-4.rs:25:35 | LL | global_asm!("{}", const const_bar(S)); | ^ diff --git a/tests/ui/asm/x86_64/x86_64_parse_error.rs b/tests/ui/asm/x86_64/x86_64_parse_error.rs index 850033d4ce0..3df0febf6b0 100644 --- a/tests/ui/asm/x86_64/x86_64_parse_error.rs +++ b/tests/ui/asm/x86_64/x86_64_parse_error.rs @@ -1,7 +1,5 @@ //@ only-x86_64 -#![feature(asm_const)] - use std::arch::asm; fn main() { diff --git a/tests/ui/asm/x86_64/x86_64_parse_error.stderr b/tests/ui/asm/x86_64/x86_64_parse_error.stderr index 9751f7b09d0..b64f6c1127e 100644 --- a/tests/ui/asm/x86_64/x86_64_parse_error.stderr +++ b/tests/ui/asm/x86_64/x86_64_parse_error.stderr @@ -1,11 +1,11 @@ error: explicit register arguments cannot have names - --> $DIR/x86_64_parse_error.rs:11:18 + --> $DIR/x86_64_parse_error.rs:9:18 | LL | asm!("", a = in("eax") foo); | ^^^^^^^^^^^^^^^^^ error: positional arguments cannot follow named arguments or explicit register arguments - --> $DIR/x86_64_parse_error.rs:17:36 + --> $DIR/x86_64_parse_error.rs:15:36 | LL | asm!("{1}", in("eax") foo, const bar); | ------------- ^^^^^^^^^ positional argument @@ -13,7 +13,7 @@ LL | asm!("{1}", in("eax") foo, const bar); | explicit register argument error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/x86_64_parse_error.rs:13:46 + --> $DIR/x86_64_parse_error.rs:11:46 | LL | asm!("{a}", in("eax") foo, a = const bar); | ^^^ non-constant value @@ -24,7 +24,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/x86_64_parse_error.rs:15:46 + --> $DIR/x86_64_parse_error.rs:13:46 | LL | asm!("{a}", in("eax") foo, a = const bar); | ^^^ non-constant value @@ -35,7 +35,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/x86_64_parse_error.rs:17:42 + --> $DIR/x86_64_parse_error.rs:15:42 | LL | asm!("{1}", in("eax") foo, const bar); | ^^^ non-constant value diff --git a/tests/ui/cast/ptr-to-trait-obj-different-args.stderr b/tests/ui/cast/ptr-to-trait-obj-different-args.stderr index b04289ae747..8e60ca42f0a 100644 --- a/tests/ui/cast/ptr-to-trait-obj-different-args.stderr +++ b/tests/ui/cast/ptr-to-trait-obj-different-args.stderr @@ -14,6 +14,7 @@ LL | let y: *const dyn Trait<Y> = x as _; | = note: expected trait object `dyn Trait<X>` found trait object `dyn Trait<Y>` + = help: `dyn Trait<Y>` implements `Trait` so you could box the found value and coerce it to the trait object `Box<dyn Trait>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/ptr-to-trait-obj-different-args.rs:27:34 @@ -25,6 +26,7 @@ LL | let _: *const dyn Trait<T> = x as _; | = note: expected trait object `dyn Trait<X>` found trait object `dyn Trait<T>` + = help: `dyn Trait<T>` 
implements `Trait` so you could box the found value and coerce it to the trait object `Box<dyn Trait>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/ptr-to-trait-obj-different-args.rs:28:34 @@ -37,6 +39,7 @@ LL | let _: *const dyn Trait<X> = t as _; | = note: expected trait object `dyn Trait<T>` found trait object `dyn Trait<X>` + = help: `dyn Trait<X>` implements `Trait` so you could box the found value and coerce it to the trait object `Box<dyn Trait>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/ptr-to-trait-obj-different-args.rs:36:5 diff --git a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.stderr b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.current.stderr index d1d598e603f..5a5b4bfcacf 100644 --- a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.stderr +++ b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.current.stderr @@ -1,5 +1,5 @@ error: lifetime may not live long enough - --> $DIR/ptr-to-trait-obj-different-regions-id-trait.rs:21:17 + --> $DIR/ptr-to-trait-obj-different-regions-id-trait.rs:24:17 | LL | fn m<'a>() { | -- lifetime `'a` defined here diff --git a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.next.stderr b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.next.stderr new file mode 100644 index 00000000000..5a5b4bfcacf --- /dev/null +++ b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.next.stderr @@ -0,0 +1,15 @@ +error: lifetime may not live long enough + --> $DIR/ptr-to-trait-obj-different-regions-id-trait.rs:24:17 + | +LL | fn m<'a>() { + | -- lifetime `'a` defined here +LL | let unsend: *const dyn Cat<'a> = &(); +LL | let _send = unsend as *const S<dyn Cat<'static>>; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type annotation requires that `'a` must outlive `'static` + | + = note: requirement occurs because of the type `S<dyn Cat<'_>>`, which makes the generic argument `dyn Cat<'_>` invariant + = note: the struct `S<T>` is invariant over the parameter `T` + = help: see <https://doc.rust-lang.org/nomicon/subtyping.html> for more information about variance + +error: aborting due to 1 previous error + diff --git a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs index cdd55e24392..f968dca4fd3 100644 --- a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs +++ b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs @@ -1,3 +1,6 @@ +//@ revisions: current next +//@ ignore-compare-mode-next-solver (explicit revisions) +//@[next] compile-flags: -Znext-solver //@ check-fail // // Make sure we can't trick the compiler by using a projection. 
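
For reference, the revision lines added above make ptr-to-trait-obj-different-regions-id-trait.rs run under both the current trait solver and `-Znext-solver`, and the new `.next.stderr` expects the same rejection from both. Reassembled from the LL lines of that expected output, the pattern under test is roughly the following (the `Cat` trait and the wrapper `S<T>` are defined elsewhere in the test file and are not shown in this diff):

    fn m<'a>() {
        let unsend: *const dyn Cat<'a> = &();
        let _send = unsend as *const S<dyn Cat<'static>>;
        // rejected: the annotation requires that `'a` outlive `'static`
    }

Per the notes in the blessed output, the cast is refused because `S<T>` is invariant over `T`, so `dyn Cat<'a>` cannot be treated as `dyn Cat<'static>`.
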
diff --git a/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr b/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr index 646044ae41a..0c220a13876 100644 --- a/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr +++ b/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr @@ -42,6 +42,7 @@ LL | let _ = type_ascribe!(Box::new( if true { false } else { true }), Box<d | = note: expected struct `Box<dyn Debug>` found struct `Box<bool>` + = help: `bool` implements `Debug` so you could box the found value and coerce it to the trait object `Box<dyn Debug>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:16:27 @@ -51,6 +52,7 @@ LL | let _ = type_ascribe!(Box::new( match true { true => 'a', false => 'b' | = note: expected struct `Box<dyn Debug>` found struct `Box<char>` + = help: `char` implements `Debug` so you could box the found value and coerce it to the trait object `Box<dyn Debug>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:18:27 @@ -96,6 +98,7 @@ LL | let _ = type_ascribe!(&if true { false } else { true }, &dyn Debug); | = note: expected reference `&dyn Debug` found reference `&bool` + = help: `bool` implements `Debug` so you could box the found value and coerce it to the trait object `Box<dyn Debug>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:24:27 @@ -105,6 +108,7 @@ LL | let _ = type_ascribe!(&match true { true => 'a', false => 'b' }, &dyn D | = note: expected reference `&dyn Debug` found reference `&char` + = help: `char` implements `Debug` so you could box the found value and coerce it to the trait object `Box<dyn Debug>`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:26:27 diff --git a/tests/ui/consts/const-eval/raw-bytes.32bit.stderr b/tests/ui/consts/const-eval/raw-bytes.32bit.stderr index 25f17f9c38a..27c85cc8ce4 100644 --- a/tests/ui/consts/const-eval/raw-bytes.32bit.stderr +++ b/tests/ui/consts/const-eval/raw-bytes.32bit.stderr @@ -436,17 +436,27 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──╼╾──╼ } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:196:62 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:196:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC_ID╼ 00 00 00 00 │ ╾──╼.... 
+ } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:199:65 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:199:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC32 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC27<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──╼╾──╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/raw-bytes.rs:204:1 diff --git a/tests/ui/consts/const-eval/raw-bytes.64bit.stderr b/tests/ui/consts/const-eval/raw-bytes.64bit.stderr index 0fb9694895d..2b0ce99a881 100644 --- a/tests/ui/consts/const-eval/raw-bytes.64bit.stderr +++ b/tests/ui/consts/const-eval/raw-bytes.64bit.stderr @@ -436,17 +436,27 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──────╼╾──────╼ } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:196:62 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:196:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC_ID╼ 00 00 00 00 00 00 00 00 │ ╾──────╼........ + } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:199:65 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:199:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC32 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC27<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
+ = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──────╼╾──────╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/raw-bytes.rs:204:1 diff --git a/tests/ui/consts/const-eval/raw-bytes.rs b/tests/ui/consts/const-eval/raw-bytes.rs index de1a81b0024..0df732df30e 100644 --- a/tests/ui/consts/const-eval/raw-bytes.rs +++ b/tests/ui/consts/const-eval/raw-bytes.rs @@ -194,10 +194,10 @@ const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, &bool //~| expected a boolean const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| null pointer const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable // Uninhabited types diff --git a/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr b/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr index 439ccb24e61..5c47cbfdf3b 100644 --- a/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr +++ b/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr @@ -1,46 +1,56 @@ -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:19:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:18:1 + | +LL | const INVALID_VTABLE_ALIGNMENT: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC1<imm>, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[0usize, 1usize, 1000usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC8 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC0<imm>╼ ╾ALLOC1<imm>╼ │ ╾──╼╾──╼ + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:24:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:23:1 + | +LL | const INVALID_VTABLE_SIZE: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC3<imm>, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[1usize, usize::MAX, 1usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC9 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
+ = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC2<imm>╼ ╾ALLOC3<imm>╼ │ ╾──╼╾──╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:33:1 | LL | const INVALID_VTABLE_ALIGNMENT_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC1<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC0<imm>╼ ╾ALLOC1<imm>╼ │ ╾──╼╾──╼ + ╾ALLOC4<imm>╼ ╾ALLOC5<imm>╼ │ ╾──╼╾──╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:38:1 | LL | const INVALID_VTABLE_SIZE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC3<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC7<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC2<imm>╼ ╾ALLOC3<imm>╼ │ ╾──╼╾──╼ + ╾ALLOC6<imm>╼ ╾ALLOC7<imm>╼ │ ╾──╼╾──╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:44:1 | LL | const INVALID_VTABLE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC9<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC4<imm>╼ ╾ALLOC5<imm>╼ │ ╾──╼╾──╼ + ╾ALLOC8<imm>╼ ╾ALLOC9<imm>╼ │ ╾──╼╾──╼ } error[E0080]: it is undefined behavior to use this value @@ -51,7 +61,7 @@ LL | const G: Wide = unsafe { Transmute { t: FOO }.u }; | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC6<imm>╼ ╾ALLOC7╼ │ ╾──╼╾──╼ + ╾ALLOC10<imm>╼ ╾ALLOC11╼ │ ╾──╼╾──╼ } error: aborting due to 6 previous errors diff --git a/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr b/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr index 89bf959703a..f400073aca2 100644 --- a/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr +++ b/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr @@ -1,46 +1,56 @@ -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:19:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:18:1 + | +LL | const INVALID_VTABLE_ALIGNMENT: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC1<imm>, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[0usize, 1usize, 1000usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC8 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC0<imm>╼ ╾ALLOC1<imm>╼ │ ╾──────╼╾──────╼ + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:24:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:23:1 + | +LL | const INVALID_VTABLE_SIZE: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC3<imm>, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[1usize, usize::MAX, 1usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC9 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC2<imm>╼ ╾ALLOC3<imm>╼ │ ╾──────╼╾──────╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:33:1 | LL | const INVALID_VTABLE_ALIGNMENT_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC1<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC0<imm>╼ ╾ALLOC1<imm>╼ │ ╾──────╼╾──────╼ + ╾ALLOC4<imm>╼ ╾ALLOC5<imm>╼ │ ╾──────╼╾──────╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:38:1 | LL | const INVALID_VTABLE_SIZE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC3<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC7<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC2<imm>╼ ╾ALLOC3<imm>╼ │ ╾──────╼╾──────╼ + ╾ALLOC6<imm>╼ ╾ALLOC7<imm>╼ │ ╾──────╼╾──────╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:44:1 | LL | const INVALID_VTABLE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC9<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC4<imm>╼ ╾ALLOC5<imm>╼ │ ╾──────╼╾──────╼ + ╾ALLOC8<imm>╼ ╾ALLOC9<imm>╼ │ ╾──────╼╾──────╼ } error[E0080]: it is undefined behavior to use this value @@ -51,7 +61,7 @@ LL | const G: Wide = unsafe { Transmute { t: FOO }.u }; | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC6<imm>╼ ╾ALLOC7╼ │ ╾──────╼╾──────╼ + ╾ALLOC10<imm>╼ ╾ALLOC11╼ │ ╾──────╼╾──────╼ } error: aborting due to 6 previous errors diff --git a/tests/ui/consts/const-eval/ub-incorrect-vtable.rs b/tests/ui/consts/const-eval/ub-incorrect-vtable.rs index 4325495a380..8058f7693a7 100644 --- a/tests/ui/consts/const-eval/ub-incorrect-vtable.rs +++ b/tests/ui/consts/const-eval/ub-incorrect-vtable.rs @@ -17,12 +17,12 @@ trait Trait {} const INVALID_VTABLE_ALIGNMENT: &dyn Trait = unsafe { std::mem::transmute((&92u8, &[0usize, 1usize, 1000usize])) }; -//~^ ERROR evaluation of constant value failed +//~^^ ERROR it is undefined behavior to use this value //~| vtable const INVALID_VTABLE_SIZE: &dyn Trait = unsafe { std::mem::transmute((&92u8, &[1usize, usize::MAX, 1usize])) }; -//~^ ERROR evaluation of constant value failed +//~^^ ERROR it is undefined behavior to use this value //~| vtable #[repr(transparent)] diff --git a/tests/ui/consts/const-eval/ub-wide-ptr.rs b/tests/ui/consts/const-eval/ub-wide-ptr.rs index 3956146f6ae..991d4424dcf 100644 --- a/tests/ui/consts/const-eval/ub-wide-ptr.rs +++ b/tests/ui/consts/const-eval/ub-wide-ptr.rs @@ -123,13 +123,13 @@ const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, 4u //~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_UNALIGNED_VTABLE: &dyn Trait = unsafe { mem::transmute((&92u8, &[0u8; 128])) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_BAD_DROP_FN_NULL: &dyn Trait = unsafe { mem::transmute((&92u8, &[0usize; 8])) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_BAD_DROP_FN_INT: &dyn Trait = unsafe { mem::transmute((&92u8, &[1usize; 8])) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &[&42u8; 8]))) }; //~^ ERROR it is undefined behavior to use this value @@ -142,10 +142,10 @@ const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, &bool // # raw trait object const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| null pointer const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const RAW_TRAIT_OBJ_CONTENT_INVALID: *const dyn Trait = unsafe { mem::transmute::<_, &bool>(&3u8) } as *const dyn Trait; // ok because raw // Officially blessed way to get the vtable @@ -154,12 +154,12 @@ const DYN_METADATA: ptr::DynMetadata<dyn Send> = ptr::metadata::<dyn Send>(ptr:: static mut RAW_TRAIT_OBJ_VTABLE_NULL_THROUGH_REF: *const dyn Trait = unsafe { mem::transmute::<_, &dyn Trait>((&92u8, 0usize)) - //~^ ERROR could not evaluate static initializer + //~^^ ERROR it is undefined behavior to use this value //~| null pointer }; static mut RAW_TRAIT_OBJ_VTABLE_INVALID_THROUGH_REF: *const dyn Trait = unsafe { mem::transmute::<_, &dyn Trait>((&92u8, &3u64)) - //~^ ERROR could not evaluate static initializer + //~^^ ERROR it is undefined behavior to use this value //~| vtable }; diff --git 
a/tests/ui/consts/const-eval/ub-wide-ptr.stderr b/tests/ui/consts/const-eval/ub-wide-ptr.stderr index c29cc836fff..92f0029a5b3 100644 --- a/tests/ui/consts/const-eval/ub-wide-ptr.stderr +++ b/tests/ui/consts/const-eval/ub-wide-ptr.stderr @@ -218,29 +218,44 @@ LL | const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u HEX_DUMP } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:125:57 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:125:1 | LL | const TRAIT_OBJ_UNALIGNED_VTABLE: &dyn Trait = unsafe { mem::transmute((&92u8, &[0u8; 128])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC20 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC17<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:128:57 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:128:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_NULL: &dyn Trait = unsafe { mem::transmute((&92u8, &[0usize; 8])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC21 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC19<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:131:56 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:131:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_INT: &dyn Trait = unsafe { mem::transmute((&92u8, &[1usize; 8])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC22 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC21<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-wide-ptr.rs:134:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &[&42u8; 8]))) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC17<imm>, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC23<imm>, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { @@ -258,29 +273,49 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, HEX_DUMP } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:144:62 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:144:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:147:65 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:147:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC23 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC28<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: could not evaluate static initializer - --> $DIR/ub-wide-ptr.rs:156:5 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:155:1 | -LL | mem::transmute::<_, &dyn Trait>((&92u8, 0usize)) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer +LL | static mut RAW_TRAIT_OBJ_VTABLE_NULL_THROUGH_REF: *const dyn Trait = unsafe { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: could not evaluate static initializer - --> $DIR/ub-wide-ptr.rs:161:5 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:160:1 | -LL | mem::transmute::<_, &dyn Trait>((&92u8, &3u64)) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC24 as vtable pointer but it does not point to a vtable +LL | static mut RAW_TRAIT_OBJ_VTABLE_INVALID_THROUGH_REF: *const dyn Trait = unsafe { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC31<imm>, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. 
Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } error: aborting due to 29 previous errors diff --git a/tests/ui/coroutine/gen_block.none.stderr b/tests/ui/coroutine/gen_block.none.stderr index 64fa2be003d..15123a49e48 100644 --- a/tests/ui/coroutine/gen_block.none.stderr +++ b/tests/ui/coroutine/gen_block.none.stderr @@ -73,7 +73,6 @@ LL | let _ = || yield true; = note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks --> $DIR/gen_block.rs:16:16 @@ -95,7 +94,6 @@ LL | let _ = #[coroutine] || yield true; = note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: aborting due to 11 previous errors diff --git a/tests/ui/coroutine/issue-58888.rs b/tests/ui/coroutine/issue-58888.rs index 6266f97ce8c..e4fada0cd43 100644 --- a/tests/ui/coroutine/issue-58888.rs +++ b/tests/ui/coroutine/issue-58888.rs @@ -13,7 +13,8 @@ impl Database { } fn check_connection(&self) -> impl Coroutine<Yield = (), Return = ()> + '_ { - #[coroutine] move || { + #[coroutine] + move || { let iter = self.get_connection(); for i in iter { yield i @@ -23,5 +24,5 @@ impl Database { } fn main() { - Database.check_connection(); + let _ = Database.check_connection(); } diff --git a/tests/ui/deriving/auxiliary/another-proc-macro.rs b/tests/ui/deriving/auxiliary/another-proc-macro.rs new file mode 100644 index 00000000000..a05175c9de9 --- /dev/null +++ b/tests/ui/deriving/auxiliary/another-proc-macro.rs @@ -0,0 +1,45 @@ +//@ force-host +//@ no-prefer-dynamic + +#![crate_type = "proc-macro"] +#![feature(proc_macro_quote)] + +extern crate proc_macro; + +use proc_macro::{quote, TokenStream}; + +#[proc_macro_derive(AnotherMacro, attributes(pointee))] +pub fn derive(_input: TokenStream) -> TokenStream { + quote! { + const _: () = { + const ANOTHER_MACRO_DERIVED: () = (); + }; + } + .into() +} + +#[proc_macro_attribute] +pub fn pointee( + _attr: proc_macro::TokenStream, + _item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + quote! { + const _: () = { + const POINTEE_MACRO_ATTR_DERIVED: () = (); + }; + } + .into() +} + +#[proc_macro_attribute] +pub fn default( + _attr: proc_macro::TokenStream, + _item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + quote! 
{ + const _: () = { + const DEFAULT_MACRO_ATTR_DERIVED: () = (); + }; + } + .into() +} diff --git a/tests/ui/deriving/built-in-proc-macro-scope.rs b/tests/ui/deriving/built-in-proc-macro-scope.rs new file mode 100644 index 00000000000..41c95f63b13 --- /dev/null +++ b/tests/ui/deriving/built-in-proc-macro-scope.rs @@ -0,0 +1,25 @@ +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] + +#[macro_use] +extern crate another_proc_macro; + +use another_proc_macro::{pointee, AnotherMacro}; + +#[derive(core::marker::SmartPointer)] +#[repr(transparent)] +pub struct Ptr<'a, #[pointee] T: ?Sized> { + data: &'a mut T, +} + +#[pointee] +fn f() {} + +#[derive(AnotherMacro)] +#[pointee] +struct MyStruct; + +fn main() {} diff --git a/tests/ui/deriving/built-in-proc-macro-scope.stdout b/tests/ui/deriving/built-in-proc-macro-scope.stdout new file mode 100644 index 00000000000..c649b7a9a57 --- /dev/null +++ b/tests/ui/deriving/built-in-proc-macro-scope.stdout @@ -0,0 +1,43 @@ +#![feature(prelude_import)] +#![no_std] +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] +#[prelude_import] +use ::std::prelude::rust_2015::*; +#[macro_use] +extern crate std; + +#[macro_use] +extern crate another_proc_macro; + +use another_proc_macro::{pointee, AnotherMacro}; + +#[repr(transparent)] +pub struct Ptr<'a, #[pointee] T: ?Sized> { + data: &'a mut T, +} +#[automatically_derived] +impl<'a, T: ?Sized + ::core::marker::Unsize<__S>, __S: ?Sized> + ::core::ops::DispatchFromDyn<Ptr<'a, __S>> for Ptr<'a, T> { +} +#[automatically_derived] +impl<'a, T: ?Sized + ::core::marker::Unsize<__S>, __S: ?Sized> + ::core::ops::CoerceUnsized<Ptr<'a, __S>> for Ptr<'a, T> { +} + + + +const _: () = + { + const POINTEE_MACRO_ATTR_DERIVED: () = (); + }; +#[pointee] +struct MyStruct; +const _: () = + { + const ANOTHER_MACRO_DERIVED: () = (); + }; +fn main() {} diff --git a/tests/ui/deriving/proc-macro-attribute-mixing.rs b/tests/ui/deriving/proc-macro-attribute-mixing.rs new file mode 100644 index 00000000000..489665ebeb5 --- /dev/null +++ b/tests/ui/deriving/proc-macro-attribute-mixing.rs @@ -0,0 +1,20 @@ +// This test certify that we can mix attribute macros from Rust and external proc-macros. +// For instance, `#[derive(Default)]` uses `#[default]` and `#[derive(SmartPointer)]` uses +// `#[pointee]`. +// The scoping rule should allow the use of the said two attributes when external proc-macros +// are in scope. + +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] + +#[macro_use] +extern crate another_proc_macro; + +#[pointee] +fn f() {} + +#[default] +fn g() {} diff --git a/tests/ui/deriving/proc-macro-attribute-mixing.stdout b/tests/ui/deriving/proc-macro-attribute-mixing.stdout new file mode 100644 index 00000000000..f314f6efbe2 --- /dev/null +++ b/tests/ui/deriving/proc-macro-attribute-mixing.stdout @@ -0,0 +1,30 @@ +#![feature(prelude_import)] +#![no_std] +// This test certify that we can mix attribute macros from Rust and external proc-macros. +// For instance, `#[derive(Default)]` uses `#[default]` and `#[derive(SmartPointer)]` uses +// `#[pointee]`. +// The scoping rule should allow the use of the said two attributes when external proc-macros +// are in scope. 
+ +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] +#[prelude_import] +use ::std::prelude::rust_2015::*; +#[macro_use] +extern crate std; + +#[macro_use] +extern crate another_proc_macro; + + +const _: () = + { + const POINTEE_MACRO_ATTR_DERIVED: () = (); + }; +const _: () = + { + const DEFAULT_MACRO_ATTR_DERIVED: () = (); + }; diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs new file mode 100644 index 00000000000..5548fa2f52e --- /dev/null +++ b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs @@ -0,0 +1,21 @@ +#![allow(unknown_or_malformed_diagnostic_attributes)] + +trait Foo {} + +#[diagnostic::do_not_recommend] +impl<A> Foo for (A,) {} + +#[diagnostic::do_not_recommend] +impl<A, B> Foo for (A, B) {} + +#[diagnostic::do_not_recommend] +impl<A, B, C> Foo for (A, B, C) {} + +impl Foo for i32 {} + +fn check(a: impl Foo) {} + +fn main() { + check(()); + //~^ ERROR the trait bound `(): Foo` is not satisfied +} diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr new file mode 100644 index 00000000000..e56af28f3fb --- /dev/null +++ b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr @@ -0,0 +1,21 @@ +error[E0277]: the trait bound `(): Foo` is not satisfied + --> $DIR/do_not_apply_attribute_without_feature_flag.rs:19:11 + | +LL | check(()); + | ----- ^^ the trait `Foo` is not implemented for `()` + | | + | required by a bound introduced by this call + | + = help: the following other types implement trait `Foo`: + (A, B) + (A, B, C) + (A,) +note: required by a bound in `check` + --> $DIR/do_not_apply_attribute_without_feature_flag.rs:16:18 + | +LL | fn check(a: impl Foo) {} + | ^^^ required by this bound in `check` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0277`. 
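The `do_not_apply_attribute_without_feature_flag` test above exercises the ungated case: with `unknown_or_malformed_diagnostic_attributes` allowed and no feature enabled, `#[diagnostic::do_not_recommend]` is inert, so all three tuple impls still appear in the E0277 help list. For contrast, a minimal sketch of the gated counterpart follows; the `do_not_recommend` feature name and the expectation that the annotated impls drop out of the help list are assumptions based on the test's intent, not something this diff shows.

```rust
// Sketch only (assumes a nightly toolchain with the `do_not_recommend` gate).
// This intentionally fails to compile; the point is the shape of the E0277
// help, where the annotated impls should no longer be suggested.
#![feature(do_not_recommend)]

trait Foo {}

#[diagnostic::do_not_recommend]
impl<A> Foo for (A,) {}

#[diagnostic::do_not_recommend]
impl<A, B> Foo for (A, B) {}

impl Foo for i32 {}

fn check(_a: impl Foo) {}

fn main() {
    check(()); // still E0277, but the tuple impls should not be recommended
}
```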
diff --git a/tests/ui/feature-gates/feature-gate-asm_const.rs b/tests/ui/feature-gates/feature-gate-asm_const.rs deleted file mode 100644 index 42d5ba69222..00000000000 --- a/tests/ui/feature-gates/feature-gate-asm_const.rs +++ /dev/null @@ -1,16 +0,0 @@ -//@ only-x86_64 - -use std::arch::asm; - -unsafe fn foo<const N: usize>() { - asm!("mov eax, {}", const N + 1); - //~^ ERROR const operands for inline assembly are unstable -} - -fn main() { - unsafe { - foo::<0>(); - asm!("mov eax, {}", const 123); - //~^ ERROR const operands for inline assembly are unstable - } -} diff --git a/tests/ui/feature-gates/feature-gate-asm_const.stderr b/tests/ui/feature-gates/feature-gate-asm_const.stderr deleted file mode 100644 index 4f83fee6759..00000000000 --- a/tests/ui/feature-gates/feature-gate-asm_const.stderr +++ /dev/null @@ -1,23 +0,0 @@ -error[E0658]: const operands for inline assembly are unstable - --> $DIR/feature-gate-asm_const.rs:6:25 - | -LL | asm!("mov eax, {}", const N + 1); - | ^^^^^^^^^^^ - | - = note: see issue #93332 <https://github.com/rust-lang/rust/issues/93332> for more information - = help: add `#![feature(asm_const)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: const operands for inline assembly are unstable - --> $DIR/feature-gate-asm_const.rs:13:29 - | -LL | asm!("mov eax, {}", const 123); - | ^^^^^^^^^ - | - = note: see issue #93332 <https://github.com/rust-lang/rust/issues/93332> for more information - = help: add `#![feature(asm_const)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error: aborting due to 2 previous errors - -For more information about this error, try `rustc --explain E0658`. 
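The deleted `feature-gate-asm_const` test above goes with the lowering change earlier in this diff that stops emitting a `feature_err` for `sym::asm_const`: `const` operands in `asm!` no longer trigger E0658. A small sketch of the now-accepted usage, assuming an x86_64 target and a toolchain that contains this change:

```rust
// Sketch: a `const` operand without `#![feature(asm_const)]` (assumes x86_64).
use std::arch::asm;

fn add_one<const N: usize>() -> u64 {
    let out: u64;
    unsafe {
        // `{1}` is a `const` operand; it is interpolated into the template
        // as an integer literal at compile time.
        asm!("mov {0}, {1}", out(reg) out, const N + 1);
    }
    out
}

fn main() {
    assert_eq!(add_one::<41>(), 42);
}
```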
diff --git a/tests/ui/feature-gates/feature-gate-coroutines.none.stderr b/tests/ui/feature-gates/feature-gate-coroutines.none.stderr index 65e7737ef84..032d7adf77a 100644 --- a/tests/ui/feature-gates/feature-gate-coroutines.none.stderr +++ b/tests/ui/feature-gates/feature-gate-coroutines.none.stderr @@ -47,7 +47,6 @@ LL | yield true; = note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks --> $DIR/feature-gate-coroutines.rs:5:5 @@ -69,7 +68,6 @@ LL | let _ = || yield true; = note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks --> $DIR/feature-gate-coroutines.rs:10:16 diff --git a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs index 3257a9ca624..7b4764ee768 100644 --- a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs +++ b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs @@ -3,7 +3,6 @@ use std::marker::SmartPointer; //~ ERROR use of unstable library feature 'derive #[derive(SmartPointer)] //~ ERROR use of unstable library feature 'derive_smart_pointer' #[repr(transparent)] struct MyPointer<'a, #[pointee] T: ?Sized> { - //~^ ERROR the `#[pointee]` attribute is an experimental feature ptr: &'a T, } diff --git a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr index 19501939dc5..ea4d1271b7c 100644 --- a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr +++ b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr @@ -8,16 +8,6 @@ LL | #[derive(SmartPointer)] = help: add `#![feature(derive_smart_pointer)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error[E0658]: the `#[pointee]` attribute is an experimental feature - --> $DIR/feature-gate-derive-smart-pointer.rs:5:22 - | -LL | struct MyPointer<'a, #[pointee] T: ?Sized> { - | ^^^^^^^^^^ - | - = note: see issue #123430 <https://github.com/rust-lang/rust/issues/123430> for more information - = help: add `#![feature(derive_smart_pointer)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - error[E0658]: use of unstable library feature 'derive_smart_pointer' --> $DIR/feature-gate-derive-smart-pointer.rs:1:5 | @@ -28,6 +18,6 @@ LL | use std::marker::SmartPointer; = help: add `#![feature(derive_smart_pointer)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error: aborting due to 3 previous errors +error: aborting due to 2 previous errors For more information about this error, try `rustc --explain E0658`. 
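The `feature-gate-derive-smart-pointer` change above removes the standalone "the `#[pointee]` attribute is an experimental feature" error, leaving only the `derive_smart_pointer` library gate: once the derive itself is gated, the helper attribute no longer needs its own diagnostic. For reference, a sketch of the gated usage these tests build towards (assumes a nightly with `derive_smart_pointer`, and that the derive emits the `CoerceUnsized`/`DispatchFromDyn` impls shown in `built-in-proc-macro-scope.stdout` earlier in this diff):

```rust
// Sketch only; assumes `#![feature(derive_smart_pointer)]` on nightly.
#![feature(derive_smart_pointer)]

use std::marker::SmartPointer;

#[derive(SmartPointer)]
#[repr(transparent)]
struct MyPointer<'a, #[pointee] T: ?Sized> {
    ptr: &'a T,
}

fn main() {
    let data = [1u8, 2, 3];
    let sized: MyPointer<'_, [u8; 3]> = MyPointer { ptr: &data };
    // The derived `CoerceUnsized` impl is what allows this unsizing coercion.
    let _unsized: MyPointer<'_, [u8]> = sized;
}
```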
diff --git a/tests/ui/higher-ranked/subtyping-fn-ptr-coercion.rs b/tests/ui/higher-ranked/subtyping-fn-ptr-coercion.rs new file mode 100644 index 00000000000..0cecf6808f2 --- /dev/null +++ b/tests/ui/higher-ranked/subtyping-fn-ptr-coercion.rs @@ -0,0 +1,10 @@ +//@ check-pass + +// Check that we use subtyping when reifying a closure into a function pointer. + +fn foo(x: &str) {} + +fn main() { + let c = |_: &str| {}; + let x = c as fn(&'static str); +} diff --git a/tests/ui/impl-trait/recursive-ice-101862.stderr b/tests/ui/impl-trait/recursive-ice-101862.stderr index f4148720c33..970373422e8 100644 --- a/tests/ui/impl-trait/recursive-ice-101862.stderr +++ b/tests/ui/impl-trait/recursive-ice-101862.stderr @@ -11,13 +11,13 @@ LL | vec![].append(&mut ice(x.as_ref())); = note: `#[warn(unconditional_recursion)]` on by default error[E0792]: expected generic type parameter, found `&str` - --> $DIR/recursive-ice-101862.rs:6:5 + --> $DIR/recursive-ice-101862.rs:6:19 | LL | pub fn ice(x: impl AsRef<str>) -> impl IntoIterator<Item = ()> { | --------------- this generic parameter must be used with a generic type parameter LL | LL | vec![].append(&mut ice(x.as_ref())); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^ error: aborting due to 1 previous error; 1 warning emitted diff --git a/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr b/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr index 17cb6fbd94b..f6f341d6f2f 100644 --- a/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr +++ b/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr @@ -254,7 +254,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:279:9 + --> $DIR/empty-types.rs:281:9 | LL | _ => {} | ^ @@ -262,7 +262,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:282:9 + --> $DIR/empty-types.rs:284:9 | LL | (_, _) => {} | ^^^^^^ @@ -270,7 +270,7 @@ LL | (_, _) => {} = note: this pattern matches no values because `(!, !)` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:285:9 + --> $DIR/empty-types.rs:287:9 | LL | Ok(_) => {} | ^^^^^ @@ -278,7 +278,7 @@ LL | Ok(_) => {} = note: this pattern matches no values because `Result<!, !>` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:286:9 + --> $DIR/empty-types.rs:288:9 | LL | Err(_) => {} | ^^^^^^ @@ -286,7 +286,7 @@ LL | Err(_) => {} = note: this pattern matches no values because `Result<!, !>` is uninhabited error[E0004]: non-exhaustive patterns: type `&[!]` is non-empty - --> $DIR/empty-types.rs:318:11 + --> $DIR/empty-types.rs:327:11 | LL | match slice_never {} | ^^^^^^^^^^^ @@ -300,7 +300,7 @@ LL + } | error[E0004]: non-exhaustive patterns: `&[]` not covered - --> $DIR/empty-types.rs:329:11 + --> $DIR/empty-types.rs:338:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[]` not covered @@ -313,7 +313,7 @@ LL + &[] => todo!() | error[E0004]: non-exhaustive patterns: `&[]` not covered - --> $DIR/empty-types.rs:343:11 + --> $DIR/empty-types.rs:352:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[]` not covered @@ -327,7 +327,7 @@ LL + &[] => todo!() | error[E0004]: non-exhaustive patterns: type `[!]` is non-empty - --> $DIR/empty-types.rs:350:11 + --> $DIR/empty-types.rs:359:11 | LL | match *slice_never {} | ^^^^^^^^^^^^ @@ -341,7 +341,7 @@ LL + } | error: unreachable pattern - --> 
$DIR/empty-types.rs:359:9 + --> $DIR/empty-types.rs:368:9 | LL | _ => {} | ^ @@ -349,7 +349,7 @@ LL | _ => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:362:9 + --> $DIR/empty-types.rs:371:9 | LL | [_, _, _] => {} | ^^^^^^^^^ @@ -357,7 +357,7 @@ LL | [_, _, _] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:365:9 + --> $DIR/empty-types.rs:374:9 | LL | [_, ..] => {} | ^^^^^^^ @@ -365,7 +365,7 @@ LL | [_, ..] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error[E0004]: non-exhaustive patterns: type `[!; 0]` is non-empty - --> $DIR/empty-types.rs:379:11 + --> $DIR/empty-types.rs:388:11 | LL | match array_0_never {} | ^^^^^^^^^^^^^ @@ -379,7 +379,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:386:9 + --> $DIR/empty-types.rs:395:9 | LL | [] => {} | -- matches all the values already @@ -387,7 +387,7 @@ LL | _ => {} | ^ unreachable pattern error[E0004]: non-exhaustive patterns: `[]` not covered - --> $DIR/empty-types.rs:388:11 + --> $DIR/empty-types.rs:397:11 | LL | match array_0_never { | ^^^^^^^^^^^^^ pattern `[]` not covered @@ -401,7 +401,7 @@ LL + [] => todo!() | error: unreachable pattern - --> $DIR/empty-types.rs:407:9 + --> $DIR/empty-types.rs:416:9 | LL | Some(_) => {} | ^^^^^^^ @@ -409,7 +409,7 @@ LL | Some(_) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:412:9 + --> $DIR/empty-types.rs:421:9 | LL | Some(_a) => {} | ^^^^^^^^ @@ -417,7 +417,7 @@ LL | Some(_a) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:417:9 + --> $DIR/empty-types.rs:426:9 | LL | None => {} | ---- matches all the values already @@ -426,7 +426,7 @@ LL | _ => {} | ^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:422:9 + --> $DIR/empty-types.rs:431:9 | LL | None => {} | ---- matches all the values already @@ -435,7 +435,7 @@ LL | _a => {} | ^^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:594:9 + --> $DIR/empty-types.rs:603:9 | LL | _ => {} | ^ @@ -443,7 +443,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:597:9 + --> $DIR/empty-types.rs:606:9 | LL | _x => {} | ^^ @@ -451,7 +451,7 @@ LL | _x => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:600:9 + --> $DIR/empty-types.rs:609:9 | LL | _ if false => {} | ^ @@ -459,7 +459,7 @@ LL | _ if false => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:603:9 + --> $DIR/empty-types.rs:612:9 | LL | _x if false => {} | ^^ diff --git a/tests/ui/pattern/usefulness/empty-types.never_pats.stderr b/tests/ui/pattern/usefulness/empty-types.never_pats.stderr index 1ecb15f2cae..55a138c2d1c 100644 --- a/tests/ui/pattern/usefulness/empty-types.never_pats.stderr +++ b/tests/ui/pattern/usefulness/empty-types.never_pats.stderr @@ -296,7 +296,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:279:9 + --> $DIR/empty-types.rs:281:9 | LL | _ => {} | ^ @@ -304,7 +304,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable 
pattern - --> $DIR/empty-types.rs:282:9 + --> $DIR/empty-types.rs:284:9 | LL | (_, _) => {} | ^^^^^^ @@ -312,7 +312,7 @@ LL | (_, _) => {} = note: this pattern matches no values because `(!, !)` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:285:9 + --> $DIR/empty-types.rs:287:9 | LL | Ok(_) => {} | ^^^^^ @@ -320,15 +320,29 @@ LL | Ok(_) => {} = note: this pattern matches no values because `Result<!, !>` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:286:9 + --> $DIR/empty-types.rs:288:9 | LL | Err(_) => {} | ^^^^^^ | = note: this pattern matches no values because `Result<!, !>` is uninhabited +error[E0005]: refutable pattern in local binding + --> $DIR/empty-types.rs:297:13 + | +LL | let Ok(_) = *ptr_result_never_err; + | ^^^^^ pattern `Err(!)` not covered + | + = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant + = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html + = note: the matched value is of type `Result<u8, !>` +help: you might want to use `if let` to ignore the variant that isn't matched + | +LL | if let Ok(_) = *ptr_result_never_err { todo!() }; + | ++ +++++++++++ + error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:307:11 + --> $DIR/empty-types.rs:316:11 | LL | match *x {} | ^^ @@ -342,7 +356,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `(!, !)` is non-empty - --> $DIR/empty-types.rs:309:11 + --> $DIR/empty-types.rs:318:11 | LL | match *x {} | ^^ @@ -356,7 +370,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: `Ok(!)` and `Err(!)` not covered - --> $DIR/empty-types.rs:311:11 + --> $DIR/empty-types.rs:320:11 | LL | match *x {} | ^^ patterns `Ok(!)` and `Err(!)` not covered @@ -378,7 +392,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `[!; 3]` is non-empty - --> $DIR/empty-types.rs:313:11 + --> $DIR/empty-types.rs:322:11 | LL | match *x {} | ^^ @@ -392,7 +406,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `&[!]` is non-empty - --> $DIR/empty-types.rs:318:11 + --> $DIR/empty-types.rs:327:11 | LL | match slice_never {} | ^^^^^^^^^^^ @@ -406,7 +420,7 @@ LL + } | error[E0004]: non-exhaustive patterns: `&[!, ..]` not covered - --> $DIR/empty-types.rs:320:11 + --> $DIR/empty-types.rs:329:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[!, ..]` not covered @@ -420,7 +434,7 @@ LL + &[!, ..] | error[E0004]: non-exhaustive patterns: `&[]`, `&[!]` and `&[!, !]` not covered - --> $DIR/empty-types.rs:329:11 + --> $DIR/empty-types.rs:338:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]`, `&[!]` and `&[!, !]` not covered @@ -433,7 +447,7 @@ LL + &[] | &[!] | &[!, !] => todo!() | error[E0004]: non-exhaustive patterns: `&[]` and `&[!, ..]` not covered - --> $DIR/empty-types.rs:343:11 + --> $DIR/empty-types.rs:352:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]` and `&[!, ..]` not covered @@ -447,7 +461,7 @@ LL + &[] | &[!, ..] 
=> todo!() | error[E0004]: non-exhaustive patterns: type `[!]` is non-empty - --> $DIR/empty-types.rs:350:11 + --> $DIR/empty-types.rs:359:11 | LL | match *slice_never {} | ^^^^^^^^^^^^ @@ -461,7 +475,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:359:9 + --> $DIR/empty-types.rs:368:9 | LL | _ => {} | ^ @@ -469,7 +483,7 @@ LL | _ => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:362:9 + --> $DIR/empty-types.rs:371:9 | LL | [_, _, _] => {} | ^^^^^^^^^ @@ -477,7 +491,7 @@ LL | [_, _, _] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:365:9 + --> $DIR/empty-types.rs:374:9 | LL | [_, ..] => {} | ^^^^^^^ @@ -485,7 +499,7 @@ LL | [_, ..] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error[E0004]: non-exhaustive patterns: type `[!; 0]` is non-empty - --> $DIR/empty-types.rs:379:11 + --> $DIR/empty-types.rs:388:11 | LL | match array_0_never {} | ^^^^^^^^^^^^^ @@ -499,7 +513,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:386:9 + --> $DIR/empty-types.rs:395:9 | LL | [] => {} | -- matches all the values already @@ -507,7 +521,7 @@ LL | _ => {} | ^ unreachable pattern error[E0004]: non-exhaustive patterns: `[]` not covered - --> $DIR/empty-types.rs:388:11 + --> $DIR/empty-types.rs:397:11 | LL | match array_0_never { | ^^^^^^^^^^^^^ pattern `[]` not covered @@ -521,7 +535,7 @@ LL + [] => todo!() | error: unreachable pattern - --> $DIR/empty-types.rs:407:9 + --> $DIR/empty-types.rs:416:9 | LL | Some(_) => {} | ^^^^^^^ @@ -529,7 +543,7 @@ LL | Some(_) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:412:9 + --> $DIR/empty-types.rs:421:9 | LL | Some(_a) => {} | ^^^^^^^^ @@ -537,7 +551,7 @@ LL | Some(_a) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:417:9 + --> $DIR/empty-types.rs:426:9 | LL | None => {} | ---- matches all the values already @@ -546,7 +560,7 @@ LL | _ => {} | ^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:422:9 + --> $DIR/empty-types.rs:431:9 | LL | None => {} | ---- matches all the values already @@ -555,7 +569,7 @@ LL | _a => {} | ^^ unreachable pattern error[E0004]: non-exhaustive patterns: `&Some(!)` not covered - --> $DIR/empty-types.rs:442:11 + --> $DIR/empty-types.rs:451:11 | LL | match ref_opt_never { | ^^^^^^^^^^^^^ pattern `&Some(!)` not covered @@ -574,7 +588,7 @@ LL + &Some(!) | error[E0004]: non-exhaustive patterns: `Some(!)` not covered - --> $DIR/empty-types.rs:483:11 + --> $DIR/empty-types.rs:492:11 | LL | match *ref_opt_never { | ^^^^^^^^^^^^^^ pattern `Some(!)` not covered @@ -593,7 +607,7 @@ LL + Some(!) | error[E0004]: non-exhaustive patterns: `Err(!)` not covered - --> $DIR/empty-types.rs:531:11 + --> $DIR/empty-types.rs:540:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(!)` not covered @@ -612,7 +626,7 @@ LL + Err(!) | error[E0004]: non-exhaustive patterns: `Err(!)` not covered - --> $DIR/empty-types.rs:542:11 + --> $DIR/empty-types.rs:551:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(!)` not covered @@ -631,7 +645,7 @@ LL + Err(!) 
| error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:561:11 + --> $DIR/empty-types.rs:570:11 | LL | match *ref_tuple_half_never {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -645,7 +659,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:594:9 + --> $DIR/empty-types.rs:603:9 | LL | _ => {} | ^ @@ -653,7 +667,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:597:9 + --> $DIR/empty-types.rs:606:9 | LL | _x => {} | ^^ @@ -661,7 +675,7 @@ LL | _x => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:600:9 + --> $DIR/empty-types.rs:609:9 | LL | _ if false => {} | ^ @@ -669,7 +683,7 @@ LL | _ if false => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:603:9 + --> $DIR/empty-types.rs:612:9 | LL | _x if false => {} | ^^ @@ -677,7 +691,7 @@ LL | _x if false => {} = note: this pattern matches no values because `!` is uninhabited error[E0004]: non-exhaustive patterns: `&!` not covered - --> $DIR/empty-types.rs:628:11 + --> $DIR/empty-types.rs:637:11 | LL | match ref_never { | ^^^^^^^^^ pattern `&!` not covered @@ -693,7 +707,7 @@ LL + &! | error[E0004]: non-exhaustive patterns: `Ok(!)` not covered - --> $DIR/empty-types.rs:644:11 + --> $DIR/empty-types.rs:653:11 | LL | match *ref_result_never { | ^^^^^^^^^^^^^^^^^ pattern `Ok(!)` not covered @@ -712,7 +726,7 @@ LL + Ok(!) | error[E0004]: non-exhaustive patterns: `Some(!)` not covered - --> $DIR/empty-types.rs:664:11 + --> $DIR/empty-types.rs:673:11 | LL | match *x { | ^^ pattern `Some(!)` not covered @@ -730,7 +744,7 @@ LL ~ None => {}, LL + Some(!) | -error: aborting due to 64 previous errors; 1 warning emitted +error: aborting due to 65 previous errors; 1 warning emitted Some errors have detailed explanations: E0004, E0005. For more information about an error, try `rustc --explain E0004`. 
diff --git a/tests/ui/pattern/usefulness/empty-types.normal.stderr b/tests/ui/pattern/usefulness/empty-types.normal.stderr index c3421cd592e..83b3989ffde 100644 --- a/tests/ui/pattern/usefulness/empty-types.normal.stderr +++ b/tests/ui/pattern/usefulness/empty-types.normal.stderr @@ -287,7 +287,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:279:9 + --> $DIR/empty-types.rs:281:9 | LL | _ => {} | ^ @@ -295,7 +295,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:282:9 + --> $DIR/empty-types.rs:284:9 | LL | (_, _) => {} | ^^^^^^ @@ -303,7 +303,7 @@ LL | (_, _) => {} = note: this pattern matches no values because `(!, !)` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:285:9 + --> $DIR/empty-types.rs:287:9 | LL | Ok(_) => {} | ^^^^^ @@ -311,15 +311,29 @@ LL | Ok(_) => {} = note: this pattern matches no values because `Result<!, !>` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:286:9 + --> $DIR/empty-types.rs:288:9 | LL | Err(_) => {} | ^^^^^^ | = note: this pattern matches no values because `Result<!, !>` is uninhabited +error[E0005]: refutable pattern in local binding + --> $DIR/empty-types.rs:297:13 + | +LL | let Ok(_) = *ptr_result_never_err; + | ^^^^^ pattern `Err(_)` not covered + | + = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant + = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html + = note: the matched value is of type `Result<u8, !>` +help: you might want to use `if let` to ignore the variant that isn't matched + | +LL | if let Ok(_) = *ptr_result_never_err { todo!() }; + | ++ +++++++++++ + error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:307:11 + --> $DIR/empty-types.rs:316:11 | LL | match *x {} | ^^ @@ -333,7 +347,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `(!, !)` is non-empty - --> $DIR/empty-types.rs:309:11 + --> $DIR/empty-types.rs:318:11 | LL | match *x {} | ^^ @@ -347,7 +361,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: `Ok(_)` and `Err(_)` not covered - --> $DIR/empty-types.rs:311:11 + --> $DIR/empty-types.rs:320:11 | LL | match *x {} | ^^ patterns `Ok(_)` and `Err(_)` not covered @@ -369,7 +383,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `[!; 3]` is non-empty - --> $DIR/empty-types.rs:313:11 + --> $DIR/empty-types.rs:322:11 | LL | match *x {} | ^^ @@ -383,7 +397,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `&[!]` is non-empty - --> $DIR/empty-types.rs:318:11 + --> $DIR/empty-types.rs:327:11 | LL | match slice_never {} | ^^^^^^^^^^^ @@ -397,7 +411,7 @@ LL + } | error[E0004]: non-exhaustive patterns: `&[_, ..]` not covered - --> $DIR/empty-types.rs:320:11 + --> $DIR/empty-types.rs:329:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[_, ..]` not covered @@ -411,7 +425,7 @@ LL + &[_, ..] 
=> todo!() | error[E0004]: non-exhaustive patterns: `&[]`, `&[_]` and `&[_, _]` not covered - --> $DIR/empty-types.rs:329:11 + --> $DIR/empty-types.rs:338:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]`, `&[_]` and `&[_, _]` not covered @@ -424,7 +438,7 @@ LL + &[] | &[_] | &[_, _] => todo!() | error[E0004]: non-exhaustive patterns: `&[]` and `&[_, ..]` not covered - --> $DIR/empty-types.rs:343:11 + --> $DIR/empty-types.rs:352:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]` and `&[_, ..]` not covered @@ -438,7 +452,7 @@ LL + &[] | &[_, ..] => todo!() | error[E0004]: non-exhaustive patterns: type `[!]` is non-empty - --> $DIR/empty-types.rs:350:11 + --> $DIR/empty-types.rs:359:11 | LL | match *slice_never {} | ^^^^^^^^^^^^ @@ -452,7 +466,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:359:9 + --> $DIR/empty-types.rs:368:9 | LL | _ => {} | ^ @@ -460,7 +474,7 @@ LL | _ => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:362:9 + --> $DIR/empty-types.rs:371:9 | LL | [_, _, _] => {} | ^^^^^^^^^ @@ -468,7 +482,7 @@ LL | [_, _, _] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:365:9 + --> $DIR/empty-types.rs:374:9 | LL | [_, ..] => {} | ^^^^^^^ @@ -476,7 +490,7 @@ LL | [_, ..] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error[E0004]: non-exhaustive patterns: type `[!; 0]` is non-empty - --> $DIR/empty-types.rs:379:11 + --> $DIR/empty-types.rs:388:11 | LL | match array_0_never {} | ^^^^^^^^^^^^^ @@ -490,7 +504,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:386:9 + --> $DIR/empty-types.rs:395:9 | LL | [] => {} | -- matches all the values already @@ -498,7 +512,7 @@ LL | _ => {} | ^ unreachable pattern error[E0004]: non-exhaustive patterns: `[]` not covered - --> $DIR/empty-types.rs:388:11 + --> $DIR/empty-types.rs:397:11 | LL | match array_0_never { | ^^^^^^^^^^^^^ pattern `[]` not covered @@ -512,7 +526,7 @@ LL + [] => todo!() | error: unreachable pattern - --> $DIR/empty-types.rs:407:9 + --> $DIR/empty-types.rs:416:9 | LL | Some(_) => {} | ^^^^^^^ @@ -520,7 +534,7 @@ LL | Some(_) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:412:9 + --> $DIR/empty-types.rs:421:9 | LL | Some(_a) => {} | ^^^^^^^^ @@ -528,7 +542,7 @@ LL | Some(_a) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:417:9 + --> $DIR/empty-types.rs:426:9 | LL | None => {} | ---- matches all the values already @@ -537,7 +551,7 @@ LL | _ => {} | ^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:422:9 + --> $DIR/empty-types.rs:431:9 | LL | None => {} | ---- matches all the values already @@ -546,7 +560,7 @@ LL | _a => {} | ^^ unreachable pattern error[E0004]: non-exhaustive patterns: `&Some(_)` not covered - --> $DIR/empty-types.rs:442:11 + --> $DIR/empty-types.rs:451:11 | LL | match ref_opt_never { | ^^^^^^^^^^^^^ pattern `&Some(_)` not covered @@ -565,7 +579,7 @@ LL + &Some(_) => todo!() | error[E0004]: non-exhaustive patterns: `Some(_)` not covered - --> $DIR/empty-types.rs:483:11 + --> $DIR/empty-types.rs:492:11 | LL | match *ref_opt_never { | ^^^^^^^^^^^^^^ pattern `Some(_)` not covered @@ -584,7 +598,7 @@ LL + Some(_) => todo!() | error[E0004]: non-exhaustive patterns: `Err(_)` not covered - --> 
$DIR/empty-types.rs:531:11 + --> $DIR/empty-types.rs:540:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(_)` not covered @@ -603,7 +617,7 @@ LL + Err(_) => todo!() | error[E0004]: non-exhaustive patterns: `Err(_)` not covered - --> $DIR/empty-types.rs:542:11 + --> $DIR/empty-types.rs:551:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(_)` not covered @@ -622,7 +636,7 @@ LL + Err(_) => todo!() | error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:561:11 + --> $DIR/empty-types.rs:570:11 | LL | match *ref_tuple_half_never {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -636,7 +650,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:594:9 + --> $DIR/empty-types.rs:603:9 | LL | _ => {} | ^ @@ -644,7 +658,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:597:9 + --> $DIR/empty-types.rs:606:9 | LL | _x => {} | ^^ @@ -652,7 +666,7 @@ LL | _x => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:600:9 + --> $DIR/empty-types.rs:609:9 | LL | _ if false => {} | ^ @@ -660,7 +674,7 @@ LL | _ if false => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:603:9 + --> $DIR/empty-types.rs:612:9 | LL | _x if false => {} | ^^ @@ -668,7 +682,7 @@ LL | _x if false => {} = note: this pattern matches no values because `!` is uninhabited error[E0004]: non-exhaustive patterns: `&_` not covered - --> $DIR/empty-types.rs:628:11 + --> $DIR/empty-types.rs:637:11 | LL | match ref_never { | ^^^^^^^^^ pattern `&_` not covered @@ -684,7 +698,7 @@ LL + &_ => todo!() | error[E0004]: non-exhaustive patterns: `Ok(_)` not covered - --> $DIR/empty-types.rs:644:11 + --> $DIR/empty-types.rs:653:11 | LL | match *ref_result_never { | ^^^^^^^^^^^^^^^^^ pattern `Ok(_)` not covered @@ -703,7 +717,7 @@ LL + Ok(_) => todo!() | error[E0004]: non-exhaustive patterns: `Some(_)` not covered - --> $DIR/empty-types.rs:664:11 + --> $DIR/empty-types.rs:673:11 | LL | match *x { | ^^ pattern `Some(_)` not covered @@ -721,7 +735,7 @@ LL ~ None => {}, LL + Some(_) => todo!() | -error: aborting due to 64 previous errors +error: aborting due to 65 previous errors Some errors have detailed explanations: E0004, E0005. For more information about an error, try `rustc --explain E0004`. diff --git a/tests/ui/pattern/usefulness/empty-types.rs b/tests/ui/pattern/usefulness/empty-types.rs index 639c48cea12..d561a0e9c12 100644 --- a/tests/ui/pattern/usefulness/empty-types.rs +++ b/tests/ui/pattern/usefulness/empty-types.rs @@ -17,7 +17,7 @@ #[derive(Copy, Clone)] enum Void {} -/// A bunch of never situations that can't be normally constructed. +/// A bunch of never situations that can't be normally constructed so we take them as argument. #[derive(Copy, Clone)] struct NeverBundle { never: !, @@ -272,6 +272,8 @@ fn nested_validity_tracking(bundle: NeverBundle) { let ref_never: &! = &never; let tuple_never: (!, !) = bundle.tuple_never; let result_never: Result<!, !> = bundle.result_never; + let result_never_err: Result<u8, !> = Ok(0); + let ptr_result_never_err: *const Result<u8, !> = &result_never_err as *const _; let union_never = Uninit::<!>::new(); // These should be considered known_valid and warn unreachable. @@ -287,6 +289,13 @@ fn nested_validity_tracking(bundle: NeverBundle) { } // These should be considered !known_valid and not warn unreachable. 
+ unsafe { + match *ptr_result_never_err { + Ok(_) => {} + Err(_) => {} + } + let Ok(_) = *ptr_result_never_err; //[normal,never_pats]~ ERROR refutable pattern + } match ref_never { &_ => {} } diff --git a/tests/ui/proc-macro/auxiliary/parse-invis-delim-issue-128895.rs b/tests/ui/proc-macro/auxiliary/parse-invis-delim-issue-128895.rs new file mode 100644 index 00000000000..07e135ee8eb --- /dev/null +++ b/tests/ui/proc-macro/auxiliary/parse-invis-delim-issue-128895.rs @@ -0,0 +1,44 @@ +//@ force-host +//@ no-prefer-dynamic + +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::*; + +// This proc macro ignores its input and returns this token stream +// +// impl <«A1»: Comparable> Comparable for («A1»,) {} +// +// where `«`/`»` are invisible delimiters. This was being misparsed in bug +// #128895. +#[proc_macro] +pub fn main(_input: TokenStream) -> TokenStream { + let a1 = TokenTree::Group( + Group::new( + Delimiter::None, + std::iter::once(TokenTree::Ident(Ident::new("A1", Span::call_site()))).collect(), + ) + ); + vec![ + TokenTree::Ident(Ident::new("impl", Span::call_site())), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + a1.clone(), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Ident(Ident::new("Comparable", Span::call_site())), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + TokenTree::Ident(Ident::new("Comparable", Span::call_site())), + TokenTree::Ident(Ident::new("for", Span::call_site())), + TokenTree::Group( + Group::new( + Delimiter::Parenthesis, + vec![ + a1.clone(), + TokenTree::Punct(Punct::new(',', Spacing::Alone)), + ].into_iter().collect::<TokenStream>(), + ) + ), + TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), + ].into_iter().collect::<TokenStream>() +} diff --git a/tests/ui/proc-macro/parse-invis-delim-issue-128895.rs b/tests/ui/proc-macro/parse-invis-delim-issue-128895.rs new file mode 100644 index 00000000000..3d5af5fee21 --- /dev/null +++ b/tests/ui/proc-macro/parse-invis-delim-issue-128895.rs @@ -0,0 +1,14 @@ +//@ aux-build:parse-invis-delim-issue-128895.rs +//@ check-pass + +#![no_std] // Don't load unnecessary hygiene information from std +extern crate std; + +#[macro_use] +extern crate parse_invis_delim_issue_128895; + +trait Comparable {} + +parse_invis_delim_issue_128895::main!(); + +fn main() {} diff --git a/tests/ui/rust-2024/unsafe-before_exec.e2024.stderr b/tests/ui/rust-2024/unsafe-before_exec.e2024.stderr new file mode 100644 index 00000000000..2798ccdefd0 --- /dev/null +++ b/tests/ui/rust-2024/unsafe-before_exec.e2024.stderr @@ -0,0 +1,11 @@ +error[E0133]: call to unsafe function `before_exec` is unsafe and requires unsafe block + --> $DIR/unsafe-before_exec.rs:14:5 + | +LL | cmd.before_exec(|| Ok(())); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ call to unsafe function + | + = note: consult the function's documentation for information on how to avoid undefined behavior + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0133`. 
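The `unsafe-before_exec.e2024.stderr` expectation above (its source file follows in the next hunk) exercises the edition-2024 rule that a call to `CommandExt::before_exec` must sit inside an `unsafe` block. As a minimal sketch of how caller code would typically adapt — assuming a Unix target, since `CommandExt` is Unix-only — one can either wrap the existing call in `unsafe` as the diagnostic suggests, or migrate to `pre_exec`, the non-deprecated replacement that has always been declared `unsafe fn`:

use std::os::unix::process::CommandExt;
use std::process::Command;

fn main() -> std::io::Result<()> {
    let mut cmd = Command::new("sleep");
    // `pre_exec` is the already-`unsafe`, non-deprecated counterpart of
    // `before_exec`, so this compiles the same way on every edition.
    // The closure runs in the child between fork() and exec(), which is why
    // the call is unsafe: it must only perform async-signal-safe work.
    unsafe {
        cmd.pre_exec(|| Ok(()));
    }
    // Mirroring the test: don't actually spawn the command.
    drop(cmd);
    Ok(())
}

This is a sketch of the expected migration path, not part of the patch itself; the test in the following hunk keeps the deprecated `before_exec` call precisely to check that the edition-2024 error fires.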
diff --git a/tests/ui/rust-2024/unsafe-before_exec.rs b/tests/ui/rust-2024/unsafe-before_exec.rs new file mode 100644 index 00000000000..540394da80e --- /dev/null +++ b/tests/ui/rust-2024/unsafe-before_exec.rs @@ -0,0 +1,17 @@ +//@ revisions: e2021 e2024 +//@ only-unix +//@[e2021] edition: 2021 +//@[e2021] check-pass +//@[e2024] edition: 2024 +//@[e2024] compile-flags: -Zunstable-options + +use std::process::Command; +use std::os::unix::process::CommandExt; + +#[allow(deprecated)] +fn main() { + let mut cmd = Command::new("sleep"); + cmd.before_exec(|| Ok(())); + //[e2024]~^ ERROR call to unsafe function `before_exec` is unsafe + drop(cmd); // we don't actually run the command. +} diff --git a/tests/ui/rust-2024/unsafe-env-suggestion.stderr b/tests/ui/rust-2024/unsafe-env-suggestion.stderr index 3aa10a3bed6..1506741f6bc 100644 --- a/tests/ui/rust-2024/unsafe-env-suggestion.stderr +++ b/tests/ui/rust-2024/unsafe-env-suggestion.stderr @@ -11,7 +11,7 @@ note: the lint level is defined here | LL | #![deny(deprecated_safe_2024)] | ^^^^^^^^^^^^^^^^^^^^ -help: you can wrap the call in an `unsafe` block if you can guarantee the code is only ever called from single-threaded code +help: you can wrap the call in an `unsafe` block if you can guarantee that the environment access only happens in single-threaded code | LL + // TODO: Audit that the environment access only happens in single-threaded code. LL ~ unsafe { env::set_var("FOO", "BAR") }; @@ -25,7 +25,7 @@ LL | env::remove_var("FOO"); | = warning: this is accepted in the current edition (Rust 2015) but is a hard error in Rust 2024! = note: for more information, see issue #27970 <https://github.com/rust-lang/rust/issues/27970> -help: you can wrap the call in an `unsafe` block if you can guarantee the code is only ever called from single-threaded code +help: you can wrap the call in an `unsafe` block if you can guarantee that the environment access only happens in single-threaded code | LL + // TODO: Audit that the environment access only happens in single-threaded code. 
LL ~ unsafe { env::remove_var("FOO") }; diff --git a/tests/ui/sanitizer/cfi-assoc-ty-lifetime-issue-123053.rs b/tests/ui/sanitizer/cfi/assoc-ty-lifetime-issue-123053.rs index dd604b6bf7d..dd604b6bf7d 100644 --- a/tests/ui/sanitizer/cfi-assoc-ty-lifetime-issue-123053.rs +++ b/tests/ui/sanitizer/cfi/assoc-ty-lifetime-issue-123053.rs diff --git a/tests/ui/sanitizer/cfi-async-closures.rs b/tests/ui/sanitizer/cfi/async-closures.rs index d94f2992d84..d94f2992d84 100644 --- a/tests/ui/sanitizer/cfi-async-closures.rs +++ b/tests/ui/sanitizer/cfi/async-closures.rs diff --git a/tests/ui/sanitizer/cfi-can-reveal-opaques.rs b/tests/ui/sanitizer/cfi/can-reveal-opaques.rs index 55988a62a8c..55988a62a8c 100644 --- a/tests/ui/sanitizer/cfi-can-reveal-opaques.rs +++ b/tests/ui/sanitizer/cfi/can-reveal-opaques.rs diff --git a/tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.rs b/tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.rs index 10c5bf6ea5e..10c5bf6ea5e 100644 --- a/tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.rs +++ b/tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.rs diff --git a/tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.stderr b/tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.stderr index de67d6a6b7f..de67d6a6b7f 100644 --- a/tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.stderr +++ b/tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.stderr diff --git a/tests/ui/sanitizer/cfi-closures.rs b/tests/ui/sanitizer/cfi/closures.rs index 9f9002da674..9f9002da674 100644 --- a/tests/ui/sanitizer/cfi-closures.rs +++ b/tests/ui/sanitizer/cfi/closures.rs diff --git a/tests/ui/sanitizer/cfi-complex-receiver.rs b/tests/ui/sanitizer/cfi/complex-receiver.rs index c7b45a775ca..c7b45a775ca 100644 --- a/tests/ui/sanitizer/cfi-complex-receiver.rs +++ b/tests/ui/sanitizer/cfi/complex-receiver.rs diff --git a/tests/ui/sanitizer/cfi-coroutine.rs b/tests/ui/sanitizer/cfi/coroutine.rs index ad994fcf737..ad994fcf737 100644 --- a/tests/ui/sanitizer/cfi-coroutine.rs +++ b/tests/ui/sanitizer/cfi/coroutine.rs diff --git a/tests/ui/sanitizer/cfi-drop-in-place.rs b/tests/ui/sanitizer/cfi/drop-in-place.rs index 8ce2c432602..8ce2c432602 100644 --- a/tests/ui/sanitizer/cfi-drop-in-place.rs +++ b/tests/ui/sanitizer/cfi/drop-in-place.rs diff --git a/tests/ui/sanitizer/cfi-drop-no-principal.rs b/tests/ui/sanitizer/cfi/drop-no-principal.rs index c1c88c8c71c..c1c88c8c71c 100644 --- a/tests/ui/sanitizer/cfi-drop-no-principal.rs +++ b/tests/ui/sanitizer/cfi/drop-no-principal.rs diff --git a/tests/ui/sanitizer/cfi-fn-ptr.rs b/tests/ui/sanitizer/cfi/fn-ptr.rs index 505b4b8e7f0..505b4b8e7f0 100644 --- a/tests/ui/sanitizer/cfi-fn-ptr.rs +++ b/tests/ui/sanitizer/cfi/fn-ptr.rs diff --git a/tests/ui/sanitizer/cfi-generalize-pointers-attr-cfg.rs b/tests/ui/sanitizer/cfi/generalize-pointers-attr-cfg.rs index d46002c69fd..d46002c69fd 100644 --- a/tests/ui/sanitizer/cfi-generalize-pointers-attr-cfg.rs +++ b/tests/ui/sanitizer/cfi/generalize-pointers-attr-cfg.rs diff --git a/tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.rs b/tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.rs index 8ba13bd3639..8ba13bd3639 100644 --- a/tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.rs +++ b/tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.rs diff --git a/tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.stderr b/tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.stderr index 621708de241..621708de241 100644 --- 
a/tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.stderr +++ b/tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.stderr diff --git a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.rs b/tests/ui/sanitizer/cfi/invalid-attr-encoding.rs index 7ef6bd2f0ac..7ef6bd2f0ac 100644 --- a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.rs +++ b/tests/ui/sanitizer/cfi/invalid-attr-encoding.rs diff --git a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.stderr b/tests/ui/sanitizer/cfi/invalid-attr-encoding.stderr index 93ec134241e..1aa6bef17b1 100644 --- a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.stderr +++ b/tests/ui/sanitizer/cfi/invalid-attr-encoding.stderr @@ -1,5 +1,5 @@ error: malformed `cfi_encoding` attribute input - --> $DIR/cfi-invalid-attr-cfi-encoding.rs:10:1 + --> $DIR/invalid-attr-encoding.rs:10:1 | LL | #[cfi_encoding] | ^^^^^^^^^^^^^^^ help: must be of the form: `#[cfi_encoding = "encoding"]` diff --git a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.aarch64.stderr b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.aarch64.stderr index 7f596a19104..7f596a19104 100644 --- a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.aarch64.stderr +++ b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.aarch64.stderr diff --git a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.rs b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.rs index c628709d7a1..c628709d7a1 100644 --- a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.rs +++ b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.rs diff --git a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.x86_64.stderr b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.x86_64.stderr index 7f596a19104..7f596a19104 100644 --- a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.x86_64.stderr +++ b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.x86_64.stderr diff --git a/tests/ui/sanitizer/cfi-normalize-integers-attr-cfg.rs b/tests/ui/sanitizer/cfi/normalize-integers-attr-cfg.rs index 24c2c2c13da..24c2c2c13da 100644 --- a/tests/ui/sanitizer/cfi-normalize-integers-attr-cfg.rs +++ b/tests/ui/sanitizer/cfi/normalize-integers-attr-cfg.rs diff --git a/tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.rs b/tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.rs index a7ecefbf7ef..a7ecefbf7ef 100644 --- a/tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.rs +++ b/tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.rs diff --git a/tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.stderr b/tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.stderr index 748fb60dad9..748fb60dad9 100644 --- a/tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.stderr +++ b/tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.stderr diff --git a/tests/ui/sanitizer/cfi-requires-lto.rs b/tests/ui/sanitizer/cfi/requires-lto.rs index 5a34f696e05..5a34f696e05 100644 --- a/tests/ui/sanitizer/cfi-requires-lto.rs +++ b/tests/ui/sanitizer/cfi/requires-lto.rs diff --git a/tests/ui/sanitizer/cfi-requires-lto.stderr b/tests/ui/sanitizer/cfi/requires-lto.stderr index efc0c43138e..efc0c43138e 100644 --- a/tests/ui/sanitizer/cfi-requires-lto.stderr +++ b/tests/ui/sanitizer/cfi/requires-lto.stderr diff --git a/tests/ui/sanitizer/cfi-self-ref.rs b/tests/ui/sanitizer/cfi/self-ref.rs index 3b524ac661c..3b524ac661c 100644 --- a/tests/ui/sanitizer/cfi-self-ref.rs +++ b/tests/ui/sanitizer/cfi/self-ref.rs diff --git a/tests/ui/sanitizer/cfi-sized-associated-ty.rs b/tests/ui/sanitizer/cfi/sized-associated-ty.rs index f5b4e22e9d9..f5b4e22e9d9 100644 --- 
a/tests/ui/sanitizer/cfi-sized-associated-ty.rs +++ b/tests/ui/sanitizer/cfi/sized-associated-ty.rs diff --git a/tests/ui/sanitizer/cfi-supertraits.rs b/tests/ui/sanitizer/cfi/supertraits.rs index 4bb6177577f..4bb6177577f 100644 --- a/tests/ui/sanitizer/cfi-supertraits.rs +++ b/tests/ui/sanitizer/cfi/supertraits.rs diff --git a/tests/ui/sanitizer/cfi-virtual-auto.rs b/tests/ui/sanitizer/cfi/virtual-auto.rs index 6971d516a20..6971d516a20 100644 --- a/tests/ui/sanitizer/cfi-virtual-auto.rs +++ b/tests/ui/sanitizer/cfi/virtual-auto.rs diff --git a/tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.rs b/tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.rs index 954e4ec3b85..954e4ec3b85 100644 --- a/tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.rs +++ b/tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.rs diff --git a/tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.stderr b/tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.stderr index 8d6dc1d8f1e..8d6dc1d8f1e 100644 --- a/tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.stderr +++ b/tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.stderr diff --git a/tests/ui/traits/next-solver/alias-bound-unsound.rs b/tests/ui/traits/next-solver/alias-bound-unsound.rs index a5bd3e7afa8..272e5db3b7a 100644 --- a/tests/ui/traits/next-solver/alias-bound-unsound.rs +++ b/tests/ui/traits/next-solver/alias-bound-unsound.rs @@ -27,5 +27,6 @@ fn main() { //~| ERROR overflow evaluating the requirement `&<() as Foo>::Item well-formed` //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` + //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` println!("{x}"); } diff --git a/tests/ui/traits/next-solver/alias-bound-unsound.stderr b/tests/ui/traits/next-solver/alias-bound-unsound.stderr index a5c2f215134..e5cf5b6bc3d 100644 --- a/tests/ui/traits/next-solver/alias-bound-unsound.stderr +++ b/tests/ui/traits/next-solver/alias-bound-unsound.stderr @@ -44,6 +44,12 @@ LL | drop(<() as Foo>::copy_me(&x)); | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 6 previous errors +error[E0275]: overflow evaluating the requirement `<() as Foo>::Item == _` + --> $DIR/alias-bound-unsound.rs:24:31 + | +LL | drop(<() as Foo>::copy_me(&x)); + | ^^ + +error: aborting due to 7 previous errors For more information about this error, try `rustc --explain E0275`. diff --git a/tests/ui/try-trait/bad-interconversion.stderr b/tests/ui/try-trait/bad-interconversion.stderr index 642a93d64e2..9aab2cf6ab8 100644 --- a/tests/ui/try-trait/bad-interconversion.stderr +++ b/tests/ui/try-trait/bad-interconversion.stderr @@ -45,7 +45,7 @@ LL | Some(Err("hello")?) | ^ use `.ok()?` if you want to discard the `Result<Infallible, &str>` error information | = help: the trait `FromResidual<Result<Infallible, &str>>` is not implemented for `Option<u16>` - = help: the trait `FromResidual` is implemented for `Option<T>` + = help: the trait `FromResidual<Option<Infallible>>` is implemented for `Option<T>` error[E0277]: the `?` operator can only be used on `Option`s in a function that returns `Option` --> $DIR/bad-interconversion.rs:27:33 @@ -56,7 +56,7 @@ LL | Some(ControlFlow::Break(123)?) 
| ^ this `?` produces `ControlFlow<{integer}, Infallible>`, which is incompatible with `Option<u64>` | = help: the trait `FromResidual<ControlFlow<{integer}, Infallible>>` is not implemented for `Option<u64>` - = help: the trait `FromResidual` is implemented for `Option<T>` + = help: the trait `FromResidual<Option<Infallible>>` is implemented for `Option<T>` error[E0277]: the `?` operator can only be used on `ControlFlow`s in a function that returns `ControlFlow` --> $DIR/bad-interconversion.rs:32:39 diff --git a/tests/ui/try-trait/option-to-result.stderr b/tests/ui/try-trait/option-to-result.stderr index 8055b2a0b04..1a5a925f92f 100644 --- a/tests/ui/try-trait/option-to-result.stderr +++ b/tests/ui/try-trait/option-to-result.stderr @@ -20,7 +20,7 @@ LL | a?; | ^ use `.ok()?` if you want to discard the `Result<Infallible, i32>` error information | = help: the trait `FromResidual<Result<Infallible, i32>>` is not implemented for `Option<i32>` - = help: the trait `FromResidual` is implemented for `Option<T>` + = help: the trait `FromResidual<Option<Infallible>>` is implemented for `Option<T>` error: aborting due to 2 previous errors diff --git a/triagebot.toml b/triagebot.toml index 33108f743cb..a98d5f6a7c2 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -913,7 +913,7 @@ cc = ["@kobzol"] [assign] warn_non_default_branch = true contributing_url = "https://rustc-dev-guide.rust-lang.org/getting-started.html" -users_on_vacation = ["jyn514", "jhpratt", "oli-obk", "michaelwoerister"] +users_on_vacation = ["jyn514", "jhpratt", "oli-obk"] [assign.adhoc_groups] compiler-team = [ |
