915 files changed, 20887 insertions, 6797 deletions
diff --git a/Cargo.lock b/Cargo.lock index 1fd288af3a1..eeb3c99a294 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,7 +195,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01667f6f40216b9a0b2945e05fed5f1ad0ab6470e69cb9378001e37b1c0668e4" dependencies = [ - "object 0.36.5", + "object 0.36.7", ] [[package]] @@ -405,9 +405,9 @@ version = "0.1.0" [[package]] name = "cc" -version = "1.2.0" +version = "1.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" +checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" dependencies = [ "shlex", ] @@ -503,9 +503,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.39" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4db298d517d5fa00b2b84bbe044efd3fde43874a41db0d46f91994646a2da4" +checksum = "ac2e663e3e3bed2d32d065a8404024dad306e699a04263ec59919529f803aee9" dependencies = [ "clap", ] @@ -1107,9 +1107,9 @@ checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "env_filter" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" dependencies = [ "log", "regex", @@ -1117,9 +1117,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "anstream", "anstyle", @@ -1153,9 +1153,9 @@ dependencies = [ [[package]] name = "expect-test" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0" +checksum = "63af43ff4431e848fb47472a920f14fa71c24de13255a5692e93d4e90302acb0" dependencies = [ "dissimilar", "once_cell", @@ -1212,7 +1212,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", - "miniz_oxide 0.8.1", + "miniz_oxide 0.8.2", ] [[package]] @@ -1257,9 +1257,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" [[package]] name = "form_urlencoded" @@ -2305,9 +2305,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ef2593ffb6958c941575cee70c8e257438749971869c4ae5acf6f91a168a61" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" dependencies = [ "adler2", ] @@ -2526,9 +2526,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.5" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum 
= "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "crc32fast", "flate2", @@ -2536,7 +2536,7 @@ dependencies = [ "indexmap", "memchr", "ruzstd", - "wasmparser 0.218.0", + "wasmparser 0.222.0", ] [[package]] @@ -2706,7 +2706,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 2.0.7", + "thiserror 2.0.9", "ucd-trie", ] @@ -3169,7 +3169,7 @@ dependencies = [ "build_helper", "gimli 0.31.1", "libc", - "object 0.36.5", + "object 0.36.7", "regex", "serde_json", "similar", @@ -3488,7 +3488,7 @@ dependencies = [ "itertools", "libc", "measureme", - "object 0.36.5", + "object 0.36.7", "rustc-demangle", "rustc_abi", "rustc_ast", @@ -3527,7 +3527,7 @@ dependencies = [ "either", "itertools", "libc", - "object 0.36.5", + "object 0.36.7", "pathdiff", "regex", "rustc_abi", @@ -4529,7 +4529,7 @@ name = "rustc_target" version = "0.0.0" dependencies = [ "bitflags", - "object 0.36.5", + "object 0.36.7", "rustc_abi", "rustc_data_structures", "rustc_fs_util", @@ -4882,9 +4882,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" dependencies = [ "indexmap", "itoa", @@ -5279,11 +5279,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.7" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767" +checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" dependencies = [ - "thiserror-impl 2.0.7", + "thiserror-impl 2.0.9", ] [[package]] @@ -5299,9 +5299,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.7" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36" +checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" dependencies = [ "proc-macro2", "quote", @@ -5316,7 +5316,7 @@ checksum = "813ba76597db32dc4f6992fd8bf8f394715b88d352fd97401da67dab6283b4c6" dependencies = [ "gimli 0.30.0", "hashbrown 0.14.5", - "object 0.36.5", + "object 0.36.7", "tracing", ] @@ -5413,9 +5413,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -5955,12 +5955,12 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.221.2" +version = "0.222.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17a3bd88f2155da63a1f2fcb8a56377a24f0b6dfed12733bb5f544e86f690c5" +checksum = "3432682105d7e994565ef928ccf5856cf6af4ba3dddebedb737f61caed70f956" dependencies = [ "leb128", - "wasmparser 0.221.2", + "wasmparser 0.222.0", ] [[package]] @@ -5981,15 +5981,6 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.218.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09e46c7fceceaa72b2dd1a8a137ea7fd8f93dfaa69806010a709918e496c5dc" -dependencies = [ - 
"bitflags", -] - -[[package]] -name = "wasmparser" version = "0.219.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c771866898879073c53b565a6c7b49953795159836714ac56a5befb581227c5" @@ -6004,9 +5995,9 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.221.2" +version = "0.222.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9845c470a2e10b61dd42c385839cdd6496363ed63b5c9e420b5488b77bd22083" +checksum = "4adf50fde1b1a49c1add6a80d47aea500c88db70551805853aa8b88f3ea27ab5" dependencies = [ "bitflags", "indexmap", @@ -6015,22 +6006,22 @@ dependencies = [ [[package]] name = "wast" -version = "221.0.2" +version = "222.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc4470b9de917ba199157d1f0ae104f2ae362be728c43e68c571c7715bd629e" +checksum = "5ce7191f4b7da0dd300cc32476abae6457154e4625d9b1bc26890828a9a26f6e" dependencies = [ "bumpalo", "leb128", "memchr", "unicode-width 0.2.0", - "wasm-encoder 0.221.2", + "wasm-encoder 0.222.0", ] [[package]] name = "wat" -version = "1.221.2" +version = "1.222.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b1f3c6d82af47286494c6caea1d332037f5cbeeac82bbf5ef59cb8c201c466e" +checksum = "8fde61b4b52f9a84ae31b5e8902a2cd3162ea45d8bf564c729c3288fe52f4334" dependencies = [ "wast", ] diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index cec868e5c8e..31e6750a678 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -31,7 +31,7 @@ use rustc_data_structures::sync::Lrc; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; pub use rustc_span::AttrId; use rustc_span::source_map::{Spanned, respan}; -use rustc_span::{DUMMY_SP, ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; pub use crate::format::*; @@ -387,22 +387,15 @@ impl GenericParam { /// Represents lifetime, type and const parameters attached to a declaration of /// a function, enum, trait, etc. -#[derive(Clone, Encodable, Decodable, Debug)] +#[derive(Clone, Encodable, Decodable, Debug, Default)] pub struct Generics { pub params: ThinVec<GenericParam>, pub where_clause: WhereClause, pub span: Span, } -impl Default for Generics { - /// Creates an instance of `Generics`. - fn default() -> Generics { - Generics { params: ThinVec::new(), where_clause: Default::default(), span: DUMMY_SP } - } -} - /// A where-clause in a definition. -#[derive(Clone, Encodable, Decodable, Debug)] +#[derive(Clone, Encodable, Decodable, Debug, Default)] pub struct WhereClause { /// `true` if we ate a `where` token. /// @@ -419,12 +412,6 @@ impl WhereClause { } } -impl Default for WhereClause { - fn default() -> WhereClause { - WhereClause { has_where_token: false, predicates: ThinVec::new(), span: DUMMY_SP } - } -} - /// A single predicate in a where-clause. 
#[derive(Clone, Encodable, Decodable, Debug)] pub struct WherePredicate { diff --git a/compiler/rustc_ast/src/util/parser.rs b/compiler/rustc_ast/src/util/parser.rs index 1d4b01aa94c..8f2b7a23c01 100644 --- a/compiler/rustc_ast/src/util/parser.rs +++ b/compiler/rustc_ast/src/util/parser.rs @@ -153,9 +153,10 @@ impl AssocOp { match *self { Assign | AssignOp(_) => Fixity::Right, As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd - | BitXor | BitOr | Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual - | LAnd | LOr => Fixity::Left, - DotDot | DotDotEq => Fixity::None, + | BitXor | BitOr | LAnd | LOr => Fixity::Left, + Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | DotDot | DotDotEq => { + Fixity::None + } } } diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index f885b20c761..3fbf1210186 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -263,7 +263,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { &self, negative_impls, span.to(of_trait.as_ref().map_or(span, |t| t.path.span)), - "negative trait bounds are not yet fully implemented; \ + "negative trait bounds are not fully implemented; \ use marker types for now" ); } diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 24c1c0f221e..172df102929 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -1204,8 +1204,10 @@ impl<'a> State<'a> { } ast::TyKind::Path(Some(qself), path) => self.print_qpath(path, qself, false), ast::TyKind::TraitObject(bounds, syntax) => { - if *syntax == ast::TraitObjectSyntax::Dyn { - self.word_nbsp("dyn"); + match syntax { + ast::TraitObjectSyntax::Dyn => self.word_nbsp("dyn"), + ast::TraitObjectSyntax::DynStar => self.word_nbsp("dyn*"), + ast::TraitObjectSyntax::None => {} } self.print_type_bounds(bounds); } diff --git a/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs b/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs index 6f5382ce61d..ff466703f73 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/fixup.rs @@ -1,7 +1,9 @@ use rustc_ast::Expr; use rustc_ast::util::{classify, parser}; -#[derive(Copy, Clone, Debug)] +// The default amount of fixing is minimal fixing, so all fixups are set to `false` by `Default`. +// Fixups should be turned on in a targeted fashion where needed. +#[derive(Copy, Clone, Debug, Default)] pub(crate) struct FixupContext { /// Print expression such that it can be parsed back as a statement /// consisting of the original expression. @@ -93,20 +95,6 @@ pub(crate) struct FixupContext { parenthesize_exterior_struct_lit: bool, } -/// The default amount of fixing is minimal fixing. Fixups should be turned on -/// in a targeted fashion where needed. -impl Default for FixupContext { - fn default() -> Self { - FixupContext { - stmt: false, - leftmost_subexpression_in_stmt: false, - match_arm: false, - leftmost_subexpression_in_match_arm: false, - parenthesize_exterior_struct_lit: false, - } - } -} - impl FixupContext { /// Create the initial fixup for printing an expression in statement /// position. 
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 19b5c8689c8..8b968177c3c 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -202,6 +202,7 @@ fn do_mir_borrowck<'tcx>( polonius_output, opt_closure_req, nll_errors, + localized_outlives_constraints, } = nll::compute_regions( &infcx, free_regions, @@ -315,6 +316,16 @@ fn do_mir_borrowck<'tcx>( mbcx.report_move_errors(); + // If requested, dump polonius MIR. + polonius::dump_polonius_mir( + &infcx, + body, + ®ioncx, + &borrow_set, + localized_outlives_constraints, + &opt_closure_req, + ); + // For each non-user used mutable variable, check if it's been assigned from // a user-declared local. If so, then put that local into the used_mut set. // Note that this set is expected to be small - only upvars from closures @@ -809,7 +820,6 @@ use self::ReadOrWrite::{Activation, Read, Reservation, Write}; #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum ArtificialField { - ArrayLength, FakeBorrow, } @@ -1257,16 +1267,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { ); } - &(Rvalue::Len(place) | Rvalue::Discriminant(place)) => { - let af = match *rvalue { - Rvalue::Len(..) => Some(ArtificialField::ArrayLength), - Rvalue::Discriminant(..) => None, - _ => unreachable!(), - }; + &Rvalue::Discriminant(place) => { self.access_place( location, (place, span), - (Shallow(af), Read(ReadKind::Copy)), + (Shallow(None), Read(ReadKind::Copy)), LocalMutationIsAllowed::No, state, ); @@ -1602,6 +1607,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -1643,6 +1649,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) diff --git a/compiler/rustc_borrowck/src/member_constraints.rs b/compiler/rustc_borrowck/src/member_constraints.rs index fc621a3b828..a0adf471fd3 100644 --- a/compiler/rustc_borrowck/src/member_constraints.rs +++ b/compiler/rustc_borrowck/src/member_constraints.rs @@ -4,10 +4,9 @@ use std::ops::Index; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxIndexMap; use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::infer::MemberConstraint; use rustc_middle::ty::{self, Ty}; use rustc_span::Span; -use tracing::debug; +use tracing::instrument; /// Compactly stores a set of `R0 member of [R1...Rn]` constraints, /// indexed by the region `R0`. @@ -23,7 +22,7 @@ where /// Stores the data about each `R0 member of [R1..Rn]` constraint. /// These are organized into a linked list, so each constraint /// contains the index of the next constraint with the same `R0`. - constraints: IndexVec<NllMemberConstraintIndex, NllMemberConstraint<'tcx>>, + constraints: IndexVec<NllMemberConstraintIndex, MemberConstraint<'tcx>>, /// Stores the `R1..Rn` regions for *all* sets. For any given /// constraint, we keep two indices so that we can pull out a @@ -33,7 +32,7 @@ where /// Represents a `R0 member of [R1..Rn]` constraint #[derive(Debug)] -pub(crate) struct NllMemberConstraint<'tcx> { +pub(crate) struct MemberConstraint<'tcx> { next_constraint: Option<NllMemberConstraintIndex>, /// The span where the hidden type was instantiated. 
@@ -70,37 +69,34 @@ impl Default for MemberConstraintSet<'_, ty::RegionVid> { } impl<'tcx> MemberConstraintSet<'tcx, ty::RegionVid> { + pub(crate) fn is_empty(&self) -> bool { + self.constraints.is_empty() + } + /// Pushes a member constraint into the set. - /// - /// The input member constraint `m_c` is in the form produced by - /// the `rustc_middle::infer` code. - /// - /// The `to_region_vid` callback fn is used to convert the regions - /// within into `RegionVid` format -- it typically consults the - /// `UniversalRegions` data structure that is known to the caller - /// (but which this code is unaware of). - pub(crate) fn push_constraint( + #[instrument(level = "debug", skip(self))] + pub(crate) fn add_member_constraint( &mut self, - m_c: &MemberConstraint<'tcx>, - mut to_region_vid: impl FnMut(ty::Region<'tcx>) -> ty::RegionVid, + key: ty::OpaqueTypeKey<'tcx>, + hidden_ty: Ty<'tcx>, + definition_span: Span, + member_region_vid: ty::RegionVid, + choice_regions: &[ty::RegionVid], ) { - debug!("push_constraint(m_c={:?})", m_c); - let member_region_vid: ty::RegionVid = to_region_vid(m_c.member_region); let next_constraint = self.first_constraints.get(&member_region_vid).cloned(); let start_index = self.choice_regions.len(); - let end_index = start_index + m_c.choice_regions.len(); - debug!("push_constraint: member_region_vid={:?}", member_region_vid); - let constraint_index = self.constraints.push(NllMemberConstraint { + self.choice_regions.extend(choice_regions); + let end_index = self.choice_regions.len(); + let constraint_index = self.constraints.push(MemberConstraint { next_constraint, member_region_vid, - definition_span: m_c.definition_span, - hidden_ty: m_c.hidden_ty, - key: m_c.key, + definition_span, + hidden_ty, + key, start_index, end_index, }); self.first_constraints.insert(member_region_vid, constraint_index); - self.choice_regions.extend(m_c.choice_regions.iter().map(|&r| to_region_vid(r))); } } @@ -182,7 +178,7 @@ where /// R0 member of [R1..Rn] /// ``` pub(crate) fn choice_regions(&self, pci: NllMemberConstraintIndex) -> &[ty::RegionVid] { - let NllMemberConstraint { start_index, end_index, .. } = &self.constraints[pci]; + let MemberConstraint { start_index, end_index, .. 
} = &self.constraints[pci]; &self.choice_regions[*start_index..*end_index] } } @@ -191,9 +187,9 @@ impl<'tcx, R> Index<NllMemberConstraintIndex> for MemberConstraintSet<'tcx, R> where R: Copy + Eq, { - type Output = NllMemberConstraint<'tcx>; + type Output = MemberConstraint<'tcx>; - fn index(&self, i: NllMemberConstraintIndex) -> &NllMemberConstraint<'tcx> { + fn index(&self, i: NllMemberConstraintIndex) -> &MemberConstraint<'tcx> { &self.constraints[i] } } @@ -215,7 +211,7 @@ where /// target_list: A -> B -> C -> D -> E -> F -> (None) /// ``` fn append_list( - constraints: &mut IndexSlice<NllMemberConstraintIndex, NllMemberConstraint<'_>>, + constraints: &mut IndexSlice<NllMemberConstraintIndex, MemberConstraint<'_>>, target_list: NllMemberConstraintIndex, source_list: NllMemberConstraintIndex, ) { diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index 7656031ed3a..abe27555b18 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -29,6 +29,7 @@ use crate::consumers::ConsumerOptions; use crate::diagnostics::RegionErrors; use crate::facts::{AllFacts, AllFactsExt, RustcFacts}; use crate::location::LocationTable; +use crate::polonius::LocalizedOutlivesConstraintSet; use crate::region_infer::RegionInferenceContext; use crate::type_check::{self, MirTypeckResults}; use crate::universal_regions::UniversalRegions; @@ -45,6 +46,9 @@ pub(crate) struct NllOutput<'tcx> { pub polonius_output: Option<Box<PoloniusOutput>>, pub opt_closure_req: Option<ClosureRegionRequirements<'tcx>>, pub nll_errors: RegionErrors<'tcx>, + + /// When using `-Zpolonius=next`: the localized typeck and liveness constraints. + pub localized_outlives_constraints: Option<LocalizedOutlivesConstraintSet>, } /// Rewrites the regions in the MIR to use NLL variables, also scraping out the set of universal @@ -135,6 +139,15 @@ pub(crate) fn compute_regions<'a, 'tcx>( elements, ); + // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives + // constraints. + let localized_outlives_constraints = + if infcx.tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + Some(polonius::create_localized_constraints(&mut regioncx, body)) + } else { + None + }; + // If requested: dump NLL facts, and run legacy polonius analysis. let polonius_output = all_facts.as_ref().and_then(|all_facts| { if infcx.tcx.sess.opts.unstable_opts.nll_facts { @@ -175,6 +188,7 @@ pub(crate) fn compute_regions<'a, 'tcx>( polonius_output, opt_closure_req: closure_region_requirements, nll_errors, + localized_outlives_constraints, } } @@ -215,40 +229,7 @@ pub(super) fn dump_nll_mir<'tcx>( &0, body, |pass_where, out| { - match pass_where { - // Before the CFG, dump out the values for each region variable. 
- PassWhere::BeforeCFG => { - regioncx.dump_mir(tcx, out)?; - writeln!(out, "|")?; - - if let Some(closure_region_requirements) = closure_region_requirements { - writeln!(out, "| Free Region Constraints")?; - for_each_region_constraint(tcx, closure_region_requirements, &mut |msg| { - writeln!(out, "| {msg}") - })?; - writeln!(out, "|")?; - } - - if borrow_set.len() > 0 { - writeln!(out, "| Borrows")?; - for (borrow_idx, borrow_data) in borrow_set.iter_enumerated() { - writeln!( - out, - "| {:?}: issued at {:?} in {:?}", - borrow_idx, borrow_data.reserve_location, borrow_data.region - )?; - } - writeln!(out, "|")?; - } - } - - PassWhere::BeforeLocation(_) => {} - - PassWhere::AfterTerminator(_) => {} - - PassWhere::BeforeBlock(_) | PassWhere::AfterLocation(_) | PassWhere::AfterCFG => {} - } - Ok(()) + emit_nll_mir(tcx, regioncx, closure_region_requirements, borrow_set, pass_where, out) }, options, ); @@ -266,6 +247,51 @@ pub(super) fn dump_nll_mir<'tcx>( }; } +/// Produces the actual NLL MIR sections to emit during the dumping process. +pub(crate) fn emit_nll_mir<'tcx>( + tcx: TyCtxt<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>, + borrow_set: &BorrowSet<'tcx>, + pass_where: PassWhere, + out: &mut dyn io::Write, +) -> io::Result<()> { + match pass_where { + // Before the CFG, dump out the values for each region variable. + PassWhere::BeforeCFG => { + regioncx.dump_mir(tcx, out)?; + writeln!(out, "|")?; + + if let Some(closure_region_requirements) = closure_region_requirements { + writeln!(out, "| Free Region Constraints")?; + for_each_region_constraint(tcx, closure_region_requirements, &mut |msg| { + writeln!(out, "| {msg}") + })?; + writeln!(out, "|")?; + } + + if borrow_set.len() > 0 { + writeln!(out, "| Borrows")?; + for (borrow_idx, borrow_data) in borrow_set.iter_enumerated() { + writeln!( + out, + "| {:?}: issued at {:?} in {:?}", + borrow_idx, borrow_data.reserve_location, borrow_data.region + )?; + } + writeln!(out, "|")?; + } + } + + PassWhere::BeforeLocation(_) => {} + + PassWhere::AfterTerminator(_) => {} + + PassWhere::BeforeBlock(_) | PassWhere::AfterLocation(_) | PassWhere::AfterCFG => {} + } + Ok(()) +} + #[allow(rustc::diagnostic_outside_of_impl)] #[allow(rustc::untranslatable_diagnostic)] pub(super) fn dump_annotation<'tcx, 'infcx>( diff --git a/compiler/rustc_borrowck/src/places_conflict.rs b/compiler/rustc_borrowck/src/places_conflict.rs index 679e111caa9..560b8c0349a 100644 --- a/compiler/rustc_borrowck/src/places_conflict.rs +++ b/compiler/rustc_borrowck/src/places_conflict.rs @@ -203,8 +203,7 @@ fn place_components_conflict<'tcx>( let base_ty = base.ty(body, tcx).ty; match (elem, base_ty.kind(), access) { - (_, _, Shallow(Some(ArtificialField::ArrayLength))) - | (_, _, Shallow(Some(ArtificialField::FakeBorrow))) => { + (_, _, Shallow(Some(ArtificialField::FakeBorrow))) => { // The array length is like additional fields on the // type; it does not overlap any existing data there. 
// Furthermore, if cannot actually be a prefix of any diff --git a/compiler/rustc_borrowck/src/polonius/constraints.rs b/compiler/rustc_borrowck/src/polonius/constraints.rs new file mode 100644 index 00000000000..50f59dd0dee --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/constraints.rs @@ -0,0 +1,45 @@ +use rustc_middle::ty::RegionVid; +use rustc_mir_dataflow::points::PointIndex; + +/// A localized outlives constraint reifies the CFG location where the outlives constraint holds, +/// within the origins themselves as if they were different from point to point: from `a: b` +/// outlives constraints to `a@p: b@p`, where `p` is the point in the CFG. +/// +/// This models two sources of constraints: +/// - constraints that traverse the subsets between regions at a given point, `a@p: b@p`. These +/// depend on typeck constraints generated via assignments, calls, etc. (In practice there are +/// subtleties where a statement's effect only starts being visible at the successor point, via +/// the "result" of that statement). +/// - constraints that traverse the CFG via the same region, `a@p: a@q`, where `p` is a predecessor +/// of `q`. These depend on the liveness of the regions at these points, as well as their +/// variance. +/// +/// The `source` origin at `from` flows into the `target` origin at `to`. +/// +/// This dual of NLL's [crate::constraints::OutlivesConstraint] therefore encodes the +/// position-dependent outlives constraints used by Polonius, to model the flow-sensitive loan +/// propagation via reachability within a graph of localized constraints. +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +pub(crate) struct LocalizedOutlivesConstraint { + pub source: RegionVid, + pub from: PointIndex, + pub target: RegionVid, + pub to: PointIndex, +} + +/// A container of [LocalizedOutlivesConstraint]s that can be turned into a traversable +/// `rustc_data_structures` graph. +#[derive(Clone, Default, Debug)] +pub(crate) struct LocalizedOutlivesConstraintSet { + pub outlives: Vec<LocalizedOutlivesConstraint>, +} + +impl LocalizedOutlivesConstraintSet { + pub(crate) fn push(&mut self, constraint: LocalizedOutlivesConstraint) { + if constraint.source == constraint.target && constraint.from == constraint.to { + // 'a@p: 'a@p is pretty uninteresting + return; + } + self.outlives.push(constraint); + } +} diff --git a/compiler/rustc_borrowck/src/polonius/dump.rs b/compiler/rustc_borrowck/src/polonius/dump.rs new file mode 100644 index 00000000000..a6d80149034 --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/dump.rs @@ -0,0 +1,104 @@ +use std::io; + +use rustc_middle::mir::pretty::{PrettyPrintMirOptions, dump_mir_with_options}; +use rustc_middle::mir::{Body, ClosureRegionRequirements, PassWhere}; +use rustc_middle::ty::TyCtxt; +use rustc_session::config::MirIncludeSpans; + +use crate::borrow_set::BorrowSet; +use crate::polonius::{LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet}; +use crate::{BorrowckInferCtxt, RegionInferenceContext}; + +/// `-Zdump-mir=polonius` dumps MIR annotated with NLL and polonius specific information. +// Note: this currently duplicates most of NLL MIR, with some additions for the localized outlives +// constraints. This is ok for now as this dump will change in the near future to an HTML file to +// become more useful. 
+pub(crate) fn dump_polonius_mir<'tcx>( + infcx: &BorrowckInferCtxt<'tcx>, + body: &Body<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + borrow_set: &BorrowSet<'tcx>, + localized_outlives_constraints: Option<LocalizedOutlivesConstraintSet>, + closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>, +) { + let tcx = infcx.tcx; + if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + return; + } + + let localized_outlives_constraints = localized_outlives_constraints + .expect("missing localized constraints with `-Zpolonius=next`"); + + // We want the NLL extra comments printed by default in NLL MIR dumps (they were removed in + // #112346). Specifying `-Z mir-include-spans` on the CLI still has priority: for example, + // they're always disabled in mir-opt tests to make working with blessed dumps easier. + let options = PrettyPrintMirOptions { + include_extra_comments: matches!( + tcx.sess.opts.unstable_opts.mir_include_spans, + MirIncludeSpans::On | MirIncludeSpans::Nll + ), + }; + + dump_mir_with_options( + tcx, + false, + "polonius", + &0, + body, + |pass_where, out| { + emit_polonius_mir( + tcx, + regioncx, + closure_region_requirements, + borrow_set, + &localized_outlives_constraints, + pass_where, + out, + ) + }, + options, + ); +} + +/// Produces the actual NLL + Polonius MIR sections to emit during the dumping process. +fn emit_polonius_mir<'tcx>( + tcx: TyCtxt<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>, + borrow_set: &BorrowSet<'tcx>, + localized_outlives_constraints: &LocalizedOutlivesConstraintSet, + pass_where: PassWhere, + out: &mut dyn io::Write, +) -> io::Result<()> { + // Emit the regular NLL front-matter + crate::nll::emit_nll_mir( + tcx, + regioncx, + closure_region_requirements, + borrow_set, + pass_where.clone(), + out, + )?; + + let liveness = regioncx.liveness_constraints(); + + // Add localized outlives constraints + match pass_where { + PassWhere::BeforeCFG => { + if localized_outlives_constraints.outlives.len() > 0 { + writeln!(out, "| Localized constraints")?; + + for constraint in &localized_outlives_constraints.outlives { + let LocalizedOutlivesConstraint { source, from, target, to } = constraint; + let from = liveness.location_from_point(*from); + let to = liveness.location_from_point(*to); + writeln!(out, "| {source:?} at {from:?} -> {target:?} at {to:?}")?; + } + writeln!(out, "|")?; + } + } + _ => {} + } + + Ok(()) +} diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs index 0d5b6f3a2c8..178f70a6730 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs @@ -299,16 +299,11 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> { self.consume_operand(location, op); } - &(Rvalue::Len(place) | Rvalue::Discriminant(place)) => { - let af = match rvalue { - Rvalue::Len(..) => Some(ArtificialField::ArrayLength), - Rvalue::Discriminant(..) 
=> None, - _ => unreachable!(), - }; + &Rvalue::Discriminant(place) => { self.access_place( location, place, - (Shallow(af), Read(ReadKind::Copy)), + (Shallow(None), Read(ReadKind::Copy)), LocalMutationIsAllowed::No, ); } diff --git a/compiler/rustc_borrowck/src/polonius/mod.rs b/compiler/rustc_borrowck/src/polonius/mod.rs index 9c1583f1988..eee5e70efe3 100644 --- a/compiler/rustc_borrowck/src/polonius/mod.rs +++ b/compiler/rustc_borrowck/src/polonius/mod.rs @@ -1 +1,180 @@ +//! Polonius analysis and support code: +//! - dedicated constraints +//! - conversion from NLL constraints +//! - debugging utilities +//! - etc. +//! +//! The current implementation models the flow-sensitive borrow-checking concerns as a graph +//! containing both information about regions and information about the control flow. +//! +//! Loan propagation is seen as a reachability problem (with some subtleties) between where the loan +//! is introduced and a given point. +//! +//! Constraints arising from type-checking allow loans to flow from region to region at the same CFG +//! point. Constraints arising from liveness allow loans to flow within from point to point, between +//! live regions at these points. +//! +//! Edges can be bidirectional to encode invariant relationships, and loans can flow "back in time" +//! to traverse these constraints arising earlier in the CFG. +//! +//! When incorporating kills in the traversal, the loans reaching a given point are considered live. +//! +//! After this, the usual NLL process happens. These live loans are fed into a dataflow analysis +//! combining them with the points where loans go out of NLL scope (the frontier where they stop +//! propagating to a live region), to yield the "loans in scope" or "active loans", at a given +//! point. +//! +//! Illegal accesses are still computed by checking whether one of these resulting loans is +//! invalidated. +//! +//! More information on this simple approach can be found in the following links, and in the future +//! in the rustc dev guide: +//! - <https://smallcultfollowing.com/babysteps/blog/2023/09/22/polonius-part-1/> +//! - <https://smallcultfollowing.com/babysteps/blog/2023/09/29/polonius-part-2/> +//! + +mod constraints; +pub(crate) use constraints::*; +mod dump; +pub(crate) use dump::dump_polonius_mir; pub(crate) mod legacy; + +use rustc_middle::mir::{Body, Location}; +use rustc_mir_dataflow::points::PointIndex; + +use crate::RegionInferenceContext; +use crate::constraints::OutlivesConstraint; +use crate::region_infer::values::LivenessValues; +use crate::type_check::Locations; +use crate::universal_regions::UniversalRegions; + +/// Creates a constraint set for `-Zpolonius=next` by: +/// - converting NLL typeck constraints to be localized +/// - encoding liveness constraints +pub(crate) fn create_localized_constraints<'tcx>( + regioncx: &mut RegionInferenceContext<'tcx>, + body: &Body<'tcx>, +) -> LocalizedOutlivesConstraintSet { + let mut localized_outlives_constraints = LocalizedOutlivesConstraintSet::default(); + convert_typeck_constraints( + body, + regioncx.liveness_constraints(), + regioncx.outlives_constraints(), + &mut localized_outlives_constraints, + ); + create_liveness_constraints( + body, + regioncx.liveness_constraints(), + regioncx.universal_regions(), + &mut localized_outlives_constraints, + ); + + // FIXME: here, we can trace loan reachability in the constraint graph and record this as loan + // liveness for the next step in the chain, the NLL loan scope and active loans computations. 
+ + localized_outlives_constraints +} + +/// Propagate loans throughout the subset graph at a given point (with some subtleties around the +/// location where effects start to be visible). +fn convert_typeck_constraints<'tcx>( + body: &Body<'tcx>, + liveness: &LivenessValues, + outlives_constraints: impl Iterator<Item = OutlivesConstraint<'tcx>>, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + for outlives_constraint in outlives_constraints { + match outlives_constraint.locations { + Locations::All(_) => { + // For now, turn logical constraints holding at all points into physical edges at + // every point in the graph. + // FIXME: encode this into *traversal* instead. + for (block, bb) in body.basic_blocks.iter_enumerated() { + let statement_count = bb.statements.len(); + for statement_index in 0..=statement_count { + let current_location = Location { block, statement_index }; + let current_point = liveness.point_from_location(current_location); + + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: outlives_constraint.sup, + from: current_point, + target: outlives_constraint.sub, + to: current_point, + }); + } + } + } + + _ => {} + } + } +} + +/// Propagate loans throughout the CFG: for each statement in the MIR, create localized outlives +/// constraints for loans that are propagated to the next statements. +pub(crate) fn create_liveness_constraints<'tcx>( + body: &Body<'tcx>, + liveness: &LivenessValues, + universal_regions: &UniversalRegions<'tcx>, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + for (block, bb) in body.basic_blocks.iter_enumerated() { + let statement_count = bb.statements.len(); + for statement_index in 0..=statement_count { + let current_location = Location { block, statement_index }; + let current_point = liveness.point_from_location(current_location); + + if statement_index < statement_count { + // Intra-block edges, straight line constraints from each point to its successor + // within the same block. + let next_location = Location { block, statement_index: statement_index + 1 }; + let next_point = liveness.point_from_location(next_location); + propagate_loans_between_points( + current_point, + next_point, + liveness, + universal_regions, + localized_outlives_constraints, + ); + } else { + // Inter-block edges, from the block's terminator to each successor block's entry + // point. + for successor_block in bb.terminator().successors() { + let next_location = Location { block: successor_block, statement_index: 0 }; + let next_point = liveness.point_from_location(next_location); + propagate_loans_between_points( + current_point, + next_point, + liveness, + universal_regions, + localized_outlives_constraints, + ); + } + } + } + } +} + +/// Propagate loans within a region between two points in the CFG, if that region is live at both +/// the source and target points. +fn propagate_loans_between_points( + current_point: PointIndex, + next_point: PointIndex, + _liveness: &LivenessValues, + universal_regions: &UniversalRegions<'_>, + localized_outlives_constraints: &mut LocalizedOutlivesConstraintSet, +) { + // Universal regions are semantically live at all points. + // Note: we always have universal regions but they're not always (or often) involved in the + // subset graph. For now, we emit all their edges unconditionally, but some of these subgraphs + // will be disconnected from the rest of the graph and thus, unnecessary. 
+ // FIXME: only emit the edges of universal regions that existential regions can reach. + for region in universal_regions.universal_regions_iter() { + localized_outlives_constraints.push(LocalizedOutlivesConstraint { + source: region, + from: current_point, + target: region, + to: next_point, + }); + } +} diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index 60f7770d3f7..907a3f16b06 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -571,7 +571,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// Given a universal region in scope on the MIR, returns the /// corresponding index. /// - /// (Panics if `r` is not a registered universal region.) + /// Panics if `r` is not a registered universal region, most notably + /// if it is a placeholder. Handling placeholders requires access to the + /// `MirTypeckRegionConstraints`. pub(crate) fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { self.universal_regions().to_region_vid(r) } @@ -2227,6 +2229,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn scc_representative(&self, scc: ConstraintSccIndex) -> RegionVid { self.constraint_sccs.annotation(scc).representative } + + pub(crate) fn liveness_constraints(&self) -> &LivenessValues { + &self.liveness_constraints + } } impl<'tcx> RegionDefinition<'tcx> { diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs index a16bce63839..0b0757f16ab 100644 --- a/compiler/rustc_borrowck/src/region_infer/values.rs +++ b/compiler/rustc_borrowck/src/region_infer/values.rs @@ -199,6 +199,11 @@ impl LivenessValues { self.elements.point_from_location(location) } + #[inline] + pub(crate) fn location_from_point(&self, point: PointIndex) -> Location { + self.elements.to_location(point) + } + /// When using `-Zpolonius=next`, returns whether the `loan_idx` is live at the given `point`. pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, point: PointIndex) -> bool { self.loans diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index 918efac2a20..4b7f5321388 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -77,17 +77,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { #[instrument(skip(self), level = "debug")] pub(super) fn convert_all(&mut self, query_constraints: &QueryRegionConstraints<'tcx>) { - let QueryRegionConstraints { outlives, member_constraints } = query_constraints; - - // Annoying: to invoke `self.to_region_vid`, we need access to - // `self.constraints`, but we also want to be mutating - // `self.member_constraints`. For now, just swap out the value - // we want and replace at the end. - let mut tmp = std::mem::take(&mut self.constraints.member_constraints); - for member_constraint in member_constraints { - tmp.push_constraint(member_constraint, |r| self.to_region_vid(r)); - } - self.constraints.member_constraints = tmp; + let QueryRegionConstraints { outlives } = query_constraints; for &(predicate, constraint_category) in outlives { self.convert(predicate, constraint_category); @@ -295,13 +285,8 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { match result { Ok(TypeOpOutput { output: ty, constraints, .. 
}) => { - if let Some(constraints) = constraints { - assert!( - constraints.member_constraints.is_empty(), - "no member constraints expected from normalizing: {:#?}", - constraints.member_constraints - ); - next_outlives_predicates.extend(constraints.outlives.iter().copied()); + if let Some(QueryRegionConstraints { outlives }) = constraints { + next_outlives_predicates.extend(outlives.iter().copied()); } ty } diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 0c59813d124..c29f3033dd0 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -3,7 +3,7 @@ use std::rc::Rc; use std::{fmt, iter, mem}; -use rustc_abi::{FIRST_VARIANT, FieldIdx}; +use rustc_abi::FieldIdx; use rustc_data_structures::frozen::Frozen; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_errors::ErrorGuaranteed; @@ -40,9 +40,7 @@ use rustc_mir_dataflow::points::DenseLocationMap; use rustc_span::def_id::CRATE_DEF_ID; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Span, sym}; -use rustc_trait_selection::traits::query::type_op::custom::{ - CustomTypeOp, scrape_region_constraints, -}; +use rustc_trait_selection::traits::query::type_op::custom::scrape_region_constraints; use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput}; use tracing::{debug, instrument, trace}; @@ -75,20 +73,12 @@ macro_rules! span_mirbug { }) } -macro_rules! span_mirbug_and_err { - ($context:expr, $elem:expr, $($message:tt)*) => ({ - { - span_mirbug!($context, $elem, $($message)*); - $context.error() - } - }) -} - mod canonical; mod constraint_conversion; pub(crate) mod free_region_relations; mod input_output; pub(crate) mod liveness; +mod opaque_types; mod relate_tys; /// Type checks the given `mir` in the context of the inference @@ -179,52 +169,8 @@ pub(crate) fn type_check<'a, 'tcx>( liveness::generate(&mut typeck, body, &elements, flow_inits, move_data); - let opaque_type_values = infcx - .take_opaque_types() - .into_iter() - .map(|(opaque_type_key, decl)| { - let _: Result<_, ErrorGuaranteed> = typeck.fully_perform_op( - Locations::All(body.span), - ConstraintCategory::OpaqueType, - CustomTypeOp::new( - |ocx| { - ocx.infcx.register_member_constraints( - opaque_type_key, - decl.hidden_type.ty, - decl.hidden_type.span, - ); - Ok(()) - }, - "opaque_type_map", - ), - ); - let hidden_type = infcx.resolve_vars_if_possible(decl.hidden_type); - trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind()); - if hidden_type.has_non_region_infer() { - infcx.dcx().span_bug( - decl.hidden_type.span, - format!("could not resolve {:#?}", hidden_type.ty.kind()), - ); - } - - // Convert all regions to nll vars. - let (opaque_type_key, hidden_type) = - fold_regions(infcx.tcx, (opaque_type_key, hidden_type), |region, _| { - match region.kind() { - ty::ReVar(_) => region, - ty::RePlaceholder(placeholder) => { - typeck.constraints.placeholder_region(infcx, placeholder) - } - _ => ty::Region::new_var( - infcx.tcx, - typeck.universal_regions.to_region_vid(region), - ), - } - }); - - (opaque_type_key, hidden_type) - }) - .collect(); + let opaque_type_values = + opaque_types::take_opaques_and_register_member_constraints(&mut typeck); MirTypeckResults { constraints, universal_region_relations, opaque_type_values } } @@ -241,11 +187,9 @@ enum FieldAccessError { OutOfRange { field_count: usize }, } -/// Verifies that MIR types are sane to not crash further checks. 
+/// Verifies that MIR types are sane. /// -/// The sanitize_XYZ methods here take an MIR object and compute its -/// type, calling `span_mirbug` and returning an error type if there -/// is a problem. +/// FIXME: This should be merged with the actual `TypeChecker`. struct TypeVerifier<'a, 'b, 'tcx> { typeck: &'a mut TypeChecker<'b, 'tcx>, promoted: &'b IndexSlice<Promoted, Body<'tcx>>, @@ -260,14 +204,91 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { - self.sanitize_place(place, location, context); + self.super_place(place, context, location); + let tcx = self.tcx(); + let place_ty = place.ty(self.body(), tcx); + if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context { + let trait_ref = ty::TraitRef::new( + tcx, + tcx.require_lang_item(LangItem::Copy, Some(self.last_span)), + [place_ty.ty], + ); + + // To have a `Copy` operand, the type `T` of the + // value must be `Copy`. Note that we prove that `T: Copy`, + // rather than using the `is_copy_modulo_regions` + // test. This is important because + // `is_copy_modulo_regions` ignores the resulting region + // obligations and assumes they pass. This can result in + // bounds from `Copy` impls being unsoundly ignored (e.g., + // #29149). Note that we decide to use `Copy` before knowing + // whether the bounds fully apply: in effect, the rule is + // that if a value of some type could implement `Copy`, then + // it must. + self.typeck.prove_trait_ref( + trait_ref, + location.to_locations(), + ConstraintCategory::CopyBound, + ); + } + } + + fn visit_projection_elem( + &mut self, + place: PlaceRef<'tcx>, + elem: PlaceElem<'tcx>, + context: PlaceContext, + location: Location, + ) { + let tcx = self.tcx(); + let base_ty = place.ty(self.body(), tcx); + match elem { + // All these projections don't add any constraints, so there's nothing to + // do here. We check their invariants in the MIR validator after all. + ProjectionElem::Deref + | ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } + | ProjectionElem::Downcast(..) 
=> {} + ProjectionElem::Field(field, fty) => { + let fty = self.typeck.normalize(fty, location); + let ty = base_ty.field_ty(tcx, field); + let ty = self.typeck.normalize(ty, location); + debug!(?fty, ?ty); + + if let Err(terr) = self.typeck.relate_types( + ty, + context.ambient_variance(), + fty, + location.to_locations(), + ConstraintCategory::Boring, + ) { + span_mirbug!(self, place, "bad field access ({:?}: {:?}): {:?}", ty, fty, terr); + } + } + ProjectionElem::OpaqueCast(ty) => { + let ty = self.typeck.normalize(ty, location); + self.typeck + .relate_types( + ty, + context.ambient_variance(), + base_ty.ty, + location.to_locations(), + ConstraintCategory::TypeAnnotation, + ) + .unwrap(); + } + ProjectionElem::Subtype(_) => { + bug!("ProjectionElem::Subtype shouldn't exist in borrowck") + } + } } fn visit_const_operand(&mut self, constant: &ConstOperand<'tcx>, location: Location) { debug!(?constant, ?location, "visit_const_operand"); self.super_const_operand(constant, location); - let ty = self.sanitize_type(constant, constant.const_.ty()); + let ty = constant.const_.ty(); self.typeck.infcx.tcx.for_each_free_region(&ty, |live_region| { let live_region_vid = self.typeck.universal_regions.to_region_vid(live_region); @@ -337,7 +358,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { }; let promoted_body = &self.promoted[promoted]; - self.sanitize_promoted(promoted_body, location); + self.verify_promoted(promoted_body, location); let promoted_ty = promoted_body.return_ty(); check_err(self, promoted_body, ty, promoted_ty); @@ -387,15 +408,8 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } } - fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { - self.super_rvalue(rvalue, location); - let rval_ty = rvalue.ty(self.body(), self.tcx()); - self.sanitize_type(rvalue, rval_ty); - } - fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) { self.super_local_decl(local, local_decl); - self.sanitize_type(local_decl, local_decl.ty); if let Some(user_ty) = &local_decl.user_ty { for (user_ty, span) in user_ty.projections_and_spans() { @@ -434,7 +448,6 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } fn visit_body(&mut self, body: &Body<'tcx>) { - self.sanitize_type(&"return type", body.return_ty()); // The types of local_decls are checked above which is called in super_body. self.super_body(body); } @@ -449,64 +462,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { self.typeck.infcx.tcx } - fn sanitize_type(&mut self, parent: &dyn fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> { - if ty.has_escaping_bound_vars() || ty.references_error() { - span_mirbug_and_err!(self, parent, "bad type {:?}", ty) - } else { - ty - } - } - - /// Checks that the types internal to the `place` match up with - /// what would be expected. 
- #[instrument(level = "debug", skip(self, location), ret)] - fn sanitize_place( - &mut self, - place: &Place<'tcx>, - location: Location, - context: PlaceContext, - ) -> PlaceTy<'tcx> { - let mut place_ty = PlaceTy::from_ty(self.body().local_decls[place.local].ty); - - for elem in place.projection.iter() { - if place_ty.variant_index.is_none() { - if let Err(guar) = place_ty.ty.error_reported() { - return PlaceTy::from_ty(Ty::new_error(self.tcx(), guar)); - } - } - place_ty = self.sanitize_projection(place_ty, elem, place, location, context); - } - - if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context { - let tcx = self.tcx(); - let trait_ref = ty::TraitRef::new( - tcx, - tcx.require_lang_item(LangItem::Copy, Some(self.last_span)), - [place_ty.ty], - ); - - // To have a `Copy` operand, the type `T` of the - // value must be `Copy`. Note that we prove that `T: Copy`, - // rather than using the `is_copy_modulo_regions` - // test. This is important because - // `is_copy_modulo_regions` ignores the resulting region - // obligations and assumes they pass. This can result in - // bounds from `Copy` impls being unsoundly ignored (e.g., - // #29149). Note that we decide to use `Copy` before knowing - // whether the bounds fully apply: in effect, the rule is - // that if a value of some type could implement `Copy`, then - // it must. - self.typeck.prove_trait_ref( - trait_ref, - location.to_locations(), - ConstraintCategory::CopyBound, - ); - } - - place_ty - } - - fn sanitize_promoted(&mut self, promoted_body: &'b Body<'tcx>, location: Location) { + fn verify_promoted(&mut self, promoted_body: &'b Body<'tcx>, location: Location) { // Determine the constraints from the promoted MIR by running the type // checker on the promoted MIR, then transfer the constraints back to // the main MIR, changing the locations to the provided location. @@ -562,240 +518,6 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { self.typeck.constraints.liveness_constraints.add_location(region, location); } } - - #[instrument(skip(self, location), ret, level = "debug")] - fn sanitize_projection( - &mut self, - base: PlaceTy<'tcx>, - pi: PlaceElem<'tcx>, - place: &Place<'tcx>, - location: Location, - context: PlaceContext, - ) -> PlaceTy<'tcx> { - let tcx = self.tcx(); - let base_ty = base.ty; - match pi { - ProjectionElem::Deref => { - let deref_ty = base_ty.builtin_deref(true); - PlaceTy::from_ty(deref_ty.unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "deref of non-pointer {:?}", base_ty) - })) - } - ProjectionElem::Index(i) => { - let index_ty = Place::from(i).ty(self.body(), tcx).ty; - if index_ty != tcx.types.usize { - PlaceTy::from_ty(span_mirbug_and_err!(self, i, "index by non-usize {:?}", i)) - } else { - PlaceTy::from_ty(base_ty.builtin_index().unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) - })) - } - } - ProjectionElem::ConstantIndex { .. } => { - // consider verifying in-bounds - PlaceTy::from_ty(base_ty.builtin_index().unwrap_or_else(|| { - span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty) - })) - } - ProjectionElem::Subslice { from, to, from_end } => { - PlaceTy::from_ty(match base_ty.kind() { - ty::Array(inner, _) => { - assert!(!from_end, "array subslices should not use from_end"); - Ty::new_array(tcx, *inner, to - from) - } - ty::Slice(..) 
=> { - assert!(from_end, "slice subslices should use from_end"); - base_ty - } - _ => span_mirbug_and_err!(self, place, "slice of non-array {:?}", base_ty), - }) - } - ProjectionElem::Downcast(maybe_name, index) => match base_ty.kind() { - ty::Adt(adt_def, _args) if adt_def.is_enum() => { - if index.as_usize() >= adt_def.variants().len() { - PlaceTy::from_ty(span_mirbug_and_err!( - self, - place, - "cast to variant #{:?} but enum only has {:?}", - index, - adt_def.variants().len() - )) - } else { - PlaceTy { ty: base_ty, variant_index: Some(index) } - } - } - // We do not need to handle coroutines here, because this runs - // before the coroutine transform stage. - _ => { - let ty = if let Some(name) = maybe_name { - span_mirbug_and_err!( - self, - place, - "can't downcast {:?} as {:?}", - base_ty, - name - ) - } else { - span_mirbug_and_err!(self, place, "can't downcast {:?}", base_ty) - }; - PlaceTy::from_ty(ty) - } - }, - ProjectionElem::Field(field, fty) => { - let fty = self.sanitize_type(place, fty); - let fty = self.typeck.normalize(fty, location); - match self.field_ty(place, base, field, location) { - Ok(ty) => { - let ty = self.typeck.normalize(ty, location); - debug!(?fty, ?ty); - - if let Err(terr) = self.typeck.relate_types( - ty, - self.get_ambient_variance(context), - fty, - location.to_locations(), - ConstraintCategory::Boring, - ) { - span_mirbug!( - self, - place, - "bad field access ({:?}: {:?}): {:?}", - ty, - fty, - terr - ); - } - } - Err(FieldAccessError::OutOfRange { field_count }) => span_mirbug!( - self, - place, - "accessed field #{} but variant only has {}", - field.index(), - field_count - ), - } - PlaceTy::from_ty(fty) - } - ProjectionElem::Subtype(_) => { - bug!("ProjectionElem::Subtype shouldn't exist in borrowck") - } - ProjectionElem::OpaqueCast(ty) => { - let ty = self.sanitize_type(place, ty); - let ty = self.typeck.normalize(ty, location); - self.typeck - .relate_types( - ty, - self.get_ambient_variance(context), - base.ty, - location.to_locations(), - ConstraintCategory::TypeAnnotation, - ) - .unwrap(); - PlaceTy::from_ty(ty) - } - } - } - - fn error(&mut self) -> Ty<'tcx> { - Ty::new_misc_error(self.tcx()) - } - - fn get_ambient_variance(&self, context: PlaceContext) -> ty::Variance { - use rustc_middle::mir::visit::NonMutatingUseContext::*; - use rustc_middle::mir::visit::NonUseContext::*; - - match context { - PlaceContext::MutatingUse(_) => ty::Invariant, - PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant, - PlaceContext::NonMutatingUse( - Inspect | Copy | Move | PlaceMention | SharedBorrow | FakeBorrow | RawBorrow - | Projection, - ) => ty::Covariant, - PlaceContext::NonUse(AscribeUserTy(variance)) => variance, - } - } - - fn field_ty( - &mut self, - parent: &dyn fmt::Debug, - base_ty: PlaceTy<'tcx>, - field: FieldIdx, - location: Location, - ) -> Result<Ty<'tcx>, FieldAccessError> { - let tcx = self.tcx(); - - let (variant, args) = match base_ty { - PlaceTy { ty, variant_index: Some(variant_index) } => match *ty.kind() { - ty::Adt(adt_def, args) => (adt_def.variant(variant_index), args), - ty::Coroutine(def_id, args) => { - let mut variants = args.as_coroutine().state_tys(def_id, tcx); - let Some(mut variant) = variants.nth(variant_index.into()) else { - bug!( - "variant_index of coroutine out of range: {:?}/{:?}", - variant_index, - args.as_coroutine().state_tys(def_id, tcx).count() - ); - }; - return match variant.nth(field.index()) { - Some(ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { field_count: 
variant.count() }), - }; - } - _ => bug!("can't have downcast of non-adt non-coroutine type"), - }, - PlaceTy { ty, variant_index: None } => match *ty.kind() { - ty::Adt(adt_def, args) if !adt_def.is_enum() => { - (adt_def.variant(FIRST_VARIANT), args) - } - ty::Closure(_, args) => { - return match args.as_closure().upvar_tys().get(field.index()) { - Some(&ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { - field_count: args.as_closure().upvar_tys().len(), - }), - }; - } - ty::CoroutineClosure(_, args) => { - return match args.as_coroutine_closure().upvar_tys().get(field.index()) { - Some(&ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { - field_count: args.as_coroutine_closure().upvar_tys().len(), - }), - }; - } - ty::Coroutine(_, args) => { - // Only prefix fields (upvars and current state) are - // accessible without a variant index. - return match args.as_coroutine().prefix_tys().get(field.index()) { - Some(ty) => Ok(*ty), - None => Err(FieldAccessError::OutOfRange { - field_count: args.as_coroutine().prefix_tys().len(), - }), - }; - } - ty::Tuple(tys) => { - return match tys.get(field.index()) { - Some(&ty) => Ok(ty), - None => Err(FieldAccessError::OutOfRange { field_count: tys.len() }), - }; - } - _ => { - return Ok(span_mirbug_and_err!( - self, - parent, - "can't project out of {:?}", - base_ty - )); - } - }, - }; - - if let Some(field) = variant.fields.get(field) { - Ok(self.typeck.normalize(field.ty(tcx, args), location)) - } else { - Err(FieldAccessError::OutOfRange { field_count: variant.fields.len() }) - } - } } /// The MIR type checker. Visits the MIR and enforces all the @@ -955,6 +677,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { self.body } + fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> RegionVid { + if let ty::RePlaceholder(placeholder) = r.kind() { + self.constraints.placeholder_region(self.infcx, placeholder).as_var() + } else { + self.universal_regions.to_region_vid(r) + } + } + fn unsized_feature_enabled(&self) -> bool { let features = self.tcx().features(); features.unsized_locals() || features.unsized_fn_params() @@ -2464,7 +2194,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { Rvalue::RawPtr(..) | Rvalue::ThreadLocalRef(..) - | Rvalue::Len(..) | Rvalue::Discriminant(..) | Rvalue::NullaryOp(NullOp::OffsetOf(..), _) => {} } @@ -2480,7 +2209,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | Rvalue::Repeat(..) | Rvalue::Ref(..) | Rvalue::RawPtr(..) - | Rvalue::Len(..) | Rvalue::Cast(..) | Rvalue::ShallowInitBox(..) | Rvalue::BinaryOp(..) diff --git a/compiler/rustc_borrowck/src/type_check/opaque_types.rs b/compiler/rustc_borrowck/src/type_check/opaque_types.rs new file mode 100644 index 00000000000..edf3b1ae092 --- /dev/null +++ b/compiler/rustc_borrowck/src/type_check/opaque_types.rs @@ -0,0 +1,335 @@ +use std::iter; + +use rustc_data_structures::fx::FxIndexMap; +use rustc_middle::span_bug; +use rustc_middle::ty::fold::fold_regions; +use rustc_middle::ty::{ + self, GenericArgKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeSuperVisitable, + TypeVisitable, TypeVisitableExt, TypeVisitor, +}; +use tracing::{debug, trace}; + +use super::{MemberConstraintSet, TypeChecker}; + +/// Once we're done with typechecking the body, we take all the opaque types +/// defined by this function and add their 'member constraints'. 
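The function added just below swaps `member_constraints` out of `typeck.constraints` with `std::mem::take` so it can keep calling `&mut`-style helpers (such as `typeck.to_region_vid`) while filling the set, then restores it at the end. A minimal standalone sketch of that borrow-splitting pattern, using invented names rather than the real borrowck types:

```rust
// Invented stand-ins for `TypeChecker` / `MirTypeckRegionConstraints`, just to
// show the take-mutate-put-back pattern in isolation.
struct Constraints {
    member_constraints: Vec<u32>,
}

struct TypeckDemo {
    constraints: Constraints,
    next_vid: u32,
}

impl TypeckDemo {
    // Analogue of `typeck.to_region_vid`: it also needs `&mut self`.
    fn to_region_vid(&mut self) -> u32 {
        let v = self.next_vid;
        self.next_vid += 1;
        v
    }

    fn take_and_register(&mut self) {
        // Move the set out so we can call `&mut self` methods while filling it...
        let mut member_constraints = std::mem::take(&mut self.constraints.member_constraints);
        member_constraints.push(self.to_region_vid());
        member_constraints.push(self.to_region_vid());
        // ...then put it back once we are done, as the real function does.
        assert!(self.constraints.member_constraints.is_empty());
        self.constraints.member_constraints = member_constraints;
    }
}

fn main() {
    let mut t = TypeckDemo {
        constraints: Constraints { member_constraints: Vec::new() },
        next_vid: 0,
    };
    t.take_and_register();
    assert_eq!(t.constraints.member_constraints, vec![0, 1]);
}
```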
+pub(super) fn take_opaques_and_register_member_constraints<'tcx>(
+    typeck: &mut TypeChecker<'_, 'tcx>,
+) -> FxIndexMap<OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>> {
+    let infcx = typeck.infcx;
+    // Annoying: to invoke `typeck.to_region_vid`, we need access to
+    // `typeck.constraints`, but we also want to be mutating
+    // `typeck.member_constraints`. For now, just swap out the value
+    // we want and replace at the end.
+    let mut member_constraints = std::mem::take(&mut typeck.constraints.member_constraints);
+    let opaque_types = infcx
+        .take_opaque_types()
+        .into_iter()
+        .map(|(opaque_type_key, decl)| {
+            let hidden_type = infcx.resolve_vars_if_possible(decl.hidden_type);
+            register_member_constraints(
+                typeck,
+                &mut member_constraints,
+                opaque_type_key,
+                hidden_type,
+            );
+            trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind());
+            if hidden_type.has_non_region_infer() {
+                span_bug!(hidden_type.span, "could not resolve {:?}", hidden_type.ty);
+            }
+
+            // Convert all regions to nll vars.
+            let (opaque_type_key, hidden_type) =
+                fold_regions(infcx.tcx, (opaque_type_key, hidden_type), |r, _| {
+                    ty::Region::new_var(infcx.tcx, typeck.to_region_vid(r))
+                });
+
+            (opaque_type_key, hidden_type)
+        })
+        .collect();
+    assert!(typeck.constraints.member_constraints.is_empty());
+    typeck.constraints.member_constraints = member_constraints;
+    opaque_types
+}
+
+/// Given the map `opaque_types` containing the opaque
+/// `impl Trait` types whose underlying, hidden types are being
+/// inferred, this method adds constraints to the regions
+/// appearing in those underlying hidden types to ensure that they
+/// at least do not refer to random scopes within the current
+/// function. These constraints are not (quite) sufficient to
+/// guarantee that the regions are actually legal values; that
+/// final condition is imposed after region inference is done.
+///
+/// # The Problem
+///
+/// Let's work through an example to explain how it works. Assume
+/// the current function is as follows:
+///
+/// ```text
+/// fn foo<'a, 'b>(..) -> (impl Bar<'a>, impl Bar<'b>)
+/// ```
+///
+/// Here, we have two `impl Trait` types whose values are being
+/// inferred (the `impl Bar<'a>` and the `impl
+/// Bar<'b>`). Conceptually, this is sugar for a setup where we
+/// define underlying opaque types (`Foo1`, `Foo2`) and then, in
+/// the return type of `foo`, we *reference* those definitions:
+///
+/// ```text
+/// type Foo1<'x> = impl Bar<'x>;
+/// type Foo2<'x> = impl Bar<'x>;
+/// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. }
+///                     //  ^^^^  ^^
+///                     //  |     |
+///                     //  |     args
+///                     //  def_id
+/// ```
+///
+/// As indicated in the comments above, each of those references
+/// is (in the compiler) basically generic parameters (`args`)
+/// applied to the type of a suitable `def_id` (which identifies
+/// `Foo1` or `Foo2`).
+///
+/// Now, at this point in compilation, what we have done is to
+/// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with
+/// fresh inference variables C1 and C2. We wish to use the values
+/// of these variables to infer the underlying types of `Foo1` and
+/// `Foo2`. That is, this gives rise to higher-order (pattern) unification
+/// constraints like:
+///
+/// ```text
+/// for<'a> (Foo1<'a> = C1)
+/// for<'b> (Foo2<'b> = C2)
+/// ```
+///
+/// For these equations to be satisfiable, the types `C1` and `C2`
+/// can only refer to a limited set of regions.
For example, `C1` +/// can only refer to `'static` and `'a`, and `C2` can only refer +/// to `'static` and `'b`. The job of this function is to impose that +/// constraint. +/// +/// Up to this point, C1 and C2 are basically just random type +/// inference variables, and hence they may contain arbitrary +/// regions. In fact, it is fairly likely that they do! Consider +/// this possible definition of `foo`: +/// +/// ```text +/// fn foo<'a, 'b>(x: &'a i32, y: &'b i32) -> (impl Bar<'a>, impl Bar<'b>) { +/// (&*x, &*y) +/// } +/// ``` +/// +/// Here, the values for the concrete types of the two impl +/// traits will include inference variables: +/// +/// ```text +/// &'0 i32 +/// &'1 i32 +/// ``` +/// +/// Ordinarily, the subtyping rules would ensure that these are +/// sufficiently large. But since `impl Bar<'a>` isn't a specific +/// type per se, we don't get such constraints by default. This +/// is where this function comes into play. It adds extra +/// constraints to ensure that all the regions which appear in the +/// inferred type are regions that could validly appear. +/// +/// This is actually a bit of a tricky constraint in general. We +/// want to say that each variable (e.g., `'0`) can only take on +/// values that were supplied as arguments to the opaque type +/// (e.g., `'a` for `Foo1<'a>`) or `'static`, which is always in +/// scope. We don't have a constraint quite of this kind in the current +/// region checker. +/// +/// # The Solution +/// +/// We generally prefer to make `<=` constraints, since they +/// integrate best into the region solver. To do that, we find the +/// "minimum" of all the arguments that appear in the args: that +/// is, some region which is less than all the others. In the case +/// of `Foo1<'a>`, that would be `'a` (it's the only choice, after +/// all). Then we apply that as a least bound to the variables +/// (e.g., `'a <= '0`). +/// +/// In some cases, there is no minimum. Consider this example: +/// +/// ```text +/// fn baz<'a, 'b>() -> impl Trait<'a, 'b> { ... } +/// ``` +/// +/// Here we would report a more complex "in constraint", like `'r +/// in ['a, 'b, 'static]` (where `'r` is some region appearing in +/// the hidden type). +/// +/// # Constrain regions, not the hidden concrete type +/// +/// Note that generating constraints on each region `Rc` is *not* +/// the same as generating an outlives constraint on `Tc` itself. +/// For example, if we had a function like this: +/// +/// ``` +/// # #![feature(type_alias_impl_trait)] +/// # fn main() {} +/// # trait Foo<'a> {} +/// # impl<'a, T> Foo<'a> for (&'a u32, T) {} +/// fn foo<'a, T>(x: &'a u32, y: T) -> impl Foo<'a> { +/// (x, y) +/// } +/// +/// // Equivalent to: +/// # mod dummy { use super::*; +/// type FooReturn<'a, T> = impl Foo<'a>; +/// fn foo<'a, T>(x: &'a u32, y: T) -> FooReturn<'a, T> { +/// (x, y) +/// } +/// # } +/// ``` +/// +/// then the hidden type `Tc` would be `(&'0 u32, T)` (where `'0` +/// is an inference variable). If we generated a constraint that +/// `Tc: 'a`, then this would incorrectly require that `T: 'a` -- +/// but this is not necessary, because the opaque type we +/// create will be allowed to reference `T`. So we only generate a +/// constraint that `'0: 'a`. 
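A standalone illustration (not taken from this patch) of the restriction described above, assuming the Rust 2021 edition rules where `impl Bar<'a>` captures only `'a` and `'static`; the trait, impl, and function names are made up for the example:

```rust
trait Bar<'a> {}
impl<'a, 'b> Bar<'a> for &'b i32 {}

// OK: the hidden type is `&'a i32`, and `'a` is one of the "choice regions"
// (`'a` or `'static`) supplied to the opaque type, so the member constraint
// is satisfiable.
fn ok<'a>(x: &'a i32) -> impl Bar<'a> {
    x
}

// Rejected on the 2021 edition: the hidden type would be `&'b i32`, but the
// opaque type may only mention `'a` and `'static`, so borrowck reports that
// the hidden type captures a lifetime that does not appear in bounds.
//
// fn not_ok<'a, 'b>(x: &'b i32) -> impl Bar<'a> {
//     x
// }

fn main() {
    let v = 1;
    let _bar = ok(&v);
}
```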
+fn register_member_constraints<'tcx>( + typeck: &mut TypeChecker<'_, 'tcx>, + member_constraints: &mut MemberConstraintSet<'tcx, ty::RegionVid>, + opaque_type_key: OpaqueTypeKey<'tcx>, + OpaqueHiddenType { span, ty: hidden_ty }: OpaqueHiddenType<'tcx>, +) { + let tcx = typeck.tcx(); + let hidden_ty = typeck.infcx.resolve_vars_if_possible(hidden_ty); + debug!(?hidden_ty); + + let variances = tcx.variances_of(opaque_type_key.def_id); + debug!(?variances); + + // For a case like `impl Foo<'a, 'b>`, we would generate a constraint + // `'r in ['a, 'b, 'static]` for each region `'r` that appears in the + // hidden type (i.e., it must be equal to `'a`, `'b`, or `'static`). + // + // `conflict1` and `conflict2` are the two region bounds that we + // detected which were unrelated. They are used for diagnostics. + + // Create the set of choice regions: each region in the hidden + // type can be equal to any of the region parameters of the + // opaque type definition. + let fr_static = typeck.universal_regions.fr_static; + let choice_regions: Vec<_> = opaque_type_key + .args + .iter() + .enumerate() + .filter(|(i, _)| variances[*i] == ty::Invariant) + .filter_map(|(_, arg)| match arg.unpack() { + GenericArgKind::Lifetime(r) => Some(typeck.to_region_vid(r)), + GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, + }) + .chain(iter::once(fr_static)) + .collect(); + + // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's + // not currently sound until we have existential regions. + hidden_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { + tcx, + op: |r| { + member_constraints.add_member_constraint( + opaque_type_key, + hidden_ty, + span, + typeck.to_region_vid(r), + &choice_regions, + ) + }, + }); +} + +/// Visitor that requires that (almost) all regions in the type visited outlive +/// `least_region`. We cannot use `push_outlives_components` because regions in +/// closure signatures are not included in their outlives components. We need to +/// ensure all regions outlive the given bound so that we don't end up with, +/// say, `ReVar` appearing in a return type and causing ICEs when other +/// functions end up with region constraints involving regions from other +/// functions. +/// +/// We also cannot use `for_each_free_region` because for closures it includes +/// the regions parameters from the enclosing item. +/// +/// We ignore any type parameters because impl trait values are assumed to +/// capture all the in-scope type parameters. 
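For intuition, here is a much-simplified model of what such a visitor does, using a toy type representation rather than rustc's `Ty`/`TypeVisitor` machinery (all types below are invented for illustration): walk the structure, invoke the callback on every free region, and skip bound ones.

```rust
// Toy stand-ins for `Ty` and `Region`; the real visitor works on rustc's
// interned types and also special-cases closures, coroutines and opaques.
#[derive(Debug)]
enum Region {
    Static,
    Named(&'static str),
    Bound(u32),
}

#[derive(Debug)]
enum Ty {
    Ref(Region, Box<Ty>),
    Tuple(Vec<Ty>),
    Unit,
}

// Analogue of `ConstrainOpaqueTypeRegionVisitor`: apply `op` to every free
// region in the type, ignoring bound regions.
fn visit_regions(ty: &Ty, op: &mut impl FnMut(&Region)) {
    match ty {
        Ty::Ref(r, inner) => {
            if !matches!(r, Region::Bound(_)) {
                op(r);
            }
            visit_regions(inner, op);
        }
        Ty::Tuple(tys) => {
            for t in tys {
                visit_regions(t, op);
            }
        }
        Ty::Unit => {}
    }
}

fn main() {
    let hidden_ty = Ty::Tuple(vec![
        Ty::Ref(Region::Named("'a"), Box::new(Ty::Unit)),
        Ty::Ref(Region::Bound(0), Box::new(Ty::Unit)), // e.g. under a `for<'r>` binder: skipped
        Ty::Ref(Region::Static, Box::new(Ty::Unit)),
    ]);
    let mut found = Vec::new();
    visit_regions(&hidden_ty, &mut |r| found.push(format!("{r:?}")));
    assert_eq!(found, ["Named(\"'a\")", "Static"]);
}
```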
+struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { + tcx: TyCtxt<'tcx>, + op: OP, +} + +impl<'tcx, OP> TypeVisitor<TyCtxt<'tcx>> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP> +where + OP: FnMut(ty::Region<'tcx>), +{ + fn visit_binder<T: TypeVisitable<TyCtxt<'tcx>>>(&mut self, t: &ty::Binder<'tcx, T>) { + t.super_visit_with(self); + } + + fn visit_region(&mut self, r: ty::Region<'tcx>) { + match *r { + // ignore bound regions, keep visiting + ty::ReBound(_, _) => {} + _ => (self.op)(r), + } + } + + fn visit_ty(&mut self, ty: Ty<'tcx>) { + // We're only interested in types involving regions + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return; + } + + match ty.kind() { + ty::Closure(_, args) => { + // Skip lifetime parameters of the enclosing item(s) + + for upvar in args.as_closure().upvar_tys() { + upvar.visit_with(self); + } + args.as_closure().sig_as_fn_ptr_ty().visit_with(self); + } + + ty::CoroutineClosure(_, args) => { + // Skip lifetime parameters of the enclosing item(s) + + for upvar in args.as_coroutine_closure().upvar_tys() { + upvar.visit_with(self); + } + + args.as_coroutine_closure().signature_parts_ty().visit_with(self); + } + + ty::Coroutine(_, args) => { + // Skip lifetime parameters of the enclosing item(s) + // Also skip the witness type, because that has no free regions. + + for upvar in args.as_coroutine().upvar_tys() { + upvar.visit_with(self); + } + args.as_coroutine().return_ty().visit_with(self); + args.as_coroutine().yield_ty().visit_with(self); + args.as_coroutine().resume_ty().visit_with(self); + } + + ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => { + // Skip lifetime parameters that are not captures. + let variances = self.tcx.variances_of(*def_id); + + for (v, s) in std::iter::zip(variances, args.iter()) { + if *v != ty::Bivariant { + s.visit_with(self); + } + } + } + + _ => { + ty.super_visit_with(self); + } + } + } +} diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index fb2bd552157..1ac45cbea38 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -881,6 +881,10 @@ impl<'tcx> UniversalRegionIndices<'tcx> { /// reference those regions from the `ParamEnv`. It is also used /// during initialization. Relies on the `indices` map having been /// fully initialized. + /// + /// Panics if `r` is not a registered universal region, most notably + /// if it is a placeholder. Handling placeholders requires access to the + /// `MirTypeckRegionConstraints`. fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { if let ty::ReVar(..) 
= *r { r.as_var() diff --git a/compiler/rustc_builtin_macros/messages.ftl b/compiler/rustc_builtin_macros/messages.ftl index 87d3d288013..7a31d2a2239 100644 --- a/compiler/rustc_builtin_macros/messages.ftl +++ b/compiler/rustc_builtin_macros/messages.ftl @@ -249,9 +249,9 @@ builtin_macros_naked_functions_testing_attribute = .label = function marked with testing attribute here .naked_attribute = `#[naked]` is incompatible with testing attributes -builtin_macros_no_default_variant = no default declared - .help = make a unit variant default by placing `#[default]` above it - .suggestion = make `{$ident}` default +builtin_macros_no_default_variant = `#[derive(Default)]` on enum with no `#[default]` + .label = this enum needs a unit variant marked with `#[default]` + .suggestion = make this unit variant default by placing `#[default]` on it builtin_macros_non_abi = at least one abi must be provided as an argument to `clobber_abi` diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs index a7d9f608cbd..3c7bebd0f19 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -42,7 +42,9 @@ pub(crate) fn expand_deriving_default( StaticStruct(_, fields) => { default_struct_substructure(cx, trait_span, substr, fields) } - StaticEnum(enum_def, _) => default_enum_substructure(cx, trait_span, enum_def), + StaticEnum(enum_def, _) => { + default_enum_substructure(cx, trait_span, enum_def, item.span()) + } _ => cx.dcx().span_bug(trait_span, "method in `derive(Default)`"), } })), @@ -96,9 +98,10 @@ fn default_enum_substructure( cx: &ExtCtxt<'_>, trait_span: Span, enum_def: &EnumDef, + item_span: Span, ) -> BlockOrExpr { let expr = match try { - let default_variant = extract_default_variant(cx, enum_def, trait_span)?; + let default_variant = extract_default_variant(cx, enum_def, trait_span, item_span)?; validate_default_attribute(cx, default_variant)?; default_variant } { @@ -146,6 +149,7 @@ fn extract_default_variant<'a>( cx: &ExtCtxt<'_>, enum_def: &'a EnumDef, trait_span: Span, + item_span: Span, ) -> Result<&'a rustc_ast::Variant, ErrorGuaranteed> { let default_variants: SmallVec<[_; 1]> = enum_def .variants @@ -163,9 +167,10 @@ fn extract_default_variant<'a>( .filter(|variant| !attr::contains_name(&variant.attrs, sym::non_exhaustive)); let suggs = possible_defaults - .map(|v| errors::NoDefaultVariantSugg { span: v.span, ident: v.ident }) + .map(|v| errors::NoDefaultVariantSugg { span: v.span.shrink_to_lo() }) .collect(); - let guar = cx.dcx().emit_err(errors::NoDefaultVariant { span: trait_span, suggs }); + let guar = + cx.dcx().emit_err(errors::NoDefaultVariant { span: trait_span, item_span, suggs }); return Err(guar); } diff --git a/compiler/rustc_builtin_macros/src/errors.rs b/compiler/rustc_builtin_macros/src/errors.rs index b3198e7743d..1abdfdb9c65 100644 --- a/compiler/rustc_builtin_macros/src/errors.rs +++ b/compiler/rustc_builtin_macros/src/errors.rs @@ -369,26 +369,21 @@ pub(crate) struct DerivePathArgsValue { } #[derive(Diagnostic)] -#[diag(builtin_macros_no_default_variant)] -#[help] +#[diag(builtin_macros_no_default_variant, code = E0665)] pub(crate) struct NoDefaultVariant { #[primary_span] pub(crate) span: Span, + #[label] + pub(crate) item_span: Span, #[subdiagnostic] pub(crate) suggs: Vec<NoDefaultVariantSugg>, } #[derive(Subdiagnostic)] -#[suggestion( - builtin_macros_suggestion, - code = "#[default] {ident}", - applicability = "maybe-incorrect", - 
style = "tool-only" -)] +#[suggestion(builtin_macros_suggestion, code = "#[default] ", applicability = "maybe-incorrect")] pub(crate) struct NoDefaultVariantSugg { #[primary_span] pub(crate) span: Span, - pub(crate) ident: Ident, } #[derive(Diagnostic)] diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index 34066eb83fc..956a024fa4d 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -828,12 +828,6 @@ fn codegen_stmt<'tcx>( fx.bcx.ins().nop(); } } - Rvalue::Len(place) => { - let place = codegen_place(fx, place); - let usize_layout = fx.layout_of(fx.tcx.types.usize); - let len = codegen_array_len(fx, place); - lval.write_cvalue(fx, CValue::by_val(len, usize_layout)); - } Rvalue::ShallowInitBox(ref operand, content_ty) => { let content_ty = fx.monomorphize(content_ty); let box_layout = fx.layout_of(Ty::new_box(fx.tcx, content_ty)); diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml index d4bd498e28a..3a9b4d36807 100644 --- a/compiler/rustc_codegen_ssa/Cargo.toml +++ b/compiler/rustc_codegen_ssa/Cargo.toml @@ -8,8 +8,9 @@ edition = "2021" ar_archive_writer = "0.4.2" arrayvec = { version = "0.7", default-features = false } bitflags = "2.4.1" -# Pinned so `cargo update` bumps don't cause breakage -cc = "=1.2.0" +# Pinned so `cargo update` bumps don't cause breakage. Please also update the +# `cc` in `rustc_llvm` if you update the `cc` here. +cc = "=1.2.5" either = "1.5.0" itertools = "0.12" pathdiff = "0.2.0" diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs index cf72c2ed742..869798d8be1 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs @@ -432,6 +432,7 @@ fn push_debuginfo_type_name<'tcx>( push_closure_or_coroutine_name(tcx, def_id, args, qualified, output, visited); } } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binders)"), ty::Param(_) | ty::Error(_) | ty::Infer(_) diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index cf537392234..3b62148abb7 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -10,9 +10,9 @@ use rustc_session::config::OptLevel; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument}; +use super::FunctionCx; use super::operand::{OperandRef, OperandValue}; use super::place::PlaceRef; -use super::{FunctionCx, LocalRef}; use crate::common::IntPredicate; use crate::traits::*; use crate::{MemFlags, base}; @@ -593,14 +593,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.codegen_place_to_pointer(bx, place, mk_ptr) } - mir::Rvalue::Len(place) => { - let size = self.evaluate_array_len(bx, place); - OperandRef { - val: OperandValue::Immediate(size), - layout: bx.cx().layout_of(bx.tcx().types.usize), - } - } - mir::Rvalue::BinaryOp(op_with_overflow, box (ref lhs, ref rhs)) if let Some(op) = op_with_overflow.overflowing_to_wrapping() => { @@ -800,24 +792,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } - fn evaluate_array_len(&mut self, bx: &mut Bx, place: mir::Place<'tcx>) -> Bx::Value { - // ZST are passed as operands and require special handling - // because codegen_place() panics if Local is operand. 
- if let Some(index) = place.as_local() { - if let LocalRef::Operand(op) = self.locals[index] { - if let ty::Array(_, n) = op.layout.ty.kind() { - let n = n - .try_to_target_usize(bx.tcx()) - .expect("expected monomorphic const in codegen"); - return bx.cx().const_usize(n); - } - } - } - // use common size calculation for non zero-sized types - let cg_value = self.codegen_place(bx, place.as_ref()); - cg_value.len(bx.cx()) - } - /// Codegen an `Rvalue::RawPtr` or `Rvalue::Ref` fn codegen_place_to_pointer( &mut self, @@ -1089,7 +1063,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { mir::Rvalue::Ref(..) | mir::Rvalue::CopyForDeref(..) | mir::Rvalue::RawPtr(..) | - mir::Rvalue::Len(..) | mir::Rvalue::Cast(..) | // (*) mir::Rvalue::ShallowInitBox(..) | // (*) mir::Rvalue::BinaryOp(..) | diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index c31c94495d0..0c2242b810b 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ b/compiler/rustc_const_eval/messages.ftl @@ -253,7 +253,7 @@ const_eval_non_const_fmt_macro_call = cannot call non-const formatting macro in {const_eval_const_context}s const_eval_non_const_fn_call = - cannot call non-const fn `{$def_path_str}` in {const_eval_const_context}s + cannot call non-const {$def_descr} `{$def_path_str}` in {const_eval_const_context}s const_eval_non_const_impl = impl defined here, but it is not `const` diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index f4257ad9671..e895c44199b 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -488,8 +488,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { Rvalue::Use(_) | Rvalue::CopyForDeref(..) | Rvalue::Repeat(..) - | Rvalue::Discriminant(..) - | Rvalue::Len(_) => {} + | Rvalue::Discriminant(..) => {} Rvalue::Aggregate(kind, ..) => { if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref() diff --git a/compiler/rustc_const_eval/src/check_consts/ops.rs b/compiler/rustc_const_eval/src/check_consts/ops.rs index afb7900c4b0..ebd680ac28a 100644 --- a/compiler/rustc_const_eval/src/check_consts/ops.rs +++ b/compiler/rustc_const_eval/src/check_consts/ops.rs @@ -304,6 +304,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> { } _ => ccx.dcx().create_err(errors::NonConstFnCall { span, + def_descr: ccx.tcx.def_descr(callee), def_path_str: ccx.tcx.def_path_str_with_args(callee, args), kind: ccx.const_kind(), }), diff --git a/compiler/rustc_const_eval/src/check_consts/qualifs.rs b/compiler/rustc_const_eval/src/check_consts/qualifs.rs index e244b50a4b5..b1b7fb406b1 100644 --- a/compiler/rustc_const_eval/src/check_consts/qualifs.rs +++ b/compiler/rustc_const_eval/src/check_consts/qualifs.rs @@ -230,9 +230,7 @@ where Q::in_any_value_of_ty(cx, rvalue.ty(cx.body, cx.tcx)) } - Rvalue::Discriminant(place) | Rvalue::Len(place) => { - in_place::<Q, _>(cx, in_local, place.as_ref()) - } + Rvalue::Discriminant(place) => in_place::<Q, _>(cx, in_local, place.as_ref()), Rvalue::CopyForDeref(place) => in_place::<Q, _>(cx, in_local, place.as_ref()), diff --git a/compiler/rustc_const_eval/src/check_consts/resolver.rs b/compiler/rustc_const_eval/src/check_consts/resolver.rs index 79df63a9e84..5a6e7ab2bee 100644 --- a/compiler/rustc_const_eval/src/check_consts/resolver.rs +++ b/compiler/rustc_const_eval/src/check_consts/resolver.rs @@ -197,7 +197,6 @@ where | mir::Rvalue::CopyForDeref(..) | mir::Rvalue::ThreadLocalRef(..) 
| mir::Rvalue::Repeat(..) - | mir::Rvalue::Len(..) | mir::Rvalue::BinaryOp(..) | mir::Rvalue::NullaryOp(..) | mir::Rvalue::UnaryOp(..) diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs index 6f51b09323d..4ff8aa9a3b4 100644 --- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs +++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs @@ -178,7 +178,8 @@ fn const_to_valtree_inner<'tcx>( | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) - | ty::CoroutineWitness(..) => Err(ValTreeCreationError::NonSupportedType(ty)), + | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) => Err(ValTreeCreationError::NonSupportedType(ty)), } } @@ -358,7 +359,10 @@ pub fn valtree_to_const_value<'tcx>( | ty::FnPtr(..) | ty::Str | ty::Slice(_) - | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()), + | ty::Dynamic(..) + | ty::UnsafeBinder(_) => { + bug!("no ValTree should have been created for type {:?}", ty.kind()) + } } } diff --git a/compiler/rustc_const_eval/src/errors.rs b/compiler/rustc_const_eval/src/errors.rs index 80236ee05b7..57534540019 100644 --- a/compiler/rustc_const_eval/src/errors.rs +++ b/compiler/rustc_const_eval/src/errors.rs @@ -192,6 +192,7 @@ pub(crate) struct NonConstFnCall { #[primary_span] pub span: Span, pub def_path_str: String, + pub def_descr: &'static str, pub kind: ConstContext, } diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index 1af8438534f..e9eca8814c3 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -90,6 +90,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>( | ty::CoroutineClosure(_, _) | ty::Coroutine(_, _) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Never | ty::Tuple(_) | ty::Error(_) => ConstValue::from_target_usize(0u64, &tcx), diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 6512675530a..7d0e0492792 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -505,6 +505,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // We don't want to do any queries, so there is not much we can do with ADTs. ty::Adt(..) => false, + ty::UnsafeBinder(ty) => is_very_trivially_sized(ty.skip_binder()), + ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) 
=> false, ty::Infer(ty::TyVar(_)) => false, diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index a26c2eca107..32e77fe1024 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -15,7 +15,7 @@ use tracing::{info, instrument, trace}; use super::{ FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy, - Projectable, Scalar, interp_ok, throw_ub, + Projectable, interp_ok, throw_ub, }; use crate::util; @@ -218,12 +218,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.write_repeat(operand, &dest)?; } - Len(place) => { - let src = self.eval_place(place)?; - let len = src.len(self)?; - self.write_scalar(Scalar::from_target_usize(len, self), &dest)?; - } - Ref(_, borrow_kind, place) => { let src = self.eval_place(place)?; let place = self.force_allocation(&src)?; diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index 6f101395ccf..d75df1ad442 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -768,6 +768,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { // Nothing to check. interp_ok(true) } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), // The above should be all the primitive types. The rest is compound, we // check them by visiting their fields/variants. ty::Adt(..) diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 36c7bed5c11..e14cd603c58 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -38,7 +38,8 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::FnPtr(..) | ty::Never | ty::Tuple(_) - | ty::Dynamic(_, _, _) => self.pretty_print_type(ty), + | ty::Dynamic(_, _, _) + | ty::UnsafeBinder(_) => self.pretty_print_type(ty), // Placeholders (all printed as `_` to uniformize them). ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error(_) => { diff --git a/compiler/rustc_data_structures/src/packed.rs b/compiler/rustc_data_structures/src/packed.rs index f54b12b5b53..c8921536530 100644 --- a/compiler/rustc_data_structures/src/packed.rs +++ b/compiler/rustc_data_structures/src/packed.rs @@ -18,6 +18,13 @@ impl Pu128 { } } +impl From<Pu128> for u128 { + #[inline] + fn from(value: Pu128) -> Self { + value.get() + } +} + impl From<u128> for Pu128 { #[inline] fn from(value: u128) -> Self { diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 3dc39fc131a..90f382e7226 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -1388,7 +1388,13 @@ pub fn install_ice_hook( // opt in to less-verbose backtraces by manually setting "RUST_BACKTRACE" // (e.g. `RUST_BACKTRACE=1`) if env::var_os("RUST_BACKTRACE").is_none() { - panic::set_backtrace_style(panic::BacktraceStyle::Full); + // HACK: this check is extremely dumb, but we don't really need it to be smarter since this should only happen in the test suite anyway. 
+ let ui_testing = std::env::args().any(|arg| arg == "-Zui-testing"); + if env!("CFG_RELEASE_CHANNEL") == "dev" && !ui_testing { + panic::set_backtrace_style(panic::BacktraceStyle::Short); + } else { + panic::set_backtrace_style(panic::BacktraceStyle::Full); + } } let using_internal_features = Arc::new(std::sync::atomic::AtomicBool::default()); diff --git a/compiler/rustc_error_codes/src/error_codes/E0015.md b/compiler/rustc_error_codes/src/error_codes/E0015.md index ac78f66adad..244cc476243 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0015.md +++ b/compiler/rustc_error_codes/src/error_codes/E0015.md @@ -7,7 +7,7 @@ fn create_some() -> Option<u8> { Some(1) } -// error: cannot call non-const fn `create_some` in constants +// error: cannot call non-const function `create_some` in constants const FOO: Option<u8> = create_some(); ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0665.md b/compiler/rustc_error_codes/src/error_codes/E0665.md index ae54d6d1579..caa94423377 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0665.md +++ b/compiler/rustc_error_codes/src/error_codes/E0665.md @@ -1,10 +1,9 @@ -#### Note: this error code is no longer emitted by the compiler. - -The `Default` trait was derived on an enum. +The `Default` trait was derived on an enum without specifying the default +variant. Erroneous code example: -```compile_fail +```compile_fail,E0665 #[derive(Default)] enum Food { Sweet, @@ -16,18 +15,30 @@ The `Default` cannot be derived on an enum for the simple reason that the compiler doesn't know which value to pick by default whereas it can for a struct as long as all its fields implement the `Default` trait as well. -If you still want to implement `Default` on your enum, you'll have to do it "by -hand": +For the case where the desired default variant has no payload, you can +annotate it with `#[default]` to derive it: ``` +#[derive(Default)] enum Food { + #[default] Sweet, Salty, } +``` + +In the case where the default variant does have a payload, you will have to +implement `Default` on your enum manually: + +``` +enum Food { + Sweet(i32), + Salty, +} impl Default for Food { fn default() -> Food { - Food::Sweet + Food::Sweet(1) } } ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0788.md b/compiler/rustc_error_codes/src/error_codes/E0788.md index d655e51fa66..ba138aed2d1 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0788.md +++ b/compiler/rustc_error_codes/src/error_codes/E0788.md @@ -1,26 +1,24 @@ -A `#[coverage]` attribute was applied to something which does not show up -in code coverage, or is too granular to be excluded from the coverage report. +A `#[coverage(off|on)]` attribute was found in a position where it is not +allowed. -For now, this attribute can only be applied to function, method, and closure -definitions. In the future, it may be added to statements, blocks, and -expressions, and for the time being, using this attribute in those places -will just emit an `unused_attributes` lint instead of this error. +Coverage attributes can be applied to: +- Function and method declarations that have a body, including trait methods + that have a default implementation. +- Closure expressions, in situations where attributes can be applied to + expressions. +- `impl` blocks (inherent or trait), and modules. 
Example of erroneous code: ```compile_fail,E0788 -#[coverage(off)] -struct Foo; - -#[coverage(on)] -const FOO: Foo = Foo; +unsafe extern "C" { + #[coverage(off)] + fn foreign_fn(); +} ``` -`#[coverage(off)]` tells the compiler to not generate coverage instrumentation -for a piece of code when the `-C instrument-coverage` flag is passed. Things -like structs and consts are not coverable code, and thus cannot do anything -with this attribute. - -If you wish to apply this attribute to all methods in an impl or module, -manually annotate each method; it is not possible to annotate the entire impl -with a `#[coverage]` attribute. +When using the `-C instrument-coverage` flag, coverage attributes act as a +hint to the compiler that it should instrument or not instrument the +corresponding function or enclosed functions. The precise effect of applying +a coverage attribute is not guaranteed and may change in future compiler +versions. diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 5a9b8c43e74..776de1988cc 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -157,9 +157,6 @@ declare_features! ( (accepted, const_refs_to_static, "1.83.0", Some(119618)), /// Allows implementing `Copy` for closures where possible (RFC 2132). (accepted, copy_closures, "1.26.0", Some(44490)), - /// Allows function attribute `#[coverage(on/off)]`, to control coverage - /// instrumentation of that function. - (accepted, coverage_attribute, "CURRENT_RUSTC_VERSION", Some(84605)), /// Allows `crate` in paths. (accepted, crate_in_paths, "1.30.0", Some(45477)), /// Allows users to provide classes for fenced code block using `class:classname`. diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 63e5ebb8688..4112ae80980 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -480,9 +480,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ template!(List: "address, kcfi, memory, thread"), DuplicatesOk, EncodeCrossCrate::No, experimental!(no_sanitize) ), - ungated!( + gated!( coverage, Normal, template!(OneOf: &[sym::off, sym::on]), ErrorPreceding, EncodeCrossCrate::No, + coverage_attribute, experimental!(coverage) ), ungated!( diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index ebb07195a28..d40823d2ed6 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -447,6 +447,9 @@ declare_features! ( (unstable, coroutine_clone, "1.65.0", Some(95360)), /// Allows defining coroutines. (unstable, coroutines, "1.21.0", Some(43122)), + /// Allows function attribute `#[coverage(on/off)]`, to control coverage + /// instrumentation of that function. + (unstable, coverage_attribute, "1.74.0", Some(84605)), /// Allows non-builtin attributes in inner attribute position. (unstable, custom_inner_attributes, "1.30.0", Some(54726)), /// Allows custom test frameworks with `#![test_runner]` and `#[test_case]`. 
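Since the attribute is moved back behind the `coverage_attribute` feature gate here, user code once again needs a nightly toolchain and an explicit opt-in; roughly:

```rust
// Only builds on nightly now that `#[coverage]` is unstable again.
#![feature(coverage_attribute)]

// Excluded from `-C instrument-coverage` instrumentation.
#[coverage(off)]
fn helper_not_counted() -> u32 {
    42
}

// Explicitly instrumented (also the default for ordinary functions).
#[coverage(on)]
fn counted() -> u32 {
    helper_not_counted() + 1
}

fn main() {
    assert_eq!(counted(), 43);
}
```

Per the reworked E0788 text above, the same attribute is also accepted on `impl` blocks and modules, where it acts as a hint for the enclosed functions.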
diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index 2eea65125b0..3b98f358b1e 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -673,37 +673,6 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err let impl_span = tcx.def_span(checker.impl_def_id); let self_ty = tcx.impl_trait_ref(checker.impl_def_id).unwrap().instantiate_identity().self_ty(); - // If an ADT is repr(transparent)... - if let ty::Adt(def, args) = *self_ty.kind() - && def.repr().transparent() - { - // FIXME(compiler-errors): This should and could be deduplicated into a query. - // Find the nontrivial field. - let adt_typing_env = ty::TypingEnv::non_body_analysis(tcx, def.did()); - let nontrivial_field = def.all_fields().find(|field_def| { - let field_ty = tcx.type_of(field_def.did).instantiate_identity(); - !tcx.layout_of(adt_typing_env.as_query_input(field_ty)) - .is_ok_and(|layout| layout.layout.is_1zst()) - }); - - if let Some(nontrivial_field) = nontrivial_field { - // Check that the nontrivial field implements `PointerLike`. - let nontrivial_field = nontrivial_field.ty(tcx, args); - let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); - let ocx = ObligationCtxt::new(&infcx); - ocx.register_bound( - ObligationCause::misc(impl_span, checker.impl_def_id), - param_env, - nontrivial_field, - tcx.lang_items().pointer_like().unwrap(), - ); - // FIXME(dyn-star): We should regionck this implementation. - if ocx.select_all_or_error().is_empty() { - return Ok(()); - } - } - } - let is_permitted_primitive = match *self_ty.kind() { ty::Adt(def, _) => def.is_box(), ty::Uint(..) | ty::Int(..) | ty::RawPtr(..) | ty::Ref(..) | ty::FnPtr(..) => true, @@ -717,6 +686,74 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err return Ok(()); } + let why_disqualified = match *self_ty.kind() { + // If an ADT is repr(transparent) + ty::Adt(self_ty_def, args) => { + if self_ty_def.repr().transparent() { + // FIXME(compiler-errors): This should and could be deduplicated into a query. + // Find the nontrivial field. + let adt_typing_env = ty::TypingEnv::non_body_analysis(tcx, self_ty_def.did()); + let nontrivial_field = self_ty_def.all_fields().find(|field_def| { + let field_ty = tcx.type_of(field_def.did).instantiate_identity(); + !tcx.layout_of(adt_typing_env.as_query_input(field_ty)) + .is_ok_and(|layout| layout.layout.is_1zst()) + }); + + if let Some(nontrivial_field) = nontrivial_field { + // Check that the nontrivial field implements `PointerLike`. + let nontrivial_field_ty = nontrivial_field.ty(tcx, args); + let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); + let ocx = ObligationCtxt::new(&infcx); + ocx.register_bound( + ObligationCause::misc(impl_span, checker.impl_def_id), + param_env, + nontrivial_field_ty, + tcx.lang_items().pointer_like().unwrap(), + ); + // FIXME(dyn-star): We should regionck this implementation. 
+ if ocx.select_all_or_error().is_empty() { + return Ok(()); + } else { + format!( + "the field `{field_name}` of {descr} `{self_ty}` \ + does not implement `PointerLike`", + field_name = nontrivial_field.name, + descr = self_ty_def.descr() + ) + } + } else { + format!( + "the {descr} `{self_ty}` is `repr(transparent)`, \ + but does not have a non-trivial field (it is zero-sized)", + descr = self_ty_def.descr() + ) + } + } else if self_ty_def.is_box() { + // If we got here, then the `layout.is_pointer_like()` check failed + // and this box is not a thin pointer. + + String::from("boxes of dynamically-sized types are too large to be `PointerLike`") + } else { + format!( + "the {descr} `{self_ty}` is not `repr(transparent)`", + descr = self_ty_def.descr() + ) + } + } + ty::Ref(..) => { + // If we got here, then the `layout.is_pointer_like()` check failed + // and this reference is not a thin pointer. + String::from("references to dynamically-sized types are too large to be `PointerLike`") + } + ty::Dynamic(..) | ty::Foreign(..) => { + String::from("types of dynamic or unknown size may not implement `PointerLike`") + } + _ => { + // This is a white lie; it is true everywhere outside the standard library. + format!("only user-defined sized types are eligible for `impl PointerLike`") + } + }; + Err(tcx .dcx() .struct_span_err( @@ -724,5 +761,6 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err "implementation must be applied to type that has the same ABI as a pointer, \ or is `repr(transparent)` and whose field is `PointerLike`", ) + .with_note(why_disqualified) .emit()) } diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs index 8f6f5b5f222..a86dede48bf 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs @@ -178,7 +178,8 @@ impl<'tcx> InherentCollect<'tcx> { | ty::Ref(..) | ty::Never | ty::FnPtr(..) - | ty::Tuple(..) => self.check_primitive_impl(id, self_ty), + | ty::Tuple(..) + | ty::UnsafeBinder(_) => self.check_primitive_impl(id, self_ty), ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) | ty::Param(_) => { Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span })) } diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs index eca85c22a40..7d651155781 100644 --- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs +++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs @@ -225,7 +225,8 @@ pub(crate) fn orphan_check_impl( | ty::FnDef(..) | ty::FnPtr(..) | ty::Never - | ty::Tuple(..) => (LocalImpl::Allow, NonlocalImpl::DisallowOther), + | ty::Tuple(..) + | ty::UnsafeBinder(_) => (LocalImpl::Allow, NonlocalImpl::DisallowOther), ty::Closure(..) | ty::CoroutineClosure(..) 
diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs index b56222763d0..2154568c512 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs @@ -2318,13 +2318,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self.lower_fn_ty(hir_ty.hir_id, bf.safety, bf.abi, bf.decl, None, Some(hir_ty)), ) } - hir::TyKind::UnsafeBinder(_binder) => { - let guar = self - .dcx() - .struct_span_err(hir_ty.span, "unsafe binders are not yet implemented") - .emit(); - Ty::new_error(tcx, guar) - } + hir::TyKind::UnsafeBinder(binder) => Ty::new_unsafe_binder( + tcx, + ty::Binder::bind_with_vars( + self.lower_ty(binder.inner_ty), + tcx.late_bound_vars(hir_ty.hir_id), + ), + ), hir::TyKind::TraitObject(bounds, lifetime, repr) => { if let Some(guar) = self.prohibit_or_lint_bare_trait_object_ty(hir_ty) { // Don't continue with type analysis if the `dyn` keyword is missing diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 415b23d812b..e954d2b9ea4 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -322,6 +322,11 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraints_from_sig(current, sig_tys.with(hdr), variance); } + ty::UnsafeBinder(ty) => { + // FIXME(unsafe_binders): This is covariant, right? + self.add_constraints_from_ty(current, ty.skip_binder(), variance); + } + ty::Error(_) => { // we encounter this when walking the trait references for object // types, where we use Error as the Self type diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index de2a7726e9b..5c1c5892190 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -402,8 +402,10 @@ impl<'a> State<'a> { } hir::TyKind::Path(ref qpath) => self.print_qpath(qpath, false), hir::TyKind::TraitObject(bounds, lifetime, syntax) => { - if syntax == ast::TraitObjectSyntax::Dyn { - self.word_space("dyn"); + match syntax { + ast::TraitObjectSyntax::Dyn => self.word_nbsp("dyn"), + ast::TraitObjectSyntax::DynStar => self.word_nbsp("dyn*"), + ast::TraitObjectSyntax::None => {} } let mut first = true; for bound in bounds { diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index 243313ee876..87300f5bb83 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -77,12 +77,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut prior_non_diverging_arms = vec![]; // Used only for diagnostics. let mut prior_arm = None; for arm in arms { + self.diverges.set(Diverges::Maybe); + if let Some(e) = &arm.guard { - self.diverges.set(Diverges::Maybe); self.check_expr_has_type_or_error(e, tcx.types.bool, |_| {}); + + // FIXME: If this is the first arm and the pattern is irrefutable, + // e.g. `_` or `x`, and the guard diverges, then the whole match + // may also be considered to diverge. We should warn on all subsequent + // arms, too, just like we do for diverging scrutinees above. } - self.diverges.set(Diverges::Maybe); + // N.B. We don't reset diverges here b/c we want to warn in the arm + // if the guard diverges, like: `x if { loop {} } => f()`, and we + // also want to consider the arm to diverge itself. 
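A small example of the situation the comment above is about: the guard itself diverges, so the arm body is unreachable and the arm is also treated as diverging (this is an illustration written for this note, not test code from the patch; it compiles, with an unreachable-code lint expected on the arm body):

```rust
fn f(x: i32) -> i32 {
    match x {
        // The guard never finishes, so this arm's body can never run; with the
        // change above the arm itself is also considered to diverge.
        n if { loop {} } => n,
        _ => 0,
    }
}

fn main() {
    // Deliberately not calling `f`, since the guard would loop forever;
    // this use just keeps the example a complete, warning-light program.
    let _ = f as fn(i32) -> i32;
}
```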
let arm_ty = self.check_expr_with_expectation(arm.body, expected); all_arms_diverge &= self.diverges.get(); diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index 59c06cbc5b5..7b07e0ee939 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -116,6 +116,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&f) => self.pointer_kind(f, span)?, }, + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + // Pointers to foreign types are thin, despite being unsized ty::Foreign(..) => Some(PointerKind::Thin), // We should really try to normalize here. @@ -721,13 +723,11 @@ impl<'a, 'tcx> CastCheck<'tcx> { use rustc_middle::ty::cast::IntTy::*; if self.cast_ty.is_dyn_star() { - if fcx.tcx.features().dyn_star() { - span_bug!(self.span, "should be handled by `coerce`"); - } else { - // Report "casting is invalid" rather than "non-primitive cast" - // if the feature is not enabled. - return Err(CastError::IllegalCast); - } + // This coercion will fail if the feature is not enabled, OR + // if the coercion is (currently) illegal (e.g. `dyn* Foo + Send` + // to `dyn* Foo`). Report "casting is invalid" rather than + // "non-primitive cast". + return Err(CastError::IllegalCast); } let (t_from, t_cast) = match (CastTy::from_ty(self.expr_ty), CastTy::from_ty(self.cast_ty)) diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index 541e16e42a7..f9e4a592d92 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -737,8 +737,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return Err(TypeError::Mismatch); } - if let ty::Dynamic(a_data, _, _) = a.kind() - && let ty::Dynamic(b_data, _, _) = b.kind() + // FIXME(dyn_star): We should probably allow things like casting from + // `dyn* Foo + Send` to `dyn* Foo`. 
+ if let ty::Dynamic(a_data, _, ty::DynStar) = a.kind() + && let ty::Dynamic(b_data, _, ty::DynStar) = b.kind() && a_data.principal_def_id() == b_data.principal_def_id() { return self.unify_and(a, b, |_| vec![]); diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index caea53d9200..a7d12cae7b5 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -574,8 +574,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_expr_index(base, idx, expr, brackets_span) } ExprKind::Yield(value, _) => self.check_expr_yield(value, expr), - ExprKind::UnsafeBinderCast(kind, expr, ty) => { - self.check_expr_unsafe_binder_cast(kind, expr, ty, expected) + ExprKind::UnsafeBinderCast(kind, inner_expr, ty) => { + self.check_expr_unsafe_binder_cast(expr.span, kind, inner_expr, ty, expected) } ExprKind::Err(guar) => Ty::new_error(tcx, guar), } @@ -1649,14 +1649,94 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn check_expr_unsafe_binder_cast( &self, - _kind: hir::UnsafeBinderCastKind, - expr: &'tcx hir::Expr<'tcx>, - _hir_ty: Option<&'tcx hir::Ty<'tcx>>, - _expected: Expectation<'tcx>, + span: Span, + kind: hir::UnsafeBinderCastKind, + inner_expr: &'tcx hir::Expr<'tcx>, + hir_ty: Option<&'tcx hir::Ty<'tcx>>, + expected: Expectation<'tcx>, ) -> Ty<'tcx> { - let guar = - self.dcx().struct_span_err(expr.span, "unsafe binders are not yet implemented").emit(); - Ty::new_error(self.tcx, guar) + self.dcx().span_err(inner_expr.span, "unsafe binder casts are not fully implemented"); + + match kind { + hir::UnsafeBinderCastKind::Wrap => { + let ascribed_ty = + hir_ty.map(|hir_ty| self.lower_ty_saving_user_provided_ty(hir_ty)); + let expected_ty = expected.only_has_type(self); + let binder_ty = match (ascribed_ty, expected_ty) { + (Some(ascribed_ty), Some(expected_ty)) => { + self.demand_eqtype(inner_expr.span, expected_ty, ascribed_ty); + expected_ty + } + (Some(ty), None) | (None, Some(ty)) => ty, + // This will always cause a structural resolve error, but we do it + // so we don't need to manually report an E0282 both on this codepath + // and in the others; it all happens in `structurally_resolve_type`. + (None, None) => self.next_ty_var(inner_expr.span), + }; + + let binder_ty = self.structurally_resolve_type(inner_expr.span, binder_ty); + let hint_ty = match *binder_ty.kind() { + ty::UnsafeBinder(binder) => self.instantiate_binder_with_fresh_vars( + inner_expr.span, + infer::BoundRegionConversionTime::HigherRankedType, + binder.into(), + ), + ty::Error(e) => Ty::new_error(self.tcx, e), + _ => { + let guar = self + .dcx() + .struct_span_err( + hir_ty.map_or(span, |hir_ty| hir_ty.span), + format!( + "`wrap_binder!()` can only wrap into unsafe binder, not {}", + binder_ty.sort_string(self.tcx) + ), + ) + .with_note("unsafe binders are the only valid output of wrap") + .emit(); + Ty::new_error(self.tcx, guar) + } + }; + + self.check_expr_has_type_or_error(inner_expr, hint_ty, |_| {}); + + binder_ty + } + hir::UnsafeBinderCastKind::Unwrap => { + let ascribed_ty = + hir_ty.map(|hir_ty| self.lower_ty_saving_user_provided_ty(hir_ty)); + let hint_ty = ascribed_ty.unwrap_or_else(|| self.next_ty_var(inner_expr.span)); + // FIXME(unsafe_binders): coerce here if needed? + let binder_ty = self.check_expr_has_type_or_error(inner_expr, hint_ty, |_| {}); + + // Unwrap the binder. This will be ambiguous if it's an infer var, and will error + // if it's not an unsafe binder. 
+ let binder_ty = self.structurally_resolve_type(inner_expr.span, binder_ty); + match *binder_ty.kind() { + ty::UnsafeBinder(binder) => self.instantiate_binder_with_fresh_vars( + inner_expr.span, + infer::BoundRegionConversionTime::HigherRankedType, + binder.into(), + ), + ty::Error(e) => Ty::new_error(self.tcx, e), + _ => { + let guar = self + .dcx() + .struct_span_err( + hir_ty.map_or(inner_expr.span, |hir_ty| hir_ty.span), + format!( + "expected unsafe binder, found {} as input of \ + `unwrap_binder!()`", + binder_ty.sort_string(self.tcx) + ), + ) + .with_note("only an unsafe binder type can be unwrapped") + .emit(); + Ty::new_error(self.tcx, guar) + } + } + } + } } fn check_expr_array( @@ -2720,12 +2800,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Check field access expressions, this works for both structs and tuples. /// Returns the Ty of the field. /// - /// ```not_rust - /// base.field - /// ^^^^^^^^^^ expr - /// ^^^^ base - /// ^^^^^ field - /// ``` + /// ```ignore (illustrative) + /// base.field + /// ^^^^^^^^^^ expr + /// ^^^^ base + /// ^^^^^ field + /// ``` fn check_expr_field( &self, expr: &'tcx hir::Expr<'tcx>, diff --git a/compiler/rustc_index/Cargo.toml b/compiler/rustc_index/Cargo.toml index 33e8e2824c7..f27db7a5400 100644 --- a/compiler/rustc_index/Cargo.toml +++ b/compiler/rustc_index/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -rustc_index_macros = { path = "../rustc_index_macros", default-features = false } +rustc_index_macros = { path = "../rustc_index_macros" } rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } smallvec = "1.8.1" diff --git a/compiler/rustc_index_macros/Cargo.toml b/compiler/rustc_index_macros/Cargo.toml index a7c2a1804dd..98bc1b6a29b 100644 --- a/compiler/rustc_index_macros/Cargo.toml +++ b/compiler/rustc_index_macros/Cargo.toml @@ -12,5 +12,4 @@ proc-macro2 = "1" quote = "1" [features] -default = ["nightly"] nightly = [] diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index fe66d306ceb..c47f27e871f 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -441,6 +441,7 @@ impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Canonicalizer<'cx, 'tcx> { | ty::FnDef(..) | ty::FnPtr(..) | ty::Dynamic(..) + | ty::UnsafeBinder(_) | ty::Never | ty::Tuple(..) | ty::Alias(..) diff --git a/compiler/rustc_infer/src/infer/canonical/query_response.rs b/compiler/rustc_infer/src/infer/canonical/query_response.rs index 1d3d32ef749..d5aab4781de 100644 --- a/compiler/rustc_infer/src/infer/canonical/query_response.rs +++ b/compiler/rustc_infer/src/infer/canonical/query_response.rs @@ -316,16 +316,6 @@ impl<'tcx> InferCtxt<'tcx> { }), ); - // ...also include the query member constraints. 
- output_query_region_constraints.member_constraints.extend( - query_response - .value - .region_constraints - .member_constraints - .iter() - .map(|p_c| instantiate_value(self.tcx, &result_args, p_c.clone())), - ); - let user_result: R = query_response.instantiate_projected(self.tcx, &result_args, |q_r| q_r.value.clone()); @@ -643,7 +633,7 @@ pub fn make_query_region_constraints<'tcx>( outlives_obligations: impl Iterator<Item = (Ty<'tcx>, ty::Region<'tcx>, ConstraintCategory<'tcx>)>, region_constraints: &RegionConstraintData<'tcx>, ) -> QueryRegionConstraints<'tcx> { - let RegionConstraintData { constraints, verifys, member_constraints } = region_constraints; + let RegionConstraintData { constraints, verifys } = region_constraints; assert!(verifys.is_empty()); @@ -674,5 +664,5 @@ pub fn make_query_region_constraints<'tcx>( })) .collect(); - QueryRegionConstraints { outlives, member_constraints: member_constraints.clone() } + QueryRegionConstraints { outlives } } diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 5086b741a83..c5a56005c06 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -17,7 +17,6 @@ pub use relate::StructurallyRelateAliases; pub use relate::combine::PredicateEmittingRelation; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; -use rustc_data_structures::sync::Lrc; use rustc_data_structures::undo_log::{Rollback, UndoLogs}; use rustc_data_structures::unify as ut; use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed}; @@ -685,26 +684,6 @@ impl<'tcx> InferCtxt<'tcx> { self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); } - /// Require that the region `r` be equal to one of the regions in - /// the set `regions`. - #[instrument(skip(self), level = "debug")] - pub fn add_member_constraint( - &self, - key: ty::OpaqueTypeKey<'tcx>, - definition_span: Span, - hidden_ty: Ty<'tcx>, - region: ty::Region<'tcx>, - in_regions: Lrc<Vec<ty::Region<'tcx>>>, - ) { - self.inner.borrow_mut().unwrap_region_constraints().add_member_constraint( - key, - definition_span, - hidden_ty, - region, - in_regions, - ); - } - /// Processes a `Coerce` predicate from the fulfillment context. /// This is NOT the preferred way to handle coercion, which is to /// invoke `FnCtxt::coerce` or a similar method (see `coercion.rs`). 
diff --git a/compiler/rustc_infer/src/infer/opaque_types/mod.rs b/compiler/rustc_infer/src/infer/opaque_types/mod.rs index 8650c20559f..137d438a479 100644 --- a/compiler/rustc_infer/src/infer/opaque_types/mod.rs +++ b/compiler/rustc_infer/src/infer/opaque_types/mod.rs @@ -1,6 +1,5 @@ use hir::def_id::{DefId, LocalDefId}; use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::sync::Lrc; use rustc_hir as hir; use rustc_middle::bug; use rustc_middle::traits::ObligationCause; @@ -8,8 +7,7 @@ use rustc_middle::traits::solve::Goal; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::{ - self, GenericArgKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, - TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, + self, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, }; use rustc_span::Span; use tracing::{debug, instrument}; @@ -181,289 +179,6 @@ impl<'tcx> InferCtxt<'tcx> { Err(TypeError::Sorts(ExpectedFound::new(a, b))) } } - - /// Given the map `opaque_types` containing the opaque - /// `impl Trait` types whose underlying, hidden types are being - /// inferred, this method adds constraints to the regions - /// appearing in those underlying hidden types to ensure that they - /// at least do not refer to random scopes within the current - /// function. These constraints are not (quite) sufficient to - /// guarantee that the regions are actually legal values; that - /// final condition is imposed after region inference is done. - /// - /// # The Problem - /// - /// Let's work through an example to explain how it works. Assume - /// the current function is as follows: - /// - /// ```text - /// fn foo<'a, 'b>(..) -> (impl Bar<'a>, impl Bar<'b>) - /// ``` - /// - /// Here, we have two `impl Trait` types whose values are being - /// inferred (the `impl Bar<'a>` and the `impl - /// Bar<'b>`). Conceptually, this is sugar for a setup where we - /// define underlying opaque types (`Foo1`, `Foo2`) and then, in - /// the return type of `foo`, we *reference* those definitions: - /// - /// ```text - /// type Foo1<'x> = impl Bar<'x>; - /// type Foo2<'x> = impl Bar<'x>; - /// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. } - /// // ^^^^ ^^ - /// // | | - /// // | args - /// // def_id - /// ``` - /// - /// As indicating in the comments above, each of those references - /// is (in the compiler) basically generic parameters (`args`) - /// applied to the type of a suitable `def_id` (which identifies - /// `Foo1` or `Foo2`). - /// - /// Now, at this point in compilation, what we have done is to - /// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with - /// fresh inference variables C1 and C2. We wish to use the values - /// of these variables to infer the underlying types of `Foo1` and - /// `Foo2`. That is, this gives rise to higher-order (pattern) unification - /// constraints like: - /// - /// ```text - /// for<'a> (Foo1<'a> = C1) - /// for<'b> (Foo1<'b> = C2) - /// ``` - /// - /// For these equation to be satisfiable, the types `C1` and `C2` - /// can only refer to a limited set of regions. For example, `C1` - /// can only refer to `'static` and `'a`, and `C2` can only refer - /// to `'static` and `'b`. The job of this function is to impose that - /// constraint. - /// - /// Up to this point, C1 and C2 are basically just random type - /// inference variables, and hence they may contain arbitrary - /// regions. In fact, it is fairly likely that they do! 
Consider - /// this possible definition of `foo`: - /// - /// ```text - /// fn foo<'a, 'b>(x: &'a i32, y: &'b i32) -> (impl Bar<'a>, impl Bar<'b>) { - /// (&*x, &*y) - /// } - /// ``` - /// - /// Here, the values for the concrete types of the two impl - /// traits will include inference variables: - /// - /// ```text - /// &'0 i32 - /// &'1 i32 - /// ``` - /// - /// Ordinarily, the subtyping rules would ensure that these are - /// sufficiently large. But since `impl Bar<'a>` isn't a specific - /// type per se, we don't get such constraints by default. This - /// is where this function comes into play. It adds extra - /// constraints to ensure that all the regions which appear in the - /// inferred type are regions that could validly appear. - /// - /// This is actually a bit of a tricky constraint in general. We - /// want to say that each variable (e.g., `'0`) can only take on - /// values that were supplied as arguments to the opaque type - /// (e.g., `'a` for `Foo1<'a>`) or `'static`, which is always in - /// scope. We don't have a constraint quite of this kind in the current - /// region checker. - /// - /// # The Solution - /// - /// We generally prefer to make `<=` constraints, since they - /// integrate best into the region solver. To do that, we find the - /// "minimum" of all the arguments that appear in the args: that - /// is, some region which is less than all the others. In the case - /// of `Foo1<'a>`, that would be `'a` (it's the only choice, after - /// all). Then we apply that as a least bound to the variables - /// (e.g., `'a <= '0`). - /// - /// In some cases, there is no minimum. Consider this example: - /// - /// ```text - /// fn baz<'a, 'b>() -> impl Trait<'a, 'b> { ... } - /// ``` - /// - /// Here we would report a more complex "in constraint", like `'r - /// in ['a, 'b, 'static]` (where `'r` is some region appearing in - /// the hidden type). - /// - /// # Constrain regions, not the hidden concrete type - /// - /// Note that generating constraints on each region `Rc` is *not* - /// the same as generating an outlives constraint on `Tc` itself. - /// For example, if we had a function like this: - /// - /// ``` - /// # #![feature(type_alias_impl_trait)] - /// # fn main() {} - /// # trait Foo<'a> {} - /// # impl<'a, T> Foo<'a> for (&'a u32, T) {} - /// fn foo<'a, T>(x: &'a u32, y: T) -> impl Foo<'a> { - /// (x, y) - /// } - /// - /// // Equivalent to: - /// # mod dummy { use super::*; - /// type FooReturn<'a, T> = impl Foo<'a>; - /// fn foo<'a, T>(x: &'a u32, y: T) -> FooReturn<'a, T> { - /// (x, y) - /// } - /// # } - /// ``` - /// - /// then the hidden type `Tc` would be `(&'0 u32, T)` (where `'0` - /// is an inference variable). If we generated a constraint that - /// `Tc: 'a`, then this would incorrectly require that `T: 'a` -- - /// but this is not necessary, because the opaque type we - /// create will be allowed to reference `T`. So we only generate a - /// constraint that `'0: 'a`. - #[instrument(level = "debug", skip(self))] - pub fn register_member_constraints( - &self, - opaque_type_key: OpaqueTypeKey<'tcx>, - concrete_ty: Ty<'tcx>, - span: Span, - ) { - let concrete_ty = self.resolve_vars_if_possible(concrete_ty); - debug!(?concrete_ty); - - let variances = self.tcx.variances_of(opaque_type_key.def_id); - debug!(?variances); - - // For a case like `impl Foo<'a, 'b>`, we would generate a constraint - // `'r in ['a, 'b, 'static]` for each region `'r` that appears in the - // hidden type (i.e., it must be equal to `'a`, `'b`, or `'static`). 
- // - // `conflict1` and `conflict2` are the two region bounds that we - // detected which were unrelated. They are used for diagnostics. - - // Create the set of choice regions: each region in the hidden - // type can be equal to any of the region parameters of the - // opaque type definition. - let choice_regions: Lrc<Vec<ty::Region<'tcx>>> = Lrc::new( - opaque_type_key - .args - .iter() - .enumerate() - .filter(|(i, _)| variances[*i] == ty::Invariant) - .filter_map(|(_, arg)| match arg.unpack() { - GenericArgKind::Lifetime(r) => Some(r), - GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, - }) - .chain(std::iter::once(self.tcx.lifetimes.re_static)) - .collect(), - ); - - // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's - // not currently sound until we have existential regions. - concrete_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { - tcx: self.tcx, - op: |r| { - self.add_member_constraint( - opaque_type_key, - span, - concrete_ty, - r, - Lrc::clone(&choice_regions), - ) - }, - }); - } -} - -/// Visitor that requires that (almost) all regions in the type visited outlive -/// `least_region`. We cannot use `push_outlives_components` because regions in -/// closure signatures are not included in their outlives components. We need to -/// ensure all regions outlive the given bound so that we don't end up with, -/// say, `ReVar` appearing in a return type and causing ICEs when other -/// functions end up with region constraints involving regions from other -/// functions. -/// -/// We also cannot use `for_each_free_region` because for closures it includes -/// the regions parameters from the enclosing item. -/// -/// We ignore any type parameters because impl trait values are assumed to -/// capture all the in-scope type parameters. -struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { - tcx: TyCtxt<'tcx>, - op: OP, -} - -impl<'tcx, OP> TypeVisitor<TyCtxt<'tcx>> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP> -where - OP: FnMut(ty::Region<'tcx>), -{ - fn visit_binder<T: TypeVisitable<TyCtxt<'tcx>>>(&mut self, t: &ty::Binder<'tcx, T>) { - t.super_visit_with(self); - } - - fn visit_region(&mut self, r: ty::Region<'tcx>) { - match *r { - // ignore bound regions, keep visiting - ty::ReBound(_, _) => {} - _ => (self.op)(r), - } - } - - fn visit_ty(&mut self, ty: Ty<'tcx>) { - // We're only interested in types involving regions - if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { - return; - } - - match ty.kind() { - ty::Closure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_closure().upvar_tys() { - upvar.visit_with(self); - } - args.as_closure().sig_as_fn_ptr_ty().visit_with(self); - } - - ty::CoroutineClosure(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - - for upvar in args.as_coroutine_closure().upvar_tys() { - upvar.visit_with(self); - } - - args.as_coroutine_closure().signature_parts_ty().visit_with(self); - } - - ty::Coroutine(_, args) => { - // Skip lifetime parameters of the enclosing item(s) - // Also skip the witness type, because that has no free regions. - - for upvar in args.as_coroutine().upvar_tys() { - upvar.visit_with(self); - } - args.as_coroutine().return_ty().visit_with(self); - args.as_coroutine().yield_ty().visit_with(self); - args.as_coroutine().resume_ty().visit_with(self); - } - - ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => { - // Skip lifetime parameters that are not captures. 
- let variances = self.tcx.variances_of(*def_id); - - for (v, s) in std::iter::zip(variances, args.iter()) { - if *v != ty::Bivariant { - s.visit_with(self); - } - } - } - - _ => { - ty.super_visit_with(self); - } - } - } } impl<'tcx> InferCtxt<'tcx> { diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index 61ce86e7767..6dce4b2b21d 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -4,7 +4,6 @@ use std::ops::Range; use std::{cmp, fmt, mem}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::Lrc; use rustc_data_structures::undo_log::UndoLogs; use rustc_data_structures::unify as ut; use rustc_index::IndexVec; @@ -12,7 +11,6 @@ use rustc_macros::{TypeFoldable, TypeVisitable}; use rustc_middle::infer::unify_key::{RegionVariableValue, RegionVidKey}; use rustc_middle::ty::{self, ReBound, ReStatic, ReVar, Region, RegionVid, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; -use rustc_span::Span; use tracing::{debug, instrument}; use self::CombineMapType::*; @@ -22,8 +20,6 @@ use crate::infer::snapshot::undo_log::{InferCtxtUndoLogs, Snapshot}; mod leak_check; -pub use rustc_middle::infer::MemberConstraint; - #[derive(Clone, Default)] pub struct RegionConstraintStorage<'tcx> { /// For each `RegionVid`, the corresponding `RegionVariableOrigin`. @@ -73,11 +69,6 @@ pub struct RegionConstraintData<'tcx> { /// be a region variable (or neither, as it happens). pub constraints: Vec<(Constraint<'tcx>, SubregionOrigin<'tcx>)>, - /// Constraints of the form `R0 member of [R1, ..., Rn]`, meaning that - /// `R0` must be equal to one of the regions `R1..Rn`. These occur - /// with `impl Trait` quite frequently. - pub member_constraints: Vec<MemberConstraint<'tcx>>, - /// A "verify" is something that we need to verify after inference /// is done, but which does not directly affect inference in any /// way. @@ -466,29 +457,6 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { } } - pub(super) fn add_member_constraint( - &mut self, - key: ty::OpaqueTypeKey<'tcx>, - definition_span: Span, - hidden_ty: Ty<'tcx>, - member_region: ty::Region<'tcx>, - choice_regions: Lrc<Vec<ty::Region<'tcx>>>, - ) { - debug!("member_constraint({:?} in {:#?})", member_region, choice_regions); - - if choice_regions.iter().any(|&r| r == member_region) { - return; - } - - self.storage.data.member_constraints.push(MemberConstraint { - key, - definition_span, - hidden_ty, - member_region, - choice_regions, - }); - } - #[instrument(skip(self, origin), level = "debug")] pub(super) fn make_subregion( &mut self, @@ -745,8 +713,8 @@ impl<'tcx> RegionConstraintData<'tcx> { /// Returns `true` if this region constraint data contains no constraints, and `false` /// otherwise. pub fn is_empty(&self) -> bool { - let RegionConstraintData { constraints, member_constraints, verifys } = self; - constraints.is_empty() && member_constraints.is_empty() && verifys.is_empty() + let RegionConstraintData { constraints, verifys } = self; + constraints.is_empty() && verifys.is_empty() } } diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index c0371b1f606..ef9aa11ef7b 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -1284,6 +1284,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { FfiSafe } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + ty::Param(..) 
| ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) | ty::Infer(..) diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 2b3cb14f9e9..8b1526bc747 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -1023,6 +1023,7 @@ declare_lint! { "`if`, `match`, `while` and `return` do not need parentheses" } +#[derive(Default)] pub(crate) struct UnusedParens { with_self_ty_parens: bool, /// `1 as (i32) < 2` parses to ExprKind::Lt @@ -1030,12 +1031,6 @@ pub(crate) struct UnusedParens { parens_in_cast_in_lt: Vec<ast::NodeId>, } -impl Default for UnusedParens { - fn default() -> Self { - Self { with_self_ty_parens: false, parens_in_cast_in_lt: Vec::new() } - } -} - impl_lint_pass!(UnusedParens => [UNUSED_PARENS]); impl UnusedDelimLint for UnusedParens { diff --git a/compiler/rustc_llvm/Cargo.toml b/compiler/rustc_llvm/Cargo.toml index b29d6b79250..79a6454dbb9 100644 --- a/compiler/rustc_llvm/Cargo.toml +++ b/compiler/rustc_llvm/Cargo.toml @@ -10,5 +10,7 @@ libc = "0.2.73" [build-dependencies] # tidy-alphabetical-start -cc = "1.1.23" +# Pinned so `cargo update` bumps don't cause breakage. Please also update the +# pinned `cc` in `rustc_codegen_ssa` if you update `cc` here. +cc = "=1.2.5" # tidy-alphabetical-end diff --git a/compiler/rustc_middle/src/infer/canonical.rs b/compiler/rustc_middle/src/infer/canonical.rs index ac55497f8b3..0f408375e05 100644 --- a/compiler/rustc_middle/src/infer/canonical.rs +++ b/compiler/rustc_middle/src/infer/canonical.rs @@ -30,7 +30,6 @@ pub use rustc_type_ir as ir; pub use rustc_type_ir::{CanonicalTyVarKind, CanonicalVarKind}; use smallvec::SmallVec; -use crate::infer::MemberConstraint; use crate::mir::ConstraintCategory; use crate::ty::{self, GenericArg, List, Ty, TyCtxt, TypeFlags, TypeVisitableExt}; @@ -91,14 +90,13 @@ pub struct QueryResponse<'tcx, R> { #[derive(HashStable, TypeFoldable, TypeVisitable)] pub struct QueryRegionConstraints<'tcx> { pub outlives: Vec<QueryOutlivesConstraint<'tcx>>, - pub member_constraints: Vec<MemberConstraint<'tcx>>, } impl QueryRegionConstraints<'_> { /// Represents an empty (trivially true) set of region /// constraints. pub fn is_empty(&self) -> bool { - self.outlives.is_empty() && self.member_constraints.is_empty() + self.outlives.is_empty() } } diff --git a/compiler/rustc_middle/src/infer/mod.rs b/compiler/rustc_middle/src/infer/mod.rs index 19fe9e5a54f..3dfcf90cb93 100644 --- a/compiler/rustc_middle/src/infer/mod.rs +++ b/compiler/rustc_middle/src/infer/mod.rs @@ -1,34 +1,2 @@ pub mod canonical; pub mod unify_key; - -use rustc_data_structures::sync::Lrc; -use rustc_macros::{HashStable, TypeFoldable, TypeVisitable}; -use rustc_span::Span; - -use crate::ty::{OpaqueTypeKey, Region, Ty}; - -/// Requires that `region` must be equal to one of the regions in `choice_regions`. -/// We often denote this using the syntax: -/// -/// ```text -/// R0 member of [O1..On] -/// ``` -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -#[derive(HashStable, TypeFoldable, TypeVisitable)] -pub struct MemberConstraint<'tcx> { - /// The `DefId` and args of the opaque type causing this constraint. - /// Used for error reporting. - pub key: OpaqueTypeKey<'tcx>, - - /// The span where the hidden type was instantiated. - pub definition_span: Span, - - /// The hidden type in which `member_region` appears: used for error reporting. - pub hidden_ty: Ty<'tcx>, - - /// The region `R0`. - pub member_region: Region<'tcx>, - - /// The options `O1..On`. 
- pub choice_regions: Lrc<Vec<Region<'tcx>>>, -} diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index ece468947c2..47522f00bb1 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -23,6 +23,7 @@ pub(crate) const ALIGN: usize = 40; /// An indication of where we are in the control flow graph. Used for printing /// extra information in `dump_mir` +#[derive(Clone)] pub enum PassWhere { /// We have not started dumping the control flow graph, but we are about to. BeforeCFG, @@ -1067,7 +1068,6 @@ impl<'tcx> Debug for Rvalue<'tcx> { pretty_print_const(b, fmt, false)?; write!(fmt, "]") } - Len(ref a) => write!(fmt, "Len({a:?})"), Cast(ref kind, ref place, ref ty) => { with_no_trimmed_paths!(write!(fmt, "{place:?} as {ty} ({kind:?})")) } diff --git a/compiler/rustc_middle/src/mir/statement.rs b/compiler/rustc_middle/src/mir/statement.rs index 1ce735cec63..da3fa9e324a 100644 --- a/compiler/rustc_middle/src/mir/statement.rs +++ b/compiler/rustc_middle/src/mir/statement.rs @@ -424,7 +424,6 @@ impl<'tcx> Rvalue<'tcx> { | Rvalue::Ref(_, _, _) | Rvalue::ThreadLocalRef(_) | Rvalue::RawPtr(_, _) - | Rvalue::Len(_) | Rvalue::Cast( CastKind::IntToInt | CastKind::FloatToInt diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index b7ece5ffa62..bbbaffc5a35 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -821,6 +821,11 @@ pub enum TerminatorKind<'tcx> { /// continues at the `resume` basic block, with the second argument written to the `resume_arg` /// place. If the coroutine is dropped before then, the `drop` basic block is invoked. /// + /// Note that coroutines can be (unstably) cloned under certain conditions, which means that + /// this terminator can **return multiple times**! MIR optimizations that reorder code into + /// different basic blocks needs to be aware of that. + /// See <https://github.com/rust-lang/rust/issues/95360>. + /// /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering. /// /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`? @@ -1346,16 +1351,6 @@ pub enum Rvalue<'tcx> { /// model. RawPtr(Mutability, Place<'tcx>), - /// Yields the length of the place, as a `usize`. - /// - /// If the type of the place is an array, this is the array length. For slices (`[T]`, not - /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is - /// ill-formed for places of other types. - /// - /// This cannot be a `UnOp(PtrMetadata, _)` because that expects a value, and we only - /// have a place, and `UnOp(PtrMetadata, RawPtr(place))` is not a thing. - Len(Place<'tcx>), - /// Performs essentially all of the casts that can be performed via `as`. /// /// This allows for casts from/to a variety of types. diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs index 476e352ed92..cbb26b83c79 100644 --- a/compiler/rustc_middle/src/mir/tcx.rs +++ b/compiler/rustc_middle/src/mir/tcx.rs @@ -5,6 +5,7 @@ use rustc_hir as hir; use tracing::{debug, instrument}; +use ty::CoroutineArgsExt; use crate::mir::*; @@ -25,29 +26,63 @@ impl<'tcx> PlaceTy<'tcx> { PlaceTy { ty, variant_index: None } } - /// `place_ty.field_ty(tcx, f)` computes the type at a given field - /// of a record or enum-variant. 
(Most clients of `PlaceTy` can - /// instead just extract the relevant type directly from their - /// `PlaceElem`, but some instances of `ProjectionElem<V, T>` do - /// not carry a `Ty` for `T`.) + /// `place_ty.field_ty(tcx, f)` computes the type of a given field. + /// + /// Most clients of `PlaceTy` can instead just extract the relevant type + /// directly from their `PlaceElem`, but some instances of `ProjectionElem<V, T>` + /// do not carry a `Ty` for `T`. /// /// Note that the resulting type has not been normalized. #[instrument(level = "debug", skip(tcx), ret)] pub fn field_ty(self, tcx: TyCtxt<'tcx>, f: FieldIdx) -> Ty<'tcx> { - match self.ty.kind() { - ty::Adt(adt_def, args) => { - let variant_def = match self.variant_index { - None => adt_def.non_enum_variant(), - Some(variant_index) => { - assert!(adt_def.is_enum()); - adt_def.variant(variant_index) - } - }; - let field_def = &variant_def.fields[f]; - field_def.ty(tcx, args) + if let Some(variant_index) = self.variant_index { + match *self.ty.kind() { + ty::Adt(adt_def, args) if adt_def.is_enum() => { + adt_def.variant(variant_index).fields[f].ty(tcx, args) + } + ty::Coroutine(def_id, args) => { + let mut variants = args.as_coroutine().state_tys(def_id, tcx); + let Some(mut variant) = variants.nth(variant_index.into()) else { + bug!("variant {variant_index:?} of coroutine out of range: {self:?}"); + }; + + variant + .nth(f.index()) + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")) + } + _ => bug!("can't downcast non-adt non-coroutine type: {self:?}"), + } + } else { + match self.ty.kind() { + ty::Adt(adt_def, args) if !adt_def.is_enum() => { + adt_def.non_enum_variant().fields[f].ty(tcx, args) + } + ty::Closure(_, args) => args + .as_closure() + .upvar_tys() + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + ty::CoroutineClosure(_, args) => args + .as_coroutine_closure() + .upvar_tys() + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + // Only prefix fields (upvars and current state) are + // accessible without a variant index. + ty::Coroutine(_, args) => args + .as_coroutine() + .prefix_tys() + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + ty::Tuple(tys) => tys + .get(f.index()) + .copied() + .unwrap_or_else(|| bug!("field {f:?} out of range: {self:?}")), + _ => bug!("can't project out of {self:?}"), } - ty::Tuple(tys) => tys[f.index()], - _ => bug!("extracting field of non-tuple non-adt: {:?}", self), } } @@ -175,7 +210,6 @@ impl<'tcx> Rvalue<'tcx> { let place_ty = place.ty(local_decls, tcx).ty; Ty::new_ptr(tcx, place_ty, mutability) } - Rvalue::Len(..) => tcx.types.usize, Rvalue::Cast(.., ty) => ty, Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => { let lhs_ty = lhs.ty(local_decls, tcx); diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index b919f5726db..473b817aed0 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -67,6 +67,17 @@ impl SwitchTargets { &mut self.targets } + /// Returns a slice with all considered values (not including the fallback). + #[inline] + pub fn all_values(&self) -> &[Pu128] { + &self.values + } + + #[inline] + pub fn all_values_mut(&mut self) -> &mut [Pu128] { + &mut self.values + } + /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the /// specific value. 
This cannot fail, as it'll return the `otherwise` /// branch if there's not a specific match for the value. diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 62c340d99e3..12a024a219e 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -695,14 +695,6 @@ macro_rules! make_mir_visitor { self.visit_place(path, ctx, location); } - Rvalue::Len(path) => { - self.visit_place( - path, - PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect), - location - ); - } - Rvalue::Cast(_cast_kind, operand, ty) => { self.visit_operand(operand, location); self.visit_ty($(& $mutability)? *ty, TyContext::Location(location)); @@ -1369,12 +1361,12 @@ pub enum PlaceContext { impl PlaceContext { /// Returns `true` if this place context represents a drop. #[inline] - pub fn is_drop(&self) -> bool { + pub fn is_drop(self) -> bool { matches!(self, PlaceContext::MutatingUse(MutatingUseContext::Drop)) } /// Returns `true` if this place context represents a borrow. - pub fn is_borrow(&self) -> bool { + pub fn is_borrow(self) -> bool { matches!( self, PlaceContext::NonMutatingUse( @@ -1384,7 +1376,7 @@ impl PlaceContext { } /// Returns `true` if this place context represents an address-of. - pub fn is_address_of(&self) -> bool { + pub fn is_address_of(self) -> bool { matches!( self, PlaceContext::NonMutatingUse(NonMutatingUseContext::RawBorrow) @@ -1394,7 +1386,7 @@ impl PlaceContext { /// Returns `true` if this place context represents a storage live or storage dead marker. #[inline] - pub fn is_storage_marker(&self) -> bool { + pub fn is_storage_marker(self) -> bool { matches!( self, PlaceContext::NonUse(NonUseContext::StorageLive | NonUseContext::StorageDead) @@ -1403,18 +1395,18 @@ impl PlaceContext { /// Returns `true` if this place context represents a use that potentially changes the value. #[inline] - pub fn is_mutating_use(&self) -> bool { + pub fn is_mutating_use(self) -> bool { matches!(self, PlaceContext::MutatingUse(..)) } /// Returns `true` if this place context represents a use. #[inline] - pub fn is_use(&self) -> bool { + pub fn is_use(self) -> bool { !matches!(self, PlaceContext::NonUse(..)) } /// Returns `true` if this place context represents an assignment statement. - pub fn is_place_assignment(&self) -> bool { + pub fn is_place_assignment(self) -> bool { matches!( self, PlaceContext::MutatingUse( @@ -1424,4 +1416,19 @@ impl PlaceContext { ) ) } + + /// The variance of a place in the given context. + pub fn ambient_variance(self) -> ty::Variance { + use NonMutatingUseContext::*; + use NonUseContext::*; + match self { + PlaceContext::MutatingUse(_) => ty::Invariant, + PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant, + PlaceContext::NonMutatingUse( + Inspect | Copy | Move | PlaceMention | SharedBorrow | FakeBorrow | RawBorrow + | Projection, + ) => ty::Covariant, + PlaceContext::NonUse(AscribeUserTy(variance)) => variance, + } + } } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 977e62becf1..d26c007d227 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -471,7 +471,8 @@ impl<'tcx> Interner for TyCtxt<'tcx> { | ty::CoroutineClosure(..) 
| ty::Coroutine(_, _) | ty::Never - | ty::Tuple(_) => { + | ty::Tuple(_) + | ty::UnsafeBinder(_) => { let simp = ty::fast_reject::simplify_type( tcx, self_ty, @@ -2295,6 +2296,7 @@ impl<'tcx> TyCtxt<'tcx> { Ref, FnDef, FnPtr, + UnsafeBinder, Placeholder, Coroutine, CoroutineWitness, diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index 4a82af32559..714094db053 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -191,6 +191,7 @@ impl<'tcx> Ty<'tcx> { _ => "fn item".into(), }, ty::FnPtr(..) => "fn pointer".into(), + ty::UnsafeBinder(_) => "unsafe binder".into(), ty::Dynamic(..) => "trait object".into(), ty::Closure(..) | ty::CoroutineClosure(..) => "closure".into(), ty::Coroutine(def_id, ..) => { diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index 04d03187541..0af57f636aa 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -253,6 +253,12 @@ impl FlagComputation { &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { computation.add_tys(sig_tys.inputs_and_output); }), + + &ty::UnsafeBinder(bound_ty) => { + self.bound_computation(bound_ty.into(), |computation, ty| { + computation.add_ty(ty); + }) + } } } diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 367b0c07f9b..6e6da6de749 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -816,6 +816,11 @@ where bug!("TyAndLayout::field({:?}): not applicable", this) } + ty::UnsafeBinder(bound_ty) => { + let ty = tcx.instantiate_bound_regions_with_erased(bound_ty.into()); + field_ty_or_layout(TyAndLayout { ty, ..this }, cx, i) + } + // Potentially-wide pointers. ty::Ref(_, pointee, _) | ty::RawPtr(pointee, _) => { assert!(i < this.fields.count()); diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index cc746746760..b0150bc1192 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -291,6 +291,7 @@ fn characteristic_def_id_of_type_cached<'a>( | ty::Uint(_) | ty::Str | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Alias(..) | ty::Placeholder(..) | ty::Param(_) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index a089eac5d7e..9fe1caa4b58 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -695,6 +695,10 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))), + ty::UnsafeBinder(ref bound_ty) => { + // FIXME(unsafe_binders): Make this print `unsafe<>` rather than `for<>`. 
+ self.wrap_binder(bound_ty, |ty, cx| cx.pretty_print_type(*ty))?; + } ty::Infer(infer_ty) => { if self.should_print_verbose() { p!(write("{:?}", ty.kind())); @@ -837,6 +841,12 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!( " upvar_tys=", print(args.as_coroutine().tupled_upvars_ty()), + " resume_ty=", + print(args.as_coroutine().resume_ty()), + " yield_ty=", + print(args.as_coroutine().yield_ty()), + " return_ty=", + print(args.as_coroutine().return_ty()), " witness=", print(args.as_coroutine().witness()) ); diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index f38454ceac0..68cb56f3583 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -393,6 +393,7 @@ impl<'tcx> TypeSuperFoldable<TyCtxt<'tcx>> for Ty<'tcx> { ty::Tuple(ts) => ty::Tuple(ts.try_fold_with(folder)?), ty::FnDef(def_id, args) => ty::FnDef(def_id, args.try_fold_with(folder)?), ty::FnPtr(sig_tys, hdr) => ty::FnPtr(sig_tys.try_fold_with(folder)?, hdr), + ty::UnsafeBinder(f) => ty::UnsafeBinder(f.try_fold_with(folder)?), ty::Ref(r, ty, mutbl) => { ty::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) } @@ -443,6 +444,7 @@ impl<'tcx> TypeSuperVisitable<TyCtxt<'tcx>> for Ty<'tcx> { ty::Tuple(ts) => ts.visit_with(visitor), ty::FnDef(_, args) => args.visit_with(visitor), ty::FnPtr(ref sig_tys, _) => sig_tys.visit_with(visitor), + ty::UnsafeBinder(ref f) => f.visit_with(visitor), ty::Ref(r, ty, _) => { try_visit!(r.visit_with(visitor)); ty.visit_with(visitor) diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 045c483d6a5..92b3632c8ac 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -674,6 +674,11 @@ impl<'tcx> Ty<'tcx> { } #[inline] + pub fn new_unsafe_binder(tcx: TyCtxt<'tcx>, b: Binder<'tcx, Ty<'tcx>>) -> Ty<'tcx> { + Ty::new(tcx, UnsafeBinder(b.into())) + } + + #[inline] pub fn new_dynamic( tcx: TyCtxt<'tcx>, obj: &'tcx List<ty::PolyExistentialPredicate<'tcx>>, @@ -962,6 +967,10 @@ impl<'tcx> rustc_type_ir::inherent::Ty<TyCtxt<'tcx>> for Ty<'tcx> { Ty::new_pat(interner, ty, pat) } + fn new_unsafe_binder(interner: TyCtxt<'tcx>, ty: ty::Binder<'tcx, Ty<'tcx>>) -> Self { + Ty::new_unsafe_binder(interner, ty) + } + fn new_unit(interner: TyCtxt<'tcx>) -> Self { interner.types.unit } @@ -1480,6 +1489,7 @@ impl<'tcx> Ty<'tcx> { | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) + | ty::UnsafeBinder(_) | ty::Error(_) | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8, @@ -1659,6 +1669,8 @@ impl<'tcx> Ty<'tcx> { // metadata of `tail`. ty::Param(_) | ty::Alias(..) => Err(tail), + | ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + ty::Infer(ty::TyVar(_)) | ty::Pat(..) | ty::Bound(..) @@ -1819,6 +1831,7 @@ impl<'tcx> Ty<'tcx> { | ty::Float(_) | ty::FnDef(..) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -1898,6 +1911,8 @@ impl<'tcx> Ty<'tcx> { // Might be, but not "trivial" so just giving the safe answer. ty::Adt(..) | ty::Closure(..) | ty::CoroutineClosure(..) => false, + ty::UnsafeBinder(_) => false, + // Needs normalization or revealing to determine, so no is the safe answer. ty::Alias(..) => false, @@ -1976,7 +1991,8 @@ impl<'tcx> Ty<'tcx> { | Coroutine(_, _) | CoroutineWitness(..) 
| Never - | Tuple(_) => true, + | Tuple(_) + | UnsafeBinder(_) => true, Error(_) | Infer(_) | Alias(_, _) | Param(_) | Bound(_, _) | Placeholder(_) => false, } } diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index fc3530e3dde..ab8285f87d6 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -1241,6 +1241,7 @@ impl<'tcx> Ty<'tcx> { | ty::Foreign(_) | ty::Coroutine(..) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Infer(_) | ty::Alias(..) | ty::Param(_) @@ -1281,6 +1282,7 @@ impl<'tcx> Ty<'tcx> { | ty::Foreign(_) | ty::Coroutine(..) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Infer(_) | ty::Alias(..) | ty::Param(_) @@ -1322,6 +1324,9 @@ impl<'tcx> Ty<'tcx> { | ty::Infer(ty::FreshIntTy(_)) | ty::Infer(ty::FreshFloatTy(_)) => AsyncDropGlueMorphology::Noop, + // FIXME(unsafe_binders): + ty::UnsafeBinder(_) => todo!(), + ty::Tuple(tys) if tys.is_empty() => AsyncDropGlueMorphology::Noop, ty::Adt(adt_def, _) if adt_def.is_manually_drop() => AsyncDropGlueMorphology::Noop, @@ -1522,7 +1527,7 @@ impl<'tcx> Ty<'tcx> { false } - ty::Foreign(_) | ty::CoroutineWitness(..) | ty::Error(_) => false, + ty::Foreign(_) | ty::CoroutineWitness(..) | ty::Error(_) | ty::UnsafeBinder(_) => false, } } @@ -1681,7 +1686,8 @@ pub fn needs_drop_components_with_async<'tcx>( | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) - | ty::CoroutineWitness(..) => Ok(smallvec![ty]), + | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) => Ok(smallvec![ty]), } } diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs index a93a146ec7c..2dcba8c2f82 100644 --- a/compiler/rustc_middle/src/ty/walk.rs +++ b/compiler/rustc_middle/src/ty/walk.rs @@ -194,6 +194,9 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), ); } + ty::UnsafeBinder(bound_ty) => { + stack.push(bound_ty.skip_binder().into()); + } }, GenericArgKind::Lifetime(_) => {} GenericArgKind::Const(parent_ct) => match parent_ct.kind() { diff --git a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index 59f440432eb..3dd5de02230 100644 --- a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -246,7 +246,6 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { let offset = self.parse_operand(args[1])?; Ok(Rvalue::BinaryOp(BinOp::Offset, Box::new((ptr, offset)))) }, - @call(mir_len, args) => Ok(Rvalue::Len(self.parse_place(args[0])?)), @call(mir_ptr_metadata, args) => Ok(Rvalue::UnaryOp(UnOp::PtrMetadata, self.parse_operand(args[0])?)), @call(mir_copy_for_deref, args) => Ok(Rvalue::CopyForDeref(self.parse_place(args[0])?)), ExprKind::Borrow { borrow_kind, arg } => Ok( diff --git a/compiler/rustc_mir_build/src/builder/expr/as_place.rs b/compiler/rustc_mir_build/src/builder/expr/as_place.rs index 01aec70f437..89c7bb357ef 100644 --- a/compiler/rustc_mir_build/src/builder/expr/as_place.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_place.rs @@ -635,7 +635,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// For arrays it'll be `Operand::Constant` with the actual length; /// For slices it'll be `Operand::Move` of a local using `PtrMetadata`. 
- fn len_of_slice_or_array( + pub(in crate::builder) fn len_of_slice_or_array( &mut self, block: BasicBlock, place: Place<'tcx>, diff --git a/compiler/rustc_mir_build/src/builder/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs index 8cca84d7fcc..0d36b7bb3ee 100644 --- a/compiler/rustc_mir_build/src/builder/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -243,11 +243,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } TestKind::Len { len, op } => { - let usize_ty = self.tcx.types.usize; - let actual = self.temp(usize_ty, test.span); - // actual = len(place) - self.cfg.push_assign(block, source_info, actual, Rvalue::Len(place)); + let actual = self.len_of_slice_or_array(block, place, test.span, source_info); // expected = <N> let expected = self.push_usize(block, source_info, len); @@ -262,7 +259,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fail_block, source_info, op, - Operand::Move(actual), + actual, Operand::Move(expected), ); } diff --git a/compiler/rustc_mir_build/src/builder/scope.rs b/compiler/rustc_mir_build/src/builder/scope.rs index fd9f9da6e77..35c98037827 100644 --- a/compiler/rustc_mir_build/src/builder/scope.rs +++ b/compiler/rustc_mir_build/src/builder/scope.rs @@ -1481,14 +1481,6 @@ fn build_scope_drops<'tcx>( block = next; } DropKind::ForLint => { - // If the operand has been moved, and we are not on an unwind - // path, then don't generate the drop. (We only take this into - // account for non-unwind paths so as not to disturb the - // caching mechanism.) - if scope.moved_locals.iter().any(|&o| o == local) { - continue; - } - // As in the `DropKind::Storage` case below: // normally lint-related drops are not emitted for unwind, // so we can just leave `unwind_to` unmodified, but in some @@ -1500,6 +1492,14 @@ fn build_scope_drops<'tcx>( unwind_to = unwind_drops.drops[unwind_to].next; } + // If the operand has been moved, and we are not on an unwind + // path, then don't generate the drop. (We only take this into + // account for non-unwind paths so as not to disturb the + // caching mechanism.) 
+ if scope.moved_locals.iter().any(|&o| o == local) { + continue; + } + cfg.push(block, Statement { source_info, kind: StatementKind::BackwardIncompatibleDropHint { @@ -1552,7 +1552,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1); for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) { match drop_node.data.kind { - DropKind::Storage => { + DropKind::Storage | DropKind::ForLint => { if is_coroutine { let unwind_drop = self .scopes @@ -1563,7 +1563,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { unwind_indices.push(unwind_indices[drop_node.next]); } } - DropKind::Value | DropKind::ForLint => { + DropKind::Value => { let unwind_drop = self .scopes .unwind_drops diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs index 494b7d54d8a..f8a84674947 100644 --- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs +++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs @@ -1,4 +1,4 @@ -use std::{fmt, iter}; +use std::{fmt, iter, mem}; use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx}; use rustc_hir::lang_items::LangItem; @@ -6,6 +6,7 @@ use rustc_index::Idx; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::*; use rustc_middle::span_bug; +use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::util::IntTypeExt; use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt}; use rustc_span::DUMMY_SP; @@ -738,8 +739,13 @@ where loop_block } - fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock { - debug!("open_drop_for_array({:?}, {:?})", ety, opt_size); + fn open_drop_for_array( + &mut self, + array_ty: Ty<'tcx>, + ety: Ty<'tcx>, + opt_size: Option<u64>, + ) -> BasicBlock { + debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size); let tcx = self.tcx(); if let Some(size) = opt_size { @@ -801,13 +807,50 @@ where } } - self.drop_loop_pair(ety) + let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty); + let array_ptr = self.new_temp(array_ptr_ty); + + let slice_ty = Ty::new_slice(tcx, ety); + let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty); + let slice_ptr = self.new_temp(slice_ptr_ty); + + let mut delegate_block = BasicBlockData { + statements: vec![ + self.assign(Place::from(array_ptr), Rvalue::RawPtr(Mutability::Mut, self.place)), + self.assign( + Place::from(slice_ptr), + Rvalue::Cast( + CastKind::PointerCoercion( + PointerCoercion::Unsize, + CoercionSource::Implicit, + ), + Operand::Move(Place::from(array_ptr)), + slice_ptr_ty, + ), + ), + ], + is_cleanup: self.unwind.is_cleanup(), + terminator: None, + }; + + let array_place = mem::replace( + &mut self.place, + Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx), + ); + let slice_block = self.drop_loop_pair_for_slice(ety); + self.place = array_place; + + delegate_block.terminator = Some(Terminator { + source_info: self.source_info, + kind: TerminatorKind::Goto { target: slice_block }, + }); + self.elaborator.patch().new_block(delegate_block) } /// Creates a pair of drop-loops of `place`, which drops its contents, even /// in the case of 1 panic. 
- fn drop_loop_pair(&mut self, ety: Ty<'tcx>) -> BasicBlock { - debug!("drop_loop_pair({:?})", ety); + fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock { + debug!("drop_loop_pair_for_slice({:?})", ety); let tcx = self.tcx(); let len = self.new_temp(tcx.types.usize); let cur = self.new_temp(tcx.types.usize); @@ -817,10 +860,24 @@ where let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind); + let [PlaceElem::Deref] = self.place.projection.as_slice() else { + span_bug!( + self.source_info.span, + "Expected place for slice drop shim to be *_n, but it's {:?}", + self.place, + ); + }; + let zero = self.constant_usize(0); let block = BasicBlockData { statements: vec![ - self.assign(len.into(), Rvalue::Len(self.place)), + self.assign( + len.into(), + Rvalue::UnaryOp( + UnOp::PtrMetadata, + Operand::Copy(Place::from(self.place.local)), + ), + ), self.assign(cur.into(), Rvalue::Use(zero)), ], is_cleanup: unwind.is_cleanup(), @@ -863,9 +920,9 @@ where ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind), ty::Array(ety, size) => { let size = size.try_to_target_usize(self.tcx()); - self.open_drop_for_array(*ety, size) + self.open_drop_for_array(ty, *ety, size) } - ty::Slice(ety) => self.drop_loop_pair(*ety), + ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety), _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty), } diff --git a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs index 568d8a5acaf..217594b3238 100644 --- a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs +++ b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs @@ -91,7 +91,6 @@ where | Rvalue::Use(..) | Rvalue::ThreadLocalRef(..) | Rvalue::Repeat(..) - | Rvalue::Len(..) | Rvalue::BinaryOp(..) | Rvalue::NullaryOp(..) | Rvalue::UnaryOp(..) diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index 0880364bfca..80875f32e4f 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -161,6 +161,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -200,6 +201,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) @@ -411,7 +413,6 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { Rvalue::Ref(..) | Rvalue::RawPtr(..) | Rvalue::Discriminant(..) - | Rvalue::Len(..) | Rvalue::NullaryOp( NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..) 
| NullOp::UbChecks, _, diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index 711cf2edc46..cc44114782c 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -408,18 +408,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> { state: &mut State<FlatSet<Scalar>>, ) -> ValueOrPlace<FlatSet<Scalar>> { let val = match rvalue { - Rvalue::Len(place) => { - let place_ty = place.ty(self.local_decls, self.tcx); - if let ty::Array(_, len) = place_ty.ty.kind() { - Const::Ty(self.tcx.types.usize, *len) - .try_eval_scalar(self.tcx, self.typing_env) - .map_or(FlatSet::Top, FlatSet::Elem) - } else if let [ProjectionElem::Deref] = place.projection[..] { - state.get_len(place.local.into(), &self.map) - } else { - FlatSet::Top - } - } Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => { let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else { return ValueOrPlace::Value(FlatSet::Top); @@ -944,7 +932,8 @@ fn try_write_constant<'tcx>( | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) - | ty::Dynamic(..) => throw_machine_stop_str!("unsupported type"), + | ty::Dynamic(..) + | ty::UnsafeBinder(_) => throw_machine_stop_str!("unsupported type"), ty::Error(_) | ty::Infer(..) | ty::CoroutineWitness(..) => bug!(), } diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs index 8f977d2979e..e99bee6a01f 100644 --- a/compiler/rustc_mir_transform/src/dest_prop.rs +++ b/compiler/rustc_mir_transform/src/dest_prop.rs @@ -574,7 +574,6 @@ impl WriteInfo { | Rvalue::NullaryOp(_, _) | Rvalue::Ref(_, _, _) | Rvalue::RawPtr(_, _) - | Rvalue::Len(_) | Rvalue::Discriminant(_) | Rvalue::CopyForDeref(_) => {} } diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index d5a813ec8ec..283ed94b615 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -223,8 +223,6 @@ enum Value<'tcx> { Projection(VnIndex, ProjectionElem<VnIndex, Ty<'tcx>>), /// Discriminant of the given value. Discriminant(VnIndex), - /// Length of an array or slice. - Len(VnIndex), // Operations. NullaryOp(NullOp<'tcx>, Ty<'tcx>), @@ -513,13 +511,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?; discr_value.into() } - Len(slice) => { - let slice = self.evaluated[slice].as_ref()?; - let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap(); - let len = slice.len(&self.ecx).discard_err()?; - let imm = ImmTy::from_uint(len, usize_layout); - imm.into() - } NullaryOp(null_op, ty) => { let layout = self.ecx.layout_of(ty).ok()?; if let NullOp::SizeOf | NullOp::AlignOf = null_op @@ -863,7 +854,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } // Operations. - Rvalue::Len(ref mut place) => return self.simplify_len(place, location), Rvalue::Cast(ref mut kind, ref mut value, to) => { return self.simplify_cast(kind, value, to, location); } @@ -1433,47 +1423,6 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { Some(self.insert(Value::Cast { kind: *kind, value, from, to })) } - fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> { - // Trivial case: we are fetching a statically known length. 
- let place_ty = place.ty(self.local_decls, self.tcx).ty; - if let ty::Array(_, len) = place_ty.kind() { - return self.insert_constant(Const::from_ty_const( - *len, - self.tcx.types.usize, - self.tcx, - )); - } - - let mut inner = self.simplify_place_value(place, location)?; - - // The length information is stored in the wide pointer. - // Reborrowing copies length information from one pointer to the other. - while let Value::Address { place: borrowed, .. } = self.get(inner) - && let [PlaceElem::Deref] = borrowed.projection[..] - && let Some(borrowed) = self.locals[borrowed.local] - { - inner = borrowed; - } - - // We have an unsizing cast, which assigns the length to wide pointer metadata. - if let Value::Cast { kind, from, to, .. } = self.get(inner) - && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind - && let Some(from) = from.builtin_deref(true) - && let ty::Array(_, len) = from.kind() - && let Some(to) = to.builtin_deref(true) - && let ty::Slice(..) = to.kind() - { - return self.insert_constant(Const::from_ty_const( - *len, - self.tcx.types.usize, - self.tcx, - )); - } - - // Fallback: a symbolic `Len`. - Some(self.insert(Value::Len(inner))) - } - fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool { let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx); let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx); diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs index acf3eb2b62c..f1705d0c831 100644 --- a/compiler/rustc_mir_transform/src/known_panics_lint.rs +++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs @@ -440,7 +440,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { | Rvalue::Use(..) | Rvalue::CopyForDeref(..) | Rvalue::Repeat(..) - | Rvalue::Len(..) | Rvalue::Cast(..) | Rvalue::ShallowInitBox(..) | Rvalue::Discriminant(..) @@ -600,20 +599,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { return None; } - Len(place) => { - let len = if let ty::Array(_, n) = place.ty(self.local_decls(), self.tcx).ty.kind() - { - n.try_to_target_usize(self.tcx)? - } else { - match self.get_const(place)? { - Value::Immediate(src) => src.len(&self.ecx).discard_err()?, - Value::Aggregate { fields, .. } => fields.len() as u64, - Value::Uninit => return None, - } - }; - ImmTy::from_scalar(Scalar::from_target_usize(len, self), layout).into() - } - Ref(..) | RawPtr(..) 
=> return None, NullaryOp(ref null_op, ty) => { diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 5c090bf7cad..e1fba9be5bb 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -437,6 +437,8 @@ fn mir_promoted( Some(MirPhase::Analysis(AnalysisPhase::Initial)), ); + lint_tail_expr_drop_order::run_lint(tcx, def, &body); + let promoted = promote_pass.promoted_fragments.into_inner(); (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted)) } @@ -492,7 +494,6 @@ fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> & } let (body, _) = tcx.mir_promoted(def); - lint_tail_expr_drop_order::run_lint(tcx, def, &body.borrow()); let mut body = body.steal(); if let Some(error_reported) = tainted_by_errors { diff --git a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs index 7fb421dea0c..e5a183bc75c 100644 --- a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs +++ b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs @@ -285,7 +285,9 @@ fn ty_dtor_span<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<Span> { | ty::Placeholder(_) | ty::Infer(_) | ty::Slice(_) - | ty::Array(_, _) => None, + | ty::Array(_, _) + | ty::UnsafeBinder(_) => None, + ty::Adt(adt_def, _) => { let did = adt_def.did(); let try_local_did_span = |did: DefId| { diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs index 20e2a65b311..534ba991780 100644 --- a/compiler/rustc_mir_transform/src/match_branches.rs +++ b/compiler/rustc_mir_transform/src/match_branches.rs @@ -7,6 +7,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::layout::{IntegerExt, TyAndLayout}; use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt}; use rustc_type_ir::TyKind::*; +use tracing::instrument; use super::simplify::simplify_cfg; @@ -51,7 +52,7 @@ impl<'tcx> crate::MirPass<'tcx> for MatchBranchSimplification { } trait SimplifyMatch<'tcx> { - /// Simplifies a match statement, returning true if the simplification succeeds, false + /// Simplifies a match statement, returning `Some` if the simplification succeeds, `None` /// otherwise. Generic code is written here, and we generally don't need a custom /// implementation. fn simplify( @@ -159,6 +160,7 @@ struct SimplifyToIf; /// } /// ``` impl<'tcx> SimplifyMatch<'tcx> for SimplifyToIf { + #[instrument(level = "debug", skip(self, tcx), ret)] fn can_simplify( &mut self, tcx: TyCtxt<'tcx>, @@ -167,12 +169,15 @@ impl<'tcx> SimplifyMatch<'tcx> for SimplifyToIf { bbs: &IndexSlice<BasicBlock, BasicBlockData<'tcx>>, _discr_ty: Ty<'tcx>, ) -> Option<()> { - if targets.iter().len() != 1 { - return None; - } + let (first, second) = match targets.all_targets() { + &[first, otherwise] => (first, otherwise), + &[first, second, otherwise] if bbs[otherwise].is_empty_unreachable() => (first, second), + _ => { + return None; + } + }; + // We require that the possible target blocks all be distinct. 
- let (_, first) = targets.iter().next().unwrap(); - let second = targets.otherwise(); if first == second { return None; } @@ -221,8 +226,14 @@ impl<'tcx> SimplifyMatch<'tcx> for SimplifyToIf { discr_local: Local, discr_ty: Ty<'tcx>, ) { - let (val, first) = targets.iter().next().unwrap(); - let second = targets.otherwise(); + let ((val, first), second) = match (targets.all_targets(), targets.all_values()) { + (&[first, otherwise], &[val]) => ((val, first), otherwise), + (&[first, second, otherwise], &[val, _]) if bbs[otherwise].is_empty_unreachable() => { + ((val, first), second) + } + _ => unreachable!(), + }; + // We already checked that first and second are different blocks, // and bb_idx has a different terminator from both of them. let first = &bbs[first]; @@ -297,7 +308,7 @@ struct SimplifyToExp { transform_kinds: Vec<TransformKind>, } -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Debug)] enum ExpectedTransformKind<'a, 'tcx> { /// Identical statements. Same(&'a StatementKind<'tcx>), @@ -362,6 +373,7 @@ impl From<ExpectedTransformKind<'_, '_>> for TransformKind { /// } /// ``` impl<'tcx> SimplifyMatch<'tcx> for SimplifyToExp { + #[instrument(level = "debug", skip(self, tcx), ret)] fn can_simplify( &mut self, tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index 6be95b1f0f1..7451f419304 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -430,9 +430,7 @@ impl<'tcx> Validator<'_, 'tcx> { self.validate_operand(op)? } - Rvalue::Discriminant(place) | Rvalue::Len(place) => { - self.validate_place(place.as_ref())? - } + Rvalue::Discriminant(place) => self.validate_place(place.as_ref())?, Rvalue::ThreadLocalRef(_) => return Err(Unpromotable), diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index bce015046e1..a670da94fcc 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -1009,14 +1009,6 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } } Rvalue::Ref(..) => {} - Rvalue::Len(p) => { - let pty = p.ty(&self.body.local_decls, self.tcx).ty; - check_kinds!( - pty, - "Cannot compute length of non-array type {:?}", - ty::Array(..) | ty::Slice(..) - ); - } Rvalue::BinaryOp(op, vals) => { use BinOp::*; let a = vals.0.ty(&self.body.local_decls, self.tcx); diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index 2f7301d8fe5..8a54a4ece98 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -378,6 +378,7 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> { | ty::Pat(_, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) diff --git a/compiler/rustc_next_trait_solver/src/coherence.rs b/compiler/rustc_next_trait_solver/src/coherence.rs index 2461ef0c0df..408742747c2 100644 --- a/compiler/rustc_next_trait_solver/src/coherence.rs +++ b/compiler/rustc_next_trait_solver/src/coherence.rs @@ -339,7 +339,9 @@ where | ty::Slice(..) | ty::RawPtr(..) | ty::Never - | ty::Tuple(..) => self.found_non_local_ty(ty), + | ty::Tuple(..) + // FIXME(unsafe_binders): Non-local? + | ty::UnsafeBinder(_) => self.found_non_local_ty(ty), ty::Param(..) 
=> panic!("unexpected ty param"), diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 02f6439b77f..63432dc199b 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -545,6 +545,7 @@ where | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(..) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -634,6 +635,7 @@ where | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Alias(..) | ty::Closure(..) | ty::CoroutineClosure(..) diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index 05ce61bc067..7da4f5e0107 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -83,6 +83,8 @@ where .map(|bty| bty.instantiate(cx, args)) .collect()), + ty::UnsafeBinder(bound_ty) => Ok(vec![bound_ty.into()]), + // For `PhantomData<T>`, we pass `T`. ty::Adt(def, args) if def.is_phantom_data() => Ok(vec![ty::Binder::dummy(args.type_at(0))]), @@ -144,6 +146,8 @@ where panic!("unexpected type `{ty:?}`") } + ty::UnsafeBinder(bound_ty) => Ok(vec![bound_ty.into()]), + // impl Sized for () // impl Sized for (T1, T2, .., Tn) where Tn: Sized if n >= 1 ty::Tuple(tys) => Ok(tys.last().map_or_else(Vec::new, |ty| vec![ty::Binder::dummy(ty)])), @@ -239,6 +243,8 @@ where } }, + ty::UnsafeBinder(_) => Err(NoSolution), + // impl Copy/Clone for CoroutineWitness where T: Copy/Clone forall T in coroutine_hidden_types ty::CoroutineWitness(def_id, args) => Ok(ecx .cx() @@ -374,6 +380,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_callable<I: Intern | ty::Never | ty::Tuple(_) | ty::Pat(_, _) + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) @@ -544,6 +551,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I: | ty::Coroutine(_, _) | ty::CoroutineWitness(..) | ty::Never + | ty::UnsafeBinder(_) | ty::Tuple(_) | ty::Alias(_, _) | ty::Param(_) @@ -694,7 +702,8 @@ pub(in crate::solve) fn extract_fn_def_from_const_callable<I: Interner>( | ty::Param(_) | ty::Placeholder(..) | ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) - | ty::Error(_) => return Err(NoSolution), + | ty::Error(_) + | ty::UnsafeBinder(_) => return Err(NoSolution), ty::Bound(..) | ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { @@ -764,6 +773,10 @@ pub(in crate::solve) fn const_conditions_for_destruct<I: Interner>( | ty::Coroutine(_, _) | ty::CoroutineWitness(_, _) => Err(NoSolution), + // FIXME(unsafe_binders): Unsafe binders could implement `~const Drop` + // if their inner type implements it. + ty::UnsafeBinder(_) => Err(NoSolution), + ty::Dynamic(..) | ty::Param(_) | ty::Alias(..) 
| ty::Placeholder(_) | ty::Foreign(_) => { Err(NoSolution) } diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs index b8867192225..f5ecfea5408 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs @@ -619,6 +619,11 @@ where Some(tail_ty) => Ty::new_projection(cx, metadata_def_id, [tail_ty]), }, + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binder): Figure out how to handle pointee for unsafe binders. + todo!() + } + ty::Infer( ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_), ) @@ -822,6 +827,11 @@ where | ty::Tuple(_) | ty::Error(_) => self_ty.discriminant_ty(ecx.cx()), + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binders): instantiate this with placeholders?? i guess?? + todo!("discr subgoal...") + } + // We do not call `Ty::discriminant_ty` on alias, param, or placeholder // types, which return `<self_ty as DiscriminantKind>::Discriminant` // (or ICE in the case of placeholders). Projecting a type to itself @@ -869,6 +879,11 @@ where | ty::Tuple(_) | ty::Error(_) => self_ty.async_destructor_ty(ecx.cx()), + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binders): Instantiate the binder with placeholders I guess. + todo!() + } + // We do not call `Ty::async_destructor_ty` on alias, param, or placeholder // types, which return `<self_ty as AsyncDestruct>::AsyncDestructor` // (or ICE in the case of placeholders). Projecting a type to itself diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index 886cdec0345..d68fca60829 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -1100,7 +1100,8 @@ where | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) - | ty::Adt(_, _) => { + | ty::Adt(_, _) + | ty::UnsafeBinder(_) => { let mut disqualifying_impl = None; self.cx().for_each_relevant_impl( goal.predicate.def_id(), diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 2f4adf2af9e..7533e75ffe2 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -279,13 +279,9 @@ impl<'a> Parser<'a> { break; } - let fixity = op.fixity(); - let min_prec = match fixity { + let min_prec = match op.fixity() { Fixity::Right => Bound::Included(prec), - Fixity::Left => Bound::Excluded(prec), - // We currently have no non-associative operators that are not handled above by - // the special cases. The code is here only for future convenience. 
- Fixity::None => Bound::Excluded(prec), + Fixity::Left | Fixity::None => Bound::Excluded(prec), }; let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { let attrs = this.parse_outer_attributes()?; @@ -337,10 +333,6 @@ impl<'a> Parser<'a> { self.dcx().span_bug(span, "AssocOp should have been handled by special case") } }; - - if let Fixity::None = fixity { - break; - } } Ok((lhs, parsed_something)) diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 151abf0be95..1ddb5fc0a11 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -745,6 +745,51 @@ impl<'a> Parser<'a> { Ok(self.mk_block(stmts, s, lo.to(self.prev_token.span))) } + fn recover_missing_dot(&mut self, err: &mut Diag<'_>) { + let Some((ident, _)) = self.token.ident() else { + return; + }; + if let Some(c) = ident.name.as_str().chars().next() + && c.is_uppercase() + { + return; + } + if self.token.is_reserved_ident() && !self.token.is_ident_named(kw::Await) { + return; + } + if self.prev_token.is_reserved_ident() && self.prev_token.is_ident_named(kw::Await) { + // Likely `foo.await bar` + } else if !self.prev_token.is_reserved_ident() && self.prev_token.is_ident() { + // Likely `foo bar` + } else if self.prev_token.kind == token::Question { + // `foo? bar` + } else if self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis) { + // `foo() bar` + } else { + return; + } + if self.token.span == self.prev_token.span { + // Account for syntax errors in proc-macros. + return; + } + if self.look_ahead(1, |t| [token::Semi, token::Question, token::Dot].contains(&t.kind)) { + err.span_suggestion_verbose( + self.prev_token.span.between(self.token.span), + "you might have meant to write a field access", + ".".to_string(), + Applicability::MaybeIncorrect, + ); + } + if self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis)) { + err.span_suggestion_verbose( + self.prev_token.span.between(self.token.span), + "you might have meant to write a method call", + ".".to_string(), + Applicability::MaybeIncorrect, + ); + } + } + /// Parses a statement, including the trailing semicolon. pub fn parse_full_stmt( &mut self, @@ -851,7 +896,8 @@ impl<'a> Parser<'a> { Some(if recover.no() { res? } else { - res.unwrap_or_else(|e| { + res.unwrap_or_else(|mut e| { + self.recover_missing_dot(&mut e); let guar = e.emit(); self.recover_stmt(); guar @@ -872,7 +918,12 @@ impl<'a> Parser<'a> { // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover. match &mut local.kind { LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => { - self.check_mistyped_turbofish_with_multiple_type_params(e, expr)?; + self.check_mistyped_turbofish_with_multiple_type_params(e, expr).map_err( + |mut e| { + self.recover_missing_dot(&mut e); + e + }, + )?; // We found `foo<bar, baz>`, have we fully recovered? 
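
The new `recover_missing_dot` helper above suggests inserting a `.` when a statement parses as two adjacent expressions. A hedged illustration of the statements it targets, shown in their corrected form (the broken variants are in comments because, by construction, they do not compile):

```rust
fn main() {
    let text = Some("42".to_owned());

    // Was: `let n = text unwrap();`
    //   -> "you might have meant to write a method call" (insert `.`)
    let n = text.unwrap();

    // Was: `let len = n len;`
    //   -> "you might have meant to write a field access" (insert `.`)
    let len = n.len();

    assert_eq!(len, 2);
}
```
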
self.expect_semi()?; } diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index ba3101e9058..f39bea2a56f 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -112,9 +112,11 @@ passes_coroutine_on_non_closure = attribute should be applied to closures .label = not a closure -passes_coverage_not_fn_or_closure = - attribute should be applied to a function definition or closure - .label = not a function or closure +passes_coverage_attribute_not_allowed = + coverage attribute not allowed here + .not_fn_impl_mod = not a function, impl block, or module + .no_body = function has no body + .help = coverage attribute can be applied to a function (with body), impl block, or module passes_dead_codes = { $multiple -> diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index 8cf20a378d4..12f715a0fe4 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -432,21 +432,34 @@ impl<'tcx> CheckAttrVisitor<'tcx> { /// Checks that `#[coverage(..)]` is applied to a function/closure/method, /// or to an impl block or module. - fn check_coverage(&self, attr: &Attribute, span: Span, target: Target) { + fn check_coverage(&self, attr: &Attribute, target_span: Span, target: Target) { + let mut not_fn_impl_mod = None; + let mut no_body = None; + match target { Target::Fn | Target::Closure | Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) | Target::Impl - | Target::Mod => {} + | Target::Mod => return, + + // These are "functions", but they aren't allowed because they don't + // have a body, so the usual explanation would be confusing. + Target::Method(MethodKind::Trait { body: false }) | Target::ForeignFn => { + no_body = Some(target_span); + } _ => { - self.dcx().emit_err(errors::CoverageNotFnOrClosure { - attr_span: attr.span, - defn_span: span, - }); + not_fn_impl_mod = Some(target_span); } } + + self.dcx().emit_err(errors::CoverageAttributeNotAllowed { + attr_span: attr.span, + not_fn_impl_mod, + no_body, + help: (), + }); } /// Checks that `#[optimize(..)]` is applied to a function/closure/method, diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index 163325f2a3c..d95fa5db0ce 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -71,13 +71,21 @@ pub(crate) struct InlineNotFnOrClosure { pub defn_span: Span, } +/// "coverage attribute not allowed here" #[derive(Diagnostic)] -#[diag(passes_coverage_not_fn_or_closure, code = E0788)] -pub(crate) struct CoverageNotFnOrClosure { +#[diag(passes_coverage_attribute_not_allowed, code = E0788)] +pub(crate) struct CoverageAttributeNotAllowed { #[primary_span] pub attr_span: Span, - #[label] - pub defn_span: Span, + /// "not a function, impl block, or module" + #[label(passes_not_fn_impl_mod)] + pub not_fn_impl_mod: Option<Span>, + /// "function has no body" + #[label(passes_no_body)] + pub no_body: Option<Span>, + /// "coverage attribute can be applied to a function (with body), impl block, or module" + #[help] + pub help: (), } #[derive(Diagnostic)] diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index 009d817a1a9..ae991e3ce40 100644 --- a/compiler/rustc_pattern_analysis/src/rustc.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -415,6 +415,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { | ty::Closure(..) | ty::CoroutineClosure(..) 
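
To make the reworked E0788 diagnostic concrete: `#[coverage(off)]` is accepted on functions with a body, impl blocks, and modules, while the new "function has no body" label covers cases such as required trait methods. A sketch of where the line is drawn (on older toolchains the attribute additionally needs `#![feature(coverage_attribute)]`):

```rust
#[coverage(off)]
mod uninstrumented {
    #[coverage(off)]
    pub fn helper() -> u32 {
        41 + 1
    }
}

#[allow(dead_code)]
trait Job {
    // #[coverage(off)] // error[E0788]: coverage attribute not allowed here
    fn run(&self); //      (label: function has no body)
}

fn main() {
    assert_eq!(uninstrumented::helper(), 42);
}
```
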
| ty::Coroutine(_, _) + | ty::UnsafeBinder(_) | ty::Alias(_, _) | ty::Param(_) | ty::Error(_) => ConstructorSet::Unlistable, diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs index c50c9007a01..9ae2d981ab0 100644 --- a/compiler/rustc_privacy/src/lib.rs +++ b/compiler/rustc_privacy/src/lib.rs @@ -285,6 +285,7 @@ where | ty::Ref(..) | ty::Pat(..) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Param(..) | ty::Bound(..) | ty::Error(_) diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 7b950b97d30..cc9ed566eda 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -193,8 +193,10 @@ enum ImplTraitContext { } /// Used for tracking import use types which will be used for redundant import checking. +/// /// ### Used::Scope Example -/// ```rust,compile_fail +/// +/// ```rust,compile_fail /// #![deny(redundant_imports)] /// use std::mem::drop; /// fn main() { @@ -202,6 +204,7 @@ enum ImplTraitContext { /// drop(s); /// } /// ``` +/// /// Used::Other is for other situations like module-relative uses. #[derive(Clone, Copy, PartialEq, PartialOrd, Debug)] enum Used { diff --git a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs index 34e1c31683a..895259d52a7 100644 --- a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs +++ b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs @@ -621,6 +621,11 @@ pub(crate) fn encode_ty<'tcx>( typeid.push_str(&s); } + // FIXME(unsafe_binders): Implement this. + ty::UnsafeBinder(_) => { + todo!() + } + // Trait types ty::Dynamic(predicates, region, kind) => { // u3dynI<element-type1[..element-typeN]>E, where <element-type> is <predicate>, as diff --git a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/transform.rs b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/transform.rs index 9c01bd04353..9c6186d6882 100644 --- a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/transform.rs +++ b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/transform.rs @@ -64,7 +64,8 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for TransformTy<'tcx> { | ty::Pat(..) | ty::Slice(..) | ty::Str - | ty::Tuple(..) => t.super_fold_with(self), + | ty::Tuple(..) + | ty::UnsafeBinder(_) => t.super_fold_with(self), ty::Bool => { if self.options.contains(EncodeTyOptions::NORMALIZE_INTEGERS) { diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 047e920e688..5c36c986490 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -168,9 +168,10 @@ pub struct CoverageOptions { } /// Controls whether branch coverage or MC/DC coverage is enabled. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] pub enum CoverageLevel { /// Instrument for coverage at the MIR block level. + #[default] Block, /// Also instrument branch points (includes block coverage). Branch, @@ -195,12 +196,6 @@ pub enum CoverageLevel { Mcdc, } -impl Default for CoverageLevel { - fn default() -> Self { - Self::Block - } -} - /// Settings for `-Z instrument-xray` flag. 
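
The same cleanup applied to `CoverageLevel`, in miniature: deriving `Default` with the `#[default]` variant attribute (stable since Rust 1.62) replaces the hand-written `impl Default` that the hunk above deletes. The `Level` enum here is only an illustration:

```rust
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
enum Level {
    #[default]
    Block,
    Branch,
}

fn main() {
    assert_eq!(Level::default(), Level::Block);
    assert_ne!(Level::default(), Level::Branch);
}
```
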
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)] pub struct InstrumentXRay { diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs index a5a17b4b573..de933952c6a 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs @@ -181,7 +181,6 @@ impl<'tcx> Stable<'tcx> for mir::Rvalue<'tcx> { RawPtr(mutability, place) => { stable_mir::mir::Rvalue::AddressOf(mutability.stable(tables), place.stable(tables)) } - Len(place) => stable_mir::mir::Rvalue::Len(place.stable(tables)), Cast(cast_kind, op, ty) => stable_mir::mir::Rvalue::Cast( cast_kind.stable(tables), op.stable(tables), diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index a4f61313001..e15dad78c69 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -356,6 +356,8 @@ impl<'tcx> Stable<'tcx> for ty::TyKind<'tcx> { ty::FnPtr(sig_tys, hdr) => { TyKind::RigidTy(RigidTy::FnPtr(sig_tys.with(*hdr).stable(tables))) } + // FIXME(unsafe_binders): + ty::UnsafeBinder(_) => todo!(), ty::Dynamic(existential_predicates, region, dyn_kind) => { TyKind::RigidTy(RigidTy::Dynamic( existential_predicates diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 123e4b1f01f..3d202f11722 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1282,7 +1282,6 @@ symbols! { mir_drop, mir_field, mir_goto, - mir_len, mir_make_place, mir_move, mir_offset, @@ -2149,6 +2148,7 @@ symbols! { unwrap, unwrap_binder, unwrap_or, + unwrap_unsafe_binder, use_extern_macros, use_nested_groups, used, diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index a801b3e53a1..0ca47eba5e8 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -466,6 +466,9 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { })?; } + // FIXME(unsafe_binder): + ty::UnsafeBinder(..) 
=> todo!(), + ty::Dynamic(predicates, r, kind) => { self.push(match kind { ty::Dyn => "D", diff --git a/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs index 4fc395c221c..173977b77bd 100644 --- a/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs @@ -1,10 +1,11 @@ -use crate::spec::{LinkerFlavor, Lld, Target, base}; +use crate::spec::{LinkerFlavor, Lld, SanitizerSet, Target, base}; pub(crate) fn target() -> Target { let mut base = base::windows_msvc::opts(); + base.vendor = "win7".into(); base.cpu = "pentium4".into(); base.max_atomic_width = Some(64); - base.vendor = "win7".into(); + base.supported_sanitizers = SanitizerSet::ADDRESS; base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &[ // Mark all dynamic libraries and executables as compatible with the larger 4GiB address @@ -19,7 +20,7 @@ pub(crate) fn target() -> Target { Target { llvm_target: "i686-pc-windows-msvc".into(), metadata: crate::spec::TargetMetadata { - description: Some("32-bit Windows 7 support".into()), + description: Some("32-bit MSVC (Windows 7+)".into()), tier: Some(3), host_tools: Some(false), std: Some(true), diff --git a/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_ohos.rs b/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_ohos.rs index 12e026294cf..11d05db6b0a 100644 --- a/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_ohos.rs +++ b/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_ohos.rs @@ -15,7 +15,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic".into(), - features: "+f,+d".into(), + features: "+f,+d,+lsx".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), supported_sanitizers: SanitizerSet::ADDRESS diff --git a/compiler/rustc_target/src/spec/targets/x86_64_win7_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/x86_64_win7_windows_msvc.rs index f42188ec61a..2eceb688108 100644 --- a/compiler/rustc_target/src/spec/targets/x86_64_win7_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/x86_64_win7_windows_msvc.rs @@ -1,16 +1,17 @@ -use crate::spec::{Target, base}; +use crate::spec::{SanitizerSet, Target, base}; pub(crate) fn target() -> Target { let mut base = base::windows_msvc::opts(); + base.vendor = "win7".into(); base.cpu = "x86-64".into(); base.plt_by_default = false; base.max_atomic_width = Some(64); - base.vendor = "win7".into(); + base.supported_sanitizers = SanitizerSet::ADDRESS; Target { llvm_target: "x86_64-pc-windows-msvc".into(), metadata: crate::spec::TargetMetadata { - description: Some("64-bit Windows 7 support".into()), + description: Some("64-bit MSVC (Windows 7+)".into()), tier: Some(3), host_tools: Some(false), std: Some(true), diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index d41f8f46c17..885b606326c 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -1532,6 +1532,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ty::CoroutineWitness(..) => Some(20), ty::CoroutineClosure(..) => Some(21), ty::Pat(..) => Some(22), + ty::UnsafeBinder(..) => Some(23), ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) 
| ty::Error(_) => None, } } diff --git a/compiler/rustc_trait_selection/src/solve/delegate.rs b/compiler/rustc_trait_selection/src/solve/delegate.rs index 97cde67799c..9b8c9ff6bb8 100644 --- a/compiler/rustc_trait_selection/src/solve/delegate.rs +++ b/compiler/rustc_trait_selection/src/solve/delegate.rs @@ -123,8 +123,6 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate< ) }); - assert_eq!(region_constraints.member_constraints, vec![]); - let mut seen = FxHashSet::default(); region_constraints .outlives diff --git a/compiler/rustc_trait_selection/src/traits/effects.rs b/compiler/rustc_trait_selection/src/traits/effects.rs index b17a489a857..91484ef99db 100644 --- a/compiler/rustc_trait_selection/src/traits/effects.rs +++ b/compiler/rustc_trait_selection/src/traits/effects.rs @@ -1,13 +1,15 @@ use rustc_hir as hir; -use rustc_infer::infer::{BoundRegionConversionTime, DefineOpaqueTypes, InferCtxt}; +use rustc_infer::infer::{BoundRegionConversionTime, DefineOpaqueTypes}; use rustc_infer::traits::{ImplSource, Obligation, PredicateObligation}; use rustc_middle::span_bug; use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::{self, TypingMode}; +use rustc_type_ir::elaborate::elaborate; use rustc_type_ir::solve::NoSolution; -use thin_vec::ThinVec; +use thin_vec::{ThinVec, thin_vec}; use super::SelectionContext; +use super::normalize::normalize_with_depth_to; pub type HostEffectObligation<'tcx> = Obligation<'tcx, ty::HostEffectPredicate<'tcx>>; @@ -38,6 +40,12 @@ pub fn evaluate_host_effect_obligation<'tcx>( Err(EvaluationFailure::NoSolution) => {} } + match evaluate_host_effect_from_item_bounds(selcx, obligation) { + Ok(result) => return Ok(result), + Err(EvaluationFailure::Ambiguous) => return Err(EvaluationFailure::Ambiguous), + Err(EvaluationFailure::NoSolution) => {} + } + match evaluate_host_effect_from_selection_candiate(selcx, obligation) { Ok(result) => return Ok(result), Err(EvaluationFailure::Ambiguous) => return Err(EvaluationFailure::Ambiguous), @@ -48,24 +56,45 @@ pub fn evaluate_host_effect_obligation<'tcx>( } fn match_candidate<'tcx>( - infcx: &InferCtxt<'tcx>, + selcx: &mut SelectionContext<'_, 'tcx>, obligation: &HostEffectObligation<'tcx>, candidate: ty::Binder<'tcx, ty::HostEffectPredicate<'tcx>>, + candidate_is_unnormalized: bool, + more_nested: impl FnOnce(&mut SelectionContext<'_, 'tcx>, &mut ThinVec<PredicateObligation<'tcx>>), ) -> Result<ThinVec<PredicateObligation<'tcx>>, NoSolution> { if !candidate.skip_binder().constness.satisfies(obligation.predicate.constness) { return Err(NoSolution); } - let candidate = infcx.instantiate_binder_with_fresh_vars( + let mut candidate = selcx.infcx.instantiate_binder_with_fresh_vars( obligation.cause.span, BoundRegionConversionTime::HigherRankedType, candidate, ); - let mut nested = infcx - .at(&obligation.cause, obligation.param_env) - .eq(DefineOpaqueTypes::Yes, obligation.predicate.trait_ref, candidate.trait_ref)? - .into_obligations(); + let mut nested = thin_vec![]; + + // Unlike param-env bounds, item bounds may not be normalized. + if candidate_is_unnormalized { + candidate = normalize_with_depth_to( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth, + candidate, + &mut nested, + ); + } + + nested.extend( + selcx + .infcx + .at(&obligation.cause, obligation.param_env) + .eq(DefineOpaqueTypes::Yes, obligation.predicate.trait_ref, candidate.trait_ref)? 
+ .into_obligations(), + ); + + more_nested(selcx, &mut nested); for nested in &mut nested { nested.set_depth_from_parent(obligation.recursion_depth); @@ -82,36 +111,116 @@ fn evaluate_host_effect_from_bounds<'tcx>( let drcx = DeepRejectCtxt::relate_rigid_rigid(selcx.tcx()); let mut candidate = None; - for predicate in obligation.param_env.caller_bounds() { - let bound_predicate = predicate.kind(); - if let ty::ClauseKind::HostEffect(data) = predicate.kind().skip_binder() { - let data = bound_predicate.rebind(data); - if data.skip_binder().trait_ref.def_id != obligation.predicate.trait_ref.def_id { - continue; + for clause in obligation.param_env.caller_bounds() { + let bound_clause = clause.kind(); + let ty::ClauseKind::HostEffect(data) = bound_clause.skip_binder() else { + continue; + }; + let data = bound_clause.rebind(data); + if data.skip_binder().trait_ref.def_id != obligation.predicate.trait_ref.def_id { + continue; + } + + if !drcx + .args_may_unify(obligation.predicate.trait_ref.args, data.skip_binder().trait_ref.args) + { + continue; + } + + let is_match = + infcx.probe(|_| match_candidate(selcx, obligation, data, false, |_, _| {}).is_ok()); + + if is_match { + if candidate.is_some() { + return Err(EvaluationFailure::Ambiguous); + } else { + candidate = Some(data); } + } + } - if !drcx.args_may_unify( - obligation.predicate.trait_ref.args, - data.skip_binder().trait_ref.args, + if let Some(data) = candidate { + Ok(match_candidate(selcx, obligation, data, false, |_, _| {}) + .expect("candidate matched before, so it should match again")) + } else { + Err(EvaluationFailure::NoSolution) + } +} + +fn evaluate_host_effect_from_item_bounds<'tcx>( + selcx: &mut SelectionContext<'_, 'tcx>, + obligation: &HostEffectObligation<'tcx>, +) -> Result<ThinVec<PredicateObligation<'tcx>>, EvaluationFailure> { + let infcx = selcx.infcx; + let tcx = infcx.tcx; + let drcx = DeepRejectCtxt::relate_rigid_rigid(selcx.tcx()); + let mut candidate = None; + + let mut consider_ty = obligation.predicate.self_ty(); + while let ty::Alias(kind @ (ty::Projection | ty::Opaque), alias_ty) = *consider_ty.kind() { + if tcx.is_conditionally_const(alias_ty.def_id) { + for clause in elaborate( + tcx, + tcx.explicit_implied_const_bounds(alias_ty.def_id) + .iter_instantiated_copied(tcx, alias_ty.args) + .map(|(trait_ref, _)| { + trait_ref.to_host_effect_clause(tcx, obligation.predicate.constness) + }), ) { - continue; - } + let bound_clause = clause.kind(); + let ty::ClauseKind::HostEffect(data) = bound_clause.skip_binder() else { + unreachable!("should not elaborate non-HostEffect from HostEffect") + }; + let data = bound_clause.rebind(data); + if data.skip_binder().trait_ref.def_id != obligation.predicate.trait_ref.def_id { + continue; + } - let is_match = infcx.probe(|_| match_candidate(infcx, obligation, data).is_ok()); + if !drcx.args_may_unify( + obligation.predicate.trait_ref.args, + data.skip_binder().trait_ref.args, + ) { + continue; + } - if is_match { - if candidate.is_some() { - return Err(EvaluationFailure::Ambiguous); - } else { - candidate = Some(data); + let is_match = infcx + .probe(|_| match_candidate(selcx, obligation, data, true, |_, _| {}).is_ok()); + + if is_match { + if candidate.is_some() { + return Err(EvaluationFailure::Ambiguous); + } else { + candidate = Some((data, alias_ty)); + } } } } + + if kind != ty::Projection { + break; + } + + consider_ty = alias_ty.self_ty(); } - if let Some(data) = candidate { - Ok(match_candidate(infcx, obligation, data) - .expect("candidate matched before, so it 
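
The new `evaluate_host_effect_from_item_bounds` path above consults bounds declared on an alias (associated or opaque type) rather than the caller's param-env. This is not the const-trait machinery itself, but the non-const analogue below shows the shape of an "item bound" being used as a proof source (`HasAssoc` and `duplicate` are invented names):

```rust
trait HasAssoc {
    type Assoc: Clone; // item bound on the associated type
}

fn duplicate<T: HasAssoc>(x: &T::Assoc) -> T::Assoc {
    // `T::Assoc: Clone` is proven from the item bound above, with no extra
    // where-clause at the use site; the patch adds the analogous lookup for
    // `~const` host-effect predicates.
    x.clone()
}

#[allow(dead_code)]
struct Impl;

impl HasAssoc for Impl {
    type Assoc = String;
}

fn main() {
    let s = duplicate::<Impl>(&"hi".to_owned());
    assert_eq!(s, "hi");
}
```
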
should match again")) + if let Some((data, alias_ty)) = candidate { + Ok(match_candidate(selcx, obligation, data, true, |selcx, nested| { + // An alias bound only holds if we also check the const conditions + // of the alias, so we need to register those, too. + let const_conditions = normalize_with_depth_to( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth, + tcx.const_conditions(alias_ty.def_id).instantiate(tcx, alias_ty.args), + nested, + ); + nested.extend(const_conditions.into_iter().map(|(trait_ref, _)| { + obligation + .with(tcx, trait_ref.to_host_effect_clause(tcx, obligation.predicate.constness)) + })); + }) + .expect("candidate matched before, so it should match again")) } else { Err(EvaluationFailure::NoSolution) } diff --git a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs index 88c11e55b7a..23dabe32ff2 100644 --- a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs +++ b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs @@ -4,11 +4,10 @@ use rustc_infer::infer::resolve::OpportunisticRegionResolver; use rustc_infer::traits::query::type_op::ImpliedOutlivesBounds; use rustc_macros::extension; use rustc_middle::infer::canonical::{OriginalQueryValues, QueryRegionConstraints}; -use rustc_middle::span_bug; pub use rustc_middle::traits::query::OutlivesBound; use rustc_middle::ty::{self, ParamEnv, Ty, TypeFolder, TypeVisitableExt}; use rustc_span::def_id::LocalDefId; -use tracing::{debug, instrument}; +use tracing::instrument; use crate::infer::InferCtxt; use crate::traits::{ObligationCause, ObligationCtxt}; @@ -86,16 +85,12 @@ fn implied_outlives_bounds<'a, 'tcx>( bounds.retain(|bound| !bound.has_placeholders()); if !constraints.is_empty() { - debug!(?constraints); - if !constraints.member_constraints.is_empty() { - span_bug!(span, "{:#?}", constraints.member_constraints); - } - + let QueryRegionConstraints { outlives } = constraints; // Instantiation may have produced new inference variables and constraints on those // variables. Process these constraints. let ocx = ObligationCtxt::new(infcx); let cause = ObligationCause::misc(span, body_id); - for &constraint in &constraints.outlives { + for &constraint in &outlives { ocx.register_obligation(infcx.query_outlives_constraint_to_obligation( constraint, cause.clone(), diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index 4bccd3450bc..54407d17dcf 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1047,6 +1047,8 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( // Integers and floats always have `u8` as their discriminant. | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true, + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + // type parameters, opaques, and unnormalized projections don't have // a known discriminant and may need to be normalized further or rely // on param env for discriminant projections @@ -1072,6 +1074,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Ref(..) | ty::FnDef(..) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(..) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -1163,6 +1166,8 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( true } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + // FIXME(compiler-errors): are Bound and Placeholder types ever known sized? 
ty::Param(_) | ty::Alias(..) diff --git a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs index 1d3e8d43af7..4004e354dc1 100644 --- a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs @@ -83,7 +83,8 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { | ty::Placeholder(..) | ty::Infer(_) | ty::Bound(..) - | ty::Coroutine(..) => false, + | ty::Coroutine(..) + | ty::UnsafeBinder(_) => false, } } @@ -336,6 +337,11 @@ pub fn dtorck_constraint_for_ty_inner<'tcx>( constraints.dtorck_types.push(ty); } + // Can't instantiate binder here. + ty::UnsafeBinder(_) => { + constraints.dtorck_types.push(ty); + } + ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => { // By the time this code runs, all type variables ought to // be fully resolved. diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs index a618d96ce95..54fce914bb6 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs @@ -180,11 +180,8 @@ where span, )?; output.error_info = error_info; - if let Some(constraints) = output.constraints { - region_constraints - .member_constraints - .extend(constraints.member_constraints.iter().cloned()); - region_constraints.outlives.extend(constraints.outlives.iter().cloned()); + if let Some(QueryRegionConstraints { outlives }) = output.constraints { + region_constraints.outlives.extend(outlives.iter().cloned()); } output.constraints = if region_constraints.is_empty() { None diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index 5e27fd43789..d6ac4baf8ad 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -619,7 +619,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { continue; } - match obligation.self_ty().skip_binder().kind() { + let self_ty = obligation.self_ty().skip_binder(); + match self_ty.kind() { // Fast path to avoid evaluating an obligation that trivially holds. // There may be more bounds, but these are checked by the regular path. ty::FnPtr(..) => return false, @@ -651,6 +652,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::CoroutineClosure(..) | ty::Coroutine(_, _) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Never | ty::Tuple(_) | ty::Error(_) => return true, @@ -794,7 +796,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::Coroutine(..) | ty::Never | ty::Tuple(_) - | ty::CoroutineWitness(..) => { + | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) => { // Only consider auto impls of unsafe traits when there are // no unsafe fields. if self.tcx().trait_is_unsafe(def_id) && self_ty.has_unsafe_fields() { @@ -1176,6 +1179,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::FnDef(_, _) | ty::Pat(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -1220,6 +1224,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::CoroutineClosure(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) + | ty::UnsafeBinder(_) | ty::Never | ty::Tuple(..) | ty::Alias(..) 
diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 0462b1d9ee7..7857ed95cc7 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -2095,6 +2095,9 @@ impl<'tcx> SelectionContext<'_, 'tcx> { } } + // FIXME(unsafe_binders): This binder needs to be squashed + ty::UnsafeBinder(binder_ty) => Where(binder_ty.map_bound(|ty| vec![ty])), + ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => None, ty::Infer(ty::TyVar(_)) => Ambiguous, @@ -2133,6 +2136,10 @@ impl<'tcx> SelectionContext<'_, 'tcx> { None } + // FIXME(unsafe_binder): Should we conditionally + // (i.e. universally) implement copy/clone? + ty::UnsafeBinder(_) => None, + ty::Dynamic(..) | ty::Str | ty::Slice(..) @@ -2285,6 +2292,9 @@ impl<'tcx> SelectionContext<'_, 'tcx> { | ty::Never | ty::Char => ty::Binder::dummy(Vec::new()), + // FIXME(unsafe_binders): Squash the double binder for now, I guess. + ty::UnsafeBinder(_) => return Err(SelectionError::Unimplemented), + // Treat this like `struct str([u8]);` ty::Str => ty::Binder::dummy(vec![Ty::new_slice(self.tcx(), self.tcx().types.u8)]), diff --git a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs index 1430cfae51f..401b41c796d 100644 --- a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs @@ -483,15 +483,19 @@ fn report_negative_positive_conflict<'tcx>( negative_impl_def_id: DefId, positive_impl_def_id: DefId, ) -> ErrorGuaranteed { - tcx.dcx() - .create_err(NegativePositiveConflict { - impl_span: tcx.def_span(local_impl_def_id), - trait_desc: overlap.trait_ref, - self_ty: overlap.self_ty, - negative_impl_span: tcx.span_of_impl(negative_impl_def_id), - positive_impl_span: tcx.span_of_impl(positive_impl_def_id), - }) - .emit() + let mut diag = tcx.dcx().create_err(NegativePositiveConflict { + impl_span: tcx.def_span(local_impl_def_id), + trait_desc: overlap.trait_ref, + self_ty: overlap.self_ty, + negative_impl_span: tcx.span_of_impl(negative_impl_def_id), + positive_impl_span: tcx.span_of_impl(positive_impl_def_id), + }); + + for cause in &overlap.intercrate_ambiguity_causes { + cause.add_intercrate_ambiguity_hint(&mut diag); + } + + diag.emit() } fn report_conflicting_impls<'tcx>( diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs index c95b1641d1f..9d32eb05386 100644 --- a/compiler/rustc_trait_selection/src/traits/wf.rs +++ b/compiler/rustc_trait_selection/src/traits/wf.rs @@ -828,6 +828,9 @@ impl<'a, 'tcx> TypeVisitor<TyCtxt<'tcx>> for WfPredicates<'a, 'tcx> { // Let the visitor iterate into the argument/return // types appearing in the fn signature. } + ty::UnsafeBinder(_) => { + // FIXME(unsafe_binders): We should also recurse into the binder here. + } ty::Dynamic(data, r, _) => { // WfObject diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index eb30169a7d9..fc76a86f797 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ -49,7 +49,8 @@ fn resolve_instance_raw<'tcx>( | ty::Adt(..) | ty::Dynamic(..) | ty::Array(..) - | ty::Slice(..) => {} + | ty::Slice(..) + | ty::UnsafeBinder(..) => {} // Drop shims can only be built from ADTs. 
_ => return Ok(None), } diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index a3b2ed07d4b..9f138cf1275 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -666,6 +666,11 @@ fn layout_of_uncached<'tcx>( tcx.mk_layout(layout) } + ty::UnsafeBinder(bound_ty) => { + let ty = tcx.instantiate_bound_regions_with_erased(bound_ty.into()); + cx.layout_of(ty)?.layout + } + // Types with no meaningful known layout. ty::Alias(..) => { // NOTE(eddyb) `layout_of` query should've normalized these away, diff --git a/compiler/rustc_ty_utils/src/needs_drop.rs b/compiler/rustc_ty_utils/src/needs_drop.rs index 1c85eb2a861..80de7e20951 100644 --- a/compiler/rustc_ty_utils/src/needs_drop.rs +++ b/compiler/rustc_ty_utils/src/needs_drop.rs @@ -202,6 +202,11 @@ where } } + ty::UnsafeBinder(bound_ty) => { + let ty = self.tcx.instantiate_bound_regions_with_erased(bound_ty.into()); + queue_type(self, ty); + } + _ if tcx.type_is_copy_modulo_regions(self.typing_env, component) => {} ty::Closure(_, args) => { diff --git a/compiler/rustc_ty_utils/src/ty.rs b/compiler/rustc_ty_utils/src/ty.rs index 774f0660258..7eed32e3a33 100644 --- a/compiler/rustc_ty_utils/src/ty.rs +++ b/compiler/rustc_ty_utils/src/ty.rs @@ -37,6 +37,8 @@ fn sized_constraint_for_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<Ty<' | Never | Dynamic(_, _, ty::DynStar) => None, + UnsafeBinder(_) => todo!(), + // these are never sized Str | Slice(..) | Dynamic(_, _, ty::Dyn) | Foreign(..) => Some(ty), diff --git a/compiler/rustc_type_ir/src/binder.rs b/compiler/rustc_type_ir/src/binder.rs index c06a578d8ec..47447af2215 100644 --- a/compiler/rustc_type_ir/src/binder.rs +++ b/compiler/rustc_type_ir/src/binder.rs @@ -6,8 +6,6 @@ use std::ops::{ControlFlow, Deref}; use derive_where::derive_where; #[cfg(feature = "nightly")] use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable}; -#[cfg(feature = "nightly")] -use rustc_serialize::Decodable; use tracing::instrument; use crate::data_structures::SsoHashSet; @@ -69,14 +67,14 @@ macro_rules! impl_binder_encode_decode { self.as_ref().skip_binder().encode(e); } } - impl<I: Interner, D: crate::TyDecoder<I = I>> Decodable<D> for ty::Binder<I, $t> + impl<I: Interner, D: crate::TyDecoder<I = I>> rustc_serialize::Decodable<D> for ty::Binder<I, $t> where $t: TypeVisitable<I> + rustc_serialize::Decodable<D>, I::BoundVarKinds: rustc_serialize::Decodable<D>, { fn decode(decoder: &mut D) -> Self { - let bound_vars = Decodable::decode(decoder); - ty::Binder::bind_with_vars(<$t>::decode(decoder), bound_vars) + let bound_vars = rustc_serialize::Decodable::decode(decoder); + ty::Binder::bind_with_vars(rustc_serialize::Decodable::decode(decoder), bound_vars) } } )* diff --git a/compiler/rustc_type_ir/src/fast_reject.rs b/compiler/rustc_type_ir/src/fast_reject.rs index 81c8a7d4bfa..9b3ff14d507 100644 --- a/compiler/rustc_type_ir/src/fast_reject.rs +++ b/compiler/rustc_type_ir/src/fast_reject.rs @@ -41,6 +41,7 @@ pub enum SimplifiedType<DefId> { Coroutine(DefId), CoroutineWitness(DefId), Function(usize), + UnsafeBinder, Placeholder, Error, } @@ -138,6 +139,7 @@ pub fn simplify_type<I: Interner>( ty::FnPtr(sig_tys, _hdr) => { Some(SimplifiedType::Function(sig_tys.skip_binder().inputs().len())) } + ty::UnsafeBinder(_) => Some(SimplifiedType::UnsafeBinder), ty::Placeholder(..) 
=> Some(SimplifiedType::Placeholder), ty::Param(_) => match treat_params { TreatParams::AsRigid => Some(SimplifiedType::Placeholder), @@ -290,7 +292,8 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_ | ty::Coroutine(..) | ty::CoroutineWitness(..) | ty::Foreign(_) - | ty::Placeholder(_) => {} + | ty::Placeholder(_) + | ty::UnsafeBinder(_) => {} }; // The type system needs to support exponentially large types @@ -447,6 +450,13 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_ matches!(rhs.kind(), ty::Pat(rhs_ty, _) if self.types_may_unify_inner(lhs_ty, rhs_ty, depth)) } + ty::UnsafeBinder(lhs_ty) => match rhs.kind() { + ty::UnsafeBinder(rhs_ty) => { + self.types_may_unify(lhs_ty.skip_binder(), rhs_ty.skip_binder()) + } + _ => false, + }, + ty::Error(..) => true, } } diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 2db40accda3..872cf668018 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -112,6 +112,8 @@ pub trait Ty<I: Interner<Ty = Self>>: fn new_pat(interner: I, ty: Self, pat: I::Pat) -> Self; + fn new_unsafe_binder(interner: I, ty: ty::Binder<I, I::Ty>) -> Self; + fn tuple_fields(self) -> I::Tys; fn to_opt_closure_kind(self) -> Option<ty::ClosureKind>; @@ -185,6 +187,7 @@ pub trait Ty<I: Interner<Ty = Self>>: | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) diff --git a/compiler/rustc_type_ir/src/outlives.rs b/compiler/rustc_type_ir/src/outlives.rs index 0e94e989b97..c26e211a794 100644 --- a/compiler/rustc_type_ir/src/outlives.rs +++ b/compiler/rustc_type_ir/src/outlives.rs @@ -202,6 +202,7 @@ impl<I: Interner> TypeVisitor<I> for OutlivesCollector<'_, I> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnPtr(..) + | ty::UnsafeBinder(_) | ty::Dynamic(_, _, _) | ty::Tuple(_) => { ty.super_visit_with(self); diff --git a/compiler/rustc_type_ir/src/relate.rs b/compiler/rustc_type_ir/src/relate.rs index 0b013b2017f..e628b66d2f0 100644 --- a/compiler/rustc_type_ir/src/relate.rs +++ b/compiler/rustc_type_ir/src/relate.rs @@ -549,6 +549,10 @@ pub fn structurally_relate_tys<I: Interner, R: TypeRelation<I>>( Ok(Ty::new_pat(cx, ty, pat)) } + (ty::UnsafeBinder(a_binder), ty::UnsafeBinder(b_binder)) => { + Ok(Ty::new_unsafe_binder(cx, relation.binders(*a_binder, *b_binder)?)) + } + _ => Err(TypeError::Sorts(ExpectedFound::new(a, b))), } } diff --git a/compiler/rustc_type_ir/src/ty_kind.rs b/compiler/rustc_type_ir/src/ty_kind.rs index 033fcdb6c03..52e4fa19cb0 100644 --- a/compiler/rustc_type_ir/src/ty_kind.rs +++ b/compiler/rustc_type_ir/src/ty_kind.rs @@ -1,4 +1,5 @@ use std::fmt; +use std::ops::Deref; use derive_where::derive_where; use rustc_ast_ir::Mutability; @@ -13,6 +14,7 @@ use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Gen use self::TyKind::*; pub use self::closure::*; use crate::inherent::*; +use crate::visit::TypeVisitable; use crate::{self as ty, DebruijnIndex, Interner}; mod closure; @@ -150,6 +152,13 @@ pub enum TyKind<I: Interner> { /// worth the mild inconvenience. FnPtr(ty::Binder<I, FnSigTys<I>>, FnHeader<I>), + /// An unsafe binder type. + /// + /// A higher-ranked type used to represent a type which has had some of its + /// lifetimes erased. This can be used to represent types in positions where + /// a lifetime is literally inexpressible, such as self-referential types. 
+ UnsafeBinder(UnsafeBinderInner<I>), + /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. Dynamic(I::BoundExistentialPredicates, I::Region, DynKind), @@ -287,6 +296,8 @@ impl<I: Interner> fmt::Debug for TyKind<I> { Ref(r, t, m) => write!(f, "&{:?} {}{:?}", r, m.prefix_str(), t), FnDef(d, s) => f.debug_tuple("FnDef").field(d).field(&s).finish(), FnPtr(sig_tys, hdr) => write!(f, "{:?}", sig_tys.with(*hdr)), + // FIXME(unsafe_binder): print this like `unsafe<'a> T<'a>`. + UnsafeBinder(binder) => write!(f, "{:?}", binder), Dynamic(p, r, repr) => match repr { DynKind::Dyn => write!(f, "dyn {p:?} + {r:?}"), DynKind::DynStar => write!(f, "dyn* {p:?} + {r:?}"), @@ -964,6 +975,66 @@ impl<I: Interner> fmt::Debug for FnSig<I> { } } +// FIXME: this is a distinct type because we need to define `Encode`/`Decode` +// impls in this crate for `Binder<I, I::Ty>`. +#[derive_where(Clone, Copy, PartialEq, Eq, Hash; I: Interner)] +#[cfg_attr(feature = "nightly", derive(HashStable_NoContext))] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +pub struct UnsafeBinderInner<I: Interner>(ty::Binder<I, I::Ty>); + +impl<I: Interner> From<ty::Binder<I, I::Ty>> for UnsafeBinderInner<I> { + fn from(value: ty::Binder<I, I::Ty>) -> Self { + UnsafeBinderInner(value) + } +} + +impl<I: Interner> From<UnsafeBinderInner<I>> for ty::Binder<I, I::Ty> { + fn from(value: UnsafeBinderInner<I>) -> Self { + value.0 + } +} + +impl<I: Interner> fmt::Debug for UnsafeBinderInner<I> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl<I: Interner> Deref for UnsafeBinderInner<I> { + type Target = ty::Binder<I, I::Ty>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(feature = "nightly")] +impl<I: Interner, E: crate::TyEncoder<I = I>> rustc_serialize::Encodable<E> for UnsafeBinderInner<I> +where + I::Ty: rustc_serialize::Encodable<E>, + I::BoundVarKinds: rustc_serialize::Encodable<E>, +{ + fn encode(&self, e: &mut E) { + self.bound_vars().encode(e); + self.as_ref().skip_binder().encode(e); + } +} + +#[cfg(feature = "nightly")] +impl<I: Interner, D: crate::TyDecoder<I = I>> rustc_serialize::Decodable<D> for UnsafeBinderInner<I> +where + I::Ty: TypeVisitable<I> + rustc_serialize::Decodable<D>, + I::BoundVarKinds: rustc_serialize::Decodable<D>, +{ + fn decode(decoder: &mut D) -> Self { + let bound_vars = rustc_serialize::Decodable::decode(decoder); + UnsafeBinderInner(ty::Binder::bind_with_vars( + rustc_serialize::Decodable::decode(decoder), + bound_vars, + )) + } +} + // This is just a `FnSig` without the `FnHeader` fields. 
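
Stable Rust already has higher-ranked binders for function pointers and trait objects (`for<'a> ...`); the new `UnsafeBinder` kind generalizes that idea to arbitrary types, behind an unstable surface syntax (reportedly `unsafe<'a> T`) that is still being wired up, hence the FIXMEs above. The closest stable analogue, for orientation only:

```rust
// A higher-ranked function-pointer type: the binder quantifies over the
// lifetime, so any lifetime can be supplied by the caller.
fn takes_any_lifetime(f: for<'a> fn(&'a u32) -> u32) -> u32 {
    let x = 7;
    f(&x)
}

fn add_one(x: &u32) -> u32 {
    *x + 1
}

fn main() {
    assert_eq!(takes_any_lifetime(add_one), 8);
}
```
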
#[derive_where(Clone, Copy, Debug, PartialEq, Eq, Hash; I: Interner)] #[cfg_attr(feature = "nightly", derive(TyEncodable, TyDecodable, HashStable_NoContext))] diff --git a/library/Cargo.lock b/library/Cargo.lock index 2026cd584cc..22f6e1edf21 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -189,9 +189,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.5" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "compiler_builtins", "memchr", diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs index 116e0e73e96..965fd63a529 100644 --- a/library/alloc/src/collections/binary_heap/mod.rs +++ b/library/alloc/src/collections/binary_heap/mod.rs @@ -531,8 +531,7 @@ impl<T: Ord, A: Allocator> BinaryHeap<T, A> { /// heap.push(1); /// heap.push(5); /// heap.push(2); - /// { - /// let mut val = heap.peek_mut().unwrap(); + /// if let Some(mut val) = heap.peek_mut() { /// *val = 0; /// } /// assert_eq!(heap.peek(), Some(&2)); diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index 55496005f40..3a706d5f36b 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -56,7 +56,6 @@ #[cfg(not(no_global_oom_handling))] use core::cmp; use core::cmp::Ordering; -use core::fmt; use core::hash::{Hash, Hasher}; #[cfg(not(no_global_oom_handling))] use core::iter; @@ -65,6 +64,7 @@ use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ops::{self, Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; use core::slice::{self, SliceIndex}; +use core::{fmt, intrinsics}; #[unstable(feature = "extract_if", reason = "recently added", issue = "43244")] pub use self::extract_if::ExtractIf; @@ -2680,7 +2680,14 @@ impl<T, A: Allocator> Vec<T, A> { #[rustc_const_unstable(feature = "const_vec_string_slice", issue = "129041")] #[rustc_confusables("length", "size")] pub const fn len(&self) -> usize { - self.len + let len = self.len; + + // SAFETY: The maximum capacity of `Vec<T>` is `isize::MAX` bytes, so the maximum value can + // be returned is `usize::checked_div(mem::size_of::<T>()).unwrap_or(usize::MAX)`, which + // matches the definition of `T::MAX_SLICE_LEN`. + unsafe { intrinsics::assume(len <= T::MAX_SLICE_LEN) }; + + len } /// Returns `true` if the vector contains no elements. diff --git a/library/core/benches/ascii/is_ascii.rs b/library/core/benches/ascii/is_ascii.rs index 4b2920c5eb4..ced7084fb0e 100644 --- a/library/core/benches/ascii/is_ascii.rs +++ b/library/core/benches/ascii/is_ascii.rs @@ -10,9 +10,12 @@ macro_rules! benches { // Ensure we benchmark cases where the functions are called with strings // that are not perfectly aligned or have a length which is not a // multiple of size_of::<usize>() (or both) - benches!(mod unaligned_head MEDIUM[1..] $($name $arg $body)+); - benches!(mod unaligned_tail MEDIUM[..(MEDIUM.len() - 1)] $($name $arg $body)+); - benches!(mod unaligned_both MEDIUM[1..(MEDIUM.len() - 1)] $($name $arg $body)+); + benches!(mod unaligned_head_medium MEDIUM[1..] $($name $arg $body)+); + benches!(mod unaligned_tail_medium MEDIUM[..(MEDIUM.len() - 1)] $($name $arg $body)+); + benches!(mod unaligned_both_medium MEDIUM[1..(MEDIUM.len() - 1)] $($name $arg $body)+); + benches!(mod unaligned_head_long LONG[1..] 
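
The `Vec::len` hunk above feeds an invariant to the optimizer with `intrinsics::assume`. A user-space sketch of the same idea using the stable `hint::assert_unchecked` (since Rust 1.81); the `len <= buf.len()` invariant here is only an example, not the bound `Vec` actually asserts:

```rust
use std::hint;

fn first_four(buf: &[u8], len: usize) -> &[u8] {
    // SAFETY: the caller promises `len <= buf.len()`; the hint lets the
    // optimizer drop later bounds checks that the invariant makes redundant.
    unsafe { hint::assert_unchecked(len <= buf.len()) };
    &buf[..len.min(4)]
}

fn main() {
    let data = [1u8, 2, 3, 4, 5];
    assert_eq!(first_four(&data, 5), &[1, 2, 3, 4]);
}
```
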
$($name $arg $body)+); + benches!(mod unaligned_tail_long LONG[..(LONG.len() - 1)] $($name $arg $body)+); + benches!(mod unaligned_both_long LONG[1..(LONG.len() - 1)] $($name $arg $body)+); }; (mod $mod_name: ident $input: ident [$range: expr] $($name: ident $arg: ident $body: block)+) => { @@ -49,6 +52,44 @@ benches! { fn case03_align_to_unrolled(bytes: &[u8]) { is_ascii_align_to_unrolled(bytes) } + + fn case04_while_loop(bytes: &[u8]) { + // Process chunks of 32 bytes at a time in the fast path to enable + // auto-vectorization and use of `pmovmskb`. Two 128-bit vector registers + // can be OR'd together and then the resulting vector can be tested for + // non-ASCII bytes. + const CHUNK_SIZE: usize = 32; + + let mut i = 0; + + while i + CHUNK_SIZE <= bytes.len() { + let chunk_end = i + CHUNK_SIZE; + + // Get LLVM to produce a `pmovmskb` instruction on x86-64 which + // creates a mask from the most significant bit of each byte. + // ASCII bytes are less than 128 (0x80), so their most significant + // bit is unset. + let mut count = 0; + while i < chunk_end { + count += bytes[i].is_ascii() as u8; + i += 1; + } + + // All bytes should be <= 127 so count is equal to chunk size. + if count != CHUNK_SIZE as u8 { + return false; + } + } + + // Process the remaining `bytes.len() % N` bytes. + let mut is_ascii = true; + while i < bytes.len() { + is_ascii &= bytes[i].is_ascii(); + i += 1; + } + + is_ascii + } } // These are separate since it's easier to debug errors if they don't go through diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs index d884fa69efb..e6f39db9dce 100644 --- a/library/core/src/alloc/layout.rs +++ b/library/core/src/alloc/layout.rs @@ -178,8 +178,7 @@ impl Layout { /// allocate backing structure for `T` (which could be a trait /// or other unsized type like a slice). #[stable(feature = "alloc_layout", since = "1.28.0")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[rustc_const_stable_indirect] + #[rustc_const_stable(feature = "const_alloc_layout", since = "CURRENT_RUSTC_VERSION")] #[must_use] #[inline] pub const fn for_value<T: ?Sized>(t: &T) -> Self { @@ -253,8 +252,7 @@ impl Layout { /// Returns an error if the combination of `self.size()` and the given /// `align` violates the conditions listed in [`Layout::from_size_align`]. #[stable(feature = "alloc_layout_manipulation", since = "1.44.0")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[rustc_const_stable_indirect] + #[rustc_const_stable(feature = "const_alloc_layout", since = "CURRENT_RUSTC_VERSION")] #[inline] pub const fn align_to(&self, align: usize) -> Result<Self, LayoutError> { if let Some(align) = Alignment::new(align) { @@ -329,8 +327,7 @@ impl Layout { /// This is equivalent to adding the result of `padding_needed_for` /// to the layout's current size. 
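
With the `Layout` methods above const-stabilized (on a toolchain that includes the `const_alloc_layout` stabilization shown in these hunks), a repr(C)-style layout can be computed entirely at compile time. A minimal sketch, with an invented `pair_layout` helper and an arbitrary field pair:

```rust
use std::alloc::Layout;

const fn pair_layout() -> Layout {
    let a = Layout::new::<u32>();
    let b = Layout::new::<u16>();
    // `extend` appends `b` after `a` (returning the field offset as well),
    // and `pad_to_align` rounds the size up to the alignment.
    match a.extend(b) {
        Ok((combined, _offset_of_b)) => combined.pad_to_align(),
        Err(_) => panic!("layout overflow"),
    }
}

const PAIR: Layout = pair_layout();

fn main() {
    assert_eq!(PAIR.size(), 8);
    assert_eq!(PAIR.align(), 4);
}
```
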
#[stable(feature = "alloc_layout_manipulation", since = "1.44.0")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[rustc_const_stable_indirect] + #[rustc_const_stable(feature = "const_alloc_layout", since = "CURRENT_RUSTC_VERSION")] #[must_use = "this returns a new `Layout`, \ without modifying the original"] #[inline] @@ -429,8 +426,7 @@ impl Layout { /// # assert_eq!(repr_c(&[u64, u32, u16, u32]), Ok((s, vec![0, 8, 12, 16]))); /// ``` #[stable(feature = "alloc_layout_manipulation", since = "1.44.0")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[rustc_const_stable_indirect] + #[rustc_const_stable(feature = "const_alloc_layout", since = "CURRENT_RUSTC_VERSION")] #[inline] pub const fn extend(&self, next: Self) -> Result<(Self, usize), LayoutError> { let new_align = Alignment::max(self.align, next.align); @@ -493,8 +489,7 @@ impl Layout { /// On arithmetic overflow or when the total size would exceed /// `isize::MAX`, returns `LayoutError`. #[stable(feature = "alloc_layout_manipulation", since = "1.44.0")] - #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")] - #[rustc_const_stable_indirect] + #[rustc_const_stable(feature = "const_alloc_layout", since = "CURRENT_RUSTC_VERSION")] #[inline] pub const fn array<T>(n: usize) -> Result<Self, LayoutError> { // Reduce the amount of code we need to monomorphize per `T`. diff --git a/library/core/src/any.rs b/library/core/src/any.rs index 58107b1e7d0..17d94555927 100644 --- a/library/core/src/any.rs +++ b/library/core/src/any.rs @@ -423,7 +423,8 @@ impl dyn Any + Send { /// /// # Safety /// - /// Same as the method on the type `dyn Any`. + /// The contained value must be of type `T`. Calling this method + /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] pub unsafe fn downcast_ref_unchecked<T: Any>(&self) -> &T { @@ -451,7 +452,8 @@ impl dyn Any + Send { /// /// # Safety /// - /// Same as the method on the type `dyn Any`. + /// The contained value must be of type `T`. Calling this method + /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] pub unsafe fn downcast_mut_unchecked<T: Any>(&mut self) -> &mut T { @@ -552,6 +554,10 @@ impl dyn Any + Send + Sync { /// assert_eq!(*x.downcast_ref_unchecked::<usize>(), 1); /// } /// ``` + /// # Safety + /// + /// The contained value must be of type `T`. Calling this method + /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] pub unsafe fn downcast_ref_unchecked<T: Any>(&self) -> &T { @@ -576,6 +582,10 @@ impl dyn Any + Send + Sync { /// /// assert_eq!(*x.downcast_ref::<usize>().unwrap(), 2); /// ``` + /// # Safety + /// + /// The contained value must be of type `T`. Calling this method + /// with the incorrect type is *undefined behavior*. 
#[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] pub unsafe fn downcast_mut_unchecked<T: Any>(&mut self) -> &mut T { diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs index cfa4c1fb564..306d565a77e 100644 --- a/library/core/src/cell.rs +++ b/library/core/src/cell.rs @@ -252,7 +252,7 @@ use crate::cmp::Ordering; use crate::fmt::{self, Debug, Display}; -use crate::marker::{PhantomData, Unsize}; +use crate::marker::{PhantomData, PointerLike, Unsize}; use crate::mem; use crate::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn}; use crate::pin::PinCoerceUnsized; @@ -677,6 +677,9 @@ impl<T: CoerceUnsized<U>, U> CoerceUnsized<Cell<U>> for Cell<T> {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Cell<U>> for Cell<T> {} +#[unstable(feature = "pointer_like_trait", issue = "none")] +impl<T: PointerLike> PointerLike for Cell<T> {} + impl<T> Cell<[T]> { /// Returns a `&[Cell<T>]` from a `&Cell<[T]>` /// @@ -2258,6 +2261,9 @@ impl<T: CoerceUnsized<U>, U> CoerceUnsized<UnsafeCell<U>> for UnsafeCell<T> {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<UnsafeCell<U>> for UnsafeCell<T> {} +#[unstable(feature = "pointer_like_trait", issue = "none")] +impl<T: PointerLike> PointerLike for UnsafeCell<T> {} + /// [`UnsafeCell`], but [`Sync`]. /// /// This is just an `UnsafeCell`, except it implements `Sync` @@ -2364,6 +2370,9 @@ impl<T: CoerceUnsized<U>, U> CoerceUnsized<SyncUnsafeCell<U>> for SyncUnsafeCell //#[unstable(feature = "sync_unsafe_cell", issue = "95439")] impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<SyncUnsafeCell<U>> for SyncUnsafeCell<T> {} +#[unstable(feature = "pointer_like_trait", issue = "none")] +impl<T: PointerLike> PointerLike for SyncUnsafeCell<T> {} + #[allow(unused)] fn assert_coerce_unsized( a: UnsafeCell<&i32>, diff --git a/library/core/src/cell/once.rs b/library/core/src/cell/once.rs index c14afe0f476..6a85791916a 100644 --- a/library/core/src/cell/once.rs +++ b/library/core/src/cell/once.rs @@ -262,7 +262,9 @@ impl<T> OnceCell<T> { /// /// let value = cell.get_mut_or_try_init(|| "1234".parse()); /// assert_eq!(value, Ok(&mut 1234)); - /// *value.unwrap() += 2; + /// + /// let Ok(value) = value else { return; }; + /// *value += 2; /// assert_eq!(cell.get(), Some(&1236)) /// ``` #[unstable(feature = "once_cell_get_mut", issue = "121641")] @@ -304,8 +306,8 @@ impl<T> OnceCell<T> { /// assert_eq!(cell.into_inner(), None); /// /// let cell = OnceCell::new(); - /// cell.set("hello".to_string()).unwrap(); - /// assert_eq!(cell.into_inner(), Some("hello".to_string())); + /// let _ = cell.set("hello".to_owned()); + /// assert_eq!(cell.into_inner(), Some("hello".to_owned())); /// ``` #[inline] #[stable(feature = "once_cell", since = "1.70.0")] @@ -332,8 +334,8 @@ impl<T> OnceCell<T> { /// assert_eq!(cell.take(), None); /// /// let mut cell = OnceCell::new(); - /// cell.set("hello".to_string()).unwrap(); - /// assert_eq!(cell.take(), Some("hello".to_string())); + /// let _ = cell.set("hello".to_owned()); + /// assert_eq!(cell.take(), Some("hello".to_owned())); /// assert_eq!(cell.get(), None); /// ``` #[inline] diff --git a/library/core/src/cmp.rs b/library/core/src/cmp.rs index 66a6578fc72..5a3b9365cd2 100644 --- a/library/core/src/cmp.rs +++ b/library/core/src/cmp.rs @@ -348,7 +348,7 @@ pub trait Eq: PartialEq<Self> { #[rustc_builtin_macro] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] 
#[allow_internal_unstable(core_intrinsics, derive_eq, structural_match)] -#[cfg_attr(bootstrap, allow_internal_unstable(coverage_attribute))] +#[allow_internal_unstable(coverage_attribute)] pub macro Eq($item:item) { /* compiler built-in */ } diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index 14c70065101..c2c78dd9c67 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -152,8 +152,9 @@ pub trait Write { /// } /// /// let mut buf = String::new(); - /// writer(&mut buf, "hola").unwrap(); + /// writer(&mut buf, "hola")?; /// assert_eq!(&buf, "hola"); + /// # std::fmt::Result::Ok(()) /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn write_str(&mut self, s: &str) -> Result; @@ -179,9 +180,10 @@ pub trait Write { /// } /// /// let mut buf = String::new(); - /// writer(&mut buf, 'a').unwrap(); - /// writer(&mut buf, 'b').unwrap(); + /// writer(&mut buf, 'a')?; + /// writer(&mut buf, 'b')?; /// assert_eq!(&buf, "ab"); + /// # std::fmt::Result::Ok(()) /// ``` #[stable(feature = "fmt_write_char", since = "1.1.0")] fn write_char(&mut self, c: char) -> Result { @@ -208,8 +210,9 @@ pub trait Write { /// } /// /// let mut buf = String::new(); - /// writer(&mut buf, "world").unwrap(); + /// writer(&mut buf, "world")?; /// assert_eq!(&buf, "world"); + /// # std::fmt::Result::Ok(()) /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn write_fmt(&mut self, args: Arguments<'_>) -> Result { diff --git a/library/core/src/intrinsics/mir.rs b/library/core/src/intrinsics/mir.rs index 55dcf7cd47e..834f44c7790 100644 --- a/library/core/src/intrinsics/mir.rs +++ b/library/core/src/intrinsics/mir.rs @@ -233,7 +233,7 @@ //! //! - Operands implicitly convert to `Use` rvalues. //! - `&`, `&mut`, `addr_of!`, and `addr_of_mut!` all work to create their associated rvalue. -//! - [`Discriminant`], [`Len`], and [`CopyForDeref`] have associated functions. +//! - [`Discriminant`] and [`CopyForDeref`] have associated functions. //! - Unary and binary operations use their normal Rust syntax - `a * b`, `!c`, etc. //! - The binary operation `Offset` can be created via [`Offset`]. //! - Checked binary operations are represented by wrapping the associated binop in [`Checked`]. @@ -401,7 +401,6 @@ define!("mir_storage_dead", fn StorageDead<T>(local: T)); define!("mir_assume", fn Assume(operand: bool)); define!("mir_deinit", fn Deinit<T>(place: T)); define!("mir_checked", fn Checked<T>(binop: T) -> (T, bool)); -define!("mir_len", fn Len<T>(place: T) -> usize); define!( "mir_ptr_metadata", fn PtrMetadata<P: ?Sized>(place: *const P) -> <P as ::core::ptr::Pointee>::Metadata diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 3e53c0497cc..42b8eb33a1a 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -3795,7 +3795,7 @@ where /// See [`const_eval_select()`] for the rules and requirements around that intrinsic. pub(crate) macro const_eval_select { ( - @capture { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? : + @capture$([$($binders:tt)*])? { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? : if const $(#[$compiletime_attr:meta])* $compiletime:block else @@ -3803,7 +3803,7 @@ pub(crate) macro const_eval_select { ) => { // Use the `noinline` arm, after adding explicit `inline` attributes $crate::intrinsics::const_eval_select!( - @capture { $($arg : $ty = $val),* } $(-> $ret)? : + @capture$([$($binders)*])? { $($arg : $ty = $val),* } $(-> $ret)? 
: #[noinline] if const #[inline] // prevent codegen on this function @@ -3817,7 +3817,7 @@ pub(crate) macro const_eval_select { }, // With a leading #[noinline], we don't add inline attributes ( - @capture { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? : + @capture$([$($binders:tt)*])? { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? : #[noinline] if const $(#[$compiletime_attr:meta])* $compiletime:block @@ -3825,12 +3825,12 @@ pub(crate) macro const_eval_select { $(#[$runtime_attr:meta])* $runtime:block ) => {{ $(#[$runtime_attr])* - fn runtime($($arg: $ty),*) $( -> $ret )? { + fn runtime$(<$($binders)*>)?($($arg: $ty),*) $( -> $ret )? { $runtime } $(#[$compiletime_attr])* - const fn compiletime($($arg: $ty),*) $( -> $ret )? { + const fn compiletime$(<$($binders)*>)?($($arg: $ty),*) $( -> $ret )? { // Don't warn if one of the arguments is unused. $(let _ = $arg;)* @@ -3842,14 +3842,14 @@ pub(crate) macro const_eval_select { // We support leaving away the `val` expressions for *all* arguments // (but not for *some* arguments, that's too tricky). ( - @capture { $($arg:ident : $ty:ty),* $(,)? } $( -> $ret:ty )? : + @capture$([$($binders:tt)*])? { $($arg:ident : $ty:ty),* $(,)? } $( -> $ret:ty )? : if const $(#[$compiletime_attr:meta])* $compiletime:block else $(#[$runtime_attr:meta])* $runtime:block ) => { $crate::intrinsics::const_eval_select!( - @capture { $($arg : $ty = $arg),* } $(-> $ret)? : + @capture$([$($binders)*])? { $($arg : $ty = $arg),* } $(-> $ret)? : if const $(#[$compiletime_attr])* $compiletime else diff --git a/library/core/src/iter/sources/once.rs b/library/core/src/iter/sources/once.rs index 21be4377da1..c4a9860bdd7 100644 --- a/library/core/src/iter/sources/once.rs +++ b/library/core/src/iter/sources/once.rs @@ -34,7 +34,7 @@ use crate::iter::{FusedIterator, TrustedLen}; /// use std::fs; /// use std::path::PathBuf; /// -/// let dirs = fs::read_dir(".foo").unwrap(); +/// let dirs = fs::read_dir(".foo")?; /// /// // we need to convert from an iterator of DirEntry-s to an iterator of /// // PathBufs, so we use map @@ -50,6 +50,7 @@ use crate::iter::{FusedIterator, TrustedLen}; /// for f in files { /// println!("{f:?}"); /// } +/// # std::io::Result::Ok(()) /// ``` #[stable(feature = "iter_once", since = "1.2.0")] pub fn once<T>(value: T) -> Once<T> { diff --git a/library/core/src/iter/traits/collect.rs b/library/core/src/iter/traits/collect.rs index 8ab1c26f95e..73e6d931060 100644 --- a/library/core/src/iter/traits/collect.rs +++ b/library/core/src/iter/traits/collect.rs @@ -152,39 +152,6 @@ pub trait FromIterator<A>: Sized { fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self; } -/// This implementation turns an iterator of tuples into a tuple of types which implement -/// [`Default`] and [`Extend`]. 
-/// -/// This is similar to [`Iterator::unzip`], but is also composable with other [`FromIterator`] -/// implementations: -/// -/// ```rust -/// # fn main() -> Result<(), core::num::ParseIntError> { -/// let string = "1,2,123,4"; -/// -/// let (numbers, lengths): (Vec<_>, Vec<_>) = string -/// .split(',') -/// .map(|s| s.parse().map(|n: u32| (n, s.len()))) -/// .collect::<Result<_, _>>()?; -/// -/// assert_eq!(numbers, [1, 2, 123, 4]); -/// assert_eq!(lengths, [1, 1, 3, 1]); -/// # Ok(()) } -/// ``` -#[stable(feature = "from_iterator_for_tuple", since = "1.79.0")] -impl<A, B, AE, BE> FromIterator<(AE, BE)> for (A, B) -where - A: Default + Extend<AE>, - B: Default + Extend<BE>, -{ - fn from_iter<I: IntoIterator<Item = (AE, BE)>>(iter: I) -> Self { - let mut res = <(A, B)>::default(); - res.extend(iter); - - res - } -} - /// Conversion into an [`Iterator`]. /// /// By implementing `IntoIterator` for a type, you define how it will be @@ -629,7 +596,7 @@ macro_rules! spec_tuple_impl { } impl<$($ty_names,)* $($extend_ty_names,)* Iter> $trait_name<$($extend_ty_names),*> for Iter - where + where $($extend_ty_names: Extend<$ty_names>,)* Iter: Iterator<Item = ($($ty_names,)*)>, { @@ -639,7 +606,7 @@ macro_rules! spec_tuple_impl { } impl<$($ty_names,)* $($extend_ty_names,)* Iter> $trait_name<$($extend_ty_names),*> for Iter - where + where $($extend_ty_names: Extend<$ty_names>,)* Iter: TrustedLen<Item = ($($ty_names,)*)>, { @@ -647,29 +614,64 @@ macro_rules! spec_tuple_impl { fn extend<'a, $($ty_names,)*>( $($var_names: &'a mut impl Extend<$ty_names>,)* ) -> impl FnMut((), ($($ty_names,)*)) + 'a { - #[allow(non_snake_case)] - // SAFETY: We reserve enough space for the `size_hint`, and the iterator is `TrustedLen` - // so its `size_hint` is exact. - move |(), ($($extend_ty_names,)*)| unsafe { - $($var_names.extend_one_unchecked($extend_ty_names);)* + #[allow(non_snake_case)] + // SAFETY: We reserve enough space for the `size_hint`, and the iterator is + // `TrustedLen` so its `size_hint` is exact. + move |(), ($($extend_ty_names,)*)| unsafe { + $($var_names.extend_one_unchecked($extend_ty_names);)* + } } - } - let (lower_bound, upper_bound) = self.size_hint(); + let (lower_bound, upper_bound) = self.size_hint(); - if upper_bound.is_none() { - // We cannot reserve more than `usize::MAX` items, and this is likely to go out of memory anyway. - $default_fn_name(self, $($var_names,)*); - return; - } + if upper_bound.is_none() { + // We cannot reserve more than `usize::MAX` items, and this is likely to go out of memory anyway. + $default_fn_name(self, $($var_names,)*); + return; + } - if lower_bound > 0 { - $($var_names.extend_reserve(lower_bound);)* + if lower_bound > 0 { + $($var_names.extend_reserve(lower_bound);)* + } + + self.fold((), extend($($var_names,)*)); } + } - self.fold((), extend($($var_names,)*)); + /// This implementation turns an iterator of tuples into a tuple of types which implement + /// [`Default`] and [`Extend`]. 
+ /// + /// This is similar to [`Iterator::unzip`], but is also composable with other [`FromIterator`] + /// implementations: + /// + /// ```rust + /// # fn main() -> Result<(), core::num::ParseIntError> { + /// let string = "1,2,123,4"; + /// + /// // Example given for a 2-tuple, but 1- through 12-tuples are supported + /// let (numbers, lengths): (Vec<_>, Vec<_>) = string + /// .split(',') + /// .map(|s| s.parse().map(|n: u32| (n, s.len()))) + /// .collect::<Result<_, _>>()?; + /// + /// assert_eq!(numbers, [1, 2, 123, 4]); + /// assert_eq!(lengths, [1, 1, 3, 1]); + /// # Ok(()) } + /// ``` + #[$meta] + $(#[$doctext])? + #[stable(feature = "from_iterator_for_tuple", since = "1.79.0")] + impl<$($ty_names,)* $($extend_ty_names,)*> FromIterator<($($extend_ty_names,)*)> for ($($ty_names,)*) + where + $($ty_names: Default + Extend<$extend_ty_names>,)* + { + fn from_iter<Iter: IntoIterator<Item = ($($extend_ty_names,)*)>>(iter: Iter) -> Self { + let mut res = <($($ty_names,)*)>::default(); + res.extend(iter); + + res + } } - } }; } diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 38dfbbef393..ff39e8ac25f 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -2564,7 +2564,7 @@ pub trait Iterator { /// # Example /// /// ``` - /// let reduced: i32 = (1..10).reduce(|acc, e| acc + e).unwrap(); + /// let reduced: i32 = (1..10).reduce(|acc, e| acc + e).unwrap_or(0); /// assert_eq!(reduced, 45); /// /// // Which is equivalent to doing it with `fold`: @@ -3087,7 +3087,7 @@ pub trait Iterator { /// [2.4, f32::NAN, 1.3] /// .into_iter() /// .reduce(f32::max) - /// .unwrap(), + /// .unwrap_or(0.), /// 2.4 /// ); /// ``` @@ -3123,7 +3123,7 @@ pub trait Iterator { /// [2.4, f32::NAN, 1.3] /// .into_iter() /// .reduce(f32::min) - /// .unwrap(), + /// .unwrap_or(0.), /// 1.3 /// ); /// ``` diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 18bd9bb8118..a7f741a9408 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -107,13 +107,13 @@ // // Library features: // tidy-alphabetical-start -#![cfg_attr(bootstrap, feature(coverage_attribute))] #![cfg_attr(bootstrap, feature(do_not_recommend))] #![feature(array_ptr_get)] #![feature(asm_experimental_arch)] #![feature(const_eval_select)] #![feature(const_typed_swap)] #![feature(core_intrinsics)] +#![feature(coverage_attribute)] #![feature(internal_impls_macro)] #![feature(ip)] #![feature(is_ascii_octdigit)] diff --git a/library/core/src/macros/mod.rs b/library/core/src/macros/mod.rs index bff7ad98df3..402b436d28e 100644 --- a/library/core/src/macros/mod.rs +++ b/library/core/src/macros/mod.rs @@ -1549,7 +1549,7 @@ pub(crate) mod builtin { /// NAME is a string that represents a valid function name. /// MODE is any of Forward, Reverse, ForwardFirst, ReverseFirst. /// INPUT_ACTIVITIES consists of one valid activity for each input parameter. - /// OUTPUT_ACTIVITY must not be set if we implicitely return nothing (or explicitely return + /// OUTPUT_ACTIVITY must not be set if we implicitly return nothing (or explicitly return /// `-> ()`). Otherwise it must be set to one of the allowed activities. 
#[unstable(feature = "autodiff", issue = "124509")] #[allow_internal_unstable(rustc_attrs)] @@ -1673,8 +1673,7 @@ pub(crate) mod builtin { /// /// [the reference]: ../../../reference/attributes/testing.html#the-test-attribute #[stable(feature = "rust1", since = "1.0.0")] - #[allow_internal_unstable(test, rustc_attrs)] - #[cfg_attr(bootstrap, allow_internal_unstable(coverage_attribute))] + #[allow_internal_unstable(test, rustc_attrs, coverage_attribute)] #[rustc_builtin_macro] pub macro test($item:item) { /* compiler built-in */ @@ -1687,8 +1686,7 @@ pub(crate) mod builtin { soft, reason = "`bench` is a part of custom test frameworks which are unstable" )] - #[allow_internal_unstable(test, rustc_attrs)] - #[cfg_attr(bootstrap, allow_internal_unstable(coverage_attribute))] + #[allow_internal_unstable(test, rustc_attrs, coverage_attribute)] #[rustc_builtin_macro] pub macro bench($item:item) { /* compiler built-in */ diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index 3d79706f8ec..29fc01d37fe 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -982,8 +982,14 @@ pub trait Tuple {} /// A marker for pointer-like types. /// -/// This trait can only be implemented for types that have the same size and alignment -/// as a `usize` or `*const ()`. +/// This trait can only be implemented for types that are certain to have +/// the same size and alignment as a [`usize`] or [`*const ()`](pointer). +/// To ensure this, there are special requirements on implementations +/// of `PointerLike` (other than the already-provided implementations +/// for built-in types): +/// +/// * The type must have `#[repr(transparent)]`. +/// * The type’s sole non-zero-sized field must itself implement `PointerLike`. #[unstable(feature = "pointer_like_trait", issue = "none")] #[lang = "pointer_like"] #[diagnostic::on_unimplemented( @@ -997,6 +1003,7 @@ pub trait PointerLike {} marker_impls! { #[unstable(feature = "pointer_like_trait", issue = "none")] PointerLike for + isize, usize, {T} &T, {T} &mut T, diff --git a/library/core/src/mem/maybe_uninit.rs b/library/core/src/mem/maybe_uninit.rs index 9b3d6902098..58fb5be5812 100644 --- a/library/core/src/mem/maybe_uninit.rs +++ b/library/core/src/mem/maybe_uninit.rs @@ -232,6 +232,26 @@ use crate::{fmt, intrinsics, ptr, slice}; /// remain `#[repr(transparent)]`. That said, `MaybeUninit<T>` will *always* guarantee that it has /// the same size, alignment, and ABI as `T`; it's just that the way `MaybeUninit` implements that /// guarantee may evolve. +/// +/// Note that even though `T` and `MaybeUninit<T>` are ABI compatible it is still unsound to +/// transmute `&mut T` to `&mut MaybeUninit<T>` and expose that to safe code because it would allow +/// safe code to access uninitialized memory: +/// +/// ```rust,no_run +/// use core::mem::MaybeUninit; +/// +/// fn unsound_transmute<T>(val: &mut T) -> &mut MaybeUninit<T> { +/// unsafe { core::mem::transmute(val) } +/// } +/// +/// fn main() { +/// let mut code = 0; +/// let code = &mut code; +/// let code2 = unsound_transmute(code); +/// *code2 = MaybeUninit::uninit(); +/// std::process::exit(*code); // UB! Accessing uninitialized memory. +/// } +/// ``` #[stable(feature = "maybe_uninit", since = "1.36.0")] // Lang item so we can wrap other types in it. This is useful for coroutines. 
#[lang = "maybe_uninit"] diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index 78ad6880709..57acc9dcd6e 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -1241,6 +1241,17 @@ pub trait SizedTypeProperties: Sized { #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] const LAYOUT: Layout = Layout::new::<Self>(); + + /// The largest safe length for a `[Self]`. + /// + /// Anything larger than this would make `size_of_val` overflow `isize::MAX`, + /// which is never allowed for a single object. + #[doc(hidden)] + #[unstable(feature = "sized_type_properties", issue = "none")] + const MAX_SLICE_LEN: usize = match size_of::<Self>() { + 0 => usize::MAX, + n => (isize::MAX as usize) / n, + }; } #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] diff --git a/library/core/src/option.rs b/library/core/src/option.rs index f4ac7af6396..a9f06b92ad5 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -937,10 +937,16 @@ impl<T> Option<T> { /// Returns the contained [`Some`] value, consuming the `self` value. /// /// Because this function may panic, its use is generally discouraged. + /// Panics are meant for unrecoverable errors, and + /// [may abort the entire program][panic-abort]. + /// /// Instead, prefer to use pattern matching and handle the [`None`] /// case explicitly, or call [`unwrap_or`], [`unwrap_or_else`], or - /// [`unwrap_or_default`]. + /// [`unwrap_or_default`]. In functions returning `Option`, you can use + /// [the `?` (try) operator][try-option]. /// + /// [panic-abort]: https://doc.rust-lang.org/book/ch09-01-unrecoverable-errors-with-panic.html + /// [try-option]: https://doc.rust-lang.org/book/ch09-02-recoverable-errors-with-result.html#where-the--operator-can-be-used /// [`unwrap_or`]: Option::unwrap_or /// [`unwrap_or_else`]: Option::unwrap_or_else /// [`unwrap_or_default`]: Option::unwrap_or_default diff --git a/library/core/src/primitive_docs.rs b/library/core/src/primitive_docs.rs index e105ceadff7..c5f029363e5 100644 --- a/library/core/src/primitive_docs.rs +++ b/library/core/src/primitive_docs.rs @@ -563,11 +563,11 @@ impl () {} /// Note that here the call to [`drop`] is for clarity - it indicates /// that we are done with the given value and it should be destroyed. /// -/// ## 3. Create it using `ptr::addr_of!` +/// ## 3. Create it using `&raw` /// -/// Instead of coercing a reference to a raw pointer, you can use the macros -/// [`ptr::addr_of!`] (for `*const T`) and [`ptr::addr_of_mut!`] (for `*mut T`). -/// These macros allow you to create raw pointers to fields to which you cannot +/// Instead of coercing a reference to a raw pointer, you can use the raw borrow +/// operators `&raw const` (for `*const T`) and `&raw mut` (for `*mut T`). +/// These operators allow you to create raw pointers to fields to which you cannot /// create a reference (without causing undefined behavior), such as an /// unaligned field. This might be necessary if packed structs or uninitialized /// memory is involved. @@ -580,7 +580,7 @@ impl () {} /// unaligned: u32, /// } /// let s = S::default(); -/// let p = std::ptr::addr_of!(s.unaligned); // not allowed with coercion +/// let p = &raw const s.unaligned; // not allowed with coercion /// ``` /// /// ## 4. Get it from C. 
diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index f100adecbbb..ec569291853 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -12,14 +12,17 @@ impl<T: ?Sized> *const T { /// Therefore, two pointers that are null may still not compare equal to /// each other. /// - /// ## Behavior during const evaluation + /// # Panics during const evaluation /// - /// When this function is used during const evaluation, it may return `false` for pointers - /// that turn out to be null at runtime. Specifically, when a pointer to some memory - /// is offset beyond its bounds in such a way that the resulting pointer is null, - /// the function will still return `false`. There is no way for CTFE to know - /// the absolute position of that memory, so we cannot tell if the pointer is - /// null or not. + /// If this method is used during const evaluation, and `self` is a pointer + /// that is offset beyond the bounds of the memory it initially pointed to, + /// then there might not be enough information to determine whether the + /// pointer is null. This is because the absolute address in memory is not + /// known at compile time. If the nullness of the pointer cannot be + /// determined, this method will panic. + /// + /// In-bounds pointers are never null, so the method will never panic for + /// such pointers. /// /// # Examples /// @@ -254,6 +257,13 @@ impl<T: ?Sized> *const T { /// When calling this method, you have to ensure that *either* the pointer is null *or* /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null + /// /// # Examples /// /// ``` @@ -331,6 +341,13 @@ impl<T: ?Sized> *const T { /// When calling this method, you have to ensure that *either* the pointer is null *or* /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. 
+ /// + /// [`is_null`]: #method.is_null + /// /// # Examples /// /// ``` @@ -502,11 +519,12 @@ impl<T: ?Sized> *const T { /// let mut out = String::new(); /// while ptr != end_rounded_up { /// unsafe { - /// write!(&mut out, "{}, ", *ptr).unwrap(); + /// write!(&mut out, "{}, ", *ptr)?; /// } /// ptr = ptr.wrapping_offset(step); /// } /// assert_eq!(out.as_str(), "1, 3, 5, "); + /// # std::fmt::Result::Ok(()) /// ``` #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")] #[must_use = "returns a new pointer rather than modifying its argument"] @@ -1125,11 +1143,12 @@ impl<T: ?Sized> *const T { /// let mut out = String::new(); /// while ptr != end_rounded_up { /// unsafe { - /// write!(&mut out, "{}, ", *ptr).unwrap(); + /// write!(&mut out, "{}, ", *ptr)?; /// } /// ptr = ptr.wrapping_add(step); /// } /// assert_eq!(out, "1, 3, 5, "); + /// # std::fmt::Result::Ok(()) /// ``` #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] @@ -1203,11 +1222,12 @@ impl<T: ?Sized> *const T { /// let mut out = String::new(); /// while ptr != start_rounded_down { /// unsafe { - /// write!(&mut out, "{}, ", *ptr).unwrap(); + /// write!(&mut out, "{}, ", *ptr)?; /// } /// ptr = ptr.wrapping_sub(step); /// } /// assert_eq!(out, "5, 3, 1, "); + /// # std::fmt::Result::Ok(()) /// ``` #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] @@ -1604,6 +1624,13 @@ impl<T> *const [T] { /// /// [valid]: crate::ptr#safety /// [allocated object]: crate::ptr#allocated-object + /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null #[inline] #[unstable(feature = "ptr_as_uninit", issue = "75402")] pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> { diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 1423e7ea8d1..e6e13eaff7b 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -395,6 +395,7 @@ #![allow(clippy::not_unsafe_ptr_arg_deref)] use crate::cmp::Ordering; +use crate::intrinsics::const_eval_select; use crate::marker::FnPtr; use crate::mem::{self, MaybeUninit, SizedTypeProperties}; use crate::{fmt, hash, intrinsics, ub_checks}; @@ -1074,25 +1075,6 @@ pub const unsafe fn swap<T>(x: *mut T, y: *mut T) { #[rustc_const_unstable(feature = "const_swap_nonoverlapping", issue = "133668")] #[rustc_diagnostic_item = "ptr_swap_nonoverlapping"] pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) { - #[allow(unused)] - macro_rules! attempt_swap_as_chunks { - ($ChunkTy:ty) => { - if mem::align_of::<T>() >= mem::align_of::<$ChunkTy>() - && mem::size_of::<T>() % mem::size_of::<$ChunkTy>() == 0 - { - let x: *mut $ChunkTy = x.cast(); - let y: *mut $ChunkTy = y.cast(); - let count = count * (mem::size_of::<T>() / mem::size_of::<$ChunkTy>()); - // SAFETY: these are the same bytes that the caller promised were - // ok, just typed as `MaybeUninit<ChunkTy>`s instead of as `T`s. - // The `if` condition above ensures that we're not violating - // alignment requirements, and that the division is exact so - // that we don't lose any bytes off the end. 
- return unsafe { swap_nonoverlapping_simple_untyped(x, y, count) }; - } - }; - } - ub_checks::assert_unsafe_precondition!( check_language_ub, "ptr::swap_nonoverlapping requires that both pointer arguments are aligned and non-null \ @@ -1111,19 +1093,48 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) { } ); - // Split up the slice into small power-of-two-sized chunks that LLVM is able - // to vectorize (unless it's a special type with more-than-pointer alignment, - // because we don't want to pessimize things like slices of SIMD vectors.) - if mem::align_of::<T>() <= mem::size_of::<usize>() - && (!mem::size_of::<T>().is_power_of_two() - || mem::size_of::<T>() > mem::size_of::<usize>() * 2) - { - attempt_swap_as_chunks!(usize); - attempt_swap_as_chunks!(u8); - } + const_eval_select!( + @capture[T] { x: *mut T, y: *mut T, count: usize }: + if const { + // At compile-time we want to always copy this in chunks of `T`, to ensure that if there + // are pointers inside `T` we will copy them in one go rather than trying to copy a part + // of a pointer (which would not work). + // SAFETY: Same preconditions as this function + unsafe { swap_nonoverlapping_simple_untyped(x, y, count) } + } else { + macro_rules! attempt_swap_as_chunks { + ($ChunkTy:ty) => { + if mem::align_of::<T>() >= mem::align_of::<$ChunkTy>() + && mem::size_of::<T>() % mem::size_of::<$ChunkTy>() == 0 + { + let x: *mut $ChunkTy = x.cast(); + let y: *mut $ChunkTy = y.cast(); + let count = count * (mem::size_of::<T>() / mem::size_of::<$ChunkTy>()); + // SAFETY: these are the same bytes that the caller promised were + // ok, just typed as `MaybeUninit<ChunkTy>`s instead of as `T`s. + // The `if` condition above ensures that we're not violating + // alignment requirements, and that the division is exact so + // that we don't lose any bytes off the end. + return unsafe { swap_nonoverlapping_simple_untyped(x, y, count) }; + } + }; + } + + // Split up the slice into small power-of-two-sized chunks that LLVM is able + // to vectorize (unless it's a special type with more-than-pointer alignment, + // because we don't want to pessimize things like slices of SIMD vectors.) + if mem::align_of::<T>() <= mem::size_of::<usize>() + && (!mem::size_of::<T>().is_power_of_two() + || mem::size_of::<T>() > mem::size_of::<usize>() * 2) + { + attempt_swap_as_chunks!(usize); + attempt_swap_as_chunks!(u8); + } - // SAFETY: Same preconditions as this function - unsafe { swap_nonoverlapping_simple_untyped(x, y, count) } + // SAFETY: Same preconditions as this function + unsafe { swap_nonoverlapping_simple_untyped(x, y, count) } + } + ) } /// Same behavior and safety conditions as [`swap_nonoverlapping`] diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 3639feaacf3..34567917b52 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -12,14 +12,17 @@ impl<T: ?Sized> *mut T { /// Therefore, two pointers that are null may still not compare equal to /// each other. /// - /// ## Behavior during const evaluation + /// # Panics during const evaluation /// - /// When this function is used during const evaluation, it may return `false` for pointers - /// that turn out to be null at runtime. Specifically, when a pointer to some memory - /// is offset beyond its bounds in such a way that the resulting pointer is null, - /// the function will still return `false`. 
There is no way for CTFE to know - /// the absolute position of that memory, so we cannot tell if the pointer is - /// null or not. + /// If this method is used during const evaluation, and `self` is a pointer + /// that is offset beyond the bounds of the memory it initially pointed to, + /// then there might not be enough information to determine whether the + /// pointer is null. This is because the absolute address in memory is not + /// known at compile time. If the nullness of the pointer cannot be + /// determined, this method will panic. + /// + /// In-bounds pointers are never null, so the method will never panic for + /// such pointers. /// /// # Examples /// @@ -243,6 +246,13 @@ impl<T: ?Sized> *mut T { /// When calling this method, you have to ensure that *either* the pointer is null *or* /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null-1 + /// /// # Examples /// /// ``` @@ -327,6 +337,13 @@ impl<T: ?Sized> *mut T { /// Note that because the created reference is to `MaybeUninit<T>`, the /// source pointer can point to uninitialized memory. /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null-1 + /// /// # Examples /// /// ``` @@ -590,6 +607,12 @@ impl<T: ?Sized> *mut T { /// the pointer is null *or* /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null-1 /// /// # Examples /// @@ -673,6 +696,13 @@ impl<T: ?Sized> *mut T { /// /// When calling this method, you have to ensure that *either* the pointer is null *or* /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion). + /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null-1 #[inline] #[unstable(feature = "ptr_as_uninit", issue = "75402")] pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>> @@ -1949,6 +1979,13 @@ impl<T> *mut [T] { /// /// [valid]: crate::ptr#safety /// [allocated object]: crate::ptr#allocated-object + /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: #method.is_null-1 #[inline] #[unstable(feature = "ptr_as_uninit", issue = "75402")] pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> { @@ -2000,6 +2037,13 @@ impl<T> *mut [T] { /// /// [valid]: crate::ptr#safety /// [allocated object]: crate::ptr#allocated-object + /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. 
+ /// + /// [`is_null`]: #method.is_null-1 #[inline] #[unstable(feature = "ptr_as_uninit", issue = "75402")] pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> { diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index 6b601405e1c..e0ba469272e 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -204,6 +204,13 @@ impl<T: ?Sized> NonNull<T> { /// Creates a new `NonNull` if `ptr` is non-null. /// + /// # Panics during const evaluation + /// + /// This method will panic during const evaluation if the pointer cannot be + /// determined to be null or not. See [`is_null`] for more information. + /// + /// [`is_null`]: ../primitive.pointer.html#method.is_null-1 + /// /// # Examples /// /// ``` @@ -1548,6 +1555,10 @@ impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: U #[stable(feature = "pin", since = "1.33.0")] unsafe impl<T: ?Sized> PinCoerceUnsized for NonNull<T> {} +#[unstable(feature = "pointer_like_trait", issue = "none")] +#[cfg(not(bootstrap))] +impl<T> core::marker::PointerLike for NonNull<T> {} + #[stable(feature = "nonnull", since = "1.25.0")] impl<T: ?Sized> fmt::Debug for NonNull<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/library/core/src/result.rs b/library/core/src/result.rs index 9c7be618bc7..92b5cba1531 100644 --- a/library/core/src/result.rs +++ b/library/core/src/result.rs @@ -1065,10 +1065,15 @@ impl<T, E> Result<T, E> { /// Returns the contained [`Ok`] value, consuming the `self` value. /// /// Because this function may panic, its use is generally discouraged. - /// Instead, prefer to use pattern matching and handle the [`Err`] - /// case explicitly, or call [`unwrap_or`], [`unwrap_or_else`], or - /// [`unwrap_or_default`]. + /// Panics are meant for unrecoverable errors, and + /// [may abort the entire program][panic-abort]. + /// + /// Instead, prefer to use [the `?` (try) operator][try-operator], or pattern matching + /// to handle the [`Err`] case explicitly, or call [`unwrap_or`], + /// [`unwrap_or_else`], or [`unwrap_or_default`]. /// + /// [panic-abort]: https://doc.rust-lang.org/book/ch09-01-unrecoverable-errors-with-panic.html + /// [try-operator]: https://doc.rust-lang.org/book/ch09-02-recoverable-errors-with-result.html#a-shortcut-for-propagating-errors-the--operator /// [`unwrap_or`]: Result::unwrap_or /// [`unwrap_or_else`]: Result::unwrap_or_else /// [`unwrap_or_default`]: Result::unwrap_or_default diff --git a/library/core/src/slice/ascii.rs b/library/core/src/slice/ascii.rs index 7cdb896586f..51b25fa40e3 100644 --- a/library/core/src/slice/ascii.rs +++ b/library/core/src/slice/ascii.rs @@ -3,8 +3,9 @@ use core::ascii::EscapeDefault; use crate::fmt::{self, Write}; +#[cfg(not(all(target_arch = "x86_64", target_feature = "sse2")))] use crate::intrinsics::const_eval_select; -use crate::{ascii, iter, mem, ops}; +use crate::{ascii, iter, ops}; #[cfg(not(test))] impl [u8] { @@ -328,14 +329,6 @@ impl<'a> fmt::Debug for EscapeAscii<'a> { } } -/// Returns `true` if any byte in the word `v` is nonascii (>= 128). Snarfed -/// from `../str/mod.rs`, which does something similar for utf8 validation. -#[inline] -const fn contains_nonascii(v: usize) -> bool { - const NONASCII_MASK: usize = usize::repeat_u8(0x80); - (NONASCII_MASK & v) != 0 -} - /// ASCII test *without* the chunk-at-a-time optimizations. 
/// /// This is carefully structured to produce nice small code -- it's smaller in @@ -366,6 +359,7 @@ pub const fn is_ascii_simple(mut bytes: &[u8]) -> bool { /// /// If any of these loads produces something for which `contains_nonascii` /// (above) returns true, then we know the answer is false. +#[cfg(not(all(target_arch = "x86_64", target_feature = "sse2")))] #[inline] #[rustc_allow_const_fn_unstable(const_eval_select)] // fallback impl has same behavior const fn is_ascii(s: &[u8]) -> bool { @@ -376,7 +370,14 @@ const fn is_ascii(s: &[u8]) -> bool { if const { is_ascii_simple(s) } else { - const USIZE_SIZE: usize = mem::size_of::<usize>(); + /// Returns `true` if any byte in the word `v` is nonascii (>= 128). Snarfed + /// from `../str/mod.rs`, which does something similar for utf8 validation. + const fn contains_nonascii(v: usize) -> bool { + const NONASCII_MASK: usize = usize::repeat_u8(0x80); + (NONASCII_MASK & v) != 0 + } + + const USIZE_SIZE: usize = size_of::<usize>(); let len = s.len(); let align_offset = s.as_ptr().align_offset(USIZE_SIZE); @@ -386,7 +387,7 @@ const fn is_ascii(s: &[u8]) -> bool { // // We also do this for architectures where `size_of::<usize>()` isn't // sufficient alignment for `usize`, because it's a weird edge case. - if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() { + if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < align_of::<usize>() { return is_ascii_simple(s); } @@ -420,7 +421,7 @@ const fn is_ascii(s: &[u8]) -> bool { // have alignment information it should have given a `usize::MAX` for // `align_offset` earlier, sending things through the scalar path instead of // this one, so this check should pass if it's reachable. - debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>())); + debug_assert!(word_ptr.is_aligned_to(align_of::<usize>())); // Read subsequent words until the last aligned word, excluding the last // aligned word by itself to be done in tail check later, to ensure that @@ -455,3 +456,48 @@ const fn is_ascii(s: &[u8]) -> bool { } ) } + +/// ASCII test optimized to use the `pmovmskb` instruction available on `x86-64` +/// platforms. +/// +/// Other platforms are not likely to benefit from this code structure, so they +/// use SWAR techniques to test for ASCII in `usize`-sized chunks. +#[cfg(all(target_arch = "x86_64", target_feature = "sse2"))] +#[inline] +const fn is_ascii(bytes: &[u8]) -> bool { + // Process chunks of 32 bytes at a time in the fast path to enable + // auto-vectorization and use of `pmovmskb`. Two 128-bit vector registers + // can be OR'd together and then the resulting vector can be tested for + // non-ASCII bytes. + const CHUNK_SIZE: usize = 32; + + let mut i = 0; + + while i + CHUNK_SIZE <= bytes.len() { + let chunk_end = i + CHUNK_SIZE; + + // Get LLVM to produce a `pmovmskb` instruction on x86-64 which + // creates a mask from the most significant bit of each byte. + // ASCII bytes are less than 128 (0x80), so their most significant + // bit is unset. + let mut count = 0; + while i < chunk_end { + count += bytes[i].is_ascii() as u8; + i += 1; + } + + // All bytes should be <= 127 so count is equal to chunk size. + if count != CHUNK_SIZE as u8 { + return false; + } + } + + // Process the remaining `bytes.len() % N` bytes. 
+ let mut is_ascii = true; + while i < bytes.len() { + is_ascii &= bytes[i].is_ascii(); + i += 1; + } + + is_ascii +} diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index a27baf9db22..d2842f69008 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -46,13 +46,19 @@ impl<'a, T> IntoIterator for &'a mut [T] { /// Basic usage: /// /// ``` -/// // First, we declare a type which has `iter` method to get the `Iter` struct (`&[usize]` here): +/// // First, we need a slice to call the `iter` method on: /// let slice = &[1, 2, 3]; /// -/// // Then, we iterate over it: +/// // Then we call `iter` on the slice to get the `Iter` struct, +/// // and iterate over it: /// for element in slice.iter() { /// println!("{element}"); /// } +/// +/// // This for loop actually already works without calling `iter`: +/// for element in slice { +/// println!("{element}"); +/// } /// ``` /// /// [`iter`]: slice::iter @@ -109,19 +115,25 @@ impl<'a, T> Iter<'a, T> { /// Basic usage: /// /// ``` - /// // First, we declare a type which has the `iter` method to get the `Iter` + /// // First, we need a slice to call the `iter` method on: /// // struct (`&[usize]` here): /// let slice = &[1, 2, 3]; /// - /// // Then, we get the iterator: + /// // Then we call `iter` on the slice to get the `Iter` struct: /// let mut iter = slice.iter(); - /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]": + /// // Here `as_slice` still returns the whole slice, so this prints "[1, 2, 3]": /// println!("{:?}", iter.as_slice()); /// - /// // Next, we move to the second element of the slice: + /// // Now, we call the `next` method to remove the first element of the iterator: /// iter.next(); - /// // Now `as_slice` returns "[2, 3]": + /// // Here the iterator does not contain the first element of the slice any more, + /// // so `as_slice` only returns the last two elements of the slice, + /// // and so this prints "[2, 3]": /// println!("{:?}", iter.as_slice()); + /// + /// // The underlying slice has not been modified and still contains three elements, + /// // so this prints "[1, 2, 3]": + /// println!("{:?}", slice); /// ``` #[must_use] #[stable(feature = "iter_to_slice", since = "1.4.0")] @@ -166,11 +178,11 @@ impl<T> AsRef<[T]> for Iter<'_, T> { /// Basic usage: /// /// ``` -/// // First, we declare a type which has `iter_mut` method to get the `IterMut` -/// // struct (`&[usize]` here): -/// let mut slice = &mut [1, 2, 3]; +/// // First, we need a slice to call the `iter_mut` method on: +/// let slice = &mut [1, 2, 3]; /// -/// // Then, we iterate over it and increment each element value: +/// // Then we call `iter_mut` on the slice to get the `IterMut` struct, +/// // iterate over it and increment each element value: /// for element in slice.iter_mut() { /// *element += 1; /// } @@ -247,28 +259,21 @@ impl<'a, T> IterMut<'a, T> { /// Basic usage: /// /// ``` - /// // First, we declare a type which has `iter_mut` method to get the `IterMut` - /// // struct (`&[usize]` here): + /// // First, we need a slice to call the `iter_mut` method on: /// let mut slice = &mut [1, 2, 3]; /// - /// { - /// // Then, we get the iterator: - /// let mut iter = slice.iter_mut(); - /// // We move to next element: - /// iter.next(); - /// // So if we print what `into_slice` method returns here, we have "[2, 3]": - /// println!("{:?}", iter.into_slice()); - /// } - /// - /// // Now let's modify a value of the slice: - /// { - /// // First we get back the iterator: - /// 
let mut iter = slice.iter_mut(); - /// // We change the value of the first element of the slice returned by the `next` method: - /// *iter.next().unwrap() += 1; - /// } - /// // Now slice is "[2, 2, 3]": - /// println!("{slice:?}"); + /// // Then we call `iter_mut` on the slice to get the `IterMut` struct: + /// let mut iter = slice.iter_mut(); + /// // Now, we call the `next` method to remove the first element of the iterator, + /// // unwrap and dereference what we get from `next` and increase its value by 1: + /// *iter.next().unwrap() += 1; + /// // Here the iterator does not contain the first element of the slice any more, + /// // so `into_slice` only returns the last two elements of the slice, + /// // and so this prints "[2, 3]": + /// println!("{:?}", iter.into_slice()); + /// // The underlying slice still contains three elements, but its first element + /// // was increased by 1, so this prints "[2, 2, 3]": + /// println!("{:?}", slice); /// ``` #[must_use = "`self` will be dropped if the result is not used"] #[stable(feature = "iter_to_slice", since = "1.4.0")] diff --git a/library/core/src/str/converts.rs b/library/core/src/str/converts.rs index c7bae42765f..de68f80aa0c 100644 --- a/library/core/src/str/converts.rs +++ b/library/core/src/str/converts.rs @@ -47,10 +47,11 @@ use crate::{mem, ptr}; /// // some bytes, in a vector /// let sparkle_heart = vec![240, 159, 146, 150]; /// -/// // We know these bytes are valid, so just use `unwrap()`. -/// let sparkle_heart = str::from_utf8(&sparkle_heart).unwrap(); +/// // We can use the ? (try) operator to check if the bytes are valid +/// let sparkle_heart = str::from_utf8(&sparkle_heart)?; /// /// assert_eq!("💖", sparkle_heart); +/// # Ok::<_, str::Utf8Error>(()) /// ``` /// /// Incorrect bytes: diff --git a/library/core/src/str/lossy.rs b/library/core/src/str/lossy.rs index e7677c8317a..ed2cefc59a5 100644 --- a/library/core/src/str/lossy.rs +++ b/library/core/src/str/lossy.rs @@ -8,7 +8,7 @@ impl [u8] { /// Creates an iterator over the contiguous valid UTF-8 ranges of this /// slice, and the non-UTF-8 fragments in between. /// - /// See the [`Utf8Chunk`] type for documenation of the items yielded by this iterator. + /// See the [`Utf8Chunk`] type for documentation of the items yielded by this iterator. /// /// # Examples /// @@ -150,7 +150,7 @@ impl fmt::Debug for Debug<'_> { /// If you want a simple conversion from UTF-8 byte slices to string slices, /// [`from_utf8`] is easier to use. /// -/// See the [`Utf8Chunk`] type for documenation of the items yielded by this iterator. +/// See the [`Utf8Chunk`] type for documentation of the items yielded by this iterator. /// /// [byteslice]: slice /// [`from_utf8`]: super::from_utf8 diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs index 487fffba881..fda26a67299 100644 --- a/library/core/src/sync/atomic.rs +++ b/library/core/src/sync/atomic.rs @@ -86,7 +86,7 @@ //! // This is fine: `join` synchronizes the code in a way such that the atomic //! // store happens-before the non-atomic write. //! let handle = s.spawn(|| atomic.store(1, Ordering::Relaxed)); // atomic store -//! handle.join().unwrap(); // synchronize +//! handle.join().expect("thread won't panic"); // synchronize //! s.spawn(|| unsafe { atomic.as_ptr().write(2) }); // non-atomic write //! }); //! @@ -103,7 +103,7 @@ //! // This is fine: `join` synchronizes the code in a way such that //! // the 1-byte store happens-before the 2-byte store. //! 
let handle = s.spawn(|| atomic.store(1, Ordering::Relaxed)); -//! handle.join().unwrap(); +//! handle.join().expect("thread won't panic"); //! s.spawn(|| unsafe { //! let differently_sized = transmute::<&AtomicU16, &AtomicU8>(&atomic); //! differently_sized.store(2, Ordering::Relaxed); diff --git a/library/core/tests/hash/mod.rs b/library/core/tests/hash/mod.rs index bf91e9e5df0..9f14995f73f 100644 --- a/library/core/tests/hash/mod.rs +++ b/library/core/tests/hash/mod.rs @@ -4,16 +4,11 @@ use std::hash::{BuildHasher, Hash, Hasher}; use std::ptr; use std::rc::Rc; +#[derive(Default)] struct MyHasher { hash: u64, } -impl Default for MyHasher { - fn default() -> MyHasher { - MyHasher { hash: 0 } - } -} - impl Hasher for MyHasher { fn write(&mut self, buf: &[u8]) { for byte in buf { @@ -107,6 +102,8 @@ fn test_writer_hasher() { struct Custom { hash: u64, } + +#[derive(Default)] struct CustomHasher { output: u64, } @@ -123,12 +120,6 @@ impl Hasher for CustomHasher { } } -impl Default for CustomHasher { - fn default() -> CustomHasher { - CustomHasher { output: 0 } - } -} - impl Hash for Custom { fn hash<H: Hasher>(&self, state: &mut H) { state.write_u64(self.hash); diff --git a/library/core/tests/iter/adapters/take.rs b/library/core/tests/iter/adapters/take.rs index 65a8a93b4a9..b932059afec 100644 --- a/library/core/tests/iter/adapters/take.rs +++ b/library/core/tests/iter/adapters/take.rs @@ -255,7 +255,7 @@ fn test_reverse_on_zip() { let zipped_iter = vec_1.iter().zip(core::iter::repeat(0).take(20)); - // Cannot call rev here for automatic reversed zip constuction + // Cannot call rev here for automatic reversed zip construction for (&one, zero) in zipped_iter.rev() { assert_eq!((1, 0), (one, zero)); } diff --git a/library/core/tests/iter/traits/iterator.rs b/library/core/tests/iter/traits/iterator.rs index 76f1e3319d4..e31d2e15b6d 100644 --- a/library/core/tests/iter/traits/iterator.rs +++ b/library/core/tests/iter/traits/iterator.rs @@ -630,6 +630,18 @@ fn test_collect_into_tuples() { assert!(e.2 == d); } +#[test] +fn test_collect_for_tuples() { + let a = vec![(1, 2, 3), (4, 5, 6), (7, 8, 9)]; + let b = vec![1, 4, 7]; + let c = vec![2, 5, 8]; + let d = vec![3, 6, 9]; + let e: (Vec<_>, Vec<_>, Vec<_>) = a.into_iter().collect(); + assert!(e.0 == b); + assert!(e.1 == c); + assert!(e.2 == d); +} + // just tests by whether or not this compiles fn _empty_impl_all_auto_traits<T>() { use std::panic::{RefUnwindSafe, UnwindSafe}; diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs index 89b65eefd02..9f0ab7b3f29 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -16,6 +16,7 @@ #![feature(const_black_box)] #![feature(const_eval_select)] #![feature(const_swap)] +#![feature(const_swap_nonoverlapping)] #![feature(const_trait_impl)] #![feature(core_intrinsics)] #![feature(core_io_borrowed_buf)] diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs index 454b13a7ee3..e6825d8e39e 100644 --- a/library/core/tests/ptr.rs +++ b/library/core/tests/ptr.rs @@ -860,7 +860,10 @@ fn swap_copy_untyped() { } #[test] -fn test_const_copy() { +fn test_const_copy_ptr() { + // `copy` and `copy_nonoverlapping` are thin layers on top of intrinsics. Ensure they correctly + // deal with pointers even when the pointers cross the boundary from one "element" being copied + // to another. 
const { let ptr1 = &1; let mut ptr2 = &666; @@ -899,21 +902,61 @@ fn test_const_copy() { } #[test] -fn test_const_swap() { +fn test_const_swap_ptr() { + // The `swap` functions are implemented in the library, they are not primitives. + // Only `swap_nonoverlapping` takes a count; pointers that cross multiple elements + // are *not* supported. + // We put the pointer at an odd offset in the type and copy them as an array of bytes, + // which should catch most of the ways that the library implementation can get it wrong. + + #[cfg(target_pointer_width = "32")] + type HalfPtr = i16; + #[cfg(target_pointer_width = "64")] + type HalfPtr = i32; + + #[repr(C, packed)] + #[allow(unused)] + struct S { + f1: HalfPtr, + // Crucially this field is at an offset that is not a multiple of the pointer size. + ptr: &'static i32, + // Make sure the entire type does not have a power-of-2 size: + // make it 3 pointers in size. This used to hit a bug in `swap_nonoverlapping`. + f2: [HalfPtr; 3], + } + + // Ensure the entire thing is usize-aligned, so in principle this + // looks like it could be eligible for a `usize` copying loop. + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + struct A(S); + const { - let mut ptr1 = &1; - let mut ptr2 = &666; + let mut s1 = A(S { ptr: &1, f1: 0, f2: [0; 3] }); + let mut s2 = A(S { ptr: &666, f1: 0, f2: [0; 3] }); - // Swap ptr1 and ptr2, bytewise. `swap` does not take a count - // so the best we can do is use an array. - type T = [u8; mem::size_of::<&i32>()]; + // Swap ptr1 and ptr2, as an array. + type T = [u8; mem::size_of::<A>()]; unsafe { - ptr::swap(ptr::from_mut(&mut ptr1).cast::<T>(), ptr::from_mut(&mut ptr2).cast::<T>()); + ptr::swap(ptr::from_mut(&mut s1).cast::<T>(), ptr::from_mut(&mut s2).cast::<T>()); } // Make sure they still work. - assert!(*ptr1 == 666); - assert!(*ptr2 == 1); + assert!(*s1.0.ptr == 666); + assert!(*s2.0.ptr == 1); + + // Swap them back, again as an array. + unsafe { + ptr::swap_nonoverlapping( + ptr::from_mut(&mut s1).cast::<T>(), + ptr::from_mut(&mut s2).cast::<T>(), + 1, + ); + } + + // Make sure they still work. + assert!(*s1.0.ptr == 1); + assert!(*s2.0.ptr == 666); }; } diff --git a/library/portable-simd/crates/core_simd/src/vendor/arm.rs b/library/portable-simd/crates/core_simd/src/vendor/arm.rs index f8878d11f09..3dc54481b6f 100644 --- a/library/portable-simd/crates/core_simd/src/vendor/arm.rs +++ b/library/portable-simd/crates/core_simd/src/vendor/arm.rs @@ -49,17 +49,6 @@ mod neon { } #[cfg(any( - all(target_feature = "v5te", not(target_feature = "mclass")), - all(target_feature = "mclass", target_feature = "dsp"), -))] -mod dsp { - use super::*; - - from_transmute! { unsafe Simd<u16, 2> => uint16x2_t } - from_transmute! { unsafe Simd<i16, 2> => int16x2_t } -} - -#[cfg(any( all(target_feature = "v6", not(target_feature = "mclass")), all(target_feature = "mclass", target_feature = "dsp"), ))] @@ -68,6 +57,8 @@ mod simd32 { from_transmute! { unsafe Simd<u8, 4> => uint8x4_t } from_transmute! { unsafe Simd<i8, 4> => int8x4_t } + from_transmute! { unsafe Simd<u16, 2> => uint16x2_t } + from_transmute! 
{ unsafe Simd<i16, 2> => int16x2_t } } #[cfg(all( diff --git a/library/proc_macro/src/bridge/fxhash.rs b/library/proc_macro/src/bridge/fxhash.rs index 74a41451825..3345e099a37 100644 --- a/library/proc_macro/src/bridge/fxhash.rs +++ b/library/proc_macro/src/bridge/fxhash.rs @@ -22,6 +22,7 @@ pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>; /// out-performs an FNV-based hash within rustc itself -- the collision rate is /// similar or slightly worse than FNV, but the speed of the hash function /// itself is much higher because it works on up to 8 bytes at a time. +#[derive(Default)] pub struct FxHasher { hash: usize, } @@ -31,13 +32,6 @@ const K: usize = 0x9e3779b9; #[cfg(target_pointer_width = "64")] const K: usize = 0x517cc1b727220a95; -impl Default for FxHasher { - #[inline] - fn default() -> FxHasher { - FxHasher { hash: 0 } - } -} - impl FxHasher { #[inline] fn add_to_hash(&mut self, i: usize) { diff --git a/library/std/src/fs.rs b/library/std/src/fs.rs index 2d5d869630e..9b752ed1443 100644 --- a/library/std/src/fs.rs +++ b/library/std/src/fs.rs @@ -1869,8 +1869,10 @@ impl Permissions { /// /// # Note /// - /// This function does not take Access Control Lists (ACLs) or Unix group - /// membership into account. + /// This function does not take Access Control Lists (ACLs), Unix group + /// membership and other nuances into account. + /// Therefore the return value of this function cannot be relied upon + /// to predict whether attempts to read or write the file will actually succeed. /// /// # Windows /// @@ -1885,10 +1887,13 @@ impl Permissions { /// # Unix (including macOS) /// /// On Unix-based platforms this checks if *any* of the owner, group or others - /// write permission bits are set. It does not check if the current - /// user is in the file's assigned group. It also does not check ACLs. - /// Therefore the return value of this function cannot be relied upon - /// to predict whether attempts to read or write the file will actually succeed. + /// write permission bits are set. It does not consider anything else, including: + /// + /// * Whether the current user is in the file's assigned group. + /// * Permissions granted by ACL. + /// * That `root` user can write to files that do not have any write bits set. + /// * Writable files on a filesystem that is mounted read-only. + /// /// The [`PermissionsExt`] trait gives direct access to the permission bits but /// also does not read ACLs. /// @@ -2397,12 +2402,14 @@ pub fn symlink_metadata<P: AsRef<Path>>(path: P) -> io::Result<Metadata> { /// # Platform-specific behavior /// /// This function currently corresponds to the `rename` function on Unix -/// and the `MoveFileEx` function with the `MOVEFILE_REPLACE_EXISTING` flag on Windows. +/// and the `SetFileInformationByHandle` function on Windows. /// /// Because of this, the behavior when both `from` and `to` exist differs. On /// Unix, if `from` is a directory, `to` must also be an (empty) directory. If -/// `from` is not a directory, `to` must also be not a directory. In contrast, -/// on Windows, `from` can be anything, but `to` must *not* be a directory. +/// `from` is not a directory, `to` must also be not a directory. The behavior +/// on Windows is the same on Windows 10 1607 and higher if `FileRenameInfoEx` +/// is supported by the filesystem; otherwise, `from` can be anything, but +/// `to` must *not* be a directory. /// /// Note that, this [may change in the future][changes]. 
/// diff --git a/library/std/src/fs/tests.rs b/library/std/src/fs/tests.rs index 018e1958641..0308a5f433a 100644 --- a/library/std/src/fs/tests.rs +++ b/library/std/src/fs/tests.rs @@ -1912,3 +1912,44 @@ fn test_hidden_file_truncation() { let metadata = file.metadata().unwrap(); assert_eq!(metadata.len(), 0); } + +#[cfg(windows)] +#[test] +fn test_rename_file_over_open_file() { + // Make sure that std::fs::rename works if the target file is already opened with FILE_SHARE_DELETE. See #123985. + let tmpdir = tmpdir(); + + // Create source with test data to read. + let source_path = tmpdir.join("source_file.txt"); + fs::write(&source_path, b"source hello world").unwrap(); + + // Create target file with test data to read; + let target_path = tmpdir.join("target_file.txt"); + fs::write(&target_path, b"target hello world").unwrap(); + + // Open target file + let target_file = fs::File::open(&target_path).unwrap(); + + // Rename source + fs::rename(source_path, &target_path).unwrap(); + + core::mem::drop(target_file); + assert_eq!(fs::read(target_path).unwrap(), b"source hello world"); +} + +#[test] +#[cfg(windows)] +fn test_rename_directory_to_non_empty_directory() { + // Renaming a directory over a non-empty existing directory should fail on Windows. + let tmpdir: TempDir = tmpdir(); + + let source_path = tmpdir.join("source_directory"); + let target_path = tmpdir.join("target_directory"); + + fs::create_dir(&source_path).unwrap(); + fs::create_dir(&target_path).unwrap(); + + fs::write(target_path.join("target_file.txt"), b"target hello world").unwrap(); + + error!(fs::rename(source_path, target_path), 145); // ERROR_DIR_NOT_EMPTY +} diff --git a/library/std/src/io/mod.rs b/library/std/src/io/mod.rs index 4ffb0463006..7912f969bbd 100644 --- a/library/std/src/io/mod.rs +++ b/library/std/src/io/mod.rs @@ -1083,7 +1083,7 @@ pub trait Read { /// let f = BufReader::new(File::open("foo.txt")?); /// /// for byte in f.bytes() { - /// println!("{}", byte.unwrap()); + /// println!("{}", byte?); /// } /// Ok(()) /// } @@ -1995,15 +1995,16 @@ pub trait Seek { /// .write(true) /// .read(true) /// .create(true) - /// .open("foo.txt").unwrap(); + /// .open("foo.txt")?; /// /// let hello = "Hello!\n"; - /// write!(f, "{hello}").unwrap(); - /// f.rewind().unwrap(); + /// write!(f, "{hello}")?; + /// f.rewind()?; /// /// let mut buf = String::new(); - /// f.read_to_string(&mut buf).unwrap(); + /// f.read_to_string(&mut buf)?; /// assert_eq!(&buf, hello); + /// # std::io::Result::Ok(()) /// ``` #[stable(feature = "seek_rewind", since = "1.55.0")] fn rewind(&mut self) -> Result<()> { @@ -2212,8 +2213,9 @@ fn skip_until<R: BufRead + ?Sized>(r: &mut R, delim: u8) -> Result<usize> { /// /// let stdin = io::stdin(); /// for line in stdin.lock().lines() { -/// println!("{}", line.unwrap()); +/// println!("{}", line?); /// } +/// # std::io::Result::Ok(()) /// ``` /// /// If you have something that implements [`Read`], you can use the [`BufReader` @@ -2236,7 +2238,8 @@ fn skip_until<R: BufRead + ?Sized>(r: &mut R, delim: u8) -> Result<usize> { /// let f = BufReader::new(f); /// /// for line in f.lines() { -/// println!("{}", line.unwrap()); +/// let line = line?; +/// println!("{line}"); /// } /// /// Ok(()) @@ -2274,7 +2277,7 @@ pub trait BufRead: Read { /// let stdin = io::stdin(); /// let mut stdin = stdin.lock(); /// - /// let buffer = stdin.fill_buf().unwrap(); + /// let buffer = stdin.fill_buf()?; /// /// // work with buffer /// println!("{buffer:?}"); @@ -2282,6 +2285,7 @@ pub trait BufRead: Read { /// // ensure 
the bytes we worked with aren't returned again later /// let length = buffer.len(); /// stdin.consume(length); + /// # std::io::Result::Ok(()) /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn fill_buf(&mut self) -> Result<&[u8]>; @@ -2327,12 +2331,13 @@ pub trait BufRead: Read { /// let stdin = io::stdin(); /// let mut stdin = stdin.lock(); /// - /// while stdin.has_data_left().unwrap() { + /// while stdin.has_data_left()? { /// let mut line = String::new(); - /// stdin.read_line(&mut line).unwrap(); + /// stdin.read_line(&mut line)?; /// // work with line /// println!("{line:?}"); /// } + /// # std::io::Result::Ok(()) /// ``` #[unstable(feature = "buf_read_has_data_left", reason = "recently added", issue = "86423")] fn has_data_left(&mut self) -> Result<bool> { diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index dca5ccca0c4..e7ce5bc6140 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -81,7 +81,9 @@ extern "C" fn __rust_foreign_exception() -> ! { rtabort!("Rust cannot catch foreign exceptions"); } +#[derive(Default)] enum Hook { + #[default] Default, Custom(Box<dyn Fn(&PanicHookInfo<'_>) + 'static + Sync + Send>), } @@ -96,13 +98,6 @@ impl Hook { } } -impl Default for Hook { - #[inline] - fn default() -> Hook { - Hook::Default - } -} - static HOOK: RwLock<Hook> = RwLock::new(Hook::Default); /// Registers a custom panic hook, replacing the previously registered hook. diff --git a/library/std/src/process.rs b/library/std/src/process.rs index 6933528cdbd..929d2b57afe 100644 --- a/library/std/src/process.rs +++ b/library/std/src/process.rs @@ -224,7 +224,7 @@ pub struct Child { /// has been captured. You might find it helpful to do /// /// ```ignore (incomplete) - /// let stdin = child.stdin.take().unwrap(); + /// let stdin = child.stdin.take().expect("handle present"); /// ``` /// /// to avoid partially moving the `child` and thus blocking yourself from calling @@ -236,7 +236,7 @@ pub struct Child { /// has been captured. You might find it helpful to do /// /// ```ignore (incomplete) - /// let stdout = child.stdout.take().unwrap(); + /// let stdout = child.stdout.take().expect("handle present"); /// ``` /// /// to avoid partially moving the `child` and thus blocking yourself from calling @@ -248,7 +248,7 @@ pub struct Child { /// has been captured. 
You might find it helpful to do /// /// ```ignore (incomplete) - /// let stderr = child.stderr.take().unwrap(); + /// let stderr = child.stderr.take().expect("handle present"); /// ``` /// /// to avoid partially moving the `child` and thus blocking yourself from calling @@ -1052,14 +1052,14 @@ impl Command { /// use std::io::{self, Write}; /// let output = Command::new("/bin/cat") /// .arg("file.txt") - /// .output() - /// .expect("failed to execute process"); + /// .output()?; /// /// println!("status: {}", output.status); - /// io::stdout().write_all(&output.stdout).unwrap(); - /// io::stderr().write_all(&output.stderr).unwrap(); + /// io::stdout().write_all(&output.stdout)?; + /// io::stderr().write_all(&output.stderr)?; /// /// assert!(output.status.success()); + /// # io::Result::Ok(()) /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn output(&mut self) -> io::Result<Output> { @@ -1391,11 +1391,11 @@ impl Stdio { /// let output = Command::new("rev") /// .stdin(Stdio::inherit()) /// .stdout(Stdio::piped()) - /// .output() - /// .expect("Failed to execute command"); + /// .output()?; /// /// print!("You piped in the reverse of: "); - /// io::stdout().write_all(&output.stdout).unwrap(); + /// io::stdout().write_all(&output.stdout)?; + /// # io::Result::Ok(()) /// ``` #[must_use] #[stable(feature = "process", since = "1.0.0")] @@ -1575,14 +1575,14 @@ impl From<fs::File> for Stdio { /// use std::process::Command; /// /// // With the `foo.txt` file containing "Hello, world!" - /// let file = File::open("foo.txt").unwrap(); + /// let file = File::open("foo.txt")?; /// /// let reverse = Command::new("rev") /// .stdin(file) // Implicit File conversion into a Stdio - /// .output() - /// .expect("failed reverse command"); + /// .output()?; /// /// assert_eq!(reverse.stdout, b"!dlrow ,olleH"); + /// # std::io::Result::Ok(()) /// ``` fn from(file: fs::File) -> Stdio { Stdio::from_inner(file.into_inner().into()) @@ -2179,7 +2179,7 @@ impl Child { /// ```no_run /// use std::process::Command; /// - /// let mut child = Command::new("ls").spawn().unwrap(); + /// let mut child = Command::new("ls").spawn()?; /// /// match child.try_wait() { /// Ok(Some(status)) => println!("exited with: {status}"), @@ -2190,6 +2190,7 @@ impl Child { /// } /// Err(e) => println!("error attempting to wait: {e}"), /// } + /// # std::io::Result::Ok(()) /// ``` #[stable(feature = "process_try_wait", since = "1.18.0")] pub fn try_wait(&mut self) -> io::Result<Option<ExitStatus>> { diff --git a/library/std/src/sys/pal/windows/c/bindings.txt b/library/std/src/sys/pal/windows/c/bindings.txt index 248ce3c9ff6..d98e90eedfe 100644 --- a/library/std/src/sys/pal/windows/c/bindings.txt +++ b/library/std/src/sys/pal/windows/c/bindings.txt @@ -2295,6 +2295,7 @@ Windows.Win32.Storage.FileSystem.FILE_NAME_OPENED Windows.Win32.Storage.FileSystem.FILE_READ_ATTRIBUTES Windows.Win32.Storage.FileSystem.FILE_READ_DATA Windows.Win32.Storage.FileSystem.FILE_READ_EA +Windows.Win32.Storage.FileSystem.FILE_RENAME_INFO Windows.Win32.Storage.FileSystem.FILE_SHARE_DELETE Windows.Win32.Storage.FileSystem.FILE_SHARE_MODE Windows.Win32.Storage.FileSystem.FILE_SHARE_NONE @@ -2603,5 +2604,7 @@ Windows.Win32.System.Threading.WaitForMultipleObjects Windows.Win32.System.Threading.WaitForSingleObject Windows.Win32.System.Threading.WakeAllConditionVariable Windows.Win32.System.Threading.WakeConditionVariable +Windows.Win32.System.WindowsProgramming.FILE_RENAME_FLAG_POSIX_SEMANTICS 
+Windows.Win32.System.WindowsProgramming.FILE_RENAME_FLAG_REPLACE_IF_EXISTS Windows.Win32.System.WindowsProgramming.PROGRESS_CONTINUE Windows.Win32.UI.Shell.GetUserProfileDirectoryW diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index 19925e59dfe..ed29f3d264c 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -2472,6 +2472,22 @@ pub const FILE_RANDOM_ACCESS: NTCREATEFILE_CREATE_OPTIONS = 2048u32; pub const FILE_READ_ATTRIBUTES: FILE_ACCESS_RIGHTS = 128u32; pub const FILE_READ_DATA: FILE_ACCESS_RIGHTS = 1u32; pub const FILE_READ_EA: FILE_ACCESS_RIGHTS = 8u32; +pub const FILE_RENAME_FLAG_POSIX_SEMANTICS: u32 = 2u32; +pub const FILE_RENAME_FLAG_REPLACE_IF_EXISTS: u32 = 1u32; +#[repr(C)] +#[derive(Clone, Copy)] +pub struct FILE_RENAME_INFO { + pub Anonymous: FILE_RENAME_INFO_0, + pub RootDirectory: HANDLE, + pub FileNameLength: u32, + pub FileName: [u16; 1], +} +#[repr(C)] +#[derive(Clone, Copy)] +pub union FILE_RENAME_INFO_0 { + pub ReplaceIfExists: BOOLEAN, + pub Flags: u32, +} pub const FILE_RESERVE_OPFILTER: NTCREATEFILE_CREATE_OPTIONS = 1048576u32; pub const FILE_SEQUENTIAL_ONLY: NTCREATEFILE_CREATE_OPTIONS = 4u32; pub const FILE_SESSION_AWARE: NTCREATEFILE_CREATE_OPTIONS = 262144u32; diff --git a/library/std/src/sys/pal/windows/fs.rs b/library/std/src/sys/pal/windows/fs.rs index 5bdd5f81b9c..dda4259919b 100644 --- a/library/std/src/sys/pal/windows/fs.rs +++ b/library/std/src/sys/pal/windows/fs.rs @@ -1,5 +1,6 @@ use super::api::{self, WinError}; use super::{IoResult, to_u16s}; +use crate::alloc::{alloc, handle_alloc_error}; use crate::borrow::Cow; use crate::ffi::{OsStr, OsString, c_void}; use crate::io::{self, BorrowedCursor, Error, IoSlice, IoSliceMut, SeekFrom}; @@ -315,19 +316,31 @@ impl File { && api::get_last_error() == WinError::ALREADY_EXISTS { unsafe { - // This originally used `FileAllocationInfo` instead of - // `FileEndOfFileInfo` but that wasn't supported by WINE. - // It's arguable which fits the semantics of `OpenOptions` - // better so let's just use the more widely supported method. - let eof = c::FILE_END_OF_FILE_INFO { EndOfFile: 0 }; + // This first tries `FileAllocationInfo` but falls back to + // `FileEndOfFileInfo` in order to support WINE. + // If WINE gains support for FileAllocationInfo, we should + // remove the fallback. 
+ let alloc = c::FILE_ALLOCATION_INFO { AllocationSize: 0 }; let result = c::SetFileInformationByHandle( handle.as_raw_handle(), - c::FileEndOfFileInfo, + c::FileAllocationInfo, - (&raw const eof).cast::<c_void>(), - mem::size_of::<c::FILE_END_OF_FILE_INFO>() as u32, + (&raw const alloc).cast::<c_void>(), + mem::size_of::<c::FILE_ALLOCATION_INFO>() as u32, ); if result == 0 { - return Err(io::Error::last_os_error()); + let eof = c::FILE_END_OF_FILE_INFO { EndOfFile: 0 }; + let result = c::SetFileInformationByHandle( + handle.as_raw_handle(), + c::FileEndOfFileInfo, + (&raw const eof).cast::<c_void>(), + mem::size_of::<c::FILE_END_OF_FILE_INFO>() as u32, + ); + if result == 0 { + return Err(io::Error::last_os_error()); + } } } } @@ -1223,7 +1236,139 @@ pub fn unlink(p: &Path) -> io::Result<()> { pub fn rename(old: &Path, new: &Path) -> io::Result<()> { let old = maybe_verbatim(old)?; let new = maybe_verbatim(new)?; - cvt(unsafe { c::MoveFileExW(old.as_ptr(), new.as_ptr(), c::MOVEFILE_REPLACE_EXISTING) })?; + + let new_len_without_nul_in_bytes: u32 = ((new.len() - 1) * mem::size_of::<u16>()).try_into().unwrap(); + + // The last field of FILE_RENAME_INFO, the file name, is unsized, + // and FILE_RENAME_INFO has two padding bytes. + // Therefore we need to make sure to not allocate less than + // size_of::<c::FILE_RENAME_INFO>() bytes, which would be the case with + // 0 or 1 character paths + a null byte. + let struct_size = mem::size_of::<c::FILE_RENAME_INFO>() + .max(mem::offset_of!(c::FILE_RENAME_INFO, FileName) + new.len() * mem::size_of::<u16>()); + + let struct_size: u32 = struct_size.try_into().unwrap(); + + let create_file = |extra_access, extra_flags| { + let handle = unsafe { + HandleOrInvalid::from_raw_handle(c::CreateFileW( + old.as_ptr(), + c::SYNCHRONIZE | c::DELETE | extra_access, + c::FILE_SHARE_READ | c::FILE_SHARE_WRITE | c::FILE_SHARE_DELETE, + ptr::null(), + c::OPEN_EXISTING, + c::FILE_ATTRIBUTE_NORMAL | c::FILE_FLAG_BACKUP_SEMANTICS | extra_flags, + ptr::null_mut(), + )) + }; + + OwnedHandle::try_from(handle).map_err(|_| io::Error::last_os_error()) + }; + + // The following code replicates `MoveFileEx`'s behavior as reverse-engineered from its disassembly. + // If `old` refers to a mount point, we move it instead of the target. + let handle = match create_file(c::FILE_READ_ATTRIBUTES, c::FILE_FLAG_OPEN_REPARSE_POINT) { + Ok(handle) => { + let mut file_attribute_tag_info: MaybeUninit<c::FILE_ATTRIBUTE_TAG_INFO> = + MaybeUninit::uninit(); + + let result = unsafe { + cvt(c::GetFileInformationByHandleEx( + handle.as_raw_handle(), + c::FileAttributeTagInfo, + file_attribute_tag_info.as_mut_ptr().cast(), + mem::size_of::<c::FILE_ATTRIBUTE_TAG_INFO>().try_into().unwrap(), + )) + }; + + if let Err(err) = result { + if err.raw_os_error() == Some(c::ERROR_INVALID_PARAMETER as _) + || err.raw_os_error() == Some(c::ERROR_INVALID_FUNCTION as _) + { + // `GetFileInformationByHandleEx` documents that not all underlying drivers support all file information classes. + // Since we know we passed the correct arguments, this means the underlying driver didn't understand our request; + // `MoveFileEx` proceeds by reopening the file without inhibiting reparse point behavior.
+ None + } else { + Some(Err(err)) + } + } else { + // SAFETY: The struct has been initialized by GetFileInformationByHandleEx + let file_attribute_tag_info = unsafe { file_attribute_tag_info.assume_init() }; + + if file_attribute_tag_info.FileAttributes & c::FILE_ATTRIBUTE_REPARSE_POINT != 0 + && file_attribute_tag_info.ReparseTag != c::IO_REPARSE_TAG_MOUNT_POINT + { + // The file is not a mount point: Reopen the file without inhibiting reparse point behavior. + None + } else { + // The file is a mount point: Don't reopen the file so that the mount point gets renamed. + Some(Ok(handle)) + } + } + } + // The underlying driver may not support `FILE_FLAG_OPEN_REPARSE_POINT`: Retry without it. + Err(err) if err.raw_os_error() == Some(c::ERROR_INVALID_PARAMETER as _) => None, + Err(err) => Some(Err(err)), + } + .unwrap_or_else(|| create_file(0, 0))?; + + let layout = core::alloc::Layout::from_size_align( + struct_size as _, + mem::align_of::<c::FILE_RENAME_INFO>(), + ) + .unwrap(); + + let file_rename_info = unsafe { alloc(layout) } as *mut c::FILE_RENAME_INFO; + + if file_rename_info.is_null() { + handle_alloc_error(layout); + } + + // SAFETY: file_rename_info is a non-null pointer pointing to memory allocated by the global allocator. + let mut file_rename_info = unsafe { Box::from_raw(file_rename_info) }; + + // SAFETY: We have allocated enough memory for a full FILE_RENAME_INFO struct and a filename. + unsafe { + (&raw mut (*file_rename_info).Anonymous).write(c::FILE_RENAME_INFO_0 { + Flags: c::FILE_RENAME_FLAG_REPLACE_IF_EXISTS | c::FILE_RENAME_FLAG_POSIX_SEMANTICS, + }); + + (&raw mut (*file_rename_info).RootDirectory).write(ptr::null_mut()); + (&raw mut (*file_rename_info).FileNameLength).write(new_len_without_nul_in_bytes); + + new.as_ptr() + .copy_to_nonoverlapping((&raw mut (*file_rename_info).FileName) as *mut u16, new.len()); + } + + // We don't use `set_file_information_by_handle` here as `FILE_RENAME_INFO` is used for both `FileRenameInfo` and `FileRenameInfoEx`. + let result = unsafe { + cvt(c::SetFileInformationByHandle( + handle.as_raw_handle(), + c::FileRenameInfoEx, + (&raw const *file_rename_info).cast::<c_void>(), + struct_size, + )) + }; + + if let Err(err) = result { + if err.raw_os_error() == Some(c::ERROR_INVALID_PARAMETER as _) { + // FileRenameInfoEx and FILE_RENAME_FLAG_POSIX_SEMANTICS were added in Windows 10 1607; retry with FileRenameInfo. 
+ file_rename_info.Anonymous.ReplaceIfExists = 1; + + cvt(unsafe { + c::SetFileInformationByHandle( + handle.as_raw_handle(), + c::FileRenameInfo, + (&raw const *file_rename_info).cast::<c_void>(), + struct_size, + ) + })?; + } else { + return Err(err); + } + } + Ok(()) } diff --git a/library/std/src/sys_common/process.rs b/library/std/src/sys_common/process.rs index 5333ee146f7..9f61d69d858 100644 --- a/library/std/src/sys_common/process.rs +++ b/library/std/src/sys_common/process.rs @@ -8,19 +8,13 @@ use crate::sys::process::{EnvKey, ExitStatus, Process, StdioPipes}; use crate::{env, fmt, io}; // Stores a set of changes to an environment -#[derive(Clone)] +#[derive(Clone, Default)] pub struct CommandEnv { clear: bool, saw_path: bool, vars: BTreeMap<EnvKey, Option<OsString>>, } -impl Default for CommandEnv { - fn default() -> Self { - CommandEnv { clear: false, saw_path: false, vars: Default::default() } - } -} - impl fmt::Debug for CommandEnv { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut debug_command_env = f.debug_struct("CommandEnv"); diff --git a/library/stdarch b/library/stdarch -Subproject e5e00aab0a8c8fa35fb7865e88fa82366f615c5 +Subproject 684de0d6fef708cae08214fef9643dd9ec7296e diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index d46c0ab7fef..f32d95fe836 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -31,8 +31,13 @@ pub struct Std { } impl Std { - pub fn new_with_build_kind(target: TargetSelection, kind: Option<Kind>) -> Self { - Self { target, crates: vec![], override_build_kind: kind } + pub fn new(target: TargetSelection) -> Self { + Self { target, crates: vec![], override_build_kind: None } + } + + pub fn build_kind(mut self, kind: Option<Kind>) -> Self { + self.override_build_kind = kind; + self } } @@ -167,20 +172,17 @@ pub struct Rustc { impl Rustc { pub fn new(target: TargetSelection, builder: &Builder<'_>) -> Self { - Self::new_with_build_kind(target, builder, None) - } - - pub fn new_with_build_kind( - target: TargetSelection, - builder: &Builder<'_>, - kind: Option<Kind>, - ) -> Self { let crates = builder .in_tree_crates("rustc-main", Some(target)) .into_iter() .map(|krate| krate.name.to_string()) .collect(); - Self { target, crates, override_build_kind: kind } + Self { target, crates, override_build_kind: None } + } + + pub fn build_kind(mut self, build_kind: Option<Kind>) -> Self { + self.override_build_kind = build_kind; + self } } @@ -216,7 +218,7 @@ impl Step for Rustc { builder.ensure(crate::core::build_steps::compile::Std::new(compiler, compiler.host)); builder.ensure(crate::core::build_steps::compile::Std::new(compiler, target)); } else { - builder.ensure(Std::new_with_build_kind(target, self.override_build_kind)); + builder.ensure(Std::new(target).build_kind(self.override_build_kind)); } let mut cargo = builder::Cargo::new( diff --git a/src/bootstrap/src/core/build_steps/clippy.rs b/src/bootstrap/src/core/build_steps/clippy.rs index 0884d86cc6d..518db156fea 100644 --- a/src/bootstrap/src/core/build_steps/clippy.rs +++ b/src/bootstrap/src/core/build_steps/clippy.rs @@ -215,7 +215,7 @@ impl Step for Rustc { builder.ensure(compile::Std::new(compiler, compiler.host)); builder.ensure(compile::Std::new(compiler, target)); } else { - builder.ensure(check::Std::new_with_build_kind(target, Some(Kind::Check))); + builder.ensure(check::Std::new(target).build_kind(Some(Kind::Check))); } let mut cargo = builder::Cargo::new( @@ -285,7 
+285,7 @@ macro_rules! lint_any { let compiler = builder.compiler(builder.top_stage, builder.config.build); let target = self.target; - builder.ensure(check::Rustc::new_with_build_kind(target, builder, Some(Kind::Check))); + builder.ensure(check::Rustc::new(target, builder).build_kind(Some(Kind::Check))); let cargo = prepare_tool_cargo( builder, diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 6700f3ba680..d30a0d028ff 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -57,41 +57,20 @@ impl Std { } } - pub fn force_recompile(compiler: Compiler, target: TargetSelection) -> Self { - Self { - target, - compiler, - crates: Default::default(), - force_recompile: true, - extra_rust_args: &[], - is_for_mir_opt_tests: false, - } + pub fn force_recompile(mut self, force_recompile: bool) -> Self { + self.force_recompile = force_recompile; + self } - pub fn new_for_mir_opt_tests(compiler: Compiler, target: TargetSelection) -> Self { - Self { - target, - compiler, - crates: Default::default(), - force_recompile: false, - extra_rust_args: &[], - is_for_mir_opt_tests: true, - } + #[allow(clippy::wrong_self_convention)] + pub fn is_for_mir_opt_tests(mut self, is_for_mir_opt_tests: bool) -> Self { + self.is_for_mir_opt_tests = is_for_mir_opt_tests; + self } - pub fn new_with_extra_rust_args( - compiler: Compiler, - target: TargetSelection, - extra_rust_args: &'static [&'static str], - ) -> Self { - Self { - target, - compiler, - crates: Default::default(), - force_recompile: false, - extra_rust_args, - is_for_mir_opt_tests: false, - } + pub fn extra_rust_args(mut self, extra_rust_args: &'static [&'static str]) -> Self { + self.extra_rust_args = extra_rust_args; + self } fn copy_extra_objects( diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index c76504761be..54aad088552 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -270,7 +270,11 @@ impl Step for GenerateCompletions { (Bash, builder.src.join("src/etc/completions/x.py.sh")), (Zsh, builder.src.join("src/etc/completions/x.py.zsh")), (Fish, builder.src.join("src/etc/completions/x.py.fish")), - (PowerShell, builder.src.join("src/etc/completions/x.py.ps1")) + (PowerShell, builder.src.join("src/etc/completions/x.py.ps1")), + (Bash, builder.src.join("src/etc/completions/x.sh")), + (Zsh, builder.src.join("src/etc/completions/x.zsh")), + (Fish, builder.src.join("src/etc/completions/x.fish")), + (PowerShell, builder.src.join("src/etc/completions/x.ps1")) ); } diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 8d9d2b6b6a1..636c88b099b 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -1718,7 +1718,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the // ensure that `libproc_macro` is available on the host. if suite == "mir-opt" { - builder.ensure(compile::Std::new_for_mir_opt_tests(compiler, compiler.host)); + builder.ensure(compile::Std::new(compiler, compiler.host).is_for_mir_opt_tests(true)); } else { builder.ensure(compile::Std::new(compiler, compiler.host)); } @@ -1731,7 +1731,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. 
In the let mut cmd = builder.tool_cmd(Tool::Compiletest); if suite == "mir-opt" { - builder.ensure(compile::Std::new_for_mir_opt_tests(compiler, target)); + builder.ensure(compile::Std::new(compiler, target).is_for_mir_opt_tests(true)); } else { builder.ensure(compile::Std::new(compiler, target)); } @@ -2737,7 +2737,7 @@ impl Step for Crate { // Prepare sysroot // See [field@compile::Std::force_recompile]. - builder.ensure(compile::Std::force_recompile(compiler, compiler.host)); + builder.ensure(compile::Std::new(compiler, compiler.host).force_recompile(true)); // If we're not doing a full bootstrap but we're testing a stage2 // version of libstd, then what we're actually testing is the libstd @@ -2781,7 +2781,7 @@ impl Step for Crate { } else { // Also prepare a sysroot for the target. if builder.config.build != target { - builder.ensure(compile::Std::force_recompile(compiler, target)); + builder.ensure(compile::Std::new(compiler, target).force_recompile(true)); builder.ensure(RemoteCopyLibs { compiler, target }); } @@ -3557,10 +3557,10 @@ impl Step for CodegenGCC { let compiler = self.compiler; let target = self.target; - builder.ensure(compile::Std::new_with_extra_rust_args(compiler, target, &[ - "-Csymbol-mangling-version=v0", - "-Cpanic=abort", - ])); + builder.ensure( + compile::Std::new(compiler, target) + .extra_rust_args(&["-Csymbol-mangling-version=v0", "-Cpanic=abort"]), + ); // If we're not doing a full bootstrap but we're testing a stage2 // version of libstd, then what we're actually testing is the libstd diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index bfeb811508c..00bcbe9f86d 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -633,7 +633,14 @@ pub fn get_completion<G: clap_complete::Generator>(shell: G, path: &Path) -> Opt }) }; let mut buf = Vec::new(); - clap_complete::generate(shell, &mut cmd, "x.py", &mut buf); + let (bin_name, _) = path + .file_name() + .expect("path should be a regular file") + .to_str() + .expect("file name should be UTF-8") + .rsplit_once('.') + .expect("file name should have an extension"); + clap_complete::generate(shell, &mut cmd, bin_name, &mut buf); if buf == current.as_bytes() { return None; } diff --git a/src/build_helper/src/fs/mod.rs b/src/build_helper/src/fs/mod.rs new file mode 100644 index 00000000000..02029846fd1 --- /dev/null +++ b/src/build_helper/src/fs/mod.rs @@ -0,0 +1,69 @@ +//! Misc filesystem related helpers for use by bootstrap and tools. +use std::fs::Metadata; +use std::path::Path; +use std::{fs, io}; + +#[cfg(test)] +mod tests; + +/// Helper to ignore [`std::io::ErrorKind::NotFound`], but still propagate other +/// [`std::io::ErrorKind`]s. +pub fn ignore_not_found<Op>(mut op: Op) -> io::Result<()> +where + Op: FnMut() -> io::Result<()>, +{ + match op() { + Ok(()) => Ok(()), + Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()), + Err(e) => Err(e), + } +} + +/// A wrapper around [`std::fs::remove_dir_all`] that can also be used on *non-directory entries*, +/// including files and symbolic links. +/// +/// - This will produce an error if the target path is not found. +/// - Like [`std::fs::remove_dir_all`], this helper does not traverse symbolic links, will remove +/// symbolic link itself. +/// - This helper is **not** robust against races on the underlying filesystem, behavior is +/// unspecified if this helper is called concurrently. +/// - This helper is not robust against TOCTOU problems. 
+/// +/// FIXME: this implementation is insufficiently robust to replace bootstrap's clean `rm_rf` +/// implementation: +/// +/// - This implementation currently does not perform retries. +#[track_caller] +pub fn recursive_remove<P: AsRef<Path>>(path: P) -> io::Result<()> { + let path = path.as_ref(); + let metadata = fs::symlink_metadata(path)?; + #[cfg(windows)] + let is_dir_like = |meta: &fs::Metadata| { + use std::os::windows::fs::FileTypeExt; + meta.is_dir() || meta.file_type().is_symlink_dir() + }; + #[cfg(not(windows))] + let is_dir_like = fs::Metadata::is_dir; + + if is_dir_like(&metadata) { + fs::remove_dir_all(path) + } else { + try_remove_op_set_perms(fs::remove_file, path, metadata) + } +} + +fn try_remove_op_set_perms<'p, Op>(mut op: Op, path: &'p Path, metadata: Metadata) -> io::Result<()> +where + Op: FnMut(&'p Path) -> io::Result<()>, +{ + match op(path) { + Ok(()) => Ok(()), + Err(e) if e.kind() == io::ErrorKind::PermissionDenied => { + let mut perms = metadata.permissions(); + perms.set_readonly(false); + fs::set_permissions(path, perms)?; + op(path) + } + Err(e) => Err(e), + } +} diff --git a/src/build_helper/src/fs/tests.rs b/src/build_helper/src/fs/tests.rs new file mode 100644 index 00000000000..1e694393127 --- /dev/null +++ b/src/build_helper/src/fs/tests.rs @@ -0,0 +1,214 @@ +#![deny(unused_must_use)] + +use std::{env, fs, io}; + +use super::recursive_remove; + +mod recursive_remove_tests { + use super::*; + + // Basic cases + + #[test] + fn nonexistent_path() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_nonexistent_path"); + assert!(fs::symlink_metadata(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound)); + assert!(recursive_remove(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound)); + } + + #[test] + fn file() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_file"); + fs::write(&path, b"").unwrap(); + assert!(fs::symlink_metadata(&path).is_ok()); + assert!(recursive_remove(&path).is_ok()); + assert!(fs::symlink_metadata(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound)); + } + + mod dir_tests { + use super::*; + + #[test] + fn dir_empty() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_dir_tests_dir_empty"); + fs::create_dir_all(&path).unwrap(); + assert!(fs::symlink_metadata(&path).is_ok()); + assert!(recursive_remove(&path).is_ok()); + assert!( + fs::symlink_metadata(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + } + + #[test] + fn dir_recursive() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_dir_tests_dir_recursive"); + fs::create_dir_all(&path).unwrap(); + assert!(fs::symlink_metadata(&path).is_ok()); + + let file_a = path.join("a.txt"); + fs::write(&file_a, b"").unwrap(); + assert!(fs::symlink_metadata(&file_a).is_ok()); + + let dir_b = path.join("b"); + fs::create_dir_all(&dir_b).unwrap(); + assert!(fs::symlink_metadata(&dir_b).is_ok()); + + let file_c = dir_b.join("c.rs"); + fs::write(&file_c, b"").unwrap(); + assert!(fs::symlink_metadata(&file_c).is_ok()); + + assert!(recursive_remove(&path).is_ok()); + + assert!( + fs::symlink_metadata(&file_a).is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + assert!( + fs::symlink_metadata(&dir_b).is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + assert!( + fs::symlink_metadata(&file_c).is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + } + } + + /// Check that [`recursive_remove`] does not traverse symlinks and only 
removes symlinks + /// themselves. + /// + /// Symlink-to-file versus symlink-to-dir is a distinction that's important on Windows, but not + /// on Unix. + mod symlink_tests { + use super::*; + + #[cfg(unix)] + #[test] + fn unix_symlink() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_symlink_tests_unix_symlink"); + let symlink_path = + tmpdir.join("__INTERNAL_BOOTSTRAP__symlink_tests_unix_symlink_symlink"); + fs::write(&path, b"").unwrap(); + + assert!(fs::symlink_metadata(&path).is_ok()); + assert!( + fs::symlink_metadata(&symlink_path) + .is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + + std::os::unix::fs::symlink(&path, &symlink_path).unwrap(); + + assert!(recursive_remove(&symlink_path).is_ok()); + + // Check that the symlink got removed... + assert!( + fs::symlink_metadata(&symlink_path) + .is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + // ... but pointed-to file still exists. + assert!(fs::symlink_metadata(&path).is_ok()); + + fs::remove_file(&path).unwrap(); + } + + #[cfg(windows)] + #[test] + fn windows_symlink_to_file() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_symlink_tests_windows_symlink_to_file"); + let symlink_path = tmpdir + .join("__INTERNAL_BOOTSTRAP_SYMLINK_symlink_tests_windows_symlink_to_file_symlink"); + fs::write(&path, b"").unwrap(); + + assert!(fs::symlink_metadata(&path).is_ok()); + assert!( + fs::symlink_metadata(&symlink_path) + .is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + + std::os::windows::fs::symlink_file(&path, &symlink_path).unwrap(); + + assert!(recursive_remove(&symlink_path).is_ok()); + + // Check that the symlink-to-file got removed... + assert!( + fs::symlink_metadata(&symlink_path) + .is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + // ... but pointed-to file still exists. + assert!(fs::symlink_metadata(&path).is_ok()); + + fs::remove_file(&path).unwrap(); + } + + #[cfg(windows)] + #[test] + fn windows_symlink_to_dir() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_symlink_tests_windows_symlink_to_dir"); + let symlink_path = + tmpdir.join("__INTERNAL_BOOTSTRAP_symlink_tests_windows_symlink_to_dir_symlink"); + fs::create_dir_all(&path).unwrap(); + + assert!(fs::symlink_metadata(&path).is_ok()); + assert!( + fs::symlink_metadata(&symlink_path) + .is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + + std::os::windows::fs::symlink_dir(&path, &symlink_path).unwrap(); + + assert!(recursive_remove(&symlink_path).is_ok()); + + // Check that the symlink-to-dir got removed... + assert!( + fs::symlink_metadata(&symlink_path) + .is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + // ... but pointed-to dir still exists. + assert!(fs::symlink_metadata(&path).is_ok()); + + fs::remove_dir_all(&path).unwrap(); + } + } + + /// Read-only file and directories only need special handling on Windows. 
+ #[cfg(windows)] + mod readonly_tests { + use super::*; + + #[test] + fn overrides_readonly() { + let tmpdir = env::temp_dir(); + let path = tmpdir.join("__INTERNAL_BOOTSTRAP_readonly_tests_overrides_readonly"); + + // In case of a previous failed test: + if let Ok(mut perms) = fs::symlink_metadata(&path).map(|m| m.permissions()) { + perms.set_readonly(false); + fs::set_permissions(&path, perms).unwrap(); + fs::remove_file(&path).unwrap(); + } + + fs::write(&path, b"").unwrap(); + + let mut perms = fs::symlink_metadata(&path).unwrap().permissions(); + perms.set_readonly(true); + fs::set_permissions(&path, perms).unwrap(); + + // Check that file exists but is read-only, and that normal `std::fs::remove_file` fails + // to delete the file. + assert!(fs::symlink_metadata(&path).is_ok_and(|m| m.permissions().readonly())); + assert!( + fs::remove_file(&path).is_err_and(|e| e.kind() == io::ErrorKind::PermissionDenied) + ); + + assert!(recursive_remove(&path).is_ok()); + + assert!( + fs::symlink_metadata(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound) + ); + } + } +} diff --git a/src/build_helper/src/lib.rs b/src/build_helper/src/lib.rs index 4a4f0ca2a9d..dceb5fdeeea 100644 --- a/src/build_helper/src/lib.rs +++ b/src/build_helper/src/lib.rs @@ -2,6 +2,7 @@ pub mod ci; pub mod drop_bomb; +pub mod fs; pub mod git; pub mod metrics; pub mod stage0_parser; diff --git a/src/ci/docker/README.md b/src/ci/docker/README.md index 876787c30e5..2f35e605026 100644 --- a/src/ci/docker/README.md +++ b/src/ci/docker/README.md @@ -26,6 +26,10 @@ DEPLOY=1 ./src/ci/docker/run.sh x86_64-gnu while locally, to the `obj/$image_name` directory. This is primarily to prevent strange linker errors when using multiple Docker images. +## Local Development + +Refer to the [dev guide](https://rustc-dev-guide.rust-lang.org/tests/docker.html) for more information on testing locally. 
+ ## Filesystem layout - Each host architecture has its own `host-{arch}` directory, and those diff --git a/src/ci/docker/host-aarch64/dist-arm-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-arm-linux/Dockerfile index 4a749473004..420c42bc9d8 100644 --- a/src/ci/docker/host-aarch64/dist-arm-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-arm-linux/Dockerfile @@ -19,7 +19,7 @@ RUN sh /scripts/rustbuild-setup.sh WORKDIR /tmp COPY scripts/crosstool-ng-build.sh /scripts/ -COPY host-aarch64/dist-arm-linux/arm-linux-gnueabi.defconfig /tmp/crosstool.defconfig +COPY host-x86_64/dist-arm-linux/arm-linux-gnueabi.defconfig /tmp/crosstool.defconfig RUN /scripts/crosstool-ng-build.sh COPY scripts/sccache.sh /scripts/ diff --git a/src/ci/docker/host-aarch64/dist-arm-linux/arm-linux-gnueabi.defconfig b/src/ci/docker/host-x86_64/dist-arm-linux/arm-linux-gnueabi.defconfig index e7afdbe9d4d..e7afdbe9d4d 100644 --- a/src/ci/docker/host-aarch64/dist-arm-linux/arm-linux-gnueabi.defconfig +++ b/src/ci/docker/host-x86_64/dist-arm-linux/arm-linux-gnueabi.defconfig diff --git a/src/ci/docker/scripts/build-fuchsia-toolchain.sh b/src/ci/docker/scripts/build-fuchsia-toolchain.sh index 027d412d250..3c65a52ada7 100755 --- a/src/ci/docker/scripts/build-fuchsia-toolchain.sh +++ b/src/ci/docker/scripts/build-fuchsia-toolchain.sh @@ -4,13 +4,13 @@ set -ex source shared.sh FUCHSIA_SDK_URL=https://chrome-infra-packages.appspot.com/dl/fuchsia/sdk/core/linux-amd64 -FUCHSIA_SDK_ID=version:21.20240610.2.1 -FUCHSIA_SDK_SHA256=2d2d057fc3f0404197cced2200f88cbcdaaf5fbf6475955045091f8676791ce7 +FUCHSIA_SDK_ID=version:26.20241211.7.1 +FUCHSIA_SDK_SHA256=2cb7a9a0419f7413a46e0ccef7dad89f7c9979940d7c1ee87fac70ff499757d6 FUCHSIA_SDK_USR_DIR=/usr/local/core-linux-amd64-fuchsia-sdk CLANG_DOWNLOAD_URL=\ https://chrome-infra-packages.appspot.com/dl/fuchsia/third_party/clang/linux-amd64 -CLANG_DOWNLOAD_ID=git_revision:3809e20afc68d7d03821f0ec59b928dcf9befbf4 -CLANG_DOWNLOAD_SHA256=3c2c442b61cd9e8f1b567738f6d53cffe11b3fc820e7dae87a82a0859be8f204 +CLANG_DOWNLOAD_ID=git_revision:388d7f144880dcd85ff31f06793304405a9f44b6 +CLANG_DOWNLOAD_SHA256=970d1f427b9c9a3049d8622c80c86830ff31b5334ad8da47a2f1e81143197e8b install_clang() { mkdir -p clang_download diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 94033d79af9..876a7793592 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -43,7 +43,7 @@ runners: os: windows-2022-16core-64gb <<: *base-job - - &job-linux-8c-aarch64 + - &job-aarch64-linux os: ubuntu-22.04-arm64-8core-32gb envs: @@ -124,10 +124,10 @@ auto: ############################# - image: aarch64-gnu - <<: *job-linux-8c-aarch64 + <<: *job-aarch64-linux - image: aarch64-gnu-debug - <<: *job-linux-8c-aarch64 + <<: *job-aarch64-linux - image: arm-android <<: *job-linux-4c @@ -144,7 +144,7 @@ auto: <<: *job-linux-4c - image: dist-arm-linux - <<: *job-linux-8c-aarch64 + <<: *job-linux-8c - image: dist-armhf-linux <<: *job-linux-4c diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index 1e9f5a33fc7..9743b3ba442 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -81,6 +81,7 @@ - [\*-nto-qnx-\*](platform-support/nto-qnx.md) - [*-unikraft-linux-musl](platform-support/unikraft-linux-musl.md) - [*-unknown-hermit](platform-support/hermit.md) + - [*-unknown-freebsd](platform-support/freebsd.md) - [\*-unknown-netbsd\*](platform-support/netbsd.md) - [*-unknown-openbsd](platform-support/openbsd.md) - 
[*-unknown-redox](platform-support/redox.md) diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index db6612f9fff..00ab61051c3 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -101,7 +101,7 @@ target | notes [`riscv64gc-unknown-linux-gnu`](platform-support/riscv64gc-unknown-linux-gnu.md) | RISC-V Linux (kernel 4.20, glibc 2.29) [`riscv64gc-unknown-linux-musl`](platform-support/riscv64gc-unknown-linux-musl.md) | RISC-V Linux (kernel 4.20, musl 1.2.3) [`s390x-unknown-linux-gnu`](platform-support/s390x-unknown-linux-gnu.md) | S390x Linux (kernel 3.2, glibc 2.17) -`x86_64-unknown-freebsd` | 64-bit FreeBSD +[`x86_64-unknown-freebsd`](platform-support/freebsd.md) | 64-bit amd64 FreeBSD `x86_64-unknown-illumos` | illumos `x86_64-unknown-linux-musl` | 64-bit Linux with musl 1.2.3 [`x86_64-unknown-netbsd`](platform-support/netbsd.md) | NetBSD/amd64 @@ -167,7 +167,7 @@ target | std | notes `i586-unknown-linux-musl` | ✓ | 32-bit Linux w/o SSE, musl 1.2.3 [^x86_32-floats-x87] [`i686-linux-android`](platform-support/android.md) | ✓ | 32-bit x86 Android [^x86_32-floats-return-ABI] [`i686-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | 32-bit x86 MinGW (Windows 10+), LLVM ABI [^x86_32-floats-return-ABI] -`i686-unknown-freebsd` | ✓ | 32-bit FreeBSD [^x86_32-floats-return-ABI] +[`i686-unknown-freebsd`](platform-support/freebsd.md) | ✓ | 32-bit x86 FreeBSD [^x86_32-floats-return-ABI] `i686-unknown-linux-musl` | ✓ | 32-bit Linux with musl 1.2.3 [^x86_32-floats-return-ABI] [`i686-unknown-uefi`](platform-support/unknown-uefi.md) | ? | 32-bit UEFI [`loongarch64-unknown-none`](platform-support/loongarch-none.md) | * | LoongArch64 Bare-metal (LP64D ABI) @@ -259,7 +259,7 @@ target | std | host | notes [`aarch64-unknown-teeos`](platform-support/aarch64-unknown-teeos.md) | ? | | ARM64 TEEOS | [`aarch64-unknown-nto-qnx700`](platform-support/nto-qnx.md) | ? | | ARM64 QNX Neutrino 7.0 RTOS | [`aarch64-unknown-nto-qnx710`](platform-support/nto-qnx.md) | ✓ | | ARM64 QNX Neutrino 7.1 RTOS | -`aarch64-unknown-freebsd` | ✓ | ✓ | ARM64 FreeBSD +[`aarch64-unknown-freebsd`](platform-support/freebsd.md) | ✓ | ✓ | ARM64 FreeBSD [`aarch64-unknown-hermit`](platform-support/hermit.md) | ✓ | | ARM64 Hermit `aarch64-unknown-illumos` | ✓ | ✓ | ARM64 illumos `aarch64-unknown-linux-gnu_ilp32` | ✓ | ✓ | ARM64 Linux (ILP32 ABI) @@ -278,14 +278,14 @@ target | std | host | notes `armv4t-unknown-linux-gnueabi` | ? | | Armv4T Linux [`armv5te-none-eabi`](platform-support/armv5te-none-eabi.md) | * | | Bare Armv5TE `armv5te-unknown-linux-uclibceabi` | ? | | Armv5TE Linux with uClibc -`armv6-unknown-freebsd` | ✓ | ✓ | Armv6 FreeBSD +[`armv6-unknown-freebsd`](platform-support/freebsd.md) | ✓ | ✓ | Armv6 FreeBSD [`armv6-unknown-netbsd-eabihf`](platform-support/netbsd.md) | ✓ | ✓ | Armv6 NetBSD w/hard-float [`armv6k-nintendo-3ds`](platform-support/armv6k-nintendo-3ds.md) | ? | | Armv6k Nintendo 3DS, Horizon (Requires devkitARM toolchain) [`armv7-rtems-eabihf`](platform-support/armv7-rtems-eabihf.md) | ? | | RTEMS OS for ARM BSPs [`armv7-sony-vita-newlibeabihf`](platform-support/armv7-sony-vita-newlibeabihf.md) | ✓ | | Armv7-A Cortex-A9 Sony PlayStation Vita (requires VITASDK toolchain) [`armv7-unknown-linux-uclibceabi`](platform-support/armv7-unknown-linux-uclibceabi.md) | ✓ | ✓ | Armv7-A Linux with uClibc, softfloat [`armv7-unknown-linux-uclibceabihf`](platform-support/armv7-unknown-linux-uclibceabihf.md) | ✓ | ? 
| Armv7-A Linux with uClibc, hardfloat -`armv7-unknown-freebsd` | ✓ | ✓ | Armv7-A FreeBSD +[`armv7-unknown-freebsd`](platform-support/freebsd.md) | ✓ | ✓ | Armv7-A FreeBSD [`armv7-unknown-netbsd-eabihf`](platform-support/netbsd.md) | ✓ | ✓ | Armv7-A NetBSD w/hard-float [`armv7-unknown-trusty`](platform-support/trusty.md) | ? | | [`armv7-wrs-vxworks-eabihf`](platform-support/vxworks.md) | ✓ | | Armv7-A for VxWorks @@ -344,9 +344,9 @@ target | std | host | notes [`powerpc-unknown-openbsd`](platform-support/powerpc-unknown-openbsd.md) | * | | [`powerpc-wrs-vxworks-spe`](platform-support/vxworks.md) | ✓ | | [`powerpc-wrs-vxworks`](platform-support/vxworks.md) | ✓ | | -`powerpc64-unknown-freebsd` | ✓ | ✓ | PPC64 FreeBSD (ELFv2) -`powerpc64le-unknown-freebsd` | ✓ | ✓ | PPC64LE FreeBSD -`powerpc-unknown-freebsd` | ? | | PowerPC FreeBSD +[`powerpc64-unknown-freebsd`](platform-support/freebsd.md) | ✓ | ✓ | PPC64 FreeBSD (ELFv2) +[`powerpc64le-unknown-freebsd`](platform-support/freebsd.md) | ✓ | ✓ | PPC64LE FreeBSD +[`powerpc-unknown-freebsd`](platform-support/freebsd.md) | ? | | PowerPC FreeBSD `powerpc64-unknown-linux-musl` | ? | | 64-bit PowerPC Linux with musl 1.2.3 [`powerpc64-wrs-vxworks`](platform-support/vxworks.md) | ✓ | | [`powerpc64-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | OpenBSD/powerpc64 diff --git a/src/doc/rustc/src/platform-support/freebsd.md b/src/doc/rustc/src/platform-support/freebsd.md new file mode 100644 index 00000000000..9d34d364920 --- /dev/null +++ b/src/doc/rustc/src/platform-support/freebsd.md @@ -0,0 +1,56 @@ +# \*-unknown-freebsd + +**Tier: 2/3** + +[FreeBSD] multi-platform 4.4BSD-based UNIX-like operating system. + +## Target maintainers + +- Alan Somers `asomers@FreeBSD.org`, https://github.com/asomers +- Mikael Urankar `mikael@FreeBSD.org`, https://github.com/MikaelUrankar + +## Requirements + +The `x86_64-unknown-freebsd` target is Tier 2 with host tools. +`i686-unknown-freebsd` is Tier 2 without host tools. Other targets are Tier 3. +See [platform-support.md](../platform-support.md) for the full list. + +We commit that rustc will run on all currently supported releases of +[FreeBSD][supported-releases] . EoL releases may be supported for a time, too. +The same guarantees apply for the standard library and the libc crate. + +Specific release support matrix, as of Rust 1.82.0: + +| FreeBSD Release | rustc | std | libc | +| --------------- | -------- | -------- | ------- | +| 10 | < 1.78.0 | ? | ? | +| 11 | < 1.78.0 | < 1.78.0 | current | +| 12+ | current | current | current | + +`extern "C"` uses the official calling convention of the respective +architectures. + +FreeBSD OS binaries use the ELF file format. + +## Building Rust programs + +The `x86_64-unknown-freebsd` and `i686-unknown-freebsd` artifacts are +distributed by the rust project and may be installed with rustup. Other +targets are built by the ports system and may be installed with +[pkg(7)][pkg] or [ports(7)][ports]. + +By default the `i686-unknown-freebsd` target uses SSE2 instructions. To build +code that does not require SSE2, build lang/rust from [ports][ports] and +disable the `SSE2` option at build time. That will produce non-compliant +behavior. See [issue #114479][x86-32-float-issue]. + +## Testing + +The Rust test suite can be run natively. It can also be run from the FreeBSD +ports tree with the `make test` command from within the lang/rust directory. 
+ +[FreeBSD]: https://www.FreeBSD.org/ +[supported-releases]: https://www.freebsd.org/security/#sup +[ports]: https://man.freebsd.org/cgi/man.cgi?query=ports +[pkg]: https://man.freebsd.org/cgi/man.cgi?query=pkg +[x86-32-float-issue]: https://github.com/rust-lang/rust/issues/114479 diff --git a/src/doc/rustc/src/platform-support/pc-windows-gnullvm.md b/src/doc/rustc/src/platform-support/pc-windows-gnullvm.md index ed55bcf4f35..89c4cdb2afc 100644 --- a/src/doc/rustc/src/platform-support/pc-windows-gnullvm.md +++ b/src/doc/rustc/src/platform-support/pc-windows-gnullvm.md @@ -17,9 +17,9 @@ Target triples available so far: ## Requirements The easiest way to obtain these targets is cross-compilation, but native build from `x86_64-pc-windows-gnu` is possible with few hacks which I don't recommend. -Std support is expected to be on pair with `*-pc-windows-gnu`. +Std support is expected to be on par with `*-pc-windows-gnu`. -Binaries for this target should be at least on pair with `*-pc-windows-gnu` in terms of requirements and functionality. +Binaries for this target should be at least on par with `*-pc-windows-gnu` in terms of requirements and functionality. Those targets follow Windows calling convention for `extern "C"`. diff --git a/src/doc/unstable-book/src/language-features/arbitrary-self-types-pointers.md b/src/doc/unstable-book/src/language-features/arbitrary-self-types-pointers.md new file mode 100644 index 00000000000..f73bcaffa80 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/arbitrary-self-types-pointers.md @@ -0,0 +1,57 @@ +# `arbitrary_self_types_pointers` + +The tracking issue for this feature is: [#44874] + +[#44874]: https://github.com/rust-lang/rust/issues/44874 + +------------------------ + +This extends the [arbitrary self types] feature to allow methods to +receive `self` by pointer. For example: + +```rust +#![feature(arbitrary_self_types_pointers)] + +struct A; + +impl A { + fn m(self: *const Self) {} +} + +fn main() { + let a = A; + let a_ptr: *const A = &a as *const A; + a_ptr.m(); +} +``` + +In general this is not advised: it's thought to be better practice to wrap +raw pointers in a newtype wrapper which implements the `core::ops::Receiver` +trait, then you need "only" the `arbitrary_self_types` feature. For example: + +```rust +#![feature(arbitrary_self_types)] +#![allow(dead_code)] + +struct A; + +impl A { + fn m(self: Wrapper<Self>) {} // can extract the pointer and do + // what it needs +} + +struct Wrapper<T>(*const T); + +impl<T> core::ops::Receiver for Wrapper<T> { + type Target = T; +} + +fn main() { + let a = A; + let a_ptr: *const A = &a as *const A; + let a_wrapper = Wrapper(a_ptr); + a_wrapper.m(); +} +``` + +[arbitrary self types]: arbitrary-self-types.md diff --git a/src/doc/unstable-book/src/language-features/arbitrary-self-types.md b/src/doc/unstable-book/src/language-features/arbitrary-self-types.md new file mode 100644 index 00000000000..2f8b52d4043 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/arbitrary-self-types.md @@ -0,0 +1,154 @@ +# `arbitrary_self_types` + +The tracking issue for this feature is: [#44874] + +[#44874]: https://github.com/rust-lang/rust/issues/44874 + +------------------------ + +Allows any type implementing `core::ops::Receiver<Target=T>` to be used as the type +of `self` in a method belonging to `T`.
+ +For example, + +```rust +#![feature(arbitrary_self_types)] + +struct A; + +impl A { + fn f(self: SmartPtr<Self>) -> i32 { 1 } // note self type +} + +struct SmartPtr<T>(T); + +impl<T> core::ops::Receiver for SmartPtr<T> { + type Target = T; +} + +fn main() { + let smart_ptr = SmartPtr(A); + assert_eq!(smart_ptr.f(), 1); +} +``` + +The `Receiver` trait has a blanket implementation for all `T: Deref`, so in fact +things like this work too: + +```rust +#![feature(arbitrary_self_types)] + +use std::rc::Rc; + +struct A; + +impl A { + fn f(self: Rc<Self>) -> i32 { 1 } // Rc implements Deref +} + +fn main() { + let smart_ptr = Rc::new(A); + assert_eq!(smart_ptr.f(), 1); +} +``` + +Interestingly, that works even without the `arbitrary_self_types` feature +- but that's because certain types are _effectively_ hard coded, including +`Rc`. ("Hard coding" isn't quite true; they use a lang-item called +`LegacyReceiver` to denote their special-ness in this way). With the +`arbitrary_self_types` feature, their special-ness goes away, and custom +smart pointers can achieve the same. + +## Changes to method lookup + +Method lookup previously used to work by stepping through the `Deref` +chain then using the resulting list of steps in two different ways: + +* To identify types that might contribute methods via their `impl` + blocks (inherent methods) or via traits +* To identify the types that the method receiver (`a` in the above + examples) can be converted to. + +With this feature, these lists are created by instead stepping through +the `Receiver` chain. However, a note is kept about whether the type +can be reached also via the `Deref` chain. + +The full chain (via `Receiver` hops) is used for the first purpose +(identifying relevant `impl` blocks and traits); whereas the shorter +list (reachable via `Deref`) is used for the second purpose. That's +because, to convert the method target (`a` in `a.b()`) to the self +type, Rust may need to be able to use `Deref::deref`. Type conversions, +then, can only proceed as far as the end of the `Deref` chain whereas +the longer `Receiver` chain can be used to explore more places where +useful methods might reside. + +## Types suitable for use as smart pointers + +This feature allows the creation of customised smart pointers - for example +your own equivalent to `Rc` or `Box` with whatever capabilities you like. +Those smart pointers can either implement `Deref` (if it's safe to +create a reference to the referent) or `Receiver` (if it isn't). + +Either way, smart pointer types should mostly _avoid having methods_. +Calling methods on a smart pointer leads to ambiguity about whether you're +aiming for a method on the pointer, or on the referent. + +Best practice is therefore to put smart pointer functionality into +associated functions instead - that's what's done in all the smart pointer +types within Rust's standard library which implement `Receiver`. + +If you choose to add any methods to your smart pointer type, your users +may run into errors from deshadowing, as described in the next section. 
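To make the "associated functions, not methods" advice above concrete, here is a minimal sketch (the `MyPtr` type and `as_raw` function are hypothetical, not part of the standard library): a smart pointer that implements `Receiver` exposes its own functionality as an associated function, so it can never collide with a method on the pointee.

```rust
#![feature(arbitrary_self_types)]

struct A;

impl A {
    fn f(self: MyPtr<Self>) -> i32 { 1 }
}

struct MyPtr<T>(*const T);

impl<T> core::ops::Receiver for MyPtr<T> {
    type Target = T;
}

impl<T> MyPtr<T> {
    // An associated function, not a method: callers write `MyPtr::as_raw(&p)`,
    // so it cannot shadow (or be shadowed by) a method on the pointee `T`.
    fn as_raw(this: &MyPtr<T>) -> *const T {
        this.0
    }
}

fn main() {
    let a = A;
    let p = MyPtr(&a as *const A);
    assert!(!MyPtr::as_raw(&p).is_null());
    assert_eq!(p.f(), 1); // method lookup still reaches `A::f` via `Receiver`
}
```

This mirrors how the standard library exposes functionality such as `Rc::strong_count` as an associated function rather than a method.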
+ +## Avoiding shadowing + +With or without this feature, Rust emits an error if it finds two method +candidates, like this: + +```rust,compile_fail +use std::pin::Pin; +use std::pin::pin; + +struct A; + +impl A { + fn get_ref(self: Pin<&A>) {} +} + +fn main() { + let pinned_a: Pin<&A> = pin!(A).as_ref(); + let pinned_a: Pin<&A> = pinned_a.as_ref(); + pinned_a.get_ref(); // error[E0034]: multiple applicable items in scope +} +``` + +(this is why Rust's smart pointers are mostly carefully designed to avoid +having methods at all, and shouldn't add new methods in future.) + +With `arbitrary_self_types`, we take care to spot some other kinds of +conflict: + +```rust,compile_fail +#![feature(arbitrary_self_types)] + +use std::pin::Pin; +use std::pin::pin; + +struct A; + +impl A { + fn get_ref(self: &Pin<&A>) {} // note &Pin +} + +fn main() { + let pinned_a: Pin<&mut A> = pin!(A); + let pinned_a: Pin<&A> = pinned_a.as_ref(); + pinned_a.get_ref(); +} +``` + +This is to guard against the case where an inner (referent) type has a +method of a given name, taking the smart pointer by reference, and then +the smart pointer implementer adds a similar method taking self by value. +As noted in the previous section, the safe option is simply +not to add methods to smart pointers, and then these errors can't occur. diff --git a/src/doc/unstable-book/src/language-features/coverage-attribute.md b/src/doc/unstable-book/src/language-features/coverage-attribute.md new file mode 100644 index 00000000000..0a9bd07de07 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/coverage-attribute.md @@ -0,0 +1,30 @@ +# `coverage_attribute` + +The tracking issue for this feature is: [#84605] + +[#84605]: https://github.com/rust-lang/rust/issues/84605 + +--- + +The `coverage` attribute can be used to selectively disable coverage +instrumentation in an annotated function. This might be useful to: + +- Avoid instrumentation overhead in a performance critical function +- Avoid generating coverage for a function that is not meant to be executed, + but still target 100% coverage for the rest of the program. + +## Example + +```rust +#![feature(coverage_attribute)] + +// `foo()` will get coverage instrumentation (by default) +fn foo() { + // ... +} + +#[coverage(off)] +fn bar() { + // ... +} +``` diff --git a/src/etc/completions/x.fish b/src/etc/completions/x.fish new file mode 100644 index 00000000000..f0927183c07 --- /dev/null +++ b/src/etc/completions/x.fish @@ -0,0 +1,672 @@ +# Print an optspec for argparse to handle cmd's options that are independent of any subcommand. +function __fish_x_global_optspecs + string join \n v/verbose i/incremental config= build-dir= build= host= target= exclude= skip= include-default-paths rustc-error-format= on-fail= dry-run dump-bootstrap-shims stage= keep-stage= keep-stage-std= src= j/jobs= warnings= error-format= json-output color= bypass-bootstrap-lock rust-profile-generate= rust-profile-use= llvm-profile-use= llvm-profile-generate enable-bolt-settings skip-stage0-validation reproducible-artifact= set= h/help +end + +function __fish_x_needs_command + # Figure out if the current invocation already has a command. + set -l cmd (commandline -opc) + set -e cmd[1] + argparse -s (__fish_x_global_optspecs) -- $cmd 2>/dev/null + or return + if set -q argv[1] + # Also print the command, so this can be used to figure out what it is. 
+ echo $argv[1] + return 1 + end + return 0 +end + +function __fish_x_using_subcommand + set -l cmd (__fish_x_needs_command) + test -z "$cmd" + and return 1 + contains -- $cmd[1] $argv +end + +complete -c x -n "__fish_x_needs_command" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_needs_command" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_needs_command" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_needs_command" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_needs_command" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_needs_command" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_needs_command" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_needs_command" -l rustc-error-format -r -f +complete -c x -n "__fish_x_needs_command" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_needs_command" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_needs_command" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_needs_command" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_needs_command" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_needs_command" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_needs_command" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_needs_command" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_needs_command" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_needs_command" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_needs_command" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_needs_command" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_needs_command" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_needs_command" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_needs_command" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_needs_command" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_needs_command" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_needs_command" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_needs_command" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_needs_command" -l json-output -d 'use 
message-format=json' +complete -c x -n "__fish_x_needs_command" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_needs_command" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_needs_command" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_needs_command" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_needs_command" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_needs_command" -a "build" -d 'Compile either the compiler or libraries' +complete -c x -n "__fish_x_needs_command" -a "check" -d 'Compile either the compiler or libraries, using cargo check' +complete -c x -n "__fish_x_needs_command" -a "clippy" -d 'Run Clippy (uses rustup/cargo-installed clippy binary)' +complete -c x -n "__fish_x_needs_command" -a "fix" -d 'Run cargo fix' +complete -c x -n "__fish_x_needs_command" -a "fmt" -d 'Run rustfmt' +complete -c x -n "__fish_x_needs_command" -a "doc" -d 'Build documentation' +complete -c x -n "__fish_x_needs_command" -a "test" -d 'Build and run some test suites' +complete -c x -n "__fish_x_needs_command" -a "miri" -d 'Build and run some test suites *in Miri*' +complete -c x -n "__fish_x_needs_command" -a "bench" -d 'Build and run some benchmarks' +complete -c x -n "__fish_x_needs_command" -a "clean" -d 'Clean out build directories' +complete -c x -n "__fish_x_needs_command" -a "dist" -d 'Build distribution artifacts' +complete -c x -n "__fish_x_needs_command" -a "install" -d 'Install distribution artifacts' +complete -c x -n "__fish_x_needs_command" -a "run" -d 'Run tools contained in this repository' +complete -c x -n "__fish_x_needs_command" -a "setup" -d 'Set up the environment for development' +complete -c x -n "__fish_x_needs_command" -a "suggest" -d 'Suggest a subset of tests to run, based on modified files' +complete -c x -n "__fish_x_needs_command" -a "vendor" -d 'Vendor dependencies' +complete -c x -n "__fish_x_needs_command" -a "perf" -d 'Perform profiling and benchmarking of the compiler using the `rustc-perf-wrapper` tool' +complete -c x -n "__fish_x_using_subcommand build" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand build" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand build" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand build" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand build" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand build" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand build" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand 
build" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand build" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand build" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand build" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand build" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand build" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand build" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand build" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand build" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand build" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand build" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand build" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand build" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand build" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand build" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand build" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand build" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand build" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand build" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand build" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand check" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand check" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand check" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand check" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand check" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand check" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand check" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand check" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand check" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand check" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand check" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand check" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand check" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand check" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand check" -l all-targets -d 'Check all 
targets' +complete -c x -n "__fish_x_using_subcommand check" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand check" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand check" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand check" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand check" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand check" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand check" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand check" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand check" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand check" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand check" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand clippy" -s A -d 'clippy lints to allow' -r +complete -c x -n "__fish_x_using_subcommand clippy" -s D -d 'clippy lints to deny' -r +complete -c x -n "__fish_x_using_subcommand clippy" -s W -d 'clippy lints to warn on' -r +complete -c x -n "__fish_x_using_subcommand clippy" -s F -d 'clippy lints to forbid' -r +complete -c x -n "__fish_x_using_subcommand clippy" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand clippy" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand clippy" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand clippy" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand clippy" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand clippy" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand clippy" -s j -l jobs -d 
'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand clippy" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand clippy" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand clippy" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand clippy" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand clippy" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand clippy" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand clippy" -l fix +complete -c x -n "__fish_x_using_subcommand clippy" -l allow-dirty +complete -c x -n "__fish_x_using_subcommand clippy" -l allow-staged +complete -c x -n "__fish_x_using_subcommand clippy" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand clippy" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand clippy" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand clippy" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand clippy" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand clippy" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand clippy" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand clippy" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand clippy" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand clippy" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand clippy" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand fix" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand fix" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand fix" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand fix" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand fix" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand fix" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand fix" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand fix" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand fix" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand fix" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand fix" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand fix" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand fix" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand fix" -s v -l verbose -d 'use verbose output (-vv for very verbose)' 
+complete -c x -n "__fish_x_using_subcommand fix" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand fix" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand fix" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand fix" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand fix" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand fix" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand fix" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand fix" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand fix" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand fix" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand fmt" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand fmt" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand fmt" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand fmt" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand fmt" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand fmt" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand fmt" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand fmt" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n 
"__fish_x_using_subcommand fmt" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand fmt" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand fmt" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand fmt" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand fmt" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand fmt" -l check -d 'check formatting instead of applying' +complete -c x -n "__fish_x_using_subcommand fmt" -l all -d 'apply to all appropriate files, not just those that have been modified' +complete -c x -n "__fish_x_using_subcommand fmt" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand fmt" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand fmt" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand fmt" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand fmt" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand fmt" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand fmt" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand fmt" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand fmt" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand fmt" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand fmt" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand doc" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand doc" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand doc" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand doc" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand doc" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand doc" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' 
-r -f +complete -c x -n "__fish_x_using_subcommand doc" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand doc" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand doc" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand doc" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand doc" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand doc" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand doc" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand doc" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand doc" -l open -d 'open the docs in a browser' +complete -c x -n "__fish_x_using_subcommand doc" -l json -d 'render the documentation in JSON format in addition to the usual HTML format' +complete -c x -n "__fish_x_using_subcommand doc" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand doc" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand doc" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand doc" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand doc" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand doc" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand doc" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand doc" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand doc" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand doc" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand doc" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)' -r +complete -c x -n "__fish_x_using_subcommand test" -l compiletest-rustc-args -d 'extra options to pass the compiler when running compiletest tests' -r +complete -c x -n "__fish_x_using_subcommand test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)' -r +complete -c x -n "__fish_x_using_subcommand test" -l compare-mode -d 'mode describing what file the actual ui output will be compared to' -r +complete -c x -n "__fish_x_using_subcommand test" -l pass -d 'force {check,build,run}-pass tests to this mode' -r +complete -c x -n "__fish_x_using_subcommand test" -l run -d 'whether to execute run-* tests' -r +complete -c x -n "__fish_x_using_subcommand test" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand test" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand test" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand test" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand test" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand test" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand test" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand test" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand test" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand test" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand test" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand test" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand test" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand 
test" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand test" -l no-fail-fast -d 'run all tests regardless of failure' +complete -c x -n "__fish_x_using_subcommand test" -l no-doc -d 'do not run doc tests' +complete -c x -n "__fish_x_using_subcommand test" -l doc -d 'only run doc tests' +complete -c x -n "__fish_x_using_subcommand test" -l bless -d 'whether to automatically update stderr/stdout files' +complete -c x -n "__fish_x_using_subcommand test" -l force-rerun -d 'rerun tests even if the inputs are unchanged' +complete -c x -n "__fish_x_using_subcommand test" -l only-modified -d 'only run tests that result has been changed' +complete -c x -n "__fish_x_using_subcommand test" -l rustfix-coverage -d 'enable this to generate a Rustfix coverage file, which is saved in `/<build_base>/rustfix_missing_coverage.txt`' +complete -c x -n "__fish_x_using_subcommand test" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand test" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand test" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand test" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand test" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand test" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand test" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand test" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand test" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand test" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand test" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand miri" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)' -r +complete -c x -n "__fish_x_using_subcommand miri" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand miri" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand miri" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand miri" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand miri" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand miri" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand miri" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand miri" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand miri" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand miri" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand miri" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand miri" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand miri" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand miri" -l no-fail-fast -d 'run all tests regardless of failure' +complete -c x -n "__fish_x_using_subcommand miri" -l no-doc -d 'do not run doc tests' +complete -c x -n "__fish_x_using_subcommand miri" -l doc -d 'only run doc tests' +complete -c x -n "__fish_x_using_subcommand miri" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand miri" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand miri" -l include-default-paths -d 'include default paths in addition to 
the provided ones' +complete -c x -n "__fish_x_using_subcommand miri" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand miri" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand miri" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand miri" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand miri" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand miri" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand miri" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand miri" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand bench" -l test-args -r +complete -c x -n "__fish_x_using_subcommand bench" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand bench" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand bench" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand bench" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand bench" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand bench" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand bench" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand bench" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand bench" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F 
+complete -c x -n "__fish_x_using_subcommand bench" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand bench" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand bench" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand bench" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand bench" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand bench" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand bench" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand bench" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand bench" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand bench" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand bench" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand bench" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand bench" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand bench" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand bench" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand clean" -l stage -d 'Clean a specific stage without touching other artifacts. 
By default, every stage is cleaned if this option is not used' -r +complete -c x -n "__fish_x_using_subcommand clean" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand clean" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand clean" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand clean" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand clean" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand clean" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand clean" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand clean" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand clean" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand clean" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand clean" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand clean" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand clean" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand clean" -l all -d 'Clean the entire build directory (not used by default)' +complete -c x -n "__fish_x_using_subcommand clean" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand clean" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand clean" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand clean" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand clean" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand 
clean" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand clean" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand clean" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand clean" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand clean" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand clean" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand dist" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand dist" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand dist" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand dist" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand dist" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand dist" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand dist" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand dist" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand dist" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand dist" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand dist" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand dist" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible 
artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand dist" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand dist" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand dist" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand dist" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand dist" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand dist" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand dist" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand dist" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand dist" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand dist" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand dist" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand dist" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand install" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand install" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand install" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand install" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand install" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand install" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand install" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand install" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f 
-a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand install" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand install" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand install" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand install" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand install" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand install" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand install" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand install" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand install" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand install" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand install" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand install" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand install" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand install" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand install" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand install" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand install" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand install" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand run" -l args -d 'arguments for the tool' -r +complete -c x -n "__fish_x_using_subcommand run" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand run" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand run" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand run" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand run" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand run" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand run" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand run" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand run" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand run" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand run" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand run" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand run" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand run" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand run" -l set -d 'override options in config.toml' -r -f +complete -c x -n 
"__fish_x_using_subcommand run" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand run" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand run" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand run" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand run" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand run" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand run" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand run" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand run" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand run" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand run" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand setup" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand setup" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand setup" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand setup" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand setup" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand setup" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand setup" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand setup" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand 
setup" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand setup" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand setup" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand setup" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand setup" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand setup" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand setup" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand setup" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand setup" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand setup" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand setup" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand setup" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand setup" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand setup" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand setup" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand setup" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand setup" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand suggest" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand suggest" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand suggest" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand suggest" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand suggest" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand suggest" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n 
"__fish_x_using_subcommand suggest" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand suggest" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand suggest" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand suggest" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand suggest" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand suggest" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand suggest" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand suggest" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand suggest" -l run -d 'run suggested tests' +complete -c x -n "__fish_x_using_subcommand suggest" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand suggest" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand suggest" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand suggest" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand suggest" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand suggest" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand suggest" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand suggest" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand suggest" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand suggest" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand suggest" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand vendor" -l sync -d 'Additional `Cargo.toml` to sync and vendor' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand vendor" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand vendor" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand vendor" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand vendor" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand vendor" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand vendor" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n 
"__fish_x_using_subcommand vendor" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand vendor" -l versioned-dirs -d 'Always include version in subdir name' +complete -c x -n "__fish_x_using_subcommand vendor" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand vendor" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand vendor" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand vendor" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand vendor" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand vendor" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand vendor" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand vendor" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand vendor" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand vendor" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand vendor" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c x -n "__fish_x_using_subcommand perf" -l config -d 'TOML configuration file for build' -r -F +complete -c x -n "__fish_x_using_subcommand perf" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand perf" -l build -d 'build target of the stage0 compiler' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l host -d 'host targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l target -d 'target targets to build' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l exclude -d 'build paths to exclude' -r -F +complete -c x -n "__fish_x_using_subcommand perf" -l skip -d 'build paths to skip' -r -F +complete -c x -n "__fish_x_using_subcommand perf" -l rustc-error-format -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l on-fail -d 'command to run on failure' -r -f -a "(__fish_complete_command)" +complete -c x -n "__fish_x_using_subcommand perf" -l stage -d 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l keep-stage -d 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" +complete -c x -n "__fish_x_using_subcommand perf" -s j -l jobs -d 'number of jobs to run in parallel' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings 
otherwise, use the default configured behaviour' -r -f -a "{deny\t'',warn\t'',default\t''}" +complete -c x -n "__fish_x_using_subcommand perf" -l error-format -d 'rustc error format' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always\t'',never\t'',auto\t''}" +complete -c x -n "__fish_x_using_subcommand perf" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand perf" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F +complete -c x -n "__fish_x_using_subcommand perf" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F +complete -c x -n "__fish_x_using_subcommand perf" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r +complete -c x -n "__fish_x_using_subcommand perf" -l set -d 'override options in config.toml' -r -f +complete -c x -n "__fish_x_using_subcommand perf" -s v -l verbose -d 'use verbose output (-vv for very verbose)' +complete -c x -n "__fish_x_using_subcommand perf" -s i -l incremental -d 'use incremental compilation' +complete -c x -n "__fish_x_using_subcommand perf" -l include-default-paths -d 'include default paths in addition to the provided ones' +complete -c x -n "__fish_x_using_subcommand perf" -l dry-run -d 'dry run; don\'t build anything' +complete -c x -n "__fish_x_using_subcommand perf" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' +complete -c x -n "__fish_x_using_subcommand perf" -l json-output -d 'use message-format=json' +complete -c x -n "__fish_x_using_subcommand perf" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' +complete -c x -n "__fish_x_using_subcommand perf" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x -n "__fish_x_using_subcommand perf" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x -n "__fish_x_using_subcommand perf" -l skip-stage0-validation -d 'Skip stage0 compiler validation' +complete -c x -n "__fish_x_using_subcommand perf" -s h -l help -d 'Print help (see more with \'--help\')' diff --git a/src/etc/completions/x.ps1 b/src/etc/completions/x.ps1 new file mode 100644 index 00000000000..7cbf0f0d13c --- /dev/null +++ b/src/etc/completions/x.ps1 @@ -0,0 +1,799 @@ + +using namespace System.Management.Automation +using namespace System.Management.Automation.Language + +Register-ArgumentCompleter -Native -CommandName 'x' -ScriptBlock { + param($wordToComplete, $commandAst, $cursorPosition) + + $commandElements = $commandAst.CommandElements + $command = @( + 'x' + for ($i = 1; $i -lt $commandElements.Count; $i++) { + $element = $commandElements[$i] + if ($element -isnot [StringConstantExpressionAst] -or + $element.StringConstantType -ne [StringConstantType]::BareWord -or + $element.Value.StartsWith('-') -or + $element.Value -eq $wordToComplete) { + break + } + $element.Value + }) -join ';' + + $completions = @(switch ($command) { + 'x' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + 
[CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('build', 'build', [CompletionResultType]::ParameterValue, 'Compile either the compiler or libraries') + [CompletionResult]::new('check', 'check', [CompletionResultType]::ParameterValue, 'Compile either the compiler or libraries, using cargo check') + [CompletionResult]::new('clippy', 'clippy', [CompletionResultType]::ParameterValue, 'Run Clippy (uses rustup/cargo-installed clippy binary)') + [CompletionResult]::new('fix', 'fix', [CompletionResultType]::ParameterValue, 'Run cargo fix') + [CompletionResult]::new('fmt', 'fmt', [CompletionResultType]::ParameterValue, 'Run rustfmt') + [CompletionResult]::new('doc', 'doc', [CompletionResultType]::ParameterValue, 'Build documentation') + [CompletionResult]::new('test', 'test', [CompletionResultType]::ParameterValue, 'Build and run some test suites') + [CompletionResult]::new('miri', 'miri', [CompletionResultType]::ParameterValue, 'Build and run some test suites *in Miri*') + [CompletionResult]::new('bench', 'bench', [CompletionResultType]::ParameterValue, 'Build and run some benchmarks') + [CompletionResult]::new('clean', 'clean', [CompletionResultType]::ParameterValue, 'Clean out build directories') + [CompletionResult]::new('dist', 'dist', [CompletionResultType]::ParameterValue, 'Build distribution artifacts') + [CompletionResult]::new('install', 'install', [CompletionResultType]::ParameterValue, 'Install distribution artifacts') + [CompletionResult]::new('run', 'run', [CompletionResultType]::ParameterValue, 'Run tools contained in this repository') + [CompletionResult]::new('setup', 'setup', [CompletionResultType]::ParameterValue, 'Set up the environment for development') + [CompletionResult]::new('suggest', 'suggest', [CompletionResultType]::ParameterValue, 'Suggest a subset of tests to run, based on modified files') + [CompletionResult]::new('vendor', 'vendor', [CompletionResultType]::ParameterValue, 'Vendor dependencies') + [CompletionResult]::new('perf', 'perf', [CompletionResultType]::ParameterValue, 'Perform profiling and benchmarking of the compiler using the `rustc-perf-wrapper` tool') + break + } + 'x;build' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target 
targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build 
anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;check' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', 
[CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--all-targets', '--all-targets', [CompletionResultType]::ParameterName, 'Check all targets') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;clippy' { + [CompletionResult]::new('-A', '-A ', [CompletionResultType]::ParameterName, 'clippy lints to allow') + [CompletionResult]::new('-D', '-D ', [CompletionResultType]::ParameterName, 'clippy lints to deny') + [CompletionResult]::new('-W', '-W ', [CompletionResultType]::ParameterName, 'clippy lints to warn on') + [CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'clippy lints to forbid') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the 
default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--fix', '--fix', [CompletionResultType]::ParameterName, 'fix') + [CompletionResult]::new('--allow-dirty', '--allow-dirty', [CompletionResultType]::ParameterName, 'allow-dirty') + [CompletionResult]::new('--allow-staged', '--allow-staged', [CompletionResultType]::ParameterName, 'allow-staged') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;fix' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate 
PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;fmt' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate 
PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--check', '--check', [CompletionResultType]::ParameterName, 'check formatting instead of applying') + [CompletionResult]::new('--all', '--all', [CompletionResultType]::ParameterName, 'apply to all appropriate files, not just those that have been modified') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;doc' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate 
PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--open', '--open', [CompletionResultType]::ParameterName, 'open the docs in a browser') + [CompletionResult]::new('--json', '--json', [CompletionResultType]::ParameterName, 'render the documentation in JSON format in addition to the usual HTML format') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;test' { + [CompletionResult]::new('--test-args', '--test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)') + [CompletionResult]::new('--compiletest-rustc-args', '--compiletest-rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running compiletest tests') + [CompletionResult]::new('--extra-checks', '--extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)') + [CompletionResult]::new('--compare-mode', '--compare-mode', [CompletionResultType]::ParameterName, 'mode describing what file the actual ui output will be compared to') + [CompletionResult]::new('--pass', '--pass', [CompletionResultType]::ParameterName, 'force {check,build,run}-pass tests to this mode') + [CompletionResult]::new('--run', '--run', [CompletionResultType]::ParameterName, 'whether to execute run-* tests') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', 
[CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--no-fail-fast', '--no-fail-fast', [CompletionResultType]::ParameterName, 'run all tests regardless of failure') + [CompletionResult]::new('--no-doc', '--no-doc', [CompletionResultType]::ParameterName, 'do not run doc tests') + [CompletionResult]::new('--doc', '--doc', [CompletionResultType]::ParameterName, 'only run doc tests') + [CompletionResult]::new('--bless', '--bless', [CompletionResultType]::ParameterName, 'whether to automatically update stderr/stdout files') + [CompletionResult]::new('--force-rerun', '--force-rerun', [CompletionResultType]::ParameterName, 'rerun tests even if the inputs are unchanged') + [CompletionResult]::new('--only-modified', '--only-modified', [CompletionResultType]::ParameterName, 'only run tests that result has been changed') + [CompletionResult]::new('--rustfix-coverage', '--rustfix-coverage', [CompletionResultType]::ParameterName, 'enable this to generate a Rustfix coverage file, which is saved in `/<build_base>/rustfix_missing_coverage.txt`') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;miri' { + [CompletionResult]::new('--test-args', '--test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', 
[CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--no-fail-fast', '--no-fail-fast', [CompletionResultType]::ParameterName, 'run all tests regardless of failure') + [CompletionResult]::new('--no-doc', '--no-doc', [CompletionResultType]::ParameterName, 'do not run doc tests') + [CompletionResult]::new('--doc', '--doc', [CompletionResultType]::ParameterName, 'only run doc tests') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;bench' { + [CompletionResult]::new('--test-args', '--test-args', [CompletionResultType]::ParameterName, 'test-args') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + 
[CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;clean' { + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'Clean a specific stage without touching other artifacts. 
By default, every stage is cleaned if this option is not used') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--all', '--all', [CompletionResultType]::ParameterName, 'Clean the entire build directory (not used by default)') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', 
[CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;dist' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', 
'--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;install' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 
'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;run' { + [CompletionResult]::new('--args', '--args', [CompletionResultType]::ParameterName, 'arguments for the tool') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + 
[CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;setup' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 
'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;suggest' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 
'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--run', '--run', [CompletionResultType]::ParameterName, 'run suggested tests') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;vendor' { + [CompletionResult]::new('--sync', '--sync', [CompletionResultType]::ParameterName, 'Additional `Cargo.toml` to sync and vendor') + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc 
output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('--versioned-dirs', '--versioned-dirs', [CompletionResultType]::ParameterName, 'Always include version in subdir name') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + 'x;perf' { + [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') + [CompletionResult]::new('--build-dir', '--build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') + [CompletionResult]::new('--build', '--build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') + [CompletionResult]::new('--host', '--host', [CompletionResultType]::ParameterName, 'host targets to build') + [CompletionResult]::new('--target', '--target', [CompletionResultType]::ParameterName, 'target targets to build') + [CompletionResult]::new('--exclude', '--exclude', [CompletionResultType]::ParameterName, 'build paths to exclude') + [CompletionResult]::new('--skip', '--skip', [CompletionResultType]::ParameterName, 'build paths to skip') + [CompletionResult]::new('--rustc-error-format', '--rustc-error-format', [CompletionResultType]::ParameterName, 'rustc-error-format') + [CompletionResult]::new('--on-fail', '--on-fail', [CompletionResultType]::ParameterName, 'command to run on failure') + [CompletionResult]::new('--stage', '--stage', [CompletionResultType]::ParameterName, 'stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)') + [CompletionResult]::new('--keep-stage', '--keep-stage', [CompletionResultType]::ParameterName, 'stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--keep-stage-std', '--keep-stage-std', [CompletionResultType]::ParameterName, 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)') + [CompletionResult]::new('--src', '--src', [CompletionResultType]::ParameterName, 'path to the root of the rust checkout') + [CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--jobs', '--jobs', [CompletionResultType]::ParameterName, 'number of jobs to run in parallel') + [CompletionResult]::new('--warnings', '--warnings', [CompletionResultType]::ParameterName, 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour') + [CompletionResult]::new('--error-format', '--error-format', [CompletionResultType]::ParameterName, 'rustc error format') + [CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'whether to use color in cargo and rustc output') + [CompletionResult]::new('--rust-profile-generate', '--rust-profile-generate', [CompletionResultType]::ParameterName, 
'generate PGO profile with rustc build') + [CompletionResult]::new('--rust-profile-use', '--rust-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for rustc build') + [CompletionResult]::new('--llvm-profile-use', '--llvm-profile-use', [CompletionResultType]::ParameterName, 'use PGO profile for LLVM build') + [CompletionResult]::new('--reproducible-artifact', '--reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive') + [CompletionResult]::new('--set', '--set', [CompletionResultType]::ParameterName, 'override options in config.toml') + [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)') + [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--incremental', '--incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') + [CompletionResult]::new('--include-default-paths', '--include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') + [CompletionResult]::new('--dry-run', '--dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', '--dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') + [CompletionResult]::new('--json-output', '--json-output', [CompletionResultType]::ParameterName, 'use message-format=json') + [CompletionResult]::new('--bypass-bootstrap-lock', '--bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') + [CompletionResult]::new('--llvm-profile-generate', '--llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', '--enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', '--skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') + [CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + [CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') + break + } + }) + + $completions.Where{ $_.CompletionText -like "$wordToComplete*" } | + Sort-Object -Property ListItemText +} diff --git a/src/etc/completions/x.sh b/src/etc/completions/x.sh new file mode 100644 index 00000000000..a4cf80acc30 --- /dev/null +++ b/src/etc/completions/x.sh @@ -0,0 +1,3534 @@ +_x() { + local i cur prev opts cmd + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + prev="${COMP_WORDS[COMP_CWORD-1]}" + cmd="" + opts="" + + for i in ${COMP_WORDS[@]} + do + case "${cmd},${i}" in + ",$1") + cmd="x" + ;; + x,bench) + cmd="x__bench" + ;; + x,build) + cmd="x__build" + ;; + x,check) + cmd="x__check" + ;; + x,clean) + cmd="x__clean" + ;; + x,clippy) + cmd="x__clippy" + ;; + x,dist) + cmd="x__dist" + ;; + x,doc) + cmd="x__doc" + ;; + x,fix) + cmd="x__fix" + ;; + x,fmt) + cmd="x__fmt" + ;; + x,install) + cmd="x__install" + ;; + x,miri) + cmd="x__miri" + ;; + x,perf) + cmd="x__perf" + ;; + x,run) + cmd="x__run" + ;; + x,setup) + cmd="x__setup" + ;; + x,suggest) + cmd="x__suggest" + ;; + x,test) + cmd="x__test" + ;; + x,vendor) + cmd="x__vendor" + ;; + *) + ;; + esac + done + + case "${cmd}" in + x) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]... 
build check clippy fix fmt doc test miri bench clean dist install run setup suggest vendor perf" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__bench) + opts="-v -i -j -h --test-args --verbose --incremental 
--config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --test-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ 
"${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__build) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + 
COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__check) + opts="-v -i -j -h --all-targets --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + 
--rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__clean) + opts="-v -i -j -h --all --stage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --stage) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + 
--warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__clippy) + opts="-A -D -W -F -v -i -j -h --fix --allow-dirty --allow-staged --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + -A) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -D) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -W) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -F) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + 
esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__dist) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n 
"${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__doc) + opts="-v -i -j -h --open --json --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n 
"${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__fix) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- 
"${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__fmt) + opts="-v -i -j -h --check --all --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + 
COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__install) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__miri) + opts="-v -i -j -h --no-fail-fast --test-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip 
--include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --test-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 
0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__perf) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if 
[[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__run) + opts="-v -i -j -h --args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n 
"${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__setup) + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [<PROFILE>|hook|editor|link] [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o 
nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__suggest) + opts="-v -i -j -h --run --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__test) + opts="-v -i -j -h --no-fail-fast --test-args --compiletest-rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode 
--pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --test-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --compiletest-rustc-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --extra-checks) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --compare-mode) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --pass) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --run) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) + COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + 
fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + x__vendor) + opts="-v -i -j -h --sync --versioned-dirs --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --sync) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --config) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --build-dir) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --build) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --host) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --target) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --exclude) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --skip) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --rustc-error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --on-fail) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --keep-stage-std) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --src) + COMPREPLY=() + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o plusdirs + fi + return 0 + ;; + --jobs) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + -j) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --warnings) + COMPREPLY=($(compgen -W "deny warn default" -- "${cur}")) + return 0 + ;; + --error-format) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + --color) 
+ COMPREPLY=($(compgen -W "always never auto" -- "${cur}")) + return 0 + ;; + --rust-profile-generate) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --rust-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --llvm-profile-use) + local oldifs + if [ -n "${IFS+x}" ]; then + oldifs="$IFS" + fi + IFS=$'\n' + COMPREPLY=($(compgen -f "${cur}")) + if [ -n "${oldifs+x}" ]; then + IFS="$oldifs" + fi + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o filenames + fi + return 0 + ;; + --reproducible-artifact) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --set) + COMPREPLY=("${cur}") + if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then + compopt -o nospace + fi + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + esac +} + +if [[ "${BASH_VERSINFO[0]}" -eq 4 && "${BASH_VERSINFO[1]}" -ge 4 || "${BASH_VERSINFO[0]}" -gt 4 ]]; then + complete -F _x -o nosort -o bashdefault -o default x +else + complete -F _x -o bashdefault -o default x +fi diff --git a/src/etc/completions/x.zsh b/src/etc/completions/x.zsh new file mode 100644 index 00000000000..ee6f504f93e --- /dev/null +++ b/src/etc/completions/x.zsh @@ -0,0 +1,934 @@ +#compdef x + +autoload -U is-at-least + +_x() { + typeset -A opt_args + typeset -a _arguments_options + local ret=1 + + if is-at-least 5.2; then + _arguments_options=(-s -S -C) + else + _arguments_options=(-s -C) + fi + + local context curcontext="$curcontext" state line + _arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM 
build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'::paths -- paths for the subcommand:_files' \ +'::free_args -- arguments passed to subcommands:_default' \ +":: :_x_commands" \ +"*::: :->bootstrap" \ +&& ret=0 + case $state in + (bootstrap) + words=($line[3] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:x-command-$line[3]:" + case $line[3] in + (build) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very 
verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(check) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--all-targets[Check all targets]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(clippy) +_arguments "${_arguments_options[@]}" : \ +'*-A+[clippy lints to allow]:LINT:_default' \ +'*-D+[clippy lints to deny]:LINT:_default' \ +'*-W+[clippy lints to warn on]:LINT:_default' \ +'*-F+[clippy lints to forbid]:LINT:_default' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--fix[]' \ +'--allow-dirty[]' \ +'--allow-staged[]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(fix) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(fmt) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--check[check formatting instead of applying]' \ +'--all[apply to all appropriate files, not just those that have been modified]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(doc) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--open[open the docs in a browser]' \ +'--json[render the documentation in JSON format in addition to the usual HTML format]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(test) +_arguments "${_arguments_options[@]}" : \ +'*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS:_default' \ +'*--compiletest-rustc-args=[extra options to pass the compiler when running compiletest tests]:ARGS:_default' \ +'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell)]:EXTRA_CHECKS:_default' \ +'--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE:_default' \ +'--pass=[force {check,build,run}-pass tests to this mode]:check | build | run:_default' \ +'--run=[whether to execute run-* tests]:auto | always | never:_default' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--no-fail-fast[run all tests regardless of failure]' \ +'--no-doc[do not run doc tests]' \ +'--doc[only run doc tests]' \ +'--bless[whether to automatically update stderr/stdout files]' \ +'--force-rerun[rerun tests even if the inputs are unchanged]' \ +'--only-modified[only run tests that result has been changed]' \ +'--rustfix-coverage[enable this to generate a Rustfix coverage file, which is saved in \`/<build_base>/rustfix_missing_coverage.txt\`]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use 
verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(miri) +_arguments "${_arguments_options[@]}" : \ +'*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS:_default' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--no-fail-fast[run all tests regardless of failure]' \ +'--no-doc[do not run doc tests]' \ +'--doc[only run doc tests]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ 
+'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(bench) +_arguments "${_arguments_options[@]}" : \ +'*--test-args=[]:TEST_ARGS:_default' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(clean) +_arguments "${_arguments_options[@]}" : \ +'--stage=[Clean a specific stage without touching other artifacts. By default, every stage is cleaned if this option is not used]:N:_default' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--all[Clean the entire build directory (not used by default)]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(dist) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(install) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +'*--args=[arguments for the tool]:ARGS:_default' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(setup) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'::profile -- Either the profile for `config.toml` or another setup action. 
May be omitted to set up interactively:_files' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(suggest) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--run[run suggested tests]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(vendor) +_arguments "${_arguments_options[@]}" : \ +'*--sync=[Additional \`Cargo.toml\` to sync and vendor]:SYNC:_files' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'--versioned-dirs[Always include version in subdir name]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(perf) +_arguments "${_arguments_options[@]}" : \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:' \ +'--host=[host targets to build]:HOST:' \ +'--target=[target targets to build]:TARGET:' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:' \ +'--jobs=[number of jobs to run in parallel]:JOBS:' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT:_default' \ +'*--set=[override options in config.toml]:section.option=value:' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ +'--json-output[use message-format=json]' \ +'--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; + esac + ;; +esac +} + +(( $+functions[_x_commands] )) || +_x_commands() { + local commands; commands=( +'build:Compile either the compiler or libraries' \ +'check:Compile either the compiler or libraries, using cargo check' \ +'clippy:Run Clippy (uses rustup/cargo-installed clippy binary)' \ +'fix:Run cargo fix' \ +'fmt:Run rustfmt' \ +'doc:Build documentation' \ +'test:Build and run some test suites' \ +'miri:Build and run some test suites *in Miri*' \ +'bench:Build and run some benchmarks' \ +'clean:Clean out build directories' \ +'dist:Build distribution artifacts' \ +'install:Install distribution artifacts' \ +'run:Run tools contained in this repository' \ +'setup:Set up the environment for development' \ +'suggest:Suggest a subset of tests to run, based on modified files' \ +'vendor:Vendor dependencies' \ +'perf:Perform profiling and benchmarking of the compiler using the \`rustc-perf-wrapper\` tool' \ + ) + _describe -t commands 'x commands' commands "$@" +} +(( $+functions[_x__bench_commands] )) || +_x__bench_commands() { + local commands; commands=() + _describe -t commands 'x bench commands' commands "$@" +} +(( $+functions[_x__build_commands] )) || +_x__build_commands() { + local commands; commands=() + _describe -t commands 'x build commands' commands "$@" +} +(( $+functions[_x__check_commands] )) || +_x__check_commands() { + local commands; commands=() + _describe -t commands 'x check commands' commands "$@" +} +(( $+functions[_x__clean_commands] )) || +_x__clean_commands() { + local commands; commands=() + _describe -t commands 'x clean commands' commands "$@" +} +(( $+functions[_x__clippy_commands] )) || +_x__clippy_commands() { + local commands; commands=() + _describe -t commands 'x clippy commands' commands "$@" +} +(( $+functions[_x__dist_commands] )) || +_x__dist_commands() { + local commands; commands=() + _describe -t commands 'x dist commands' commands "$@" +} +(( $+functions[_x__doc_commands] )) || +_x__doc_commands() { + local commands; commands=() + _describe -t commands 'x doc commands' commands "$@" +} +(( $+functions[_x__fix_commands] )) || +_x__fix_commands() { + local commands; commands=() + _describe -t commands 'x fix commands' commands "$@" +} +(( $+functions[_x__fmt_commands] )) || +_x__fmt_commands() { + local commands; commands=() + _describe -t commands 'x fmt commands' commands "$@" +} +(( $+functions[_x__install_commands] )) || +_x__install_commands() { + local commands; commands=() + _describe -t commands 'x install commands' commands "$@" +} +(( $+functions[_x__miri_commands] )) || +_x__miri_commands() { + local commands; commands=() + _describe -t commands 'x miri commands' commands "$@" +} +(( $+functions[_x__perf_commands] )) || +_x__perf_commands() { + local commands; commands=() + _describe -t commands 'x perf commands' commands "$@" +} +(( $+functions[_x__run_commands] )) || +_x__run_commands() { + local commands; commands=() + _describe -t commands 'x run commands' commands "$@" +} +(( $+functions[_x__setup_commands] )) || +_x__setup_commands() { + local commands; commands=() + _describe -t 
commands 'x setup commands' commands "$@" +} +(( $+functions[_x__suggest_commands] )) || +_x__suggest_commands() { + local commands; commands=() + _describe -t commands 'x suggest commands' commands "$@" +} +(( $+functions[_x__test_commands] )) || +_x__test_commands() { + local commands; commands=() + _describe -t commands 'x test commands' commands "$@" +} +(( $+functions[_x__vendor_commands] )) || +_x__vendor_commands() { + local commands; commands=() + _describe -t commands 'x vendor commands' commands "$@" +} + +if [ "$funcstack[1]" = "_x" ]; then + _x "$@" +else + compdef _x x +fi diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index 0dfb8a52eba..c59dce185f4 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -226,30 +226,28 @@ impl Cfg { /// `Cfg`. /// /// See `tests::test_simplify_with` for examples. - pub(crate) fn simplify_with(&self, assume: &Cfg) -> Option<Cfg> { + pub(crate) fn simplify_with(&self, assume: &Self) -> Option<Self> { if self == assume { - return None; - } - - if let Cfg::All(a) = self { + None + } else if let Cfg::All(a) = self { let mut sub_cfgs: Vec<Cfg> = if let Cfg::All(b) = assume { a.iter().filter(|a| !b.contains(a)).cloned().collect() } else { a.iter().filter(|&a| a != assume).cloned().collect() }; let len = sub_cfgs.len(); - return match len { + match len { 0 => None, 1 => sub_cfgs.pop(), _ => Some(Cfg::All(sub_cfgs)), - }; - } else if let Cfg::All(b) = assume { - if b.contains(self) { - return None; } + } else if let Cfg::All(b) = assume + && b.contains(self) + { + None + } else { + Some(self.clone()) } - - Some(self.clone()) } } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 9d0c0f687c7..4d46f0e75c8 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -2255,12 +2255,14 @@ pub(crate) fn clean_middle_ty<'tcx>( } } + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binders)"), ty::Closure(..) => panic!("Closure"), ty::CoroutineClosure(..) => panic!("CoroutineClosure"), ty::Coroutine(..) => panic!("Coroutine"), ty::Placeholder(..) => panic!("Placeholder"), ty::CoroutineWitness(..) => panic!("CoroutineWitness"), ty::Infer(..) => panic!("Infer"), + ty::Error(_) => FatalError.raise(), } } diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index c9d1ceb0a91..3c1d0c35bef 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -559,6 +559,7 @@ impl<'tcx> LinkCollector<'_, 'tcx> { | ty::Coroutine(..) | ty::CoroutineWitness(..) | ty::Dynamic(..) + | ty::UnsafeBinder(_) | ty::Param(_) | ty::Bound(..) 
| ty::Placeholder(_) diff --git a/src/tools/cargo b/src/tools/cargo -Subproject 99dff6d77db779716dda9ca3b29c26addd02c1b +Subproject c86f4b3a1b153218e6e50861214b0b4b4e695f2 diff --git a/src/tools/clippy/CHANGELOG.md b/src/tools/clippy/CHANGELOG.md index 61efaa3bf3e..b6033de9350 100644 --- a/src/tools/clippy/CHANGELOG.md +++ b/src/tools/clippy/CHANGELOG.md @@ -5380,6 +5380,7 @@ Released 2018-09-13 [`arc_with_non_send_sync`]: https://rust-lang.github.io/rust-clippy/master/index.html#arc_with_non_send_sync [`arithmetic_side_effects`]: https://rust-lang.github.io/rust-clippy/master/index.html#arithmetic_side_effects [`as_conversions`]: https://rust-lang.github.io/rust-clippy/master/index.html#as_conversions +[`as_pointer_underscore`]: https://rust-lang.github.io/rust-clippy/master/index.html#as_pointer_underscore [`as_ptr_cast_mut`]: https://rust-lang.github.io/rust-clippy/master/index.html#as_ptr_cast_mut [`as_underscore`]: https://rust-lang.github.io/rust-clippy/master/index.html#as_underscore [`assertions_on_constants`]: https://rust-lang.github.io/rust-clippy/master/index.html#assertions_on_constants @@ -5490,6 +5491,7 @@ Released 2018-09-13 [`doc_lazy_continuation`]: https://rust-lang.github.io/rust-clippy/master/index.html#doc_lazy_continuation [`doc_link_with_quotes`]: https://rust-lang.github.io/rust-clippy/master/index.html#doc_link_with_quotes [`doc_markdown`]: https://rust-lang.github.io/rust-clippy/master/index.html#doc_markdown +[`doc_nested_refdefs`]: https://rust-lang.github.io/rust-clippy/master/index.html#doc_nested_refdefs [`double_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_comparisons [`double_must_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_must_use [`double_neg`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_neg @@ -5685,6 +5687,7 @@ Released 2018-09-13 [`lines_filter_map_ok`]: https://rust-lang.github.io/rust-clippy/master/index.html#lines_filter_map_ok [`linkedlist`]: https://rust-lang.github.io/rust-clippy/master/index.html#linkedlist [`lint_groups_priority`]: https://rust-lang.github.io/rust-clippy/master/index.html#lint_groups_priority +[`literal_string_with_formatting_args`]: https://rust-lang.github.io/rust-clippy/master/index.html#literal_string_with_formatting_args [`little_endian_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#little_endian_bytes [`logic_bug`]: https://rust-lang.github.io/rust-clippy/master/index.html#logic_bug [`lossy_float_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#lossy_float_literal @@ -5966,6 +5969,7 @@ Released 2018-09-13 [`repeat_once`]: https://rust-lang.github.io/rust-clippy/master/index.html#repeat_once [`repeat_vec_with_capacity`]: https://rust-lang.github.io/rust-clippy/master/index.html#repeat_vec_with_capacity [`replace_consts`]: https://rust-lang.github.io/rust-clippy/master/index.html#replace_consts +[`repr_packed_without_abi`]: https://rust-lang.github.io/rust-clippy/master/index.html#repr_packed_without_abi [`reserve_after_initialization`]: https://rust-lang.github.io/rust-clippy/master/index.html#reserve_after_initialization [`rest_pat_in_fully_bound_structs`]: https://rust-lang.github.io/rust-clippy/master/index.html#rest_pat_in_fully_bound_structs [`result_expect_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_expect_used @@ -6210,6 +6214,7 @@ Released 2018-09-13 [`allow-comparison-to-zero`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-comparison-to-zero 
[`allow-dbg-in-tests`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-dbg-in-tests [`allow-expect-in-tests`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-expect-in-tests +[`allow-indexing-slicing-in-tests`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-indexing-slicing-in-tests [`allow-mixed-uninlined-format-args`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-mixed-uninlined-format-args [`allow-one-hash-in-raw-strings`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-one-hash-in-raw-strings [`allow-panic-in-tests`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-panic-in-tests diff --git a/src/tools/clippy/book/src/development/common_tools_writing_lints.md b/src/tools/clippy/book/src/development/common_tools_writing_lints.md index 77910917963..c354e8914f5 100644 --- a/src/tools/clippy/book/src/development/common_tools_writing_lints.md +++ b/src/tools/clippy/book/src/development/common_tools_writing_lints.md @@ -37,7 +37,7 @@ impl LateLintPass<'_> for MyStructLint { // Get type of `expr` let ty = cx.typeck_results().expr_ty(expr); // Match its kind to enter its type - match ty.kind { + match ty.kind() { ty::Adt(adt_def, _) if adt_def.is_struct() => println!("Our `expr` is a struct!"), _ => () } diff --git a/src/tools/clippy/book/src/development/infrastructure/backport.md b/src/tools/clippy/book/src/development/infrastructure/backport.md index 6920c4e4656..9526d8af1c9 100644 --- a/src/tools/clippy/book/src/development/infrastructure/backport.md +++ b/src/tools/clippy/book/src/development/infrastructure/backport.md @@ -5,68 +5,108 @@ Backports in Clippy are rare and should be approved by the Clippy team. For example, a backport is done, if a crucial ICE was fixed or a lint is broken to a point, that it has to be disabled, before landing on stable. -Backports are done to the `beta` branch of Clippy. Backports to stable Clippy -releases basically don't exist, since this would require a Rust point release, -which is almost never justifiable for a Clippy fix. +> Note: If you think a PR should be backported you can label it with +> `beta-nominated`. This has to be done before the Thursday the week before the +> release. +## Filtering PRs to backport -## Backport the changes +First, find all labeled PRs using [this filter][beta-accepted-prs]. + +Next, look at each PR individually. There are a few things to check. Those need +some explanation and are quite subjective. Good judgement is required. + +1. **Is the fix worth a backport?** + + This is really subjective. An ICE fix usually is. Moving a lint to a _lower_ + group (from warn- to allow-by-default) usually as well. An FP fix usually not + (on its own). If a backport is done anyway, FP fixes might also be included. + If the PR has a lot of changes, backports must be considered more carefully. + +2. **Is the problem that was fixed by the PR already in `beta`?** + + It could be that the problem that was fixed by the PR hasn't made it to the + `beta` branch of the Rust repo yet. If that's the case, and the fix is + already synced to the Rust repo, the fix doesn't need to be backported, as it + will hit stable together with the commit that introduced the problem. If the + fix PR is not synced yet, the fix PR either needs to be "backported" to the + Rust `master` branch or to `beta` in the next backport cycle. + +3. **Make sure that the fix is on `master` before porting to `beta`** + + The fix must already be synced to the Rust `master` branch. 
Otherwise, the + next `beta` will be missing this fix again. If it is not yet in `master` it + should probably not be backported. If the backport is really important, do an + out-of-cycle sync first. However, the out-of-cycle sync should be small, + because the changes in that sync will get right into `beta`, without being + tested in `nightly` first. + +[beta-accepted-prs]: https://github.com/rust-lang/rust-clippy/issues?q=label%3Abeta-nominated + +## Preparation + +> Note: All commands in this chapter will be run in the Rust clone. + +Follow the instructions in [defining remotes] to define the `clippy-upstream` +remote in the Rust repository. -Backports are done on the beta branch of the Clippy repository. +After that, fetch the remote with ```bash -# Assuming the current directory corresponds to the Clippy repository -$ git checkout beta -$ git checkout -b backport -$ git cherry-pick <SHA> # `<SHA>` is the commit hash of the commit(s), that should be backported -$ git push origin backport +git fetch clippy-upstream master ``` -Now you should test that the backport passes all the tests in the Rust -repository. You can do this with: +Then, switch to the `beta` branch: ```bash -# Assuming the current directory corresponds to the Rust repository -$ git checkout beta -# Make sure to change `your-github-name` to your github name in the following command -$ git subtree pull -p src/tools/clippy https://github.com/<your-github-name>/rust-clippy backport -$ ./x.py test src/tools/clippy +git switch beta +git fetch upstream +git reset --hard upstream/beta ``` -Should the test fail, you can fix Clippy directly in the Rust repository. This -has to be first applied to the Clippy beta branch and then again synced to the -Rust repository, though. The easiest way to do this is: +[defining remotes]: release.md#defining-remotes + +## Backport the changes + +When a PR is merged with the GitHub merge queue, the PR is closed with the message + +> \<PR title\> (#\<PR number\>) + +This commit needs to be backported. To do that, find the `<sha1>` of that commit +and run the following command in the clone of the **Rust repository**: ```bash -# In the Rust repository -$ git diff --patch --relative=src/tools/clippy > clippy.patch -# In the Clippy repository -$ git apply /path/to/clippy.patch -$ git add -u -$ git commit -m "Fix rustup fallout" -$ git push origin backport +git cherry-pick -m 1 `<sha1>` ``` -After this, you can open a PR to the `beta` branch of the Clippy repository. +Do this for all PRs that should be backported. +## Open PR in the Rust repository -## Update Clippy in the Rust Repository +Next, open the PR for the backport. Make sure, the PR is opened towards the +`beta` branch and not the `master` branch. The PR description should look like +this: -This step must be done, **after** the PR of the previous step was merged. +``` +[beta] Clippy backports -After the backport landed in the Clippy repository, the branch has to be synced -back to the beta branch of the Rust repository. +r? @Mark-Simulacrum -```bash -# Assuming the current directory corresponds to the Rust repository -$ git checkout beta -$ git checkout -b clippy_backport -$ git subtree pull -p src/tools/clippy https://github.com/rust-lang/rust-clippy beta -$ git push origin clippy_backport +Backports: +- <Link to the Clippy PR> +- ... + +<Short summary of what is backported and why> ``` -Make sure to test the backport in the Rust repository before opening a PR. This -is done with `./x.py test src/tools/clippy`. 
If that passes all tests, open a PR -to the `beta` branch of the Rust repository. In this PR you should tag the -Clippy team member, that agreed to the backport or the `@rust-lang/clippy` team. -Make sure to add `[beta]` to the title of the PR. +Mark is from the release team and they ultimately have to merge the PR before +branching a new `beta` version. Tag them to take care of the backport. Next, +list all the backports and give a short summary of what's backported and why it is +worth backporting. + +## Relabel backported PRs + +When a PR is backported to Rust `beta`, label the PR with `beta-accepted`. This +will then get picked up when [writing the changelog]. + +[writing the changelog]: changelog_update.md#31-include-beta-accepted-prs diff --git a/src/tools/clippy/book/src/development/infrastructure/release.md b/src/tools/clippy/book/src/development/infrastructure/release.md index 98fabf8e89a..20b870eb69a 100644 --- a/src/tools/clippy/book/src/development/infrastructure/release.md +++ b/src/tools/clippy/book/src/development/infrastructure/release.md @@ -7,112 +7,114 @@ Clippy is released together with stable Rust releases. The dates for these releases can be found at the [Rust Forge]. This document explains the necessary steps to create a Clippy release. -1. [Remerge the `beta` branch](#remerge-the-beta-branch) -2. [Update the `beta` branch](#update-the-beta-branch) -3. [Find the Clippy commit](#find-the-clippy-commit) -4. [Tag the stable commit](#tag-the-stable-commit) -5. [Update `CHANGELOG.md`](#update-changelogmd) - -> _NOTE:_ This document is for stable Rust releases, not for point releases. For -> point releases, step 1. and 2. should be enough. +1. [Defining Remotes](#defining-remotes) +1. [Bump Version](#bump-version) +1. [Find the Clippy commit](#find-the-clippy-commit) +1. [Update the `beta` branch](#update-the-beta-branch) +1. [Update the `stable` branch](#update-the-stable-branch) +1. [Tag the stable commit](#tag-the-stable-commit) +1. [Update `CHANGELOG.md`](#update-changelogmd) [Rust Forge]: https://forge.rust-lang.org/ -## Remerge the `beta` branch +## Defining Remotes + +You may want to define the `upstream` remote of the Clippy project to simplify +the following steps. However, this is optional and you can replace `upstream` +with the full URL instead. + +```bash +git remote add upstream git@github.com:rust-lang/rust-clippy +``` -This step is only necessary, if since the last release something was backported -to the beta Rust release. The remerge is then necessary, to make sure that the -Clippy commit, that was used by the now stable Rust release, persists in the -tree of the Clippy repository. +## Bump Version -To find out if this step is necessary run +When a release needs to be done, `cargo test` will fail if the versions in the +`Cargo.toml` are not correct. During that sync, the versions need to be bumped. +This is done by running: ```bash -# Assumes that the local master branch of rust-lang/rust-clippy is up-to-date -$ git fetch upstream -$ git branch master --contains upstream/beta +cargo dev release bump_version ``` -If this command outputs `master`, this step is **not** necessary. +This will increase the version number of each relevant `Cargo.toml` file. 
After +that, just commit the updated files with: ```bash -# Assuming `HEAD` is the current `master` branch of rust-lang/rust-clippy -$ git checkout -b backport_remerge -$ git merge upstream/beta -$ git diff # This diff has to be empty, otherwise something with the remerge failed -$ git push origin backport_remerge # This can be pushed to your fork +git commit -m "Bump Clippy version -> 0.1.XY" **/*Cargo.toml ``` -After this, open a PR to the master branch. In this PR, the commit hash of the -`HEAD` of the `beta` branch must exist. In addition to that, no files should be -changed by this PR. +`XY` should be exchanged with the corresponding version -## Update the `beta` branch +## Find the Clippy commit -This step must be done **after** the PR of the previous step was merged. +For both updating the `beta` and the `stable` branch, the first step is to find +the Clippy commit of the last Clippy sync done in the respective Rust branch. -First, the Clippy commit of the `beta` branch of the Rust repository has to be -determined. +Running the following commands _in the Rust repo_ will get the commit for the +specified `<branch>`: ```bash -# Assuming the current directory corresponds to the Rust repository -$ git fetch upstream -$ git checkout upstream/beta -$ BETA_SHA=$(git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g") +git switch <branch> +SHA=$(git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g") ``` -After finding the Clippy commit, the `beta` branch in the Clippy repository can -be updated. +Where `<branch>` is one of `stable`, `beta`, or `master`. + +## Update the `beta` branch + +After getting the commit of the `beta` branch, the `beta` branch in the Clippy +repository can be updated. ```bash -# Assuming the current directory corresponds to the Clippy repository -$ git checkout beta -$ git reset --hard $BETA_SHA -$ git push upstream beta +git checkout beta +git reset --hard $SHA +git push upstream beta ``` -## Find the Clippy commit +## Update the `stable` branch -The first step is to tag the Clippy commit, that is included in the stable Rust -release. This commit can be found in the Rust repository. +After getting the commit of the `stable` branch, the `stable` branch in the +Clippy repository can be updated. ```bash -# Assuming the current directory corresponds to the Rust repository -$ git fetch upstream # `upstream` is the `rust-lang/rust` remote -$ git checkout 1.XX.0 # XX should be exchanged with the corresponding version -$ SHA=$(git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g") +git checkout stable +git reset --hard $SHA +git push upstream stable ``` -## Tag the stable commit +## Tag the `stable` commit -After finding the Clippy commit, it can be tagged with the release number. +After updating the `stable` branch, tag the HEAD commit and push it to the +Clippy repo. + +> Note: Only push the tag once the Deploy GitHub action of the `beta` branch is +> finished. Otherwise the deploy for the tag might fail. 
```bash -# Assuming the current directory corresponds to the Clippy repository -$ git checkout $SHA -$ git tag rust-1.XX.0 # XX should be exchanged with the corresponding version -$ git push upstream rust-1.XX.0 # `upstream` is the `rust-lang/rust-clippy` remote +git tag rust-1.XX.0 # XX should be exchanged with the corresponding version +git push upstream rust-1.XX.0 # `upstream` is the `rust-lang/rust-clippy` remote ``` After this, the release should be available on the Clippy [release page]. [release page]: https://github.com/rust-lang/rust-clippy/releases -## Update the `stable` branch +## Publish `clippy_utils` + +The `clippy_utils` crate is published to `crates.io` without any stability +guarantees. To do this, after the [sync] and the release is done, switch back to +the `upstream/master` branch and publish `clippy_utils`: -At this step you should have already checked out the commit of the `rust-1.XX.0` -tag. Updating the stable branch from here is as easy as: +> Note: The Rustup PR bumping the nightly and Clippy version **must** be merged +> before doing this. ```bash -# Assuming the current directory corresponds to the Clippy repository and the -# commit of the just created rust-1.XX.0 tag is checked out. -$ git push upstream rust-1.XX.0:stable # `upstream` is the `rust-lang/rust-clippy` remote +git switch master && git pull upstream master +cargo publish --manifest-path clippy_utils/Cargo.toml ``` -> _NOTE:_ Usually there are no stable backports for Clippy, so this update -> should be possible without force pushing or anything like this. If there -> should have happened a stable backport, make sure to re-merge those changes -> just as with the `beta` branch. +[sync]: sync.md ## Update `CHANGELOG.md` diff --git a/src/tools/clippy/book/src/development/infrastructure/sync.md b/src/tools/clippy/book/src/development/infrastructure/sync.md index e1fe92f9525..da1ad586607 100644 --- a/src/tools/clippy/book/src/development/infrastructure/sync.md +++ b/src/tools/clippy/book/src/development/infrastructure/sync.md @@ -21,6 +21,8 @@ to beta. For reference, the first sync following this cadence was performed the This process is described in detail in the following sections. For general information about `subtree`s in the Rust repository see [the rustc-dev-guide][subtree]. +[subtree]: https://rustc-dev-guide.rust-lang.org/external-repos.html#external-dependencies-subtree + ## Patching git-subtree to work with big repos Currently, there's a bug in `git-subtree` that prevents it from working properly @@ -50,23 +52,11 @@ sudo chown --reference=/usr/lib/git-core/git-subtree~ /usr/lib/git-core/git-subt > `bash` instead. You can do this by editing the first line of the `git-subtree` > script and changing `sh` to `bash`. -## Defining remotes - -You may want to define remotes, so you don't have to type out the remote -addresses on every sync. You can do this with the following commands (these -commands still have to be run inside the `rust` directory): - -```bash -# Set clippy-upstream remote for pulls -$ git remote add clippy-upstream https://github.com/rust-lang/rust-clippy -# Make sure to not push to the upstream repo -$ git remote set-url --push clippy-upstream DISABLED -# Set a local remote -$ git remote add clippy-local /path/to/rust-clippy -``` +> Note: The following sections assume that you have set up remotes following the +> instructions in [defining remotes]. -> Note: The following sections assume that you have set those remotes with the -> above remote names. 
+[gitgitgadget-pr]: https://github.com/gitgitgadget/git/pull/493 +[defining remotes]: release.md#defining-remotes ## Performing the sync from [`rust-lang/rust`] to Clippy @@ -78,9 +68,9 @@ to be run inside the `rust` directory): `rustup check`. 3. Sync the changes to the rust-copy of Clippy to your Clippy fork: ```bash - # Be sure to either use a net-new branch, e.g. `sync-from-rust`, or delete the branch beforehand + # Be sure to either use a net-new branch, e.g. `rustup`, or delete the branch beforehand # because changes cannot be fast forwarded and you have to run this command again. - git subtree push -P src/tools/clippy clippy-local sync-from-rust + git subtree push -P src/tools/clippy clippy-local rustup ``` > _Note:_ Most of the time you have to create a merge commit in the @@ -88,21 +78,22 @@ to be run inside the `rust` directory): > rust-copy of Clippy): ```bash git fetch upstream # assuming upstream is the rust-lang/rust remote - git checkout sync-from-rust + git switch rustup git merge upstream/master --no-ff ``` > Note: This is one of the few instances where a merge commit is allowed in > a PR. -4. Bump the nightly version in the Clippy repository by changing the date in the - rust-toolchain file to the current date and committing it with the message: +4. Bump the nightly version in the Clippy repository by running these commands: ```bash - git commit -m "Bump nightly version -> YYYY-MM-DD" + cargo dev sync update_nightly + git commit -m "Bump nightly version -> YYYY-MM-DD" rust-toolchain clippy_utils/README.md ``` 5. Open a PR to `rust-lang/rust-clippy` and wait for it to get merged (to accelerate the process ping the `@rust-lang/clippy` team in your PR and/or ask them in the [Zulip] stream.) [Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/clippy +[`rust-lang/rust`]: https://github.com/rust-lang/rust ## Performing the sync from Clippy to [`rust-lang/rust`] @@ -111,11 +102,7 @@ All the following commands have to be run inside the `rust` directory. 1. Make sure you have checked out the latest `master` of `rust-lang/rust`. 2. Sync the `rust-lang/rust-clippy` master to the rust-copy of Clippy: ```bash - git checkout -b sync-from-clippy + git switch -c clippy-subtree-update git subtree pull -P src/tools/clippy clippy-upstream master ``` 3. Open a PR to [`rust-lang/rust`] - -[gitgitgadget-pr]: https://github.com/gitgitgadget/git/pull/493 -[subtree]: https://rustc-dev-guide.rust-lang.org/external-repos.html#external-dependencies-subtree -[`rust-lang/rust`]: https://github.com/rust-lang/rust diff --git a/src/tools/clippy/book/src/development/the_team.md b/src/tools/clippy/book/src/development/the_team.md index 10341791cec..6bc0783b166 100644 --- a/src/tools/clippy/book/src/development/the_team.md +++ b/src/tools/clippy/book/src/development/the_team.md @@ -72,7 +72,7 @@ you to the alumni group. You're always welcome to come back. ## The Clippy Team -[The Clippy team](https://www.rust-lang.org/governance/teams/dev-tools#Clippy%20team) +[The Clippy team](https://www.rust-lang.org/governance/teams/dev-tools#team-clippy) is responsible for maintaining Clippy. 
### Duties diff --git a/src/tools/clippy/book/src/development/type_checking.md b/src/tools/clippy/book/src/development/type_checking.md index e6da4322a17..578836ecc56 100644 --- a/src/tools/clippy/book/src/development/type_checking.md +++ b/src/tools/clippy/book/src/development/type_checking.md @@ -94,7 +94,7 @@ impl LateLintPass<'_> for MyStructLint { // Get type of `expr` let ty = cx.typeck_results().expr_ty(expr); // Match its kind to enter the type - match ty.kind { + match ty.kind() { ty::Adt(adt_def, _) if adt_def.is_struct() => println!("Our `expr` is a struct!"), _ => () } diff --git a/src/tools/clippy/book/src/lint_configuration.md b/src/tools/clippy/book/src/lint_configuration.md index 275d125096e..ea1d7d11389 100644 --- a/src/tools/clippy/book/src/lint_configuration.md +++ b/src/tools/clippy/book/src/lint_configuration.md @@ -81,6 +81,16 @@ Whether `expect` should be allowed in test functions or `#[cfg(test)]` * [`expect_used`](https://rust-lang.github.io/rust-clippy/master/index.html#expect_used) +## `allow-indexing-slicing-in-tests` +Whether `indexing_slicing` should be allowed in test functions or `#[cfg(test)]` + +**Default Value:** `false` + +--- +**Affected lints:** +* [`indexing_slicing`](https://rust-lang.github.io/rust-clippy/master/index.html#indexing_slicing) + + ## `allow-mixed-uninlined-format-args` Whether to allow mixed uninlined format args, e.g. `format!("{} {}", a, foo.bar)` diff --git a/src/tools/clippy/clippy_config/src/conf.rs b/src/tools/clippy/clippy_config/src/conf.rs index 41b56b45d9a..bffa04f6f09 100644 --- a/src/tools/clippy/clippy_config/src/conf.rs +++ b/src/tools/clippy/clippy_config/src/conf.rs @@ -291,6 +291,9 @@ define_Conf! { /// Whether `expect` should be allowed in test functions or `#[cfg(test)]` #[lints(expect_used)] allow_expect_in_tests: bool = false, + /// Whether `indexing_slicing` should be allowed in test functions or `#[cfg(test)]` + #[lints(indexing_slicing)] + allow_indexing_slicing_in_tests: bool = false, /// Whether to allow mixed uninlined format args, e.g. `format!("{} {}", a, foo.bar)` #[lints(uninlined_format_args)] allow_mixed_uninlined_format_args: bool = true, diff --git a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs index 8719f61a890..cf33e1444e4 100644 --- a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs +++ b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs @@ -126,7 +126,7 @@ declare_clippy_lint! { /// /// [cargo-pgo]: https://github.com/Kobzol/cargo-pgo/blob/main/README.md /// - #[clippy::version = "1.82.0"] + #[clippy::version = "1.84.0"] pub ARBITRARY_SOURCE_ITEM_ORDERING, restriction, "arbitrary source item ordering" diff --git a/src/tools/clippy/clippy_lints/src/attrs/mod.rs b/src/tools/clippy/clippy_lints/src/attrs/mod.rs index a9766597d50..92efd1a4ddc 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/mod.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/mod.rs @@ -7,6 +7,7 @@ mod duplicated_attributes; mod inline_always; mod mixed_attributes_style; mod non_minimal_cfg; +mod repr_attributes; mod should_panic_without_expect; mod unnecessary_clippy_cfg; mod useless_attribute; @@ -274,6 +275,44 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does + /// Checks for items with `#[repr(packed)]`-attribute without ABI qualification + /// + /// ### Why is this bad? + /// Without qualification, `repr(packed)` implies `repr(Rust)`. The Rust-ABI is inherently unstable. 
+ /// While this is fine as long as the type is accessed correctly within Rust-code, most uses + /// of `#[repr(packed)]` involve FFI and/or data structures specified by network-protocols or + /// other external specifications. In such situations, the unstable Rust-ABI implied in + /// `#[repr(packed)]` may lead to future bugs should the Rust-ABI change. + /// + /// In case you are relying on a well-defined and stable memory layout, qualify the type's + /// representation using the `C`-ABI. Otherwise, if the type in question is only ever + /// accessed from Rust-code according to Rust's rules, use the `Rust`-ABI explicitly. + /// + /// ### Example + /// ```no_run + /// #[repr(packed)] + /// struct NetworkPacketHeader { + /// header_length: u8, + /// header_version: u16 + /// } + /// ``` + /// + /// Use instead: + /// ```no_run + /// #[repr(C, packed)] + /// struct NetworkPacketHeader { + /// header_length: u8, + /// header_version: u16 + /// } + /// ``` + #[clippy::version = "1.84.0"] + pub REPR_PACKED_WITHOUT_ABI, + suspicious, + "ensures that `repr(packed)` always comes with a qualified ABI" +} + +declare_clippy_lint! { + /// ### What it does /// Checks for `any` and `all` combinators in `cfg` with only one condition. /// /// ### Why is this bad? @@ -415,6 +454,7 @@ pub struct Attributes { impl_lint_pass!(Attributes => [ INLINE_ALWAYS, + REPR_PACKED_WITHOUT_ABI, ]); impl Attributes { @@ -431,6 +471,7 @@ impl<'tcx> LateLintPass<'tcx> for Attributes { if is_relevant_item(cx, item) { inline_always::check(cx, item.span, item.ident.name, attrs); } + repr_attributes::check(cx, item.span, attrs, &self.msrv); } fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) { diff --git a/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs b/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs new file mode 100644 index 00000000000..6d1ab46aa0c --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs @@ -0,0 +1,43 @@ +use rustc_hir::Attribute; +use rustc_lint::LateContext; +use rustc_span::{Span, sym}; + +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::msrvs; + +use super::REPR_PACKED_WITHOUT_ABI; + +pub(super) fn check(cx: &LateContext<'_>, item_span: Span, attrs: &[Attribute], msrv: &msrvs::Msrv) { + if msrv.meets(msrvs::REPR_RUST) { + check_packed(cx, item_span, attrs); + } +} + +fn check_packed(cx: &LateContext<'_>, item_span: Span, attrs: &[Attribute]) { + if let Some(items) = attrs.iter().find_map(|attr| { + if attr.ident().is_some_and(|ident| matches!(ident.name, sym::repr)) { + attr.meta_item_list() + } else { + None + } + }) && let Some(packed) = items + .iter() + .find(|item| item.ident().is_some_and(|ident| matches!(ident.name, sym::packed))) + && !items.iter().any(|item| { + item.ident() + .is_some_and(|ident| matches!(ident.name, sym::C | sym::Rust)) + }) + { + span_lint_and_then( + cx, + REPR_PACKED_WITHOUT_ABI, + item_span, + "item uses `packed` representation without ABI-qualification", + |diag| { + diag.warn("unqualified `#[repr(packed)]` defaults to `#[repr(Rust, packed)]`, which has no stable ABI") + .help("qualify the desired ABI explicitly via `#[repr(C, packed)]` or `#[repr(Rust, packed)]`") + .span_label(packed.span(), "`packed` representation set here"); + }, + ); + } +} diff --git a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs index e21853598c3..e7158a6a6b6 --- 
a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs @@ -1,8 +1,8 @@ use super::USELESS_ATTRIBUTE; -use super::utils::{extract_clippy_lint, is_lint_level, is_word}; +use super::utils::{is_lint_level, is_word, namespace_and_lint}; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::{SpanRangeExt, first_line_of_span}; -use rustc_ast::{Attribute, Item, ItemKind, MetaItemInner}; +use rustc_ast::{Attribute, Item, ItemKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, LintContext}; use rustc_middle::lint::in_external_macro; @@ -20,11 +20,13 @@ pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { for lint in lint_list { match item.kind { ItemKind::Use(..) => { - if let MetaItemInner::MetaItem(meta_item) = lint - && meta_item.is_word() - && let Some(ident) = meta_item.ident() + let (namespace @ (Some(sym::clippy) | None), Some(name)) = namespace_and_lint(lint) else { + return; + }; + + if namespace.is_none() && matches!( - ident.name.as_str(), + name.as_str(), "ambiguous_glob_reexports" | "dead_code" | "deprecated" @@ -39,9 +41,9 @@ pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { return; } - if extract_clippy_lint(lint).is_some_and(|symbol| { - matches!( - symbol.as_str(), + if namespace == Some(sym::clippy) + && matches!( + name.as_str(), "wildcard_imports" | "enum_glob_use" | "redundant_pub_crate" @@ -52,7 +54,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { | "disallowed_types" | "unused_trait_names" ) - }) { + { return; } }, diff --git a/src/tools/clippy/clippy_lints/src/attrs/utils.rs b/src/tools/clippy/clippy_lints/src/attrs/utils.rs index 3bb02688bf2..96de0642904 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/utils.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/utils.rs @@ -75,13 +75,18 @@ fn is_relevant_expr(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_> /// Returns the lint name if it is clippy lint. pub(super) fn extract_clippy_lint(lint: &MetaItemInner) -> Option<Symbol> { - if let Some(meta_item) = lint.meta_item() - && meta_item.path.segments.len() > 1 - && let tool_name = meta_item.path.segments[0].ident - && tool_name.name == sym::clippy - { - let lint_name = meta_item.path.segments.last().unwrap().ident.name; - return Some(lint_name); + match namespace_and_lint(lint) { + (Some(sym::clippy), name) => name, + _ => None, + } +} + +/// Returns the lint namespace, if any, as well as the lint name. (`None`, `None`) means +/// the lint had less than 1 or more than 2 segments. 
+pub(super) fn namespace_and_lint(lint: &MetaItemInner) -> (Option<Symbol>, Option<Symbol>) { + match lint.meta_item().map(|m| m.path.segments.as_slice()).unwrap_or_default() { + [name] => (None, Some(name.ident.name)), + [namespace, name] => (Some(namespace.ident.name), Some(name.ident.name)), + _ => (None, None), } - None } diff --git a/src/tools/clippy/clippy_lints/src/booleans.rs b/src/tools/clippy/clippy_lints/src/booleans.rs index f68a7a89b39..f8c30d1c881 100644 --- a/src/tools/clippy/clippy_lints/src/booleans.rs +++ b/src/tools/clippy/clippy_lints/src/booleans.rs @@ -5,11 +5,11 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::SpanRangeExt; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item}; use rustc_ast::ast::LitKind; +use rustc_attr_parsing::RustcVersion; use rustc_errors::Applicability; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr}; use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, UnOp}; use rustc_lint::{LateContext, LateLintPass, Level}; -use rustc_attr_parsing::RustcVersion; use rustc_session::impl_lint_pass; use rustc_span::def_id::LocalDefId; use rustc_span::{Span, sym}; diff --git a/src/tools/clippy/clippy_lints/src/casts/as_pointer_underscore.rs b/src/tools/clippy/clippy_lints/src/casts/as_pointer_underscore.rs new file mode 100644 index 00000000000..536126fd02b --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/casts/as_pointer_underscore.rs @@ -0,0 +1,19 @@ +use rustc_errors::Applicability; +use rustc_lint::LateContext; +use rustc_middle::ty::Ty; + +pub fn check<'tcx>(cx: &LateContext<'tcx>, ty_into: Ty<'_>, cast_to_hir: &'tcx rustc_hir::Ty<'tcx>) { + if let rustc_hir::TyKind::Ptr(rustc_hir::MutTy { ty, .. }) = cast_to_hir.kind + && matches!(ty.kind, rustc_hir::TyKind::Infer) + { + clippy_utils::diagnostics::span_lint_and_sugg( + cx, + super::AS_POINTER_UNDERSCORE, + cast_to_hir.span, + "using inferred pointer cast", + "use explicit type", + ty_into.to_string(), + Applicability::MachineApplicable, + ); + } +} diff --git a/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs index 4dd51dcbc9a..67aa33ca06c 100644 --- a/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::msrvs::Msrv; use clippy_utils::source::snippet_with_context; -use clippy_utils::std_or_core; +use clippy_utils::{is_lint_allowed, msrvs, std_or_core}; use rustc_errors::Applicability; use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Ty, TyKind}; use rustc_lint::LateContext; @@ -13,15 +14,12 @@ pub(super) fn check<'tcx>( expr: &'tcx Expr<'_>, cast_expr: &'tcx Expr<'_>, cast_to: &'tcx Ty<'_>, -) { + msrv: &Msrv, +) -> bool { if matches!(cast_to.kind, TyKind::Ptr(_)) && let ExprKind::AddrOf(BorrowKind::Ref, mutability, e) = cast_expr.kind - && let Some(std_or_core) = std_or_core(cx) + && !is_lint_allowed(cx, BORROW_AS_PTR, expr.hir_id) { - let macro_name = match mutability { - Mutability::Not => "addr_of", - Mutability::Mut => "addr_of_mut", - }; let mut app = Applicability::MachineApplicable; let snip = snippet_with_context(cx, e.span, cast_expr.span.ctxt(), "..", &mut app).0; // Fix #9884 @@ -31,17 +29,36 @@ pub(super) fn check<'tcx>( .get(base.hir_id) .is_some_and(|x| x.iter().any(|adj| matches!(adj.kind, Adjust::Deref(_)))) }) { - return; + return false; } + let suggestion = if msrv.meets(msrvs::RAW_REF_OP) { + let operator_kind = 
match mutability { + Mutability::Not => "const", + Mutability::Mut => "mut", + }; + format!("&raw {operator_kind} {snip}") + } else { + let Some(std_or_core) = std_or_core(cx) else { + return false; + }; + let macro_name = match mutability { + Mutability::Not => "addr_of", + Mutability::Mut => "addr_of_mut", + }; + format!("{std_or_core}::ptr::{macro_name}!({snip})") + }; + span_lint_and_sugg( cx, BORROW_AS_PTR, expr.span, "borrow as raw pointer", "try", - format!("{std_or_core}::ptr::{macro_name}!({snip})"), + suggestion, Applicability::MachineApplicable, ); + return true; } + false } diff --git a/src/tools/clippy/clippy_lints/src/casts/mod.rs b/src/tools/clippy/clippy_lints/src/casts/mod.rs index 8b884399f92..c64c0e15144 100644 --- a/src/tools/clippy/clippy_lints/src/casts/mod.rs +++ b/src/tools/clippy/clippy_lints/src/casts/mod.rs @@ -1,3 +1,4 @@ +mod as_pointer_underscore; mod as_ptr_cast_mut; mod as_underscore; mod borrow_as_ptr; @@ -574,13 +575,13 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does /// Checks for the usage of `&expr as *const T` or - /// `&mut expr as *mut T`, and suggest using `ptr::addr_of` or - /// `ptr::addr_of_mut` instead. + /// `&mut expr as *mut T`, and suggest using `&raw const` or + /// `&raw mut` instead. /// /// ### Why is this bad? /// This would improve readability and avoid creating a reference /// that points to an uninitialized value or unaligned place. - /// Read the `ptr::addr_of` docs for more information. + /// Read the `&raw` explanation in the Reference for more information. /// /// ### Example /// ```no_run @@ -593,10 +594,10 @@ declare_clippy_lint! { /// Use instead: /// ```no_run /// let val = 1; - /// let p = std::ptr::addr_of!(val); + /// let p = &raw const val; /// /// let mut val_mut = 1; - /// let p_mut = std::ptr::addr_of_mut!(val_mut); + /// let p_mut = &raw mut val_mut; /// ``` #[clippy::version = "1.60.0"] pub BORROW_AS_PTR, @@ -726,6 +727,33 @@ declare_clippy_lint! { "using `as` to cast a reference to pointer" } +declare_clippy_lint! { + /// ### What it does + /// Checks for the usage of `as *const _` or `as *mut _` conversion using inferred type. + /// + /// ### Why restrict this? + /// The conversion might include a dangerous cast that might go undetected due to the type being inferred. 
+ /// + /// ### Example + /// ```no_run + /// fn as_usize<T>(t: &T) -> usize { + /// // BUG: `t` is already a reference, so we will here + /// // return a dangling pointer to a temporary value instead + /// &t as *const _ as usize + /// } + /// ``` + /// Use instead: + /// ```no_run + /// fn as_usize<T>(t: &T) -> usize { + /// t as *const T as usize + /// } + /// ``` + #[clippy::version = "1.81.0"] + pub AS_POINTER_UNDERSCORE, + restriction, + "detects `as *mut _` and `as *const _` conversion" +} + pub struct Casts { msrv: Msrv, } @@ -763,6 +791,7 @@ impl_lint_pass!(Casts => [ CAST_NAN_TO_INT, ZERO_PTR, REF_AS_PTR, + AS_POINTER_UNDERSCORE, ]); impl<'tcx> LateLintPass<'tcx> for Casts { @@ -805,11 +834,15 @@ impl<'tcx> LateLintPass<'tcx> for Casts { } as_underscore::check(cx, expr, cast_to_hir); - - if self.msrv.meets(msrvs::PTR_FROM_REF) { + as_pointer_underscore::check(cx, cast_to, cast_to_hir); + + let was_borrow_as_ptr_emitted = if self.msrv.meets(msrvs::BORROW_AS_PTR) { + borrow_as_ptr::check(cx, expr, cast_from_expr, cast_to_hir, &self.msrv) + } else { + false + }; + if self.msrv.meets(msrvs::PTR_FROM_REF) && !was_borrow_as_ptr_emitted { ref_as_ptr::check(cx, expr, cast_from_expr, cast_to_hir); - } else if self.msrv.meets(msrvs::BORROW_AS_PTR) { - borrow_as_ptr::check(cx, expr, cast_from_expr, cast_to_hir); } } diff --git a/src/tools/clippy/clippy_lints/src/comparison_chain.rs b/src/tools/clippy/clippy_lints/src/comparison_chain.rs index c85e3500ebd..61c92d441d0 100644 --- a/src/tools/clippy/clippy_lints/src/comparison_chain.rs +++ b/src/tools/clippy/clippy_lints/src/comparison_chain.rs @@ -1,6 +1,8 @@ -use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::sugg::Sugg; use clippy_utils::ty::implements_trait; use clippy_utils::{SpanlessEq, if_sequence, is_else_clause, is_in_const_context}; +use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; @@ -120,13 +122,19 @@ impl<'tcx> LateLintPass<'tcx> for ComparisonChain { return; } } - span_lint_and_help( + let ExprKind::Binary(_, lhs, rhs) = conds[0].kind else { + unreachable!(); + }; + let lhs = Sugg::hir(cx, lhs, "..").maybe_par(); + let rhs = Sugg::hir(cx, rhs, "..").addr(); + span_lint_and_sugg( cx, COMPARISON_CHAIN, expr.span, "`if` chain can be rewritten with `match`", - None, - "consider rewriting the `if` chain to use `cmp` and `match`", + "consider rewriting the `if` chain with `match`", + format!("match {lhs}.cmp({rhs}) {{...}}"), + Applicability::HasPlaceholders, ); } } diff --git a/src/tools/clippy/clippy_lints/src/declared_lints.rs b/src/tools/clippy/clippy_lints/src/declared_lints.rs index 022ed180ed8..7451fb909ef 100644 --- a/src/tools/clippy/clippy_lints/src/declared_lints.rs +++ b/src/tools/clippy/clippy_lints/src/declared_lints.rs @@ -55,6 +55,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::attrs::INLINE_ALWAYS_INFO, crate::attrs::MIXED_ATTRIBUTES_STYLE_INFO, crate::attrs::NON_MINIMAL_CFG_INFO, + crate::attrs::REPR_PACKED_WITHOUT_ABI_INFO, crate::attrs::SHOULD_PANIC_WITHOUT_EXPECT_INFO, crate::attrs::UNNECESSARY_CLIPPY_CFG_INFO, crate::attrs::USELESS_ATTRIBUTE_INFO, @@ -75,6 +76,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::cargo::NEGATIVE_FEATURE_NAMES_INFO, crate::cargo::REDUNDANT_FEATURE_NAMES_INFO, crate::cargo::WILDCARD_DEPENDENCIES_INFO, + crate::casts::AS_POINTER_UNDERSCORE_INFO, crate::casts::AS_PTR_CAST_MUT_INFO, 
crate::casts::AS_UNDERSCORE_INFO, crate::casts::BORROW_AS_PTR_INFO, @@ -139,6 +141,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::doc::DOC_LAZY_CONTINUATION_INFO, crate::doc::DOC_LINK_WITH_QUOTES_INFO, crate::doc::DOC_MARKDOWN_INFO, + crate::doc::DOC_NESTED_REFDEFS_INFO, crate::doc::EMPTY_DOCS_INFO, crate::doc::EMPTY_LINE_AFTER_DOC_COMMENTS_INFO, crate::doc::EMPTY_LINE_AFTER_OUTER_ATTR_INFO, @@ -277,6 +280,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::literal_representation::MISTYPED_LITERAL_SUFFIXES_INFO, crate::literal_representation::UNREADABLE_LITERAL_INFO, crate::literal_representation::UNUSUAL_BYTE_GROUPINGS_INFO, + crate::literal_string_with_formatting_args::LITERAL_STRING_WITH_FORMATTING_ARGS_INFO, crate::loops::EMPTY_LOOP_INFO, crate::loops::EXPLICIT_COUNTER_LOOP_INFO, crate::loops::EXPLICIT_INTO_ITER_LOOP_INFO, diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs index 6819ad547f8..3b3a78cb115 100644 --- a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs +++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs @@ -26,7 +26,8 @@ declare_clippy_lint! { /// To ensure that every numeric type is chosen explicitly rather than implicitly. /// /// ### Known problems - /// This lint can only be allowed at the function level or above. + /// This lint is implemented using a custom algorithm independent of rustc's inference, + /// which results in many false positives and false negatives. /// /// ### Example /// ```no_run @@ -36,8 +37,8 @@ declare_clippy_lint! { /// /// Use instead: /// ```no_run - /// let i = 10i32; - /// let f = 1.23f64; + /// let i = 10_i32; + /// let f = 1.23_f64; /// ``` #[clippy::version = "1.52.0"] pub DEFAULT_NUMERIC_FALLBACK, diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index e3959903fdd..653726872c6 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -877,7 +877,8 @@ impl TyCoercionStability { | ty::CoroutineClosure(..) | ty::Never | ty::Tuple(_) - | ty::Alias(ty::Projection, _) => Self::Deref, + | ty::Alias(ty::Projection, _) + | ty::UnsafeBinder(_) => Self::Deref, }; } } @@ -1003,7 +1004,10 @@ fn report<'tcx>( let needs_paren = match cx.tcx.parent_hir_node(data.first_expr.hir_id) { Node::Expr(e) => match e.kind { ExprKind::Call(callee, _) if callee.hir_id != data.first_expr.hir_id => false, - ExprKind::Call(..) => expr.precedence() < ExprPrecedence::Unambiguous || matches!(expr.kind, ExprKind::Field(..)), + ExprKind::Call(..) 
=> { + expr.precedence() < ExprPrecedence::Unambiguous + || matches!(expr.kind, ExprKind::Field(..)) + }, _ => expr.precedence() < e.precedence(), }, _ => false, @@ -1016,11 +1020,7 @@ fn report<'tcx>( }) ); - let sugg = if !snip_is_macro - && needs_paren - && !has_enclosing_paren(&snip) - && !is_in_tuple - { + let sugg = if !snip_is_macro && needs_paren && !has_enclosing_paren(&snip) && !is_in_tuple { format!("({snip})") } else { snip.into() diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs index f65edd36253..7c2f5efd8dd 100644 --- a/src/tools/clippy/clippy_lints/src/derive.rs +++ b/src/tools/clippy/clippy_lints/src/derive.rs @@ -6,9 +6,7 @@ use clippy_utils::{has_non_exhaustive_attr, is_lint_allowed, match_def_path, pat use rustc_errors::Applicability; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr, walk_fn, walk_item}; -use rustc_hir::{ - self as hir, BlockCheckMode, BodyId, Expr, ExprKind, FnDecl, Impl, Item, ItemKind, UnsafeSource, -}; +use rustc_hir::{self as hir, BlockCheckMode, BodyId, Expr, ExprKind, FnDecl, Impl, Item, ItemKind, UnsafeSource}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{ @@ -453,7 +451,7 @@ fn check_partial_eq_without_eq<'tcx>(cx: &LateContext<'tcx>, span: Span, trait_r && cx.tcx.is_diagnostic_item(sym::PartialEq, def_id) && !has_non_exhaustive_attr(cx.tcx, *adt) && !ty_implements_eq_trait(cx.tcx, ty, eq_trait_def_id) - && let typing_env = typing_env_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id) + && let typing_env = typing_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id) && let Some(local_def_id) = adt.did().as_local() // If all of our fields implement `Eq`, we can implement `Eq` too && adt @@ -484,7 +482,7 @@ fn ty_implements_eq_trait<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, eq_trait_id: De } /// Creates the `ParamEnv` used for the give type's derived `Eq` impl. -fn typing_env_env_for_derived_eq(tcx: TyCtxt<'_>, did: DefId, eq_trait_id: DefId) -> ty::TypingEnv<'_> { +fn typing_env_for_derived_eq(tcx: TyCtxt<'_>, did: DefId, eq_trait_id: DefId) -> ty::TypingEnv<'_> { // Initial map from generic index to param def. // Vec<(param_def, needs_eq)> let mut params = tcx diff --git a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs index a0cb36f88dc..a78c392e208 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs @@ -43,7 +43,6 @@ declare_clippy_lint! { /// ```no_run /// use serde::Serialize; /// - /// // Example code where clippy issues a warning /// println!("warns"); /// /// // The diagnostic will contain the message "no serializing" diff --git a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs index 1e660b1957a..c4ed118b7c9 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs @@ -35,7 +35,6 @@ declare_clippy_lint! { /// ``` /// /// ```rust,ignore - /// // Example code where clippy issues a warning /// let xs = vec![1, 2, 3, 4]; /// xs.leak(); // Vec::leak is disallowed in the config. /// // The diagnostic contains the message "no leaking memory". @@ -47,7 +46,6 @@ declare_clippy_lint! 
{ /// /// Use instead: /// ```rust,ignore - /// // Example code which does not raise clippy warning /// let mut xs = Vec::new(); // Vec::new is _not_ disallowed in the config. /// xs.push(123); // Vec::push is _not_ disallowed in the config. /// ``` diff --git a/src/tools/clippy/clippy_lints/src/doc/include_in_doc_without_cfg.rs b/src/tools/clippy/clippy_lints/src/doc/include_in_doc_without_cfg.rs index 0bb16a0c77d..4b40fc0b1ee 100644 --- a/src/tools/clippy/clippy_lints/src/doc/include_in_doc_without_cfg.rs +++ b/src/tools/clippy/clippy_lints/src/doc/include_in_doc_without_cfg.rs @@ -1,9 +1,9 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_opt; -use rustc_ast::{AttrStyle}; +use rustc_ast::AttrStyle; use rustc_errors::Applicability; +use rustc_hir::{AttrArgs, AttrKind, Attribute}; use rustc_lint::LateContext; -use rustc_hir::{Attribute, AttrKind, AttrArgs}; use super::DOC_INCLUDE_WITHOUT_CFG; diff --git a/src/tools/clippy/clippy_lints/src/doc/mod.rs b/src/tools/clippy/clippy_lints/src/doc/mod.rs index f65acd7978a..b2135fe18bd 100644 --- a/src/tools/clippy/clippy_lints/src/doc/mod.rs +++ b/src/tools/clippy/clippy_lints/src/doc/mod.rs @@ -5,7 +5,7 @@ mod too_long_first_doc_paragraph; use clippy_config::Conf; use clippy_utils::attrs::is_doc_hidden; -use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; +use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_then}; use clippy_utils::macros::{is_panic, root_macro_call_first_node}; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::visitors::Visitable; @@ -17,6 +17,7 @@ use pulldown_cmark::Event::{ use pulldown_cmark::Tag::{BlockQuote, CodeBlock, FootnoteDefinition, Heading, Item, Link, Paragraph}; use pulldown_cmark::{BrokenLink, CodeBlockKind, CowStr, Options, TagEnd}; use rustc_data_structures::fx::FxHashSet; +use rustc_errors::Applicability; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::{AnonConst, Attribute, Expr, ImplItemKind, ItemKind, Node, Safety, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; @@ -563,6 +564,32 @@ declare_clippy_lint! { "check if files included in documentation are behind `cfg(doc)`" } +declare_clippy_lint! { + /// ### What it does + /// Warns if a link reference definition appears at the start of a + /// list item or quote. + /// + /// ### Why is this bad? + /// This is probably intended as an intra-doc link. If it is really + /// supposed to be a reference definition, it can be written outside + /// of the list item or quote. + /// + /// ### Example + /// ```no_run + /// //! - [link]: description + /// ``` + /// Use instead: + /// ```no_run + /// //! - [link][]: description (for intra-doc link) + /// //! + /// //! 
[link]: destination (for link reference definition) + /// ``` + #[clippy::version = "1.84.0"] + pub DOC_NESTED_REFDEFS, + suspicious, + "link reference defined in list item or quote" +} + pub struct Documentation { valid_idents: FxHashSet<String>, check_private_items: bool, @@ -580,6 +607,7 @@ impl Documentation { impl_lint_pass!(Documentation => [ DOC_LINK_WITH_QUOTES, DOC_MARKDOWN, + DOC_NESTED_REFDEFS, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, @@ -831,6 +859,31 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize Start(BlockQuote(_)) => { blockquote_level += 1; containers.push(Container::Blockquote); + if let Some((next_event, next_range)) = events.peek() { + let next_start = match next_event { + End(TagEnd::BlockQuote) => next_range.end, + _ => next_range.start, + }; + if let Some(refdefrange) = looks_like_refdef(doc, range.start..next_start) && + let Some(refdefspan) = fragments.span(cx, refdefrange.clone()) + { + span_lint_and_then( + cx, + DOC_NESTED_REFDEFS, + refdefspan, + "link reference defined in quote", + |diag| { + diag.span_suggestion_short( + refdefspan.shrink_to_hi(), + "for an intra-doc link, add `[]` between the label and the colon", + "[]", + Applicability::MaybeIncorrect, + ); + diag.help("link definitions are not shown in rendered documentation"); + } + ); + } + } }, End(TagEnd::BlockQuote) => { blockquote_level -= 1; @@ -869,11 +922,42 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize in_heading = true; } if let Start(Item) = event { - if let Some((_next_event, next_range)) = events.peek() { - containers.push(Container::List(next_range.start - range.start)); + let indent = if let Some((next_event, next_range)) = events.peek() { + let next_start = match next_event { + End(TagEnd::Item) => next_range.end, + _ => next_range.start, + }; + if let Some(refdefrange) = looks_like_refdef(doc, range.start..next_start) && + let Some(refdefspan) = fragments.span(cx, refdefrange.clone()) + { + span_lint_and_then( + cx, + DOC_NESTED_REFDEFS, + refdefspan, + "link reference defined in list item", + |diag| { + diag.span_suggestion_short( + refdefspan.shrink_to_hi(), + "for an intra-doc link, add `[]` between the label and the colon", + "[]", + Applicability::MaybeIncorrect, + ); + diag.help("link definitions are not shown in rendered documentation"); + } + ); + refdefrange.start - range.start + } else { + let mut start = next_range.start; + if start > 0 && doc.as_bytes().get(start - 1) == Some(&b'\\') { + // backslashes aren't in the event stream... 
+ start -= 1; + } + start - range.start + } } else { - containers.push(Container::List(0)); - } + 0 + }; + containers.push(Container::List(indent)); } ticks_unbalanced = false; paragraph_range = range; @@ -1045,3 +1129,25 @@ impl<'tcx> Visitor<'tcx> for FindPanicUnwrap<'_, 'tcx> { self.cx.tcx.hir() } } + +#[expect(clippy::range_plus_one)] // inclusive ranges aren't the same type +fn looks_like_refdef(doc: &str, range: Range<usize>) -> Option<Range<usize>> { + let offset = range.start; + let mut iterator = doc.as_bytes()[range].iter().copied().enumerate(); + let mut start = None; + while let Some((i, byte)) = iterator.next() { + match byte { + b'\\' => { + iterator.next(); + }, + b'[' => { + start = Some(i + offset); + }, + b']' if let Some(start) = start => { + return Some(start..i + offset + 1); + }, + _ => {}, + } + } + None +} diff --git a/src/tools/clippy/clippy_lints/src/explicit_write.rs b/src/tools/clippy/clippy_lints/src/explicit_write.rs index 0550c22761a..a5a4e05b3a6 100644 --- a/src/tools/clippy/clippy_lints/src/explicit_write.rs +++ b/src/tools/clippy/clippy_lints/src/explicit_write.rs @@ -58,7 +58,7 @@ impl<'tcx> LateLintPass<'tcx> for ExplicitWrite { // match call to write_fmt && let ExprKind::MethodCall(write_fun, write_recv, [write_arg], _) = *look_in_block(cx, &write_call.kind) && let ExprKind::Call(write_recv_path, []) = write_recv.kind - && write_fun.ident.name.as_str() == "write_fmt" + && write_fun.ident.name == sym::write_fmt && let Some(def_id) = path_def_id(cx, write_recv_path) { // match calls to std::io::stdout() / std::io::stderr () diff --git a/src/tools/clippy/clippy_lints/src/functions/mod.rs b/src/tools/clippy/clippy_lints/src/functions/mod.rs index be3d0f7ad63..243eb5cbfd4 100644 --- a/src/tools/clippy/clippy_lints/src/functions/mod.rs +++ b/src/tools/clippy/clippy_lints/src/functions/mod.rs @@ -10,6 +10,7 @@ mod too_many_lines; use clippy_config::Conf; use clippy_utils::def_path_def_ids; +use clippy_utils::msrvs::Msrv; use rustc_hir as hir; use rustc_hir::intravisit; use rustc_lint::{LateContext, LateLintPass}; @@ -455,6 +456,7 @@ pub struct Functions { /// A set of resolved `def_id` of traits that are configured to allow /// function params renaming. 
trait_ids: DefIdSet, + msrv: Msrv, } impl Functions { @@ -469,6 +471,7 @@ impl Functions { .iter() .flat_map(|p| def_path_def_ids(tcx, &p.split("::").collect::<Vec<_>>())) .collect(), + msrv: conf.msrv.clone(), } } } @@ -518,12 +521,12 @@ impl<'tcx> LateLintPass<'tcx> for Functions { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) { must_use::check_item(cx, item); - result::check_item(cx, item, self.large_error_threshold); + result::check_item(cx, item, self.large_error_threshold, &self.msrv); } fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) { must_use::check_impl_item(cx, item); - result::check_impl_item(cx, item, self.large_error_threshold); + result::check_impl_item(cx, item, self.large_error_threshold, &self.msrv); impl_trait_in_params::check_impl_item(cx, item); renamed_function_params::check_impl_item(cx, item, &self.trait_ids); } @@ -532,8 +535,10 @@ impl<'tcx> LateLintPass<'tcx> for Functions { too_many_arguments::check_trait_item(cx, item, self.too_many_arguments_threshold); not_unsafe_ptr_arg_deref::check_trait_item(cx, item); must_use::check_trait_item(cx, item); - result::check_trait_item(cx, item, self.large_error_threshold); + result::check_trait_item(cx, item, self.large_error_threshold, &self.msrv); impl_trait_in_params::check_trait_item(cx, item, self.avoid_breaking_exported_api); ref_option::check_trait_item(cx, item, self.avoid_breaking_exported_api); } + + extract_msrv_attr!(LateContext); } diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs index 2b26285429a..afdb5d5306a 100644 --- a/src/tools/clippy/clippy_lints/src/functions/must_use.rs +++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs @@ -28,7 +28,7 @@ pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> let is_public = cx.effective_visibilities.is_exported(item.owner_id.def_id); let fn_header_span = item.span.with_hi(sig.decl.output.span().hi()); if let Some(attr) = attr { - check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, sig); + check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, attrs, sig); } else if is_public && !is_proc_macro(attrs) && !attrs.iter().any(|a| a.has_name(sym::no_mangle)) { check_must_use_candidate( cx, @@ -50,7 +50,7 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Imp let attrs = cx.tcx.hir().attrs(item.hir_id()); let attr = cx.tcx.get_attr(item.owner_id, sym::must_use); if let Some(attr) = attr { - check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, sig); + check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, attrs, sig); } else if is_public && !is_proc_macro(attrs) && trait_ref_of_method(cx, item.owner_id.def_id).is_none() { check_must_use_candidate( cx, @@ -73,7 +73,7 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr let attrs = cx.tcx.hir().attrs(item.hir_id()); let attr = cx.tcx.get_attr(item.owner_id, sym::must_use); if let Some(attr) = attr { - check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, sig); + check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr, attrs, sig); } else if let hir::TraitFn::Provided(eid) = *eid { let body = cx.tcx.hir().body(eid); if attr.is_none() && is_public && !is_proc_macro(attrs) { @@ -91,6 +91,7 @@ pub(super) fn 
check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr } } +#[allow(clippy::too_many_arguments)] fn check_needless_must_use( cx: &LateContext<'_>, decl: &hir::FnDecl<'_>, @@ -98,21 +99,54 @@ fn check_needless_must_use( item_span: Span, fn_header_span: Span, attr: &Attribute, + attrs: &[Attribute], sig: &FnSig<'_>, ) { if in_external_macro(cx.sess(), item_span) { return; } if returns_unit(decl) { - span_lint_and_then( - cx, - MUST_USE_UNIT, - fn_header_span, - "this unit-returning function has a `#[must_use]` attribute", - |diag| { - diag.span_suggestion(attr.span, "remove the attribute", "", Applicability::MachineApplicable); - }, - ); + if attrs.len() == 1 { + span_lint_and_then( + cx, + MUST_USE_UNIT, + fn_header_span, + "this unit-returning function has a `#[must_use]` attribute", + |diag| { + diag.span_suggestion(attr.span, "remove the attribute", "", Applicability::MachineApplicable); + }, + ); + } else { + // When there are multiple attributes, it is not sufficient to simply make `must_use` empty, see + // issue #12320. + span_lint_and_then( + cx, + MUST_USE_UNIT, + fn_header_span, + "this unit-returning function has a `#[must_use]` attribute", + |diag| { + let mut attrs_without_must_use = attrs.to_vec(); + attrs_without_must_use.retain(|a| a.id != attr.id); + let sugg_str = attrs_without_must_use + .iter() + .map(|a| { + if a.value_str().is_none() { + return a.name_or_empty().to_string(); + } + format!("{} = \"{}\"", a.name_or_empty(), a.value_str().unwrap()) + }) + .collect::<Vec<_>>() + .join(", "); + + diag.span_suggestion( + attrs[0].span.with_hi(attrs[attrs.len() - 1].span.hi()), + "change these attributes to", + sugg_str, + Applicability::MachineApplicable, + ); + }, + ); + } } else if attr.value_str().is_none() && is_must_use_ty(cx, return_ty(cx, item_id)) { // Ignore async functions unless Future::Output type is a must_use type if sig.header.is_async() { diff --git a/src/tools/clippy/clippy_lints/src/functions/result.rs b/src/tools/clippy/clippy_lints/src/functions/result.rs index d4eaa166320..674d78eaae7 100644 --- a/src/tools/clippy/clippy_lints/src/functions/result.rs +++ b/src/tools/clippy/clippy_lints/src/functions/result.rs @@ -1,3 +1,4 @@ +use clippy_utils::msrvs::{self, Msrv}; use rustc_errors::Diag; use rustc_hir as hir; use rustc_lint::{LateContext, LintContext}; @@ -6,8 +7,8 @@ use rustc_middle::ty::{self, Ty}; use rustc_span::{Span, sym}; use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then}; -use clippy_utils::trait_ref_of_method; use clippy_utils::ty::{AdtVariantInfo, approx_ty_size, is_type_diagnostic_item}; +use clippy_utils::{is_no_std_crate, trait_ref_of_method}; use super::{RESULT_LARGE_ERR, RESULT_UNIT_ERR}; @@ -34,19 +35,24 @@ fn result_err_ty<'tcx>( } } -pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &hir::Item<'tcx>, large_err_threshold: u64) { +pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &hir::Item<'tcx>, large_err_threshold: u64, msrv: &Msrv) { if let hir::ItemKind::Fn(ref sig, _generics, _) = item.kind && let Some((hir_ty, err_ty)) = result_err_ty(cx, sig.decl, item.owner_id.def_id, item.span) { if cx.effective_visibilities.is_exported(item.owner_id.def_id) { let fn_header_span = item.span.with_hi(sig.decl.output.span().hi()); - check_result_unit_err(cx, err_ty, fn_header_span); + check_result_unit_err(cx, err_ty, fn_header_span, msrv); } check_result_large_err(cx, err_ty, hir_ty.span, large_err_threshold); } } -pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: 
&hir::ImplItem<'tcx>, large_err_threshold: u64) { +pub(super) fn check_impl_item<'tcx>( + cx: &LateContext<'tcx>, + item: &hir::ImplItem<'tcx>, + large_err_threshold: u64, + msrv: &Msrv, +) { // Don't lint if method is a trait's implementation, we can't do anything about those if let hir::ImplItemKind::Fn(ref sig, _) = item.kind && let Some((hir_ty, err_ty)) = result_err_ty(cx, sig.decl, item.owner_id.def_id, item.span) @@ -54,26 +60,31 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &hir::ImplItem { if cx.effective_visibilities.is_exported(item.owner_id.def_id) { let fn_header_span = item.span.with_hi(sig.decl.output.span().hi()); - check_result_unit_err(cx, err_ty, fn_header_span); + check_result_unit_err(cx, err_ty, fn_header_span, msrv); } check_result_large_err(cx, err_ty, hir_ty.span, large_err_threshold); } } -pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &hir::TraitItem<'tcx>, large_err_threshold: u64) { +pub(super) fn check_trait_item<'tcx>( + cx: &LateContext<'tcx>, + item: &hir::TraitItem<'tcx>, + large_err_threshold: u64, + msrv: &Msrv, +) { if let hir::TraitItemKind::Fn(ref sig, _) = item.kind { let fn_header_span = item.span.with_hi(sig.decl.output.span().hi()); if let Some((hir_ty, err_ty)) = result_err_ty(cx, sig.decl, item.owner_id.def_id, item.span) { if cx.effective_visibilities.is_exported(item.owner_id.def_id) { - check_result_unit_err(cx, err_ty, fn_header_span); + check_result_unit_err(cx, err_ty, fn_header_span, msrv); } check_result_large_err(cx, err_ty, hir_ty.span, large_err_threshold); } } } -fn check_result_unit_err(cx: &LateContext<'_>, err_ty: Ty<'_>, fn_header_span: Span) { - if err_ty.is_unit() { +fn check_result_unit_err(cx: &LateContext<'_>, err_ty: Ty<'_>, fn_header_span: Span, msrv: &Msrv) { + if err_ty.is_unit() && (!is_no_std_crate(cx) || msrv.meets(msrvs::ERROR_IN_CORE)) { span_lint_and_help( cx, RESULT_UNIT_ERR, diff --git a/src/tools/clippy/clippy_lints/src/if_not_else.rs b/src/tools/clippy/clippy_lints/src/if_not_else.rs index 120c5396a1c..2806d4d0e5d 100644 --- a/src/tools/clippy/clippy_lints/src/if_not_else.rs +++ b/src/tools/clippy/clippy_lints/src/if_not_else.rs @@ -1,9 +1,13 @@ use clippy_utils::consts::{ConstEvalCtxt, Constant}; -use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg}; use clippy_utils::is_else_clause; +use clippy_utils::source::{HasSession, indent_of, reindent_multiline, snippet}; +use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; +use rustc_span::Span; +use std::borrow::Cow; declare_clippy_lint! { /// ### What it does @@ -54,7 +58,7 @@ fn is_zero_const(expr: &Expr<'_>, cx: &LateContext<'_>) -> bool { impl LateLintPass<'_> for IfNotElse { fn check_expr(&mut self, cx: &LateContext<'_>, e: &Expr<'_>) { - if let ExprKind::If(cond, _, Some(els)) = e.kind + if let ExprKind::If(cond, cond_inner, Some(els)) = e.kind && let ExprKind::DropTemps(cond) = cond.kind && let ExprKind::Block(..) 
= els.kind { @@ -79,8 +83,52 @@ impl LateLintPass<'_> for IfNotElse { // } // ``` if !e.span.from_expansion() && !is_else_clause(cx.tcx, e) { - span_lint_and_help(cx, IF_NOT_ELSE, e.span, msg, None, help); + match cond.kind { + ExprKind::Unary(UnOp::Not, _) | ExprKind::Binary(_, _, _) => span_lint_and_sugg( + cx, + IF_NOT_ELSE, + e.span, + msg, + "try", + make_sugg(cx, &cond.kind, cond_inner.span, els.span, "..", Some(e.span)).to_string(), + Applicability::MachineApplicable, + ), + _ => span_lint_and_help(cx, IF_NOT_ELSE, e.span, msg, None, help), + } } } } } + +fn make_sugg<'a>( + sess: &impl HasSession, + cond_kind: &'a ExprKind<'a>, + cond_inner: Span, + els_span: Span, + default: &'a str, + indent_relative_to: Option<Span>, +) -> Cow<'a, str> { + let cond_inner_snip = snippet(sess, cond_inner, default); + let els_snip = snippet(sess, els_span, default); + let indent = indent_relative_to.and_then(|s| indent_of(sess, s)); + + let suggestion = match cond_kind { + ExprKind::Unary(UnOp::Not, cond_rest) => { + format!( + "if {} {} else {}", + snippet(sess, cond_rest.span, default), + els_snip, + cond_inner_snip + ) + }, + ExprKind::Binary(_, lhs, rhs) => { + let lhs_snip = snippet(sess, lhs.span, default); + let rhs_snip = snippet(sess, rhs.span, default); + + format!("if {lhs_snip} == {rhs_snip} {els_snip} else {cond_inner_snip}") + }, + _ => String::new(), + }; + + reindent_multiline(suggestion.into(), true, indent) +} diff --git a/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs b/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs index 6cee7cfaca2..b10206dcd05 100644 --- a/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs +++ b/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs @@ -2,7 +2,7 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint; use clippy_utils::is_in_test; use clippy_utils::msrvs::Msrv; -use rustc_attr_parsing::{StabilityLevel, StableSince, RustcVersion}; +use rustc_attr_parsing::{RustcVersion, StabilityLevel, StableSince}; use rustc_data_structures::fx::FxHashMap; use rustc_hir::{Expr, ExprKind, HirId}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs index c2030a5ab09..15650c4f732 100644 --- a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs +++ b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs @@ -135,7 +135,7 @@ fn lint_slice(cx: &LateContext<'_>, slice: &SliceLintInformation) { .map(|(index, _)| *index) .collect::<FxIndexSet<_>>(); - let value_name = |index| format!("{}_{index}", slice.ident.name); + let value_name = |index| format!("{}_{}", slice.ident.name, index); if let Some(max_index) = used_indices.iter().max() { let opt_ref = if slice.needs_ref { "ref " } else { "" }; @@ -150,6 +150,18 @@ fn lint_slice(cx: &LateContext<'_>, slice: &SliceLintInformation) { .collect::<Vec<_>>(); let pat_sugg = format!("[{}, ..]", pat_sugg_idents.join(", ")); + let mut suggestions = Vec::new(); + + // Add the binding pattern suggestion + if !slice.pattern_spans.is_empty() { + suggestions.extend(slice.pattern_spans.iter().map(|span| (*span, pat_sugg.clone()))); + } + + // Add the index replacement suggestions + if !slice.index_use.is_empty() { + suggestions.extend(slice.index_use.iter().map(|(index, span)| (*span, value_name(*index)))); + } + span_lint_and_then( cx, INDEX_REFUTABLE_SLICE, @@ -157,28 +169,10 @@ fn lint_slice(cx: &LateContext<'_>, slice: &SliceLintInformation) { "this binding can be 
a slice pattern to avoid indexing", |diag| { diag.multipart_suggestion( - "try using a slice pattern here", - slice - .pattern_spans - .iter() - .map(|span| (*span, pat_sugg.clone())) - .collect(), + "replace the binding and indexed access with a slice pattern", + suggestions, Applicability::MaybeIncorrect, ); - - diag.multipart_suggestion( - "and replace the index expressions here", - slice - .index_use - .iter() - .map(|(index, span)| (*span, value_name(*index))) - .collect(), - Applicability::MaybeIncorrect, - ); - - // The lint message doesn't contain a warning about the removed index expression, - // since `filter_lintable_slices` will only return slices where all access indices - // are known at compile time. Therefore, they can be removed without side effects. }, ); } diff --git a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs index ae2c3e0491f..f666ed0a440 100644 --- a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs +++ b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs @@ -2,7 +2,7 @@ use clippy_config::Conf; use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::ty::{deref_chain, get_adt_inherent_method}; -use clippy_utils::{higher, is_from_proc_macro}; +use clippy_utils::{higher, is_from_proc_macro, is_in_test}; use rustc_ast::ast::RangeLimits; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -42,39 +42,50 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does - /// Checks for usage of indexing or slicing. Arrays are special cases, this lint - /// does report on arrays if we can tell that slicing operations are in bounds and does not - /// lint on constant `usize` indexing on arrays because that is handled by rustc's `const_err` lint. + /// Checks for usage of indexing or slicing that may panic at runtime. + /// + /// This lint does not report on indexing or slicing operations + /// that always panic, clippy's `out_of_bound_indexing` already + /// handles those cases. /// /// ### Why restrict this? /// To avoid implicit panics from indexing and slicing. + /// /// There are “checked” alternatives which do not panic, and can be used with `unwrap()` to make /// an explicit panic when it is desired. /// + /// ### Limitations + /// This lint does not check for the usage of indexing or slicing on strings. These are covered + /// by the more specific `string_slice` lint. + /// /// ### Example /// ```rust,no_run /// // Vector - /// let x = vec![0; 5]; + /// let x = vec![0, 1, 2, 3]; /// /// x[2]; + /// x[100]; /// &x[2..100]; /// /// // Array /// let y = [0, 1, 2, 3]; /// - /// &y[10..100]; - /// &y[10..]; + /// let i = 10; // Could be a runtime value + /// let j = 20; + /// &y[i..j]; /// ``` /// /// Use instead: /// ```no_run - /// # let x = vec![0; 5]; - /// # let y = [0, 1, 2, 3]; + /// # let x = vec![0, 1, 2, 3]; /// x.get(2); + /// x.get(100); /// x.get(2..100); /// - /// y.get(10); - /// y.get(10..100); + /// # let y = [0, 1, 2, 3]; + /// let i = 10; + /// let j = 20; + /// y.get(i..j); /// ``` #[clippy::version = "pre 1.29.0"] pub INDEXING_SLICING, @@ -85,12 +96,14 @@ declare_clippy_lint! 
{ impl_lint_pass!(IndexingSlicing => [INDEXING_SLICING, OUT_OF_BOUNDS_INDEXING]); pub struct IndexingSlicing { + allow_indexing_slicing_in_tests: bool, suppress_restriction_lint_in_const: bool, } impl IndexingSlicing { pub fn new(conf: &'static Conf) -> Self { Self { + allow_indexing_slicing_in_tests: conf.allow_indexing_slicing_in_tests, suppress_restriction_lint_in_const: conf.suppress_restriction_lint_in_const, } } @@ -111,6 +124,7 @@ impl<'tcx> LateLintPass<'tcx> for IndexingSlicing { { let note = "the suggestion might not be applicable in constant blocks"; let ty = cx.typeck_results().expr_ty(array).peel_refs(); + let allowed_in_tests = self.allow_indexing_slicing_in_tests && is_in_test(cx.tcx, expr.hir_id); if let Some(range) = higher::Range::hir(index) { // Ranged indexes, i.e., &x[n..m], &x[n..], &x[..n] and &x[..] if let ty::Array(_, s) = ty.kind() { @@ -160,6 +174,10 @@ impl<'tcx> LateLintPass<'tcx> for IndexingSlicing { (None, None) => return, // [..] is ok. }; + if allowed_in_tests { + return; + } + span_lint_and_then(cx, INDEXING_SLICING, expr.span, "slicing may panic", |diag| { diag.help(help_msg); @@ -198,6 +216,10 @@ impl<'tcx> LateLintPass<'tcx> for IndexingSlicing { } } + if allowed_in_tests { + return; + } + span_lint_and_then(cx, INDEXING_SLICING, expr.span, "indexing may panic", |diag| { diag.help("consider using `.get(n)` or `.get_mut(n)` instead"); diff --git a/src/tools/clippy/clippy_lints/src/large_include_file.rs b/src/tools/clippy/clippy_lints/src/large_include_file.rs index 66d4c40ab5e..f3d62b513e8 100644 --- a/src/tools/clippy/clippy_lints/src/large_include_file.rs +++ b/src/tools/clippy/clippy_lints/src/large_include_file.rs @@ -2,8 +2,8 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::macros::root_macro_call_first_node; use clippy_utils::source::snippet_opt; -use rustc_ast::{LitKind}; -use rustc_hir::{Expr, ExprKind, Attribute, AttrArgs, AttrKind}; +use rustc_ast::LitKind; +use rustc_hir::{AttrArgs, AttrKind, Attribute, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; use rustc_span::{Span, sym}; diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs index 3e8315588cc..d33013ba663 100644 --- a/src/tools/clippy/clippy_lints/src/lib.rs +++ b/src/tools/clippy/clippy_lints/src/lib.rs @@ -9,6 +9,7 @@ #![feature(iter_partition_in_place)] #![feature(let_chains)] #![feature(never_type)] +#![feature(round_char_boundary)] #![feature(rustc_private)] #![feature(stmt_expr_attributes)] #![feature(unwrap_infallible)] @@ -17,7 +18,8 @@ clippy::missing_docs_in_private_items, clippy::must_use_candidate, rustc::diagnostic_outside_of_impl, - rustc::untranslatable_diagnostic + rustc::untranslatable_diagnostic, + clippy::literal_string_with_formatting_args )] #![warn( trivial_casts, @@ -49,6 +51,7 @@ extern crate rustc_lexer; extern crate rustc_lint; extern crate rustc_middle; extern crate rustc_parse; +extern crate rustc_parse_format; extern crate rustc_resolve; extern crate rustc_session; extern crate rustc_span; @@ -196,6 +199,7 @@ mod let_with_type_underscore; mod lifetimes; mod lines_filter_map_ok; mod literal_representation; +mod literal_string_with_formatting_args; mod loops; mod macro_metavars_in_unsafe; mod macro_use; @@ -957,6 +961,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_late_pass(move |_| Box::new(manual_div_ceil::ManualDivCeil::new(conf))); store.register_late_pass(|_| 
Box::new(manual_is_power_of_two::ManualIsPowerOfTwo)); store.register_late_pass(|_| Box::new(non_zero_suggestions::NonZeroSuggestions)); + store.register_late_pass(|_| Box::new(literal_string_with_formatting_args::LiteralStringWithFormattingArg)); store.register_late_pass(move |_| Box::new(unused_trait_names::UnusedTraitNames::new(conf))); store.register_late_pass(|_| Box::new(manual_ignore_case_cmp::ManualIgnoreCaseCmp)); store.register_late_pass(|_| Box::new(unnecessary_literal_bound::UnnecessaryLiteralBound)); diff --git a/src/tools/clippy/clippy_lints/src/lifetimes.rs b/src/tools/clippy/clippy_lints/src/lifetimes.rs index 35b14776e59..8b2eee34a97 100644 --- a/src/tools/clippy/clippy_lints/src/lifetimes.rs +++ b/src/tools/clippy/clippy_lints/src/lifetimes.rs @@ -643,8 +643,7 @@ fn report_extra_impl_lifetimes<'tcx>(cx: &LateContext<'tcx>, impl_: &'tcx Impl<' // An `impl` lifetime is elidable if it satisfies the following conditions: // - It is used exactly once. -// - That single use is not in a bounded type or `GenericArgs` in a `WherePredicate`. (Note that -// `GenericArgs` are different from `GenericParam`s.) +// - That single use is not in a `WherePredicate`. fn report_elidable_impl_lifetimes<'tcx>( cx: &LateContext<'tcx>, impl_: &'tcx Impl<'_>, @@ -658,12 +657,6 @@ fn report_elidable_impl_lifetimes<'tcx>( lifetime, in_where_predicate: false, .. - } - | Usage { - lifetime, - in_bounded_ty: false, - in_generics_arg: false, - .. }, ] = usages.as_slice() { diff --git a/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs b/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs new file mode 100644 index 00000000000..49353a1b76b --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs @@ -0,0 +1,167 @@ +use rustc_ast::{LitKind, StrStyle}; +use rustc_hir::{Expr, ExprKind}; +use rustc_lexer::is_ident; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_parse_format::{ParseMode, Parser, Piece}; +use rustc_session::declare_lint_pass; +use rustc_span::{BytePos, Span}; + +use clippy_utils::diagnostics::span_lint; +use clippy_utils::mir::enclosing_mir; + +declare_clippy_lint! { + /// ### What it does + /// Checks if string literals have formatting arguments outside of macros + /// using them (like `format!`). + /// + /// ### Why is this bad? + /// It will likely not generate the expected content. + /// + /// ### Example + /// ```no_run + /// let x: Option<usize> = None; + /// let y = "hello"; + /// x.expect("{y:?}"); + /// ``` + /// Use instead: + /// ```no_run + /// let x: Option<usize> = None; + /// let y = "hello"; + /// x.expect(&format!("{y:?}")); + /// ``` + #[clippy::version = "1.83.0"] + pub LITERAL_STRING_WITH_FORMATTING_ARGS, + suspicious, + "Checks if string literals have formatting arguments" +} + +declare_lint_pass!(LiteralStringWithFormattingArg => [LITERAL_STRING_WITH_FORMATTING_ARGS]); + +fn emit_lint(cx: &LateContext<'_>, expr: &Expr<'_>, spans: &[(Span, Option<String>)]) { + if !spans.is_empty() + && let Some(mir) = enclosing_mir(cx.tcx, expr.hir_id) + { + let spans = spans + .iter() + .filter_map(|(span, name)| { + if let Some(name) = name { + // We need to check that the name is a local. 
+ if !mir + .var_debug_info + .iter() + .any(|local| !local.source_info.span.from_expansion() && local.name.as_str() == name) + { + return None; + } + } + Some(*span) + }) + .collect::<Vec<_>>(); + match spans.len() { + 0 => {}, + 1 => { + span_lint( + cx, + LITERAL_STRING_WITH_FORMATTING_ARGS, + spans, + "this looks like a formatting argument but it is not part of a formatting macro", + ); + }, + _ => { + span_lint( + cx, + LITERAL_STRING_WITH_FORMATTING_ARGS, + spans, + "these look like formatting arguments but are not part of a formatting macro", + ); + }, + } + } +} + +impl LateLintPass<'_> for LiteralStringWithFormattingArg { + fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { + if expr.span.from_expansion() { + return; + } + if let ExprKind::Lit(lit) = expr.kind { + let (add, symbol) = match lit.node { + LitKind::Str(symbol, style) => { + let add = match style { + StrStyle::Cooked => 1, + StrStyle::Raw(nb) => nb as usize + 2, + }; + (add, symbol) + }, + _ => return, + }; + let fmt_str = symbol.as_str(); + let lo = expr.span.lo(); + let mut current = fmt_str; + let mut diff_len = 0; + + let mut parser = Parser::new(current, None, None, false, ParseMode::Format); + let mut spans = Vec::new(); + while let Some(piece) = parser.next() { + if let Some(error) = parser.errors.last() { + // We simply ignore the errors and move after them. + if error.span.end >= current.len() { + break; + } + // We find the closest char to where the error location ends. + let pos = current.floor_char_boundary(error.span.end); + // We get the next character. + current = if let Some((next_char_pos, _)) = current[pos..].char_indices().nth(1) { + // We make the parser start from this new location. + &current[pos + next_char_pos..] + } else { + break; + }; + diff_len = fmt_str.len() - current.len(); + parser = Parser::new(current, None, None, false, ParseMode::Format); + } else if let Piece::NextArgument(arg) = piece { + let mut pos = arg.position_span; + pos.start += diff_len; + pos.end += diff_len; + + let start = fmt_str[..pos.start].rfind('{').unwrap_or(pos.start); + // If this is a unicode character escape, we don't want to lint. + if start > 1 && fmt_str[..start].ends_with("\\u") { + continue; + } + + if fmt_str[start + 1..].trim_start().starts_with('}') { + // We ignore `{}`. + continue; + } + + let end = fmt_str[start + 1..] + .find('}') + .map_or(pos.end, |found| start + 1 + found) + + 1; + let ident_start = start + 1; + let colon_pos = fmt_str[ident_start..end].find(':'); + let ident_end = colon_pos.unwrap_or(end - 1); + let mut name = None; + if ident_start < ident_end + && let arg = &fmt_str[ident_start..ident_end] + && !arg.is_empty() + && is_ident(arg) + { + name = Some(arg.to_string()); + } else if colon_pos.is_none() { + // Not a `{:?}`.
+ continue; + } + spans.push(( + expr.span + .with_hi(lo + BytePos((start + add).try_into().unwrap())) + .with_lo(lo + BytePos((end + add).try_into().unwrap())), + name, + )); + } + } + emit_lint(cx, expr, &spans); + } + } +} diff --git a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs index 3c77db84a40..9f3b0957eab 100644 --- a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs +++ b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs @@ -74,7 +74,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn { if let Some(vis_snip) = vis_span.get_source_text(cx) && let Some(header_snip) = header_span.get_source_text(cx) && let Some(ret_pos) = position_before_rarrow(&header_snip) - && let Some((ret_sugg, ret_snip)) = suggested_ret(cx, output) + && let Some((_, ret_snip)) = suggested_ret(cx, output) { let header_snip = if vis_snip.is_empty() { format!("async {}", &header_snip[..ret_pos]) @@ -82,19 +82,14 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn { format!("{} async {}", vis_snip, &header_snip[vis_snip.len() + 1..ret_pos]) }; - let help = format!("make the function `async` and {ret_sugg}"); - diag.span_suggestion( - header_span, - help, - format!("{header_snip}{ret_snip}"), - Applicability::MachineApplicable, - ); + let body_snip = snippet_block(cx, closure_body.value.span, "..", Some(block.span)).to_string(); - let body_snip = snippet_block(cx, closure_body.value.span, "..", Some(block.span)); - diag.span_suggestion( - block.span, - "move the body of the async block to the enclosing function", - body_snip, + diag.multipart_suggestion( + "make the function `async` and return the output of the future directly", + vec![ + (header_span, format!("{header_snip}{ret_snip}")), + (block.span, body_snip), + ], Applicability::MachineApplicable, ); } diff --git a/src/tools/clippy/clippy_lints/src/manual_float_methods.rs b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs index 0e08e2eb83d..a1951b9da44 100644 --- a/src/tools/clippy/clippy_lints/src/manual_float_methods.rs +++ b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs @@ -5,9 +5,9 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::SpanRangeExt; use clippy_utils::{is_from_proc_macro, path_to_local}; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Constness, Expr, ExprKind}; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; +use rustc_hir::{BinOpKind, Constness, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass, Lint, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_middle::ty::TyCtxt; @@ -129,9 +129,7 @@ fn is_not_const(tcx: TyCtxt<'_>, def_id: DefId) -> bool { | DefKind::Ctor(..) 
| DefKind::AssocConst => false, - DefKind::Fn - | DefKind::AssocFn - | DefKind::Closure => tcx.constness(def_id) == Constness::NotConst, + DefKind::Fn | DefKind::AssocFn | DefKind::Closure => tcx.constness(def_id) == Constness::NotConst, } } diff --git a/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs b/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs index 4fee3bf7aa9..841adfec462 100644 --- a/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs +++ b/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs @@ -43,21 +43,21 @@ impl LateLintPass<'_> for ManualIsPowerOfTwo { && bin_op.node == BinOpKind::Eq { // a.count_ones() == 1 - if let ExprKind::MethodCall(method_name, reciever, [], _) = left.kind + if let ExprKind::MethodCall(method_name, receiver, [], _) = left.kind && method_name.ident.as_str() == "count_ones" - && let &Uint(_) = cx.typeck_results().expr_ty(reciever).kind() + && let &Uint(_) = cx.typeck_results().expr_ty(receiver).kind() && check_lit(right, 1) { - build_sugg(cx, expr, reciever, &mut applicability); + build_sugg(cx, expr, receiver, &mut applicability); } // 1 == a.count_ones() - if let ExprKind::MethodCall(method_name, reciever, [], _) = right.kind + if let ExprKind::MethodCall(method_name, receiver, [], _) = right.kind && method_name.ident.as_str() == "count_ones" - && let &Uint(_) = cx.typeck_results().expr_ty(reciever).kind() + && let &Uint(_) = cx.typeck_results().expr_ty(receiver).kind() && check_lit(left, 1) { - build_sugg(cx, expr, reciever, &mut applicability); + build_sugg(cx, expr, receiver, &mut applicability); } // a & (a - 1) == 0 @@ -115,8 +115,8 @@ impl LateLintPass<'_> for ManualIsPowerOfTwo { } } -fn build_sugg(cx: &LateContext<'_>, expr: &Expr<'_>, reciever: &Expr<'_>, applicability: &mut Applicability) { - let snippet = snippet_with_applicability(cx, reciever.span, "..", applicability); +fn build_sugg(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, applicability: &mut Applicability) { + let snippet = snippet_with_applicability(cx, receiver.span, "..", applicability); span_lint_and_sugg( cx, diff --git a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs index 20984bc40ca..b72a61a4384 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs @@ -74,8 +74,8 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) { // check if using the same bindings as before HirIdMapEntry::Occupied(entry) => return *entry.get() == b_id, } - // the names technically don't have to match; this makes the lint more conservative - && cx.tcx.hir().name(a_id) == cx.tcx.hir().name(b_id) + // the names technically don't have to match; this makes the lint more conservative + && cx.tcx.hir().name(a_id) == cx.tcx.hir().name(b_id) && cx.typeck_results().expr_ty(a) == cx.typeck_results().expr_ty(b) && pat_contains_local(lhs.pat, a_id) && pat_contains_local(rhs.pat, b_id) @@ -149,16 +149,12 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) { let move_pat_snip = snippet_with_applicability(cx, move_arm.pat.span, "<pat2>", &mut appl); let keep_pat_snip = snippet_with_applicability(cx, keep_arm.pat.span, "<pat1>", &mut appl); - diag.span_suggestion( - keep_arm.pat.span, - "or try merging the arm patterns", - format!("{keep_pat_snip} | {move_pat_snip}"), - appl, - ) - .span_suggestion( - adjusted_arm_span(cx, move_arm.span), - "and remove this 
obsolete arm", - "", + diag.multipart_suggestion( + "or try merging the arm patterns and removing the obsolete arm", + vec![ + (keep_arm.pat.span, format!("{keep_pat_snip} | {move_pat_snip}")), + (adjusted_arm_span(cx, move_arm.span), String::new()), + ], appl, ) .help("try changing either arm body"); diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs index 64969271764..1fd2ebcb54a 100644 --- a/src/tools/clippy/clippy_lints/src/matches/mod.rs +++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs @@ -27,7 +27,9 @@ mod wild_in_or_pats; use clippy_config::Conf; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::walk_span_to_context; -use clippy_utils::{higher, is_direct_expn_of, is_in_const_context, is_span_match, span_contains_cfg}; +use clippy_utils::{ + higher, is_direct_expn_of, is_in_const_context, is_span_match, span_contains_cfg, span_extract_comments, +}; use rustc_hir::{Arm, Expr, ExprKind, LetStmt, MatchSource, Pat, PatKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; @@ -1059,7 +1061,28 @@ impl<'tcx> LateLintPass<'tcx> for Matches { } redundant_pattern_match::check_match(cx, expr, ex, arms); - single_match::check(cx, ex, arms, expr); + let source_map = cx.tcx.sess.source_map(); + let mut match_comments = span_extract_comments(source_map, expr.span); + // We remove comments from inside arms block. + if !match_comments.is_empty() { + for arm in arms { + for comment in span_extract_comments(source_map, arm.body.span) { + if let Some(index) = match_comments + .iter() + .enumerate() + .find(|(_, cm)| **cm == comment) + .map(|(index, _)| index) + { + match_comments.remove(index); + } + } + } + } + // If there are still comments, it means they are outside of the arms, therefore + // we should not lint. 
+ if match_comments.is_empty() { + single_match::check(cx, ex, arms, expr); + } match_bool::check(cx, ex, arms, expr); overlapping_arms::check(cx, ex, arms); match_wild_enum::check(cx, ex, arms); diff --git a/src/tools/clippy/clippy_lints/src/matches/needless_match.rs b/src/tools/clippy/clippy_lints/src/matches/needless_match.rs index 6f7d6902640..73822314b4b 100644 --- a/src/tools/clippy/clippy_lints/src/matches/needless_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/needless_match.rs @@ -3,7 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::{is_type_diagnostic_item, same_type_and_consts}; use clippy_utils::{ - eq_expr_value, get_parent_expr_for_hir, higher, is_else_clause, is_res_lang_ctor, over, path_res, + SpanlessEq, eq_expr_value, get_parent_expr_for_hir, higher, is_else_clause, is_res_lang_ctor, over, path_res, peel_blocks_with_stmt, }; use rustc_errors::Applicability; @@ -90,7 +90,9 @@ fn check_if_let_inner(cx: &LateContext<'_>, if_let: &higher::IfLet<'_>) -> bool } // Recursively check for each `else if let` phrase, - if let Some(ref nested_if_let) = higher::IfLet::hir(cx, if_else) { + if let Some(ref nested_if_let) = higher::IfLet::hir(cx, if_else) + && SpanlessEq::new(cx).eq_expr(nested_if_let.let_expr, if_let.let_expr) + { return check_if_let_inner(cx, nested_if_let); } diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs index 95a4bf6f60d..3ca20479f8e 100644 --- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs @@ -129,7 +129,7 @@ fn report_single_pattern(cx: &LateContext<'_>, ex: &Expr<'_>, arm: &Arm<'_>, exp PatKind::Lit(Expr { kind: ExprKind::Lit(lit), .. - }) if lit.node.is_str() => pat_ref_count + 1, + }) if lit.node.is_str() || lit.node.is_bytestr() => pat_ref_count + 1, _ => pat_ref_count, }; // References are only implicitly added to the pattern, so no overflow here. diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs index cf7f276dabb..b04d761d486 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs @@ -2,6 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::{is_expr_identity_function, is_expr_untyped_identity_function, is_trait_method}; use rustc_errors::Applicability; use rustc_hir as hir; +use rustc_hir::ExprKind; use rustc_lint::LateContext; use rustc_span::{Span, sym}; @@ -21,6 +22,15 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, filter_map_arg: if is_trait_method(cx, expr, sym::Iterator) && let Some(applicability) = is_identity(cx, filter_map_arg) { + // check if the iterator is from an empty array, see issue #12653 + if let ExprKind::MethodCall(_, recv, ..) = expr.kind + && let ExprKind::MethodCall(_, recv2, ..) 
= recv.kind + && let ExprKind::Array(arr) = recv2.kind + && arr.is_empty() + { + return; + } + span_lint_and_sugg( cx, FILTER_MAP_IDENTITY, diff --git a/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs b/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs index 80703618a11..1ebb71e251a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs @@ -19,7 +19,7 @@ fn extract_count_with_applicability( ) -> Option<String> { let start = range.start?; let end = range.end?; - // TODO: This doens't handle if either the start or end are negative literals, or if the start is + // TODO: This doesn't handle if either the start or end are negative literals, or if the start is // not a literal. In the first case, we need to be careful about how we handle computing the // count to avoid overflows. In the second, we may need to add parenthesis to make the // suggestion correct. diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs index 7d1d5d69c99..810287fa541 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mod.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs @@ -1864,7 +1864,6 @@ declare_clippy_lint! { /// /// ### Example /// ```no_run - /// // example code where clippy issues a warning /// let opt: Option<u32> = None; /// /// opt.unwrap_or_else(|| 42); @@ -3839,13 +3838,11 @@ declare_clippy_lint! { /// /// ### Example /// ```no_run - /// // example code where clippy issues a warning /// vec![Some(1)].into_iter().filter(Option::is_some); /// /// ``` /// Use instead: /// ```no_run - /// // example code which does not raise clippy warning /// vec![Some(1)].into_iter().flatten(); /// ``` #[clippy::version = "1.77.0"] @@ -3865,13 +3862,11 @@ declare_clippy_lint! { /// /// ### Example /// ```no_run - /// // example code where clippy issues a warning /// vec![Ok::<i32, String>(1)].into_iter().filter(Result::is_ok); /// /// ``` /// Use instead: /// ```no_run - /// // example code which does not raise clippy warning /// vec![Ok::<i32, String>(1)].into_iter().flatten(); /// ``` #[clippy::version = "1.77.0"] @@ -3969,7 +3964,7 @@ declare_clippy_lint! { /// /// ### Why is this bad? /// - /// In the aformentioned cases it is not necessary to call `min()` or `max()` + /// In the aforementioned cases it is not necessary to call `min()` or `max()` /// to compare values, it may even cause confusion. 
/// /// ### Example @@ -4982,6 +4977,10 @@ impl Methods { } map_identity::check(cx, expr, recv, m_arg, name, span); manual_inspect::check(cx, expr, m_arg, name, span, &self.msrv); + crate::useless_conversion::check_function_application(cx, expr, recv, m_arg); + }, + ("map_break" | "map_continue", [m_arg]) => { + crate::useless_conversion::check_function_application(cx, expr, recv, m_arg); }, ("map_or", [def, map]) => { option_map_or_none::check(cx, expr, recv, def, map); diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs b/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs index 829c118d291..c41ce2481d7 100644 --- a/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs +++ b/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs @@ -1,9 +1,6 @@ -use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::match_def_path; -use clippy_utils::source::snippet_with_applicability; +use clippy_utils::diagnostics::span_lint_and_note; use clippy_utils::ty::is_type_diagnostic_item; -use rustc_errors::Applicability; -use rustc_hir::Expr; +use rustc_hir::{Expr, ExprKind, QPath}; use rustc_lint::LateContext; use rustc_span::sym; @@ -11,20 +8,17 @@ use super::NEEDLESS_OPTION_TAKE; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) { // Checks if expression type is equal to sym::Option and if the expr is not a syntactic place - if !recv.is_syntactic_place_expr() && is_expr_option(cx, recv) && has_expr_as_ref_path(cx, recv) { - let mut applicability = Applicability::MachineApplicable; - span_lint_and_sugg( - cx, - NEEDLESS_OPTION_TAKE, - expr.span, - "called `Option::take()` on a temporary value", - "try", - format!( - "{}", - snippet_with_applicability(cx, recv.span, "..", &mut applicability) - ), - applicability, - ); + if !recv.is_syntactic_place_expr() && is_expr_option(cx, recv) { + if let Some(function_name) = source_of_temporary_value(recv) { + span_lint_and_note( + cx, + NEEDLESS_OPTION_TAKE, + expr.span, + "called `Option::take()` on a temporary value", + None, + format!("`{function_name}` creates a temporary value, so calling take() has no effect"), + ); + } } } @@ -33,9 +27,24 @@ fn is_expr_option(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { is_type_diagnostic_item(cx, expr_type, sym::Option) } -fn has_expr_as_ref_path(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - if let Some(ref_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) { - return match_def_path(cx, ref_id, &["core", "option", "Option", "as_ref"]); +/// Returns the string of the function call that creates the temporary. +/// When this function is called, we are reasonably certain that the `ExprKind` is either +/// `Call` or `MethodCall` because we already checked that the expression is not +/// `is_syntactic_place_expr()`. +fn source_of_temporary_value<'a>(expr: &'a Expr<'_>) -> Option<&'a str> { + match expr.peel_borrows().kind { + ExprKind::Call(function, _) => { + if let ExprKind::Path(QPath::Resolved(_, func_path)) = function.kind { + if !func_path.segments.is_empty() { + return Some(func_path.segments[0].ident.name.as_str()); + } + } + if let ExprKind::Path(QPath::TypeRelative(_, func_path_segment)) = function.kind { + return Some(func_path_segment.ident.name.as_str()); + } + None + }, + ExprKind::MethodCall(path_segment, ..) 
=> Some(path_segment.ident.name.as_str()), + _ => None, } - false } diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs index c91be33b1cd..c6d4ef5911e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs @@ -129,7 +129,7 @@ fn check_manual_split_once_indirect( let ctxt = expr.span.ctxt(); let mut parents = cx.tcx.hir().parent_iter(expr.hir_id); if let (_, Node::LetStmt(local)) = parents.next()? - && let PatKind::Binding(BindingMode::MUT, iter_binding_id, iter_ident, None) = local.pat.kind + && let PatKind::Binding(BindingMode::MUT, iter_binding_id, _, None) = local.pat.kind && let (iter_stmt_id, Node::Stmt(_)) = parents.next()? && let (_, Node::Block(enclosing_block)) = parents.next()? && let mut stmts = enclosing_block @@ -162,16 +162,20 @@ fn check_manual_split_once_indirect( UnwrapKind::Unwrap => ".unwrap()", UnwrapKind::QuestionMark => "?", }; - diag.span_suggestion_verbose( - local.span, - format!("try `{r}split_once`"), - format!("let ({lhs}, {rhs}) = {self_snip}.{r}split_once({pat_snip}){unwrap};"), + + // Add a multipart suggestion + diag.multipart_suggestion( + format!("replace with `{r}split_once`"), + vec![ + ( + local.span, + format!("let ({lhs}, {rhs}) = {self_snip}.{r}split_once({pat_snip}){unwrap};"), + ), + (first.span, String::new()), // Remove the first usage + (second.span, String::new()), // Remove the second usage + ], app, ); - - let remove_msg = format!("remove the `{iter_ident}` usages"); - diag.span_suggestion(first.span, remove_msg.clone(), "", app); - diag.span_suggestion(second.span, remove_msg, "", app); }); } diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs index 029704882dd..671c189a98e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs @@ -6,6 +6,7 @@ use clippy_utils::ty::{get_iterator_item_ty, implements_trait}; use clippy_utils::visitors::for_each_expr_without_closures; use clippy_utils::{can_mut_borrow_both, fn_def_id, get_parent_expr, path_to_local}; use core::ops::ControlFlow; +use itertools::Itertools; use rustc_errors::Applicability; use rustc_hir::def_id::DefId; use rustc_hir::{BindingMode, Expr, ExprKind, Node, PatKind}; @@ -122,14 +123,13 @@ pub fn check_for_loop_iter( } else { Applicability::MachineApplicable }; - diag.span_suggestion(expr.span, "use", snippet.to_owned(), applicability); - if !references_to_binding.is_empty() { - diag.multipart_suggestion( - "remove any references to the binding", - references_to_binding, - applicability, - ); - } + + let combined = references_to_binding + .into_iter() + .chain(vec![(expr.span, snippet.to_owned())]) + .collect_vec(); + + diag.multipart_suggestion("remove any references to the binding", combined, applicability); }, ); return true; diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs index 603916e06c9..9a45b04d1a6 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::is_trait_method; use clippy_utils::sugg::Sugg; use clippy_utils::ty::implements_trait; +use 
clippy_utils::{is_trait_method, std_or_core}; use rustc_errors::Applicability; use rustc_hir::{Closure, Expr, ExprKind, Mutability, Param, Pat, PatKind, Path, PathSegment, QPath}; use rustc_lint::LateContext; @@ -211,8 +211,10 @@ pub(super) fn check<'tcx>( trigger.vec_name, if is_unstable { "_unstable" } else { "" }, trigger.closure_arg, - if trigger.reverse { - format!("std::cmp::Reverse({})", trigger.closure_body) + if let Some(std_or_core) = std_or_core(cx) + && trigger.reverse + { + format!("{}::cmp::Reverse({})", std_or_core, trigger.closure_body) } else { trigger.closure_body.to_string() }, diff --git a/src/tools/clippy/clippy_lints/src/missing_const_for_thread_local.rs b/src/tools/clippy/clippy_lints/src/missing_const_for_thread_local.rs index 9a44a3c980c..e2ca4458eda 100644 --- a/src/tools/clippy/clippy_lints/src/missing_const_for_thread_local.rs +++ b/src/tools/clippy/clippy_lints/src/missing_const_for_thread_local.rs @@ -27,14 +27,12 @@ declare_clippy_lint! { /// /// ### Example /// ```no_run - /// // example code where clippy issues a warning /// thread_local! { /// static BUF: String = String::new(); /// } /// ``` /// Use instead: /// ```no_run - /// // example code which does not raise clippy warning /// thread_local! { /// static BUF: String = const { String::new() }; /// } diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs index 9e44bb02c56..7feff7e4d3f 100644 --- a/src/tools/clippy/clippy_lints/src/no_effect.rs +++ b/src/tools/clippy/clippy_lints/src/no_effect.rs @@ -8,7 +8,7 @@ use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ BinOpKind, BlockCheckMode, Expr, ExprKind, HirId, HirIdMap, ItemKind, LocalSource, Node, PatKind, Stmt, StmtKind, - UnsafeSource, StructTailExpr, is_range_literal, + StructTailExpr, UnsafeSource, is_range_literal, }; use rustc_infer::infer::TyCtxtInferExt as _; use rustc_lint::{LateContext, LateLintPass, LintContext}; diff --git a/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs b/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs index d369978b8be..2083f2bf628 100644 --- a/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs +++ b/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs @@ -43,8 +43,8 @@ impl Context { _ => (), } - let (_, r_ty) = (cx.typeck_results().expr_ty(l), cx.typeck_results().expr_ty(r)); - if r_ty.peel_refs().is_floating_point() && r_ty.peel_refs().is_floating_point() { + let (l_ty, r_ty) = (cx.typeck_results().expr_ty(l), cx.typeck_results().expr_ty(r)); + if l_ty.peel_refs().is_floating_point() && r_ty.peel_refs().is_floating_point() { span_lint(cx, FLOAT_ARITHMETIC, expr.span, "floating-point arithmetic detected"); self.expr_id = Some(expr.hir_id); } diff --git a/src/tools/clippy/clippy_lints/src/precedence.rs b/src/tools/clippy/clippy_lints/src/precedence.rs index 37f5dd5583b..031f0931059 100644 --- a/src/tools/clippy/clippy_lints/src/precedence.rs +++ b/src/tools/clippy/clippy_lints/src/precedence.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_with_applicability; +use rustc_ast::ast::BinOpKind::{Add, BitAnd, BitOr, BitXor, Div, Mul, Rem, Shl, Shr, Sub}; use rustc_ast::ast::{BinOpKind, Expr, ExprKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; @@ -12,6 +13,7 @@ declare_clippy_lint! { /// and suggests to add parentheses. 
Currently it catches the following: /// * mixed usage of arithmetic and bit shifting/combining operators without /// parentheses + /// * mixed usage of bitmasking and bit shifting operators without parentheses /// /// ### Why is this bad? /// Not everyone knows the precedence of those operators by @@ -20,6 +22,7 @@ declare_clippy_lint! { /// /// ### Example /// * `1 << 2 + 3` equals 32, while `(1 << 2) + 3` equals 7 + /// * `0x2345 & 0xF000 >> 12` equals 5, while `(0x2345 & 0xF000) >> 12` equals 2 #[clippy::version = "pre 1.29.0"] pub PRECEDENCE, complexity, @@ -51,8 +54,13 @@ impl EarlyLintPass for Precedence { return; } let mut applicability = Applicability::MachineApplicable; - match (is_arith_expr(left), is_arith_expr(right)) { - (true, true) => { + match (op, get_bin_opt(left), get_bin_opt(right)) { + ( + BitAnd | BitOr | BitXor, + Some(Shl | Shr | Add | Div | Mul | Rem | Sub), + Some(Shl | Shr | Add | Div | Mul | Rem | Sub), + ) + | (Shl | Shr, Some(Add | Div | Mul | Rem | Sub), Some(Add | Div | Mul | Rem | Sub)) => { let sugg = format!( "({}) {} ({})", snippet_with_applicability(cx, left.span, "..", &mut applicability), @@ -61,7 +69,8 @@ impl EarlyLintPass for Precedence { ); span_sugg(expr, sugg, applicability); }, - (true, false) => { + (BitAnd | BitOr | BitXor, Some(Shl | Shr | Add | Div | Mul | Rem | Sub), _) + | (Shl | Shr, Some(Add | Div | Mul | Rem | Sub), _) => { let sugg = format!( "({}) {} {}", snippet_with_applicability(cx, left.span, "..", &mut applicability), @@ -70,7 +79,8 @@ impl EarlyLintPass for Precedence { ); span_sugg(expr, sugg, applicability); }, - (false, true) => { + (BitAnd | BitOr | BitXor, _, Some(Shl | Shr | Add | Div | Mul | Rem | Sub)) + | (Shl | Shr, _, Some(Add | Div | Mul | Rem | Sub)) => { let sugg = format!( "{} {} ({})", snippet_with_applicability(cx, left.span, "..", &mut applicability), @@ -79,27 +89,20 @@ impl EarlyLintPass for Precedence { ); span_sugg(expr, sugg, applicability); }, - (false, false) => (), + _ => (), } } } } -fn is_arith_expr(expr: &Expr) -> bool { +fn get_bin_opt(expr: &Expr) -> Option<BinOpKind> { match expr.kind { - ExprKind::Binary(Spanned { node: op, .. }, _, _) => is_arith_op(op), - _ => false, + ExprKind::Binary(Spanned { node: op, .. }, _, _) => Some(op), + _ => None, } } #[must_use] fn is_bit_op(op: BinOpKind) -> bool { - use rustc_ast::ast::BinOpKind::{BitAnd, BitOr, BitXor, Shl, Shr}; matches!(op, BitXor | BitAnd | BitOr | Shl | Shr) } - -#[must_use] -fn is_arith_op(op: BinOpKind) -> bool { - use rustc_ast::ast::BinOpKind::{Add, Div, Mul, Rem, Sub}; - matches!(op, Add | Sub | Mul | Div | Rem) -} diff --git a/src/tools/clippy/clippy_lints/src/question_mark.rs b/src/tools/clippy/clippy_lints/src/question_mark.rs index 77abe7151f0..ffc3b86c502 100644 --- a/src/tools/clippy/clippy_lints/src/question_mark.rs +++ b/src/tools/clippy/clippy_lints/src/question_mark.rs @@ -59,7 +59,7 @@ pub struct QuestionMark { /// As for why we need this in the first place: <https://github.com/rust-lang/rust-clippy/issues/8628> try_block_depth_stack: Vec<u32>, /// Keeps track of the number of inferred return type closures we are inside, to avoid problems - /// with the `Err(x.into())` expansion being ambiguious. + /// with the `Err(x.into())` expansion being ambiguous. 
inferred_ret_closure_stack: u16, } diff --git a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs index 8e3472b1b5a..7038b19d275 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs @@ -85,7 +85,8 @@ impl<'tcx> LateLintPass<'tcx> for RedundantSlicing { let (expr_ty, expr_ref_count) = peel_middle_ty_refs(cx.typeck_results().expr_ty(expr)); let (indexed_ty, indexed_ref_count) = peel_middle_ty_refs(cx.typeck_results().expr_ty(indexed)); let parent_expr = get_parent_expr(cx, expr); - let needs_parens_for_prefix = parent_expr.is_some_and(|parent| parent.precedence() > ExprPrecedence::Prefix); + let needs_parens_for_prefix = + parent_expr.is_some_and(|parent| parent.precedence() > ExprPrecedence::Prefix); if expr_ty == indexed_ty { if expr_ref_count > indexed_ref_count { diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs index 7ae0310b6d9..83199ba0f70 100644 --- a/src/tools/clippy/clippy_lints/src/shadow.rs +++ b/src/tools/clippy/clippy_lints/src/shadow.rs @@ -1,6 +1,9 @@ +use std::ops::ControlFlow; + use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::path_to_local_id; use clippy_utils::source::snippet; -use clippy_utils::visitors::is_local_used; +use clippy_utils::visitors::{Descend, Visitable, for_each_expr}; use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::Res; use rustc_hir::def_id::LocalDefId; @@ -175,9 +178,31 @@ fn is_shadow(cx: &LateContext<'_>, owner: LocalDefId, first: ItemLocalId, second false } +/// Checks if the given local is used, except for in child expression of `except`. +/// +/// This is a version of [`is_local_used`](clippy_utils::visitors::is_local_used), used to +/// implement the fix for <https://github.com/rust-lang/rust-clippy/issues/10780>. +pub fn is_local_used_except<'tcx>( + cx: &LateContext<'tcx>, + visitable: impl Visitable<'tcx>, + id: HirId, + except: Option<HirId>, +) -> bool { + for_each_expr(cx, visitable, |e| { + if except.is_some_and(|it| it == e.hir_id) { + ControlFlow::Continue(Descend::No) + } else if path_to_local_id(e, id) { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(Descend::Yes) + } + }) + .is_some() +} + fn lint_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, shadowed: HirId, span: Span) { let (lint, msg) = match find_init(cx, pat.hir_id) { - Some(expr) if is_self_shadow(cx, pat, expr, shadowed) => { + Some((expr, _)) if is_self_shadow(cx, pat, expr, shadowed) => { let msg = format!( "`{}` is shadowed by itself in `{}`", snippet(cx, pat.span, "_"), @@ -185,7 +210,7 @@ fn lint_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, shadowed: HirId, span: Span) ); (SHADOW_SAME, msg) }, - Some(expr) if is_local_used(cx, expr, shadowed) => { + Some((expr, except)) if is_local_used_except(cx, expr, shadowed, except) => { let msg = format!("`{}` is shadowed", snippet(cx, pat.span, "_")); (SHADOW_REUSE, msg) }, @@ -232,15 +257,32 @@ fn is_self_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, mut expr: &Expr<'_>, hir_ /// Finds the "init" expression for a pattern: `let <pat> = <init>;` (or `if let`) or /// `match <init> { .., <pat> => .., .. }` -fn find_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Expr<'tcx>> { - for (_, node) in cx.tcx.hir().parent_iter(hir_id) { +/// +/// For closure arguments passed to a method call, returns the method call, and the `HirId` of the +/// closure (which will later be skipped). 
This is for <https://github.com/rust-lang/rust-clippy/issues/10780> +fn find_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<(&'tcx Expr<'tcx>, Option<HirId>)> { + for (hir_id, node) in cx.tcx.hir().parent_iter(hir_id) { let init = match node { - Node::Arm(_) | Node::Pat(_) => continue, + Node::Arm(_) | Node::Pat(_) | Node::PatField(_) | Node::Param(_) => continue, Node::Expr(expr) => match expr.kind { - ExprKind::Match(e, _, _) | ExprKind::Let(&LetExpr { init: e, .. }) => Some(e), + ExprKind::Match(e, _, _) | ExprKind::Let(&LetExpr { init: e, .. }) => Some((e, None)), + // If we're a closure argument, then a parent call is also an associated item. + ExprKind::Closure(_) => { + if let Some((_, node)) = cx.tcx.hir().parent_iter(hir_id).next() { + match node { + Node::Expr(expr) => match expr.kind { + ExprKind::MethodCall(_, _, _, _) | ExprKind::Call(_, _) => Some((expr, Some(hir_id))), + _ => None, + }, + _ => None, + } + } else { + None + } + }, _ => None, }, - Node::LetStmt(local) => local.init, + Node::LetStmt(local) => local.init.map(|init| (init, None)), _ => None, }; return init; diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs index 1a5b958e6a6..c690696aefc 100644 --- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs +++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs @@ -99,16 +99,10 @@ impl<'tcx> LateLintPass<'tcx> for SignificantDropTightening<'tcx> { snippet(cx, apa.last_bind_ident.span, ".."), ) }; - diag.span_suggestion_verbose( - apa.first_stmt_span, + + diag.multipart_suggestion_verbose( "merge the temporary construction with its single usage", - stmt, - Applicability::MaybeIncorrect, - ); - diag.span_suggestion( - apa.last_stmt_span, - "remove separated single usage", - "", + vec![(apa.first_stmt_span, stmt), (apa.last_stmt_span, String::new())], Applicability::MaybeIncorrect, ); }, diff --git a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs index bb11daecc07..9737b84cdb9 100644 --- a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs +++ b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs @@ -86,7 +86,8 @@ impl LateLintPass<'_> for SingleRangeInVecInit { return; }; - let ExprKind::Struct(QPath::LangItem(lang_item, ..), [start, end], StructTailExpr::None) = inner_expr.kind else { + let ExprKind::Struct(QPath::LangItem(lang_item, ..), [start, end], StructTailExpr::None) = inner_expr.kind + else { return; }; diff --git a/src/tools/clippy/clippy_lints/src/strings.rs b/src/tools/clippy/clippy_lints/src/strings.rs index e09c0706006..2925f355d0b 100644 --- a/src/tools/clippy/clippy_lints/src/strings.rs +++ b/src/tools/clippy/clippy_lints/src/strings.rs @@ -370,12 +370,10 @@ declare_clippy_lint! { /// /// ### Example /// ```no_run - /// // example code where clippy issues a warning /// let _ = "str".to_string(); /// ``` /// Use instead: /// ```no_run - /// // example code which does not raise clippy warning /// let _ = "str".to_owned(); /// ``` #[clippy::version = "pre 1.29.0"] @@ -424,13 +422,11 @@ declare_clippy_lint! 
{ /// /// ### Example /// ```no_run - /// // example code where clippy issues a warning /// let msg = String::from("Hello World"); /// let _ = msg.to_string(); /// ``` /// Use instead: /// ```no_run - /// // example code which does not raise clippy warning /// let msg = String::from("Hello World"); /// let _ = msg.clone(); /// ``` diff --git a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs index 0702f6d1e74..d2727968c0c 100644 --- a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs +++ b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet_with_context; -use clippy_utils::visitors::{for_each_local_assignment, for_each_value_source, is_local_used}; +use clippy_utils::visitors::{for_each_local_assignment, for_each_value_source}; use core::ops::ControlFlow; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; @@ -71,25 +71,38 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, local: &'tcx LetStmt<'_>) { local.span, "this let-binding has unit value", |diag| { + let mut suggestions = Vec::new(); + + // Suggest omitting the `let` binding if let Some(expr) = &local.init { let mut app = Applicability::MachineApplicable; let snip = snippet_with_context(cx, expr.span, local.span.ctxt(), "()", &mut app).0; - diag.span_suggestion(local.span, "omit the `let` binding", format!("{snip};"), app); + suggestions.push((local.span, format!("{snip};"))); } - if let PatKind::Binding(_, binding_hir_id, ident, ..) = local.pat.kind + // If this is a binding pattern, we need to add suggestions to remove any usages + // of the variable + if let PatKind::Binding(_, binding_hir_id, ..) 
= local.pat.kind && let Some(body_id) = cx.enclosing_body.as_ref() - && let body = cx.tcx.hir().body(*body_id) - && is_local_used(cx, body, binding_hir_id) { - let identifier = ident.as_str(); + let body = cx.tcx.hir().body(*body_id); + + // Collect variable usages let mut visitor = UnitVariableCollector::new(binding_hir_id); walk_body(&mut visitor, body); - visitor.spans.into_iter().for_each(|span| { - let msg = - format!("variable `{identifier}` of type `()` can be replaced with explicit `()`"); - diag.span_suggestion(span, msg, "()", Applicability::MachineApplicable); - }); + + // Add suggestions for replacing variable usages + suggestions.extend(visitor.spans.into_iter().map(|span| (span, "()".to_string()))); + } + + // Emit appropriate diagnostic based on whether there are usages of the let binding + if !suggestions.is_empty() { + let message = if suggestions.len() == 1 { + "omit the `let` binding" + } else { + "omit the `let` binding and replace variable usages with `()`" + }; + diag.multipart_suggestion(message, suggestions, Applicability::MachineApplicable); } }, ); diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs index 0f4bd286145..47d6fe7db76 100644 --- a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs +++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs @@ -25,13 +25,13 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { return; } - let (reciever, args) = match expr.kind { + let (receiver, args) = match expr.kind { ExprKind::Call(_, args) => (None, args), ExprKind::MethodCall(_, receiver, args, _) => (Some(receiver), args), _ => return, }; - let args_to_recover = reciever + let args_to_recover = receiver .into_iter() .chain(args) .filter(|arg| { diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_literal_bound.rs b/src/tools/clippy/clippy_lints/src/unnecessary_literal_bound.rs index 80ce6711126..8165a45bc5b 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_literal_bound.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_literal_bound.rs @@ -17,7 +17,7 @@ declare_clippy_lint! { /// /// ### Why is this bad? /// - /// This leaves the caller unable to use the `&str` as `&'static str`, causing unneccessary allocations or confusion. + /// This leaves the caller unable to use the `&str` as `&'static str`, causing unnecessary allocations or confusion. /// This is also most likely what you meant to write. 
/// /// ### Example diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs index 0a90d31db7e..1df229c330e 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs @@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; use clippy_utils::ty::is_copy; use clippy_utils::{get_parent_expr, path_to_local}; -use rustc_hir::{BindingMode, Expr, ExprField, ExprKind, Node, PatKind, Path, QPath, UnOp, StructTailExpr}; +use rustc_hir::{BindingMode, Expr, ExprField, ExprKind, Node, PatKind, Path, QPath, StructTailExpr, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; @@ -63,7 +63,9 @@ impl LateLintPass<'_> for UnnecessaryStruct { // all fields match, no base given path.span }, - (Some(path), StructTailExpr::Base(base)) if base_is_suitable(cx, expr, base) && path_matches_base(path, base) => { + (Some(path), StructTailExpr::Base(base)) + if base_is_suitable(cx, expr, base) && path_matches_base(path, base) => + { // all fields match, has base: ensure that the path of the base matches base.span }, diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs index 3b05abc546f..7ffab81a544 100644 --- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs +++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs @@ -1,8 +1,10 @@ use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg, span_lint_and_then}; use clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_context}; -use clippy_utils::sugg::Sugg; +use clippy_utils::sugg::{DiagExt as _, Sugg}; use clippy_utils::ty::{is_copy, is_type_diagnostic_item, same_type_and_consts}; -use clippy_utils::{get_parent_expr, is_trait_method, is_ty_alias, path_to_local}; +use clippy_utils::{ + get_parent_expr, is_inherent_method_call, is_trait_item, is_trait_method, is_ty_alias, path_to_local, +}; use rustc_errors::Applicability; use rustc_hir::def_id::DefId; use rustc_hir::{BindingMode, Expr, ExprKind, HirId, MatchSource, Node, PatKind}; @@ -10,7 +12,7 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::Obligation; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::traits::ObligationCause; -use rustc_middle::ty::{self, EarlyBinder, GenericArg, GenericArgsRef, Ty, TypeVisitableExt}; +use rustc_middle::ty::{self, AdtDef, EarlyBinder, GenericArg, GenericArgsRef, Ty, TypeVisitableExt}; use rustc_session::impl_lint_pass; use rustc_span::{Span, sym}; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt; @@ -382,3 +384,50 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { } } } + +/// Check if `arg` is a `Into::into` or `From::from` applied to `receiver` to give `expr`, through a +/// higher-order mapping function. 
+pub fn check_function_application(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) { + if has_eligible_receiver(cx, recv, expr) + && (is_trait_item(cx, arg, sym::Into) || is_trait_item(cx, arg, sym::From)) + && let ty::FnDef(_, args) = cx.typeck_results().expr_ty(arg).kind() + && let &[from_ty, to_ty] = args.into_type_list(cx.tcx).as_slice() + && same_type_and_consts(from_ty, to_ty) + { + span_lint_and_then( + cx, + USELESS_CONVERSION, + expr.span.with_lo(recv.span.hi()), + format!("useless conversion to the same type: `{from_ty}`"), + |diag| { + diag.suggest_remove_item( + cx, + expr.span.with_lo(recv.span.hi()), + "consider removing", + Applicability::MachineApplicable, + ); + }, + ); + } +} + +fn has_eligible_receiver(cx: &LateContext<'_>, recv: &Expr<'_>, expr: &Expr<'_>) -> bool { + let recv_ty = cx.typeck_results().expr_ty(recv); + if is_inherent_method_call(cx, expr) + && let Some(recv_ty_defid) = recv_ty.ty_adt_def().map(AdtDef::did) + { + if let Some(diag_name) = cx.tcx.get_diagnostic_name(recv_ty_defid) + && matches!(diag_name, sym::Option | sym::Result) + { + return true; + } + + if cx.tcx.is_diagnostic_item(sym::ControlFlow, recv_ty_defid) { + return true; + } + } + if is_trait_method(cx, expr, sym::Iterator) { + return true; + } + false +} diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs index 521bf6a5fed..d2970c93f8e 100644 --- a/src/tools/clippy/clippy_lints/src/utils/author.rs +++ b/src/tools/clippy/clippy_lints/src/utils/author.rs @@ -3,8 +3,8 @@ use rustc_ast::LitIntType; use rustc_ast::ast::{LitFloatType, LitKind}; use rustc_data_structures::fx::FxHashMap; use rustc_hir::{ - self as hir, BindingMode, CaptureBy, Closure, ClosureKind, ConstArg, ConstArgKind, CoroutineKind, - ExprKind, FnRetTy, HirId, Lit, PatKind, QPath, StmtKind, TyKind, StructTailExpr, + self as hir, BindingMode, CaptureBy, Closure, ClosureKind, ConstArg, ConstArgKind, CoroutineKind, ExprKind, + FnRetTy, HirId, Lit, PatKind, QPath, StmtKind, StructTailExpr, TyKind, }; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::declare_lint_pass; @@ -625,7 +625,7 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> { }, ExprKind::UnsafeBinderCast(..) 
=> { unimplemented!("unsafe binders are not implemented yet"); - } + }, } } diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs index 5483e80f932..bfcce81c498 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs @@ -1,8 +1,7 @@ use crate::utils::internal_lints::lint_without_lint_pass::is_lint_ref_type; use clippy_utils::diagnostics::span_lint_and_help; use regex::Regex; -use rustc_ast as ast; -use rustc_hir::{Item, ItemKind, Mutability}; +use rustc_hir::{Attribute, Item, ItemKind, Mutability}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; @@ -51,7 +50,7 @@ impl<'tcx> LateLintPass<'tcx> for AlmostStandardFormulation { .hir() .attrs(item.hir_id()) .iter() - .filter_map(|attr| ast::Attribute::doc_str(attr).map(|sym| (sym, attr))); + .filter_map(|attr| Attribute::doc_str(attr).map(|sym| (sym, attr))); if is_lint_ref_type(cx, ty) { for (line, attr) in lines { let cur_line = line.as_str().trim(); diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs index 9e400d2391f..e454427adde 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs @@ -92,7 +92,7 @@ impl<'tcx> LateLintPass<'tcx> for InterningDefinedSymbol { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if let ExprKind::Call(func, [arg]) = &expr.kind && let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(func).kind() - && match_def_path(cx, *def_id, &paths::SYMBOL_INTERN) + && cx.tcx.is_diagnostic_item(sym::SymbolIntern, *def_id) && let Some(Constant::Str(arg)) = ConstEvalCtxt::new(cx).eval_simple(arg) && let value = Symbol::intern(&arg).as_u32() && let Some(&def_id) = self.symbol_map.get(&value) diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs index 496343d82c8..dac1951489c 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs @@ -3,6 +3,7 @@ use clippy_utils::macros::root_macro_call_first_node; use clippy_utils::{is_lint_allowed, match_def_path, paths}; use rustc_ast::ast::LitKind; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; +use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_hir::hir_id::CRATE_HIR_ID; use rustc_hir::intravisit::Visitor; @@ -13,7 +14,6 @@ use rustc_session::impl_lint_pass; use rustc_span::source_map::Spanned; use rustc_span::symbol::Symbol; use rustc_span::{Span, sym}; -use {rustc_ast as ast, rustc_hir as hir}; declare_clippy_lint! 
{ /// ### What it does @@ -249,11 +249,11 @@ fn check_invalid_clippy_version_attribute(cx: &LateContext<'_>, item: &'_ Item<' pub(super) fn extract_clippy_version_value(cx: &LateContext<'_>, item: &'_ Item<'_>) -> Option<Symbol> { let attrs = cx.tcx.hir().attrs(item.hir_id()); attrs.iter().find_map(|attr| { - if let ast::AttrKind::Normal(attr_kind) = &attr.kind + if let hir::AttrKind::Normal(attr_kind) = &attr.kind // Identify attribute - && let [tool_name, attr_name] = &attr_kind.item.path.segments[..] - && tool_name.ident.name == sym::clippy - && attr_name.ident.name == sym::version + && let [tool_name, attr_name] = &attr_kind.path.segments[..] + && tool_name.name == sym::clippy + && attr_name.name == sym::version && let Some(version) = attr.value_str() { Some(version) diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs index 3742be0e103..49aad881994 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs @@ -1,29 +1,29 @@ use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::paths; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::match_type; -use clippy_utils::{match_function_call, paths}; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::Span; +use rustc_span::{Span, sym}; declare_clippy_lint! { /// ### What it does /// - /// Detects symbol comparision using `Symbol::intern`. + /// Detects symbol comparison using `Symbol::intern`. /// /// ### Why is this bad? /// - /// Comparision via `Symbol::as_str()` is faster if the interned symbols are not reused. + /// Comparison via `Symbol::as_str()` is faster if the interned symbols are not reused. /// /// ### Example /// /// None, see suggestion. pub SLOW_SYMBOL_COMPARISONS, internal, - "detects slow comparisions of symbol" + "detects slow comparisons of symbol" } declare_lint_pass!(SlowSymbolComparisons => [SLOW_SYMBOL_COMPARISONS]); @@ -34,7 +34,12 @@ fn check_slow_comparison<'tcx>( op2: &'tcx Expr<'tcx>, ) -> Option<(Span, String)> { if match_type(cx, cx.typeck_results().expr_ty(op1), &paths::SYMBOL) - && let Some([symbol_name_expr]) = match_function_call(cx, op2, &paths::SYMBOL_INTERN) + && let ExprKind::Call(fun, args) = op2.kind + && let ExprKind::Path(ref qpath) = fun.kind + && cx + .tcx + .is_diagnostic_item(sym::SymbolIntern, cx.qpath_res(qpath, fun.hir_id).opt_def_id()?) 
+ && let [symbol_name_expr] = args && let Some(Constant::Str(symbol_name)) = ConstEvalCtxt::new(cx).eval_simple(symbol_name_expr) { Some((op1.span, symbol_name)) diff --git a/src/tools/clippy/clippy_lints/src/zombie_processes.rs b/src/tools/clippy/clippy_lints/src/zombie_processes.rs index 4a13c10166f..a702e0785a9 100644 --- a/src/tools/clippy/clippy_lints/src/zombie_processes.rs +++ b/src/tools/clippy/clippy_lints/src/zombie_processes.rs @@ -2,13 +2,14 @@ use ControlFlow::{Break, Continue}; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::{fn_def_id, get_enclosing_block, match_any_def_paths, match_def_path, path_to_local_id, paths}; use rustc_ast::Mutability; +use rustc_ast::visit::visit_opt; use rustc_errors::Applicability; use rustc_hir::intravisit::{Visitor, walk_block, walk_expr, walk_local}; use rustc_hir::{Expr, ExprKind, HirId, LetStmt, Node, PatKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_session::declare_lint_pass; -use rustc_span::sym; +use rustc_span::{Span, sym}; use std::ops::ControlFlow; declare_clippy_lint! { @@ -22,6 +23,17 @@ declare_clippy_lint! { /// which can eventually lead to resource exhaustion, so it's recommended to call `wait()` in long-running applications. /// Such processes are called "zombie processes". /// + /// To reduce the rate of false positives, if the spawned process is assigned to a binding, the lint actually works the other way around; it + /// conservatively checks that all uses of a variable definitely don't call `wait()` and only then emits a warning. + /// For that reason, a seemingly unrelated use can get called out as calling `wait()` in help messages. + /// + /// ### Control flow + /// If a `wait()` call exists in an if/then block but not in the else block (or there is no else block), + /// then this still gets linted as not calling `wait()` in all code paths. + /// Likewise, when early-returning from the function, `wait()` calls that appear after the return expression + /// are also not accepted. + /// In other words, the `wait()` call must be unconditionally reachable after the spawn expression. + /// /// ### Example /// ```rust /// use std::process::Command; @@ -53,26 +65,47 @@ impl<'tcx> LateLintPass<'tcx> for ZombieProcesses { if let PatKind::Binding(_, local_id, ..) = local.pat.kind && let Some(enclosing_block) = get_enclosing_block(cx, expr.hir_id) => { - let mut vis = WaitFinder::WalkUpTo(cx, local_id); - - // If it does have a `wait()` call, we're done. Don't lint. 
- if let Break(BreakReason::WaitFound) = walk_block(&mut vis, enclosing_block) { - return; - } + let mut vis = WaitFinder { + cx, + local_id, + state: VisitorState::WalkUpToLocal, + early_return: None, + missing_wait_branch: None, + }; + + let res = ( + walk_block(&mut vis, enclosing_block), + vis.missing_wait_branch, + vis.early_return, + ); + + let cause = match res { + (Break(MaybeWait(wait_span)), _, Some(return_span)) => { + Cause::EarlyReturn { wait_span, return_span } + }, + (Break(MaybeWait(_)), _, None) => return, + (Continue(()), None, _) => Cause::NeverWait, + (Continue(()), Some(MissingWaitBranch::MissingElse { if_span, wait_span }), _) => { + Cause::MissingElse { wait_span, if_span } + }, + (Continue(()), Some(MissingWaitBranch::MissingWaitInBranch { branch_span, wait_span }), _) => { + Cause::MissingWaitInBranch { wait_span, branch_span } + }, + }; // Don't emit a suggestion since the binding is used later - check(cx, expr, false); + check(cx, expr, cause, false); }, Node::LetStmt(&LetStmt { pat, .. }) if let PatKind::Wild = pat.kind => { // `let _ = child;`, also dropped immediately without `wait()`ing - check(cx, expr, true); + check(cx, expr, Cause::NeverWait, true); }, Node::Stmt(&Stmt { kind: StmtKind::Semi(_), .. }) => { // Immediately dropped. E.g. `std::process::Command::new("echo").spawn().unwrap();` - check(cx, expr, true); + check(cx, expr, Cause::NeverWait, true); }, _ => {}, } @@ -80,21 +113,10 @@ impl<'tcx> LateLintPass<'tcx> for ZombieProcesses { } } -enum BreakReason { - WaitFound, - EarlyReturn, -} +struct MaybeWait(Span); /// A visitor responsible for finding a `wait()` call on a local variable. /// -/// Conditional `wait()` calls are assumed to not call wait: -/// ```ignore -/// let mut c = Command::new("").spawn().unwrap(); -/// if true { -/// c.wait(); -/// } -/// ``` -/// /// Note that this visitor does NOT explicitly look for `wait()` calls directly, but rather does the /// inverse -- checking if all uses of the local are either: /// - a field access (`child.{stderr,stdin,stdout}`) @@ -104,43 +126,50 @@ enum BreakReason { /// /// None of these are sufficient to prevent zombie processes. /// Doing it like this means more FNs, but FNs are better than FPs. 
-/// -/// `return` expressions, conditional or not, short-circuit the visitor because -/// if a `wait()` call hadn't been found at that point, it might never reach one at a later point: -/// ```ignore -/// let mut c = Command::new("").spawn().unwrap(); -/// if true { -/// return; // Break(BreakReason::EarlyReturn) -/// } -/// c.wait(); // this might not be reachable -/// ``` -enum WaitFinder<'a, 'tcx> { - WalkUpTo(&'a LateContext<'tcx>, HirId), - Found(&'a LateContext<'tcx>, HirId), +struct WaitFinder<'a, 'tcx> { + cx: &'a LateContext<'tcx>, + local_id: HirId, + state: VisitorState, + early_return: Option<Span>, + // When joining two if branches where one of them doesn't call `wait()`, stores its span for more targeted help + // messages + missing_wait_branch: Option<MissingWaitBranch>, +} + +#[derive(PartialEq)] +enum VisitorState { + WalkUpToLocal, + LocalFound, +} + +#[derive(Copy, Clone)] +enum MissingWaitBranch { + MissingElse { if_span: Span, wait_span: Span }, + MissingWaitInBranch { branch_span: Span, wait_span: Span }, } impl<'tcx> Visitor<'tcx> for WaitFinder<'_, 'tcx> { type NestedFilter = nested_filter::OnlyBodies; - type Result = ControlFlow<BreakReason>; + type Result = ControlFlow<MaybeWait>; fn visit_local(&mut self, l: &'tcx LetStmt<'tcx>) -> Self::Result { - if let Self::WalkUpTo(cx, local_id) = *self + if self.state == VisitorState::WalkUpToLocal && let PatKind::Binding(_, pat_id, ..) = l.pat.kind - && local_id == pat_id + && self.local_id == pat_id { - *self = Self::Found(cx, local_id); + self.state = VisitorState::LocalFound; } walk_local(self, l) } fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) -> Self::Result { - let Self::Found(cx, local_id) = *self else { + if self.state != VisitorState::LocalFound { return walk_expr(self, ex); - }; + } - if path_to_local_id(ex, local_id) { - match cx.tcx.parent_hir_node(ex.hir_id) { + if path_to_local_id(ex, self.local_id) { + match self.cx.tcx.parent_hir_node(ex.hir_id) { Node::Stmt(Stmt { kind: StmtKind::Semi(_), .. @@ -148,29 +177,33 @@ impl<'tcx> Visitor<'tcx> for WaitFinder<'_, 'tcx> { Node::Expr(expr) if let ExprKind::Field(..) = expr.kind => {}, Node::Expr(expr) if let ExprKind::AddrOf(_, Mutability::Not, _) = expr.kind => {}, Node::Expr(expr) - if let Some(fn_did) = fn_def_id(cx, expr) - && match_any_def_paths(cx, fn_did, &[&paths::CHILD_ID, &paths::CHILD_KILL]).is_some() => {}, + if let Some(fn_did) = fn_def_id(self.cx, expr) + && match_any_def_paths(self.cx, fn_did, &[&paths::CHILD_ID, &paths::CHILD_KILL]).is_some() => { + }, // Conservatively assume that all other kinds of nodes call `.wait()` somehow. - _ => return Break(BreakReason::WaitFound), + _ => return Break(MaybeWait(ex.span)), } } else { match ex.kind { - ExprKind::Ret(..) => return Break(BreakReason::EarlyReturn), + ExprKind::Ret(e) => { + visit_opt!(self, visit_expr, e); + if self.early_return.is_none() { + self.early_return = Some(ex.span); + } + + return Continue(()); + }, ExprKind::If(cond, then, None) => { walk_expr(self, cond)?; - // A `wait()` call in an if expression with no else is not enough: - // - // let c = spawn(); - // if true { - // c.wait(); - // } - // - // This might not call wait(). However, early returns are propagated, - // because they might lead to a later wait() not being called.
- if let Break(BreakReason::EarlyReturn) = walk_expr(self, then) { - return Break(BreakReason::EarlyReturn); + if let Break(MaybeWait(wait_span)) = walk_expr(self, then) + && self.missing_wait_branch.is_none() + { + self.missing_wait_branch = Some(MissingWaitBranch::MissingElse { + if_span: ex.span, + wait_span, + }); } return Continue(()); @@ -179,22 +212,31 @@ impl<'tcx> Visitor<'tcx> for WaitFinder<'_, 'tcx> { ExprKind::If(cond, then, Some(else_)) => { walk_expr(self, cond)?; - #[expect(clippy::unnested_or_patterns)] match (walk_expr(self, then), walk_expr(self, else_)) { - (Continue(()), Continue(())) + (Continue(()), Continue(())) => {}, // `wait()` in one branch but nothing in the other does not count - | (Continue(()), Break(BreakReason::WaitFound)) - | (Break(BreakReason::WaitFound), Continue(())) => {}, - - // `wait()` in both branches is ok - (Break(BreakReason::WaitFound), Break(BreakReason::WaitFound)) => { - return Break(BreakReason::WaitFound); + (Continue(()), Break(MaybeWait(wait_span))) => { + if self.missing_wait_branch.is_none() { + self.missing_wait_branch = Some(MissingWaitBranch::MissingWaitInBranch { + branch_span: ex.span.shrink_to_lo().to(then.span), + wait_span, + }); + } + }, + (Break(MaybeWait(wait_span)), Continue(())) => { + if self.missing_wait_branch.is_none() { + self.missing_wait_branch = Some(MissingWaitBranch::MissingWaitInBranch { + branch_span: then.span.shrink_to_hi().to(else_.span), + wait_span, + }); + } }, - // Propagate early returns in either branch - (Break(BreakReason::EarlyReturn), _) | (_, Break(BreakReason::EarlyReturn)) => { - return Break(BreakReason::EarlyReturn); + // `wait()` in both branches is ok + (Break(MaybeWait(wait_span)), Break(MaybeWait(_))) => { + self.missing_wait_branch = None; + return Break(MaybeWait(wait_span)); }, } @@ -208,8 +250,40 @@ impl<'tcx> Visitor<'tcx> for WaitFinder<'_, 'tcx> { } fn nested_visit_map(&mut self) -> Self::Map { - let (Self::Found(cx, _) | Self::WalkUpTo(cx, _)) = self; - cx.tcx.hir() + self.cx.tcx.hir() + } +} + +#[derive(Copy, Clone)] +enum Cause { + /// No call to `wait()` at all + NeverWait, + /// `wait()` call exists, but not all code paths definitely lead to one due to + /// an early return + EarlyReturn { wait_span: Span, return_span: Span }, + /// `wait()` call exists in some if branches but not this one + MissingWaitInBranch { wait_span: Span, branch_span: Span }, + /// `wait()` call exists in an if/then branch but it is missing an else block + MissingElse { wait_span: Span, if_span: Span }, +} + +impl Cause { + fn message(self) -> &'static str { + match self { + Cause::NeverWait => "spawned process is never `wait()`ed on", + Cause::EarlyReturn { .. } | Cause::MissingWaitInBranch { .. } | Cause::MissingElse { .. } => { + "spawned process is not `wait()`ed on in all code paths" + }, + } + } + + fn fallback_help(self) -> &'static str { + match self { + Cause::NeverWait => "consider calling `.wait()`", + Cause::EarlyReturn { .. } | Cause::MissingWaitInBranch { .. } | Cause::MissingElse { .. } => { + "consider calling `.wait()` in all code paths" + }, + } } } @@ -220,7 +294,7 @@ impl<'tcx> Visitor<'tcx> for WaitFinder<'_, 'tcx> { /// `let _ = <expr that spawns child>;`. /// /// This checks if the program doesn't unconditionally exit after the spawn expression. 
-fn check<'tcx>(cx: &LateContext<'tcx>, spawn_expr: &'tcx Expr<'tcx>, emit_suggestion: bool) { +fn check<'tcx>(cx: &LateContext<'tcx>, spawn_expr: &'tcx Expr<'tcx>, cause: Cause, emit_suggestion: bool) { let Some(block) = get_enclosing_block(cx, spawn_expr.hir_id) else { return; }; @@ -234,27 +308,46 @@ fn check<'tcx>(cx: &LateContext<'tcx>, spawn_expr: &'tcx Expr<'tcx>, emit_sugges return; } - span_lint_and_then( - cx, - ZOMBIE_PROCESSES, - spawn_expr.span, - "spawned process is never `wait()`ed on", - |diag| { - if emit_suggestion { - diag.span_suggestion( - spawn_expr.span.shrink_to_hi(), - "try", - ".wait()", - Applicability::MaybeIncorrect, + span_lint_and_then(cx, ZOMBIE_PROCESSES, spawn_expr.span, cause.message(), |diag| { + match cause { + Cause::EarlyReturn { wait_span, return_span } => { + diag.span_note( + return_span, + "no `wait()` call exists on the code path to this early return", ); - } else { - diag.note("consider calling `.wait()`"); - } + diag.span_note( + wait_span, + "`wait()` call exists, but it is unreachable due to the early return", + ); + }, + Cause::MissingWaitInBranch { wait_span, branch_span } => { + diag.span_note(branch_span, "`wait()` is not called in this if branch"); + diag.span_note(wait_span, "`wait()` is called in the other branch"); + }, + Cause::MissingElse { if_span, wait_span } => { + diag.span_note( + if_span, + "this if expression has a `wait()` call, but it is missing an else block", + ); + diag.span_note(wait_span, "`wait()` called here"); + }, + Cause::NeverWait => {}, + } + + if emit_suggestion { + diag.span_suggestion( + spawn_expr.span.shrink_to_hi(), + "try", + ".wait()", + Applicability::MaybeIncorrect, + ); + } else { + diag.help(cause.fallback_help()); + } - diag.note("not doing so might leave behind zombie processes") - .note("see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning"); - }, - ); + diag.note("not doing so might leave behind zombie processes") + .note("see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning"); + }); } /// Checks if the given expression exits the process. 
diff --git a/src/tools/clippy/clippy_utils/README.md b/src/tools/clippy/clippy_utils/README.md index 61476a82ba0..73fefbcd570 100644 --- a/src/tools/clippy/clippy_utils/README.md +++ b/src/tools/clippy/clippy_utils/README.md @@ -8,7 +8,7 @@ This crate is only guaranteed to build with this `nightly` toolchain: <!-- begin autogenerated nightly --> ``` -nightly-2024-11-28 +nightly-2024-12-26 ``` <!-- end autogenerated nightly --> diff --git a/src/tools/clippy/clippy_utils/src/ast_utils.rs b/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs index 623d9c76086..623d9c76086 100644 --- a/src/tools/clippy/clippy_utils/src/ast_utils.rs +++ b/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs diff --git a/src/tools/clippy/clippy_utils/src/attrs.rs b/src/tools/clippy/clippy_utils/src/attrs.rs index 922afffb876..09de5c05537 100644 --- a/src/tools/clippy/clippy_utils/src/attrs.rs +++ b/src/tools/clippy/clippy_utils/src/attrs.rs @@ -133,11 +133,7 @@ fn parse_attrs<F: FnMut(u64)>(sess: &Session, attrs: &[impl AttributeExt], name: } } -pub fn get_unique_attr<'a, A: AttributeExt>( - sess: &'a Session, - attrs: &'a [A], - name: &'static str, -) -> Option<&'a A> { +pub fn get_unique_attr<'a, A: AttributeExt>(sess: &'a Session, attrs: &'a [A], name: &'static str) -> Option<&'a A> { let mut unique_attr: Option<&A> = None; for attr in get_attr(sess, attrs, name) { if let Some(duplicate) = unique_attr { diff --git a/src/tools/clippy/clippy_utils/src/higher.rs b/src/tools/clippy/clippy_utils/src/higher.rs index d216879cbd2..4e12577b6df 100644 --- a/src/tools/clippy/clippy_utils/src/higher.rs +++ b/src/tools/clippy/clippy_utils/src/higher.rs @@ -8,7 +8,7 @@ use crate::ty::is_type_diagnostic_item; use rustc_ast::ast; use rustc_hir as hir; -use rustc_hir::{Arm, Block, Expr, ExprKind, StructTailExpr, HirId, LoopSource, MatchSource, Node, Pat, QPath}; +use rustc_hir::{Arm, Block, Expr, ExprKind, HirId, LoopSource, MatchSource, Node, Pat, QPath, StructTailExpr}; use rustc_lint::LateContext; use rustc_span::{Span, sym, symbol}; diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs index 4b604f658b8..ed52c481de1 100644 --- a/src/tools/clippy/clippy_utils/src/hir_utils.rs +++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs @@ -7,10 +7,10 @@ use rustc_data_structures::fx::FxHasher; use rustc_hir::MatchSource::TryDesugar; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ - AssocItemConstraint, BinOpKind, BindingMode, Block, BodyId, Closure, ConstArg, ConstArgKind, Expr, - ExprField, ExprKind, FnRetTy, GenericArg, GenericArgs, HirId, HirIdMap, InlineAsmOperand, LetExpr, Lifetime, - LifetimeName, Pat, PatField, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, TraitBoundModifiers, Ty, - TyKind, StructTailExpr, + AssocItemConstraint, BinOpKind, BindingMode, Block, BodyId, Closure, ConstArg, ConstArgKind, Expr, ExprField, + ExprKind, FnRetTy, GenericArg, GenericArgs, HirId, HirIdMap, InlineAsmOperand, LetExpr, Lifetime, LifetimeName, + Pat, PatField, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, StructTailExpr, TraitBoundModifiers, Ty, + TyKind, }; use rustc_lexer::{TokenKind, tokenize}; use rustc_lint::LateContext; @@ -386,7 +386,7 @@ impl HirEqInterExpr<'_, '_, '_> { self.eq_qpath(l_path, r_path) && match (lo, ro) { (StructTailExpr::Base(l),StructTailExpr::Base(r)) => self.eq_expr(l, r), - (StructTailExpr::None, StructTailExpr::None) => true, + (StructTailExpr::None, StructTailExpr::None) | (StructTailExpr::DefaultFields(_), 
StructTailExpr::DefaultFields(_)) => true, _ => false, } @@ -473,10 +473,10 @@ impl HirEqInterExpr<'_, '_, '_> { (ConstArgKind::Anon(l_an), ConstArgKind::Anon(r_an)) => self.eq_body(l_an.body, r_an.body), (ConstArgKind::Infer(..), ConstArgKind::Infer(..)) => true, // Use explicit match for now since ConstArg is undergoing flux. - (ConstArgKind::Path(..), ConstArgKind::Anon(..)) | (ConstArgKind::Anon(..), ConstArgKind::Path(..)) - | (ConstArgKind::Infer(..), _) | (_, ConstArgKind::Infer(..)) => { - false - }, + (ConstArgKind::Path(..), ConstArgKind::Anon(..)) + | (ConstArgKind::Anon(..), ConstArgKind::Path(..)) + | (ConstArgKind::Infer(..), _) + | (_, ConstArgKind::Infer(..)) => false, } } @@ -1043,7 +1043,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { if let Some(ty) = ty { self.hash_ty(ty); } - } + }, ExprKind::Err(_) => {}, } } @@ -1255,7 +1255,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { }, TyKind::UnsafeBinder(binder) => { self.hash_ty(binder.inner_ty); - } + }, TyKind::Err(_) | TyKind::Infer | TyKind::Never diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index 02bbddb413a..77c597f8534 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ b/src/tools/clippy/clippy_utils/src/lib.rs @@ -1960,43 +1960,6 @@ pub fn in_automatically_derived(tcx: TyCtxt<'_>, id: HirId) -> bool { }) } -/// Matches a function call with the given path and returns the arguments. -/// -/// Usage: -/// -/// ```rust,ignore -/// if let Some(args) = match_function_call(cx, cmp_max_call, &paths::CMP_MAX); -/// ``` -/// This function is deprecated. Use [`match_function_call_with_def_id`]. -pub fn match_function_call<'tcx>( - cx: &LateContext<'tcx>, - expr: &'tcx Expr<'_>, - path: &[&str], -) -> Option<&'tcx [Expr<'tcx>]> { - if let ExprKind::Call(fun, args) = expr.kind - && let ExprKind::Path(ref qpath) = fun.kind - && let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id() - && match_def_path(cx, fun_def_id, path) - { - return Some(args); - }; - None -} - -pub fn match_function_call_with_def_id<'tcx>( - cx: &LateContext<'tcx>, - expr: &'tcx Expr<'_>, - fun_def_id: DefId, -) -> Option<&'tcx [Expr<'tcx>]> { - if let ExprKind::Call(fun, args) = expr.kind - && let ExprKind::Path(ref qpath) = fun.kind - && cx.qpath_res(qpath, fun.hir_id).opt_def_id() == Some(fun_def_id) - { - return Some(args); - }; - None -} - /// Checks if the given `DefId` matches any of the paths. Returns the index of matching path, if /// any. /// @@ -2273,15 +2236,19 @@ pub fn std_or_core(cx: &LateContext<'_>) -> Option<&'static str> { } pub fn is_no_std_crate(cx: &LateContext<'_>) -> bool { - cx.tcx.hir().attrs(hir::CRATE_HIR_ID).iter().any(|attr| { - attr.name_or_empty() == sym::no_std - }) + cx.tcx + .hir() + .attrs(hir::CRATE_HIR_ID) + .iter() + .any(|attr| attr.name_or_empty() == sym::no_std) } pub fn is_no_core_crate(cx: &LateContext<'_>) -> bool { - cx.tcx.hir().attrs(hir::CRATE_HIR_ID).iter().any(|attr| { - attr.name_or_empty() == sym::no_core - }) + cx.tcx + .hir() + .attrs(hir::CRATE_HIR_ID) + .iter() + .any(|attr| attr.name_or_empty() == sym::no_core) } /// Check if parent of a hir node is a trait implementation block. @@ -2980,12 +2947,18 @@ pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool { /// /// Comments are returned wrapped with their relevant delimiters pub fn span_extract_comment(sm: &SourceMap, span: Span) -> String { + span_extract_comments(sm, span).join("\n") +} + +/// Returns all the comments a given span contains. 
+/// +/// Comments are returned wrapped with their relevant delimiters. +pub fn span_extract_comments(sm: &SourceMap, span: Span) -> Vec<String> { let snippet = sm.span_to_snippet(span).unwrap_or_default(); - let res = tokenize_with_text(&snippet) + tokenize_with_text(&snippet) .filter(|(t, ..)| matches!(t, TokenKind::BlockComment { .. } | TokenKind::LineComment { .. })) - .map(|(_, s, _)| s) - .join("\n"); - res + .map(|(_, s, _)| s.to_string()) + .collect::<Vec<_>>() } pub fn span_find_starting_semi(sm: &SourceMap, span: Span) -> Span { diff --git a/src/tools/clippy/clippy_utils/src/msrvs.rs b/src/tools/clippy/clippy_utils/src/msrvs.rs index 1e6368fab36..98bcedecccc 100644 --- a/src/tools/clippy/clippy_utils/src/msrvs.rs +++ b/src/tools/clippy/clippy_utils/src/msrvs.rs @@ -1,5 +1,5 @@ use rustc_ast::attr::AttributeExt; -use rustc_attr_parsing::{parse_version, RustcVersion}; +use rustc_attr_parsing::{RustcVersion, parse_version}; use rustc_session::Session; use rustc_span::{Symbol, sym}; use serde::Deserialize; @@ -19,11 +19,12 @@ macro_rules! msrv_aliases { // names may refer to stabilized feature flags or library items msrv_aliases! { 1,83,0 { CONST_EXTERN_FN, CONST_FLOAT_BITS_CONV, CONST_FLOAT_CLASSIFY } - 1,82,0 { IS_NONE_OR, REPEAT_N } - 1,81,0 { LINT_REASONS_STABILIZATION } - 1,80,0 { BOX_INTO_ITER} + 1,82,0 { IS_NONE_OR, REPEAT_N, RAW_REF_OP } + 1,81,0 { LINT_REASONS_STABILIZATION, ERROR_IN_CORE } + 1,80,0 { BOX_INTO_ITER } 1,77,0 { C_STR_LITERALS } 1,76,0 { PTR_FROM_REF, OPTION_RESULT_INSPECT } + 1,74,0 { REPR_RUST } 1,73,0 { MANUAL_DIV_CEIL } 1,71,0 { TUPLE_ARRAY_CONVERSIONS, BUILD_HASHER_HASH_ONE } 1,70,0 { OPTION_RESULT_IS_VARIANT_AND, BINARY_HEAP_RETAIN } diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs index bb40a9430a7..8cb8cd59014 100644 --- a/src/tools/clippy/clippy_utils/src/paths.rs +++ b/src/tools/clippy/clippy_utils/src/paths.rs @@ -23,7 +23,6 @@ pub const LATE_LINT_PASS: [&str; 3] = ["rustc_lint", "passes", "LateLintPass"]; pub const LINT: [&str; 2] = ["rustc_lint_defs", "Lint"]; pub const SYMBOL: [&str; 3] = ["rustc_span", "symbol", "Symbol"]; pub const SYMBOL_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Symbol", "as_str"]; -pub const SYMBOL_INTERN: [&str; 4] = ["rustc_span", "symbol", "Symbol", "intern"]; pub const SYMBOL_TO_IDENT_STRING: [&str; 4] = ["rustc_span", "symbol", "Symbol", "to_ident_string"]; pub const SYM_MODULE: [&str; 3] = ["rustc_span", "symbol", "sym"]; pub const SYNTAX_CONTEXT: [&str; 3] = ["rustc_span", "hygiene", "SyntaxContext"]; diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index 104ae154e36..428b40c5771 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -109,7 +109,7 @@ fn check_rvalue<'tcx>( ) -> McfResult { match rvalue { Rvalue::ThreadLocalRef(_) => Err((span, "cannot access thread local storage in const fn".into())), - Rvalue::Len(place) | Rvalue::Discriminant(place) | Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) => { + Rvalue::Discriminant(place) | Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) => { check_place(tcx, *place, span, body, msrv) }, Rvalue::CopyForDeref(place) => check_place(tcx, *place, span, body, msrv), diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty/mod.rs index bc3c3ca5c21..32e7c2bbf7c 100644 --- a/src/tools/clippy/clippy_utils/src/ty.rs +++ 
b/src/tools/clippy/clippy_utils/src/ty/mod.rs @@ -171,7 +171,7 @@ pub fn should_call_clone_as_function(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { ) } -/// Returns true if ty has `iter` or `iter_mut` methods +/// If `ty` is known to have a `iter` or `iter_mut` method, returns a symbol representing the type. pub fn has_iter_method(cx: &LateContext<'_>, probably_ref_ty: Ty<'_>) -> Option<Symbol> { // FIXME: instead of this hard-coded list, we should check if `<adt>::iter` // exists and has the desired signature. Unfortunately FnCtxt is not exported diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs index 71499b1293a..7a3a861a9ca 100644 --- a/src/tools/clippy/clippy_utils/src/visitors.rs +++ b/src/tools/clippy/clippy_utils/src/visitors.rs @@ -7,7 +7,7 @@ use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::intravisit::{self, Visitor, walk_block, walk_expr}; use rustc_hir::{ AnonConst, Arm, Block, BlockCheckMode, Body, BodyId, Expr, ExprKind, HirId, ItemId, ItemKind, LetExpr, Pat, QPath, - Stmt, UnOp, UnsafeSource, StructTailExpr, + Stmt, StructTailExpr, UnOp, UnsafeSource, }; use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; @@ -677,6 +677,9 @@ pub fn for_each_unconsumed_temporary<'tcx, B>( ExprKind::Type(e, _) => { helper(typeck, consume, e, f)?; }, + ExprKind::UnsafeBinderCast(_, e, _) => { + helper(typeck, consume, e, f)?; + }, // Either drops temporaries, jumps out of the current expression, or has no sub expression. ExprKind::DropTemps(_) @@ -694,7 +697,6 @@ pub fn for_each_unconsumed_temporary<'tcx, B>( | ExprKind::Continue(_) | ExprKind::InlineAsm(_) | ExprKind::OffsetOf(..) - | ExprKind::UnsafeBinderCast(..) | ExprKind::Err(_) => (), } ControlFlow::Continue(()) diff --git a/src/tools/clippy/lintcheck/src/main.rs b/src/tools/clippy/lintcheck/src/main.rs index 8c62dd3ed38..03e2a24f6f9 100644 --- a/src/tools/clippy/lintcheck/src/main.rs +++ b/src/tools/clippy/lintcheck/src/main.rs @@ -17,7 +17,8 @@ #![allow( clippy::collapsible_else_if, clippy::needless_borrows_for_generic_args, - clippy::module_name_repetitions + clippy::module_name_repetitions, + clippy::literal_string_with_formatting_args )] mod config; diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain index fb159ca2ae0..1000d90f52a 100644 --- a/src/tools/clippy/rust-toolchain +++ b/src/tools/clippy/rust-toolchain @@ -1,6 +1,6 @@ [toolchain] # begin autogenerated nightly -channel = "nightly-2024-11-28" +channel = "nightly-2024-12-26" # end autogenerated nightly components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] profile = "minimal" diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs index 32ee668cda1..75ef60a5dc8 100644 --- a/src/tools/clippy/src/driver.rs +++ b/src/tools/clippy/src/driver.rs @@ -285,7 +285,7 @@ pub fn main() { let cap_lints_allow = arg_value(&orig_args, "--cap-lints", |val| val == "allow").is_some() && arg_value(&orig_args, "--force-warn", |val| val.contains("clippy::")).is_none(); - // If `--no-deps` is enabled only lint the primary pacakge + // If `--no-deps` is enabled only lint the primary package let relevant_package = !no_deps || env::var("CARGO_PRIMARY_PACKAGE").is_ok(); // Do not run Clippy for Cargo's info queries so that invalid CLIPPY_ARGS are not cached @@ -303,7 +303,7 @@ pub fn main() { .set_using_internal_features(using_internal_features) .run(); } - return Ok(()); + Ok(()) })) } diff --git 
a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed index 98591e15bec..3bcabb4ab2d 100644 --- a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed +++ b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed @@ -23,7 +23,7 @@ fn main() { let _ = rustc_span::sym::proc_dash_macro; // interning a keyword - let _ = rustc_span::symbol::kw::SelfLower; + let _ = rustc_span::kw::SelfLower; // Interning a symbol that is not defined let _ = Symbol::intern("xyz123"); diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr index 6d86768d344..c4d0308979f 100644 --- a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr +++ b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr @@ -27,7 +27,7 @@ error: interning a defined symbol --> tests/ui-internal/interning_defined_symbol.rs:26:13 | LL | let _ = Symbol::intern("self"); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::symbol::kw::SelfLower` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::kw::SelfLower` error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed index 8e7f020c1f6..3d9deb705ac 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed +++ b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed @@ -14,8 +14,8 @@ use rustc_span::symbol::{Ident, Symbol}; fn main() { Symbol::intern("foo") == rustc_span::sym::clippy; - Symbol::intern("foo") == rustc_span::symbol::kw::SelfLower; - Symbol::intern("foo") != rustc_span::symbol::kw::SelfUpper; + Symbol::intern("foo") == rustc_span::kw::SelfLower; + Symbol::intern("foo") != rustc_span::kw::SelfUpper; Ident::empty().name == rustc_span::sym::clippy; rustc_span::sym::clippy == Ident::empty().name; } diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr index 668c11722f9..1742603eff6 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr +++ b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr @@ -15,13 +15,13 @@ error: unnecessary `Symbol` to string conversion --> tests/ui-internal/unnecessary_symbol_str.rs:17:5 | LL | Symbol::intern("foo").to_string() == "self"; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") == rustc_span::symbol::kw::SelfLower` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") == rustc_span::kw::SelfLower` error: unnecessary `Symbol` to string conversion --> tests/ui-internal/unnecessary_symbol_str.rs:18:5 | LL | Symbol::intern("foo").to_ident_string() != "Self"; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") != rustc_span::symbol::kw::SelfUpper` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") != rustc_span::kw::SelfUpper` error: unnecessary `Symbol` to string conversion --> tests/ui-internal/unnecessary_symbol_str.rs:19:5 diff --git a/src/tools/clippy/tests/ui-toml/indexing_slicing/clippy.toml b/src/tools/clippy/tests/ui-toml/indexing_slicing/clippy.toml new file mode 100644 index 00000000000..7e83868332f --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/indexing_slicing/clippy.toml @@ -0,0 +1 @@ +allow-indexing-slicing-in-tests 
= true diff --git a/src/tools/clippy/tests/ui-toml/indexing_slicing/indexing_slicing.rs b/src/tools/clippy/tests/ui-toml/indexing_slicing/indexing_slicing.rs new file mode 100644 index 00000000000..0a0da88ea1f --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/indexing_slicing/indexing_slicing.rs @@ -0,0 +1,19 @@ +//@compile-flags: --test +#![warn(clippy::indexing_slicing)] +#![allow(clippy::no_effect)] + +fn main() { + let x = [1, 2, 3, 4]; + let index: usize = 1; + &x[index..]; +} + +#[cfg(test)] +mod tests { + #[test] + fn test_fn() { + let x = [1, 2, 3, 4]; + let index: usize = 1; + &x[index..]; + } +} diff --git a/src/tools/clippy/tests/ui-toml/indexing_slicing/indexing_slicing.stderr b/src/tools/clippy/tests/ui-toml/indexing_slicing/indexing_slicing.stderr new file mode 100644 index 00000000000..5a4de8337b4 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/indexing_slicing/indexing_slicing.stderr @@ -0,0 +1,12 @@ +error: slicing may panic + --> tests/ui-toml/indexing_slicing/indexing_slicing.rs:8:6 + | +LL | &x[index..]; + | ^^^^^^^^^^ + | + = help: consider using `.get(n..)` or .get_mut(n..)` instead + = note: `-D clippy::indexing-slicing` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::indexing_slicing)]` + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs index 8a6dd36501c..184c6d17ba4 100644 --- a/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs +++ b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs @@ -1,4 +1,5 @@ #![warn(clippy::large_include_file)] +#![allow(clippy::literal_string_with_formatting_args)] // Good const GOOD_INCLUDE_BYTES: &[u8; 68] = include_bytes!("../../ui/author.rs"); diff --git a/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr index 9e1494a47bb..82b926cc53b 100644 --- a/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr +++ b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr @@ -1,5 +1,5 @@ error: attempted to include a large file - --> tests/ui-toml/large_include_file/large_include_file.rs:13:43 + --> tests/ui-toml/large_include_file/large_include_file.rs:14:43 | LL | const TOO_BIG_INCLUDE_BYTES: &[u8; 654] = include_bytes!("too_big.txt"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,7 +9,7 @@ LL | const TOO_BIG_INCLUDE_BYTES: &[u8; 654] = include_bytes!("too_big.txt"); = help: to override `-D warnings` add `#[allow(clippy::large_include_file)]` error: attempted to include a large file - --> tests/ui-toml/large_include_file/large_include_file.rs:15:35 + --> tests/ui-toml/large_include_file/large_include_file.rs:16:35 | LL | const TOO_BIG_INCLUDE_STR: &str = include_str!("too_big.txt"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -17,7 +17,7 @@ LL | const TOO_BIG_INCLUDE_STR: &str = include_str!("too_big.txt"); = note: the configuration allows a maximum size of 600 bytes error: attempted to include a large file - --> tests/ui-toml/large_include_file/large_include_file.rs:18:1 + --> tests/ui-toml/large_include_file/large_include_file.rs:19:1 | LL | #[doc = include_str!("too_big.txt")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed 
b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed new file mode 100644 index 00000000000..36540bf1dcf --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed @@ -0,0 +1,24 @@ +#![deny(clippy::index_refutable_slice)] + +fn below_limit() { + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some([_, _, _, _, _, _, _, slice_7, ..]) = slice { + //~^ ERROR: binding can be a slice pattern + // This would usually not be linted but is included now due to the + // index limit in the config file + println!("{}", slice_7); + } +} + +fn above_limit() { + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some(slice) = slice { + // This will not be linted as 8 is above the limit + println!("{}", slice[8]); + } +} + +fn main() { + below_limit(); + above_limit(); +} diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs index e64c8ff3290..da76bb20fd9 100644 --- a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs +++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs @@ -1,7 +1,5 @@ #![deny(clippy::index_refutable_slice)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - fn below_limit() { let slice: Option<&[u32]> = Some(&[1, 2, 3]); if let Some(slice) = slice { diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr index 3ea600c7d7b..022deb330e6 100644 --- a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr +++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr @@ -1,5 +1,5 @@ error: this binding can be a slice pattern to avoid indexing - --> tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs:7:17 + --> tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs:5:17 | LL | if let Some(slice) = slice { | ^^^^^ @@ -9,14 +9,14 @@ note: the lint level is defined here | LL | #![deny(clippy::index_refutable_slice)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([_, _, _, _, _, _, _, slice_7, ..]) = slice { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([_, _, _, _, _, _, _, slice_7, ..]) = slice { +LL | +LL | // This would usually not be linted but is included now due to the +LL | // index limit in the config file +LL ~ println!("{}", slice_7); | -LL | println!("{}", slice_7); - | ~~~~~~~ error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr index 6fa583fc041..200129da25f 100644 --- a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr +++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr @@ -6,6 +6,7 @@ error: error reading Clippy's configuration file: unknown field `foobar`, expect allow-comparison-to-zero allow-dbg-in-tests allow-expect-in-tests + allow-indexing-slicing-in-tests allow-mixed-uninlined-format-args 
allow-one-hash-in-raw-strings allow-panic-in-tests @@ -93,6 +94,7 @@ error: error reading Clippy's configuration file: unknown field `barfoo`, expect allow-comparison-to-zero allow-dbg-in-tests allow-expect-in-tests + allow-indexing-slicing-in-tests allow-mixed-uninlined-format-args allow-one-hash-in-raw-strings allow-panic-in-tests @@ -180,6 +182,7 @@ error: error reading Clippy's configuration file: unknown field `allow_mixed_uni allow-comparison-to-zero allow-dbg-in-tests allow-expect-in-tests + allow-indexing-slicing-in-tests allow-mixed-uninlined-format-args allow-one-hash-in-raw-strings allow-panic-in-tests diff --git a/src/tools/clippy/tests/ui/as_pointer_underscore.fixed b/src/tools/clippy/tests/ui/as_pointer_underscore.fixed new file mode 100644 index 00000000000..db06486ecb0 --- /dev/null +++ b/src/tools/clippy/tests/ui/as_pointer_underscore.fixed @@ -0,0 +1,15 @@ +#![warn(clippy::as_pointer_underscore)] +#![crate_type = "lib"] +#![no_std] + +struct S; + +fn f(s: &S) -> usize { + &s as *const &S as usize + //~^ ERROR: using inferred pointer cast +} + +fn g(s: &mut S) -> usize { + s as *mut S as usize + //~^ ERROR: using inferred pointer cast +} diff --git a/src/tools/clippy/tests/ui/as_pointer_underscore.rs b/src/tools/clippy/tests/ui/as_pointer_underscore.rs new file mode 100644 index 00000000000..955c702ccc9 --- /dev/null +++ b/src/tools/clippy/tests/ui/as_pointer_underscore.rs @@ -0,0 +1,15 @@ +#![warn(clippy::as_pointer_underscore)] +#![crate_type = "lib"] +#![no_std] + +struct S; + +fn f(s: &S) -> usize { + &s as *const _ as usize + //~^ ERROR: using inferred pointer cast +} + +fn g(s: &mut S) -> usize { + s as *mut _ as usize + //~^ ERROR: using inferred pointer cast +} diff --git a/src/tools/clippy/tests/ui/as_pointer_underscore.stderr b/src/tools/clippy/tests/ui/as_pointer_underscore.stderr new file mode 100644 index 00000000000..270056f3645 --- /dev/null +++ b/src/tools/clippy/tests/ui/as_pointer_underscore.stderr @@ -0,0 +1,17 @@ +error: using inferred pointer cast + --> tests/ui/as_pointer_underscore.rs:8:11 + | +LL | &s as *const _ as usize + | ^^^^^^^^ help: use explicit type: `*const &S` + | + = note: `-D clippy::as-pointer-underscore` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::as_pointer_underscore)]` + +error: using inferred pointer cast + --> tests/ui/as_pointer_underscore.rs:13:10 + | +LL | s as *mut _ as usize + | ^^^^^^ help: use explicit type: `*mut S` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs index fbf84337382..1815dd58f51 100644 --- a/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs +++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs @@ -2,6 +2,7 @@ #![allow(incomplete_features)] #![allow(clippy::field_reassign_with_default)] #![allow(clippy::eq_op)] +#![allow(clippy::literal_string_with_formatting_args)] extern crate proc_macro; diff --git a/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.fixed b/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.fixed new file mode 100644 index 00000000000..2950b158deb --- /dev/null +++ b/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.fixed @@ -0,0 +1,11 @@ +// Make sure that `ref_as_ptr` is not emitted when `borrow_as_ptr` is. 
+ +#![warn(clippy::ref_as_ptr, clippy::borrow_as_ptr)] + +fn f<T>(_: T) {} + +fn main() { + let mut val = 0; + f(&raw const val); + f(&raw mut val); +} diff --git a/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.rs b/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.rs new file mode 100644 index 00000000000..19eb8f29233 --- /dev/null +++ b/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.rs @@ -0,0 +1,11 @@ +// Make sure that `ref_as_ptr` is not emitted when `borrow_as_ptr` is. + +#![warn(clippy::ref_as_ptr, clippy::borrow_as_ptr)] + +fn f<T>(_: T) {} + +fn main() { + let mut val = 0; + f(&val as *const _); + f(&mut val as *mut i32); +} diff --git a/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.stderr b/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.stderr new file mode 100644 index 00000000000..82a27af303c --- /dev/null +++ b/src/tools/clippy/tests/ui/borrow_and_ref_as_ptr.stderr @@ -0,0 +1,17 @@ +error: borrow as raw pointer + --> tests/ui/borrow_and_ref_as_ptr.rs:9:7 + | +LL | f(&val as *const _); + | ^^^^^^^^^^^^^^^^ help: try: `&raw const val` + | + = note: `-D clippy::borrow-as-ptr` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::borrow_as_ptr)]` + +error: borrow as raw pointer + --> tests/ui/borrow_and_ref_as_ptr.rs:10:7 + | +LL | f(&mut val as *mut i32); + | ^^^^^^^^^^^^^^^^^^^^ help: try: `&raw mut val` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.fixed b/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.fixed new file mode 100644 index 00000000000..d6842e60a3e --- /dev/null +++ b/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.fixed @@ -0,0 +1,19 @@ +#![warn(clippy::borrow_as_ptr)] +#![allow(clippy::useless_vec)] + +fn a() -> i32 { + 0 +} + +#[clippy::msrv = "1.82"] +fn main() { + let val = 1; + let _p = &raw const val; + let _p = &0 as *const i32; + let _p = &a() as *const i32; + let vec = vec![1]; + let _p = &vec.len() as *const usize; + + let mut val_mut = 1; + let _p_mut = &raw mut val_mut; +} diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.rs b/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.rs new file mode 100644 index 00000000000..3c9daed18f1 --- /dev/null +++ b/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.rs @@ -0,0 +1,19 @@ +#![warn(clippy::borrow_as_ptr)] +#![allow(clippy::useless_vec)] + +fn a() -> i32 { + 0 +} + +#[clippy::msrv = "1.82"] +fn main() { + let val = 1; + let _p = &val as *const i32; + let _p = &0 as *const i32; + let _p = &a() as *const i32; + let vec = vec![1]; + let _p = &vec.len() as *const usize; + + let mut val_mut = 1; + let _p_mut = &mut val_mut as *mut i32; +} diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.stderr b/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.stderr new file mode 100644 index 00000000000..5611fcae8d4 --- /dev/null +++ b/src/tools/clippy/tests/ui/borrow_as_ptr_raw_ref.stderr @@ -0,0 +1,17 @@ +error: borrow as raw pointer + --> tests/ui/borrow_as_ptr_raw_ref.rs:11:14 + | +LL | let _p = &val as *const i32; + | ^^^^^^^^^^^^^^^^^^ help: try: `&raw const val` + | + = note: `-D clippy::borrow-as-ptr` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::borrow_as_ptr)]` + +error: borrow as raw pointer + --> tests/ui/borrow_as_ptr_raw_ref.rs:18:18 + | +LL | let _p_mut = &mut val_mut as *mut i32; + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&raw mut val_mut` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/comparison_chain.rs 
b/src/tools/clippy/tests/ui/comparison_chain.rs index 266cee4c338..cab460d100d 100644 --- a/src/tools/clippy/tests/ui/comparison_chain.rs +++ b/src/tools/clippy/tests/ui/comparison_chain.rs @@ -1,3 +1,4 @@ +//@no-rustfix #![allow(dead_code)] #![warn(clippy::comparison_chain)] @@ -238,4 +239,16 @@ const fn sign_i8(n: i8) -> Sign { } } +fn needs_parens() -> &'static str { + let (x, y) = (1, 2); + if x + 1 > y * 2 { + //~^ ERROR: `if` chain can be rewritten with `match` + "aa" + } else if x + 1 < y * 2 { + "bb" + } else { + "cc" + } +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/comparison_chain.stderr b/src/tools/clippy/tests/ui/comparison_chain.stderr index 96d8d819e6a..814004e3d4b 100644 --- a/src/tools/clippy/tests/ui/comparison_chain.stderr +++ b/src/tools/clippy/tests/ui/comparison_chain.stderr @@ -1,5 +1,5 @@ error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:14:5 + --> tests/ui/comparison_chain.rs:15:5 | LL | / if x > y { LL | | @@ -7,14 +7,13 @@ LL | | a() LL | | } else if x < y { LL | | b() LL | | } - | |_____^ + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&y) {...}` | - = help: consider rewriting the `if` chain to use `cmp` and `match` = note: `-D clippy::comparison-chain` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::comparison_chain)]` error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:28:5 + --> tests/ui/comparison_chain.rs:29:5 | LL | / if x > y { LL | | @@ -23,12 +22,10 @@ LL | | } else if x < y { ... | LL | | c() LL | | } - | |_____^ - | - = help: consider rewriting the `if` chain to use `cmp` and `match` + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&y) {...}` error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:37:5 + --> tests/ui/comparison_chain.rs:38:5 | LL | / if x > y { LL | | @@ -37,12 +34,10 @@ LL | | } else if y > x { ... | LL | | c() LL | | } - | |_____^ - | - = help: consider rewriting the `if` chain to use `cmp` and `match` + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&y) {...}` error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:46:5 + --> tests/ui/comparison_chain.rs:47:5 | LL | / if x > 1 { LL | | @@ -51,12 +46,10 @@ LL | | } else if x < 1 { ... | LL | | c() LL | | } - | |_____^ - | - = help: consider rewriting the `if` chain to use `cmp` and `match` + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&1) {...}` error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:121:5 + --> tests/ui/comparison_chain.rs:122:5 | LL | / if x > y { LL | | @@ -64,12 +57,10 @@ LL | | a() LL | | } else if x < y { LL | | b() LL | | } - | |_____^ - | - = help: consider rewriting the `if` chain to use `cmp` and `match` + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&y) {...}` error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:128:5 + --> tests/ui/comparison_chain.rs:129:5 | LL | / if x > y { LL | | @@ -78,12 +69,10 @@ LL | | } else if x < y { ... 
| LL | | c() LL | | } - | |_____^ - | - = help: consider rewriting the `if` chain to use `cmp` and `match` + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&y) {...}` error: `if` chain can be rewritten with `match` - --> tests/ui/comparison_chain.rs:137:5 + --> tests/ui/comparison_chain.rs:138:5 | LL | / if x > y { LL | | @@ -92,9 +81,19 @@ LL | | } else if y > x { ... | LL | | c() LL | | } - | |_____^ + | |_____^ help: consider rewriting the `if` chain with `match`: `match x.cmp(&y) {...}` + +error: `if` chain can be rewritten with `match` + --> tests/ui/comparison_chain.rs:244:5 | - = help: consider rewriting the `if` chain to use `cmp` and `match` +LL | / if x + 1 > y * 2 { +LL | | +LL | | "aa" +LL | | } else if x + 1 < y * 2 { +... | +LL | | "cc" +LL | | } + | |_____^ help: consider rewriting the `if` chain with `match`: `match (x + 1).cmp(&(y * 2)) {...}` -error: aborting due to 7 previous errors +error: aborting due to 8 previous errors diff --git a/src/tools/clippy/tests/ui/default_union_representation.rs b/src/tools/clippy/tests/ui/default_union_representation.rs index 41308b077ba..ba63cde2fa9 100644 --- a/src/tools/clippy/tests/ui/default_union_representation.rs +++ b/src/tools/clippy/tests/ui/default_union_representation.rs @@ -1,5 +1,6 @@ #![feature(transparent_unions)] #![warn(clippy::default_union_representation)] +#![allow(clippy::repr_packed_without_abi)] union NoAttribute { //~^ ERROR: this union has the default representation diff --git a/src/tools/clippy/tests/ui/default_union_representation.stderr b/src/tools/clippy/tests/ui/default_union_representation.stderr index c7ef70a0b8e..d558a3e8de1 100644 --- a/src/tools/clippy/tests/ui/default_union_representation.stderr +++ b/src/tools/clippy/tests/ui/default_union_representation.stderr @@ -1,5 +1,5 @@ error: this union has the default representation - --> tests/ui/default_union_representation.rs:4:1 + --> tests/ui/default_union_representation.rs:5:1 | LL | / union NoAttribute { LL | | @@ -13,7 +13,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::default_union_representation)]` error: this union has the default representation - --> tests/ui/default_union_representation.rs:17:1 + --> tests/ui/default_union_representation.rs:18:1 | LL | / union ReprPacked { LL | | @@ -25,7 +25,7 @@ LL | | } = help: consider annotating `ReprPacked` with `#[repr(C)]` to explicitly specify memory layout error: this union has the default representation - --> tests/ui/default_union_representation.rs:36:1 + --> tests/ui/default_union_representation.rs:37:1 | LL | / union ReprAlign { LL | | @@ -37,7 +37,7 @@ LL | | } = help: consider annotating `ReprAlign` with `#[repr(C)]` to explicitly specify memory layout error: this union has the default representation - --> tests/ui/default_union_representation.rs:57:1 + --> tests/ui/default_union_representation.rs:58:1 | LL | / union ZSTAndTwoFields { LL | | diff --git a/src/tools/clippy/tests/ui/derive.rs b/src/tools/clippy/tests/ui/derive.rs index b06dd78608f..d03cc01a08b 100644 --- a/src/tools/clippy/tests/ui/derive.rs +++ b/src/tools/clippy/tests/ui/derive.rs @@ -2,6 +2,7 @@ clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::needless_lifetimes, + clippy::repr_packed_without_abi, dead_code )] #![warn(clippy::expl_impl_clone_on_copy)] diff --git a/src/tools/clippy/tests/ui/derive.stderr b/src/tools/clippy/tests/ui/derive.stderr index 0eb4b3c1ada..d70a5985522 100644 --- a/src/tools/clippy/tests/ui/derive.stderr +++ 
b/src/tools/clippy/tests/ui/derive.stderr @@ -1,5 +1,5 @@ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:13:1 + --> tests/ui/derive.rs:14:1 | LL | / impl Clone for Qux { LL | | @@ -10,7 +10,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:13:1 + --> tests/ui/derive.rs:14:1 | LL | / impl Clone for Qux { LL | | @@ -23,7 +23,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::expl_impl_clone_on_copy)]` error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:38:1 + --> tests/ui/derive.rs:39:1 | LL | / impl<'a> Clone for Lt<'a> { LL | | @@ -34,7 +34,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:38:1 + --> tests/ui/derive.rs:39:1 | LL | / impl<'a> Clone for Lt<'a> { LL | | @@ -45,7 +45,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:50:1 + --> tests/ui/derive.rs:51:1 | LL | / impl Clone for BigArray { LL | | @@ -56,7 +56,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:50:1 + --> tests/ui/derive.rs:51:1 | LL | / impl Clone for BigArray { LL | | @@ -67,7 +67,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:62:1 + --> tests/ui/derive.rs:63:1 | LL | / impl Clone for FnPtr { LL | | @@ -78,7 +78,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:62:1 + --> tests/ui/derive.rs:63:1 | LL | / impl Clone for FnPtr { LL | | @@ -89,7 +89,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:83:1 + --> tests/ui/derive.rs:84:1 | LL | / impl<T: Clone> Clone for Generic2<T> { LL | | @@ -100,7 +100,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:83:1 + --> tests/ui/derive.rs:84:1 | LL | / impl<T: Clone> Clone for Generic2<T> { LL | | diff --git a/src/tools/clippy/tests/ui/doc/doc_lazy_list.fixed b/src/tools/clippy/tests/ui/doc/doc_lazy_list.fixed index da537518a2b..0822cc7c635 100644 --- a/src/tools/clippy/tests/ui/doc/doc_lazy_list.fixed +++ b/src/tools/clippy/tests/ui/doc/doc_lazy_list.fixed @@ -75,3 +75,9 @@ fn seven() {} /// ] //~^ ERROR: doc list item without indentation fn eight() {} + +#[rustfmt::skip] +// https://github.com/rust-lang/rust-clippy/issues/13705 +/// - \[text in square brackets\] with a long following description +/// that goes over multiple lines +pub fn backslash_escaped_square_braces() {} diff --git a/src/tools/clippy/tests/ui/doc/doc_lazy_list.rs b/src/tools/clippy/tests/ui/doc/doc_lazy_list.rs index 3cc18e35780..068de140e00 100644 --- a/src/tools/clippy/tests/ui/doc/doc_lazy_list.rs +++ b/src/tools/clippy/tests/ui/doc/doc_lazy_list.rs @@ -75,3 +75,9 @@ fn seven() {} /// ] //~^ ERROR: doc list item without indentation fn eight() {} + +#[rustfmt::skip] +// https://github.com/rust-lang/rust-clippy/issues/13705 +/// - \[text in square brackets\] with a long following description +/// that goes over multiple lines +pub fn backslash_escaped_square_braces() {} diff --git a/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.fixed b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.fixed new file mode 100644 index 00000000000..8939a03d2e3 --- /dev/null +++ b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.fixed @@ -0,0 +1,133 @@ +// 
https://github.com/rust-lang/rust/issues/133150 +#![warn(clippy::doc_nested_refdefs)] +#[rustfmt::skip] +/// > [link][]: def +//~^ ERROR: link reference defined in quote +/// +/// > [link][]: def (title) +//~^ ERROR: link reference defined in quote +/// +/// > [link][]: def "title" +//~^ ERROR: link reference defined in quote +/// +/// > [link]: not def +/// +/// > [link][]: notdef +/// +/// > [link]\: notdef +pub struct Empty; + +#[rustfmt::skip] +/// > [link][]: def +//~^ ERROR: link reference defined in quote +/// > inner text +/// +/// > [link][]: def (title) +//~^ ERROR: link reference defined in quote +/// > inner text +/// +/// > [link][]: def "title" +//~^ ERROR: link reference defined in quote +/// > inner text +/// +/// > [link]: not def +/// > inner text +/// +/// > [link][]: notdef +/// > inner text +/// +/// > [link]\: notdef +/// > inner text +pub struct NotEmpty; + +#[rustfmt::skip] +/// > [link][]: def +//~^ ERROR: link reference defined in quote +/// > +/// > inner text +/// +/// > [link][]: def (title) +//~^ ERROR: link reference defined in quote +/// > +/// > inner text +/// +/// > [link][]: def "title" +//~^ ERROR: link reference defined in quote +/// > +/// > inner text +/// +/// > [link]: not def +/// > +/// > inner text +/// +/// > [link][]: notdef +/// > +/// > inner text +/// +/// > [link]\: notdef +/// > +/// > inner text +pub struct NotEmptyLoose; + +#[rustfmt::skip] +/// > first lines +/// > [link]: def +/// +/// > first lines +/// > [link]: def (title) +/// +/// > firs lines +/// > [link]: def "title" +/// +/// > firs lines +/// > [link]: not def +/// +/// > first lines +/// > [link][]: notdef +/// +/// > first lines +/// > [link]\: notdef +pub struct NotAtStartTight; + +#[rustfmt::skip] +/// > first lines +/// > +/// > [link]: def +/// +/// > first lines +/// > +/// > [link]: def (title) +/// +/// > firs lines +/// > +/// > [link]: def "title" +/// +/// > firs lines +/// > +/// > [link]: not def +/// +/// > first lines +/// > +/// > [link][]: notdef +/// +/// > first lines +/// > +/// > [link]\: notdef +pub struct NotAtStartLoose; + +#[rustfmt::skip] +/// > - [link][]: def +//~^ ERROR: link reference defined in list item +/// > +/// > - [link][]: def (title) +//~^ ERROR: link reference defined in list item +/// > +/// > - [link][]: def "title" +//~^ ERROR: link reference defined in list item +/// > +/// > - [link]: not def +/// > +/// > - [link][]: notdef +/// > +/// > - [link]\: notdef +pub struct ListNested; diff --git a/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.rs b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.rs new file mode 100644 index 00000000000..f861242384b --- /dev/null +++ b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.rs @@ -0,0 +1,133 @@ +// https://github.com/rust-lang/rust/issues/133150 +#![warn(clippy::doc_nested_refdefs)] +#[rustfmt::skip] +/// > [link]: def +//~^ ERROR: link reference defined in quote +/// +/// > [link]: def (title) +//~^ ERROR: link reference defined in quote +/// +/// > [link]: def "title" +//~^ ERROR: link reference defined in quote +/// +/// > [link]: not def +/// +/// > [link][]: notdef +/// +/// > [link]\: notdef +pub struct Empty; + +#[rustfmt::skip] +/// > [link]: def +//~^ ERROR: link reference defined in quote +/// > inner text +/// +/// > [link]: def (title) +//~^ ERROR: link reference defined in quote +/// > inner text +/// +/// > [link]: def "title" +//~^ ERROR: link reference defined in quote +/// > inner text +/// +/// > [link]: not def +/// > inner text +/// +/// > 
[link][]: notdef +/// > inner text +/// +/// > [link]\: notdef +/// > inner text +pub struct NotEmpty; + +#[rustfmt::skip] +/// > [link]: def +//~^ ERROR: link reference defined in quote +/// > +/// > inner text +/// +/// > [link]: def (title) +//~^ ERROR: link reference defined in quote +/// > +/// > inner text +/// +/// > [link]: def "title" +//~^ ERROR: link reference defined in quote +/// > +/// > inner text +/// +/// > [link]: not def +/// > +/// > inner text +/// +/// > [link][]: notdef +/// > +/// > inner text +/// +/// > [link]\: notdef +/// > +/// > inner text +pub struct NotEmptyLoose; + +#[rustfmt::skip] +/// > first lines +/// > [link]: def +/// +/// > first lines +/// > [link]: def (title) +/// +/// > firs lines +/// > [link]: def "title" +/// +/// > firs lines +/// > [link]: not def +/// +/// > first lines +/// > [link][]: notdef +/// +/// > first lines +/// > [link]\: notdef +pub struct NotAtStartTight; + +#[rustfmt::skip] +/// > first lines +/// > +/// > [link]: def +/// +/// > first lines +/// > +/// > [link]: def (title) +/// +/// > firs lines +/// > +/// > [link]: def "title" +/// +/// > firs lines +/// > +/// > [link]: not def +/// +/// > first lines +/// > +/// > [link][]: notdef +/// +/// > first lines +/// > +/// > [link]\: notdef +pub struct NotAtStartLoose; + +#[rustfmt::skip] +/// > - [link]: def +//~^ ERROR: link reference defined in list item +/// > +/// > - [link]: def (title) +//~^ ERROR: link reference defined in list item +/// > +/// > - [link]: def "title" +//~^ ERROR: link reference defined in list item +/// > +/// > - [link]: not def +/// > +/// > - [link][]: notdef +/// > +/// > - [link]\: notdef +pub struct ListNested; diff --git a/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.stderr b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.stderr new file mode 100644 index 00000000000..448659b8941 --- /dev/null +++ b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_blockquote.stderr @@ -0,0 +1,148 @@ +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:4:7 + | +LL | /// > [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation + = note: `-D clippy::doc-nested-refdefs` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::doc_nested_refdefs)]` +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:7:7 + | +LL | /// > [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def (title) + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:10:7 + | +LL | /// > [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def "title" + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:21:7 + | +LL | /// > [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:25:7 + | +LL | /// > [link]: def 
(title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def (title) + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:29:7 + | +LL | /// > [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def "title" + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:44:7 + | +LL | /// > [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:49:7 + | +LL | /// > [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def (title) + | ++ + +error: link reference defined in quote + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:54:7 + | +LL | /// > [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > [link][]: def "title" + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:119:9 + | +LL | /// > - [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > - [link][]: def + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:122:9 + | +LL | /// > - [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > - [link][]: def (title) + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_blockquote.rs:125:9 + | +LL | /// > - [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// > - [link][]: def "title" + | ++ + +error: aborting due to 12 previous errors + diff --git a/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.fixed b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.fixed new file mode 100644 index 00000000000..fcfcfcc4073 --- /dev/null +++ b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.fixed @@ -0,0 +1,71 @@ +// https://github.com/rust-lang/rust/issues/133150 +#![warn(clippy::doc_nested_refdefs)] +#[rustfmt::skip] +/// - [link][]: def +//~^ ERROR: link reference defined in list item +/// +/// - [link][]: def (title) +//~^ ERROR: link reference defined in list item +/// +/// - [link][]: def "title" +//~^ ERROR: link reference defined in list item +/// +/// - [link]: not def +/// +/// - [link][]: notdef +/// +/// - [link]\: notdef +pub struct Empty; + +#[rustfmt::skip] +/// - [link][]: def +//~^ ERROR: link reference defined in list item +/// - [link][]: def (title) +//~^ ERROR: link reference defined in list item +/// - [link][]: def "title" +//~^ ERROR: link reference defined in 
list item +/// - [link]: not def +/// - [link][]: notdef +/// - [link]\: notdef +pub struct EmptyTight; + +#[rustfmt::skip] +/// - [link][]: def +//~^ ERROR: link reference defined in list item +/// inner text +/// +/// - [link][]: def (title) +//~^ ERROR: link reference defined in list item +/// inner text +/// +/// - [link][]: def "title" +//~^ ERROR: link reference defined in list item +/// inner text +/// +/// - [link]: not def +/// inner text +/// +/// - [link][]: notdef +/// inner text +/// +/// - [link]\: notdef +/// inner text +pub struct NotEmpty; + +#[rustfmt::skip] +/// - [link][]: def +//~^ ERROR: link reference defined in list item +/// inner text +/// - [link][]: def (title) +//~^ ERROR: link reference defined in list item +/// inner text +/// - [link][]: def "title" +//~^ ERROR: link reference defined in list item +/// inner text +/// - [link]: not def +/// inner text +/// - [link][]: notdef +/// inner text +/// - [link]\: notdef +/// inner text +pub struct NotEmptyTight; diff --git a/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.rs b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.rs new file mode 100644 index 00000000000..53368de4616 --- /dev/null +++ b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.rs @@ -0,0 +1,71 @@ +// https://github.com/rust-lang/rust/issues/133150 +#![warn(clippy::doc_nested_refdefs)] +#[rustfmt::skip] +/// - [link]: def +//~^ ERROR: link reference defined in list item +/// +/// - [link]: def (title) +//~^ ERROR: link reference defined in list item +/// +/// - [link]: def "title" +//~^ ERROR: link reference defined in list item +/// +/// - [link]: not def +/// +/// - [link][]: notdef +/// +/// - [link]\: notdef +pub struct Empty; + +#[rustfmt::skip] +/// - [link]: def +//~^ ERROR: link reference defined in list item +/// - [link]: def (title) +//~^ ERROR: link reference defined in list item +/// - [link]: def "title" +//~^ ERROR: link reference defined in list item +/// - [link]: not def +/// - [link][]: notdef +/// - [link]\: notdef +pub struct EmptyTight; + +#[rustfmt::skip] +/// - [link]: def +//~^ ERROR: link reference defined in list item +/// inner text +/// +/// - [link]: def (title) +//~^ ERROR: link reference defined in list item +/// inner text +/// +/// - [link]: def "title" +//~^ ERROR: link reference defined in list item +/// inner text +/// +/// - [link]: not def +/// inner text +/// +/// - [link][]: notdef +/// inner text +/// +/// - [link]\: notdef +/// inner text +pub struct NotEmpty; + +#[rustfmt::skip] +/// - [link]: def +//~^ ERROR: link reference defined in list item +/// inner text +/// - [link]: def (title) +//~^ ERROR: link reference defined in list item +/// inner text +/// - [link]: def "title" +//~^ ERROR: link reference defined in list item +/// inner text +/// - [link]: not def +/// inner text +/// - [link][]: notdef +/// inner text +/// - [link]\: notdef +/// inner text +pub struct NotEmptyTight; diff --git a/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.stderr b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.stderr new file mode 100644 index 00000000000..27314c7e968 --- /dev/null +++ b/src/tools/clippy/tests/ui/doc/doc_nested_refdef_list_item.stderr @@ -0,0 +1,148 @@ +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:4:7 + | +LL | /// - [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation + = note: `-D clippy::doc-nested-refdefs` implied by `-D warnings` + = help: to override `-D 
warnings` add `#[allow(clippy::doc_nested_refdefs)]` +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:7:7 + | +LL | /// - [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def (title) + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:10:7 + | +LL | /// - [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def "title" + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:21:7 + | +LL | /// - [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:23:7 + | +LL | /// - [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def (title) + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:25:7 + | +LL | /// - [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def "title" + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:33:7 + | +LL | /// - [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:37:7 + | +LL | /// - [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def (title) + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:41:7 + | +LL | /// - [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def "title" + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:56:7 + | +LL | /// - [link]: def + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def + | ++ + +error: link reference defined in list item + --> tests/ui/doc/doc_nested_refdef_list_item.rs:59:7 + | +LL | /// - [link]: def (title) + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def (title) + | ++ + +error: link reference defined in list item + --> 
tests/ui/doc/doc_nested_refdef_list_item.rs:62:7 + | +LL | /// - [link]: def "title" + | ^^^^^^ + | + = help: link definitions are not shown in rendered documentation +help: for an intra-doc link, add `[]` between the label and the colon + | +LL | /// - [link][]: def "title" + | ++ + +error: aborting due to 12 previous errors + diff --git a/src/tools/clippy/tests/ui/filter_map_identity.fixed b/src/tools/clippy/tests/ui/filter_map_identity.fixed index f3f6848e5f9..fdd020fcd77 100644 --- a/src/tools/clippy/tests/ui/filter_map_identity.fixed +++ b/src/tools/clippy/tests/ui/filter_map_identity.fixed @@ -81,3 +81,8 @@ fn main() { //~^ ERROR: use of } } + +fn issue12653() -> impl Iterator<Item = u8> { + [].into_iter().filter_map(|x| x) + // No lint +} diff --git a/src/tools/clippy/tests/ui/filter_map_identity.rs b/src/tools/clippy/tests/ui/filter_map_identity.rs index b9aa9c05be8..a626de9f5bb 100644 --- a/src/tools/clippy/tests/ui/filter_map_identity.rs +++ b/src/tools/clippy/tests/ui/filter_map_identity.rs @@ -81,3 +81,8 @@ fn main() { //~^ ERROR: use of } } + +fn issue12653() -> impl Iterator<Item = u8> { + [].into_iter().filter_map(|x| x) + // No lint +} diff --git a/src/tools/clippy/tests/ui/format.fixed b/src/tools/clippy/tests/ui/format.fixed index 2b32fdeae2b..3dc8eb79ba2 100644 --- a/src/tools/clippy/tests/ui/format.fixed +++ b/src/tools/clippy/tests/ui/format.fixed @@ -6,7 +6,8 @@ clippy::needless_borrow, clippy::uninlined_format_args, clippy::needless_raw_string_hashes, - clippy::useless_vec + clippy::useless_vec, + clippy::literal_string_with_formatting_args )] struct Foo(pub String); diff --git a/src/tools/clippy/tests/ui/format.rs b/src/tools/clippy/tests/ui/format.rs index bad192067e9..eaf33c2a6c9 100644 --- a/src/tools/clippy/tests/ui/format.rs +++ b/src/tools/clippy/tests/ui/format.rs @@ -6,7 +6,8 @@ clippy::needless_borrow, clippy::uninlined_format_args, clippy::needless_raw_string_hashes, - clippy::useless_vec + clippy::useless_vec, + clippy::literal_string_with_formatting_args )] struct Foo(pub String); diff --git a/src/tools/clippy/tests/ui/format.stderr b/src/tools/clippy/tests/ui/format.stderr index faa80b48000..1368c8cd77e 100644 --- a/src/tools/clippy/tests/ui/format.stderr +++ b/src/tools/clippy/tests/ui/format.stderr @@ -1,5 +1,5 @@ error: useless use of `format!` - --> tests/ui/format.rs:19:5 + --> tests/ui/format.rs:20:5 | LL | format!("foo"); | ^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()` @@ -8,19 +8,19 @@ LL | format!("foo"); = help: to override `-D warnings` add `#[allow(clippy::useless_format)]` error: useless use of `format!` - --> tests/ui/format.rs:20:5 + --> tests/ui/format.rs:21:5 | LL | format!("{{}}"); | ^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"{}".to_string()` error: useless use of `format!` - --> tests/ui/format.rs:21:5 + --> tests/ui/format.rs:22:5 | LL | format!("{{}} abc {{}}"); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"{} abc {}".to_string()` error: useless use of `format!` - --> tests/ui/format.rs:22:5 + --> tests/ui/format.rs:23:5 | LL | / format!( LL | | r##"foo {{}} @@ -35,67 +35,67 @@ LL ~ " bar"##.to_string(); | error: useless use of `format!` - --> tests/ui/format.rs:27:13 + --> tests/ui/format.rs:28:13 | LL | let _ = format!(""); | ^^^^^^^^^^^ help: consider using `String::new()`: `String::new()` error: useless use of `format!` - --> tests/ui/format.rs:29:5 + --> tests/ui/format.rs:30:5 | LL | format!("{}", "foo"); | ^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: 
`"foo".to_string()` error: useless use of `format!` - --> tests/ui/format.rs:37:5 + --> tests/ui/format.rs:38:5 | LL | format!("{}", arg); | ^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `arg.to_string()` error: useless use of `format!` - --> tests/ui/format.rs:67:5 + --> tests/ui/format.rs:68:5 | LL | format!("{}", 42.to_string()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `42.to_string()` error: useless use of `format!` - --> tests/ui/format.rs:69:5 + --> tests/ui/format.rs:70:5 | LL | format!("{}", x.display().to_string()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.display().to_string()` error: useless use of `format!` - --> tests/ui/format.rs:73:18 + --> tests/ui/format.rs:74:18 | LL | let _ = Some(format!("{}", a + "bar")); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `a + "bar"` error: useless use of `format!` - --> tests/ui/format.rs:77:22 + --> tests/ui/format.rs:78:22 | LL | let _s: String = format!("{}", &*v.join("\n")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `(&*v.join("\n")).to_string()` error: useless use of `format!` - --> tests/ui/format.rs:83:13 + --> tests/ui/format.rs:84:13 | LL | let _ = format!("{x}"); | ^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.to_string()` error: useless use of `format!` - --> tests/ui/format.rs:85:13 + --> tests/ui/format.rs:86:13 | LL | let _ = format!("{y}", y = x); | ^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.to_string()` error: useless use of `format!` - --> tests/ui/format.rs:89:13 + --> tests/ui/format.rs:90:13 | LL | let _ = format!("{abc}"); | ^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `abc.to_string()` error: useless use of `format!` - --> tests/ui/format.rs:91:13 + --> tests/ui/format.rs:92:13 | LL | let _ = format!("{xx}"); | ^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `xx.to_string()` diff --git a/src/tools/clippy/tests/ui/if_not_else.fixed b/src/tools/clippy/tests/ui/if_not_else.fixed new file mode 100644 index 00000000000..11d1e13179c --- /dev/null +++ b/src/tools/clippy/tests/ui/if_not_else.fixed @@ -0,0 +1,73 @@ +#![warn(clippy::all)] +#![warn(clippy::if_not_else)] + +fn foo() -> bool { + unimplemented!() +} +fn bla() -> bool { + unimplemented!() +} + +fn main() { + if bla() { + println!("Bunny"); + } else { + //~^ ERROR: unnecessary boolean `not` operation + println!("Bugs"); + } + if 4 == 5 { + println!("Bunny"); + } else { + //~^ ERROR: unnecessary `!=` operation + println!("Bugs"); + } + if !foo() { + println!("Foo"); + } else if !bla() { + println!("Bugs"); + } else { + println!("Bunny"); + } + + if (foo() && bla()) { + println!("both true"); + } else { + #[cfg(not(debug_assertions))] + println!("not debug"); + #[cfg(debug_assertions)] + println!("debug"); + if foo() { + println!("foo"); + } else if bla() { + println!("bla"); + } else { + println!("both false"); + } + } +} + +fn with_comments() { + if foo() { + println!("foo"); /* foo */ + } else { + /* foo is false */ + println!("foo is false"); + } + + if bla() { + println!("bla"); // bla + } else { + // bla is false + println!("bla"); + } +} + +fn with_annotations() { + #[cfg(debug_assertions)] + if foo() { + println!("foo"); /* foo */ + } else { + /* foo is false */ + println!("foo is false"); + } +} diff --git a/src/tools/clippy/tests/ui/if_not_else.rs b/src/tools/clippy/tests/ui/if_not_else.rs index fd30e3702a2..fcc67e163e8 100644 --- a/src/tools/clippy/tests/ui/if_not_else.rs +++ 
b/src/tools/clippy/tests/ui/if_not_else.rs @@ -28,4 +28,46 @@ fn main() { } else { println!("Bunny"); } + + if !(foo() && bla()) { + #[cfg(not(debug_assertions))] + println!("not debug"); + #[cfg(debug_assertions)] + println!("debug"); + if foo() { + println!("foo"); + } else if bla() { + println!("bla"); + } else { + println!("both false"); + } + } else { + println!("both true"); + } +} + +fn with_comments() { + if !foo() { + /* foo is false */ + println!("foo is false"); + } else { + println!("foo"); /* foo */ + } + + if !bla() { + // bla is false + println!("bla"); + } else { + println!("bla"); // bla + } +} + +fn with_annotations() { + #[cfg(debug_assertions)] + if !foo() { + /* foo is false */ + println!("foo is false"); + } else { + println!("foo"); /* foo */ + } } diff --git a/src/tools/clippy/tests/ui/if_not_else.stderr b/src/tools/clippy/tests/ui/if_not_else.stderr index 92fed7b1bf7..b01cb5af11f 100644 --- a/src/tools/clippy/tests/ui/if_not_else.stderr +++ b/src/tools/clippy/tests/ui/if_not_else.stderr @@ -9,9 +9,17 @@ LL | | println!("Bunny"); LL | | } | |_____^ | - = help: remove the `!` and swap the blocks of the `if`/`else` = note: `-D clippy::if-not-else` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::if_not_else)]` +help: try + | +LL ~ if bla() { +LL + println!("Bunny"); +LL + } else { +LL + +LL + println!("Bugs"); +LL + } + | error: unnecessary `!=` operation --> tests/ui/if_not_else.rs:18:5 @@ -24,7 +32,109 @@ LL | | println!("Bunny"); LL | | } | |_____^ | - = help: change to `==` and swap the blocks of the `if`/`else` +help: try + | +LL ~ if 4 == 5 { +LL + println!("Bunny"); +LL + } else { +LL + +LL + println!("Bugs"); +LL + } + | + +error: unnecessary boolean `not` operation + --> tests/ui/if_not_else.rs:32:5 + | +LL | / if !(foo() && bla()) { +LL | | #[cfg(not(debug_assertions))] +LL | | println!("not debug"); +LL | | #[cfg(debug_assertions)] +... 
| +LL | | println!("both true"); +LL | | } + | |_____^ + | +help: try + | +LL ~ if (foo() && bla()) { +LL + println!("both true"); +LL + } else { +LL + #[cfg(not(debug_assertions))] +LL + println!("not debug"); +LL + #[cfg(debug_assertions)] +LL + println!("debug"); +LL + if foo() { +LL + println!("foo"); +LL + } else if bla() { +LL + println!("bla"); +LL + } else { +LL + println!("both false"); +LL + } +LL + } + | + +error: unnecessary boolean `not` operation + --> tests/ui/if_not_else.rs:50:5 + | +LL | / if !foo() { +LL | | /* foo is false */ +LL | | println!("foo is false"); +LL | | } else { +LL | | println!("foo"); /* foo */ +LL | | } + | |_____^ + | +help: try + | +LL ~ if foo() { +LL + println!("foo"); /* foo */ +LL + } else { +LL + /* foo is false */ +LL + println!("foo is false"); +LL + } + | + +error: unnecessary boolean `not` operation + --> tests/ui/if_not_else.rs:57:5 + | +LL | / if !bla() { +LL | | // bla is false +LL | | println!("bla"); +LL | | } else { +LL | | println!("bla"); // bla +LL | | } + | |_____^ + | +help: try + | +LL ~ if bla() { +LL + println!("bla"); // bla +LL + } else { +LL + // bla is false +LL + println!("bla"); +LL + } + | + +error: unnecessary boolean `not` operation + --> tests/ui/if_not_else.rs:67:5 + | +LL | / if !foo() { +LL | | /* foo is false */ +LL | | println!("foo is false"); +LL | | } else { +LL | | println!("foo"); /* foo */ +LL | | } + | |_____^ + | +help: try + | +LL ~ if foo() { +LL + println!("foo"); /* foo */ +LL + } else { +LL + /* foo is false */ +LL + println!("foo is false"); +LL + } + | -error: aborting due to 2 previous errors +error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.fixed b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.fixed new file mode 100644 index 00000000000..ea8e56e18b0 --- /dev/null +++ b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.fixed @@ -0,0 +1,177 @@ +#![deny(clippy::index_refutable_slice)] +#![allow(clippy::uninlined_format_args, clippy::needless_lifetimes)] + +enum SomeEnum<T> { + One(T), + Two(T), + Three(T), + Four(T), +} + +fn lintable_examples() { + // Try with reference + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some([slice_0, ..]) = slice { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + println!("{}", slice_0); + } + + // Try with copy + let slice: Option<[u32; 3]> = Some([1, 2, 3]); + if let Some([slice_0, ..]) = slice { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + println!("{}", slice_0); + } + + // Try with long slice and small indices + let slice: Option<[u32; 9]> = Some([1, 2, 3, 4, 5, 6, 7, 8, 9]); + if let Some([slice_0, _, slice_2, ..]) = slice { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + println!("{}", slice_2); + println!("{}", slice_0); + } + + // Multiple bindings + let slice_wrapped: SomeEnum<[u32; 3]> = SomeEnum::One([5, 6, 7]); + if let SomeEnum::One([slice_0, ..]) | SomeEnum::Three([slice_0, ..]) = slice_wrapped { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + println!("{}", slice_0); + } + + // Two lintable slices in one if let + let a_wrapped: SomeEnum<[u32; 3]> = SomeEnum::One([9, 5, 1]); + let b_wrapped: Option<[u32; 2]> = Some([4, 6]); + if let (SomeEnum::Three([_, _, a_2, ..]), Some([_, b_1, ..])) = (a_wrapped, b_wrapped) { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + //~| ERROR: this binding can be a slice pattern to avoid 
indexing + println!("{} -> {}", a_2, b_1); + } + + // This requires the slice values to be borrowed as the slice values can only be + // borrowed and `String` doesn't implement copy + let slice: Option<[String; 2]> = Some([String::from("1"), String::from("2")]); + if let Some([_, ref slice_1, ..]) = slice { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + println!("{:?}", slice_1); + } + println!("{:?}", slice); + + // This should not suggest using the `ref` keyword as the scrutinee is already + // a reference + let slice: Option<[String; 2]> = Some([String::from("1"), String::from("2")]); + if let Some([slice_0, ..]) = &slice { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + println!("{:?}", slice_0); + } + println!("{:?}", slice); +} + +fn slice_index_above_limit() { + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + + if let Some(slice) = slice { + // Would cause a panic, IDK + println!("{}", slice[7]); + } +} + +fn slice_is_used() { + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some(slice) = slice { + println!("{:?}", slice.len()); + } + + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some(slice) = slice { + println!("{:?}", slice.to_vec()); + } + + let opt: Option<[String; 2]> = Some([String::from("Hello"), String::from("world")]); + if let Some(slice) = opt { + if !slice.is_empty() { + println!("first: {}", slice[0]); + } + } +} + +/// The slice is used by an external function and should therefore not be linted +fn check_slice_as_arg() { + fn is_interesting<T>(slice: &[T; 2]) -> bool { + !slice.is_empty() + } + + let slice_wrapped: Option<[String; 2]> = Some([String::from("Hello"), String::from("world")]); + if let Some(slice) = &slice_wrapped { + if is_interesting(slice) { + println!("This is interesting {}", slice[0]); + } + } + println!("{:?}", slice_wrapped); +} + +fn check_slice_in_struct() { + #[derive(Debug)] + struct Wrapper<'a> { + inner: Option<&'a [String]>, + is_awesome: bool, + } + + impl<'a> Wrapper<'a> { + fn is_super_awesome(&self) -> bool { + self.is_awesome + } + } + + let inner = &[String::from("New"), String::from("World")]; + let wrap = Wrapper { + inner: Some(inner), + is_awesome: true, + }; + + // Test 1: Field access + if let Some([slice_0, ..]) = wrap.inner { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + if wrap.is_awesome { + println!("This is awesome! {}", slice_0); + } + } + + // Test 2: function access + if let Some([slice_0, ..]) = wrap.inner { + //~^ ERROR: this binding can be a slice pattern to avoid indexing + if wrap.is_super_awesome() { + println!("This is super awesome! {}", slice_0); + } + } + println!("Complete wrap: {:?}", wrap); +} + +/// This would be a nice additional feature to have in the future, but adding it +/// now would make the PR too large. This is therefore only a test that we don't +/// lint cases we can't make a reasonable suggestion for +fn mutable_slice_index() { + // Mut access + let mut slice: Option<[String; 1]> = Some([String::from("Penguin")]); + if let Some(ref mut slice) = slice { + slice[0] = String::from("Mr. 
Penguin"); + } + println!("Use after modification: {:?}", slice); + + // Mut access on reference + let mut slice: Option<[String; 1]> = Some([String::from("Cat")]); + if let Some(slice) = &mut slice { + slice[0] = String::from("Lord Meow Meow"); + } + println!("Use after modification: {:?}", slice); +} + +/// The lint will ignore bindings with sub patterns as it would be hard +/// to build correct suggestions for these instances :) +fn binding_with_sub_pattern() { + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some(slice @ [_, _, _]) = slice { + println!("{:?}", slice[2]); + } +} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs index a4cb50bd682..1c1d1c4cbe4 100644 --- a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs +++ b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs @@ -1,8 +1,6 @@ #![deny(clippy::index_refutable_slice)] #![allow(clippy::uninlined_format_args, clippy::needless_lifetimes)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - enum SomeEnum<T> { One(T), Two(T), diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr index 8819cb0e28b..14ee2e54cab 100644 --- a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr +++ b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr @@ -1,5 +1,5 @@ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:16:17 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:14:17 | LL | if let Some(slice) = slice { | ^^^^^ @@ -9,150 +9,134 @@ note: the lint level is defined here | LL | #![deny(clippy::index_refutable_slice)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([slice_0, ..]) = slice { - | ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([slice_0, ..]) = slice { +LL | +LL ~ println!("{}", slice_0); | -LL | println!("{}", slice_0); - | ~~~~~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:23:17 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:21:17 | LL | if let Some(slice) = slice { | ^^^^^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([slice_0, ..]) = slice { - | ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([slice_0, ..]) = slice { +LL | +LL ~ println!("{}", slice_0); | -LL | println!("{}", slice_0); - | ~~~~~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:30:17 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:28:17 | LL | if let Some(slice) = slice { | ^^^^^ | -help: try using a slice pattern here - | -LL | if let Some([slice_0, _, slice_2, ..]) = slice { - | ~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and replace the index expressions here +help: replace the binding and indexed access with a slice pattern | +LL ~ if let Some([slice_0, _, slice_2, ..]) = slice { +LL | LL ~ println!("{}", slice_2); LL ~ println!("{}", slice_0); | error: this binding can be a slice pattern to avoid indexing - --> 
tests/ui/index_refutable_slice/if_let_slice_binding.rs:38:26 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:36:26 | LL | if let SomeEnum::One(slice) | SomeEnum::Three(slice) = slice_wrapped { | ^^^^^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let SomeEnum::One([slice_0, ..]) | SomeEnum::Three([slice_0, ..]) = slice_wrapped { - | ~~~~~~~~~~~~~ ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let SomeEnum::One([slice_0, ..]) | SomeEnum::Three([slice_0, ..]) = slice_wrapped { +LL | +LL ~ println!("{}", slice_0); | -LL | println!("{}", slice_0); - | ~~~~~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:46:29 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:44:29 | LL | if let (SomeEnum::Three(a), Some(b)) = (a_wrapped, b_wrapped) { | ^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let (SomeEnum::Three([_, _, a_2, ..]), Some(b)) = (a_wrapped, b_wrapped) { - | ~~~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let (SomeEnum::Three([_, _, a_2, ..]), Some(b)) = (a_wrapped, b_wrapped) { +LL | +LL | +LL ~ println!("{} -> {}", a_2, b[1]); | -LL | println!("{} -> {}", a_2, b[1]); - | ~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:46:38 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:44:38 | LL | if let (SomeEnum::Three(a), Some(b)) = (a_wrapped, b_wrapped) { | ^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let (SomeEnum::Three(a), Some([_, b_1, ..])) = (a_wrapped, b_wrapped) { - | ~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let (SomeEnum::Three(a), Some([_, b_1, ..])) = (a_wrapped, b_wrapped) { +LL | +LL | +LL ~ println!("{} -> {}", a[2], b_1); | -LL | println!("{} -> {}", a[2], b_1); - | ~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:55:21 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:53:21 | LL | if let Some(ref slice) = slice { | ^^^^^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([_, ref slice_1, ..]) = slice { - | ~~~~~~~~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([_, ref slice_1, ..]) = slice { +LL | +LL ~ println!("{:?}", slice_1); | -LL | println!("{:?}", slice_1); - | ~~~~~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:64:17 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:62:17 | LL | if let Some(slice) = &slice { | ^^^^^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([slice_0, ..]) = &slice { - | ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([slice_0, ..]) = &slice { +LL | +LL ~ println!("{:?}", slice_0); | -LL | println!("{:?}", slice_0); - | ~~~~~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:134:17 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:132:17 | LL | if let Some(slice) = wrap.inner { | ^^^^^ | -help: try using a slice 
pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([slice_0, ..]) = wrap.inner { - | ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([slice_0, ..]) = wrap.inner { +LL | +LL | if wrap.is_awesome { +LL ~ println!("This is awesome! {}", slice_0); | -LL | println!("This is awesome! {}", slice_0); - | ~~~~~~~ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:142:17 + --> tests/ui/index_refutable_slice/if_let_slice_binding.rs:140:17 | LL | if let Some(slice) = wrap.inner { | ^^^^^ | -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([slice_0, ..]) = wrap.inner { - | ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([slice_0, ..]) = wrap.inner { +LL | +LL | if wrap.is_super_awesome() { +LL ~ println!("This is super awesome! {}", slice_0); | -LL | println!("This is super awesome! {}", slice_0); - | ~~~~~~~ error: aborting due to 10 previous errors diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.fixed b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.fixed new file mode 100644 index 00000000000..72edc539f04 --- /dev/null +++ b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.fixed @@ -0,0 +1,29 @@ +#![deny(clippy::index_refutable_slice)] + +extern crate if_chain; +use if_chain::if_chain; + +macro_rules! if_let_slice_macro { + () => { + // This would normally be linted + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some(slice) = slice { + println!("{}", slice[0]); + } + }; +} + +fn main() { + // Don't lint this + if_let_slice_macro!(); + + // Do lint this + if_chain! 
{ + let slice: Option<&[u32]> = Some(&[1, 2, 3]); + if let Some([slice_0, ..]) = slice; + //~^ ERROR: this binding can be a slice pattern to avoid indexing + then { + println!("{}", slice_0); + } + } +} diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs index 5d9fad48889..7b474ba423b 100644 --- a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs +++ b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs @@ -1,7 +1,5 @@ #![deny(clippy::index_refutable_slice)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - extern crate if_chain; use if_chain::if_chain; diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr index 69f0aaa9777..64741abb911 100644 --- a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr +++ b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr @@ -1,5 +1,5 @@ error: this binding can be a slice pattern to avoid indexing - --> tests/ui/index_refutable_slice/slice_indexing_in_macro.rs:25:21 + --> tests/ui/index_refutable_slice/slice_indexing_in_macro.rs:23:21 | LL | if let Some(slice) = slice; | ^^^^^ @@ -9,14 +9,13 @@ note: the lint level is defined here | LL | #![deny(clippy::index_refutable_slice)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: try using a slice pattern here +help: replace the binding and indexed access with a slice pattern | -LL | if let Some([slice_0, ..]) = slice; - | ~~~~~~~~~~~~~ -help: and replace the index expressions here +LL ~ if let Some([slice_0, ..]) = slice; +LL | +LL | then { +LL ~ println!("{}", slice_0); | -LL | println!("{}", slice_0); - | ~~~~~~~ error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui/let_unit.fixed b/src/tools/clippy/tests/ui/let_unit.fixed new file mode 100644 index 00000000000..3456e274f6a --- /dev/null +++ b/src/tools/clippy/tests/ui/let_unit.fixed @@ -0,0 +1,196 @@ +#![warn(clippy::let_unit_value)] +#![allow(unused, clippy::no_effect, clippy::needless_late_init, path_statements)] + +macro_rules! let_and_return { + ($n:expr) => {{ + let ret = $n; + }}; +} + +fn main() { + println!("x"); + let _y = 1; // this is fine + let _z = ((), 1); // this as well + if true { + // do not lint this, since () is explicit + let _a = (); + let () = dummy(); + let () = (); + () = dummy(); + () = (); + let _a: () = (); + let _a: () = dummy(); + } + + consume_units_with_for_loop(); // should be fine as well + + multiline_sugg(); + + let_and_return!(()) // should be fine +} + +fn dummy() {} + +// Related to issue #1964 +fn consume_units_with_for_loop() { + // `for_let_unit` lint should not be triggered by consuming them using for loop. + let v = vec![(), (), ()]; + let mut count = 0; + for _ in v { + count += 1; + } + assert_eq!(count, 3); + + // Same for consuming from some other Iterator<Item = ()>. 
+ let (tx, rx) = ::std::sync::mpsc::channel(); + tx.send(()).unwrap(); + drop(tx); + + count = 0; + for _ in rx.iter() { + count += 1; + } + assert_eq!(count, 1); +} + +fn multiline_sugg() { + let v: Vec<u8> = vec![2]; + + v + .into_iter() + .map(|i| i * 2) + .filter(|i| i % 2 == 0) + .map(|_| ()) + .next() + .unwrap(); +} + +#[derive(Copy, Clone)] +pub struct ContainsUnit(()); // should be fine + +fn _returns_generic() { + fn f<T>() -> T { + unimplemented!() + } + fn f2<T, U>(_: T) -> U { + unimplemented!() + } + fn f3<T>(x: T) -> T { + x + } + fn f5<T: Default>(x: bool) -> Option<T> { + x.then(|| T::default()) + } + + let _: () = f(); + let x: () = f(); + + let _: () = f2(0i32); + let x: () = f2(0i32); + + let _: () = f3(()); + let x: () = f3(()); + + fn f4<T>(mut x: Vec<T>) -> T { + x.pop().unwrap() + } + let _: () = f4(vec![()]); + let x: () = f4(vec![()]); + + let _: () = { + let x = 5; + f2(x) + }; + + let _: () = if true { f() } else { f2(0) }; + let x: () = if true { f() } else { f2(0) }; + + match Some(0) { + None => f2(1), + Some(0) => f(), + Some(1) => f2(3), + Some(_) => (), + }; + + let _: () = f5(true).unwrap(); + + #[allow(clippy::let_unit_value)] + { + let x = f(); + let y; + let z; + match 0 { + 0 => { + y = f(); + z = f(); + }, + 1 => { + println!("test"); + y = f(); + z = f3(()); + }, + _ => panic!(), + } + + let x1; + let x2; + if true { + x1 = f(); + x2 = x1; + } else { + x2 = f(); + x1 = x2; + } + + let opt; + match f5(true) { + Some(x) => opt = x, + None => panic!(), + }; + + #[warn(clippy::let_unit_value)] + { + let _: () = x; + let _: () = y; + let _: () = z; + let _: () = x1; + let _: () = x2; + let _: () = opt; + } + } + + let () = f(); +} + +fn attributes() { + fn f() {} + + #[allow(clippy::let_unit_value)] + let _ = f(); + #[expect(clippy::let_unit_value)] + let _ = f(); +} + +async fn issue10433() { + let _pending: () = std::future::pending().await; +} + +pub async fn issue11502(a: ()) {} + +pub fn issue12594() { + fn returns_unit() {} + + fn returns_result<T>(res: T) -> Result<T, ()> { + Ok(res) + } + + fn actual_test() { + // create first a unit value'd value + returns_unit(); + returns_result(()).unwrap(); + returns_result(()).unwrap(); + // make sure we replace only the first variable + let res = 1; + returns_result(res).unwrap(); + } +} diff --git a/src/tools/clippy/tests/ui/let_unit.rs b/src/tools/clippy/tests/ui/let_unit.rs index 530103ffaf6..e2dafbcb771 100644 --- a/src/tools/clippy/tests/ui/let_unit.rs +++ b/src/tools/clippy/tests/ui/let_unit.rs @@ -1,8 +1,6 @@ #![warn(clippy::let_unit_value)] #![allow(unused, clippy::no_effect, clippy::needless_late_init, path_statements)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - macro_rules! 
let_and_return { ($n:expr) => {{ let ret = $n; diff --git a/src/tools/clippy/tests/ui/let_unit.stderr b/src/tools/clippy/tests/ui/let_unit.stderr index 6f149454af2..a2f368f22e5 100644 --- a/src/tools/clippy/tests/ui/let_unit.stderr +++ b/src/tools/clippy/tests/ui/let_unit.stderr @@ -1,5 +1,5 @@ error: this let-binding has unit value - --> tests/ui/let_unit.rs:13:5 + --> tests/ui/let_unit.rs:11:5 | LL | let _x = println!("x"); | ^^^^^^^^^^^^^^^^^^^^^^^ help: omit the `let` binding: `println!("x");` @@ -8,7 +8,7 @@ LL | let _x = println!("x"); = help: to override `-D warnings` add `#[allow(clippy::let_unit_value)]` error: this let-binding has unit value - --> tests/ui/let_unit.rs:61:5 + --> tests/ui/let_unit.rs:59:5 | LL | / let _ = v LL | | .into_iter() @@ -31,7 +31,7 @@ LL + .unwrap(); | error: this let-binding has unit value - --> tests/ui/let_unit.rs:110:5 + --> tests/ui/let_unit.rs:108:5 | LL | / let x = match Some(0) { LL | | None => f2(1), @@ -52,23 +52,17 @@ LL + }; | error: this let-binding has unit value - --> tests/ui/let_unit.rs:191:9 + --> tests/ui/let_unit.rs:189:9 | LL | let res = returns_unit(); | ^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: omit the `let` binding - | -LL | returns_unit(); - | -help: variable `res` of type `()` can be replaced with explicit `()` +help: omit the `let` binding and replace variable usages with `()` | -LL | returns_result(()).unwrap(); - | ~~ -help: variable `res` of type `()` can be replaced with explicit `()` +LL ~ returns_unit(); +LL ~ returns_result(()).unwrap(); +LL ~ returns_result(()).unwrap(); | -LL | returns_result(()).unwrap(); - | ~~ error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/literal_string_with_formatting_arg.rs b/src/tools/clippy/tests/ui/literal_string_with_formatting_arg.rs new file mode 100644 index 00000000000..f257c66f59d --- /dev/null +++ b/src/tools/clippy/tests/ui/literal_string_with_formatting_arg.rs @@ -0,0 +1,37 @@ +#![warn(clippy::literal_string_with_formatting_args)] +#![allow(clippy::unnecessary_literal_unwrap)] + +fn main() { + let x: Option<usize> = None; + let y = "hello"; + x.expect("{y} {}"); //~ literal_string_with_formatting_args + x.expect(" {y} bla"); //~ literal_string_with_formatting_args + x.expect("{:?}"); //~ literal_string_with_formatting_args + x.expect("{y:?}"); //~ literal_string_with_formatting_args + x.expect(" {y:?} {y:?} "); //~ literal_string_with_formatting_args + x.expect(" {y:..} {y:?} "); //~ literal_string_with_formatting_args + x.expect(r"{y:?} {y:?} "); //~ literal_string_with_formatting_args + x.expect(r"{y:?} y:?}"); //~ literal_string_with_formatting_args + x.expect(r##" {y:?} {y:?} "##); //~ literal_string_with_formatting_args + // Ensure that it doesn't try to go in the middle of a unicode character. + x.expect("———{:?}"); //~ literal_string_with_formatting_args + + // Should not lint! + format!("{y:?}"); + println!("{y:?}"); + x.expect(" {} "); // We ignore `{}` to limit false positives. + x.expect(" { } "); // We ignore `{}` to limit false positives. + x.expect("{{y} {x"); + x.expect("{{y:?}"); + x.expect(" {0}"); // If it only contains an integer, we ignore it. + x.expect(r##" {x:?} "##); // `x` doesn't exist so we should not lint + x.expect("{y:...}"); + let _ = "fn main {\n\ + }"; + // Unicode character escapes should not lint either. + "\u{0052}".to_string(); + + // Regression test for <https://github.com/rust-lang/rust-clippy/issues/13838>. 
+ let x: Option<usize> = Some(0); + x.expect("{…}"); } diff --git a/src/tools/clippy/tests/ui/literal_string_with_formatting_arg.stderr b/src/tools/clippy/tests/ui/literal_string_with_formatting_arg.stderr new file mode 100644 index 00000000000..32a84f600da --- /dev/null +++ b/src/tools/clippy/tests/ui/literal_string_with_formatting_arg.stderr @@ -0,0 +1,71 @@ +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:7:15 + | +LL | x.expect("{y} {}"); + | ^^^ + | + = note: `-D clippy::literal-string-with-formatting-args` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::literal_string_with_formatting_args)]` + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:8:16 + | +LL | x.expect(" {y} bla"); + | ^^^ + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:9:15 + | +LL | x.expect("{:?}"); + | ^^^^ + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:10:15 + | +LL | x.expect("{y:?}"); + | ^^^^^ + +error: these look like formatting arguments but are not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:11:16 + | +LL | x.expect(" {y:?} {y:?} "); + | ^^^^^ ^^^^^ + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:12:23 + | +LL | x.expect(" {y:..} {y:?} "); + | ^^^^^ + +error: these look like formatting arguments but are not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:13:16 + | +LL | x.expect(r"{y:?} {y:?} "); + | ^^^^^ ^^^^^ + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:14:16 + | +LL | x.expect(r"{y:?} y:?}"); + | ^^^^^ + +error: these look like formatting arguments but are not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:15:19 + | +LL | x.expect(r##" {y:?} {y:?} "##); + | ^^^^^ ^^^^^ + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:17:18 + | +LL | x.expect("———{:?}"); + | ^^^^ + +error: this looks like a formatting argument but it is not part of a formatting macro + --> tests/ui/literal_string_with_formatting_arg.rs:27:19 + | +LL | x.expect(r##" {x:?} "##); // `x` doesn't exist so we should not lint + | ^^^^^ + +error: aborting due to 11 previous errors + diff --git a/src/tools/clippy/tests/ui/manual_async_fn.fixed b/src/tools/clippy/tests/ui/manual_async_fn.fixed new file mode 100644 index 00000000000..dc1cb8e11fc --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_async_fn.fixed @@ -0,0 +1,116 @@ +#![warn(clippy::manual_async_fn)] +#![allow(clippy::needless_pub_self, unused)] + +use std::future::Future; + +async fn fut() -> i32 { 42 } + +#[rustfmt::skip] +async fn fut2() -> i32 { 42 } + +#[rustfmt::skip] +async fn fut3() -> i32 { 42 } + +async fn empty_fut() {} + +#[rustfmt::skip] +async fn empty_fut2() {} + +#[rustfmt::skip] +async fn empty_fut3() {} + +async fn core_fut() -> i32 { 42 } + +// should be ignored +fn has_other_stmts() -> impl core::future::Future<Output = i32> { + let _ = 42; + async move { 42 } +} + +// should be ignored +fn 
not_fut() -> i32 { + 42 +} + +// should be ignored +async fn already_async() -> impl Future<Output = i32> { + async { 42 } +} + +struct S; +impl S { + async fn inh_fut() -> i32 { + // NOTE: this code is here just to check that the indentation is correct in the suggested fix + let a = 42; + let b = 21; + if a < b { + let c = 21; + let d = 42; + if c < d { + let _ = 42; + } + } + 42 + } + + // should be ignored + fn not_fut(&self) -> i32 { + 42 + } + + // should be ignored + fn has_other_stmts() -> impl core::future::Future<Output = i32> { + let _ = 42; + async move { 42 } + } + + // should be ignored + async fn already_async(&self) -> impl Future<Output = i32> { + async { 42 } + } +} + +// Tests related to lifetime capture + +async fn elided(_: &i32) -> i32 { 42 } + +// should be ignored +fn elided_not_bound(_: &i32) -> impl Future<Output = i32> { + async { 42 } +} + +#[allow(clippy::needless_lifetimes)] +async fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> i32 { 42 } + +// should be ignored +#[allow(clippy::needless_lifetimes)] +fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> { + async { 42 } +} + +// should be ignored +mod issue_5765 { + use std::future::Future; + + struct A; + impl A { + fn f(&self) -> impl Future<Output = ()> { + async {} + } + } + + fn test() { + let _future = { + let a = A; + a.f() + }; + } +} + +pub async fn issue_10450() -> i32 { 42 } + +pub(crate) async fn issue_10450_2() -> i32 { 42 } + +pub(self) async fn issue_10450_3() -> i32 { 42 } + +fn main() {} diff --git a/src/tools/clippy/tests/ui/manual_async_fn.rs b/src/tools/clippy/tests/ui/manual_async_fn.rs index 6b8ac5033a9..9ca7654a368 100644 --- a/src/tools/clippy/tests/ui/manual_async_fn.rs +++ b/src/tools/clippy/tests/ui/manual_async_fn.rs @@ -1,8 +1,6 @@ #![warn(clippy::manual_async_fn)] #![allow(clippy::needless_pub_self, unused)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - use std::future::Future; fn fut() -> impl Future<Output = i32> { @@ -99,6 +97,7 @@ fn elided_not_bound(_: &i32) -> impl Future<Output = i32> { async { 42 } } +#[allow(clippy::needless_lifetimes)] fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b { async { 42 } } diff --git a/src/tools/clippy/tests/ui/manual_async_fn.stderr b/src/tools/clippy/tests/ui/manual_async_fn.stderr index f88fc30b3b5..68a97243436 100644 --- a/src/tools/clippy/tests/ui/manual_async_fn.stderr +++ b/src/tools/clippy/tests/ui/manual_async_fn.stderr @@ -1,5 +1,5 @@ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:8:1 + --> tests/ui/manual_async_fn.rs:6:1 | LL | fn fut() -> impl Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -8,116 +8,84 @@ LL | fn fut() -> impl Future<Output = i32> { = help: to override `-D warnings` add `#[allow(clippy::manual_async_fn)]` help: make the function `async` and return the output of the future directly | -LL | async fn fut() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | fn fut() -> impl Future<Output = i32> { 42 } - | ~~~~~~ +LL | async fn fut() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:13:1 + --> tests/ui/manual_async_fn.rs:11:1 | LL | fn fut2() ->impl Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | async fn 
fut2() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | fn fut2() ->impl Future<Output = i32> { 42 } - | ~~~~~~ +LL | async fn fut2() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:18:1 + --> tests/ui/manual_async_fn.rs:16:1 | LL | fn fut3()-> impl Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | async fn fut3() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | fn fut3()-> impl Future<Output = i32> { 42 } - | ~~~~~~ +LL | async fn fut3() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:22:1 + --> tests/ui/manual_async_fn.rs:20:1 | LL | fn empty_fut() -> impl Future<Output = ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: make the function `async` and remove the return type - | -LL | async fn empty_fut() { - | ~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function +help: make the function `async` and return the output of the future directly | -LL | fn empty_fut() -> impl Future<Output = ()> {} - | ~~ +LL | async fn empty_fut() {} + | ~~~~~~~~~~~~~~~~~~~~ ~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:27:1 + --> tests/ui/manual_async_fn.rs:25:1 | LL | fn empty_fut2() ->impl Future<Output = ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: make the function `async` and remove the return type - | -LL | async fn empty_fut2() { - | ~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function +help: make the function `async` and return the output of the future directly | -LL | fn empty_fut2() ->impl Future<Output = ()> {} - | ~~ +LL | async fn empty_fut2() {} + | ~~~~~~~~~~~~~~~~~~~~~ ~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:32:1 + --> tests/ui/manual_async_fn.rs:30:1 | LL | fn empty_fut3()-> impl Future<Output = ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: make the function `async` and remove the return type - | -LL | async fn empty_fut3() { - | ~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function +help: make the function `async` and return the output of the future directly | -LL | fn empty_fut3()-> impl Future<Output = ()> {} - | ~~ +LL | async fn empty_fut3() {} + | ~~~~~~~~~~~~~~~~~~~~~ ~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:36:1 + --> tests/ui/manual_async_fn.rs:34:1 | LL | fn core_fut() -> impl core::future::Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | async fn core_fut() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | fn core_fut() -> impl core::future::Future<Output = i32> { 42 } - | ~~~~~~ +LL | async fn core_fut() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:58:5 + --> tests/ui/manual_async_fn.rs:56:5 | LL | fn inh_fut() -> impl Future<Output = i32> { | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | async fn inh_fut() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL ~ fn inh_fut() -> impl Future<Output = i32> { +LL ~ async fn inh_fut() -> i32 { LL + // NOTE: this code is here just to check that the indentation is correct in the suggested fix LL + let a = 42; LL + let b = 21; @@ -133,79 +101,59 @@ LL + } | error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:93:1 + --> tests/ui/manual_async_fn.rs:91:1 | LL | fn elided(_: &i32) -> impl Future<Output = i32> + '_ { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | async fn elided(_: &i32) -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | fn elided(_: &i32) -> impl Future<Output = i32> + '_ { 42 } - | ~~~~~~ +LL | async fn elided(_: &i32) -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:102:1 + --> tests/ui/manual_async_fn.rs:101:1 | LL | fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | async fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b { 42 } - | ~~~~~~ +LL | async fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:131:1 + --> tests/ui/manual_async_fn.rs:130:1 | LL | pub fn issue_10450() -> impl Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | pub async fn issue_10450() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | pub fn issue_10450() -> impl Future<Output = i32> { 42 } - | ~~~~~~ +LL | pub async fn issue_10450() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:135:1 + --> tests/ui/manual_async_fn.rs:134:1 | LL | pub(crate) fn issue_10450_2() -> impl Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | pub(crate) async fn issue_10450_2() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | pub(crate) fn issue_10450_2() -> impl Future<Output = i32> { 42 } - | ~~~~~~ +LL | pub(crate) async fn issue_10450_2() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: this function can be simplified using the `async fn` syntax - --> tests/ui/manual_async_fn.rs:139:1 + --> tests/ui/manual_async_fn.rs:138:1 | LL | pub(self) fn 
issue_10450_3() -> impl Future<Output = i32> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `async` and return the output of the future directly | -LL | pub(self) async fn issue_10450_3() -> i32 { - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: move the body of the async block to the enclosing function - | -LL | pub(self) fn issue_10450_3() -> impl Future<Output = i32> { 42 } - | ~~~~~~ +LL | pub(self) async fn issue_10450_3() -> i32 { 42 } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~ error: aborting due to 13 previous errors diff --git a/src/tools/clippy/tests/ui/manual_split_once.fixed b/src/tools/clippy/tests/ui/manual_split_once.fixed new file mode 100644 index 00000000000..aaac6a048e1 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_split_once.fixed @@ -0,0 +1,144 @@ +#![warn(clippy::manual_split_once)] +#![allow(unused, clippy::iter_skip_next, clippy::iter_nth_zero)] + +extern crate itertools; + +#[allow(unused_imports)] +use itertools::Itertools; + +fn main() { + let _ = "key=value".splitn(2, '=').nth(2); + let _ = "key=value".split_once('=').unwrap().1; + let _ = "key=value".split_once('=').unwrap().1; + let (_, _) = "key=value".split_once('=').unwrap(); + + let s = String::from("key=value"); + let _ = s.split_once('=').unwrap().1; + + let s = Box::<str>::from("key=value"); + let _ = s.split_once('=').unwrap().1; + + let s = &"key=value"; + let _ = s.split_once('=').unwrap().1; + + fn _f(s: &str) -> Option<&str> { + let _ = s.split_once('=')?.1; + let _ = s.split_once('=')?.1; + let _ = s.rsplit_once('=')?.0; + let _ = s.rsplit_once('=')?.0; + None + } + + // Don't lint, slices don't have `split_once` + let _ = [0, 1, 2].splitn(2, |&x| x == 1).nth(1).unwrap(); + + // `rsplitn` gives the results in the reverse order of `rsplit_once` + let _ = "key=value".rsplit_once('=').unwrap().0; + let (_, _) = "key=value".rsplit_once('=').map(|(x, y)| (y, x)).unwrap(); + let _ = s.rsplit_once('=').map(|x| x.0); +} + +fn indirect() -> Option<()> { + let (l, r) = "a.b.c".split_once('.').unwrap(); + + + + let (l, r) = "a.b.c".split_once('.')?; + + + + let (l, r) = "a.b.c".rsplit_once('.').unwrap(); + + + + let (l, r) = "a.b.c".rsplit_once('.')?; + + + + // could lint, currently doesn't + + let mut iter = "a.b.c".splitn(2, '.'); + let other = 1; + let l = iter.next()?; + let r = iter.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + let mut mut_binding = iter.next()?; + let r = iter.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + let tuple = (iter.next()?, iter.next()?); + + // should not lint + + let mut missing_unwrap = "a.b.c".splitn(2, '.'); + let l = missing_unwrap.next(); + let r = missing_unwrap.next(); + + let mut mixed_unrap = "a.b.c".splitn(2, '.'); + let unwrap = mixed_unrap.next().unwrap(); + let question_mark = mixed_unrap.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + let same_name = iter.next()?; + let same_name = iter.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + let shadows_existing = "d"; + let shadows_existing = iter.next()?; + let r = iter.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + let becomes_shadowed = iter.next()?; + let becomes_shadowed = "d"; + let r = iter.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + let l = iter.next()?; + let r = iter.next()?; + let third_usage = iter.next()?; + + let mut n_three = "a.b.c".splitn(3, '.'); + let l = n_three.next()?; + let r = n_three.next()?; + + let mut iter = "a.b.c".splitn(2, '.'); + { + let in_block = iter.next()?; + } + let r = 
iter.next()?; + + let mut lacks_binding = "a.b.c".splitn(2, '.'); + let _ = lacks_binding.next()?; + let r = lacks_binding.next()?; + + let mut mapped = "a.b.c".splitn(2, '.').map(|_| "~"); + let l = iter.next()?; + let r = iter.next()?; + + let mut assigned = ""; + let mut iter = "a.b.c".splitn(2, '.'); + let l = iter.next()?; + assigned = iter.next()?; + + None +} + +#[clippy::msrv = "1.51"] +fn _msrv_1_51() { + // `str::split_once` was stabilized in 1.52. Do not lint this + let _ = "key=value".splitn(2, '=').nth(1).unwrap(); + + let mut iter = "a.b.c".splitn(2, '.'); + let a = iter.next().unwrap(); + let b = iter.next().unwrap(); +} + +#[clippy::msrv = "1.52"] +fn _msrv_1_52() { + let _ = "key=value".split_once('=').unwrap().1; + + let (a, b) = "a.b.c".split_once('.').unwrap(); + + +} diff --git a/src/tools/clippy/tests/ui/manual_split_once.rs b/src/tools/clippy/tests/ui/manual_split_once.rs index e13c827468b..113e1737c97 100644 --- a/src/tools/clippy/tests/ui/manual_split_once.rs +++ b/src/tools/clippy/tests/ui/manual_split_once.rs @@ -1,8 +1,6 @@ #![warn(clippy::manual_split_once)] #![allow(unused, clippy::iter_skip_next, clippy::iter_nth_zero)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - extern crate itertools; #[allow(unused_imports)] diff --git a/src/tools/clippy/tests/ui/manual_split_once.stderr b/src/tools/clippy/tests/ui/manual_split_once.stderr index 566204ad876..366d860f25e 100644 --- a/src/tools/clippy/tests/ui/manual_split_once.stderr +++ b/src/tools/clippy/tests/ui/manual_split_once.stderr @@ -1,5 +1,5 @@ error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:13:13 + --> tests/ui/manual_split_once.rs:11:13 | LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1` @@ -8,79 +8,79 @@ LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap(); = help: to override `-D warnings` add `#[allow(clippy::manual_split_once)]` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:14:13 + --> tests/ui/manual_split_once.rs:12:13 | LL | let _ = "key=value".splitn(2, '=').skip(1).next().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:15:18 + --> tests/ui/manual_split_once.rs:13:18 | LL | let (_, _) = "key=value".splitn(2, '=').next_tuple().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=')` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:18:13 + --> tests/ui/manual_split_once.rs:16:13 | LL | let _ = s.splitn(2, '=').nth(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:21:13 + --> tests/ui/manual_split_once.rs:19:13 | LL | let _ = s.splitn(2, '=').nth(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:24:13 + --> tests/ui/manual_split_once.rs:22:13 | LL | let _ = s.splitn(2, '=').skip(1).next().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:27:17 + --> tests/ui/manual_split_once.rs:25:17 | LL | let _ = s.splitn(2, '=').nth(1)?; | 
^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=')?.1` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:28:17 + --> tests/ui/manual_split_once.rs:26:17 | LL | let _ = s.splitn(2, '=').skip(1).next()?; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=')?.1` error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:29:17 + --> tests/ui/manual_split_once.rs:27:17 | LL | let _ = s.rsplitn(2, '=').nth(1)?; | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=')?.0` error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:30:17 + --> tests/ui/manual_split_once.rs:28:17 | LL | let _ = s.rsplitn(2, '=').skip(1).next()?; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=')?.0` error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:38:13 + --> tests/ui/manual_split_once.rs:36:13 | LL | let _ = "key=value".rsplitn(2, '=').nth(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".rsplit_once('=').unwrap().0` error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:39:18 + --> tests/ui/manual_split_once.rs:37:18 | LL | let (_, _) = "key=value".rsplitn(2, '=').next_tuple().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".rsplit_once('=').map(|(x, y)| (y, x))` error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:40:13 + --> tests/ui/manual_split_once.rs:38:13 | LL | let _ = s.rsplitn(2, '=').nth(1); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=').map(|x| x.0)` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:44:5 + --> tests/ui/manual_split_once.rs:42:5 | LL | let mut iter = "a.b.c".splitn(2, '.'); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -89,21 +89,15 @@ LL | let l = iter.next().unwrap(); LL | let r = iter.next().unwrap(); | ----------------------------- second usage here | -help: try `split_once` - | -LL | let (l, r) = "a.b.c".split_once('.').unwrap(); +help: replace with `split_once` | -help: remove the `iter` usages - | -LL - let l = iter.next().unwrap(); - | -help: remove the `iter` usages - | -LL - let r = iter.next().unwrap(); +LL ~ let (l, r) = "a.b.c".split_once('.').unwrap(); +LL ~ +LL ~ | error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:48:5 + --> tests/ui/manual_split_once.rs:46:5 | LL | let mut iter = "a.b.c".splitn(2, '.'); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -112,21 +106,15 @@ LL | let l = iter.next()?; LL | let r = iter.next()?; | --------------------- second usage here | -help: try `split_once` - | -LL | let (l, r) = "a.b.c".split_once('.')?; - | -help: remove the `iter` usages - | -LL - let l = iter.next()?; +help: replace with `split_once` | -help: remove the `iter` usages - | -LL - let r = iter.next()?; +LL ~ let (l, r) = "a.b.c".split_once('.')?; +LL ~ +LL ~ | error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:52:5 + --> tests/ui/manual_split_once.rs:50:5 | LL | let mut iter = "a.b.c".rsplitn(2, '.'); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -135,21 +123,15 @@ LL | let r = iter.next().unwrap(); LL | let l = iter.next().unwrap(); | ----------------------------- second usage here | -help: try `rsplit_once` - | -LL | let (l, r) = "a.b.c".rsplit_once('.').unwrap(); - | -help: remove the `iter` usages - | -LL - let r = iter.next().unwrap(); +help: replace with `rsplit_once` | -help: remove the `iter` usages - | -LL - let l 
= iter.next().unwrap(); +LL ~ let (l, r) = "a.b.c".rsplit_once('.').unwrap(); +LL ~ +LL ~ | error: manual implementation of `rsplit_once` - --> tests/ui/manual_split_once.rs:56:5 + --> tests/ui/manual_split_once.rs:54:5 | LL | let mut iter = "a.b.c".rsplitn(2, '.'); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -158,27 +140,21 @@ LL | let r = iter.next()?; LL | let l = iter.next()?; | --------------------- second usage here | -help: try `rsplit_once` - | -LL | let (l, r) = "a.b.c".rsplit_once('.')?; - | -help: remove the `iter` usages +help: replace with `rsplit_once` | -LL - let r = iter.next()?; - | -help: remove the `iter` usages - | -LL - let l = iter.next()?; +LL ~ let (l, r) = "a.b.c".rsplit_once('.')?; +LL ~ +LL ~ | error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:141:13 + --> tests/ui/manual_split_once.rs:139:13 | LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1` error: manual implementation of `split_once` - --> tests/ui/manual_split_once.rs:143:5 + --> tests/ui/manual_split_once.rs:141:5 | LL | let mut iter = "a.b.c".splitn(2, '.'); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -187,17 +163,11 @@ LL | let a = iter.next().unwrap(); LL | let b = iter.next().unwrap(); | ----------------------------- second usage here | -help: try `split_once` - | -LL | let (a, b) = "a.b.c".split_once('.').unwrap(); - | -help: remove the `iter` usages - | -LL - let a = iter.next().unwrap(); - | -help: remove the `iter` usages +help: replace with `split_once` | -LL - let b = iter.next().unwrap(); +LL ~ let (a, b) = "a.b.c".split_once('.').unwrap(); +LL ~ +LL ~ | error: aborting due to 19 previous errors diff --git a/src/tools/clippy/tests/ui/match_same_arms.stderr b/src/tools/clippy/tests/ui/match_same_arms.stderr index 3c0382767c3..4a4772da143 100644 --- a/src/tools/clippy/tests/ui/match_same_arms.stderr +++ b/src/tools/clippy/tests/ui/match_same_arms.stderr @@ -20,13 +20,10 @@ LL | (1, .., 3) => 42, | ^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns +help: or try merging the arm patterns and removing the obsolete arm | -LL | (1, .., 3) | (.., 3) => 42, - | ~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm - | -LL - (.., 3) => 42, +LL ~ (1, .., 3) | (.., 3) => 42, +LL ~ _ => 0, | error: this match arm has an identical body to another arm @@ -36,13 +33,11 @@ LL | 51 => 1, | ^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | 51 | 42 => 1, - | ~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - 42 => 1, +LL - 51 => 1, +LL + 51 | 42 => 1, | error: this match arm has an identical body to another arm @@ -52,13 +47,10 @@ LL | 41 => 2, | ^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns +help: or try merging the arm patterns and removing the obsolete arm | -LL | 41 | 52 => 2, - | ~~~~~~~ -help: and remove this obsolete arm - | -LL - 52 => 2, +LL ~ 41 | 52 => 2, +LL ~ _ => 0, | error: this match arm has an identical body to another arm @@ -68,13 +60,11 @@ LL | 2 => 2, | ^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | 2 | 1 => 2, - | ~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - 1 => 2, +LL - 2 => 2, +LL + 2 | 1 => 2, | error: this match arm has an identical body 
to another arm @@ -84,13 +74,11 @@ LL | 3 => 2, | ^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns +help: or try merging the arm patterns and removing the obsolete arm | -LL | 3 | 1 => 2, - | ~~~~~ -help: and remove this obsolete arm - | -LL - 1 => 2, +LL ~ 2 => 2, +LL | +LL ~ 3 | 1 => 2, | error: this match arm has an identical body to another arm @@ -100,14 +88,11 @@ LL | 2 => 2, | ^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns +help: or try merging the arm patterns and removing the obsolete arm | -LL | 2 | 3 => 2, - | ~~~~~ -help: and remove this obsolete arm - | -LL - 3 => 2, -LL + +LL ~ 2 | 3 => 2, +LL | +LL ~ | error: this match arm has an identical body to another arm @@ -117,13 +102,11 @@ LL | CommandInfo::External { name, .. } => name.to_string(), | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | CommandInfo::External { name, .. } | CommandInfo::BuiltIn { name, .. } => name.to_string(), - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - CommandInfo::BuiltIn { name, .. } => name.to_string(), +LL - CommandInfo::External { name, .. } => name.to_string(), +LL + CommandInfo::External { name, .. } | CommandInfo::BuiltIn { name, .. } => name.to_string(), | error: aborting due to 8 previous errors diff --git a/src/tools/clippy/tests/ui/match_same_arms2.fixed b/src/tools/clippy/tests/ui/match_same_arms2.fixed new file mode 100644 index 00000000000..b7d377f1ebf --- /dev/null +++ b/src/tools/clippy/tests/ui/match_same_arms2.fixed @@ -0,0 +1,259 @@ +#![warn(clippy::match_same_arms)] +#![allow( + clippy::disallowed_names, + clippy::diverging_sub_expression, + clippy::uninlined_format_args, + clippy::match_single_binding, + clippy::match_like_matches_macro +)] + +fn bar<T>(_: T) {} +fn foo() -> bool { + unimplemented!() +} + +fn match_same_arms() { + let _ = match 42 { + _ => { + foo(); + let mut a = 42 + [23].len() as i32; + if true { + a += 7; + } + a = -31 - a; + a + }, + }; + //~^^^^^^^^^^^^^^^^^^^ ERROR: this match arm has an identical body to the `_` wildcard arm + + let _ = match 42 { + 51 | 42 => foo(), //~ ERROR: this match arm has an identical body to another arm + _ => true, + }; + + let _ = match Some(42) { + None | Some(_) => 24, //~ ERROR: this match arm has an identical body to another arm + }; + + let _ = match Some(42) { + Some(foo) => 24, + None => 24, + }; + + let _ = match Some(42) { + Some(42) => 24, + Some(a) => 24, // bindings are different + None => 0, + }; + + let _ = match Some(42) { + Some(a) if a > 0 => 24, + Some(a) => 24, // one arm has a guard + None => 0, + }; + + match (Some(42), Some(42)) { + (None, Some(a)) | (Some(a), None) => bar(a), //~ ERROR: this match arm has an identical body to another arm + _ => (), + } + + // No warning because guards are different + let _ = match Some(42) { + Some(a) if a == 42 => a, + Some(a) if a == 24 => a, + Some(_) => 24, + None => 0, + }; + + let _ = match (Some(42), Some(42)) { + (None, Some(a)) | (Some(a), None) if a == 42 => a, //~ ERROR: this match arm has an identical body to another arm + _ => 0, + }; + + match (Some(42), Some(42)) { + (Some(a), ..) 
| (.., Some(a)) => bar(a), //~ ERROR: this match arm has an identical body to another arm + _ => (), + } + + let _ = match Some(()) { + Some(()) => 0.0, + None => -0.0, + }; + + match (Some(42), Some("")) { + (Some(a), None) => bar(a), + (None, Some(a)) => bar(a), // bindings have different types + _ => (), + } + + let x: Result<i32, &str> = Ok(3); + + // No warning because of the guard. + match x { + Ok(x) if x * x == 64 => println!("ok"), + Ok(_) => println!("ok"), + Err(_) => println!("err"), + } + + // This used to be a false positive; see issue #1996. + match x { + Ok(3) => println!("ok"), + Ok(x) if x * x == 64 => println!("ok 64"), + Ok(_) => println!("ok"), + Err(_) => println!("err"), + } + + match (x, Some(1i32)) { + (Ok(x), Some(_)) | (Ok(_), Some(x)) => println!("ok {}", x), //~ ERROR: this match arm has an identical body to another arm + _ => println!("err"), + } + + // No warning; different types for `x`. + match (x, Some(1.0f64)) { + (Ok(x), Some(_)) => println!("ok {}", x), + (Ok(_), Some(x)) => println!("ok {}", x), + _ => println!("err"), + } + + // False negative #2251. + match x { + Ok(_tmp) => println!("ok"), + Ok(_) | Ok(3) => println!("ok"), //~ ERROR: this match arm has an identical body to another arm + Err(_) => { + unreachable!(); + }, + } + + // False positive #1390 + macro_rules! empty { + ($e:expr) => {}; + } + match 0 { + 0 => { + empty!(0); + }, + 1 => { + empty!(1); + }, + x => { + empty!(x); + }, + }; + + // still lint if the tokens are the same + match 0 { + 1 | 0 => { + empty!(0); + }, + x => { + empty!(x); + }, + } + //~^^^^^^^ ERROR: this match arm has an identical body to another arm + + match_expr_like_matches_macro_priority(); +} + +fn match_expr_like_matches_macro_priority() { + enum E { + A, + B, + C, + } + let x = E::A; + let _ans = match x { + E::A => false, + E::B => false, + _ => true, + }; +} + +fn main() { + let _ = match Some(0) { + Some(0) => 0, + Some(1) => 1, + #[cfg(feature = "foo")] + Some(2) => 2, + _ => 1, + }; + + enum Foo { + X(u32), + Y(u32), + Z(u32), + } + + // Don't lint. `Foo::X(0)` and `Foo::Z(_)` overlap with the arm in between. + let _ = match Foo::X(0) { + Foo::X(0) => 1, + Foo::X(_) | Foo::Y(_) | Foo::Z(0) => 2, + Foo::Z(_) => 1, + _ => 0, + }; + + // Suggest moving `Foo::Z(_)` up. + let _ = match Foo::X(0) { + Foo::X(0) | Foo::Z(_) => 1, //~ ERROR: this match arm has an identical body to another arm + Foo::X(_) | Foo::Y(_) => 2, + _ => 0, + }; + + // Suggest moving `Foo::X(0)` down. + let _ = match Foo::X(0) { + Foo::Y(_) | Foo::Z(0) => 2, + Foo::Z(_) | Foo::X(0) => 1, //~ ERROR: this match arm has an identical body to another arm + _ => 0, + }; + + // Don't lint. + let _ = match 0 { + -2 => 1, + -5..=50 => 2, + -150..=88 => 1, + _ => 3, + }; + + struct Bar { + x: u32, + y: u32, + z: u32, + } + + // Lint. + let _ = match None { + Some(Bar { y: 10, z: 0, .. }) => 2, + None => 50, + Some(Bar { y: 0, x: 5, .. }) | Some(Bar { x: 0, y: 5, .. 
}) => 1, //~ ERROR: this match arm has an identical body to another arm + _ => 200, + }; + + let _ = match 0 { + 0 => todo!(), + 1 => todo!(), + 2 => core::convert::identity::<u32>(todo!()), + 3 => core::convert::identity::<u32>(todo!()), + _ => 5, + }; + + let _ = match 0 { + 1 | 0 => cfg!(not_enable), + _ => false, + }; +} + +// issue #8919, fixed on https://github.com/rust-lang/rust/pull/97312 +mod with_lifetime { + enum MaybeStaticStr<'a> { + Static(&'static str), + Borrowed(&'a str), + } + + impl<'a> MaybeStaticStr<'a> { + fn get(&self) -> &'a str { + match *self { + MaybeStaticStr::Borrowed(s) | MaybeStaticStr::Static(s) => s, + //~^ ERROR: this match arm has an identical body to another arm + } + } + } +} diff --git a/src/tools/clippy/tests/ui/match_same_arms2.rs b/src/tools/clippy/tests/ui/match_same_arms2.rs index dedd02e7873..dfd15d10c3d 100644 --- a/src/tools/clippy/tests/ui/match_same_arms2.rs +++ b/src/tools/clippy/tests/ui/match_same_arms2.rs @@ -7,8 +7,6 @@ clippy::match_like_matches_macro )] -//@no-rustfix: need to change the suggestion to a multipart suggestion - fn bar<T>(_: T) {} fn foo() -> bool { unimplemented!() diff --git a/src/tools/clippy/tests/ui/match_same_arms2.stderr b/src/tools/clippy/tests/ui/match_same_arms2.stderr index 3a28b5afc2b..525a25e9287 100644 --- a/src/tools/clippy/tests/ui/match_same_arms2.stderr +++ b/src/tools/clippy/tests/ui/match_same_arms2.stderr @@ -1,5 +1,5 @@ error: this match arm has an identical body to the `_` wildcard arm - --> tests/ui/match_same_arms2.rs:19:9 + --> tests/ui/match_same_arms2.rs:17:9 | LL | / 42 => { LL | | foo(); @@ -12,7 +12,7 @@ LL | | _ => { | = help: or try changing either arm body note: `_` wildcard arm here - --> tests/ui/match_same_arms2.rs:28:9 + --> tests/ui/match_same_arms2.rs:26:9 | LL | / _ => { LL | | foo(); @@ -26,119 +26,103 @@ LL | | }, = help: to override `-D warnings` add `#[allow(clippy::match_same_arms)]` error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:42:9 + --> tests/ui/match_same_arms2.rs:40:9 | LL | 51 => foo(), | ^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | 51 | 42 => foo(), - | ~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - 42 => foo(), +LL - 51 => foo(), +LL + 51 | 42 => foo(), | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:48:9 + --> tests/ui/match_same_arms2.rs:46:9 | LL | None => 24, | ^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | None | Some(_) => 24, - | ~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - Some(_) => 24, +LL - None => 24, +LL + None | Some(_) => 24, | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:70:9 + --> tests/ui/match_same_arms2.rs:68:9 | LL | (None, Some(a)) => bar(a), | ^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | (None, Some(a)) | (Some(a), None) => bar(a), - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - (Some(a), None) => bar(a), +LL - (None, Some(a)) => bar(a), +LL + (None, Some(a)) | (Some(a), None) => bar(a), | error: this match arm has an identical body to another arm - --> 
tests/ui/match_same_arms2.rs:84:9 + --> tests/ui/match_same_arms2.rs:82:9 | LL | (None, Some(a)) if a == 42 => a, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | (None, Some(a)) | (Some(a), None) if a == 42 => a, - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - (Some(a), None) if a == 42 => a, +LL - (None, Some(a)) if a == 42 => a, +LL + (None, Some(a)) | (Some(a), None) if a == 42 => a, | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:89:9 + --> tests/ui/match_same_arms2.rs:87:9 | LL | (Some(a), ..) => bar(a), | ^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns +help: or try merging the arm patterns and removing the obsolete arm | -LL | (Some(a), ..) | (.., Some(a)) => bar(a), - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm - | -LL - (.., Some(a)) => bar(a), +LL ~ (Some(a), ..) | (.., Some(a)) => bar(a), +LL ~ _ => (), | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:123:9 + --> tests/ui/match_same_arms2.rs:121:9 | LL | (Ok(x), Some(_)) => println!("ok {}", x), | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | (Ok(x), Some(_)) | (Ok(_), Some(x)) => println!("ok {}", x), - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | -LL - (Ok(_), Some(x)) => println!("ok {}", x), +LL ~ (Ok(x), Some(_)) | (Ok(_), Some(x)) => println!("ok {}", x), +LL ~ _ => println!("err"), | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:139:9 + --> tests/ui/match_same_arms2.rs:137:9 | LL | Ok(_) => println!("ok"), | ^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | Ok(_) | Ok(3) => println!("ok"), - | ~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - Ok(3) => println!("ok"), +LL - Ok(_) => println!("ok"), +LL + Ok(_) | Ok(3) => println!("ok"), | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:166:9 + --> tests/ui/match_same_arms2.rs:164:9 | LL | / 1 => { LL | | empty!(0); @@ -146,95 +130,82 @@ LL | | }, | |_________^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | 1 | 0 => { - | ~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - 0 => { LL - empty!(0); LL - }, +LL - 1 => { +LL + 1 | 0 => { | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:217:9 + --> tests/ui/match_same_arms2.rs:215:9 | LL | Foo::X(0) => 1, | ^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | Foo::X(0) | Foo::Z(_) => 1, - | ~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | -LL - Foo::Z(_) => 1, +LL ~ Foo::X(0) | Foo::Z(_) => 1, +LL | Foo::X(_) | Foo::Y(_) => 2, +LL ~ _ => 0, | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:227:9 + --> tests/ui/match_same_arms2.rs:225:9 | LL | Foo::Z(_) => 
1, | ^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | Foo::Z(_) | Foo::X(0) => 1, - | ~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | -LL - Foo::X(0) => 1, +LL ~ Foo::Y(_) | Foo::Z(0) => 2, +LL ~ Foo::Z(_) | Foo::X(0) => 1, | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:250:9 + --> tests/ui/match_same_arms2.rs:248:9 | LL | Some(Bar { y: 0, x: 5, .. }) => 1, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns +help: or try merging the arm patterns and removing the obsolete arm | -LL | Some(Bar { y: 0, x: 5, .. }) | Some(Bar { x: 0, y: 5, .. }) => 1, - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm - | -LL - Some(Bar { x: 0, y: 5, .. }) => 1, +LL ~ Some(Bar { y: 10, z: 0, .. }) => 2, +LL | None => 50, +LL ~ Some(Bar { y: 0, x: 5, .. }) | Some(Bar { x: 0, y: 5, .. }) => 1, | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:264:9 + --> tests/ui/match_same_arms2.rs:262:9 | LL | 1 => cfg!(not_enable), | ^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | 1 | 0 => cfg!(not_enable), - | ~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - 0 => cfg!(not_enable), +LL - 1 => cfg!(not_enable), +LL + 1 | 0 => cfg!(not_enable), | error: this match arm has an identical body to another arm - --> tests/ui/match_same_arms2.rs:280:17 + --> tests/ui/match_same_arms2.rs:278:17 | LL | MaybeStaticStr::Borrowed(s) => s, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: try changing either arm body -help: or try merging the arm patterns - | -LL | MaybeStaticStr::Borrowed(s) | MaybeStaticStr::Static(s) => s, - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -help: and remove this obsolete arm +help: or try merging the arm patterns and removing the obsolete arm | LL - MaybeStaticStr::Static(s) => s, +LL - MaybeStaticStr::Borrowed(s) => s, +LL + MaybeStaticStr::Borrowed(s) | MaybeStaticStr::Static(s) => s, | error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/must_use_unit.fixed b/src/tools/clippy/tests/ui/must_use_unit.fixed index f255cb66652..b92d9379c90 100644 --- a/src/tools/clippy/tests/ui/must_use_unit.fixed +++ b/src/tools/clippy/tests/ui/must_use_unit.fixed @@ -23,3 +23,9 @@ fn main() { fn foo() {} ); } + +#[cfg_attr(all(), deprecated)] +fn issue_12320() {} + +#[cfg_attr(all(), deprecated, doc = "foo")] +fn issue_12320_2() {} diff --git a/src/tools/clippy/tests/ui/must_use_unit.rs b/src/tools/clippy/tests/ui/must_use_unit.rs index 1305910ed0e..c77e7282750 100644 --- a/src/tools/clippy/tests/ui/must_use_unit.rs +++ b/src/tools/clippy/tests/ui/must_use_unit.rs @@ -26,3 +26,9 @@ fn main() { fn foo() {} ); } + +#[cfg_attr(all(), must_use, deprecated)] +fn issue_12320() {} + +#[cfg_attr(all(), deprecated, doc = "foo", must_use)] +fn issue_12320_2() {} diff --git a/src/tools/clippy/tests/ui/must_use_unit.stderr b/src/tools/clippy/tests/ui/must_use_unit.stderr index c2ee2edda7d..b435568deea 100644 --- a/src/tools/clippy/tests/ui/must_use_unit.stderr +++ b/src/tools/clippy/tests/ui/must_use_unit.stderr @@ -25,5 +25,21 @@ LL | #[must_use = "With note"] LL | pub fn must_use_with_note() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: 
aborting due to 3 previous errors +error: this unit-returning function has a `#[must_use]` attribute + --> tests/ui/must_use_unit.rs:31:1 + | +LL | #[cfg_attr(all(), must_use, deprecated)] + | -------------------- help: change these attributes to: `deprecated` +LL | fn issue_12320() {} + | ^^^^^^^^^^^^^^^^ + +error: this unit-returning function has a `#[must_use]` attribute + --> tests/ui/must_use_unit.rs:34:1 + | +LL | #[cfg_attr(all(), deprecated, doc = "foo", must_use)] + | --------------------------------- help: change these attributes to: `deprecated, doc = "foo"` +LL | fn issue_12320_2() {} + | ^^^^^^^^^^^^^^^^^^ + +error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.fixed b/src/tools/clippy/tests/ui/needless_lifetimes.fixed index cfa4cf9da3c..8196d608abd 100644 --- a/src/tools/clippy/tests/ui/needless_lifetimes.fixed +++ b/src/tools/clippy/tests/ui/needless_lifetimes.fixed @@ -562,4 +562,18 @@ mod rayon { } } +mod issue13749 { + pub struct Generic<T>(T); + // Non elidable lifetime + #[expect(clippy::extra_unused_lifetimes)] + impl<'a, T> Generic<T> where T: 'a {} +} + +mod issue13749bis { + pub struct Generic<T>(T); + // Non elidable lifetime + #[expect(clippy::extra_unused_lifetimes)] + impl<'a, T: 'a> Generic<T> {} +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.rs b/src/tools/clippy/tests/ui/needless_lifetimes.rs index 5e9d5116426..b55dd99c46d 100644 --- a/src/tools/clippy/tests/ui/needless_lifetimes.rs +++ b/src/tools/clippy/tests/ui/needless_lifetimes.rs @@ -562,4 +562,18 @@ mod rayon { } } +mod issue13749 { + pub struct Generic<T>(T); + // Non elidable lifetime + #[expect(clippy::extra_unused_lifetimes)] + impl<'a, T> Generic<T> where T: 'a {} +} + +mod issue13749bis { + pub struct Generic<T>(T); + // Non elidable lifetime + #[expect(clippy::extra_unused_lifetimes)] + impl<'a, T: 'a> Generic<T> {} +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/needless_match.fixed b/src/tools/clippy/tests/ui/needless_match.fixed index a936eb463f9..06c6169d0da 100644 --- a/src/tools/clippy/tests/ui/needless_match.fixed +++ b/src/tools/clippy/tests/ui/needless_match.fixed @@ -245,4 +245,57 @@ mod issue9084 { } } +fn a() -> Option<()> { + Some(()) +} +fn b() -> Option<()> { + Some(()) +} +fn c() -> Option<()> { + Some(()) +} + +#[allow(clippy::ifs_same_cond)] +pub fn issue13574() -> Option<()> { + // Do not lint. + // The right hand of all these arms are different functions. 
+ let _ = { + if let Some(a) = a() { + Some(a) + } else if let Some(b) = b() { + Some(b) + } else if let Some(c) = c() { + Some(c) + } else { + None + } + }; + + const A: Option<()> = Some(()); + const B: Option<()> = Some(()); + const C: Option<()> = Some(()); + const D: Option<()> = Some(()); + + let _ = { + if let Some(num) = A { + Some(num) + } else if let Some(num) = B { + Some(num) + } else if let Some(num) = C { + Some(num) + } else if let Some(num) = D { + Some(num) + } else { + None + } + }; + + // Same const, should lint + let _ = { + A + }; + + None +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/needless_match.rs b/src/tools/clippy/tests/ui/needless_match.rs index b1dd6ff075d..6b71de68e1b 100644 --- a/src/tools/clippy/tests/ui/needless_match.rs +++ b/src/tools/clippy/tests/ui/needless_match.rs @@ -289,4 +289,65 @@ mod issue9084 { } } +fn a() -> Option<()> { + Some(()) +} +fn b() -> Option<()> { + Some(()) +} +fn c() -> Option<()> { + Some(()) +} + +#[allow(clippy::ifs_same_cond)] +pub fn issue13574() -> Option<()> { + // Do not lint. + // The right hand of all these arms are different functions. + let _ = { + if let Some(a) = a() { + Some(a) + } else if let Some(b) = b() { + Some(b) + } else if let Some(c) = c() { + Some(c) + } else { + None + } + }; + + const A: Option<()> = Some(()); + const B: Option<()> = Some(()); + const C: Option<()> = Some(()); + const D: Option<()> = Some(()); + + let _ = { + if let Some(num) = A { + Some(num) + } else if let Some(num) = B { + Some(num) + } else if let Some(num) = C { + Some(num) + } else if let Some(num) = D { + Some(num) + } else { + None + } + }; + + // Same const, should lint + let _ = { + if let Some(num) = A { + Some(num) + } else if let Some(num) = A { + Some(num) + } else if let Some(num) = A { + Some(num) + } else { + None + } + }; + + None +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/needless_match.stderr b/src/tools/clippy/tests/ui/needless_match.stderr index 5bcab467aea..1410585cb2e 100644 --- a/src/tools/clippy/tests/ui/needless_match.stderr +++ b/src/tools/clippy/tests/ui/needless_match.stderr @@ -131,5 +131,17 @@ LL | | _ => e, LL | | }; | |_________^ help: replace it with: `e` -error: aborting due to 13 previous errors +error: this if-let expression is unnecessary + --> tests/ui/needless_match.rs:339:9 + | +LL | / if let Some(num) = A { +LL | | Some(num) +LL | | } else if let Some(num) = A { +LL | | Some(num) +... 
| +LL | | None +LL | | } + | |_________^ help: replace it with: `A` + +error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/needless_option_take.fixed b/src/tools/clippy/tests/ui/needless_option_take.fixed deleted file mode 100644 index d732a2686cb..00000000000 --- a/src/tools/clippy/tests/ui/needless_option_take.fixed +++ /dev/null @@ -1,13 +0,0 @@ -fn main() { - println!("Testing non erroneous option_take_on_temporary"); - let mut option = Some(1); - let _ = Box::new(move || option.take().unwrap()); - - println!("Testing non erroneous option_take_on_temporary"); - let x = Some(3); - x.as_ref(); - - println!("Testing erroneous option_take_on_temporary"); - let x = Some(3); - x.as_ref(); -} diff --git a/src/tools/clippy/tests/ui/needless_option_take.rs b/src/tools/clippy/tests/ui/needless_option_take.rs index f947d874e06..c6807718a75 100644 --- a/src/tools/clippy/tests/ui/needless_option_take.rs +++ b/src/tools/clippy/tests/ui/needless_option_take.rs @@ -1,3 +1,15 @@ +struct MyStruct; + +impl MyStruct { + pub fn get_option() -> Option<Self> { + todo!() + } +} + +fn return_option() -> Option<i32> { + todo!() +} + fn main() { println!("Testing non erroneous option_take_on_temporary"); let mut option = Some(1); @@ -7,7 +19,40 @@ fn main() { let x = Some(3); x.as_ref(); - println!("Testing erroneous option_take_on_temporary"); let x = Some(3); x.as_ref().take(); + //~^ ERROR: called `Option::take()` on a temporary value + + println!("Testing non erroneous option_take_on_temporary"); + let mut x = Some(3); + let y = x.as_mut(); + + let mut x = Some(3); + let y = x.as_mut().take(); + //~^ ERROR: called `Option::take()` on a temporary value + let y = x.replace(289).take(); + //~^ ERROR: called `Option::take()` on a temporary value + + let y = Some(3).as_mut().take(); + //~^ ERROR: called `Option::take()` on a temporary value + + let y = Option::as_mut(&mut x).take(); + //~^ ERROR: called `Option::take()` on a temporary value + + let x = return_option(); + let x = return_option().take(); + //~^ ERROR: called `Option::take()` on a temporary value + + let x = MyStruct::get_option(); + let x = MyStruct::get_option().take(); + //~^ ERROR: called `Option::take()` on a temporary value + + let mut my_vec = vec![1, 2, 3]; + my_vec.push(4); + let y = my_vec.first(); + let y = my_vec.first().take(); + //~^ ERROR: called `Option::take()` on a temporary value + + let y = my_vec.first().take(); + //~^ ERROR: called `Option::take()` on a temporary value } diff --git a/src/tools/clippy/tests/ui/needless_option_take.stderr b/src/tools/clippy/tests/ui/needless_option_take.stderr index 4a73ccb86d0..e036bd53170 100644 --- a/src/tools/clippy/tests/ui/needless_option_take.stderr +++ b/src/tools/clippy/tests/ui/needless_option_take.stderr @@ -1,11 +1,76 @@ error: called `Option::take()` on a temporary value - --> tests/ui/needless_option_take.rs:12:5 + --> tests/ui/needless_option_take.rs:23:5 | LL | x.as_ref().take(); - | ^^^^^^^^^^^^^^^^^ help: try: `x.as_ref()` + | ^^^^^^^^^^^^^^^^^ | + = note: `as_ref` creates a temporary value, so calling take() has no effect = note: `-D clippy::needless-option-take` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::needless_option_take)]` -error: aborting due to 1 previous error +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:31:13 + | +LL | let y = x.as_mut().take(); + | ^^^^^^^^^^^^^^^^^ + | + = note: `as_mut` creates a temporary value, so calling take() has no effect + +error: 
called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:33:13 + | +LL | let y = x.replace(289).take(); + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `replace` creates a temporary value, so calling take() has no effect + +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:36:13 + | +LL | let y = Some(3).as_mut().take(); + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `as_mut` creates a temporary value, so calling take() has no effect + +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:39:13 + | +LL | let y = Option::as_mut(&mut x).take(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `as_mut` creates a temporary value, so calling take() has no effect + +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:43:13 + | +LL | let x = return_option().take(); + | ^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `return_option` creates a temporary value, so calling take() has no effect + +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:47:13 + | +LL | let x = MyStruct::get_option().take(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `get_option` creates a temporary value, so calling take() has no effect + +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:53:13 + | +LL | let y = my_vec.first().take(); + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `first` creates a temporary value, so calling take() has no effect + +error: called `Option::take()` on a temporary value + --> tests/ui/needless_option_take.rs:56:13 + | +LL | let y = my_vec.first().take(); + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `first` creates a temporary value, so calling take() has no effect + +error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/precedence.fixed b/src/tools/clippy/tests/ui/precedence.fixed index c25c2062ace..9864dd2550b 100644 --- a/src/tools/clippy/tests/ui/precedence.fixed +++ b/src/tools/clippy/tests/ui/precedence.fixed @@ -20,6 +20,10 @@ fn main() { 1 ^ (1 - 1); 3 | (2 - 1); 3 & (5 - 2); + 0x0F00 & (0x00F0 << 4); + 0x0F00 & (0xF000 >> 4); + (0x0F00 << 1) ^ 3; + (0x0F00 << 1) | 2; let b = 3; trip!(b * 8); diff --git a/src/tools/clippy/tests/ui/precedence.rs b/src/tools/clippy/tests/ui/precedence.rs index dc242ecf4c7..9ef5c43833f 100644 --- a/src/tools/clippy/tests/ui/precedence.rs +++ b/src/tools/clippy/tests/ui/precedence.rs @@ -20,6 +20,10 @@ fn main() { 1 ^ 1 - 1; 3 | 2 - 1; 3 & 5 - 2; + 0x0F00 & 0x00F0 << 4; + 0x0F00 & 0xF000 >> 4; + 0x0F00 << 1 ^ 3; + 0x0F00 << 1 | 2; let b = 3; trip!(b * 8); diff --git a/src/tools/clippy/tests/ui/precedence.stderr b/src/tools/clippy/tests/ui/precedence.stderr index 8057c25a5e4..0d63e827d66 100644 --- a/src/tools/clippy/tests/ui/precedence.stderr +++ b/src/tools/clippy/tests/ui/precedence.stderr @@ -43,5 +43,29 @@ error: operator precedence can trip the unwary LL | 3 & 5 - 2; | ^^^^^^^^^ help: consider parenthesizing your expression: `3 & (5 - 2)` -error: aborting due to 7 previous errors +error: operator precedence can trip the unwary + --> tests/ui/precedence.rs:23:5 + | +LL | 0x0F00 & 0x00F0 << 4; + | ^^^^^^^^^^^^^^^^^^^^ help: consider parenthesizing your expression: `0x0F00 & (0x00F0 << 4)` + +error: operator precedence can trip the unwary + --> tests/ui/precedence.rs:24:5 + | +LL | 0x0F00 & 0xF000 >> 4; + | ^^^^^^^^^^^^^^^^^^^^ help: consider parenthesizing your expression: `0x0F00 & (0xF000 >> 4)` + +error: operator precedence can trip 
the unwary + --> tests/ui/precedence.rs:25:5 + | +LL | 0x0F00 << 1 ^ 3; + | ^^^^^^^^^^^^^^^ help: consider parenthesizing your expression: `(0x0F00 << 1) ^ 3` + +error: operator precedence can trip the unwary + --> tests/ui/precedence.rs:26:5 + | +LL | 0x0F00 << 1 | 2; + | ^^^^^^^^^^^^^^^ help: consider parenthesizing your expression: `(0x0F00 << 1) | 2` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/print_literal.fixed b/src/tools/clippy/tests/ui/print_literal.fixed index a7157c07f8a..1705a7ff01b 100644 --- a/src/tools/clippy/tests/ui/print_literal.fixed +++ b/src/tools/clippy/tests/ui/print_literal.fixed @@ -1,5 +1,5 @@ #![warn(clippy::print_literal)] -#![allow(clippy::uninlined_format_args)] +#![allow(clippy::uninlined_format_args, clippy::literal_string_with_formatting_args)] fn main() { // these should be fine diff --git a/src/tools/clippy/tests/ui/print_literal.rs b/src/tools/clippy/tests/ui/print_literal.rs index 4b04b42744c..d10b26b5887 100644 --- a/src/tools/clippy/tests/ui/print_literal.rs +++ b/src/tools/clippy/tests/ui/print_literal.rs @@ -1,5 +1,5 @@ #![warn(clippy::print_literal)] -#![allow(clippy::uninlined_format_args)] +#![allow(clippy::uninlined_format_args, clippy::literal_string_with_formatting_args)] fn main() { // these should be fine diff --git a/src/tools/clippy/tests/ui/rename.fixed b/src/tools/clippy/tests/ui/rename.fixed index 47149622ef7..47d6e119543 100644 --- a/src/tools/clippy/tests/ui/rename.fixed +++ b/src/tools/clippy/tests/ui/rename.fixed @@ -15,6 +15,7 @@ #![allow(clippy::mixed_read_write_in_expression)] #![allow(clippy::manual_find_map)] #![allow(clippy::manual_filter_map)] +#![allow(unpredictable_function_pointer_comparisons)] #![allow(clippy::useless_conversion)] #![allow(clippy::redundant_pattern_matching)] #![allow(clippy::match_result_ok)] diff --git a/src/tools/clippy/tests/ui/rename.rs b/src/tools/clippy/tests/ui/rename.rs index 7a78a5d280d..12c7db69be2 100644 --- a/src/tools/clippy/tests/ui/rename.rs +++ b/src/tools/clippy/tests/ui/rename.rs @@ -15,6 +15,7 @@ #![allow(clippy::mixed_read_write_in_expression)] #![allow(clippy::manual_find_map)] #![allow(clippy::manual_filter_map)] +#![allow(unpredictable_function_pointer_comparisons)] #![allow(clippy::useless_conversion)] #![allow(clippy::redundant_pattern_matching)] #![allow(clippy::match_result_ok)] diff --git a/src/tools/clippy/tests/ui/rename.stderr b/src/tools/clippy/tests/ui/rename.stderr index dc24bc16d0e..1ec45c4f1f7 100644 --- a/src/tools/clippy/tests/ui/rename.stderr +++ b/src/tools/clippy/tests/ui/rename.stderr @@ -1,5 +1,5 @@ error: lint `clippy::almost_complete_letter_range` has been renamed to `clippy::almost_complete_range` - --> tests/ui/rename.rs:64:9 + --> tests/ui/rename.rs:65:9 | LL | #![warn(clippy::almost_complete_letter_range)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::almost_complete_range` @@ -8,397 +8,397 @@ LL | #![warn(clippy::almost_complete_letter_range)] = help: to override `-D warnings` add `#[allow(renamed_and_removed_lints)]` error: lint `clippy::blacklisted_name` has been renamed to `clippy::disallowed_names` - --> tests/ui/rename.rs:65:9 + --> tests/ui/rename.rs:66:9 | LL | #![warn(clippy::blacklisted_name)] | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_names` error: lint `clippy::block_in_if_condition_expr` has been renamed to `clippy::blocks_in_conditions` - --> tests/ui/rename.rs:66:9 + --> tests/ui/rename.rs:67:9 | LL | #![warn(clippy::block_in_if_condition_expr)] | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions` error: lint `clippy::block_in_if_condition_stmt` has been renamed to `clippy::blocks_in_conditions` - --> tests/ui/rename.rs:67:9 + --> tests/ui/rename.rs:68:9 | LL | #![warn(clippy::block_in_if_condition_stmt)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions` error: lint `clippy::blocks_in_if_conditions` has been renamed to `clippy::blocks_in_conditions` - --> tests/ui/rename.rs:68:9 + --> tests/ui/rename.rs:69:9 | LL | #![warn(clippy::blocks_in_if_conditions)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions` error: lint `clippy::box_vec` has been renamed to `clippy::box_collection` - --> tests/ui/rename.rs:69:9 + --> tests/ui/rename.rs:70:9 | LL | #![warn(clippy::box_vec)] | ^^^^^^^^^^^^^^^ help: use the new name: `clippy::box_collection` error: lint `clippy::const_static_lifetime` has been renamed to `clippy::redundant_static_lifetimes` - --> tests/ui/rename.rs:70:9 + --> tests/ui/rename.rs:71:9 | LL | #![warn(clippy::const_static_lifetime)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_static_lifetimes` error: lint `clippy::cyclomatic_complexity` has been renamed to `clippy::cognitive_complexity` - --> tests/ui/rename.rs:71:9 + --> tests/ui/rename.rs:72:9 | LL | #![warn(clippy::cyclomatic_complexity)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::cognitive_complexity` error: lint `clippy::derive_hash_xor_eq` has been renamed to `clippy::derived_hash_with_manual_eq` - --> tests/ui/rename.rs:72:9 + --> tests/ui/rename.rs:73:9 | LL | #![warn(clippy::derive_hash_xor_eq)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::derived_hash_with_manual_eq` error: lint `clippy::disallowed_method` has been renamed to `clippy::disallowed_methods` - --> tests/ui/rename.rs:73:9 + --> tests/ui/rename.rs:74:9 | LL | #![warn(clippy::disallowed_method)] | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_methods` error: lint `clippy::disallowed_type` has been renamed to `clippy::disallowed_types` - --> tests/ui/rename.rs:74:9 + --> tests/ui/rename.rs:75:9 | LL | #![warn(clippy::disallowed_type)] | ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_types` error: lint `clippy::eval_order_dependence` has been renamed to `clippy::mixed_read_write_in_expression` - --> tests/ui/rename.rs:75:9 + --> tests/ui/rename.rs:76:9 | LL | #![warn(clippy::eval_order_dependence)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::mixed_read_write_in_expression` error: lint `clippy::find_map` has been renamed to `clippy::manual_find_map` - --> tests/ui/rename.rs:76:9 + --> tests/ui/rename.rs:77:9 | LL | #![warn(clippy::find_map)] | ^^^^^^^^^^^^^^^^ help: use the new name: `clippy::manual_find_map` error: lint `clippy::filter_map` has been renamed to `clippy::manual_filter_map` - --> tests/ui/rename.rs:77:9 + --> tests/ui/rename.rs:78:9 | LL | #![warn(clippy::filter_map)] | ^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::manual_filter_map` error: lint `clippy::fn_address_comparisons` has been renamed to `unpredictable_function_pointer_comparisons` - --> tests/ui/rename.rs:78:9 + --> tests/ui/rename.rs:79:9 | LL | #![warn(clippy::fn_address_comparisons)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unpredictable_function_pointer_comparisons` error: lint `clippy::identity_conversion` has been renamed to `clippy::useless_conversion` 
- --> tests/ui/rename.rs:79:9 + --> tests/ui/rename.rs:80:9 | LL | #![warn(clippy::identity_conversion)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::useless_conversion` error: lint `clippy::if_let_redundant_pattern_matching` has been renamed to `clippy::redundant_pattern_matching` - --> tests/ui/rename.rs:80:9 + --> tests/ui/rename.rs:81:9 | LL | #![warn(clippy::if_let_redundant_pattern_matching)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_pattern_matching` error: lint `clippy::if_let_some_result` has been renamed to `clippy::match_result_ok` - --> tests/ui/rename.rs:81:9 + --> tests/ui/rename.rs:82:9 | LL | #![warn(clippy::if_let_some_result)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::match_result_ok` error: lint `clippy::incorrect_clone_impl_on_copy_type` has been renamed to `clippy::non_canonical_clone_impl` - --> tests/ui/rename.rs:82:9 + --> tests/ui/rename.rs:83:9 | LL | #![warn(clippy::incorrect_clone_impl_on_copy_type)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::non_canonical_clone_impl` error: lint `clippy::incorrect_partial_ord_impl_on_ord_type` has been renamed to `clippy::non_canonical_partial_ord_impl` - --> tests/ui/rename.rs:83:9 + --> tests/ui/rename.rs:84:9 | LL | #![warn(clippy::incorrect_partial_ord_impl_on_ord_type)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::non_canonical_partial_ord_impl` error: lint `clippy::integer_arithmetic` has been renamed to `clippy::arithmetic_side_effects` - --> tests/ui/rename.rs:84:9 + --> tests/ui/rename.rs:85:9 | LL | #![warn(clippy::integer_arithmetic)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::arithmetic_side_effects` error: lint `clippy::logic_bug` has been renamed to `clippy::overly_complex_bool_expr` - --> tests/ui/rename.rs:85:9 + --> tests/ui/rename.rs:86:9 | LL | #![warn(clippy::logic_bug)] | ^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::overly_complex_bool_expr` error: lint `clippy::new_without_default_derive` has been renamed to `clippy::new_without_default` - --> tests/ui/rename.rs:86:9 + --> tests/ui/rename.rs:87:9 | LL | #![warn(clippy::new_without_default_derive)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::new_without_default` error: lint `clippy::option_and_then_some` has been renamed to `clippy::bind_instead_of_map` - --> tests/ui/rename.rs:87:9 + --> tests/ui/rename.rs:88:9 | LL | #![warn(clippy::option_and_then_some)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::bind_instead_of_map` error: lint `clippy::option_expect_used` has been renamed to `clippy::expect_used` - --> tests/ui/rename.rs:88:9 + --> tests/ui/rename.rs:89:9 | LL | #![warn(clippy::option_expect_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used` error: lint `clippy::option_map_unwrap_or` has been renamed to `clippy::map_unwrap_or` - --> tests/ui/rename.rs:89:9 + --> tests/ui/rename.rs:90:9 | LL | #![warn(clippy::option_map_unwrap_or)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or` error: lint `clippy::option_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or` - --> tests/ui/rename.rs:90:9 + --> tests/ui/rename.rs:91:9 | LL | #![warn(clippy::option_map_unwrap_or_else)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or` error: lint `clippy::option_unwrap_used` has been renamed to `clippy::unwrap_used` - --> tests/ui/rename.rs:91:9 
+ --> tests/ui/rename.rs:92:9 | LL | #![warn(clippy::option_unwrap_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used` error: lint `clippy::overflow_check_conditional` has been renamed to `clippy::panicking_overflow_checks` - --> tests/ui/rename.rs:92:9 + --> tests/ui/rename.rs:93:9 | LL | #![warn(clippy::overflow_check_conditional)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::panicking_overflow_checks` error: lint `clippy::ref_in_deref` has been renamed to `clippy::needless_borrow` - --> tests/ui/rename.rs:93:9 + --> tests/ui/rename.rs:94:9 | LL | #![warn(clippy::ref_in_deref)] | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::needless_borrow` error: lint `clippy::result_expect_used` has been renamed to `clippy::expect_used` - --> tests/ui/rename.rs:94:9 + --> tests/ui/rename.rs:95:9 | LL | #![warn(clippy::result_expect_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used` error: lint `clippy::result_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or` - --> tests/ui/rename.rs:95:9 + --> tests/ui/rename.rs:96:9 | LL | #![warn(clippy::result_map_unwrap_or_else)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or` error: lint `clippy::result_unwrap_used` has been renamed to `clippy::unwrap_used` - --> tests/ui/rename.rs:96:9 + --> tests/ui/rename.rs:97:9 | LL | #![warn(clippy::result_unwrap_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used` error: lint `clippy::single_char_push_str` has been renamed to `clippy::single_char_add_str` - --> tests/ui/rename.rs:97:9 + --> tests/ui/rename.rs:98:9 | LL | #![warn(clippy::single_char_push_str)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::single_char_add_str` error: lint `clippy::stutter` has been renamed to `clippy::module_name_repetitions` - --> tests/ui/rename.rs:98:9 + --> tests/ui/rename.rs:99:9 | LL | #![warn(clippy::stutter)] | ^^^^^^^^^^^^^^^ help: use the new name: `clippy::module_name_repetitions` error: lint `clippy::thread_local_initializer_can_be_made_const` has been renamed to `clippy::missing_const_for_thread_local` - --> tests/ui/rename.rs:99:9 + --> tests/ui/rename.rs:100:9 | LL | #![warn(clippy::thread_local_initializer_can_be_made_const)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::missing_const_for_thread_local` error: lint `clippy::to_string_in_display` has been renamed to `clippy::recursive_format_impl` - --> tests/ui/rename.rs:100:9 + --> tests/ui/rename.rs:101:9 | LL | #![warn(clippy::to_string_in_display)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::recursive_format_impl` error: lint `clippy::unwrap_or_else_default` has been renamed to `clippy::unwrap_or_default` - --> tests/ui/rename.rs:101:9 + --> tests/ui/rename.rs:102:9 | LL | #![warn(clippy::unwrap_or_else_default)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_or_default` error: lint `clippy::zero_width_space` has been renamed to `clippy::invisible_characters` - --> tests/ui/rename.rs:102:9 + --> tests/ui/rename.rs:103:9 | LL | #![warn(clippy::zero_width_space)] | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::invisible_characters` error: lint `clippy::cast_ref_to_mut` has been renamed to `invalid_reference_casting` - --> tests/ui/rename.rs:103:9 + --> tests/ui/rename.rs:104:9 | LL | #![warn(clippy::cast_ref_to_mut)] | ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: 
`invalid_reference_casting` error: lint `clippy::clone_double_ref` has been renamed to `suspicious_double_ref_op` - --> tests/ui/rename.rs:104:9 + --> tests/ui/rename.rs:105:9 | LL | #![warn(clippy::clone_double_ref)] | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `suspicious_double_ref_op` error: lint `clippy::cmp_nan` has been renamed to `invalid_nan_comparisons` - --> tests/ui/rename.rs:105:9 + --> tests/ui/rename.rs:106:9 | LL | #![warn(clippy::cmp_nan)] | ^^^^^^^^^^^^^^^ help: use the new name: `invalid_nan_comparisons` error: lint `clippy::drop_bounds` has been renamed to `drop_bounds` - --> tests/ui/rename.rs:106:9 + --> tests/ui/rename.rs:107:9 | LL | #![warn(clippy::drop_bounds)] | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `drop_bounds` error: lint `clippy::drop_copy` has been renamed to `dropping_copy_types` - --> tests/ui/rename.rs:107:9 + --> tests/ui/rename.rs:108:9 | LL | #![warn(clippy::drop_copy)] | ^^^^^^^^^^^^^^^^^ help: use the new name: `dropping_copy_types` error: lint `clippy::drop_ref` has been renamed to `dropping_references` - --> tests/ui/rename.rs:108:9 + --> tests/ui/rename.rs:109:9 | LL | #![warn(clippy::drop_ref)] | ^^^^^^^^^^^^^^^^ help: use the new name: `dropping_references` error: lint `clippy::fn_null_check` has been renamed to `useless_ptr_null_checks` - --> tests/ui/rename.rs:109:9 + --> tests/ui/rename.rs:110:9 | LL | #![warn(clippy::fn_null_check)] | ^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `useless_ptr_null_checks` error: lint `clippy::for_loop_over_option` has been renamed to `for_loops_over_fallibles` - --> tests/ui/rename.rs:110:9 + --> tests/ui/rename.rs:111:9 | LL | #![warn(clippy::for_loop_over_option)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles` error: lint `clippy::for_loop_over_result` has been renamed to `for_loops_over_fallibles` - --> tests/ui/rename.rs:111:9 + --> tests/ui/rename.rs:112:9 | LL | #![warn(clippy::for_loop_over_result)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles` error: lint `clippy::for_loops_over_fallibles` has been renamed to `for_loops_over_fallibles` - --> tests/ui/rename.rs:112:9 + --> tests/ui/rename.rs:113:9 | LL | #![warn(clippy::for_loops_over_fallibles)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles` error: lint `clippy::forget_copy` has been renamed to `forgetting_copy_types` - --> tests/ui/rename.rs:113:9 + --> tests/ui/rename.rs:114:9 | LL | #![warn(clippy::forget_copy)] | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_copy_types` error: lint `clippy::forget_ref` has been renamed to `forgetting_references` - --> tests/ui/rename.rs:114:9 + --> tests/ui/rename.rs:115:9 | LL | #![warn(clippy::forget_ref)] | ^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_references` error: lint `clippy::into_iter_on_array` has been renamed to `array_into_iter` - --> tests/ui/rename.rs:115:9 + --> tests/ui/rename.rs:116:9 | LL | #![warn(clippy::into_iter_on_array)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `array_into_iter` error: lint `clippy::invalid_atomic_ordering` has been renamed to `invalid_atomic_ordering` - --> tests/ui/rename.rs:116:9 + --> tests/ui/rename.rs:117:9 | LL | #![warn(clippy::invalid_atomic_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_atomic_ordering` error: lint `clippy::invalid_ref` has been renamed to `invalid_value` - --> tests/ui/rename.rs:117:9 + --> tests/ui/rename.rs:118:9 | LL | #![warn(clippy::invalid_ref)] | 
^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_value` error: lint `clippy::invalid_utf8_in_unchecked` has been renamed to `invalid_from_utf8_unchecked` - --> tests/ui/rename.rs:118:9 + --> tests/ui/rename.rs:119:9 | LL | #![warn(clippy::invalid_utf8_in_unchecked)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_from_utf8_unchecked` error: lint `clippy::let_underscore_drop` has been renamed to `let_underscore_drop` - --> tests/ui/rename.rs:119:9 + --> tests/ui/rename.rs:120:9 | LL | #![warn(clippy::let_underscore_drop)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `let_underscore_drop` error: lint `clippy::maybe_misused_cfg` has been renamed to `unexpected_cfgs` - --> tests/ui/rename.rs:120:9 + --> tests/ui/rename.rs:121:9 | LL | #![warn(clippy::maybe_misused_cfg)] | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unexpected_cfgs` error: lint `clippy::mem_discriminant_non_enum` has been renamed to `enum_intrinsics_non_enums` - --> tests/ui/rename.rs:121:9 + --> tests/ui/rename.rs:122:9 | LL | #![warn(clippy::mem_discriminant_non_enum)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `enum_intrinsics_non_enums` error: lint `clippy::mismatched_target_os` has been renamed to `unexpected_cfgs` - --> tests/ui/rename.rs:122:9 + --> tests/ui/rename.rs:123:9 | LL | #![warn(clippy::mismatched_target_os)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unexpected_cfgs` error: lint `clippy::panic_params` has been renamed to `non_fmt_panics` - --> tests/ui/rename.rs:123:9 + --> tests/ui/rename.rs:124:9 | LL | #![warn(clippy::panic_params)] | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `non_fmt_panics` error: lint `clippy::positional_named_format_parameters` has been renamed to `named_arguments_used_positionally` - --> tests/ui/rename.rs:124:9 + --> tests/ui/rename.rs:125:9 | LL | #![warn(clippy::positional_named_format_parameters)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `named_arguments_used_positionally` error: lint `clippy::temporary_cstring_as_ptr` has been renamed to `dangling_pointers_from_temporaries` - --> tests/ui/rename.rs:125:9 + --> tests/ui/rename.rs:126:9 | LL | #![warn(clippy::temporary_cstring_as_ptr)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `dangling_pointers_from_temporaries` error: lint `clippy::undropped_manually_drops` has been renamed to `undropped_manually_drops` - --> tests/ui/rename.rs:126:9 + --> tests/ui/rename.rs:127:9 | LL | #![warn(clippy::undropped_manually_drops)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `undropped_manually_drops` error: lint `clippy::unknown_clippy_lints` has been renamed to `unknown_lints` - --> tests/ui/rename.rs:127:9 + --> tests/ui/rename.rs:128:9 | LL | #![warn(clippy::unknown_clippy_lints)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unknown_lints` error: lint `clippy::unused_label` has been renamed to `unused_labels` - --> tests/ui/rename.rs:128:9 + --> tests/ui/rename.rs:129:9 | LL | #![warn(clippy::unused_label)] | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unused_labels` error: lint `clippy::vtable_address_comparisons` has been renamed to `ambiguous_wide_pointer_comparisons` - --> tests/ui/rename.rs:129:9 + --> tests/ui/rename.rs:130:9 | LL | #![warn(clippy::vtable_address_comparisons)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `ambiguous_wide_pointer_comparisons` error: lint `clippy::reverse_range_loop` has been renamed to `clippy::reversed_empty_ranges` - --> tests/ui/rename.rs:130:9 
+ --> tests/ui/rename.rs:131:9 | LL | #![warn(clippy::reverse_range_loop)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::reversed_empty_ranges` diff --git a/src/tools/clippy/tests/ui/repr_packed_without_abi.rs b/src/tools/clippy/tests/ui/repr_packed_without_abi.rs new file mode 100644 index 00000000000..16b5ededee9 --- /dev/null +++ b/src/tools/clippy/tests/ui/repr_packed_without_abi.rs @@ -0,0 +1,37 @@ +#![deny(clippy::repr_packed_without_abi)] + +#[repr(packed)] +struct NetworkPacketHeader { + header_length: u8, + header_version: u16, +} + +#[repr(packed)] +union Foo { + a: u8, + b: u16, +} + +#[repr(C, packed)] +struct NoLintCNetworkPacketHeader { + header_length: u8, + header_version: u16, +} + +#[repr(Rust, packed)] +struct NoLintRustNetworkPacketHeader { + header_length: u8, + header_version: u16, +} + +#[repr(packed, C)] +union NotLintCFoo { + a: u8, + b: u16, +} + +#[repr(packed, Rust)] +union NotLintRustFoo { + a: u8, + b: u16, +} diff --git a/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr b/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr new file mode 100644 index 00000000000..4f7acd00db3 --- /dev/null +++ b/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr @@ -0,0 +1,35 @@ +error: item uses `packed` representation without ABI-qualification + --> tests/ui/repr_packed_without_abi.rs:4:1 + | +LL | #[repr(packed)] + | ------ `packed` representation set here +LL | / struct NetworkPacketHeader { +LL | | header_length: u8, +LL | | header_version: u16, +LL | | } + | |_^ + | + = warning: unqualified `#[repr(packed)]` defaults to `#[repr(Rust, packed)]`, which has no stable ABI + = help: qualify the desired ABI explicity via `#[repr(C, packed)]` or `#[repr(Rust, packed)]` +note: the lint level is defined here + --> tests/ui/repr_packed_without_abi.rs:1:9 + | +LL | #![deny(clippy::repr_packed_without_abi)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: item uses `packed` representation without ABI-qualification + --> tests/ui/repr_packed_without_abi.rs:10:1 + | +LL | #[repr(packed)] + | ------ `packed` representation set here +LL | / union Foo { +LL | | a: u8, +LL | | b: u16, +LL | | } + | |_^ + | + = warning: unqualified `#[repr(packed)]` defaults to `#[repr(Rust, packed)]`, which has no stable ABI + = help: qualify the desired ABI explicity via `#[repr(C, packed)]` or `#[repr(Rust, packed)]` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/result_unit_error_no_std.rs b/src/tools/clippy/tests/ui/result_unit_error_no_std.rs new file mode 100644 index 00000000000..1e7a028a7fc --- /dev/null +++ b/src/tools/clippy/tests/ui/result_unit_error_no_std.rs @@ -0,0 +1,26 @@ +#![feature(lang_items, start, libc)] +#![no_std] +#![warn(clippy::result_unit_err)] + +#[clippy::msrv = "1.80"] +pub fn returns_unit_error_no_lint() -> Result<u32, ()> { + Err(()) +} + +#[clippy::msrv = "1.81"] +pub fn returns_unit_error_lint() -> Result<u32, ()> { + Err(()) +} + +#[start] +fn main(_argc: isize, _argv: *const *const u8) -> isize { + 0 +} + +#[panic_handler] +fn panic(_info: &core::panic::PanicInfo) -> ! 
{ + loop {} +} + +#[lang = "eh_personality"] +extern "C" fn eh_personality() {} diff --git a/src/tools/clippy/tests/ui/result_unit_error_no_std.stderr b/src/tools/clippy/tests/ui/result_unit_error_no_std.stderr new file mode 100644 index 00000000000..33692e60554 --- /dev/null +++ b/src/tools/clippy/tests/ui/result_unit_error_no_std.stderr @@ -0,0 +1,12 @@ +error: this returns a `Result<_, ()>` + --> tests/ui/result_unit_error_no_std.rs:11:1 + | +LL | pub fn returns_unit_error_lint() -> Result<u32, ()> { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: use a custom `Error` type instead + = note: `-D clippy::result-unit-err` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::result_unit_err)]` + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui/shadow.rs b/src/tools/clippy/tests/ui/shadow.rs index 258dba9dd83..31944f5ef1b 100644 --- a/src/tools/clippy/tests/ui/shadow.rs +++ b/src/tools/clippy/tests/ui/shadow.rs @@ -119,4 +119,26 @@ fn ice_8748() { }]; } +// https://github.com/rust-lang/rust-clippy/issues/10780 +fn shadow_closure() { + // These are not shadow_unrelated; but they are correctly shadow_reuse + let x = Some(1); + #[allow(clippy::shadow_reuse)] + let y = x.map(|x| x + 1); + let z = x.map(|x| x + 1); + let a: Vec<Option<u8>> = [100u8, 120, 140] + .iter() + .map(|i| i.checked_mul(2)) + .map(|i| i.map(|i| i - 10)) + .collect(); +} + +struct Issue13795 { + value: i32, +} + +fn issue13795(value: Issue13795) { + let Issue13795 { value, .. } = value; +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/shadow.stderr b/src/tools/clippy/tests/ui/shadow.stderr index fdd149a2216..c8c524b3a2f 100644 --- a/src/tools/clippy/tests/ui/shadow.stderr +++ b/src/tools/clippy/tests/ui/shadow.stderr @@ -280,5 +280,41 @@ note: previous binding is here LL | let x = 1; | ^ -error: aborting due to 23 previous errors +error: `x` is shadowed + --> tests/ui/shadow.rs:128:20 + | +LL | let z = x.map(|x| x + 1); + | ^ + | +note: previous binding is here + --> tests/ui/shadow.rs:125:9 + | +LL | let x = Some(1); + | ^ + +error: `i` is shadowed + --> tests/ui/shadow.rs:132:25 + | +LL | .map(|i| i.map(|i| i - 10)) + | ^ + | +note: previous binding is here + --> tests/ui/shadow.rs:132:15 + | +LL | .map(|i| i.map(|i| i - 10)) + | ^ + +error: `value` is shadowed by itself in `value` + --> tests/ui/shadow.rs:141:22 + | +LL | let Issue13795 { value, .. 
} = value; + | ^^^^^ + | +note: previous binding is here + --> tests/ui/shadow.rs:140:15 + | +LL | fn issue13795(value: Issue13795) { + | ^^^^^ + +error: aborting due to 26 previous errors diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.fixed b/src/tools/clippy/tests/ui/significant_drop_tightening.fixed new file mode 100644 index 00000000000..ed05f6e0c8d --- /dev/null +++ b/src/tools/clippy/tests/ui/significant_drop_tightening.fixed @@ -0,0 +1,144 @@ +#![warn(clippy::significant_drop_tightening)] + +use std::sync::Mutex; + +pub fn complex_return_triggers_the_lint() -> i32 { + fn foo() -> i32 { + 1 + } + let mutex = Mutex::new(1); + let lock = mutex.lock().unwrap(); + let _ = *lock; + let _ = *lock; + drop(lock); + foo() +} + +pub fn issue_10413() { + let mutex = Mutex::new(Some(1)); + let opt = Some(1); + if opt.is_some() { + let lock = mutex.lock().unwrap(); + let _ = *lock; + if opt.is_some() { + let _ = *lock; + } + } +} + +pub fn issue_11128() { + use std::mem::drop as unlock; + + struct Foo { + droppable: Option<Vec<i32>>, + mutex: Mutex<Vec<i32>>, + } + + impl Drop for Foo { + fn drop(&mut self) { + if let Some(droppable) = self.droppable.take() { + let lock = self.mutex.lock().unwrap(); + let idx_opt = lock.iter().copied().find(|el| Some(el) == droppable.first()); + if let Some(idx) = idx_opt { + let local_droppable = vec![lock.first().copied().unwrap_or_default()]; + unlock(lock); + drop(local_droppable); + } + } + } + } +} + +pub fn issue_11160() -> bool { + let mutex = Mutex::new(1i32); + let lock = mutex.lock().unwrap(); + let _ = lock.abs(); + true +} + +pub fn issue_11189() { + struct Number { + pub value: u32, + } + + fn do_something() -> Result<(), ()> { + let number = Mutex::new(Number { value: 1 }); + let number2 = Mutex::new(Number { value: 2 }); + let number3 = Mutex::new(Number { value: 3 }); + let mut lock = number.lock().unwrap(); + let mut lock2 = number2.lock().unwrap(); + let mut lock3 = number3.lock().unwrap(); + lock.value += 1; + lock2.value += 1; + lock3.value += 1; + drop((lock, lock2, lock3)); + Ok(()) + } +} + +pub fn path_return_can_be_ignored() -> i32 { + let mutex = Mutex::new(1); + let lock = mutex.lock().unwrap(); + let rslt = *lock; + let _ = *lock; + rslt +} + +pub fn post_bindings_can_be_ignored() { + let mutex = Mutex::new(1); + let lock = mutex.lock().unwrap(); + let rslt = *lock; + let another = rslt; + let _ = another; +} + +pub fn unnecessary_contention_with_multiple_owned_results() { + { + let mutex = Mutex::new(1i32); + let lock = mutex.lock().unwrap(); + let _ = lock.abs(); + let _ = lock.is_positive(); + } + + { + let mutex = Mutex::new(1i32); + let lock = mutex.lock().unwrap(); + let rslt0 = lock.abs(); + let rslt1 = lock.is_positive(); + drop(lock); + do_heavy_computation_that_takes_time((rslt0, rslt1)); + } +} + +pub fn unnecessary_contention_with_single_owned_results() { + { + let mutex = Mutex::new(1i32); + let lock = mutex.lock().unwrap(); + let _ = lock.abs(); + } + { + let mutex = Mutex::new(vec![1i32]); + let mut lock = mutex.lock().unwrap(); + lock.clear(); + } + + { + let mutex = Mutex::new(1i32); + + let rslt0 = mutex.lock().unwrap().abs(); + + do_heavy_computation_that_takes_time(rslt0); + } + { + let mutex = Mutex::new(vec![1i32]); + + mutex.lock().unwrap().clear(); + + do_heavy_computation_that_takes_time(()); + } +} + +// Marker used for illustration purposes. 
+pub fn do_heavy_computation_that_takes_time<T>(_: T) {} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.rs b/src/tools/clippy/tests/ui/significant_drop_tightening.rs index 77538167548..e5f17278f0f 100644 --- a/src/tools/clippy/tests/ui/significant_drop_tightening.rs +++ b/src/tools/clippy/tests/ui/significant_drop_tightening.rs @@ -1,7 +1,5 @@ #![warn(clippy::significant_drop_tightening)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - use std::sync::Mutex; pub fn complex_return_triggers_the_lint() -> i32 { diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.stderr b/src/tools/clippy/tests/ui/significant_drop_tightening.stderr index 7d7e3ac7d0a..aef774a3d36 100644 --- a/src/tools/clippy/tests/ui/significant_drop_tightening.stderr +++ b/src/tools/clippy/tests/ui/significant_drop_tightening.stderr @@ -1,5 +1,5 @@ error: temporary with significant `Drop` can be early dropped - --> tests/ui/significant_drop_tightening.rs:12:9 + --> tests/ui/significant_drop_tightening.rs:10:9 | LL | pub fn complex_return_triggers_the_lint() -> i32 { | __________________________________________________- @@ -23,7 +23,7 @@ LL + drop(lock); | error: temporary with significant `Drop` can be early dropped - --> tests/ui/significant_drop_tightening.rs:106:13 + --> tests/ui/significant_drop_tightening.rs:104:13 | LL | / { LL | | let mutex = Mutex::new(1i32); @@ -43,7 +43,7 @@ LL + drop(lock); | error: temporary with significant `Drop` can be early dropped - --> tests/ui/significant_drop_tightening.rs:127:13 + --> tests/ui/significant_drop_tightening.rs:125:13 | LL | / { LL | | let mutex = Mutex::new(1i32); @@ -59,14 +59,11 @@ help: merge the temporary construction with its single usage | LL ~ LL + let rslt0 = mutex.lock().unwrap().abs(); - | -help: remove separated single usage - | -LL - let rslt0 = lock.abs(); +LL ~ | error: temporary with significant `Drop` can be early dropped - --> tests/ui/significant_drop_tightening.rs:133:17 + --> tests/ui/significant_drop_tightening.rs:131:17 | LL | / { LL | | let mutex = Mutex::new(vec![1i32]); @@ -82,10 +79,7 @@ help: merge the temporary construction with its single usage | LL ~ LL + mutex.lock().unwrap().clear(); - | -help: remove separated single usage - | -LL - lock.clear(); +LL ~ | error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/single_match.fixed b/src/tools/clippy/tests/ui/single_match.fixed index 4016b2699d6..d3d5fd8b35c 100644 --- a/src/tools/clippy/tests/ui/single_match.fixed +++ b/src/tools/clippy/tests/ui/single_match.fixed @@ -17,7 +17,13 @@ fn single_match() { }; let x = Some(1u8); - if let Some(y) = x { println!("{:?}", y) } + match x { + // Note the missing block braces. + // We suggest `if let Some(y) = x { .. }` because the macro + // is expanded before we can do anything. + Some(y) => println!("{:?}", y), + _ => (), + } let z = (1u8, 1u8); if let (2..=3, 7..=9) = z { dummy() }; @@ -297,6 +303,10 @@ fn issue11365() { if let Some(A | B) = &Some(A) { println!() } } +fn issue12758(s: &[u8]) { + if &s[0..3] == b"foo" { println!() } +} + #[derive(Eq, PartialEq)] pub struct Data([u8; 4]); @@ -318,5 +328,25 @@ fn irrefutable_match() { - println!() + println!(); + + let mut x = vec![1i8]; + + // Should not lint. + match x.pop() { + // bla + Some(u) => println!("{u}"), + // more comments! + None => {}, + } + // Should not lint. 
+ match x.pop() { + // bla + Some(u) => { + // bla + println!("{u}"); + }, + // bla + None => {}, + } } diff --git a/src/tools/clippy/tests/ui/single_match.rs b/src/tools/clippy/tests/ui/single_match.rs index 75edaa60605..2f3547c5063 100644 --- a/src/tools/clippy/tests/ui/single_match.rs +++ b/src/tools/clippy/tests/ui/single_match.rs @@ -361,6 +361,13 @@ fn issue11365() { } } +fn issue12758(s: &[u8]) { + match &s[0..3] { + b"foo" => println!(), + _ => {}, + } +} + #[derive(Eq, PartialEq)] pub struct Data([u8; 4]); @@ -401,4 +408,24 @@ fn irrefutable_match() { CONST_I32 => println!(), _ => {}, } + + let mut x = vec![1i8]; + + // Should not lint. + match x.pop() { + // bla + Some(u) => println!("{u}"), + // more comments! + None => {}, + } + // Should not lint. + match x.pop() { + // bla + Some(u) => { + // bla + println!("{u}"); + }, + // bla + None => {}, + } } diff --git a/src/tools/clippy/tests/ui/single_match.stderr b/src/tools/clippy/tests/ui/single_match.stderr index dd03737279a..54bbfbac093 100644 --- a/src/tools/clippy/tests/ui/single_match.stderr +++ b/src/tools/clippy/tests/ui/single_match.stderr @@ -19,15 +19,6 @@ LL ~ }; | error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` - --> tests/ui/single_match.rs:23:5 - | -LL | / match x { -... | -LL | | _ => (), -LL | | } - | |_____^ help: try: `if let Some(y) = x { println!("{:?}", y) }` - -error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> tests/ui/single_match.rs:32:5 | LL | / match z { @@ -213,8 +204,17 @@ LL | | None | Some(_) => {}, LL | | } | |_____^ help: try: `if let Some(A | B) = &Some(A) { println!() }` +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match.rs:365:5 + | +LL | / match &s[0..3] { +LL | | b"foo" => println!(), +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if &s[0..3] == b"foo" { println!() }` + error: this pattern is irrefutable, `match` is useless - --> tests/ui/single_match.rs:371:5 + --> tests/ui/single_match.rs:378:5 | LL | / match DATA { LL | | DATA => println!(), @@ -223,7 +223,7 @@ LL | | } | |_____^ help: try: `println!();` error: this pattern is irrefutable, `match` is useless - --> tests/ui/single_match.rs:376:5 + --> tests/ui/single_match.rs:383:5 | LL | / match CONST_I32 { LL | | CONST_I32 => println!(), @@ -232,7 +232,7 @@ LL | | } | |_____^ help: try: `println!();` error: this pattern is irrefutable, `match` is useless - --> tests/ui/single_match.rs:382:5 + --> tests/ui/single_match.rs:389:5 | LL | / match i { LL | | i => { @@ -252,7 +252,7 @@ LL + } | error: this pattern is irrefutable, `match` is useless - --> tests/ui/single_match.rs:390:5 + --> tests/ui/single_match.rs:397:5 | LL | / match i { LL | | i => {}, @@ -261,7 +261,7 @@ LL | | } | |_____^ help: `match` expression can be removed error: this pattern is irrefutable, `match` is useless - --> tests/ui/single_match.rs:395:5 + --> tests/ui/single_match.rs:402:5 | LL | / match i { LL | | i => (), @@ -270,13 +270,13 @@ LL | | } | |_____^ help: `match` expression can be removed error: this pattern is irrefutable, `match` is useless - --> tests/ui/single_match.rs:400:5 + --> tests/ui/single_match.rs:407:5 | LL | / match CONST_I32 { LL | | CONST_I32 => println!(), LL | | _ => {}, LL | | } - | |_____^ help: try: `println!()` + | |_____^ help: try: `println!();` error: aborting due to 26 previous errors diff --git a/src/tools/clippy/tests/ui/trailing_empty_array.rs 
b/src/tools/clippy/tests/ui/trailing_empty_array.rs index 3d06c262168..309a5920dfd 100644 --- a/src/tools/clippy/tests/ui/trailing_empty_array.rs +++ b/src/tools/clippy/tests/ui/trailing_empty_array.rs @@ -1,4 +1,5 @@ #![warn(clippy::trailing_empty_array)] +#![allow(clippy::repr_packed_without_abi)] // Do lint: diff --git a/src/tools/clippy/tests/ui/trailing_empty_array.stderr b/src/tools/clippy/tests/ui/trailing_empty_array.stderr index 756381478f2..7ebff372cf7 100644 --- a/src/tools/clippy/tests/ui/trailing_empty_array.stderr +++ b/src/tools/clippy/tests/ui/trailing_empty_array.stderr @@ -1,5 +1,5 @@ error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:5:1 + --> tests/ui/trailing_empty_array.rs:6:1 | LL | / struct RarelyUseful { LL | | @@ -13,7 +13,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::trailing_empty_array)]` error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:11:1 + --> tests/ui/trailing_empty_array.rs:12:1 | LL | / struct OnlyField { LL | | @@ -24,7 +24,7 @@ LL | | } = help: consider annotating `OnlyField` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:16:1 + --> tests/ui/trailing_empty_array.rs:17:1 | LL | / struct GenericArrayType<T> { LL | | @@ -36,7 +36,7 @@ LL | | } = help: consider annotating `GenericArrayType` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:23:1 + --> tests/ui/trailing_empty_array.rs:24:1 | LL | / struct OnlyAnotherAttribute { LL | | @@ -48,7 +48,7 @@ LL | | } = help: consider annotating `OnlyAnotherAttribute` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:30:1 + --> tests/ui/trailing_empty_array.rs:31:1 | LL | / struct OnlyADeriveAttribute { LL | | @@ -60,7 +60,7 @@ LL | | } = help: consider annotating `OnlyADeriveAttribute` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:37:1 + --> tests/ui/trailing_empty_array.rs:38:1 | LL | / struct ZeroSizedWithConst { LL | | @@ -72,7 +72,7 @@ LL | | } = help: consider annotating `ZeroSizedWithConst` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:47:1 + --> tests/ui/trailing_empty_array.rs:48:1 | LL | / struct ZeroSizedWithConstFunction { LL | | @@ -84,7 +84,7 @@ LL | | } = help: consider annotating `ZeroSizedWithConstFunction` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:56:1 + --> tests/ui/trailing_empty_array.rs:57:1 | LL | / struct ZeroSizedWithConstFunction2 { LL | | @@ -96,7 +96,7 @@ LL | | } = help: consider annotating `ZeroSizedWithConstFunction2` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:62:1 + --> tests/ui/trailing_empty_array.rs:63:1 | LL | struct 
ZeroSizedArrayWrapper([usize; 0]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -104,7 +104,7 @@ LL | struct ZeroSizedArrayWrapper([usize; 0]); = help: consider annotating `ZeroSizedArrayWrapper` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:65:1 + --> tests/ui/trailing_empty_array.rs:66:1 | LL | struct TupleStruct(i32, [usize; 0]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -112,7 +112,7 @@ LL | struct TupleStruct(i32, [usize; 0]); = help: consider annotating `TupleStruct` with `#[repr(C)]` or another `repr` attribute error: trailing zero-sized array in a struct which is not marked with a `repr` attribute - --> tests/ui/trailing_empty_array.rs:68:1 + --> tests/ui/trailing_empty_array.rs:69:1 | LL | / struct LotsOfFields { LL | | diff --git a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.fixed b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.fixed index f0d570efdce..9911d131707 100644 --- a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.fixed +++ b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.fixed @@ -3,6 +3,7 @@ //@[edition2021] edition:2021 #![warn(clippy::uninlined_format_args)] +#![allow(clippy::literal_string_with_formatting_args)] fn main() { let var = 1; diff --git a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr index 0541dd9a7d7..4b154abac5b 100644 --- a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr +++ b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2018.stderr @@ -1,5 +1,5 @@ error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:10:5 + --> tests/ui/uninlined_format_args_panic.rs:11:5 | LL | println!("val='{}'", var); | ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.fixed b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.fixed index 7c0f28c4576..87b74670565 100644 --- a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.fixed +++ b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.fixed @@ -3,6 +3,7 @@ //@[edition2021] edition:2021 #![warn(clippy::uninlined_format_args)] +#![allow(clippy::literal_string_with_formatting_args)] fn main() { let var = 1; diff --git a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.stderr b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.stderr index 3615eaa9dee..7638d3f8bba 100644 --- a/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.stderr +++ b/src/tools/clippy/tests/ui/uninlined_format_args_panic.edition2021.stderr @@ -1,5 +1,5 @@ error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:10:5 + --> tests/ui/uninlined_format_args_panic.rs:11:5 | LL | println!("val='{}'", var); | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -13,7 +13,7 @@ LL + println!("val='{var}'"); | error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:13:9 + --> tests/ui/uninlined_format_args_panic.rs:14:9 | LL | panic!("p1 {}", var); | ^^^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL + panic!("p1 {var}"); | error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:16:9 + --> 
tests/ui/uninlined_format_args_panic.rs:17:9 | LL | panic!("p2 {0}", var); | ^^^^^^^^^^^^^^^^^^^^^ @@ -37,7 +37,7 @@ LL + panic!("p2 {var}"); | error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:19:9 + --> tests/ui/uninlined_format_args_panic.rs:20:9 | LL | panic!("p3 {var}", var = var); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -49,7 +49,7 @@ LL + panic!("p3 {var}"); | error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:29:5 + --> tests/ui/uninlined_format_args_panic.rs:30:5 | LL | assert!(var == 1, "p5 {}", var); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -61,7 +61,7 @@ LL + assert!(var == 1, "p5 {var}"); | error: variables can be used directly in the `format!` string - --> tests/ui/uninlined_format_args_panic.rs:30:5 + --> tests/ui/uninlined_format_args_panic.rs:31:5 | LL | debug_assert!(var == 1, "p6 {}", var); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/uninlined_format_args_panic.rs b/src/tools/clippy/tests/ui/uninlined_format_args_panic.rs index fa594d9a96f..647c69bc5c4 100644 --- a/src/tools/clippy/tests/ui/uninlined_format_args_panic.rs +++ b/src/tools/clippy/tests/ui/uninlined_format_args_panic.rs @@ -3,6 +3,7 @@ //@[edition2021] edition:2021 #![warn(clippy::uninlined_format_args)] +#![allow(clippy::literal_string_with_formatting_args)] fn main() { let var = 1; diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed new file mode 100644 index 00000000000..dc5e163ff04 --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed @@ -0,0 +1,201 @@ +#![allow(unused_assignments)] +#![warn(clippy::unnecessary_to_owned)] + +#[allow(dead_code)] +#[derive(Clone, Copy)] +enum FileType { + Account, + PrivateKey, + Certificate, +} + +fn main() { + let path = std::path::Path::new("x"); + + let _ = check_files(&[(FileType::Account, path)]); + let _ = check_files_vec(vec![(FileType::Account, path)]); + + // negative tests + let _ = check_files_ref(&[(FileType::Account, path)]); + let _ = check_files_mut(&[(FileType::Account, path)]); + let _ = check_files_ref_mut(&[(FileType::Account, path)]); + let _ = check_files_self_and_arg(&[(FileType::Account, path)]); + let _ = check_files_mut_path_buf(&[(FileType::Account, std::path::PathBuf::new())]); + + check_mut_iteratee_and_modify_inner_variable(); +} + +// `check_files` and its variants are based on: +// https://github.com/breard-r/acmed/blob/1f0dcc32aadbc5e52de6d23b9703554c0f925113/acmed/src/storage.rs#L262 +fn check_files(files: &[(FileType, &std::path::Path)]) -> bool { + for (t, path) in files { + let other = match get_file_path(t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() || !other.is_file() { + return false; + } + } + true +} + +fn check_files_vec(files: Vec<(FileType, &std::path::Path)>) -> bool { + for (t, path) in files.iter() { + let other = match get_file_path(t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() || !other.is_file() { + return false; + } + } + true +} + +fn check_files_ref(files: &[(FileType, &std::path::Path)]) -> bool { + for (ref t, path) in files.iter().copied() { + let other = match get_file_path(t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() || !other.is_file() { + return false; + } + } + true +} + +#[allow(unused_assignments)] +fn check_files_mut(files: &[(FileType, &std::path::Path)]) -> bool { + for 
(mut t, path) in files.iter().copied() { + t = FileType::PrivateKey; + let other = match get_file_path(&t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() || !other.is_file() { + return false; + } + } + true +} + +fn check_files_ref_mut(files: &[(FileType, &std::path::Path)]) -> bool { + for (ref mut t, path) in files.iter().copied() { + *t = FileType::PrivateKey; + let other = match get_file_path(t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() || !other.is_file() { + return false; + } + } + true +} + +fn check_files_self_and_arg(files: &[(FileType, &std::path::Path)]) -> bool { + for (t, path) in files.iter().copied() { + let other = match get_file_path(&t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.join(path).is_file() || !other.is_file() { + return false; + } + } + true +} + +#[allow(unused_assignments)] +fn check_files_mut_path_buf(files: &[(FileType, std::path::PathBuf)]) -> bool { + for (mut t, path) in files.iter().cloned() { + t = FileType::PrivateKey; + let other = match get_file_path(&t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() || !other.is_file() { + return false; + } + } + true +} + +fn get_file_path(_file_type: &FileType) -> Result<std::path::PathBuf, std::io::Error> { + Ok(std::path::PathBuf::new()) +} + +// Issue 12098 +// https://github.com/rust-lang/rust-clippy/issues/12098 +// no message emits +fn check_mut_iteratee_and_modify_inner_variable() { + struct Test { + list: Vec<String>, + mut_this: bool, + } + + impl Test { + fn list(&self) -> &[String] { + &self.list + } + } + + let mut test = Test { + list: vec![String::from("foo"), String::from("bar")], + mut_this: false, + }; + + for _item in test.list().to_vec() { + println!("{}", _item); + + test.mut_this = true; + { + test.mut_this = true; + } + } +} + +mod issue_12821 { + fn foo() { + let v: Vec<_> = "hello".chars().collect(); + for c in v.iter() { + //~^ ERROR: unnecessary use of `cloned` + println!("{c}"); // should not suggest to remove `&` + } + } + + fn bar() { + let v: Vec<_> = "hello".chars().collect(); + for c in v.iter() { + //~^ ERROR: unnecessary use of `cloned` + let ref_c = c; //~ HELP: remove any references to the binding + println!("{ref_c}"); + } + } + + fn baz() { + let v: Vec<_> = "hello".chars().enumerate().collect(); + for (i, c) in v.iter() { + //~^ ERROR: unnecessary use of `cloned` + let ref_c = c; //~ HELP: remove any references to the binding + let ref_i = i; + println!("{i} {ref_c}"); // should not suggest to remove `&` from `i` + } + } +} diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs index 331b7b25271..8f797ac717f 100644 --- a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs +++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs @@ -1,8 +1,6 @@ #![allow(unused_assignments)] #![warn(clippy::unnecessary_to_owned)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - #[allow(dead_code)] #[derive(Clone, Copy)] enum FileType { diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr index e3592e3cbbd..6f2ae0ab1f3 100644 --- a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr @@ -1,71 +1,58 @@ error: unnecessary use of `copied` - --> tests/ui/unnecessary_iter_cloned.rs:33:22 + --> tests/ui/unnecessary_iter_cloned.rs:31:22 | LL | for (t, 
path) in files.iter().copied() { | ^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::unnecessary-to-owned` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::unnecessary_to_owned)]` -help: use - | -LL | for (t, path) in files { - | ~~~~~ help: remove any references to the binding | -LL - let other = match get_file_path(&t) { -LL + let other = match get_file_path(t) { +LL ~ for (t, path) in files { +LL ~ let other = match get_file_path(t) { | error: unnecessary use of `copied` - --> tests/ui/unnecessary_iter_cloned.rs:48:22 + --> tests/ui/unnecessary_iter_cloned.rs:46:22 | LL | for (t, path) in files.iter().copied() { | ^^^^^^^^^^^^^^^^^^^^^ | -help: use - | -LL | for (t, path) in files.iter() { - | ~~~~~~~~~~~~ help: remove any references to the binding | -LL - let other = match get_file_path(&t) { -LL + let other = match get_file_path(t) { +LL ~ for (t, path) in files.iter() { +LL ~ let other = match get_file_path(t) { | error: unnecessary use of `cloned` - --> tests/ui/unnecessary_iter_cloned.rs:179:18 + --> tests/ui/unnecessary_iter_cloned.rs:177:18 | LL | for c in v.iter().cloned() { - | ^^^^^^^^^^^^^^^^^ help: use: `v.iter()` + | ^^^^^^^^^^^^^^^^^ help: remove any references to the binding: `v.iter()` error: unnecessary use of `cloned` - --> tests/ui/unnecessary_iter_cloned.rs:187:18 + --> tests/ui/unnecessary_iter_cloned.rs:185:18 | LL | for c in v.iter().cloned() { | ^^^^^^^^^^^^^^^^^ | -help: use - | -LL | for c in v.iter() { - | ~~~~~~~~ help: remove any references to the binding | -LL - let ref_c = &c; -LL + let ref_c = c; +LL ~ for c in v.iter() { +LL | +LL ~ let ref_c = c; | error: unnecessary use of `cloned` - --> tests/ui/unnecessary_iter_cloned.rs:196:23 + --> tests/ui/unnecessary_iter_cloned.rs:194:23 | LL | for (i, c) in v.iter().cloned() { | ^^^^^^^^^^^^^^^^^ | -help: use - | -LL | for (i, c) in v.iter() { - | ~~~~~~~~ help: remove any references to the binding | +LL ~ for (i, c) in v.iter() { +LL | LL ~ let ref_c = c; LL ~ let ref_i = i; | diff --git a/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.fixed b/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.fixed new file mode 100644 index 00000000000..c7be000b820 --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.fixed @@ -0,0 +1,20 @@ +#![no_std] +extern crate alloc; +use alloc::vec; +use alloc::vec::Vec; + +fn issue_11524() -> Vec<i32> { + let mut vec = vec![1, 2, 3]; + + // Should lint and suggest `vec.sort_by_key(|a| a + 1);` + vec.sort_by_key(|a| a + 1); + vec +} + +fn issue_11524_2() -> Vec<i32> { + let mut vec = vec![1, 2, 3]; + + // Should lint and suggest `vec.sort_by_key(|b| core::cmp::Reverse(b + 1));` + vec.sort_by_key(|b| core::cmp::Reverse(b + 1)); + vec +} diff --git a/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.rs b/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.rs new file mode 100644 index 00000000000..5f44be97c61 --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.rs @@ -0,0 +1,20 @@ +#![no_std] +extern crate alloc; +use alloc::vec; +use alloc::vec::Vec; + +fn issue_11524() -> Vec<i32> { + let mut vec = vec![1, 2, 3]; + + // Should lint and suggest `vec.sort_by_key(|a| a + 1);` + vec.sort_by(|a, b| (a + 1).cmp(&(b + 1))); + vec +} + +fn issue_11524_2() -> Vec<i32> { + let mut vec = vec![1, 2, 3]; + + // Should lint and suggest `vec.sort_by_key(|b| core::cmp::Reverse(b + 1));` + vec.sort_by(|a, b| (b + 1).cmp(&(a + 1))); + vec +} diff --git a/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.stderr 
b/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.stderr new file mode 100644 index 00000000000..a57fbc7a632 --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_sort_by_no_std.stderr @@ -0,0 +1,17 @@ +error: consider using `sort_by_key` + --> tests/ui/unnecessary_sort_by_no_std.rs:10:5 + | +LL | vec.sort_by(|a, b| (a + 1).cmp(&(b + 1))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_by_key(|a| a + 1)` + | + = note: `-D clippy::unnecessary-sort-by` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::unnecessary_sort_by)]` + +error: consider using `sort_by_key` + --> tests/ui/unnecessary_sort_by_no_std.rs:18:5 + | +LL | vec.sort_by(|a, b| (b + 1).cmp(&(a + 1))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_by_key(|b| core::cmp::Reverse(b + 1))` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed new file mode 100644 index 00000000000..fdcac8fb08d --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed @@ -0,0 +1,587 @@ +#![allow( + clippy::needless_borrow, + clippy::needless_borrows_for_generic_args, + clippy::ptr_arg, + clippy::manual_async_fn, + clippy::needless_lifetimes +)] +#![warn(clippy::unnecessary_to_owned, clippy::redundant_clone)] + +use std::borrow::Cow; +use std::ffi::{CStr, CString, OsStr, OsString}; +use std::ops::Deref; + +#[derive(Clone)] +struct X(String); + +impl Deref for X { + type Target = [u8]; + fn deref(&self) -> &[u8] { + self.0.as_bytes() + } +} + +impl AsRef<str> for X { + fn as_ref(&self) -> &str { + self.0.as_str() + } +} + +#[allow(clippy::to_string_trait_impl)] +impl ToString for X { + fn to_string(&self) -> String { + self.0.to_string() + } +} + +impl X { + fn join(&self, other: impl AsRef<str>) -> Self { + let mut s = self.0.clone(); + s.push_str(other.as_ref()); + Self(s) + } +} + +#[allow(dead_code)] +#[derive(Clone)] +enum FileType { + Account, + PrivateKey, + Certificate, +} + +fn main() { + let c_str = CStr::from_bytes_with_nul(&[0]).unwrap(); + let os_str = OsStr::new("x"); + let path = std::path::Path::new("x"); + let s = "x"; + let array = ["x"]; + let array_ref = &["x"]; + let slice = &["x"][..]; + let x = X(String::from("x")); + let x_ref = &x; + + require_c_str(&Cow::from(c_str)); + require_c_str(c_str); + + require_os_str(os_str); + require_os_str(&Cow::from(os_str)); + require_os_str(os_str); + + require_path(path); + require_path(&Cow::from(path)); + require_path(path); + + require_str(s); + require_str(&Cow::from(s)); + require_str(s); + require_str(x_ref.as_ref()); + + require_slice(slice); + require_slice(&Cow::from(slice)); + require_slice(array.as_ref()); + require_slice(array_ref.as_ref()); + require_slice(slice); + require_slice(&x_ref.to_owned()); // No longer flagged because of #8759. + + require_x(&Cow::<X>::Owned(x.clone())); + require_x(&x_ref.to_owned()); // No longer flagged because of #8759. 
+ + require_deref_c_str(c_str); + require_deref_os_str(os_str); + require_deref_path(path); + require_deref_str(s); + require_deref_slice(slice); + + require_impl_deref_c_str(c_str); + require_impl_deref_os_str(os_str); + require_impl_deref_path(path); + require_impl_deref_str(s); + require_impl_deref_slice(slice); + + require_deref_str_slice(s, slice); + require_deref_slice_str(slice, s); + + require_as_ref_c_str(c_str); + require_as_ref_os_str(os_str); + require_as_ref_path(path); + require_as_ref_str(s); + require_as_ref_str(&x); + require_as_ref_slice(array); + require_as_ref_slice(array_ref); + require_as_ref_slice(slice); + + require_impl_as_ref_c_str(c_str); + require_impl_as_ref_os_str(os_str); + require_impl_as_ref_path(path); + require_impl_as_ref_str(s); + require_impl_as_ref_str(&x); + require_impl_as_ref_slice(array); + require_impl_as_ref_slice(array_ref); + require_impl_as_ref_slice(slice); + + require_as_ref_str_slice(s, array); + require_as_ref_str_slice(s, array_ref); + require_as_ref_str_slice(s, slice); + require_as_ref_slice_str(array, s); + require_as_ref_slice_str(array_ref, s); + require_as_ref_slice_str(slice, s); + + let _ = x.join(x_ref); + + let _ = slice.iter().copied(); + let _ = slice.iter().copied(); + let _ = [std::path::PathBuf::new()][..].iter().cloned(); + let _ = [std::path::PathBuf::new()][..].iter().cloned(); + + let _ = slice.iter().copied(); + let _ = slice.iter().copied(); + let _ = [std::path::PathBuf::new()][..].iter().cloned(); + let _ = [std::path::PathBuf::new()][..].iter().cloned(); + + let _ = check_files(&[FileType::Account]); + + // negative tests + require_string(&s.to_string()); + require_string(&Cow::from(s).into_owned()); + require_string(&s.to_owned()); + require_string(&x_ref.to_string()); + + // `X` isn't copy. + require_slice(&x.to_owned()); + require_deref_slice(x.to_owned()); + + // The following should be flagged by `redundant_clone`, but not by this lint. + require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap()); + require_os_str(&OsString::from("x")); + require_path(&std::path::PathBuf::from("x")); + require_str(&String::from("x")); + require_slice(&[String::from("x")]); + + let slice = [0u8; 1024]; + let _ref_str: &str = core::str::from_utf8(&slice).expect("not UTF-8"); + let _ref_str: &str = core::str::from_utf8(b"foo").unwrap(); + let _ref_str: &str = core::str::from_utf8(b"foo".as_slice()).unwrap(); + // Expression is of type `&String`, can't suggest `str::from_utf8` here + let _ref_string = &String::from_utf8(b"foo".to_vec()).unwrap(); + macro_rules! arg_from_macro { + () => { + b"foo".to_vec() + }; + } + macro_rules! 
string_from_utf8_from_macro { + () => { + &String::from_utf8(b"foo".to_vec()).unwrap() + }; + } + let _ref_str: &str = &String::from_utf8(arg_from_macro!()).unwrap(); + let _ref_str: &str = string_from_utf8_from_macro!(); +} + +fn require_c_str(_: &CStr) {} +fn require_os_str(_: &OsStr) {} +fn require_path(_: &std::path::Path) {} +fn require_str(_: &str) {} +fn require_slice<T>(_: &[T]) {} +fn require_x(_: &X) {} + +fn require_deref_c_str<T: Deref<Target = CStr>>(_: T) {} +fn require_deref_os_str<T: Deref<Target = OsStr>>(_: T) {} +fn require_deref_path<T: Deref<Target = std::path::Path>>(_: T) {} +fn require_deref_str<T: Deref<Target = str>>(_: T) {} +fn require_deref_slice<T, U: Deref<Target = [T]>>(_: U) {} + +fn require_impl_deref_c_str(_: impl Deref<Target = CStr>) {} +fn require_impl_deref_os_str(_: impl Deref<Target = OsStr>) {} +fn require_impl_deref_path(_: impl Deref<Target = std::path::Path>) {} +fn require_impl_deref_str(_: impl Deref<Target = str>) {} +fn require_impl_deref_slice<T>(_: impl Deref<Target = [T]>) {} + +fn require_deref_str_slice<T: Deref<Target = str>, U, V: Deref<Target = [U]>>(_: T, _: V) {} +fn require_deref_slice_str<T, U: Deref<Target = [T]>, V: Deref<Target = str>>(_: U, _: V) {} + +fn require_as_ref_c_str<T: AsRef<CStr>>(_: T) {} +fn require_as_ref_os_str<T: AsRef<OsStr>>(_: T) {} +fn require_as_ref_path<T: AsRef<std::path::Path>>(_: T) {} +fn require_as_ref_str<T: AsRef<str>>(_: T) {} +fn require_as_ref_slice<T, U: AsRef<[T]>>(_: U) {} + +fn require_impl_as_ref_c_str(_: impl AsRef<CStr>) {} +fn require_impl_as_ref_os_str(_: impl AsRef<OsStr>) {} +fn require_impl_as_ref_path(_: impl AsRef<std::path::Path>) {} +fn require_impl_as_ref_str(_: impl AsRef<str>) {} +fn require_impl_as_ref_slice<T>(_: impl AsRef<[T]>) {} + +fn require_as_ref_str_slice<T: AsRef<str>, U, V: AsRef<[U]>>(_: T, _: V) {} +fn require_as_ref_slice_str<T, U: AsRef<[T]>, V: AsRef<str>>(_: U, _: V) {} + +// `check_files` is based on: +// https://github.com/breard-r/acmed/blob/1f0dcc32aadbc5e52de6d23b9703554c0f925113/acmed/src/storage.rs#L262 +fn check_files(file_types: &[FileType]) -> bool { + for t in file_types { + let path = match get_file_path(t) { + Ok(p) => p, + Err(_) => { + return false; + }, + }; + if !path.is_file() { + return false; + } + } + true +} + +fn get_file_path(_file_type: &FileType) -> Result<std::path::PathBuf, std::io::Error> { + Ok(std::path::PathBuf::new()) +} + +fn require_string(_: &String) {} + +#[clippy::msrv = "1.35"] +fn _msrv_1_35() { + // `copied` was stabilized in 1.36, so clippy should use `cloned`. + let _ = &["x"][..].iter().cloned(); +} + +#[clippy::msrv = "1.36"] +fn _msrv_1_36() { + let _ = &["x"][..].iter().copied(); +} + +// https://github.com/rust-lang/rust-clippy/issues/8507 +mod issue_8507 { + #![allow(dead_code)] + + struct Opaque<P>(P); + + pub trait Abstracted {} + + impl<P> Abstracted for Opaque<P> {} + + fn build<P>(p: P) -> Opaque<P> + where + P: AsRef<str>, + { + Opaque(p) + } + + // Should not lint. + fn test_str(s: &str) -> Box<dyn Abstracted> { + Box::new(build(s.to_string())) + } + + // Should not lint. + fn test_x(x: super::X) -> Box<dyn Abstracted> { + Box::new(build(x)) + } + + #[derive(Clone, Copy)] + struct Y(&'static str); + + impl AsRef<str> for Y { + fn as_ref(&self) -> &str { + self.0 + } + } + + #[allow(clippy::to_string_trait_impl)] + impl ToString for Y { + fn to_string(&self) -> String { + self.0.to_string() + } + } + + // Should lint because Y is copy. 
+ fn test_y(y: Y) -> Box<dyn Abstracted> { + Box::new(build(y)) + } +} + +// https://github.com/rust-lang/rust-clippy/issues/8759 +mod issue_8759 { + #![allow(dead_code)] + + #[derive(Default)] + struct View {} + + impl std::borrow::ToOwned for View { + type Owned = View; + fn to_owned(&self) -> Self::Owned { + View {} + } + } + + #[derive(Default)] + struct RenderWindow { + default_view: View, + } + + impl RenderWindow { + fn default_view(&self) -> &View { + &self.default_view + } + fn set_view(&mut self, _view: &View) {} + } + + fn main() { + let mut rw = RenderWindow::default(); + rw.set_view(&rw.default_view().to_owned()); + } +} + +mod issue_8759_variant { + #![allow(dead_code)] + + #[derive(Clone, Default)] + struct View {} + + #[derive(Default)] + struct RenderWindow { + default_view: View, + } + + impl RenderWindow { + fn default_view(&self) -> &View { + &self.default_view + } + fn set_view(&mut self, _view: &View) {} + } + + fn main() { + let mut rw = RenderWindow::default(); + rw.set_view(&rw.default_view().to_owned()); + } +} + +mod issue_9317 { + #![allow(dead_code)] + + struct Bytes {} + + #[allow(clippy::to_string_trait_impl)] + impl ToString for Bytes { + fn to_string(&self) -> String { + "123".to_string() + } + } + + impl AsRef<[u8]> for Bytes { + fn as_ref(&self) -> &[u8] { + &[1, 2, 3] + } + } + + fn consume<C: AsRef<[u8]>>(c: C) { + let _ = c; + } + + pub fn main() { + let b = Bytes {}; + // Should not lint. + consume(b.to_string()); + } +} + +mod issue_9351 { + #![allow(dead_code)] + + use std::ops::Deref; + use std::path::{Path, PathBuf}; + + fn require_deref_path<T: Deref<Target = std::path::Path>>(x: T) -> T { + x + } + + fn generic_arg_used_elsewhere<T: AsRef<Path>>(_x: T, _y: T) {} + + fn id<T: AsRef<str>>(x: T) -> T { + x + } + + fn predicates_are_satisfied(_x: impl std::fmt::Write) {} + + // Should lint + fn single_return() -> impl AsRef<str> { + id("abc") + } + + // Should not lint + fn multiple_returns(b: bool) -> impl AsRef<str> { + if b { + return String::new(); + } + + id("abc".to_string()) + } + + struct S1(String); + + // Should not lint + fn fields1() -> S1 { + S1(id("abc".to_string())) + } + + struct S2 { + s: String, + } + + // Should not lint + fn fields2() { + let mut s = S2 { s: "abc".into() }; + s.s = id("abc".to_string()); + } + + pub fn main() { + let path = std::path::Path::new("x"); + let path_buf = path.to_owned(); + + // Should not lint. 
+ let _x: PathBuf = require_deref_path(path.to_owned()); + generic_arg_used_elsewhere(path.to_owned(), path_buf); + predicates_are_satisfied(id("abc".to_string())); + } +} + +mod issue_9504 { + #![allow(dead_code)] + + async fn foo<S: AsRef<str>>(_: S) {} + async fn bar() { + foo(std::path::PathBuf::new().to_string_lossy().to_string()).await; + } +} + +mod issue_9771a { + #![allow(dead_code)] + + use std::marker::PhantomData; + + pub struct Key<K: AsRef<[u8]>, V: ?Sized>(K, PhantomData<V>); + + impl<K: AsRef<[u8]>, V: ?Sized> Key<K, V> { + pub fn new(key: K) -> Key<K, V> { + Key(key, PhantomData) + } + } + + pub fn pkh(pkh: &[u8]) -> Key<Vec<u8>, String> { + Key::new([b"pkh-", pkh].concat().to_vec()) + } +} + +mod issue_9771b { + #![allow(dead_code)] + + pub struct Key<K: AsRef<[u8]>>(K); + + pub fn from(c: &[u8]) -> Key<Vec<u8>> { + let v = [c].concat(); + Key(v.to_vec()) + } +} + +// This is a watered down version of the code in: https://github.com/oxigraph/rio +// The ICE is triggered by the call to `to_owned` on this line: +// https://github.com/oxigraph/rio/blob/66635b9ff8e5423e58932353fa40d6e64e4820f7/testsuite/src/parser_evaluator.rs#L116 +mod issue_10021 { + #![allow(unused)] + + pub struct Iri<T>(T); + + impl<T: AsRef<str>> Iri<T> { + pub fn parse(iri: T) -> Result<Self, ()> { + unimplemented!() + } + } + + pub fn parse_w3c_rdf_test_file(url: &str) -> Result<(), ()> { + let base_iri = Iri::parse(url.to_owned())?; + Ok(()) + } +} + +mod issue_10033 { + #![allow(dead_code)] + use std::fmt::Display; + use std::ops::Deref; + + fn _main() { + let f = Foo; + + // Not actually unnecessary - this calls `Foo`'s `Display` impl, not `str`'s (even though `Foo` does + // deref to `str`) + foo(&f.to_string()); + } + + fn foo(s: &str) { + println!("{}", s); + } + + struct Foo; + + impl Deref for Foo { + type Target = str; + + fn deref(&self) -> &Self::Target { + "str" + } + } + + impl Display for Foo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Foo") + } + } +} + +mod issue_11952 { + use core::future::{Future, IntoFuture}; + + fn foo<'a, T: AsRef<[u8]>>(x: T, y: &'a i32) -> impl 'a + Future<Output = Result<(), ()>> { + async move { + let _y = y; + Ok(()) + } + } + + fn bar() { + IntoFuture::into_future(foo([], &0)); + } +} + +fn borrow_checks() { + use std::borrow::Borrow; + use std::collections::HashSet; + + fn inner(a: &[&str]) { + let mut s = HashSet::from([vec!["a"]]); + s.remove(a); //~ ERROR: unnecessary use of `to_vec` + } + + let mut s = HashSet::from(["a".to_string()]); + s.remove("b"); //~ ERROR: unnecessary use of `to_owned` + s.remove("b"); //~ ERROR: unnecessary use of `to_string` + // Should not warn. + s.remove("b"); + + let mut s = HashSet::from([vec!["a"]]); + s.remove(["b"].as_slice()); //~ ERROR: unnecessary use of `to_vec` + s.remove((&["b"]).as_slice()); //~ ERROR: unnecessary use of `to_vec` + + // Should not warn. + s.remove(&["b"].to_vec().clone()); + s.remove(["a"].as_slice()); + + trait SetExt { + fn foo<Q: Borrow<str>>(&self, _: &String); + } + + impl<K> SetExt for HashSet<K> { + fn foo<Q: Borrow<str>>(&self, _: &String) {} + } + + // Should not lint! 
+ HashSet::<i32>::new().foo::<&str>(&"".to_owned()); + HashSet::<String>::new().get(&1.to_string()); +} diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs index da0c761f795..10a9727a9a7 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs @@ -7,8 +7,6 @@ )] #![warn(clippy::unnecessary_to_owned, clippy::redundant_clone)] -//@no-rustfix: need to change the suggestion to a multipart suggestion - use std::borrow::Cow; use std::ffi::{CStr, CString, OsStr, OsString}; use std::ops::Deref; diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr b/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr index 7ab1f667d9b..498ac68cdaa 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr @@ -1,11 +1,11 @@ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:157:64 + --> tests/ui/unnecessary_to_owned.rs:155:64 | LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); | ^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:157:20 + --> tests/ui/unnecessary_to_owned.rs:155:20 | LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -13,55 +13,55 @@ LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()) = help: to override `-D warnings` add `#[allow(clippy::redundant_clone)]` error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:158:40 + --> tests/ui/unnecessary_to_owned.rs:156:40 | LL | require_os_str(&OsString::from("x").to_os_string()); | ^^^^^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:158:21 + --> tests/ui/unnecessary_to_owned.rs:156:21 | LL | require_os_str(&OsString::from("x").to_os_string()); | ^^^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:159:48 + --> tests/ui/unnecessary_to_owned.rs:157:48 | LL | require_path(&std::path::PathBuf::from("x").to_path_buf()); | ^^^^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:159:19 + --> tests/ui/unnecessary_to_owned.rs:157:19 | LL | require_path(&std::path::PathBuf::from("x").to_path_buf()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:160:35 + --> tests/ui/unnecessary_to_owned.rs:158:35 | LL | require_str(&String::from("x").to_string()); | ^^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:160:18 + --> tests/ui/unnecessary_to_owned.rs:158:18 | LL | require_str(&String::from("x").to_string()); | ^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:161:39 + --> tests/ui/unnecessary_to_owned.rs:159:39 | LL | require_slice(&[String::from("x")].to_owned()); | ^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:161:20 + --> tests/ui/unnecessary_to_owned.rs:159:20 | LL | require_slice(&[String::from("x")].to_owned()); | ^^^^^^^^^^^^^^^^^^^ error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:66:36 + --> tests/ui/unnecessary_to_owned.rs:64:36 | LL | require_c_str(&Cow::from(c_str).into_owned()); | ^^^^^^^^^^^^^ help: remove this @@ -70,415 +70,415 @@ LL | 
require_c_str(&Cow::from(c_str).into_owned()); = help: to override `-D warnings` add `#[allow(clippy::unnecessary_to_owned)]` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:67:19 + --> tests/ui/unnecessary_to_owned.rs:65:19 | LL | require_c_str(&c_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_os_string` - --> tests/ui/unnecessary_to_owned.rs:69:20 + --> tests/ui/unnecessary_to_owned.rs:67:20 | LL | require_os_str(&os_str.to_os_string()); | ^^^^^^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:70:38 + --> tests/ui/unnecessary_to_owned.rs:68:38 | LL | require_os_str(&Cow::from(os_str).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:71:20 + --> tests/ui/unnecessary_to_owned.rs:69:20 | LL | require_os_str(&os_str.to_owned()); | ^^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_path_buf` - --> tests/ui/unnecessary_to_owned.rs:73:18 + --> tests/ui/unnecessary_to_owned.rs:71:18 | LL | require_path(&path.to_path_buf()); | ^^^^^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:74:34 + --> tests/ui/unnecessary_to_owned.rs:72:34 | LL | require_path(&Cow::from(path).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:75:18 + --> tests/ui/unnecessary_to_owned.rs:73:18 | LL | require_path(&path.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:77:17 + --> tests/ui/unnecessary_to_owned.rs:75:17 | LL | require_str(&s.to_string()); | ^^^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:78:30 + --> tests/ui/unnecessary_to_owned.rs:76:30 | LL | require_str(&Cow::from(s).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:79:17 + --> tests/ui/unnecessary_to_owned.rs:77:17 | LL | require_str(&s.to_owned()); | ^^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:80:17 + --> tests/ui/unnecessary_to_owned.rs:78:17 | LL | require_str(&x_ref.to_string()); | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref.as_ref()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:82:19 + --> tests/ui/unnecessary_to_owned.rs:80:19 | LL | require_slice(&slice.to_vec()); | ^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:83:36 + --> tests/ui/unnecessary_to_owned.rs:81:36 | LL | require_slice(&Cow::from(slice).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:84:19 + --> tests/ui/unnecessary_to_owned.rs:82:19 | LL | require_slice(&array.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `array.as_ref()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:85:19 + --> tests/ui/unnecessary_to_owned.rs:83:19 | LL | require_slice(&array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref.as_ref()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:86:19 + --> tests/ui/unnecessary_to_owned.rs:84:19 | LL | require_slice(&slice.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `into_owned` - --> 
tests/ui/unnecessary_to_owned.rs:89:42 + --> tests/ui/unnecessary_to_owned.rs:87:42 | LL | require_x(&Cow::<X>::Owned(x.clone()).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:92:25 + --> tests/ui/unnecessary_to_owned.rs:90:25 | LL | require_deref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:93:26 + --> tests/ui/unnecessary_to_owned.rs:91:26 | LL | require_deref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:94:24 + --> tests/ui/unnecessary_to_owned.rs:92:24 | LL | require_deref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:95:23 + --> tests/ui/unnecessary_to_owned.rs:93:23 | LL | require_deref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:96:25 + --> tests/ui/unnecessary_to_owned.rs:94:25 | LL | require_deref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:98:30 + --> tests/ui/unnecessary_to_owned.rs:96:30 | LL | require_impl_deref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:99:31 + --> tests/ui/unnecessary_to_owned.rs:97:31 | LL | require_impl_deref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:100:29 + --> tests/ui/unnecessary_to_owned.rs:98:29 | LL | require_impl_deref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:101:28 + --> tests/ui/unnecessary_to_owned.rs:99:28 | LL | require_impl_deref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:102:30 + --> tests/ui/unnecessary_to_owned.rs:100:30 | LL | require_impl_deref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:104:29 + --> tests/ui/unnecessary_to_owned.rs:102:29 | LL | require_deref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:104:43 + --> tests/ui/unnecessary_to_owned.rs:102:43 | LL | require_deref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:105:29 + --> tests/ui/unnecessary_to_owned.rs:103:29 | LL | require_deref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:105:47 + --> tests/ui/unnecessary_to_owned.rs:103:47 | LL | require_deref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:107:26 + --> tests/ui/unnecessary_to_owned.rs:105:26 | LL | require_as_ref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:108:27 + --> tests/ui/unnecessary_to_owned.rs:106:27 | LL | 
require_as_ref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:109:25 + --> tests/ui/unnecessary_to_owned.rs:107:25 | LL | require_as_ref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:110:24 + --> tests/ui/unnecessary_to_owned.rs:108:24 | LL | require_as_ref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:111:24 + --> tests/ui/unnecessary_to_owned.rs:109:24 | LL | require_as_ref_str(x.to_owned()); | ^^^^^^^^^^^^ help: use: `&x` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:112:26 + --> tests/ui/unnecessary_to_owned.rs:110:26 | LL | require_as_ref_slice(array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:113:26 + --> tests/ui/unnecessary_to_owned.rs:111:26 | LL | require_as_ref_slice(array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:114:26 + --> tests/ui/unnecessary_to_owned.rs:112:26 | LL | require_as_ref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:116:31 + --> tests/ui/unnecessary_to_owned.rs:114:31 | LL | require_impl_as_ref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:117:32 + --> tests/ui/unnecessary_to_owned.rs:115:32 | LL | require_impl_as_ref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:118:30 + --> tests/ui/unnecessary_to_owned.rs:116:30 | LL | require_impl_as_ref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:119:29 + --> tests/ui/unnecessary_to_owned.rs:117:29 | LL | require_impl_as_ref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:120:29 + --> tests/ui/unnecessary_to_owned.rs:118:29 | LL | require_impl_as_ref_str(x.to_owned()); | ^^^^^^^^^^^^ help: use: `&x` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:121:31 + --> tests/ui/unnecessary_to_owned.rs:119:31 | LL | require_impl_as_ref_slice(array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:122:31 + --> tests/ui/unnecessary_to_owned.rs:120:31 | LL | require_impl_as_ref_slice(array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:123:31 + --> tests/ui/unnecessary_to_owned.rs:121:31 | LL | require_impl_as_ref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:125:30 + --> tests/ui/unnecessary_to_owned.rs:123:30 | LL | require_as_ref_str_slice(s.to_owned(), array.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:125:44 + --> tests/ui/unnecessary_to_owned.rs:123:44 | LL | require_as_ref_str_slice(s.to_owned(), array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: 
unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:126:30 + --> tests/ui/unnecessary_to_owned.rs:124:30 | LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:126:44 + --> tests/ui/unnecessary_to_owned.rs:124:44 | LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:127:30 + --> tests/ui/unnecessary_to_owned.rs:125:30 | LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:127:44 + --> tests/ui/unnecessary_to_owned.rs:125:44 | LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:128:30 + --> tests/ui/unnecessary_to_owned.rs:126:30 | LL | require_as_ref_slice_str(array.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:128:48 + --> tests/ui/unnecessary_to_owned.rs:126:48 | LL | require_as_ref_slice_str(array.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:129:30 + --> tests/ui/unnecessary_to_owned.rs:127:30 | LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:129:52 + --> tests/ui/unnecessary_to_owned.rs:127:52 | LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:130:30 + --> tests/ui/unnecessary_to_owned.rs:128:30 | LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:130:48 + --> tests/ui/unnecessary_to_owned.rs:128:48 | LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:132:20 + --> tests/ui/unnecessary_to_owned.rs:130:20 | LL | let _ = x.join(&x_ref.to_string()); | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:134:13 + --> tests/ui/unnecessary_to_owned.rs:132:13 | LL | let _ = slice.to_vec().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:135:13 + --> tests/ui/unnecessary_to_owned.rs:133:13 | LL | let _ = slice.to_owned().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:136:13 + --> tests/ui/unnecessary_to_owned.rs:134:13 | LL | let _ = [std::path::PathBuf::new()][..].to_vec().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:137:13 + --> tests/ui/unnecessary_to_owned.rs:135:13 | LL | let _ = [std::path::PathBuf::new()][..].to_owned().into_iter(); | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:139:13 + --> tests/ui/unnecessary_to_owned.rs:137:13 | LL | let _ = IntoIterator::into_iter(slice.to_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:140:13 + --> tests/ui/unnecessary_to_owned.rs:138:13 | LL | let _ = IntoIterator::into_iter(slice.to_owned()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:141:13 + --> tests/ui/unnecessary_to_owned.rs:139:13 | LL | let _ = IntoIterator::into_iter([std::path::PathBuf::new()][..].to_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:142:13 + --> tests/ui/unnecessary_to_owned.rs:140:13 | LL | let _ = IntoIterator::into_iter([std::path::PathBuf::new()][..].to_owned()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()` error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:164:26 + --> tests/ui/unnecessary_to_owned.rs:162:26 | LL | let _ref_str: &str = &String::from_utf8(slice.to_vec()).expect("not UTF-8"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -490,7 +490,7 @@ LL + let _ref_str: &str = core::str::from_utf8(&slice).expect("not UTF-8"); | error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:165:26 + --> tests/ui/unnecessary_to_owned.rs:163:26 | LL | let _ref_str: &str = &String::from_utf8(b"foo".to_vec()).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -502,7 +502,7 @@ LL + let _ref_str: &str = core::str::from_utf8(b"foo").unwrap(); | error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:166:26 + --> tests/ui/unnecessary_to_owned.rs:164:26 | LL | let _ref_str: &str = &String::from_utf8(b"foo".as_slice().to_owned()).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -514,77 +514,73 @@ LL + let _ref_str: &str = core::str::from_utf8(b"foo".as_slice()).unwrap(); | error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:223:14 + --> tests/ui/unnecessary_to_owned.rs:221:14 | LL | for t in file_types.to_vec() { | ^^^^^^^^^^^^^^^^^^^ | -help: use - | -LL | for t in file_types { - | ~~~~~~~~~~ help: remove any references to the binding | -LL - let path = match get_file_path(&t) { -LL + let path = match get_file_path(t) { +LL ~ for t in file_types { +LL ~ let path = match get_file_path(t) { | error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:246:14 + --> tests/ui/unnecessary_to_owned.rs:244:14 | LL | let _ = &["x"][..].to_vec().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `["x"][..].iter().cloned()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:251:14 + --> tests/ui/unnecessary_to_owned.rs:249:14 | LL | let _ = &["x"][..].to_vec().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `["x"][..].iter().copied()` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:299:24 + --> 
tests/ui/unnecessary_to_owned.rs:297:24 | LL | Box::new(build(y.to_string())) | ^^^^^^^^^^^^^ help: use: `y` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:408:12 + --> tests/ui/unnecessary_to_owned.rs:406:12 | LL | id("abc".to_string()) | ^^^^^^^^^^^^^^^^^ help: use: `"abc"` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:551:37 + --> tests/ui/unnecessary_to_owned.rs:549:37 | LL | IntoFuture::into_future(foo([].to_vec(), &0)); | ^^^^^^^^^^^ help: use: `[]` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:561:18 + --> tests/ui/unnecessary_to_owned.rs:559:18 | LL | s.remove(&a.to_vec()); | ^^^^^^^^^^^ help: replace it with: `a` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:565:14 + --> tests/ui/unnecessary_to_owned.rs:563:14 | LL | s.remove(&"b".to_owned()); | ^^^^^^^^^^^^^^^ help: replace it with: `"b"` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:566:14 + --> tests/ui/unnecessary_to_owned.rs:564:14 | LL | s.remove(&"b".to_string()); | ^^^^^^^^^^^^^^^^ help: replace it with: `"b"` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:571:14 + --> tests/ui/unnecessary_to_owned.rs:569:14 | LL | s.remove(&["b"].to_vec()); | ^^^^^^^^^^^^^^^ help: replace it with: `["b"].as_slice()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:572:14 + --> tests/ui/unnecessary_to_owned.rs:570:14 | LL | s.remove(&(&["b"]).to_vec()); | ^^^^^^^^^^^^^^^^^^ help: replace it with: `(&["b"]).as_slice()` diff --git a/src/tools/clippy/tests/ui/useless_attribute.fixed b/src/tools/clippy/tests/ui/useless_attribute.fixed index 231fc0a892a..de1062f123b 100644 --- a/src/tools/clippy/tests/ui/useless_attribute.fixed +++ b/src/tools/clippy/tests/ui/useless_attribute.fixed @@ -134,3 +134,12 @@ pub mod ambiguous_glob_exports { pub use my_prelude::*; pub use my_type::*; } + +// Regression test for https://github.com/rust-lang/rust-clippy/issues/13764 +pub mod unknown_namespace { + pub mod some_module { + pub struct SomeType; + } + #[allow(rustc::non_glob_import_of_type_ir_inherent)] + use some_module::SomeType; +} diff --git a/src/tools/clippy/tests/ui/useless_attribute.rs b/src/tools/clippy/tests/ui/useless_attribute.rs index 8dfcd2110a4..94657dd1ca3 100644 --- a/src/tools/clippy/tests/ui/useless_attribute.rs +++ b/src/tools/clippy/tests/ui/useless_attribute.rs @@ -134,3 +134,12 @@ pub mod ambiguous_glob_exports { pub use my_prelude::*; pub use my_type::*; } + +// Regression test for https://github.com/rust-lang/rust-clippy/issues/13764 +pub mod unknown_namespace { + pub mod some_module { + pub struct SomeType; + } + #[allow(rustc::non_glob_import_of_type_ir_inherent)] + use some_module::SomeType; +} diff --git a/src/tools/clippy/tests/ui/useless_conversion.fixed b/src/tools/clippy/tests/ui/useless_conversion.fixed index eff617a8016..2f7edd92bb7 100644 --- a/src/tools/clippy/tests/ui/useless_conversion.fixed +++ b/src/tools/clippy/tests/ui/useless_conversion.fixed @@ -3,6 +3,8 @@ // FIXME(static_mut_refs): Do not allow `static_mut_refs` lint #![allow(static_mut_refs)] +use std::ops::ControlFlow; + fn test_generic<T: Copy>(val: T) -> T { let _ = val; val @@ -297,3 +299,46 @@ impl From<Foo<'a'>> for Foo<'b'> { Foo } } + +fn direct_application() { + let _: Result<(), std::io::Error> = test_issue_3913(); + //~^ useless_conversion + let _: Result<(), std::io::Error> = test_issue_3913(); + //~^ useless_conversion + let _: Result<(), 
std::io::Error> = test_issue_3913(); + //~^ useless_conversion + let _: Result<(), std::io::Error> = test_issue_3913(); + //~^ useless_conversion + + let c: ControlFlow<()> = ControlFlow::Continue(()); + let _: ControlFlow<()> = c; + //~^ useless_conversion + let c: ControlFlow<()> = ControlFlow::Continue(()); + let _: ControlFlow<()> = c; + //~^ useless_conversion + + struct Absorb; + impl From<()> for Absorb { + fn from(_: ()) -> Self { + Self + } + } + impl From<std::io::Error> for Absorb { + fn from(_: std::io::Error) -> Self { + Self + } + } + let _: Vec<u32> = [1u32].into_iter().collect(); + //~^ useless_conversion + + // No lint for those + let _: Result<Absorb, std::io::Error> = test_issue_3913().map(Into::into); + let _: Result<(), Absorb> = test_issue_3913().map_err(Into::into); + let _: Result<Absorb, std::io::Error> = test_issue_3913().map(From::from); + let _: Result<(), Absorb> = test_issue_3913().map_err(From::from); +} + +fn gen_identity<T>(x: [T; 3]) -> Vec<T> { + x.into_iter().collect() + //~^ useless_conversion +} diff --git a/src/tools/clippy/tests/ui/useless_conversion.rs b/src/tools/clippy/tests/ui/useless_conversion.rs index 64b06620789..eacdf77f905 100644 --- a/src/tools/clippy/tests/ui/useless_conversion.rs +++ b/src/tools/clippy/tests/ui/useless_conversion.rs @@ -3,6 +3,8 @@ // FIXME(static_mut_refs): Do not allow `static_mut_refs` lint #![allow(static_mut_refs)] +use std::ops::ControlFlow; + fn test_generic<T: Copy>(val: T) -> T { let _ = T::from(val); val.into() @@ -297,3 +299,46 @@ impl From<Foo<'a'>> for Foo<'b'> { Foo } } + +fn direct_application() { + let _: Result<(), std::io::Error> = test_issue_3913().map(Into::into); + //~^ useless_conversion + let _: Result<(), std::io::Error> = test_issue_3913().map_err(Into::into); + //~^ useless_conversion + let _: Result<(), std::io::Error> = test_issue_3913().map(From::from); + //~^ useless_conversion + let _: Result<(), std::io::Error> = test_issue_3913().map_err(From::from); + //~^ useless_conversion + + let c: ControlFlow<()> = ControlFlow::Continue(()); + let _: ControlFlow<()> = c.map_break(Into::into); + //~^ useless_conversion + let c: ControlFlow<()> = ControlFlow::Continue(()); + let _: ControlFlow<()> = c.map_continue(Into::into); + //~^ useless_conversion + + struct Absorb; + impl From<()> for Absorb { + fn from(_: ()) -> Self { + Self + } + } + impl From<std::io::Error> for Absorb { + fn from(_: std::io::Error) -> Self { + Self + } + } + let _: Vec<u32> = [1u32].into_iter().map(Into::into).collect(); + //~^ useless_conversion + + // No lint for those + let _: Result<Absorb, std::io::Error> = test_issue_3913().map(Into::into); + let _: Result<(), Absorb> = test_issue_3913().map_err(Into::into); + let _: Result<Absorb, std::io::Error> = test_issue_3913().map(From::from); + let _: Result<(), Absorb> = test_issue_3913().map_err(From::from); +} + +fn gen_identity<T>(x: [T; 3]) -> Vec<T> { + x.into_iter().map(Into::into).collect() + //~^ useless_conversion +} diff --git a/src/tools/clippy/tests/ui/useless_conversion.stderr b/src/tools/clippy/tests/ui/useless_conversion.stderr index b149357bcf4..6aeb382902b 100644 --- a/src/tools/clippy/tests/ui/useless_conversion.stderr +++ b/src/tools/clippy/tests/ui/useless_conversion.stderr @@ -1,5 +1,5 @@ error: useless conversion to the same type: `T` - --> tests/ui/useless_conversion.rs:7:13 + --> tests/ui/useless_conversion.rs:9:13 | LL | let _ = T::from(val); | ^^^^^^^^^^^^ help: consider removing `T::from()`: `val` @@ -11,220 +11,268 @@ LL | 
#![deny(clippy::useless_conversion)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ error: useless conversion to the same type: `T` - --> tests/ui/useless_conversion.rs:8:5 + --> tests/ui/useless_conversion.rs:10:5 | LL | val.into() | ^^^^^^^^^^ help: consider removing `.into()`: `val` error: useless conversion to the same type: `i32` - --> tests/ui/useless_conversion.rs:20:22 + --> tests/ui/useless_conversion.rs:22:22 | LL | let _: i32 = 0i32.into(); | ^^^^^^^^^^^ help: consider removing `.into()`: `0i32` error: useless conversion to the same type: `std::str::Lines<'_>` - --> tests/ui/useless_conversion.rs:50:22 + --> tests/ui/useless_conversion.rs:52:22 | LL | if Some("ok") == lines.into_iter().next() {} | ^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `lines` error: useless conversion to the same type: `std::str::Lines<'_>` - --> tests/ui/useless_conversion.rs:55:21 + --> tests/ui/useless_conversion.rs:57:21 | LL | let mut lines = text.lines().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `text.lines()` error: useless conversion to the same type: `std::str::Lines<'_>` - --> tests/ui/useless_conversion.rs:61:22 + --> tests/ui/useless_conversion.rs:63:22 | LL | if Some("ok") == text.lines().into_iter().next() {} | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `text.lines()` error: useless conversion to the same type: `std::ops::Range<i32>` - --> tests/ui/useless_conversion.rs:67:13 + --> tests/ui/useless_conversion.rs:69:13 | LL | let _ = NUMBERS.into_iter().next(); | ^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `NUMBERS` error: useless conversion to the same type: `std::ops::Range<i32>` - --> tests/ui/useless_conversion.rs:72:17 + --> tests/ui/useless_conversion.rs:74:17 | LL | let mut n = NUMBERS.into_iter(); | ^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `NUMBERS` error: useless conversion to the same type: `std::string::String` - --> tests/ui/useless_conversion.rs:134:21 + --> tests/ui/useless_conversion.rs:136:21 | LL | let _: String = "foo".to_string().into(); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into()`: `"foo".to_string()` error: useless conversion to the same type: `std::string::String` - --> tests/ui/useless_conversion.rs:135:21 + --> tests/ui/useless_conversion.rs:137:21 | LL | let _: String = From::from("foo".to_string()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `From::from()`: `"foo".to_string()` error: useless conversion to the same type: `std::string::String` - --> tests/ui/useless_conversion.rs:136:13 + --> tests/ui/useless_conversion.rs:138:13 | LL | let _ = String::from("foo".to_string()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `String::from()`: `"foo".to_string()` error: useless conversion to the same type: `std::string::String` - --> tests/ui/useless_conversion.rs:137:13 + --> tests/ui/useless_conversion.rs:139:13 | LL | let _ = String::from(format!("A: {:04}", 123)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `String::from()`: `format!("A: {:04}", 123)` error: useless conversion to the same type: `std::str::Lines<'_>` - --> tests/ui/useless_conversion.rs:138:13 + --> tests/ui/useless_conversion.rs:140:13 | LL | let _ = "".lines().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `"".lines()` error: useless conversion to the same type: `std::vec::IntoIter<i32>` - --> tests/ui/useless_conversion.rs:139:13 + --> tests/ui/useless_conversion.rs:141:13 | LL | let _ = vec![1, 2, 
3].into_iter().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `vec![1, 2, 3].into_iter()` error: useless conversion to the same type: `std::string::String` - --> tests/ui/useless_conversion.rs:140:21 + --> tests/ui/useless_conversion.rs:142:21 | LL | let _: String = format!("Hello {}", "world").into(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into()`: `format!("Hello {}", "world")` error: useless conversion to the same type: `i32` - --> tests/ui/useless_conversion.rs:145:13 + --> tests/ui/useless_conversion.rs:147:13 | LL | let _ = i32::from(a + b) * 3; | ^^^^^^^^^^^^^^^^ help: consider removing `i32::from()`: `(a + b)` error: useless conversion to the same type: `Foo<'a'>` - --> tests/ui/useless_conversion.rs:151:23 + --> tests/ui/useless_conversion.rs:153:23 | LL | let _: Foo<'a'> = s2.into(); | ^^^^^^^^^ help: consider removing `.into()`: `s2` error: useless conversion to the same type: `Foo<'a'>` - --> tests/ui/useless_conversion.rs:153:13 + --> tests/ui/useless_conversion.rs:155:13 | LL | let _ = Foo::<'a'>::from(s3); | ^^^^^^^^^^^^^^^^^^^^ help: consider removing `Foo::<'a'>::from()`: `s3` error: useless conversion to the same type: `std::vec::IntoIter<Foo<'a'>>` - --> tests/ui/useless_conversion.rs:155:13 + --> tests/ui/useless_conversion.rs:157:13 | LL | let _ = vec![s4, s4, s4].into_iter().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `vec![s4, s4, s4].into_iter()` error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:187:7 + --> tests/ui/useless_conversion.rs:189:7 | LL | b(vec![1, 2].into_iter()); | ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `vec![1, 2]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:177:13 + --> tests/ui/useless_conversion.rs:179:13 | LL | fn b<T: IntoIterator<Item = i32>>(_: T) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:188:7 + --> tests/ui/useless_conversion.rs:190:7 | LL | c(vec![1, 2].into_iter()); | ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `vec![1, 2]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:178:18 + --> tests/ui/useless_conversion.rs:180:18 | LL | fn c(_: impl IntoIterator<Item = i32>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:189:7 + --> tests/ui/useless_conversion.rs:191:7 | LL | d(vec![1, 2].into_iter()); | ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `vec![1, 2]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:181:12 + --> tests/ui/useless_conversion.rs:183:12 | LL | T: IntoIterator<Item = i32>, | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:192:7 + --> tests/ui/useless_conversion.rs:194:7 | LL | b(vec![1, 2].into_iter().into_iter()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`s: `vec![1, 2]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> 
tests/ui/useless_conversion.rs:177:13 + --> tests/ui/useless_conversion.rs:179:13 | LL | fn b<T: IntoIterator<Item = i32>>(_: T) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:193:7 + --> tests/ui/useless_conversion.rs:195:7 | LL | b(vec![1, 2].into_iter().into_iter().into_iter()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`s: `vec![1, 2]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:177:13 + --> tests/ui/useless_conversion.rs:179:13 | LL | fn b<T: IntoIterator<Item = i32>>(_: T) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:239:24 + --> tests/ui/useless_conversion.rs:241:24 | LL | foo2::<i32, _>([1, 2, 3].into_iter()); | ^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `[1, 2, 3]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:218:12 + --> tests/ui/useless_conversion.rs:220:12 | LL | I: IntoIterator<Item = i32> + Helper<X>, | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:247:14 + --> tests/ui/useless_conversion.rs:249:14 | LL | foo3([1, 2, 3].into_iter()); | ^^^^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `[1, 2, 3]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:227:12 + --> tests/ui/useless_conversion.rs:229:12 | LL | I: IntoIterator<Item = i32>, | ^^^^^^^^^^^^^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:256:16 + --> tests/ui/useless_conversion.rs:258:16 | LL | S1.foo([1, 2].into_iter()); | ^^^^^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `[1, 2]` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:253:27 + --> tests/ui/useless_conversion.rs:255:27 | LL | pub fn foo<I: IntoIterator>(&self, _: I) {} | ^^^^^^^^^^^^ error: explicit call to `.into_iter()` in function argument accepting `IntoIterator` - --> tests/ui/useless_conversion.rs:275:44 + --> tests/ui/useless_conversion.rs:277:44 | LL | v0.into_iter().interleave_shortest(v1.into_iter()); | ^^^^^^^^^^^^^^ help: consider removing the `.into_iter()`: `v1` | note: this parameter accepts any `IntoIterator`, so you don't need to call `.into_iter()` - --> tests/ui/useless_conversion.rs:262:20 + --> tests/ui/useless_conversion.rs:264:20 | LL | J: IntoIterator, | ^^^^^^^^^^^^ -error: aborting due to 28 previous errors +error: useless conversion to the same type: `()` + --> tests/ui/useless_conversion.rs:304:58 + | +LL | let _: Result<(), std::io::Error> = test_issue_3913().map(Into::into); + | ^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `std::io::Error` + --> tests/ui/useless_conversion.rs:306:58 + | +LL | let _: Result<(), std::io::Error> = test_issue_3913().map_err(Into::into); + | ^^^^^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `()` + --> tests/ui/useless_conversion.rs:308:58 + | +LL | let _: Result<(), std::io::Error> = 
test_issue_3913().map(From::from); + | ^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `std::io::Error` + --> tests/ui/useless_conversion.rs:310:58 + | +LL | let _: Result<(), std::io::Error> = test_issue_3913().map_err(From::from); + | ^^^^^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `()` + --> tests/ui/useless_conversion.rs:314:31 + | +LL | let _: ControlFlow<()> = c.map_break(Into::into); + | ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `()` + --> tests/ui/useless_conversion.rs:317:31 + | +LL | let _: ControlFlow<()> = c.map_continue(Into::into); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `u32` + --> tests/ui/useless_conversion.rs:331:41 + | +LL | let _: Vec<u32> = [1u32].into_iter().map(Into::into).collect(); + | ^^^^^^^^^^^^^^^^ help: consider removing + +error: useless conversion to the same type: `T` + --> tests/ui/useless_conversion.rs:342:18 + | +LL | x.into_iter().map(Into::into).collect() + | ^^^^^^^^^^^^^^^^ help: consider removing + +error: aborting due to 36 previous errors diff --git a/src/tools/clippy/tests/ui/zombie_processes.rs b/src/tools/clippy/tests/ui/zombie_processes.rs index b41bcce3f7f..6f0d2760a86 100644 --- a/src/tools/clippy/tests/ui/zombie_processes.rs +++ b/src/tools/clippy/tests/ui/zombie_processes.rs @@ -1,7 +1,7 @@ #![warn(clippy::zombie_processes)] -#![allow(clippy::if_same_then_else, clippy::ifs_same_cond)] +#![allow(clippy::if_same_then_else, clippy::ifs_same_cond, clippy::needless_return)] -use std::process::{Child, Command}; +use std::process::{Child, Command, ExitStatus}; fn main() { { @@ -12,7 +12,7 @@ fn main() { { let mut x = Command::new("").spawn().unwrap(); - //~^ ERROR: spawned process is never `wait()`ed on + //~^ zombie_processes x.kill(); x.id(); } @@ -39,7 +39,7 @@ fn main() { } { let mut x = Command::new("").spawn().unwrap(); - //~^ ERROR: spawned process is never `wait()`ed on + //~^ zombie_processes let v = &x; // (allow shared refs is fine because one cannot call `.wait()` through that) } @@ -64,14 +64,14 @@ fn main() { // It should assume that it might not exit and still lint { let mut x = Command::new("").spawn().unwrap(); - //~^ ERROR: spawned process is never `wait()`ed on + //~^ zombie_processes if true { std::process::exit(0); } } { let mut x = Command::new("").spawn().unwrap(); - //~^ ERROR: spawned process is never `wait()`ed on + //~^ zombie_processes if true { while false {} // Calling `exit()` after leaving a while loop should still be linted. 
@@ -97,7 +97,7 @@ fn main() { { let mut x = Command::new("").spawn().unwrap(); - //~^ ERROR: spawned process is never `wait()`ed on + //~^ zombie_processes if true { return; } @@ -106,7 +106,7 @@ fn main() { { let mut x = Command::new("").spawn().unwrap(); - //~^ ERROR: spawned process is never `wait()`ed on + //~^ zombie_processes if true { x.wait().unwrap(); } @@ -114,6 +114,26 @@ fn main() { { let mut x = Command::new("").spawn().unwrap(); + //~^ zombie_processes + if true { + x.wait().unwrap(); + } else { + // this else block exists to test the other help message + } + } + + { + let mut x = Command::new("").spawn().unwrap(); + //~^ zombie_processes + if true { + // this else block exists to test the other help message + } else { + x.wait().unwrap(); + } + } + + { + let mut x = Command::new("").spawn().unwrap(); if true { x.wait().unwrap(); } else if true { @@ -143,3 +163,8 @@ fn main() { fn process_child(c: Child) { todo!() } + +fn return_wait() -> ExitStatus { + let mut x = Command::new("").spawn().unwrap(); + return x.wait().unwrap(); +} diff --git a/src/tools/clippy/tests/ui/zombie_processes.stderr b/src/tools/clippy/tests/ui/zombie_processes.stderr index eec821a4c8f..afc518c60db 100644 --- a/src/tools/clippy/tests/ui/zombie_processes.stderr +++ b/src/tools/clippy/tests/ui/zombie_processes.stderr @@ -4,7 +4,7 @@ error: spawned process is never `wait()`ed on LL | let mut x = Command::new("").spawn().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider calling `.wait()` + = help: consider calling `.wait()` = note: not doing so might leave behind zombie processes = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning = note: `-D clippy::zombie-processes` implied by `-D warnings` @@ -16,7 +16,7 @@ error: spawned process is never `wait()`ed on LL | let mut x = Command::new("").spawn().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider calling `.wait()` + = help: consider calling `.wait()` = note: not doing so might leave behind zombie processes = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning @@ -26,7 +26,7 @@ error: spawned process is never `wait()`ed on LL | let mut x = Command::new("").spawn().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider calling `.wait()` + = help: consider calling `.wait()` = note: not doing so might leave behind zombie processes = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning @@ -36,29 +36,96 @@ error: spawned process is never `wait()`ed on LL | let mut x = Command::new("").spawn().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider calling `.wait()` + = help: consider calling `.wait()` = note: not doing so might leave behind zombie processes = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning -error: spawned process is never `wait()`ed on +error: spawned process is not `wait()`ed on in all code paths --> tests/ui/zombie_processes.rs:99:21 | LL | let mut x = Command::new("").spawn().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider calling `.wait()` +note: no `wait()` call exists on the code path to this early return + --> tests/ui/zombie_processes.rs:102:13 + | +LL | return; + | ^^^^^^ +note: `wait()` call exists, but it is unreachable due to the early return + --> tests/ui/zombie_processes.rs:104:9 + | +LL | x.wait().unwrap(); + | ^ + = help: consider calling `.wait()` in all code paths = note: not doing so might leave behind zombie processes = note: see 
https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning -error: spawned process is never `wait()`ed on +error: spawned process is not `wait()`ed on in all code paths --> tests/ui/zombie_processes.rs:108:21 | LL | let mut x = Command::new("").spawn().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider calling `.wait()` +note: this if expression has a `wait()` call, but it is missing an else block + --> tests/ui/zombie_processes.rs:110:9 + | +LL | / if true { +LL | | x.wait().unwrap(); +LL | | } + | |_________^ +note: `wait()` called here + --> tests/ui/zombie_processes.rs:111:13 + | +LL | x.wait().unwrap(); + | ^ + = help: consider calling `.wait()` in all code paths + = note: not doing so might leave behind zombie processes + = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning + +error: spawned process is not `wait()`ed on in all code paths + --> tests/ui/zombie_processes.rs:116:21 + | +LL | let mut x = Command::new("").spawn().unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `wait()` is not called in this if branch + --> tests/ui/zombie_processes.rs:120:10 + | +LL | } else { + | __________^ +LL | | // this else block exists to test the other help message +LL | | } + | |_________^ +note: `wait()` is called in the other branch + --> tests/ui/zombie_processes.rs:119:13 + | +LL | x.wait().unwrap(); + | ^ + = help: consider calling `.wait()` in all code paths + = note: not doing so might leave behind zombie processes + = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning + +error: spawned process is not `wait()`ed on in all code paths + --> tests/ui/zombie_processes.rs:126:21 + | +LL | let mut x = Command::new("").spawn().unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: `wait()` is not called in this if branch + --> tests/ui/zombie_processes.rs:128:9 + | +LL | / if true { +LL | | // this else block exists to test the other help message +LL | | } else { + | |_________^ +note: `wait()` is called in the other branch + --> tests/ui/zombie_processes.rs:131:13 + | +LL | x.wait().unwrap(); + | ^ + = help: consider calling `.wait()` in all code paths = note: not doing so might leave behind zombie processes = note: see https://doc.rust-lang.org/stable/std/process/struct.Child.html#warning -error: aborting due to 6 previous errors +error: aborting due to 8 previous errors diff --git a/src/tools/clippy/util/gh-pages/script.js b/src/tools/clippy/util/gh-pages/script.js index 9a5365b2158..c2197b89c56 100644 --- a/src/tools/clippy/util/gh-pages/script.js +++ b/src/tools/clippy/util/gh-pages/script.js @@ -232,13 +232,13 @@ const APPLICABILITIES_FILTER_DEFAULT = { MaybeIncorrect: true, HasPlaceholders: true, }; -const URL_PARAMS_CORRESPONDANCE = { +const URL_PARAMS_CORRESPONDENCE = { "groups_filter": "groups", "levels_filter": "levels", "applicabilities_filter": "applicabilities", "version_filter": "versions", }; -const VERSIONS_CORRESPONDANCE = { +const VERSIONS_CORRESPONDENCE = { "lte": "≤", "gte": "≥", "eq": "=", @@ -285,7 +285,7 @@ window.filters = { } function updateIfNeeded(filterName, obj2) { const obj1 = filters[filterName]; - const name = URL_PARAMS_CORRESPONDANCE[filterName]; + const name = URL_PARAMS_CORRESPONDENCE[filterName]; if (!compareObjects(obj1, obj2)) { urlParams.set( name, @@ -316,9 +316,9 @@ window.filters = { versions.push(`lte:${filters.version_filter["≤"]}`); } if (versions.length !== 0) { - urlParams.set(URL_PARAMS_CORRESPONDANCE["version_filter"], versions.join(",")); + 
urlParams.set(URL_PARAMS_CORRESPONDENCE["version_filter"], versions.join(",")); } else { - urlParams.delete(URL_PARAMS_CORRESPONDANCE["version_filter"]); + urlParams.delete(URL_PARAMS_CORRESPONDENCE["version_filter"]); } let params = urlParams.toString(); @@ -532,7 +532,7 @@ function parseURLFilters() { const urlParams = new URLSearchParams(window.location.search); for (const [key, value] of urlParams.entries()) { - for (const [corres_key, corres_value] of Object.entries(URL_PARAMS_CORRESPONDANCE)) { + for (const [corres_key, corres_value] of Object.entries(URL_PARAMS_CORRESPONDENCE)) { if (corres_value === key) { if (key !== "versions") { const settings = new Set(value.split(",")); @@ -545,7 +545,7 @@ function parseURLFilters() { for (const [kind, value] of settings) { const elem = document.querySelector( - `#version-filter input[data-value="${VERSIONS_CORRESPONDANCE[kind]}"]`); + `#version-filter input[data-value="${VERSIONS_CORRESPONDENCE[kind]}"]`); elem.value = value; updateVersionFilters(elem, true); } diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index b784bdb7139..16cc1d2a565 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -16,7 +16,7 @@ indexmap = "2.0.0" miropt-test-tools = { path = "../miropt-test-tools" } build_helper = { path = "../../build_helper" } tracing = "0.1" -tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] } +tracing-subscriber = { version = "0.3.3", default-features = false, features = ["ansi", "env-filter", "fmt", "parking_lot", "smallvec"] } regex = "1.0" semver = { version = "1.0.23", features = ["serde"] } serde = { version = "1.0", features = ["derive"] } diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs index d3b4631a212..250ef0794ad 100644 --- a/src/tools/compiletest/src/lib.rs +++ b/src/tools/compiletest/src/lib.rs @@ -37,7 +37,7 @@ use walkdir::WalkDir; use self::header::{EarlyProps, make_test_description}; use crate::common::{ - CompareMode, Config, Mode, PassMode, TestPaths, UI_EXTENSIONS, expected_output_path, + CompareMode, Config, Debugger, Mode, PassMode, TestPaths, UI_EXTENSIONS, expected_output_path, output_base_dir, output_relative_path, }; use crate::header::HeadersCache; @@ -183,7 +183,13 @@ pub fn parse_config(args: Vec<String>) -> Config { "What custom diff tool to use for displaying compiletest tests.", "COMMAND", ) - .reqopt("", "minicore-path", "path to minicore aux library", "PATH"); + .reqopt("", "minicore-path", "path to minicore aux library", "PATH") + .optopt( + "", + "debugger", + "only test a specific debugger in debuginfo tests", + "gdb | lldb | cdb", + ); let (argv0, args_) = args.split_first().unwrap(); if args.len() == 1 || args[1] == "-h" || args[1] == "--help" { @@ -302,7 +308,11 @@ pub fn parse_config(args: Vec<String>) -> Config { stage_id: matches.opt_str("stage-id").unwrap(), mode, suite: matches.opt_str("suite").unwrap(), - debugger: None, + debugger: matches.opt_str("debugger").map(|debugger| { + debugger + .parse::<Debugger>() + .unwrap_or_else(|_| panic!("unknown `--debugger` option `{debugger}` given")) + }), run_ignored, with_rustc_debug_assertions, with_std_debug_assertions, @@ -475,9 +485,16 @@ pub fn run_tests(config: Arc<Config>) { if let Mode::DebugInfo = config.mode { // Debugging emscripten code doesn't make sense today if !config.target.contains("emscripten") { - configs.extend(debuggers::configure_cdb(&config)); - 
configs.extend(debuggers::configure_gdb(&config)); - configs.extend(debuggers::configure_lldb(&config)); + match config.debugger { + Some(Debugger::Cdb) => configs.extend(debuggers::configure_cdb(&config)), + Some(Debugger::Gdb) => configs.extend(debuggers::configure_gdb(&config)), + Some(Debugger::Lldb) => configs.extend(debuggers::configure_lldb(&config)), + None => { + configs.extend(debuggers::configure_cdb(&config)); + configs.extend(debuggers::configure_gdb(&config)); + configs.extend(debuggers::configure_lldb(&config)); + } + } } } else { configs.push(config.clone()); diff --git a/src/tools/compiletest/src/read2.rs b/src/tools/compiletest/src/read2.rs index 62e675c77ae..28ca5589992 100644 --- a/src/tools/compiletest/src/read2.rs +++ b/src/tools/compiletest/src/read2.rs @@ -90,7 +90,7 @@ impl ProcOutput { .count(); *filtered_len -= matches * path_bytes.len(); - // We can't just remove the length of the filtered path from the output lenght, + // We can't just remove the length of the filtered path from the output length, // otherwise a compiler emitting only filtered paths would OOM compiletest. Add // a fixed placeholder length for each path to prevent that. *filtered_len += matches * FILTERED_PATHS_PLACEHOLDER_LEN; diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index 6a4f0b96bb4..108fde1c899 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -2809,29 +2809,6 @@ impl<'test> TestCx<'test> { println!("init_incremental_test: incremental_dir={}", incremental_dir.display()); } } - - fn aggressive_rm_rf(&self, path: &Path) -> io::Result<()> { - for e in path.read_dir()? { - let entry = e?; - let path = entry.path(); - if entry.file_type()?.is_dir() { - self.aggressive_rm_rf(&path)?; - } else { - // Remove readonly files as well on windows (by default we can't) - fs::remove_file(&path).or_else(|e| { - if cfg!(windows) && e.kind() == io::ErrorKind::PermissionDenied { - let mut meta = entry.metadata()?.permissions(); - meta.set_readonly(false); - fs::set_permissions(&path, meta)?; - fs::remove_file(&path) - } else { - Err(e) - } - })?; - } - } - fs::remove_dir(path) - } } struct ProcArgs { diff --git a/src/tools/compiletest/src/runtest/run_make.rs b/src/tools/compiletest/src/runtest/run_make.rs index 85ade5b727a..ee7aed2a39c 100644 --- a/src/tools/compiletest/src/runtest/run_make.rs +++ b/src/tools/compiletest/src/runtest/run_make.rs @@ -2,6 +2,8 @@ use std::path::Path; use std::process::{Command, Output, Stdio}; use std::{env, fs}; +use build_helper::fs::{ignore_not_found, recursive_remove}; + use super::{ProcRes, TestCx, disable_error_reporting}; use crate::util::{copy_dir_all, dylib_env_var}; @@ -27,9 +29,8 @@ impl TestCx<'_> { // are hopefully going away, it seems safer to leave this perilous code // as-is until it can all be deleted. let tmpdir = cwd.join(self.output_base_name()); - if tmpdir.exists() { - self.aggressive_rm_rf(&tmpdir).unwrap(); - } + ignore_not_found(|| recursive_remove(&tmpdir)).unwrap(); + fs::create_dir_all(&tmpdir).unwrap(); let host = &self.config.host; @@ -218,9 +219,8 @@ impl TestCx<'_> { // // This setup intentionally diverges from legacy Makefile run-make tests. 
let base_dir = self.output_base_dir(); - if base_dir.exists() { - self.aggressive_rm_rf(&base_dir).unwrap(); - } + ignore_not_found(|| recursive_remove(&base_dir)).unwrap(); + let rmake_out_dir = base_dir.join("rmake_out"); fs::create_dir_all(&rmake_out_dir).unwrap(); diff --git a/src/tools/compiletest/src/runtest/ui.rs b/src/tools/compiletest/src/runtest/ui.rs index 172b1e32aad..10528de427d 100644 --- a/src/tools/compiletest/src/runtest/ui.rs +++ b/src/tools/compiletest/src/runtest/ui.rs @@ -145,12 +145,13 @@ impl TestCx<'_> { self.fatal_proc_rec("test run succeeded!", &proc_res); } + let output_to_check = self.get_output(&proc_res); if !self.props.error_patterns.is_empty() || !self.props.regex_error_patterns.is_empty() { // "// error-pattern" comments - let output_to_check = self.get_output(&proc_res); self.check_all_error_patterns(&output_to_check, &proc_res, pm); } + self.check_forbid_output(&output_to_check, &proc_res) } debug!( @@ -181,11 +182,12 @@ impl TestCx<'_> { ); self.fatal(&msg); } + let output_to_check = self.get_output(&proc_res); if check_patterns { // "// error-pattern" comments - let output_to_check = self.get_output(&proc_res); self.check_all_error_patterns(&output_to_check, &proc_res, pm); } + self.check_forbid_output(&output_to_check, &proc_res); if self.props.run_rustfix && self.config.compare_mode.is_none() { // And finally, compile the fixed code and make sure it both diff --git a/src/tools/opt-dist/src/tests.rs b/src/tools/opt-dist/src/tests.rs index 887055798e0..06ed076a864 100644 --- a/src/tools/opt-dist/src/tests.rs +++ b/src/tools/opt-dist/src/tests.rs @@ -25,6 +25,8 @@ pub fn run_tests(env: &Environment) -> anyhow::Result<()> { let host_triple = env.host_tuple(); let version = find_dist_version(&dist_dir)?; + let channel = version_to_channel(&version); + // Extract rustc, libstd, cargo and src archives to create the optimized sysroot let rustc_dir = extract_dist_dir(&format!("rustc-{version}-{host_triple}"))?.join("rustc"); let libstd_dir = extract_dist_dir(&format!("rust-std-{version}-{host_triple}"))? @@ -61,9 +63,13 @@ pub fn run_tests(env: &Environment) -> anyhow::Result<()> { assert!(llvm_config.is_file()); let config_content = format!( - r#"profile = "user" + r#" +profile = "user" change-id = 115898 +[rust] +channel = "{channel}" + [build] rustc = "{rustc}" cargo = "{cargo}" @@ -116,3 +122,13 @@ fn find_dist_version(directory: &Utf8Path) -> anyhow::Result<String> { archive.strip_prefix("reproducible-artifacts-").unwrap().split_once('-').unwrap(); Ok(version.to_string()) } + +/// Roughly convert a version string (`nightly`, `beta`, or `1.XY.Z`) to channel string (`nightly`, +/// `beta` or `stable`). +fn version_to_channel(version_str: &str) -> &'static str { + match version_str { + "nightly" => "nightly", + "beta" => "beta", + _ => "stable", + } +} diff --git a/src/tools/run-make-support/src/fs.rs b/src/tools/run-make-support/src/fs.rs index 2ca55ad3b3a..7ebe4a9ca13 100644 --- a/src/tools/run-make-support/src/fs.rs +++ b/src/tools/run-make-support/src/fs.rs @@ -1,7 +1,51 @@ +use std::fs::FileType; use std::io; use std::path::{Path, PathBuf}; -/// Copy a directory into another. +/// Given a symlink at `src`, read its target, then create a new symlink at `dst` also pointing to +/// target. 
+pub fn copy_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) { + let src = src.as_ref(); + let dst = dst.as_ref(); + let metadata = symlink_metadata(src); + if let Err(e) = copy_symlink_raw(metadata.file_type(), src, dst) { + panic!("failed to copy symlink from `{}` to `{}`: {e}", src.display(), dst.display(),); + } +} + +fn copy_symlink_raw(ty: FileType, src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> { + // Traverse symlink once to find path of target entity. + let target_path = std::fs::read_link(src)?; + + let new_symlink_path = dst.as_ref(); + #[cfg(windows)] + { + use std::os::windows::fs::FileTypeExt; + if ty.is_symlink_dir() { + std::os::windows::fs::symlink_dir(&target_path, new_symlink_path)?; + } else { + // Target may be a file or another symlink, in any case we can use + // `symlink_file` here. + std::os::windows::fs::symlink_file(&target_path, new_symlink_path)?; + } + } + #[cfg(unix)] + { + let _ = ty; + std::os::unix::fs::symlink(target_path, new_symlink_path)?; + } + #[cfg(not(any(windows, unix)))] + { + let _ = ty; + // Technically there's also wasi, but I have no clue about wasi symlink + // semantics and which wasi targets / environment support symlinks. + unimplemented!("unsupported target"); + } + Ok(()) +} + +/// Copy a directory into another. This will not traverse symlinks; instead, it will create new +/// symlinks pointing at target paths that symlinks in the original directory points to. pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) { fn copy_dir_all_inner(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> { let dst = dst.as_ref(); @@ -14,31 +58,7 @@ pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) { if ty.is_dir() { copy_dir_all_inner(entry.path(), dst.join(entry.file_name()))?; } else if ty.is_symlink() { - // Traverse symlink once to find path of target entity. - let target_path = std::fs::read_link(entry.path())?; - - let new_symlink_path = dst.join(entry.file_name()); - #[cfg(windows)] - { - use std::os::windows::fs::FileTypeExt; - if ty.is_symlink_dir() { - std::os::windows::fs::symlink_dir(&target_path, new_symlink_path)?; - } else { - // Target may be a file or another symlink, in any case we can use - // `symlink_file` here. - std::os::windows::fs::symlink_file(&target_path, new_symlink_path)?; - } - } - #[cfg(unix)] - { - std::os::unix::fs::symlink(target_path, new_symlink_path)?; - } - #[cfg(not(any(windows, unix)))] - { - // Technically there's also wasi, but I have no clue about wasi symlink - // semantics and which wasi targets / environment support symlinks. - unimplemented!("unsupported target"); - } + copy_symlink_raw(ty, entry.path(), dst.join(entry.file_name()))?; } else { std::fs::copy(entry.path(), dst.join(entry.file_name()))?; } @@ -64,6 +84,21 @@ pub fn read_dir_entries<P: AsRef<Path>, F: FnMut(&Path)>(dir: P, mut callback: F } } +/// A wrapper around [`build_helper::fs::recursive_remove`] which includes the file path in the +/// panic message. +/// +/// This handles removing symlinks on Windows (e.g. symlink-to-file will be removed via +/// [`std::fs::remove_file`] while symlink-to-dir will be removed via [`std::fs::remove_dir`]). 
+#[track_caller] +pub fn recursive_remove<P: AsRef<Path>>(path: P) { + if let Err(e) = build_helper::fs::recursive_remove(path.as_ref()) { + panic!( + "failed to recursive remove filesystem entities at `{}`: {e}", + path.as_ref().display() + ); + } +} + /// A wrapper around [`std::fs::remove_file`] which includes the file path in the panic message. #[track_caller] pub fn remove_file<P: AsRef<Path>>(path: P) { diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index ab6580a97a7..2323fdf5333 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -1011,24 +1011,25 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "lsp-server" version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9" dependencies = [ "crossbeam-channel", - "ctrlc", "log", - "lsp-types", "serde", "serde_json", ] [[package]] name = "lsp-server" -version = "0.7.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9" +version = "0.7.8" dependencies = [ "crossbeam-channel", + "ctrlc", "log", + "lsp-types", "serde", + "serde_derive", "serde_json", ] @@ -1289,7 +1290,6 @@ name = "paths" version = "0.0.0" dependencies = [ "camino", - "serde", ] [[package]] @@ -1352,12 +1352,12 @@ dependencies = [ name = "proc-macro-api" version = "0.0.0" dependencies = [ - "base-db", "indexmap", "intern", "paths", "rustc-hash 2.0.0", "serde", + "serde_derive", "serde_json", "span", "stdx", @@ -1369,7 +1369,6 @@ dependencies = [ name = "proc-macro-srv" version = "0.0.0" dependencies = [ - "base-db", "expect-test", "intern", "libloading", @@ -1448,6 +1447,7 @@ dependencies = [ "rustc-hash 2.0.0", "semver", "serde", + "serde_derive", "serde_json", "span", "stdx", @@ -1507,9 +1507,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.85.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af462c3a2d524b84a51b6848b439787f01b35c6c1086d3e3086a5f5eea92ed9a" +checksum = "28b782af0a7a8df16ddf43cd70da9f17bc3b1ce712c9e4992b6edb16f5f53632" dependencies = [ "bitflags 2.6.0", "ra-ap-rustc_index", @@ -1518,9 +1518,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index" -version = "0.85.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be6bb8cb0ab78d94a222f1ffd3e87254cdfb57413382b8d6ebe26a85482f99d1" +checksum = "ce5742f134960482f543b35ecebec3cacc6d79a9a685713518b4d8d70c5f9aa8" dependencies = [ "ra-ap-rustc_index_macros", "smallvec", @@ -1528,9 +1528,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.85.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c24b1641455b46e87435b7321219672077066e678963d239a4a2904732979b16" +checksum = "d7ea011fcf68309a8835ad01d91c032cb18444617b00e2cab21d45b208164441" dependencies = [ "proc-macro2", "quote", @@ -1539,9 +1539,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.85.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94daa86974417981fed2f12bd8fb00158dfa6fee561152bed689278c846d0272" +checksum = "eb76f0a4d4c20859e41f0a23bff0f37ab9ca9171c214a6c7dd72ea69434865dc" dependencies = [ "unicode-properties", "unicode-xid", @@ -1549,9 +1549,9 @@ 
dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.85.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc07f6bd581746f358e39c4b6bfe8d455b3d6ad1a857821016d0d42eeb5e1e3e" +checksum = "06080bd35078305421a62da77f3c128482d8d44441b6da8ce9d146d1cd9cdb5b" dependencies = [ "ra-ap-rustc_index", "ra-ap-rustc_lexer", @@ -1559,9 +1559,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.85.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f49b86e1276c1c3c72898410def29b699415f4e7d1dfb3531daf79794694372" +checksum = "68a3154fe4c20c177d7b3c678a2d3a97aba0cca156ddef88959915041889daf0" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.0.0", @@ -1676,7 +1676,7 @@ dependencies = [ "intern", "itertools", "load-cargo", - "lsp-server 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)", + "lsp-server 0.7.7", "lsp-types", "memchr", "mimalloc", @@ -1695,6 +1695,7 @@ dependencies = [ "scip", "semver", "serde", + "serde_derive", "serde_json", "stdx", "syntax", @@ -1822,18 +1823,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.206" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284" +checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.206" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97" +checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" dependencies = [ "proc-macro2", "quote", @@ -1925,12 +1926,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" [[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] name = "stdx" version = "0.0.0" dependencies = [ @@ -1946,9 +1941,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.74" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", @@ -2264,13 +2259,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" -dependencies = [ - "serde", - "stable_deref_trait", -] +checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" [[package]] name = "tt" diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 8086569a781..7f3abcccc47 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -85,11 +85,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.85", default-features = false } -ra-ap-rustc_parse_format = { version = 
"0.85", default-features = false } -ra-ap-rustc_index = { version = "0.85", default-features = false } -ra-ap-rustc_abi = { version = "0.85", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.85", default-features = false } +ra-ap-rustc_lexer = { version = "0.87", default-features = false } +ra-ap-rustc_parse_format = { version = "0.87", default-features = false } +ra-ap-rustc_index = { version = "0.87", default-features = false } +ra-ap-rustc_abi = { version = "0.87", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.87", default-features = false } # local crates that aren't published to crates.io. These should not have versions. test-fixture = { path = "./crates/test-fixture" } @@ -138,7 +138,8 @@ pulldown-cmark = { version = "0.9.0", default-features = false } rayon = "1.8.0" rustc-hash = "2.0.0" semver = "1.0.14" -serde = { version = "1.0.192", features = ["derive"] } +serde = { version = "1.0.192" } +serde_derive = { version = "1.0.192" } serde_json = "1.0.108" smallvec = { version = "1.10.0", features = [ "const_new", @@ -157,7 +158,7 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features = "time", "tracing-log", ] } -triomphe = { version = "0.1.10", default-features = false, features = ["std"] } +triomphe = { version = "0.1.14", default-features = false, features = ["std"] } url = "2.3.1" xshell = "0.2.5" diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index 867bee95bed..433a956ff9a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -18,6 +18,7 @@ use smallvec::SmallVec; use span::{Edition, MacroFileId}; use syntax::{ast, AstPtr, SyntaxNodePtr}; use triomphe::Arc; +use tt::TextRange; use crate::{ db::DefDatabase, @@ -143,15 +144,7 @@ pub struct BodySourceMap { pub types: TypesSourceMap, - // FIXME: Make this a sane struct. - template_map: Option< - Box<( - // format_args! - FxHashMap<ExprId, (HygieneId, Vec<(syntax::TextRange, Name)>)>, - // asm! - FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>, - )>, - >, + template_map: Option<Box<FormatTemplate>>, expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>, @@ -160,6 +153,20 @@ pub struct BodySourceMap { diagnostics: Vec<BodyDiagnostic>, } +#[derive(Default, Debug, Eq, PartialEq)] +struct FormatTemplate { + /// A map from `format_args!()` expressions to their captures. + format_args_to_captures: FxHashMap<ExprId, (HygieneId, Vec<(syntax::TextRange, Name)>)>, + /// A map from `asm!()` expressions to their captures. + asm_to_captures: FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>, + /// A map from desugared expressions of implicit captures to their source. + /// + /// The value stored for each capture is its template literal and offset inside it. The template literal + /// is from the `format_args[_nl]!()` macro and so needs to be mapped up once to go to the user-written + /// template. 
+ implicit_capture_to_source: FxHashMap<ExprId, InFile<(AstPtr<ast::Expr>, TextRange)>>, +} + #[derive(Debug, Eq, PartialEq)] pub enum BodyDiagnostic { InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, @@ -798,18 +805,29 @@ impl BodySourceMap { node: InFile<&ast::FormatArgsExpr>, ) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> { let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>); - let (hygiene, names) = - self.template_map.as_ref()?.0.get(&self.expr_map.get(&src)?.as_expr()?)?; + let (hygiene, names) = self + .template_map + .as_ref()? + .format_args_to_captures + .get(&self.expr_map.get(&src)?.as_expr()?)?; Some((*hygiene, &**names)) } + pub fn format_args_implicit_capture( + &self, + capture_expr: ExprId, + ) -> Option<InFile<(AstPtr<ast::Expr>, TextRange)>> { + self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied() + } + pub fn asm_template_args( &self, node: InFile<&ast::AsmExpr>, ) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> { let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>); let expr = self.expr_map.get(&src)?.as_expr()?; - Some(expr).zip(self.template_map.as_ref()?.1.get(&expr).map(std::ops::Deref::deref)) + Some(expr) + .zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref)) } /// Get a reference to the body source map's diagnostics. @@ -835,8 +853,14 @@ impl BodySourceMap { types, } = self; if let Some(template_map) = template_map { - template_map.0.shrink_to_fit(); - template_map.1.shrink_to_fit(); + let FormatTemplate { + format_args_to_captures, + asm_to_captures, + implicit_capture_to_source, + } = &mut **template_map; + format_args_to_captures.shrink_to_fit(); + asm_to_captures.shrink_to_fit(); + implicit_capture_to_source.shrink_to_fit(); } expr_map.shrink_to_fit(); expr_map_back.shrink_to_fit(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index 3b73d409634..eed9f9468fd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -1957,8 +1957,10 @@ impl ExprCollector<'_> { _ => None, }); let mut mappings = vec![]; - let (fmt, hygiene) = match template.and_then(|it| self.expand_macros_to_string(it)) { - Some((s, is_direct_literal)) => { + let (fmt, hygiene) = match template.and_then(|template| { + self.expand_macros_to_string(template.clone()).map(|it| (it, template)) + }) { + Some(((s, is_direct_literal), template)) => { let call_ctx = self.expander.syntax_context(); let hygiene = self.hygiene_id_for(s.syntax().text_range().start()); let fmt = format_args::parse( @@ -1966,8 +1968,18 @@ impl ExprCollector<'_> { fmt_snippet, args, is_direct_literal, - |name| { + |name, range| { let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name))); + if let Some(range) = range { + self.source_map + .template_map + .get_or_insert_with(Default::default) + .implicit_capture_to_source + .insert( + expr_id, + self.expander.in_file((AstPtr::new(&template), range)), + ); + } if !hygiene.is_root() { self.body.expr_hygiene.insert(expr_id, hygiene); } @@ -2139,7 +2151,7 @@ impl ExprCollector<'_> { self.source_map .template_map .get_or_insert_with(Default::default) - .0 + .format_args_to_captures .insert(idx, (hygiene, mappings)); idx } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs index 
c1b58dbdd0c..68c7173d1e4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs @@ -6,7 +6,7 @@ use syntax::{ ast::{self, HasName, IsString}, AstNode, AstPtr, AstToken, T, }; -use tt::{TextRange, TextSize}; +use tt::TextRange; use crate::{ body::lower::{ExprCollector, FxIndexSet}, @@ -224,7 +224,7 @@ impl ExprCollector<'_> { TextRange::new( inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap(), - ) - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1) + ) }) }; for piece in unverified_pieces { @@ -268,7 +268,11 @@ impl ExprCollector<'_> { Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }), syntax_ptr, ); - self.source_map.template_map.get_or_insert_with(Default::default).1.insert(idx, mappings); + self.source_map + .template_map + .get_or_insert_with(Default::default) + .asm_to_captures + .insert(idx, mappings); idx } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index f8b6eef3422..52b91b522a4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ -685,6 +685,7 @@ impl Printer<'_> { self.print_binding(*id); if let Some(pat) = subpat { self.whitespace(); + w!(self, "@ "); self.print_pat(*pat); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs index 8f010915845..13ba4db6064 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs @@ -426,3 +426,21 @@ fn f() { "should have a binding for `B`", ); } + +#[test] +fn regression_pretty_print_bind_pat() { + let (db, body, owner) = lower( + r#" +fn foo() { + let v @ u = 123; +} +"#, + ); + let printed = body.pretty_print(&db, owner, Edition::CURRENT); + assert_eq!( + printed, + r#"fn foo() -> () { + let v @ u = 123; +}"# + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs index e1c3bd25bcf..e64e498c170 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs @@ -1,5 +1,6 @@ //! Parses `format_args` input. +use either::Either; use hir_expand::name::Name; use intern::Symbol; use rustc_parse_format as parse; @@ -7,7 +8,7 @@ use span::SyntaxContextId; use stdx::TupleExt; use syntax::{ ast::{self, IsString}, - TextRange, TextSize, + TextRange, }; use crate::hir::ExprId; @@ -33,7 +34,7 @@ pub enum FormatArgsPiece { Placeholder(FormatPlaceholder), } -#[derive(Copy, Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct FormatPlaceholder { /// Index into [`FormatArgs::arguments`]. pub argument: FormatArgPosition, @@ -45,11 +46,11 @@ pub struct FormatPlaceholder { pub format_options: FormatOptions, } -#[derive(Copy, Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct FormatArgPosition { /// Which argument this position refers to (Ok), /// or would've referred to if it existed (Err). - pub index: Result<usize, usize>, + pub index: Result<usize, Either<usize, Name>>, /// What kind of position this is. See [`FormatArgPositionKind`]. pub kind: FormatArgPositionKind, /// The span of the name or number. 
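A note on the span convention these hunks converge on: both `to_span` helpers (the asm.rs one above and the format_args.rs one below) stop subtracting the opening-quote width, and the semantics.rs hunks further down re-anchor ranges with the string token's start instead of `open_quote_text_range().end()`. The minimal sketch below uses placeholder types in place of `parse::InnerSpan` and `syntax::TextRange`, and assumes (as the removed subtraction suggests) that the parser now reports offsets relative to the start of the string literal token itself:

// Placeholder stand-ins for `parse::InnerSpan` and `syntax::TextRange`,
// used only to illustrate the offset convention, not the real types.
struct InnerSpan { start: usize, end: usize }
#[derive(Debug, PartialEq)]
struct FileRange { start: u32, end: u32 }

/// Map a span reported by the format-string parser to a file-relative range.
/// Assumption: parser offsets are relative to the start of the literal token,
/// so the only adjustment left is adding the token's position in the file.
fn to_file_range(string_token_start: u32, span: InnerSpan) -> FileRange {
    FileRange {
        start: string_token_start + span.start as u32,
        end: string_token_start + span.end as u32,
    }
}

fn main() {
    // For `r#"{x}"#` starting at file offset 100, a placeholder span inside the
    // literal no longer needs the `r#"` prefix (3 bytes) subtracted first.
    let range = to_file_range(100, InnerSpan { start: 3, end: 6 });
    assert_eq!(range, FileRange { start: 103, end: 106 });
}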
@@ -88,7 +89,7 @@ pub enum FormatTrait { UpperHex, } -#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)] +#[derive(Clone, Default, Debug, PartialEq, Eq)] pub struct FormatOptions { /// The width. E.g. `{:5}` or `{:width$}`. pub width: Option<FormatCount>, @@ -133,7 +134,7 @@ pub enum FormatAlignment { Center, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum FormatCount { /// `{:5}` or `{:.5}` Literal(usize), @@ -173,7 +174,7 @@ pub(crate) fn parse( fmt_snippet: Option<String>, mut args: FormatArgumentsCollector, is_direct_literal: bool, - mut synth: impl FnMut(Name) -> ExprId, + mut synth: impl FnMut(Name, Option<TextRange>) -> ExprId, mut record_usage: impl FnMut(Name, Option<TextRange>), call_ctx: SyntaxContextId, ) -> FormatArgs { @@ -192,7 +193,6 @@ pub(crate) fn parse( } None => None, }; - let mut parser = parse::Parser::new(&text, str_style, fmt_snippet, false, parse::ParseMode::Format); @@ -217,7 +217,6 @@ pub(crate) fn parse( let to_span = |inner_span: parse::InnerSpan| { is_source_literal.then(|| { TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap()) - - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1) }) }; @@ -245,8 +244,8 @@ pub(crate) fn parse( Ok(index) } else { // Doesn't exist as an explicit argument. - invalid_refs.push((index, span, used_as, kind)); - Err(index) + invalid_refs.push((Either::Left(index), span, used_as, kind)); + Err(Either::Left(index)) } } ArgRef::Name(name, span) => { @@ -265,14 +264,17 @@ pub(crate) fn parse( // For the moment capturing variables from format strings expanded from macros is // disabled (see RFC #2795) // FIXME: Diagnose + invalid_refs.push((Either::Right(name.clone()), span, used_as, kind)); + Err(Either::Right(name)) + } else { + record_usage(name.clone(), span); + Ok(args.add(FormatArgument { + kind: FormatArgumentKind::Captured(name.clone()), + // FIXME: This is problematic, we might want to synthesize a dummy + // expression proper and/or desugar these. + expr: synth(name, span), + })) } - record_usage(name.clone(), span); - Ok(args.add(FormatArgument { - kind: FormatArgumentKind::Captured(name.clone()), - // FIXME: This is problematic, we might want to synthesize a dummy - // expression proper and/or desugar these. - expr: synth(name), - })) } } }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index e96e38eceeb..2c3eb5c8e5e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -16,7 +16,7 @@ use syntax::ast; use crate::{ db::DefDatabase, - per_ns::PerNs, + per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem}, visibility::{Visibility, VisibilityExplicitness}, AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, @@ -80,9 +80,9 @@ pub struct ItemScope { /// Defs visible in this scope. This includes `declarations`, but also /// imports. The imports belong to this module and can be resolved by using them on /// the `use_imports_*` fields. 
- types: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportOrExternCrate>)>, - values: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportId>)>, - macros: FxIndexMap<Name, (MacroId, Visibility, Option<ImportId>)>, + types: FxIndexMap<Name, TypesItem>, + values: FxIndexMap<Name, ValuesItem>, + macros: FxIndexMap<Name, MacrosItem>, unresolved: FxHashSet<Name>, /// The defs declared in this scope. Each def has a single scope where it is @@ -92,7 +92,7 @@ pub struct ItemScope { impls: Vec<ImplId>, unnamed_consts: Vec<ConstId>, /// Traits imported via `use Trait as _;`. - unnamed_trait_imports: FxHashMap<TraitId, (Visibility, Option<ImportId>)>, + unnamed_trait_imports: FxHashMap<TraitId, Item<()>>, // the resolutions of the imports of this scope use_imports_types: FxHashMap<ImportOrExternCrate, ImportOrDef>, @@ -187,7 +187,7 @@ impl ItemScope { import = i; } ImportOrDef::Def(ModuleDefId::MacroId(def)) => { - res.macros = Some((def, Visibility::Public, None)); + res.macros = Some(Item { def, vis: Visibility::Public, import: None }); break; } _ => break, @@ -203,7 +203,7 @@ impl ItemScope { import = i; } ImportOrDef::Def(def) => { - res.types = Some((def, Visibility::Public, None)); + res.types = Some(Item { def, vis: Visibility::Public, import: None }); break; } _ => break, @@ -219,7 +219,7 @@ impl ItemScope { import = i; } ImportOrDef::Def(def) => { - res.values = Some((def, Visibility::Public, None)); + res.values = Some(Item { def, vis: Visibility::Public, import: None }); break; } _ => break, @@ -253,8 +253,8 @@ impl ItemScope { } pub(crate) fn modules_in_scope(&self) -> impl Iterator<Item = (ModuleId, Visibility)> + '_ { - self.types.values().copied().filter_map(|(def, vis, _)| match def { - ModuleDefId::ModuleId(module) => Some((module, vis)), + self.types.values().filter_map(|ns| match ns.def { + ModuleDefId::ModuleId(module) => Some((module, ns.vis)), _ => None, }) } @@ -283,20 +283,20 @@ impl ItemScope { } pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> { - self.types.get(name).copied().map(|(a, b, _)| (a, b)) + self.types.get(name).map(|item| (item.def, item.vis)) } /// XXX: this is O(N) rather than O(1), try to not introduce new usages. 
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility, /*declared*/ bool)> { match item { - ItemInNs::Macros(def) => self.macros.iter().find_map(|(name, &(other_def, vis, i))| { - (other_def == def).then_some((name, vis, i.is_none())) + ItemInNs::Macros(def) => self.macros.iter().find_map(|(name, other_def)| { + (other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none())) }), - ItemInNs::Types(def) => self.types.iter().find_map(|(name, &(other_def, vis, i))| { - (other_def == def).then_some((name, vis, i.is_none())) + ItemInNs::Types(def) => self.types.iter().find_map(|(name, other_def)| { + (other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none())) }), - ItemInNs::Values(def) => self.values.iter().find_map(|(name, &(other_def, vis, i))| { - (other_def == def).then_some((name, vis, i.is_none())) + ItemInNs::Values(def) => self.values.iter().find_map(|(name, other_def)| { + (other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none())) }), } } @@ -311,22 +311,34 @@ impl ItemScope { ItemInNs::Macros(def) => self .macros .iter() - .filter_map(|(name, &(other_def, vis, i))| { - (other_def == def).then_some((name, vis, i.is_none())) + .filter_map(|(name, other_def)| { + (other_def.def == def).then_some(( + name, + other_def.vis, + other_def.import.is_none(), + )) }) .find_map(|(a, b, c)| cb(a, b, c)), ItemInNs::Types(def) => self .types .iter() - .filter_map(|(name, &(other_def, vis, i))| { - (other_def == def).then_some((name, vis, i.is_none())) + .filter_map(|(name, other_def)| { + (other_def.def == def).then_some(( + name, + other_def.vis, + other_def.import.is_none(), + )) }) .find_map(|(a, b, c)| cb(a, b, c)), ItemInNs::Values(def) => self .values .iter() - .filter_map(|(name, &(other_def, vis, i))| { - (other_def == def).then_some((name, vis, i.is_none())) + .filter_map(|(name, other_def)| { + (other_def.def == def).then_some(( + name, + other_def.vis, + other_def.import.is_none(), + )) }) .find_map(|(a, b, c)| cb(a, b, c)), } @@ -335,7 +347,7 @@ impl ItemScope { pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ { self.types .values() - .filter_map(|&(def, _, _)| match def { + .filter_map(|def| match def.def { ModuleDefId::TraitId(t) => Some(t), _ => None, }) @@ -344,13 +356,13 @@ impl ItemScope { pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ { self.entries().map(|(name, res)| (Some(name.clone()), res)).chain( - self.unnamed_trait_imports.iter().map(|(tr, (vis, i))| { + self.unnamed_trait_imports.iter().map(|(tr, trait_)| { ( None, PerNs::types( ModuleDefId::TraitId(*tr), - *vis, - i.map(ImportOrExternCrate::Import), + trait_.vis, + trait_.import.map(ImportOrExternCrate::Import), ), ) }), @@ -464,12 +476,12 @@ impl ItemScope { // FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> { - self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a) + self.unnamed_trait_imports.get(&tr).map(|trait_| trait_.vis) } pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) { // FIXME: import - self.unnamed_trait_imports.insert(tr, (vis, None)); + self.unnamed_trait_imports.insert(tr, Item { def: (), vis, import: None }); } pub(crate) fn push_res_with_import( @@ -502,7 +514,7 @@ impl ItemScope { } None | Some(ImportType::Glob(_)) => None, }; - let prev = std::mem::replace(&mut fld.2, import); + let prev = 
std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_types.insert( import, @@ -513,7 +525,7 @@ impl ItemScope { Some(ImportOrExternCrate::ExternCrate(import)) => { ImportOrDef::ExternCrate(import) } - None => ImportOrDef::Def(fld.0), + None => ImportOrDef::Def(fld.def), }, ); } @@ -540,7 +552,7 @@ impl ItemScope { } None | Some(ImportType::Glob(_)) => None, }; - let prev = std::mem::replace(&mut fld.2, import); + let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_types.insert( import, @@ -551,7 +563,7 @@ impl ItemScope { Some(ImportOrExternCrate::ExternCrate(import)) => { ImportOrDef::ExternCrate(import) } - None => ImportOrDef::Def(fld.0), + None => ImportOrDef::Def(fld.def), }, ); } @@ -579,13 +591,13 @@ impl ItemScope { Some(ImportType::Import(import)) => Some(import), _ => None, }; - let prev = std::mem::replace(&mut fld.2, import); + let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_values.insert( import, match prev { Some(import) => ImportOrDef::Import(import), - None => ImportOrDef::Def(fld.0), + None => ImportOrDef::Def(fld.def), }, ); } @@ -599,13 +611,13 @@ impl ItemScope { Some(ImportType::Import(import)) => Some(import), _ => None, }; - let prev = std::mem::replace(&mut fld.2, import); + let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_values.insert( import, match prev { Some(import) => ImportOrDef::Import(import), - None => ImportOrDef::Def(fld.0), + None => ImportOrDef::Def(fld.def), }, ); } @@ -631,13 +643,13 @@ impl ItemScope { Some(ImportType::Import(import)) => Some(import), _ => None, }; - let prev = std::mem::replace(&mut fld.2, import); + let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_macros.insert( import, match prev { Some(import) => ImportOrDef::Import(import), - None => ImportOrDef::Def(fld.0.into()), + None => ImportOrDef::Def(fld.def.into()), }, ); } @@ -651,13 +663,13 @@ impl ItemScope { Some(ImportType::Import(import)) => Some(import), _ => None, }; - let prev = std::mem::replace(&mut fld.2, import); + let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_macros.insert( import, match prev { Some(import) => ImportOrDef::Import(import), - None => ImportOrDef::Def(fld.0.into()), + None => ImportOrDef::Def(fld.def.into()), }, ); } @@ -680,19 +692,19 @@ impl ItemScope { pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) { self.types .values_mut() - .map(|(_, vis, _)| vis) - .chain(self.values.values_mut().map(|(_, vis, _)| vis)) - .chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis)) + .map(|def| &mut def.vis) + .chain(self.values.values_mut().map(|def| &mut def.vis)) + .chain(self.unnamed_trait_imports.values_mut().map(|def| &mut def.vis)) .for_each(|vis| { *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit) }); - for (mac, vis, import) in self.macros.values_mut() { - if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) { + for mac in self.macros.values_mut() { + if matches!(mac.def, MacroId::ProcMacroId(_) if mac.import.is_none()) { continue; } - *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit); + mac.vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit); } } @@ -707,23 +719,23 @@ impl ItemScope { name.map_or("_".to_owned(), |name| name.display(db, 
Edition::LATEST).to_string()) ); - if let Some((.., i)) = def.types { + if let Some(Item { import, .. }) = def.types { buf.push_str(" t"); - match i { + match import { Some(ImportOrExternCrate::Import(_)) => buf.push('i'), Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'), None => (), } } - if let Some((.., i)) = def.values { + if let Some(Item { import, .. }) = def.values { buf.push_str(" v"); - if i.is_some() { + if import.is_some() { buf.push('i'); } } - if let Some((.., i)) = def.macros { + if let Some(Item { import, .. }) = def.macros { buf.push_str(" m"); - if i.is_some() { + if import.is_some() { buf.push('i'); } } @@ -781,19 +793,19 @@ impl ItemScope { pub(crate) fn update_visibility_types(&mut self, name: &Name, vis: Visibility) { let res = self.types.get_mut(name).expect("tried to update visibility of non-existent type"); - res.1 = vis; + res.vis = vis; } pub(crate) fn update_visibility_values(&mut self, name: &Name, vis: Visibility) { let res = self.values.get_mut(name).expect("tried to update visibility of non-existent value"); - res.1 = vis; + res.vis = vis; } pub(crate) fn update_visibility_macros(&mut self, name: &Name, vis: Visibility) { let res = self.macros.get_mut(name).expect("tried to update visibility of non-existent macro"); - res.1 = vis; + res.vis = vis; } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 98b08bcf708..f391cc41c18 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -44,7 +44,7 @@ use crate::{ ResolveMode, }, path::{ImportAlias, ModPath, PathKind}, - per_ns::PerNs, + per_ns::{Item, PerNs}, tt, visibility::{RawVisibility, Visibility}, AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantLoc, @@ -523,7 +523,7 @@ impl DefCollector<'_> { self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None); match per_ns.types { - Some((ModuleDefId::ModuleId(m), _, import)) => { + Some(Item { def: ModuleDefId::ModuleId(m), import, .. 
}) => { // FIXME: This should specifically look for a glob import somehow and record that here self.def_map.prelude = Some(( m, @@ -1069,9 +1069,9 @@ impl DefCollector<'_> { // // This has been historically allowed, but may be not allowed in future // https://github.com/rust-lang/rust/issues/127909 - if let Some((_, v, it)) = defs.types.as_mut() { + if let Some(def) = defs.types.as_mut() { let is_extern_crate_reimport_without_prefix = || { - let Some(ImportOrExternCrate::ExternCrate(_)) = it else { + let Some(ImportOrExternCrate::ExternCrate(_)) = def.import else { return false; }; let Some(ImportType::Import(id)) = def_import_type else { @@ -1086,16 +1086,16 @@ impl DefCollector<'_> { path.segments().len() < 2 }; if is_extern_crate_reimport_without_prefix() { - *v = vis; + def.vis = vis; } else { - *v = v.min(vis, &self.def_map).unwrap_or(vis); + def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis); } } - if let Some((_, v, _)) = defs.values.as_mut() { - *v = v.min(vis, &self.def_map).unwrap_or(vis); + if let Some(def) = defs.values.as_mut() { + def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis); } - if let Some((_, v, _)) = defs.macros.as_mut() { - *v = v.min(vis, &self.def_map).unwrap_or(vis); + if let Some(def) = defs.macros.as_mut() { + def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis); } let mut changed = false; @@ -1106,12 +1106,12 @@ impl DefCollector<'_> { // Multiple globs may import the same item and they may override visibility from // previously resolved globs. Handle overrides here and leave the rest to // `ItemScope::push_res_with_import()`. - if let Some((def, def_vis, _)) = defs.types { - if let Some((prev_def, prev_vis, _)) = prev_defs.types { - if def == prev_def + if let Some(def) = defs.types { + if let Some(prev_def) = prev_defs.types { + if def.def == prev_def.def && self.from_glob_import.contains_type(module_id, name.clone()) - && def_vis != prev_vis - && def_vis.max(prev_vis, &self.def_map) == Some(def_vis) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) { changed = true; // This import is being handled here, don't pass it down to @@ -1119,41 +1119,41 @@ impl DefCollector<'_> { defs.types = None; self.def_map.modules[module_id] .scope - .update_visibility_types(name, def_vis); + .update_visibility_types(name, def.vis); } } } - if let Some((def, def_vis, _)) = defs.values { - if let Some((prev_def, prev_vis, _)) = prev_defs.values { - if def == prev_def + if let Some(def) = defs.values { + if let Some(prev_def) = prev_defs.values { + if def.def == prev_def.def && self.from_glob_import.contains_value(module_id, name.clone()) - && def_vis != prev_vis - && def_vis.max(prev_vis, &self.def_map) == Some(def_vis) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) { changed = true; // See comment above. defs.values = None; self.def_map.modules[module_id] .scope - .update_visibility_values(name, def_vis); + .update_visibility_values(name, def.vis); } } } - if let Some((def, def_vis, _)) = defs.macros { - if let Some((prev_def, prev_vis, _)) = prev_defs.macros { - if def == prev_def + if let Some(def) = defs.macros { + if let Some(prev_def) = prev_defs.macros { + if def.def == prev_def.def && self.from_glob_import.contains_macro(module_id, name.clone()) - && def_vis != prev_vis - && def_vis.max(prev_vis, &self.def_map) == Some(def_vis) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) { changed = true; // See comment above. 
defs.macros = None; self.def_map.modules[module_id] .scope - .update_visibility_macros(name, def_vis); + .update_visibility_macros(name, def.vis); } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs index 29379d00749..8eb195680d1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs @@ -67,8 +67,8 @@ impl PerNs { db: &dyn DefDatabase, expected: Option<MacroSubNs>, ) -> Self { - self.macros = self.macros.filter(|&(id, _, _)| { - let this = MacroSubNs::from_id(db, id); + self.macros = self.macros.filter(|def| { + let this = MacroSubNs::from_id(db, def.def); sub_namespace_match(Some(this), expected) }); @@ -411,7 +411,7 @@ impl DefMap { original_module: LocalModuleId, ) -> ResolvePathResult { for (i, segment) in segments { - let (curr, vis, imp) = match curr_per_ns.take_types_full() { + let curr = match curr_per_ns.take_types_full() { Some(r) => r, None => { // we still have path segments left, but the path so far @@ -424,7 +424,7 @@ impl DefMap { }; // resolve segment in curr - curr_per_ns = match curr { + curr_per_ns = match curr.def { ModuleDefId::ModuleId(module) => { if module.krate != self.krate { let path = ModPath::from_segments( @@ -492,7 +492,7 @@ impl DefMap { Some(res) => res, None => { return ResolvePathResult::new( - PerNs::types(e.into(), vis, imp), + PerNs::types(e.into(), curr.vis, curr.import), ReachedFixedPoint::Yes, Some(i), false, @@ -510,7 +510,7 @@ impl DefMap { ); return ResolvePathResult::new( - PerNs::types(s, vis, imp), + PerNs::types(s, curr.vis, curr.import), ReachedFixedPoint::Yes, Some(i), false, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index 1cfbabca28c..c8b7ec463a0 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -331,7 +331,7 @@ pub type Ty = (); } for (_, res) in module_data.scope.resolutions() { - match res.values.map(|(a, _, _)| a).or(res.types.map(|(a, _, _)| a)).unwrap() { + match res.values.map(|it| it.def).or(res.types.map(|it| it.def)).unwrap() { ModuleDefId::FunctionId(f) => _ = db.function_data(f), ModuleDefId::AdtId(adt) => match adt { AdtId::StructId(it) => _ = db.struct_data(it), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs index 3f3b98c6b5b..899dd4afffe 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs @@ -28,11 +28,22 @@ bitflags! 
{ } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Item<Def, Import = ImportId> { + pub def: Def, + pub vis: Visibility, + pub import: Option<Import>, +} + +pub type TypesItem = Item<ModuleDefId, ImportOrExternCrate>; +pub type ValuesItem = Item<ModuleDefId>; +pub type MacrosItem = Item<MacroId>; + #[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] pub struct PerNs { - pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>, - pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>, - pub macros: Option<(MacroId, Visibility, Option<ImportId>)>, + pub types: Option<TypesItem>, + pub values: Option<ValuesItem>, + pub macros: Option<MacrosItem>, } impl PerNs { @@ -48,29 +59,33 @@ impl PerNs { PerNs { types: None, values: None, macros: None } } - pub fn values(t: ModuleDefId, v: Visibility, i: Option<ImportId>) -> PerNs { - PerNs { types: None, values: Some((t, v, i)), macros: None } + pub fn values(def: ModuleDefId, vis: Visibility, import: Option<ImportId>) -> PerNs { + PerNs { types: None, values: Some(Item { def, vis, import }), macros: None } } - pub fn types(t: ModuleDefId, v: Visibility, i: Option<ImportOrExternCrate>) -> PerNs { - PerNs { types: Some((t, v, i)), values: None, macros: None } + pub fn types(def: ModuleDefId, vis: Visibility, import: Option<ImportOrExternCrate>) -> PerNs { + PerNs { types: Some(Item { def, vis, import }), values: None, macros: None } } pub fn both( types: ModuleDefId, values: ModuleDefId, - v: Visibility, - i: Option<ImportOrExternCrate>, + vis: Visibility, + import: Option<ImportOrExternCrate>, ) -> PerNs { PerNs { - types: Some((types, v, i)), - values: Some((values, v, i.and_then(ImportOrExternCrate::into_import))), + types: Some(Item { def: types, vis, import }), + values: Some(Item { + def: values, + vis, + import: import.and_then(ImportOrExternCrate::into_import), + }), macros: None, } } - pub fn macros(macro_: MacroId, v: Visibility, i: Option<ImportId>) -> PerNs { - PerNs { types: None, values: None, macros: Some((macro_, v, i)) } + pub fn macros(def: MacroId, vis: Visibility, import: Option<ImportId>) -> PerNs { + PerNs { types: None, values: None, macros: Some(Item { def, vis, import }) } } pub fn is_none(&self) -> bool { @@ -82,43 +97,43 @@ impl PerNs { } pub fn take_types(self) -> Option<ModuleDefId> { - self.types.map(|it| it.0) + self.types.map(|it| it.def) } - pub fn take_types_full(self) -> Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)> { + pub fn take_types_full(self) -> Option<TypesItem> { self.types } pub fn take_values(self) -> Option<ModuleDefId> { - self.values.map(|it| it.0) + self.values.map(|it| it.def) } pub fn take_values_import(self) -> Option<(ModuleDefId, Option<ImportId>)> { - self.values.map(|it| (it.0, it.2)) + self.values.map(|it| (it.def, it.import)) } pub fn take_macros(self) -> Option<MacroId> { - self.macros.map(|it| it.0) + self.macros.map(|it| it.def) } pub fn take_macros_import(self) -> Option<(MacroId, Option<ImportId>)> { - self.macros.map(|it| (it.0, it.2)) + self.macros.map(|it| (it.def, it.import)) } pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs { let _p = tracing::info_span!("PerNs::filter_visibility").entered(); PerNs { - types: self.types.filter(|&(_, v, _)| f(v)), - values: self.values.filter(|&(_, v, _)| f(v)), - macros: self.macros.filter(|&(_, v, _)| f(v)), + types: self.types.filter(|def| f(def.vis)), + values: self.values.filter(|def| f(def.vis)), + macros: self.macros.filter(|def| f(def.vis)), } } 
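The recurring change across item_scope.rs, per_ns.rs, collector.rs and resolver.rs is one refactor: the positional `(def, vis, import)` triples behind `PerNs` and `ItemScope` become a named `Item { def, vis, import }` struct. A minimal, self-contained sketch of that shape, with placeholder types where the real code uses `ModuleDefId`, `MacroId`, `Visibility` and the import ids, shows why call sites such as `with_visibility` get shorter:

// Placeholder visibility: `true` stands for public. The real code uses
// rust-analyzer's `Visibility` enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Visibility(bool);

// Named fields replace `.0`/`.1`/`.2` tuple indexing.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Item<Def, Import = u32> {
    def: Def,
    vis: Visibility,
    import: Option<Import>,
}

#[derive(Debug, Clone, Copy)]
struct PerNs {
    types: Option<Item<u32>>,
    values: Option<Item<u32>>,
}

impl PerNs {
    // Struct-update syntax keeps "same item, new visibility" to one expression
    // instead of rebuilding each tuple element by element.
    fn with_visibility(self, vis: Visibility) -> PerNs {
        PerNs {
            types: self.types.map(|def| Item { vis, ..def }),
            values: self.values.map(|def| Item { vis, ..def }),
        }
    }
}

fn main() {
    let ns = PerNs {
        types: Some(Item { def: 1, vis: Visibility(false), import: None }),
        values: None,
    };
    let public = ns.with_visibility(Visibility(true));
    assert_eq!(public.types.unwrap().vis, Visibility(true));
}

Field names also document what the old `.1`/`.2` accesses meant, which is most of what the later collector.rs and resolver.rs hunks amount to.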
pub fn with_visibility(self, vis: Visibility) -> PerNs { PerNs { - types: self.types.map(|(it, _, c)| (it, vis, c)), - values: self.values.map(|(it, _, c)| (it, vis, c)), - macros: self.macros.map(|(it, _, import)| (it, vis, import)), + types: self.types.map(|def| Item { vis, ..def }), + values: self.values.map(|def| Item { vis, ..def }), + macros: self.macros.map(|def| Item { vis, ..def }), } } @@ -141,15 +156,17 @@ impl PerNs { pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> { let _p = tracing::info_span!("PerNs::iter_items").entered(); self.types - .map(|it| (ItemInNs::Types(it.0), it.2)) + .map(|it| (ItemInNs::Types(it.def), it.import)) .into_iter() .chain( - self.values - .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))), + self.values.map(|it| { + (ItemInNs::Values(it.def), it.import.map(ImportOrExternCrate::Import)) + }), ) .chain( - self.macros - .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))), + self.macros.map(|it| { + (ItemInNs::Macros(it.def), it.import.map(ImportOrExternCrate::Import)) + }), ) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 316406c151f..f4dfd42a30e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -933,8 +933,8 @@ impl ModuleItemMap { Some(ResolveValueResult::ValueNs(value, import)) } Some(idx) => { - let (def, _, import) = module_def.take_types_full()?; - let ty = match def { + let def = module_def.take_types_full()?; + let ty = match def.def { ModuleDefId::AdtId(it) => TypeNs::AdtId(it), ModuleDefId::TraitId(it) => TypeNs::TraitId(it), ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it), @@ -948,7 +948,7 @@ impl ModuleItemMap { | ModuleDefId::MacroId(_) | ModuleDefId::StaticId(_) => return None, }; - Some(ResolveValueResult::Partial(ty, idx, import)) + Some(ResolveValueResult::Partial(ty, idx, def.import)) } } } @@ -986,8 +986,8 @@ fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option<ImportId>)> { } fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> { - let (def, _, import) = per_ns.take_types_full()?; - let res = match def { + let def = per_ns.take_types_full()?; + let res = match def.def { ModuleDefId::AdtId(it) => TypeNs::AdtId(it), ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it), @@ -1003,7 +1003,7 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> { | ModuleDefId::StaticId(_) | ModuleDefId::ModuleId(_) => return None, }; - Some((res, import)) + Some((res, def.import)) } #[derive(Default)] @@ -1019,14 +1019,14 @@ impl ScopeNames { } } fn add_per_ns(&mut self, name: &Name, def: PerNs) { - if let &Some((ty, _, _)) = &def.types { - self.add(name, ScopeDef::ModuleDef(ty)) + if let Some(ty) = &def.types { + self.add(name, ScopeDef::ModuleDef(ty.def)) } - if let &Some((def, _, _)) = &def.values { - self.add(name, ScopeDef::ModuleDef(def)) + if let Some(def) = &def.values { + self.add(name, ScopeDef::ModuleDef(def.def)) } - if let &Some((mac, _, _)) = &def.macros { - self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))) + if let Some(mac) = &def.macros { + self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac.def))) } if def.is_none() { self.add(name, ScopeDef::Unknown) diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 
0d19ae202ce..fa400378f3a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -153,13 +153,13 @@ fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId { /// This expands the given macro call, but with different arguments. This is /// used for completion, where we want to see what 'would happen' if we insert a /// token. The `token_to_map` mapped down into the expansion, with the mapped -/// token returned. +/// token(s) returned with their priority. pub fn expand_speculative( db: &dyn ExpandDatabase, actual_macro_call: MacroCallId, speculative_args: &SyntaxNode, token_to_map: SyntaxToken, -) -> Option<(SyntaxNode, SyntaxToken)> { +) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let loc = db.lookup_intern_macro_call(actual_macro_call); let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind); @@ -303,17 +303,19 @@ pub fn expand_speculative( token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition); let syntax_node = node.syntax_node(); - let (token, _) = rev_tmap + let token = rev_tmap .ranges_with_span(span_map.span_for_range(token_to_map.text_range())) .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx))) - .min_by_key(|(t, ctx)| { + .map(|(t, ctx)| { // prefer tokens of the same kind and text, as well as non opaque marked ones // Note the inversion of the score here, as we want to prefer the first token in case // of all tokens having the same score - ctx.is_opaque(db) as u8 + let ranking = ctx.is_opaque(db) as u8 + 2 * (t.kind() != token_to_map.kind()) as u8 - + 4 * ((t.text() != token_to_map.text()) as u8) - })?; + + 4 * ((t.text() != token_to_map.text()) as u8); + (t, ranking) + }) + .collect(); Some((node.syntax_node(), token)) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs index 2bba410de02..9a7a1a01a09 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs @@ -237,7 +237,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ template!(List: "address, kcfi, memory, thread"), DuplicatesOk, experimental!(no_sanitize) ), - ungated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing), + gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)), ungated!( doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 53795c0b600..55d0edd5e0c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -22,7 +22,6 @@ use hir_def::{ use crate::{ db::{HirDatabase, InternedCoroutine}, - display::HirDisplay, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, generics::generics, make_binders, make_single_type_binders, @@ -823,13 +822,12 @@ pub(crate) fn impl_datum_query( let _p = tracing::info_span!("impl_datum_query").entered(); debug!("impl_datum {:?}", impl_id); let impl_: hir_def::ImplId = from_chalk(db, impl_id); - impl_def_datum(db, krate, impl_id, impl_) + impl_def_datum(db, krate, impl_) } fn impl_def_datum( db: &dyn HirDatabase, krate: CrateId, - chalk_id: ImplId, impl_id: hir_def::ImplId, ) -> 
Arc<ImplDatum> { let trait_ref = db @@ -850,13 +848,6 @@ fn impl_def_datum( }; let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars); let negative = impl_data.is_negative; - debug!( - "impl {:?}: {}{} where {:?}", - chalk_id, - if negative { "!" } else { "" }, - trait_ref.display(db, db.crate_graph()[krate].edition), - where_clauses - ); let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 193aaa52c26..6bba83fac98 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -193,10 +193,19 @@ impl<'a> UnsafeVisitor<'a> { self.resolver.reset_to_guard(guard); } Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => { - if let Expr::Path(_) = self.body.exprs[*expr] { + match self.body.exprs[*expr] { // Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`, // see https://github.com/rust-lang/rust/pull/125834. - return; + Expr::Path(_) => return, + // https://github.com/rust-lang/rust/pull/129248 + // Taking a raw ref to a deref place expr is always safe. + Expr::UnaryOp { expr, op: UnaryOp::Deref } => { + self.body + .walk_child_exprs_without_pats(expr, |child| self.walk_expr(child)); + + return; + } + _ => (), } } Expr::MethodCall { .. } => { diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 612c6adb207..cbb1ed95ed6 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -262,7 +262,7 @@ pub struct UnresolvedAssocItem { #[derive(Debug)] pub struct UnresolvedIdent { - pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, + pub node: InFile<(AstPtr<Either<ast::Expr, ast::Pat>>, Option<TextRange>)>, } #[derive(Debug)] @@ -550,11 +550,10 @@ impl AnyDiagnostic { source_map: &hir_def::body::BodySourceMap, ) -> Option<AnyDiagnostic> { let expr_syntax = |expr| { - source_map.expr_syntax(expr).inspect_err(|_| tracing::error!("synthetic syntax")).ok() - }; - let pat_syntax = |pat| { - source_map.pat_syntax(pat).inspect_err(|_| tracing::error!("synthetic syntax")).ok() + source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok() }; + let pat_syntax = + |pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok(); let expr_or_pat_syntax = |id| match id { ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)), ExprOrPatId::PatId(pat) => pat_syntax(pat), @@ -626,8 +625,16 @@ impl AnyDiagnostic { UnresolvedAssocItem { expr_or_pat }.into() } &InferenceDiagnostic::UnresolvedIdent { id } => { - let expr_or_pat = expr_or_pat_syntax(id)?; - UnresolvedIdent { expr_or_pat }.into() + let node = match id { + ExprOrPatId::ExprId(id) => match source_map.expr_syntax(id) { + Ok(syntax) => syntax.map(|it| (it.wrap_left(), None)), + Err(SyntheticSyntax) => source_map + .format_args_implicit_capture(id)? 
+ .map(|(node, range)| (node.wrap_left(), Some(range))), + }, + ExprOrPatId::PatId(id) => pat_syntax(id)?.map(|it| (it, None)), + }; + UnresolvedIdent { node }.into() } &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => { let expr = expr_syntax(expr)?; diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 3bc2eee1e7c..dfc91c73433 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -3105,10 +3105,10 @@ impl From<ModuleDef> for ItemInNs { } impl ItemInNs { - pub fn as_module_def(self) -> Option<ModuleDef> { + pub fn into_module_def(self) -> ModuleDef { match self { - ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id), - ItemInNs::Macros(_) => None, + ItemInNs::Types(id) | ItemInNs::Values(id) => id, + ItemInNs::Macros(id) => ModuleDef::Macro(id), } } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index b896cda9ddf..1cf22b05e7f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -38,9 +38,9 @@ use span::{AstIdMap, EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams, IsString as _}, - AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, - TextSize, + ast::{self, HasAttrs as _, HasGenericParams}, + AstNode, AstToken, Direction, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, + TextRange, TextSize, }; use triomphe::Arc; @@ -571,7 +571,7 @@ impl<'db> SemanticsImpl<'db> { actual_macro_call: &ast::MacroCall, speculative_args: &ast::TokenTree, token_to_map: SyntaxToken, - ) -> Option<(SyntaxNode, SyntaxToken)> { + ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let SourceAnalyzer { file_id, resolver, .. 
} = self.analyze_no_infer(actual_macro_call.syntax())?; let macro_call = InFile::new(file_id, actual_macro_call); @@ -592,7 +592,7 @@ impl<'db> SemanticsImpl<'db> { macro_file: MacroFileId, speculative_args: &SyntaxNode, token_to_map: SyntaxToken, - ) -> Option<(SyntaxNode, SyntaxToken)> { + ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { hir_expand::db::expand_speculative( self.db.upcast(), macro_file.macro_call_id, @@ -608,7 +608,7 @@ impl<'db> SemanticsImpl<'db> { actual_macro_call: &ast::Item, speculative_args: &ast::Item, token_to_map: SyntaxToken, - ) -> Option<(SyntaxNode, SyntaxToken)> { + ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?; hir_expand::db::expand_speculative( @@ -624,7 +624,7 @@ impl<'db> SemanticsImpl<'db> { actual_macro_call: &ast::Attr, speculative_args: &ast::Attr, token_to_map: SyntaxToken, - ) -> Option<(SyntaxNode, SyntaxToken)> { + ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let attr = self.wrap_node_infile(actual_macro_call.clone()); let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?; let macro_call_id = self.with_ctx(|ctx| { @@ -643,8 +643,7 @@ impl<'db> SemanticsImpl<'db> { &self, string: &ast::String, ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> { - let quote = string.open_quote_text_range()?; - + let string_start = string.syntax().text_range().start(); let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?; self.descend_into_macros_breakable(token, |token, _| { (|| { @@ -658,7 +657,7 @@ impl<'db> SemanticsImpl<'db> { let format_args = self.wrap_node_infile(format_args); let res = source_analyzer .as_format_args_parts(self.db, format_args.as_ref())? 
- .map(|(range, res)| (range + quote.end(), res.map(Either::Left))) + .map(|(range, res)| (range + string_start, res.map(Either::Left))) .collect(); Some(res) } else { @@ -672,7 +671,7 @@ impl<'db> SemanticsImpl<'db> { .iter() .map(|&(range, index)| { ( - range + quote.end(), + range + string_start, Some(Either::Right(InlineAsmOperand { owner, expr, index })), ) }) @@ -690,17 +689,16 @@ impl<'db> SemanticsImpl<'db> { original_token: SyntaxToken, offset: TextSize, ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> { - let original_string = ast::String::cast(original_token.clone())?; + let string_start = original_token.text_range().start(); let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?; - let quote = original_string.open_quote_text_range()?; self.descend_into_macros_breakable(original_token, |token, _| { (|| { let token = token.value; self.resolve_offset_in_format_args( ast::String::cast(token)?, - offset.checked_sub(quote.end())?, + offset.checked_sub(string_start)?, ) - .map(|(range, res)| (range + quote.end(), res)) + .map(|(range, res)| (range + string_start, res)) })() .map_or(ControlFlow::Continue(()), ControlFlow::Break) }) @@ -1542,6 +1540,21 @@ impl<'db> SemanticsImpl<'db> { Some(items.iter_items().map(|(item, _)| item.into())) } + pub fn resolve_mod_path_relative( + &self, + to: Module, + segments: impl IntoIterator<Item = SmolStr>, + ) -> Option<impl Iterator<Item = ItemInNs>> { + let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items( + self.db.upcast(), + &ModPath::from_segments( + hir_def::path::PathKind::Plain, + segments.into_iter().map(|it| Name::new(&it, SyntaxContextId::ROOT)), + ), + ); + Some(items.iter_items().map(|(item, _)| item.into())) + } + fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> { self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs index 0146369f298..074d943719f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs @@ -3,6 +3,7 @@ use hir::{FileRange, Semantics}; use ide_db::EditionedFileId; use ide_db::{label::Label, FileId, RootDatabase}; +use syntax::Edition; use syntax::{ algo::{self, find_node_at_offset, find_node_at_range}, AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, @@ -94,6 +95,10 @@ impl<'a> AssistContext<'a> { self.frange.file_id } + pub(crate) fn edition(&self) -> Edition { + self.frange.file_id.edition() + } + pub(crate) fn has_empty_selection(&self) -> bool { self.trimmed_range.is_empty() } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index 3f0d5cf152c..b9142d0318a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -1,10 +1,12 @@ -use ide_db::text_edit::TextRange; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, - search::{FileReference, SearchScope, UsageSearchResult}, + search::{FileReference, SearchScope}, + syntax_helpers::suggest_name, + text_edit::TextRange, }; use itertools::Itertools; +use syntax::SmolStr; use syntax::{ ast::{self, 
make, AstNode, FieldExpr, HasName, IdentPat}, ted, @@ -122,33 +124,43 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat return None; } - let name = ident_pat.name()?.to_string(); - - let usages = ctx.sema.to_def(&ident_pat).map(|def| { + let usages = ctx.sema.to_def(&ident_pat).and_then(|def| { Definition::Local(def) .usages(&ctx.sema) .in_scope(&SearchScope::single_file(ctx.file_id())) .all() + .iter() + .next() + .map(|(_, refs)| refs.to_vec()) }); - let field_names = (0..field_types.len()) - .map(|i| generate_name(ctx, i, &name, &ident_pat, &usages)) + let mut name_generator = { + let mut names = vec![]; + if let Some(scope) = ctx.sema.scope(ident_pat.syntax()) { + scope.process_all_names(&mut |name, scope| { + if let hir::ScopeDef::Local(_) = scope { + names.push(name.as_str().into()) + } + }) + } + suggest_name::NameGenerator::new_with_names(names.iter().map(|s: &SmolStr| s.as_str())) + }; + + let field_names = field_types + .into_iter() + .enumerate() + .map(|(id, ty)| { + match name_generator.for_type(&ty, ctx.db(), ctx.edition()) { + Some(name) => name, + None => name_generator.suggest_name(&format!("_{}", id)), + } + .to_string() + }) .collect::<Vec<_>>(); Some(TupleData { ident_pat, ref_type, field_names, usages }) } -fn generate_name( - _ctx: &AssistContext<'_>, - index: usize, - _tuple_name: &str, - _ident_pat: &IdentPat, - _usages: &Option<UsageSearchResult>, -) -> String { - // FIXME: detect if name already used - format!("_{index}") -} - enum RefType { ReadOnly, Mutable, @@ -157,7 +169,7 @@ struct TupleData { ident_pat: IdentPat, ref_type: Option<RefType>, field_names: Vec<String>, - usages: Option<UsageSearchResult>, + usages: Option<Vec<FileReference>>, } fn edit_tuple_assignment( ctx: &AssistContext<'_>, @@ -213,42 +225,23 @@ fn edit_tuple_usages( ctx: &AssistContext<'_>, in_sub_pattern: bool, ) -> Option<Vec<EditTupleUsage>> { - let mut current_file_usages = None; - - if let Some(usages) = data.usages.as_ref() { - // We need to collect edits first before actually applying them - // as mapping nodes to their mutable node versions requires an - // unmodified syntax tree. - // - // We also defer editing usages in the current file first since - // tree mutation in the same file breaks when `builder.edit_file` - // is called - - if let Some((_, refs)) = usages.iter().find(|(file_id, _)| *file_id == ctx.file_id()) { - current_file_usages = Some( - refs.iter() - .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern)) - .collect_vec(), - ); - } - - for (file_id, refs) in usages.iter() { - if file_id == ctx.file_id() { - continue; - } - - edit.edit_file(file_id.file_id()); - - let tuple_edits = refs - .iter() - .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern)) - .collect_vec(); - - tuple_edits.into_iter().for_each(|tuple_edit| tuple_edit.apply(edit)) - } - } - - current_file_usages + // We need to collect edits first before actually applying them + // as mapping nodes to their mutable node versions requires an + // unmodified syntax tree. + // + // We also defer editing usages in the current file first since + // tree mutation in the same file breaks when `builder.edit_file` + // is called + + let edits = data + .usages + .as_ref()? 
+ .as_slice() + .iter() + .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern)) + .collect_vec(); + + Some(edits) } fn edit_tuple_usage( ctx: &AssistContext<'_>, @@ -1769,14 +1762,14 @@ struct S4 { } fn foo() -> Option<()> { - let ($0_0, _1, _2, _3, _4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5); + let ($0_0, _1, _2, _3, s4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5); let v: i32 = *_0; // deref, no parens let v: &i32 = _0; // no deref, no parens, remove `&` f1(*_0); // deref, no parens f2(_0); // `&*` -> cancel out -> no deref, no parens // https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639 // let v: i32 = t.1.0; // no deref, no parens - let v: i32 = _4.value; // no deref, no parens + let v: i32 = s4.value; // no deref, no parens (*_0).do_stuff(); // deref, parens let v: i32 = (*_2)?; // deref, parens let v: i32 = _3[0]; // no deref, no parens @@ -1815,8 +1808,8 @@ impl S { } fn main() { - let ($0_0, _1) = &(S,2); - let s = _0.f(); + let ($0s, _1) = &(S,2); + let s = s.f(); } "#, ) @@ -1845,8 +1838,8 @@ impl S { } fn main() { - let ($0_0, _1) = &(S,2); - let s = (*_0).f(); + let ($0s, _1) = &(S,2); + let s = (*s).f(); } "#, ) @@ -1882,8 +1875,8 @@ impl T for &S { } fn main() { - let ($0_0, _1) = &(S,2); - let s = (*_0).f(); + let ($0s, _1) = &(S,2); + let s = (*s).f(); } "#, ) @@ -1923,8 +1916,8 @@ impl T for &S { } fn main() { - let ($0_0, _1) = &(S,2); - let s = (*_0).f(); + let ($0s, _1) = &(S,2); + let s = (*s).f(); } "#, ) @@ -1951,8 +1944,8 @@ impl S { fn do_stuff(&self) -> i32 { 42 } } fn main() { - let ($0_0, _1) = &(S,&S); - let v = _0.do_stuff(); + let ($0s, s1) = &(S,&S); + let v = s.do_stuff(); } "#, ) @@ -1973,7 +1966,7 @@ fn main() { // `t.0` gets auto-refed -> no deref needed -> no parens let v = t.0.do_stuff(); // no deref, no parens let v = &t.0.do_stuff(); // `&` is for result -> no deref, no parens - // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S` + // deref: `s1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S` let v = t.1.do_stuff(); // deref, parens } "#, @@ -1984,13 +1977,13 @@ impl S { fn do_stuff(&self) -> i32 { 42 } } fn main() { - let ($0_0, _1) = &(S,&S); - let v = _0.do_stuff(); // no deref, remove parens + let ($0s, s1) = &(S,&S); + let v = s.do_stuff(); // no deref, remove parens // `t.0` gets auto-refed -> no deref needed -> no parens - let v = _0.do_stuff(); // no deref, no parens - let v = &_0.do_stuff(); // `&` is for result -> no deref, no parens - // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S` - let v = (*_1).do_stuff(); // deref, parens + let v = s.do_stuff(); // no deref, no parens + let v = &s.do_stuff(); // `&` is for result -> no deref, no parens + // deref: `s1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S` + let v = (*s1).do_stuff(); // deref, parens } "#, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index a8d71ed7f4d..6735d7dcbe1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -1,5 +1,8 @@ use hir::{HirDisplay, TypeInfo}; -use ide_db::{assists::GroupLabel, syntax_helpers::suggest_name}; +use ide_db::{ + assists::GroupLabel, + syntax_helpers::{suggest_name, 
LexedStr}, +}; use syntax::{ ast::{ self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory, @@ -320,24 +323,58 @@ impl ExtractionKind { ctx: &AssistContext<'_>, to_extract: &ast::Expr, ) -> (String, SyntaxNode) { - let field_shorthand = to_extract - .syntax() - .parent() - .and_then(ast::RecordExprField::cast) - .filter(|field| field.name_ref().is_some()); - let (var_name, expr_replace) = match field_shorthand { - Some(field) => (field.to_string(), field.syntax().clone()), - None => { - (suggest_name::for_variable(to_extract, &ctx.sema), to_extract.syntax().clone()) + // We only do this sort of extraction for fields because they should have lowercase names + if let ExtractionKind::Variable = self { + let field_shorthand = to_extract + .syntax() + .parent() + .and_then(ast::RecordExprField::cast) + .filter(|field| field.name_ref().is_some()); + + if let Some(field) = field_shorthand { + return (field.to_string(), field.syntax().clone()); } + } + + let var_name = if let Some(literal_name) = get_literal_name(ctx, to_extract) { + literal_name + } else { + suggest_name::for_variable(to_extract, &ctx.sema) }; let var_name = match self { - ExtractionKind::Variable => var_name, + ExtractionKind::Variable => var_name.to_lowercase(), ExtractionKind::Constant | ExtractionKind::Static => var_name.to_uppercase(), }; - (var_name, expr_replace) + (var_name, to_extract.syntax().clone()) + } +} + +fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String> { + let literal = match expr { + ast::Expr::Literal(literal) => literal, + _ => return None, + }; + let inner = match literal.kind() { + ast::LiteralKind::String(string) => string.value().ok()?.into_owned(), + ast::LiteralKind::ByteString(byte_string) => { + String::from_utf8(byte_string.value().ok()?.into_owned()).ok()? + } + ast::LiteralKind::CString(cstring) => { + String::from_utf8(cstring.value().ok()?.into_owned()).ok()? 
+ } + _ => return None, + }; + + // Entirely arbitrary + if inner.len() > 32 { + return None; + } + + match LexedStr::single_token(ctx.file_id().edition(), &inner) { + Some((SyntaxKind::IDENT, None)) => Some(inner), + _ => None, } } @@ -493,7 +530,7 @@ fn main() { "#, r#" fn main() { - let $0var_name = "hello"; + let $0hello = "hello"; } "#, "Extract into variable", @@ -588,7 +625,7 @@ fn main() { "#, r#" fn main() { - const $0VAR_NAME: &str = "hello"; + const $0HELLO: &str = "hello"; } "#, "Extract into constant", @@ -683,7 +720,7 @@ fn main() { "#, r#" fn main() { - static $0VAR_NAME: &str = "hello"; + static $0HELLO: &str = "hello"; } "#, "Extract into static", @@ -2479,4 +2516,120 @@ fn foo() { "Extract into variable", ); } + + #[test] + fn extract_string_literal() { + check_assist_by_label( + extract_variable, + r#" +struct Entry(&str); +fn foo() { + let entry = Entry($0"Hello"$0); +} +"#, + r#" +struct Entry(&str); +fn foo() { + let $0hello = "Hello"; + let entry = Entry(hello); +} +"#, + "Extract into variable", + ); + + check_assist_by_label( + extract_variable, + r#" +struct Entry(&str); +fn foo() { + let entry = Entry($0"Hello"$0); +} +"#, + r#" +struct Entry(&str); +fn foo() { + const $0HELLO: &str = "Hello"; + let entry = Entry(HELLO); +} +"#, + "Extract into constant", + ); + + check_assist_by_label( + extract_variable, + r#" +struct Entry(&str); +fn foo() { + let entry = Entry($0"Hello"$0); +} +"#, + r#" +struct Entry(&str); +fn foo() { + static $0HELLO: &str = "Hello"; + let entry = Entry(HELLO); +} +"#, + "Extract into static", + ); + } + + #[test] + fn extract_variable_string_literal_use_field_shorthand() { + // When field shorthand is available, it should + // only be used when extracting into a variable + check_assist_by_label( + extract_variable, + r#" +struct Entry { message: &str } +fn foo() { + let entry = Entry { message: $0"Hello"$0 }; +} +"#, + r#" +struct Entry { message: &str } +fn foo() { + let $0message = "Hello"; + let entry = Entry { message }; +} +"#, + "Extract into variable", + ); + + check_assist_by_label( + extract_variable, + r#" +struct Entry { message: &str } +fn foo() { + let entry = Entry { message: $0"Hello"$0 }; +} +"#, + r#" +struct Entry { message: &str } +fn foo() { + const $0HELLO: &str = "Hello"; + let entry = Entry { message: HELLO }; +} +"#, + "Extract into constant", + ); + + check_assist_by_label( + extract_variable, + r#" +struct Entry { message: &str } +fn foo() { + let entry = Entry { message: $0"Hello"$0 }; +} +"#, + r#" +struct Entry { message: &str } +fn foo() { + static $0HELLO: &str = "Hello"; + let entry = Entry { message: HELLO }; +} +"#, + "Extract into static", + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs index 14518c4d2cc..c3600af5a6c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -86,7 +86,7 @@ fn item_for_path_search(db: &dyn HirDatabase, item: ItemInNs) -> Option<ItemInNs } fn item_as_assoc(db: &dyn HirDatabase, item: ItemInNs) -> Option<AssocItem> { - item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)) + item.into_module_def().as_assoc_item(db) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs index 
ac88861fe4f..849b8a42c69 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs @@ -51,7 +51,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let candidate = import_assets.import_candidate(); let qualify_candidate = match syntax_under_caret.clone() { NodeOrToken::Node(syntax_under_caret) => match candidate { - ImportCandidate::Path(candidate) if candidate.qualifier.is_some() => { + ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => { cov_mark::hit!(qualify_path_qualifier_start); let path = ast::Path::cast(syntax_under_caret)?; let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?); @@ -219,11 +219,9 @@ fn find_trait_method( } fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> { - let item_module_def = item.as_module_def()?; - - match item_module_def { + match item.into_module_def() { hir::ModuleDef::Trait(trait_) => Some(trait_), - _ => item_module_def.as_assoc_item(db)?.container_trait(db), + item_module_def => item_module_def.as_assoc_item(db)?.container_trait(db), } } @@ -247,7 +245,7 @@ fn label( let import_path = &import.import_path; match candidate { - ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => { + ImportCandidate::Path(candidate) if candidate.qualifier.is_empty() => { format!("Qualify as `{}`", import_path.display(db, edition)) } _ => format!("Qualify with `{}`", import_path.display(db, edition)), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 2dec876215c..31e828eae27 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -78,7 +78,7 @@ pub(crate) fn replace_derive_with_manual_impl( NameToImport::exact_case_sensitive(path.segments().last()?.to_string()), items_locator::AssocSearchMode::Exclude, ) - .filter_map(|item| match item.as_module_def()? { + .filter_map(|item| match item.into_module_def() { ModuleDef::Trait(trait_) => Some(trait_), _ => None, }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs index a83b27867b4..abe7fb132f0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -12,13 +12,15 @@ use syntax::{ use crate::{AssistContext, Assists}; +// FIXME: This ought to be a diagnostic lint. + // Assist: unnecessary_async // // Removes the `async` mark from functions which have no `.await` in their body. // Looks for calls to the functions and removes the `.await` on the call site. // // ``` -// pub async f$0n foo() {} +// pub asy$0nc fn foo() {} // pub async fn bar() { foo().await } // ``` // -> @@ -29,15 +31,11 @@ use crate::{AssistContext, Assists}; pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let function: ast::Fn = ctx.find_node_at_offset()?; - // Do nothing if the cursor is not on the prototype. This is so that the check does not pollute - // when the user asks us for assists when in the middle of the function body. 
- // We consider the prototype to be anything that is before the body of the function. - let cursor_position = ctx.offset(); - if cursor_position >= function.body()?.syntax().text_range().start() { + // Do nothing if the cursor isn't on the async token. + let async_token = function.async_token()?; + if !async_token.text_range().contains_inclusive(ctx.offset()) { return None; } - // Do nothing if the function isn't async. - function.async_token()?; // Do nothing if the function has an `await` expression in its body. if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() { return None; @@ -138,27 +136,22 @@ mod tests { #[test] fn applies_on_empty_function() { - check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}") + check_assist(unnecessary_async, "pub asy$0nc fn f() {}", "pub fn f() {}") } #[test] fn applies_and_removes_whitespace() { - check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}") - } - - #[test] - fn does_not_apply_on_non_async_function() { - check_assist_not_applicable(unnecessary_async, "pub f$0n f() {}") + check_assist(unnecessary_async, "pub async$0 fn f() {}", "pub fn f() {}") } #[test] fn applies_on_function_with_a_non_await_expr() { - check_assist(unnecessary_async, "pub async f$0n f() { f2() }", "pub fn f() { f2() }") + check_assist(unnecessary_async, "pub asy$0nc fn f() { f2() }", "pub fn f() { f2() }") } #[test] fn does_not_apply_on_function_with_an_await_expr() { - check_assist_not_applicable(unnecessary_async, "pub async f$0n f() { f2().await }") + check_assist_not_applicable(unnecessary_async, "pub asy$0nc fn f() { f2().await }") } #[test] @@ -167,7 +160,7 @@ mod tests { unnecessary_async, r#" pub async fn f4() { } -pub async f$0n f2() { } +pub asy$0nc fn f2() { } pub async fn f() { f2().await } pub async fn f3() { f2().await }"#, r#" @@ -184,7 +177,7 @@ pub async fn f3() { f2() }"#, unnecessary_async, r#" pub async fn f4() { } -mod a { pub async f$0n f2() { } } +mod a { pub asy$0nc fn f2() { } } pub async fn f() { a::f2().await } pub async fn f3() { a::f2().await }"#, r#" @@ -202,7 +195,7 @@ pub async fn f3() { a::f2() }"#, // Ensure that it is the first await on the 3rd line that is removed r#" pub async fn f() { f2().await } -pub async f$0n f2() -> i32 { 1 } +pub asy$0nc fn f2() -> i32 { 1 } pub async fn f3() { f4(f2().await).await } pub async fn f4(i: i32) { }"#, r#" @@ -220,7 +213,7 @@ pub async fn f4(i: i32) { }"#, // Ensure that it is the second await on the 3rd line that is removed r#" pub async fn f() { f2().await } -pub async f$0n f2(i: i32) { } +pub async$0 fn f2(i: i32) { } pub async fn f3() { f2(f4().await).await } pub async fn f4() -> i32 { 1 }"#, r#" @@ -237,7 +230,7 @@ pub async fn f4() -> i32 { 1 }"#, unnecessary_async, r#" pub struct S { } -impl S { pub async f$0n f2(&self) { } } +impl S { pub async$0 fn f2(&self) { } } pub async fn f(s: &S) { s.f2().await }"#, r#" pub struct S { } @@ -250,13 +243,13 @@ pub async fn f(s: &S) { s.f2() }"#, fn does_not_apply_on_function_with_a_nested_await_expr() { check_assist_not_applicable( unnecessary_async, - "async f$0n f() { if true { loop { f2().await } } }", + "async$0 fn f() { if true { loop { f2().await } } }", ) } #[test] - fn does_not_apply_when_not_on_prototype() { - check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }") + fn does_not_apply_when_not_on_async_token() { + check_assist_not_applicable(unnecessary_async, "pub async fn$0 f() { f2() }") } #[test] diff --git 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs index 2d918a5b1c1..658600cd2d0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs @@ -198,7 +198,7 @@ fn wrapper_alias( ); ctx.sema.resolve_mod_path(ret_type.syntax(), &wrapper_path).and_then(|def| { - def.filter_map(|def| match def.as_module_def()? { + def.filter_map(|def| match def.into_module_def() { hir::ModuleDef::TypeAlias(alias) => { let enum_ty = alias.ty(ctx.db()).as_adt()?.as_enum()?; (&enum_ty == core_wrapper).then_some(alias) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 87c3d166ee6..78fdfba6a07 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -3280,7 +3280,7 @@ fn doctest_unnecessary_async() { check_doc_test( "unnecessary_async", r#####" -pub async f$0n foo() {} +pub asy$0nc fn foo() {} pub async fn bar() { foo().await } "#####, r#####" diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index f2c360a9d5b..229ce7723b5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -205,7 +205,7 @@ impl S { fn foo(s: S) { s.$0 } "#, expect![[r#" - fd foo u32 + fd foo u32 me bar() fn(&self) "#]], ); @@ -259,7 +259,7 @@ impl S { "#, expect![[r#" fd the_field (u32,) - me foo() fn(self) + me foo() fn(self) "#]], ) } @@ -275,7 +275,7 @@ impl A { "#, expect![[r#" fd the_field (u32, i32) - me foo() fn(&self) + me foo() fn(&self) "#]], ) } @@ -536,7 +536,7 @@ impl A { } "#, expect![[r#" - fd pub_field u32 + fd pub_field u32 me pub_method() fn(&self) "#]], ) @@ -550,7 +550,7 @@ union U { field: u8, other: u16 } fn foo(u: U) { u.$0 } "#, expect![[r#" - fd field u8 + fd field u8 fd other u16 "#]], ); @@ -725,8 +725,8 @@ fn test(a: A) { } "#, expect![[r#" - fd another u32 - fd field u8 + fd another u32 + fd field u8 me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target "#]], ); @@ -748,8 +748,8 @@ fn test(a: A) { } "#, expect![[r#" - fd 0 u8 - fd 1 u32 + fd 0 u8 + fd 1 u32 me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target "#]], ); @@ -770,8 +770,8 @@ fn test(a: A) { } "#, expect![[r#" - fd 0 u8 - fd 1 u32 + fd 0 u8 + fd 1 u32 me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target "#]], ); @@ -964,12 +964,12 @@ struct Foo { field: i32 } impl Foo { fn foo(&self) { $0 } }"#, expect![[r#" - fd self.field i32 + fd self.field i32 me self.foo() fn(&self) - lc self &Foo - sp Self Foo - st Foo Foo - bt u32 u32 + lc self &Foo + sp Self Foo + st Foo Foo + bt u32 u32 "#]], ); check( @@ -978,12 +978,12 @@ struct Foo(i32); impl Foo { fn foo(&mut self) { $0 } }"#, expect![[r#" - fd self.0 i32 + fd self.0 i32 me self.foo() fn(&mut self) - lc self &mut Foo - sp Self Foo - st Foo Foo - bt u32 u32 + lc self &mut Foo + sp Self Foo + st Foo Foo + bt u32 u32 "#]], ); } @@ -1106,7 +1106,7 @@ fn test(a: A) { } "#, expect![[r#" - fd 0 u8 + fd 0 u8 me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target "#]], ); @@ -1162,7 +1162,7 @@ impl<F: core::ops::Deref<Target = impl 
Bar>> Foo<F> { } "#, expect![[r#" - fd foo &u8 + fd foo &u8 me foobar() fn(&self) "#]], ); @@ -1199,8 +1199,8 @@ impl<B: Bar, F: core::ops::Deref<Target = B>> Foo<F> { } "#, expect![[r#" - fd foo &u8 - "#]], + fd foo &u8 + "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index c38a8ef29bb..80d72b460f9 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -537,10 +537,10 @@ impl Test for T { } ", expect![[r#" - sp Self T - st T T + sp Self T + st T T tt Test - bt u32 u32 + bt u32 u32 "#]], ); @@ -646,10 +646,10 @@ impl Test for T { } ", expect![[r#" - sp Self T - st T T + sp Self T + st T T tt Test - bt u32 u32 + bt u32 u32 "#]], ); @@ -663,10 +663,10 @@ impl Test for T { } ", expect![[r#" - sp Self T - st T T + sp Self T + st T T tt Test - bt u32 u32 + bt u32 u32 "#]], ); @@ -682,10 +682,10 @@ impl Test for T { } ", expect![[r#" - sp Self T - st T T + sp Self T + st T T tt Test - bt u32 u32 + bt u32 u32 "#]], ); @@ -730,10 +730,10 @@ impl Test for T { } ", expect![[r#" - sp Self T - st T T + sp Self T + st T T tt Test - bt u32 u32 + bt u32 u32 "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs index 71ca6e99494..4700ed6c1ae 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs @@ -78,19 +78,19 @@ fn foo(a: A) { a.$0 } "#, expect![[r#" me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture - kw await expr.await - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + kw await expr.await + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); @@ -105,19 +105,19 @@ fn foo() { "#, expect![[r#" me into_future() (use core::future::IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture - kw await expr.await - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + kw await expr.await + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } @@ -134,19 +134,19 @@ fn foo(a: A) { a.$0 } "#, expect![[r#" me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture - kw await expr.await - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + kw await expr.await + sn box Box::new(expr) + sn call 
function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 495f82da866..7b57eea0524 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -423,21 +423,21 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn if if expr {} - sn let let - sn letm let mut - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn if if expr {} + sn let let + sn letm let mut + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } @@ -456,19 +456,19 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn if if expr {} - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn if if expr {} + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } @@ -483,18 +483,18 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ) } @@ -509,21 +509,21 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn if if expr {} - sn let let - sn letm let mut - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn if if expr {} + sn let let + sn letm let mut + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 3a661706336..f8d403122d1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -718,7 +718,7 @@ impl<'a> CompletionContext<'a> { expected: (expected_type, expected_name), qualifier_ctx, token, - offset, + original_offset, } = 
expand_and_analyze( &sema, original_file.syntax().clone(), @@ -728,7 +728,7 @@ impl<'a> CompletionContext<'a> { )?; // adjust for macro input, this still fails if there is no token written yet - let scope = sema.scope_at_offset(&token.parent()?, offset)?; + let scope = sema.scope_at_offset(&token.parent()?, original_offset)?; let krate = scope.krate(); let module = scope.module(); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 3b7898b9e86..1c4cbb25b1f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -22,10 +22,14 @@ use crate::context::{ COMPLETION_MARKER, }; +#[derive(Debug)] struct ExpansionResult { original_file: SyntaxNode, speculative_file: SyntaxNode, - offset: TextSize, + /// The offset in the original file. + original_offset: TextSize, + /// The offset in the speculatively expanded file. + speculative_offset: TextSize, fake_ident_token: SyntaxToken, derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>, } @@ -36,7 +40,8 @@ pub(super) struct AnalysisResult { pub(super) qualifier_ctx: QualifierCtx, /// the original token of the expanded file pub(super) token: SyntaxToken, - pub(super) offset: TextSize, + /// The offset in the original file. + pub(super) original_offset: TextSize, } pub(super) fn expand_and_analyze( @@ -54,226 +59,344 @@ pub(super) fn expand_and_analyze( // make the offset point to the start of the original token, as that is what the // intermediate offsets calculated in expansion always points to let offset = offset - relative_offset; - let expansion = - expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset); + let expansion = expand( + sema, + original_file.clone(), + speculative_file.clone(), + offset, + fake_ident_token.clone(), + relative_offset, + ) + .unwrap_or(ExpansionResult { + original_file, + speculative_file, + original_offset: offset, + speculative_offset: fake_ident_token.text_range().start(), + fake_ident_token, + derive_ctx: None, + }); // add the relative offset back, so that left_biased finds the proper token - let offset = expansion.offset + relative_offset; - let token = expansion.original_file.token_at_offset(offset).left_biased()?; + let original_offset = expansion.original_offset + relative_offset; + let token = expansion.original_file.token_at_offset(original_offset).left_biased()?; analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| { - AnalysisResult { analysis, expected, qualifier_ctx, token, offset } + AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset } }) } /// Expand attributes and macro calls at the current cursor position for both the original file /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original /// and speculative states stay in sync. +/// +/// We do this by recursively expanding all macros and picking the best possible match. We cannot just +/// choose the first expansion each time because macros can expand to something that does not include +/// our completion marker, e.g.: +/// ``` +/// macro_rules! helper { ($v:ident) => {} } +/// macro_rules! 
my_macro { +/// ($v:ident) => { +/// helper!($v); +/// $v +/// }; +/// } +/// +/// my_macro!(complete_me_here) +/// ``` +/// If we only expanded the first thing we encounter (which is in fact what this method used to do), we would +/// be unable to complete here, because we would be walking directly into the void. So we instead try +/// *every* possible path. +/// +/// This can also create discrepancies between the speculative and real expansions: because we insert +/// tokens, we insert characters, which means if we try the second occurrence it may not be at the same +/// position in the original and speculative file. We take an educated guess here, and for each token +/// that we check, we subtract `COMPLETION_MARKER.len()`. This may not be accurate because proc macros +/// can insert the text of the completion marker in other places while removing the span, but this is +/// the best we can do. fn expand( sema: &Semantics<'_, RootDatabase>, - mut original_file: SyntaxNode, - mut speculative_file: SyntaxNode, - mut offset: TextSize, - mut fake_ident_token: SyntaxToken, + original_file: SyntaxNode, + speculative_file: SyntaxNode, + original_offset: TextSize, + fake_ident_token: SyntaxToken, relative_offset: TextSize, -) -> ExpansionResult { +) -> Option<ExpansionResult> { let _p = tracing::info_span!("CompletionContext::expand").entered(); - let mut derive_ctx = None; - - 'expansion: loop { - let parent_item = - |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); - let ancestor_items = iter::successors( - Option::zip( - find_node_at_offset::<ast::Item>(&original_file, offset), - find_node_at_offset::<ast::Item>(&speculative_file, offset), + + if !sema.might_be_inside_macro_call(&fake_ident_token) + && original_file + .token_at_offset(original_offset + relative_offset) + .right_biased() + .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token)) + { + // Recursion base case.
+ return Some(ExpansionResult { + original_file, + speculative_file, + original_offset, + speculative_offset: fake_ident_token.text_range().start(), + fake_ident_token, + derive_ctx: None, + }); + } + + let parent_item = + |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); + let ancestor_items = iter::successors( + Option::zip( + find_node_at_offset::<ast::Item>(&original_file, original_offset), + find_node_at_offset::<ast::Item>( + &speculative_file, + fake_ident_token.text_range().start(), ), - |(a, b)| parent_item(a).zip(parent_item(b)), - ); - - // first try to expand attributes as these are always the outermost macro calls - 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items { - match ( - sema.expand_attr_macro(&actual_item), - sema.speculative_expand_attr_macro( - &actual_item, - &item_with_fake_ident, - fake_ident_token.clone(), - ), - ) { - // maybe parent items have attributes, so continue walking the ancestors - (None, None) => continue 'ancestors, - // successful expansions - ( - Some(ExpandResult { value: actual_expansion, err: _ }), - Some((fake_expansion, fake_mapped_token)), - ) => { - let new_offset = fake_mapped_token.text_range().start(); - if new_offset + relative_offset > actual_expansion.text_range().end() { - // offset outside of bounds from the original expansion, - // stop here to prevent problems from happening - break 'expansion; - } - original_file = actual_expansion; - speculative_file = fake_expansion; - fake_ident_token = fake_mapped_token; - offset = new_offset; - continue 'expansion; + ), + |(a, b)| parent_item(a).zip(parent_item(b)), + ); + + // first try to expand attributes as these are always the outermost macro calls + 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items { + match ( + sema.expand_attr_macro(&actual_item), + sema.speculative_expand_attr_macro( + &actual_item, + &item_with_fake_ident, + fake_ident_token.clone(), + ), + ) { + // maybe parent items have attributes, so continue walking the ancestors + (None, None) => continue 'ancestors, + // successful expansions + ( + Some(ExpandResult { value: actual_expansion, err: _ }), + Some((fake_expansion, fake_mapped_tokens)), + ) => { + let mut accumulated_offset_from_fake_tokens = 0; + let actual_range = actual_expansion.text_range().end(); + let result = fake_mapped_tokens + .into_iter() + .filter_map(|(fake_mapped_token, rank)| { + let accumulated_offset = accumulated_offset_from_fake_tokens; + if !fake_mapped_token.text().contains(COMPLETION_MARKER) { + // Proc macros can make the same span with different text, we don't + // want them to participate in completion because the macro author probably + // didn't intend them to. 
+ return None; + } + accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len(); + + let new_offset = fake_mapped_token.text_range().start() + - TextSize::new(accumulated_offset as u32); + if new_offset + relative_offset > actual_range { + // offset outside of bounds from the original expansion, + // stop here to prevent problems from happening + return None; + } + let result = expand( + sema, + actual_expansion.clone(), + fake_expansion.clone(), + new_offset, + fake_mapped_token, + relative_offset, + )?; + Some((result, rank)) + }) + .min_by_key(|(_, rank)| *rank) + .map(|(result, _)| result); + if result.is_some() { + return result; } - // exactly one expansion failed, inconsistent state so stop expanding completely - _ => break 'expansion, } + // exactly one expansion failed, inconsistent state so stop expanding completely + _ => break 'ancestors, } + } - // No attributes have been expanded, so look for macro_call! token trees or derive token trees - let orig_tt = match ancestors_at_offset(&original_file, offset) - .map_while(Either::<ast::TokenTree, ast::Meta>::cast) - .last() - { - Some(it) => it, - None => break 'expansion, - }; - let spec_tt = match ancestors_at_offset(&speculative_file, offset) - .map_while(Either::<ast::TokenTree, ast::Meta>::cast) - .last() - { - Some(it) => it, - None => break 'expansion, - }; - - let (tts, attrs) = match (orig_tt, spec_tt) { - (Either::Left(orig_tt), Either::Left(spec_tt)) => { - let attrs = orig_tt - .syntax() - .parent() - .and_then(ast::Meta::cast) - .and_then(|it| it.parent_attr()) - .zip( - spec_tt - .syntax() - .parent() - .and_then(ast::Meta::cast) - .and_then(|it| it.parent_attr()), - ); - (Some((orig_tt, spec_tt)), attrs) - } - (Either::Right(orig_path), Either::Right(spec_path)) => { - (None, orig_path.parent_attr().zip(spec_path.parent_attr())) - } - _ => break 'expansion, - }; + // No attributes have been expanded, so look for macro_call! 
token trees or derive token trees + let orig_tt = ancestors_at_offset(&original_file, original_offset) + .map_while(Either::<ast::TokenTree, ast::Meta>::cast) + .last()?; + let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start()) + .map_while(Either::<ast::TokenTree, ast::Meta>::cast) + .last()?; + + let (tts, attrs) = match (orig_tt, spec_tt) { + (Either::Left(orig_tt), Either::Left(spec_tt)) => { + let attrs = orig_tt + .syntax() + .parent() + .and_then(ast::Meta::cast) + .and_then(|it| it.parent_attr()) + .zip( + spec_tt + .syntax() + .parent() + .and_then(ast::Meta::cast) + .and_then(|it| it.parent_attr()), + ); + (Some((orig_tt, spec_tt)), attrs) + } + (Either::Right(orig_path), Either::Right(spec_path)) => { + (None, orig_path.parent_attr().zip(spec_path.parent_attr())) + } + _ => return None, + }; - // Expand pseudo-derive expansion aka `derive(Debug$0)` - if let Some((orig_attr, spec_attr)) = attrs { - if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = ( - sema.expand_derive_as_pseudo_attr_macro(&orig_attr), - sema.speculative_expand_derive_as_pseudo_attr_macro( - &orig_attr, - &spec_attr, - fake_ident_token.clone(), - ), - ) { - derive_ctx = Some(( - actual_expansion, - fake_expansion, - fake_mapped_token.text_range().start(), - orig_attr, - )); - break 'expansion; + // Expand pseudo-derive expansion aka `derive(Debug$0)` + if let Some((orig_attr, spec_attr)) = attrs { + if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = ( + sema.expand_derive_as_pseudo_attr_macro(&orig_attr), + sema.speculative_expand_derive_as_pseudo_attr_macro( + &orig_attr, + &spec_attr, + fake_ident_token.clone(), + ), + ) { + if let Some((fake_mapped_token, _)) = + fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank) + { + return Some(ExpansionResult { + original_file, + speculative_file, + original_offset, + speculative_offset: fake_ident_token.text_range().start(), + fake_ident_token, + derive_ctx: Some(( + actual_expansion, + fake_expansion, + fake_mapped_token.text_range().start(), + orig_attr, + )), + }); } + } - if let Some(spec_adt) = - spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it { - ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), - ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), - ast::Item::Union(it) => Some(ast::Adt::Union(it)), - _ => None, - }) - { - // might be the path of derive helper or a token tree inside of one - if let Some(helpers) = sema.derive_helper(&orig_attr) { - for (_mac, file) in helpers { - if let Some((fake_expansion, fake_mapped_token)) = sema - .speculative_expand_raw( - file, - spec_adt.syntax(), - fake_ident_token.clone(), - ) - { - // we are inside a derive helper token tree, treat this as being inside - // the derive expansion - let actual_expansion = sema.parse_or_expand(file.into()); - let new_offset = fake_mapped_token.text_range().start(); - if new_offset + relative_offset > actual_expansion.text_range().end() { - // offset outside of bounds from the original expansion, - // stop here to prevent problems from happening - break 'expansion; - } - original_file = actual_expansion; - speculative_file = fake_expansion; - fake_ident_token = fake_mapped_token; - offset = new_offset; - continue 'expansion; + if let Some(spec_adt) = + spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it { + ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), + ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), + ast::Item::Union(it) 
=> Some(ast::Adt::Union(it)), + _ => None, + }) + { + // might be the path of derive helper or a token tree inside of one + if let Some(helpers) = sema.derive_helper(&orig_attr) { + for (_mac, file) in helpers { + if let Some((fake_expansion, fake_mapped_tokens)) = sema.speculative_expand_raw( + file, + spec_adt.syntax(), + fake_ident_token.clone(), + ) { + // we are inside a derive helper token tree, treat this as being inside + // the derive expansion + let actual_expansion = sema.parse_or_expand(file.into()); + let mut accumulated_offset_from_fake_tokens = 0; + let actual_range = actual_expansion.text_range().end(); + let result = fake_mapped_tokens + .into_iter() + .filter_map(|(fake_mapped_token, rank)| { + let accumulated_offset = accumulated_offset_from_fake_tokens; + if !fake_mapped_token.text().contains(COMPLETION_MARKER) { + // Proc macros can make the same span with different text, we don't + // want them to participate in completion because the macro author probably + // didn't intend them to. + return None; + } + accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len(); + + let new_offset = fake_mapped_token.text_range().start() + - TextSize::new(accumulated_offset as u32); + if new_offset + relative_offset > actual_range { + // offset outside of bounds from the original expansion, + // stop here to prevent problems from happening + return None; + } + let result = expand( + sema, + actual_expansion.clone(), + fake_expansion.clone(), + new_offset, + fake_mapped_token, + relative_offset, + )?; + Some((result, rank)) + }) + .min_by_key(|(_, rank)| *rank) + .map(|(result, _)| result); + if result.is_some() { + return result; } } } } - // at this point we won't have any more successful expansions, so stop - break 'expansion; } + // at this point we won't have any more successful expansions, so stop + return None; + } - // Expand fn-like macro calls - let Some((orig_tt, spec_tt)) = tts else { break 'expansion }; - if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( - orig_tt.syntax().parent().and_then(ast::MacroCall::cast), - spec_tt.syntax().parent().and_then(ast::MacroCall::cast), - ) { - let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text()); - let mac_call_path1 = - macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()); + // Expand fn-like macro calls + let (orig_tt, spec_tt) = tts?; + let (actual_macro_call, macro_call_with_fake_ident) = ( + orig_tt.syntax().parent().and_then(ast::MacroCall::cast)?, + spec_tt.syntax().parent().and_then(ast::MacroCall::cast)?, + ); + let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text()); + let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()); - // inconsistent state, stop expanding - if mac_call_path0 != mac_call_path1 { - break 'expansion; - } - let speculative_args = match macro_call_with_fake_ident.token_tree() { - Some(tt) => tt, - None => break 'expansion, - }; + // inconsistent state, stop expanding + if mac_call_path0 != mac_call_path1 { + return None; + } + let speculative_args = macro_call_with_fake_ident.token_tree()?; + + match ( + sema.expand_macro_call(&actual_macro_call), + sema.speculative_expand_macro_call( + &actual_macro_call, + &speculative_args, + fake_ident_token.clone(), + ), + ) { + // successful expansions + (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => { + let mut accumulated_offset_from_fake_tokens = 0; + let actual_range = actual_expansion.text_range().end(); + 
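// NOTE (editor): the chain below — capture the accumulated shift, drop mapped tokens that
// no longer contain the completion marker, then keep the lowest-ranked candidate — is the
// bookkeeping described in the doc comment on `expand` above. What follows is a minimal,
// self-contained sketch of that arithmetic on plain integers; `Candidate`, `pick_best`, and
// `MARKER_LEN` are hypothetical names invented for illustration and are not part of this
// patch (the real code works on `SyntaxToken`s and `TextSize` and uses `COMPLETION_MARKER.len()`).
const MARKER_LEN: usize = 12; // placeholder; the real code uses COMPLETION_MARKER.len()
struct Candidate {
    offset: usize,    // token start as reported in the speculative expansion
    rank: usize,      // smaller rank = better match for the completion position
    has_marker: bool, // does the mapped token still contain the marker text?
}
/// Returns the adjusted offset of the best-ranked surviving candidate, if any.
fn pick_best(candidates: Vec<Candidate>) -> Option<usize> {
    let mut accumulated = 0usize;
    candidates
        .into_iter()
        .filter_map(|c| {
            // Shift accrued from the markers inserted for candidates checked so far.
            let shift = accumulated;
            if !c.has_marker {
                // The macro synthesized this span without the marker text; skip it.
                return None;
            }
            accumulated += MARKER_LEN;
            // Subtracting the shift approximates the offset in the original expansion.
            Some((c.offset.checked_sub(shift)?, c.rank))
        })
        .min_by_key(|&(_, rank)| rank)
        .map(|(offset, _)| offset)
}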
fake_mapped_tokens + .into_iter() + .filter_map(|(fake_mapped_token, rank)| { + let accumulated_offset = accumulated_offset_from_fake_tokens; + if !fake_mapped_token.text().contains(COMPLETION_MARKER) { + // Proc macros can make the same span with different text, we don't + // want them to participate in completion because the macro author probably + // didn't intend them to. + return None; + } + accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len(); - match ( - sema.expand_macro_call(&actual_macro_call), - sema.speculative_expand_macro_call( - &actual_macro_call, - &speculative_args, - fake_ident_token.clone(), - ), - ) { - // successful expansions - (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { - let new_offset = fake_mapped_token.text_range().start(); - if new_offset + relative_offset > actual_expansion.text_range().end() { + let new_offset = fake_mapped_token.text_range().start() + - TextSize::new(accumulated_offset as u32); + if new_offset + relative_offset > actual_range { // offset outside of bounds from the original expansion, // stop here to prevent problems from happening - break 'expansion; + return None; } - original_file = actual_expansion; - speculative_file = fake_expansion; - fake_ident_token = fake_mapped_token; - offset = new_offset; - continue 'expansion; - } - // at least on expansion failed, we won't have anything to expand from this point - // onwards so break out - _ => break 'expansion, - } + let result = expand( + sema, + actual_expansion.clone(), + fake_expansion.clone(), + new_offset, + fake_mapped_token, + relative_offset, + )?; + Some((result, rank)) + }) + .min_by_key(|(_, rank)| *rank) + .map(|(result, _)| result) } - - // none of our states have changed so stop the loop - break 'expansion; + // at least one expansion failed, we won't have anything to expand from this point + // onwards so break out + _ => None, } - - ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } } /// Fill the completion context, this is what does semantic reasoning about the surrounding context @@ -285,8 +408,14 @@ fn analyze( self_token: &SyntaxToken, ) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> { let _p = tracing::info_span!("CompletionContext::analyze").entered(); - let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } = - expansion_result; + let ExpansionResult { + original_file, + speculative_file, + original_offset: _, + speculative_offset, + fake_ident_token, + derive_ctx, + } = expansion_result; // Overwrite the path kind for derives if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx { @@ -294,7 +423,8 @@ fn analyze( find_node_at_offset(&file_with_fake_ident, offset) { let parent = name_ref.syntax().parent()?; - let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?; + let (mut nameref_ctx, _) = + classify_name_ref(sema, &original_file, name_ref, offset, parent)?; if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind { path_ctx.kind = PathKind::Derive { existing_derives: sema @@ -314,7 +444,7 @@ fn analyze( return None; } - let Some(name_like) = find_node_at_offset(&speculative_file, offset) else { + let Some(name_like) = find_node_at_offset(&speculative_file, speculative_offset) else { let analysis = if let Some(original) = ast::String::cast(original_token.clone()) { CompletionAnalysis::String { original, expanded: 
ast::String::cast(self_token.clone()) } } else { @@ -350,8 +480,13 @@ fn analyze( } ast::NameLike::NameRef(name_ref) => { let parent = name_ref.syntax().parent()?; - let (nameref_ctx, qualifier_ctx) = - classify_name_ref(sema, &original_file, name_ref, parent)?; + let (nameref_ctx, qualifier_ctx) = classify_name_ref( + sema, + &original_file, + name_ref, + expansion_result.original_offset, + parent, + )?; if let NameRefContext { kind: @@ -636,9 +771,10 @@ fn classify_name_ref( sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, name_ref: ast::NameRef, + original_offset: TextSize, parent: SyntaxNode, ) -> Option<(NameRefContext, QualifierCtx)> { - let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start()); + let nameref = find_node_at_offset(original_file, original_offset); let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default()); @@ -760,7 +896,7 @@ fn classify_name_ref( // We do not want to generate path completions when we are sandwiched between an item decl signature and its body. // ex. trait Foo $0 {} // in these cases parser recovery usually kicks in for our inserted identifier, causing it - // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block + // to either be parsed as an ExprStmt or a ItemRecovery, depending on whether it is in a block // expression or an item list. // The following code checks if the body is missing, if it is we either cut off the body // from the item or it was missing in the first place @@ -1088,15 +1224,10 @@ fn classify_name_ref( PathKind::Type { location: location.unwrap_or(TypeLocation::Other) } }; - let mut kind_macro_call = |it: ast::MacroCall| { - path_ctx.has_macro_bang = it.excl_token().is_some(); - let parent = it.syntax().parent()?; - // Any path in an item list will be treated as a macro call by the parser + let kind_item = |it: &SyntaxNode| { + let parent = it.parent()?; let kind = match_ast! { match parent { - ast::MacroExpr(expr) => make_path_kind_expr(expr.into()), - ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}, - ast::MacroType(ty) => make_path_kind_type(ty.into()), ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module }, ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() { Some(it) => match_ast! { @@ -1126,6 +1257,23 @@ fn classify_name_ref( }; Some(kind) }; + + let mut kind_macro_call = |it: ast::MacroCall| { + path_ctx.has_macro_bang = it.excl_token().is_some(); + let parent = it.syntax().parent()?; + if let Some(kind) = kind_item(it.syntax()) { + return Some(kind); + } + let kind = match_ast! { + match parent { + ast::MacroExpr(expr) => make_path_kind_expr(expr.into()), + ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}, + ast::MacroType(ty) => make_path_kind_type(ty.into()), + _ => return None, + } + }; + Some(kind) + }; let make_path_kind_attr = |meta: ast::Meta| { let attr = meta.parent_attr()?; let kind = attr.kind(); @@ -1153,94 +1301,98 @@ fn classify_name_ref( // Infer the path kind let parent = path.syntax().parent()?; - let kind = match_ast! { - match parent { - ast::PathType(it) => make_path_kind_type(it.into()), - ast::PathExpr(it) => { - if let Some(p) = it.syntax().parent() { - let p_kind = p.kind(); - // The syntax node of interest, for which we want to check whether - // it is sandwiched between an item decl signature and its body. 
- let probe = if ast::ExprStmt::can_cast(p_kind) { - Some(p) - } else if ast::StmtList::can_cast(p_kind) { - Some(it.syntax().clone()) - } else { - None - }; - if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) { - return Some(make_res(NameRefKind::Keyword(kind))); - } - } + let kind = 'find_kind: { + if parent.kind() == SyntaxKind::ERROR { + if let Some(kind) = inbetween_body_and_decl_check(parent.clone()) { + return Some(make_res(NameRefKind::Keyword(kind))); + } - path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); + break 'find_kind kind_item(&parent)?; + } + match_ast! { + match parent { + ast::PathType(it) => make_path_kind_type(it.into()), + ast::PathExpr(it) => { + if let Some(p) = it.syntax().parent() { + let p_kind = p.kind(); + // The syntax node of interest, for which we want to check whether + // it is sandwiched between an item decl signature and its body. + let probe = if ast::ExprStmt::can_cast(p_kind) { + Some(p) + } else if ast::StmtList::can_cast(p_kind) { + Some(it.syntax().clone()) + } else { + None + }; + if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) { + return Some(make_res(NameRefKind::Keyword(kind))); + } + } - make_path_kind_expr(it.into()) - }, - ast::TupleStructPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::RecordPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::PathPat(it) => { - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::MacroCall(it) => { - // A macro call in this position is usually a result of parsing recovery, so check that - if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { - return Some(make_res(NameRefKind::Keyword(kind))); - } + path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); - kind_macro_call(it)? - }, - ast::Meta(meta) => make_path_kind_attr(meta)?, - ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, - ast::UseTree(_) => PathKind::Use, - // completing inside a qualifier - ast::Path(parent) => { - path_ctx.parent = Some(parent.clone()); - let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?; - match_ast! { - match parent { - ast::PathType(it) => make_path_kind_type(it.into()), - ast::PathExpr(it) => { - path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); + make_path_kind_expr(it.into()) + }, + ast::TupleStructPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::RecordPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::PathPat(it) => { + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} + }, + ast::MacroCall(it) => { + kind_macro_call(it)? + }, + ast::Meta(meta) => make_path_kind_attr(meta)?, + ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, + ast::UseTree(_) => PathKind::Use, + // completing inside a qualifier + ast::Path(parent) => { + path_ctx.parent = Some(parent.clone()); + let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?; + match_ast! 
{ + match parent { + ast::PathType(it) => make_path_kind_type(it.into()), + ast::PathExpr(it) => { + path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); - make_path_kind_expr(it.into()) - }, - ast::TupleStructPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::RecordPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::PathPat(it) => { - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::MacroCall(it) => { - kind_macro_call(it)? - }, - ast::Meta(meta) => make_path_kind_attr(meta)?, - ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, - ast::UseTree(_) => PathKind::Use, - ast::RecordExpr(it) => make_path_kind_expr(it.into()), - _ => return None, + make_path_kind_expr(it.into()) + }, + ast::TupleStructPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::RecordPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::PathPat(it) => { + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} + }, + ast::MacroCall(it) => { + kind_macro_call(it)? + }, + ast::Meta(meta) => make_path_kind_attr(meta)?, + ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, + ast::UseTree(_) => PathKind::Use, + ast::RecordExpr(it) => make_path_kind_expr(it.into()), + _ => return None, + } } - } - }, - ast::RecordExpr(it) => { - // A record expression in this position is usually a result of parsing recovery, so check that - if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { - return Some(make_res(NameRefKind::Keyword(kind))); - } - make_path_kind_expr(it.into()) - }, - _ => return None, + }, + ast::RecordExpr(it) => { + // A record expression in this position is usually a result of parsing recovery, so check that + if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { + return Some(make_res(NameRefKind::Keyword(kind))); + } + make_path_kind_expr(it.into()) + }, + _ => return None, + } } }; @@ -1320,9 +1472,7 @@ fn classify_name_ref( } }) } - PathKind::Item { .. } => { - parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind())) - } + PathKind::Item { .. } => parent.ancestors().find(|it| it.kind() == SyntaxKind::ERROR), _ => None, }; if let Some(top) = top_node { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 8878fbbea30..9608eed99d8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -10,7 +10,7 @@ use ide_db::{ }; use itertools::Itertools; use smallvec::SmallVec; -use stdx::{impl_from, never}; +use stdx::{format_to, impl_from, never}; use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize}; use crate::{ @@ -27,10 +27,7 @@ use crate::{ #[non_exhaustive] pub struct CompletionItem { /// Label in the completion pop up which identifies completion. - pub label: SmolStr, - /// Additional label details in the completion pop up that are - /// displayed and aligned on the right side after the label. 
- pub label_detail: Option<SmolStr>, + pub label: CompletionItemLabel, /// Range of identifier that is being completed. /// @@ -89,11 +86,23 @@ pub struct CompletionItem { pub import_to_add: SmallVec<[(String, String); 1]>, } +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct CompletionItemLabel { + /// The primary label for the completion item. + pub primary: SmolStr, + /// The left detail for the completion item, usually rendered right next to the primary label. + pub detail_left: Option<String>, + /// The right detail for the completion item, usually rendered right aligned at the end of the completion item. + pub detail_right: Option<String>, +} // We use custom debug for CompletionItem to make snapshot tests more readable. impl fmt::Debug for CompletionItem { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut s = f.debug_struct("CompletionItem"); - s.field("label", &self.label).field("source_range", &self.source_range); + s.field("label", &self.label.primary) + .field("detail_left", &self.label.detail_left) + .field("detail_right", &self.label.detail_right) + .field("source_range", &self.source_range); if self.text_edit.len() == 1 { let atom = self.text_edit.iter().next().unwrap(); s.field("delete", &atom.delete); @@ -102,7 +111,7 @@ impl fmt::Debug for CompletionItem { s.field("text_edit", &self.text_edit); } s.field("kind", &self.kind); - if self.lookup() != self.label { + if self.lookup() != self.label.primary { s.field("lookup", &self.lookup()); } if let Some(detail) = &self.detail { @@ -434,7 +443,7 @@ impl CompletionItem { self.ref_match.map(|(mutability, offset)| { ( - format!("&{}{}", mutability.as_keyword_for_ref(), self.label), + format!("&{}{}", mutability.as_keyword_for_ref(), self.label.primary), ide_db::text_edit::Indel::insert( offset, format!("&{}", mutability.as_keyword_for_ref()), @@ -488,13 +497,13 @@ impl Builder { let _p = tracing::info_span!("item::Builder::build").entered(); let label = self.label; - let mut label_detail = None; let mut lookup = self.lookup.unwrap_or_else(|| label.clone()); let insert_text = self.insert_text.unwrap_or_else(|| label.to_string()); + let mut detail_left = None; if !self.doc_aliases.is_empty() { let doc_aliases = self.doc_aliases.iter().join(", "); - label_detail.replace(format_smolstr!(" (alias {doc_aliases})")); + detail_left = Some(format!("(alias {doc_aliases})")); let lookup_doc_aliases = self .doc_aliases .iter() @@ -516,16 +525,20 @@ impl Builder { } if let [import_edit] = &*self.imports_to_add { // snippets can have multiple imports, but normal completions only have up to one - label_detail.replace(format_smolstr!( - "{} (use {})", - label_detail.as_deref().unwrap_or_default(), + let detail_left = detail_left.get_or_insert_with(String::new); + format_to!( + detail_left, + "{}(use {})", + if detail_left.is_empty() { "" } else { " " }, import_edit.import_path.display(db, self.edition) - )); + ); } else if let Some(trait_name) = self.trait_name { - label_detail.replace(format_smolstr!( - "{} (as {trait_name})", - label_detail.as_deref().unwrap_or_default(), - )); + let detail_left = detail_left.get_or_insert_with(String::new); + format_to!( + detail_left, + "{}(as {trait_name})", + if detail_left.is_empty() { "" } else { " " }, + ); } let text_edit = match self.text_edit { @@ -546,8 +559,11 @@ impl Builder { CompletionItem { source_range: self.source_range, - label, - label_detail, + label: CompletionItemLabel { + primary: label, + detail_left, + detail_right: self.detail.clone(), + }, text_edit, 
is_snippet: self.is_snippet, detail: self.detail, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index 0f00ad45f98..baa30b28630 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -748,9 +748,9 @@ mod tests { let tag = it.kind.tag(); let relevance = display_relevance(it.relevance); items.push(format!( - "{tag} {}{} {relevance}\n", - it.label, - it.label_detail.clone().unwrap_or_default(), + "{tag} {} {} {relevance}\n", + it.label.primary, + it.label.detail_right.clone().unwrap_or_default(), )); if let Some((label, _indel, relevance)) = it.ref_match() { @@ -812,13 +812,13 @@ fn main() { } "#, expect![[r#" - st dep::test_mod_b::Struct {…} [type_could_unify] - ex dep::test_mod_b::Struct { } [type_could_unify] - st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import] - fn main() [] - fn test(…) [] - md dep [] - st Struct (use dep::test_mod_a::Struct) [requires_import] + st dep::test_mod_b::Struct {…} dep::test_mod_b::Struct { } [type_could_unify] + ex dep::test_mod_b::Struct { } [type_could_unify] + st Struct Struct [type_could_unify+requires_import] + fn main() fn() [] + fn test(…) fn(Struct) [] + md dep [] + st Struct Struct [requires_import] "#]], ); } @@ -852,11 +852,11 @@ fn main() { } "#, expect![[r#" - un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import] - fn main() [] - fn test(…) [] - md dep [] - en Union (use dep::test_mod_a::Union) [requires_import] + un Union Union [type_could_unify+requires_import] + fn main() fn() [] + fn test(…) fn(Union) [] + md dep [] + en Union Union [requires_import] "#]], ); } @@ -888,13 +888,13 @@ fn main() { } "#, expect![[r#" - ev dep::test_mod_b::Enum::variant [type_could_unify] - ex dep::test_mod_b::Enum::variant [type_could_unify] - en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import] - fn main() [] - fn test(…) [] - md dep [] - en Enum (use dep::test_mod_a::Enum) [requires_import] + ev dep::test_mod_b::Enum::variant dep::test_mod_b::Enum::variant [type_could_unify] + ex dep::test_mod_b::Enum::variant [type_could_unify] + en Enum Enum [type_could_unify+requires_import] + fn main() fn() [] + fn test(…) fn(Enum) [] + md dep [] + en Enum Enum [requires_import] "#]], ); } @@ -926,11 +926,11 @@ fn main() { } "#, expect![[r#" - ev dep::test_mod_b::Enum::Variant [type_could_unify] - ex dep::test_mod_b::Enum::Variant [type_could_unify] - fn main() [] - fn test(…) [] - md dep [] + ev dep::test_mod_b::Enum::Variant dep::test_mod_b::Enum::Variant [type_could_unify] + ex dep::test_mod_b::Enum::Variant [type_could_unify] + fn main() fn() [] + fn test(…) fn(Enum) [] + md dep [] "#]], ); } @@ -958,11 +958,11 @@ fn main() { } "#, expect![[r#" - fn main() [] - fn test(…) [] - md dep [] - fn function (use dep::test_mod_a::function) [requires_import] - fn function(…) (use dep::test_mod_b::function) [requires_import] + fn main() fn() [] + fn test(…) fn(fn(usize) -> i32) [] + md dep [] + fn function fn(usize) -> i32 [requires_import] + fn function(…) fn(isize) -> i32 [requires_import] "#]], ); } @@ -990,11 +990,11 @@ fn main() { } "#, expect![[r#" - ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import] - fn main() [] - fn test(…) [] - md dep [] - ct CONST (use dep::test_mod_a::CONST) [requires_import] + ct CONST i32 [type_could_unify+requires_import] + fn main() fn() [] + fn test(…) fn(i32) [] + md dep [] + ct CONST i64 
[requires_import] "#]], ); } @@ -1022,11 +1022,11 @@ fn main() { } "#, expect![[r#" - sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import] - fn main() [] - fn test(…) [] - md dep [] - sc STATIC (use dep::test_mod_a::STATIC) [requires_import] + sc STATIC i32 [type_could_unify+requires_import] + fn main() fn() [] + fn test(…) fn(i32) [] + md dep [] + sc STATIC i64 [requires_import] "#]], ); } @@ -1058,7 +1058,7 @@ fn main() { "#, expect![[r#" - me Function [] + me Function fn(&self, i32) -> bool [] "#]], ); } @@ -1081,14 +1081,14 @@ fn func(input: Struct) { } "#, expect![[r#" - st Struct [type] - st Self [type] - sp Self [type] - st Struct [type] - ex Struct [type] - lc self [local] - fn func(…) [] - me self.test() [] + st Struct Struct [type] + st Self Self [type] + sp Self Struct [type] + st Struct Struct [type] + ex Struct [type] + lc self &Struct [local] + fn func(…) fn(Struct) [] + me self.test() fn(&self) [] "#]], ); } @@ -1109,13 +1109,13 @@ fn main() { } "#, expect![[r#" - lc input [type+name+local] - ex input [type] - ex true [type] - ex false [type] - lc inputbad [local] - fn main() [] - fn test(…) [] + lc input bool [type+name+local] + ex input [type] + ex true [type] + ex false [type] + lc inputbad i32 [local] + fn main() fn() [] + fn test(…) fn(bool) [] "#]], ); } @@ -1133,6 +1133,10 @@ fn main() { Foo::Fo$0 } [ CompletionItem { label: "Foo {…}", + detail_left: None, + detail_right: Some( + "Foo { x: i32, y: i32 }", + ), source_range: 54..56, delete: 54..56, insert: "Foo { x: ${1:()}, y: ${2:()} }$0", @@ -1161,6 +1165,10 @@ fn main() { Foo::Fo$0 } [ CompletionItem { label: "Foo(…)", + detail_left: None, + detail_right: Some( + "Foo(i32, i32)", + ), source_range: 46..48, delete: 46..48, insert: "Foo(${1:()}, ${2:()})$0", @@ -1189,6 +1197,10 @@ fn main() { fo$0 } [ CompletionItem { label: "foo(…)", + detail_left: None, + detail_right: Some( + "fn(u32, u32, T) -> (u32, T)", + ), source_range: 68..70, delete: 68..70, insert: "foo(${1:a}, ${2:b}, ${3:t})$0", @@ -1201,6 +1213,10 @@ fn main() { fo$0 } }, CompletionItem { label: "main()", + detail_left: None, + detail_right: Some( + "fn()", + ), source_range: 68..70, delete: 68..70, insert: "main();$0", @@ -1228,6 +1244,10 @@ fn main() { Foo::Fo$0 } [ CompletionItem { label: "Foo", + detail_left: None, + detail_right: Some( + "Foo", + ), source_range: 35..37, delete: 35..37, insert: "Foo$0", @@ -1260,6 +1280,10 @@ fn main() { let _: m::Spam = S$0 } [ CompletionItem { label: "main()", + detail_left: None, + detail_right: Some( + "fn()", + ), source_range: 75..76, delete: 75..76, insert: "main();$0", @@ -1271,6 +1295,8 @@ fn main() { let _: m::Spam = S$0 } }, CompletionItem { label: "m", + detail_left: None, + detail_right: None, source_range: 75..76, delete: 75..76, insert: "m", @@ -1280,6 +1306,10 @@ fn main() { let _: m::Spam = S$0 } }, CompletionItem { label: "m::Spam::Bar(…)", + detail_left: None, + detail_right: Some( + "m::Spam::Bar(i32)", + ), source_range: 75..76, delete: 75..76, insert: "m::Spam::Bar(${1:()})$0", @@ -1305,6 +1335,10 @@ fn main() { let _: m::Spam = S$0 } }, CompletionItem { label: "m::Spam::Foo", + detail_left: None, + detail_right: Some( + "m::Spam::Foo", + ), source_range: 75..76, delete: 75..76, insert: "m::Spam::Foo$0", @@ -1347,6 +1381,10 @@ fn main() { som$0 } [ CompletionItem { label: "main()", + detail_left: None, + detail_right: Some( + "fn()", + ), source_range: 56..59, delete: 56..59, insert: "main();$0", @@ -1358,6 +1396,10 @@ fn main() { som$0 } }, CompletionItem { label: 
"something_deprecated()", + detail_left: None, + detail_right: Some( + "fn()", + ), source_range: 56..59, delete: 56..59, insert: "something_deprecated();$0", @@ -1382,6 +1424,10 @@ fn foo() { A { the$0 } } [ CompletionItem { label: "the_field", + detail_left: None, + detail_right: Some( + "u32", + ), source_range: 57..60, delete: 57..60, insert: "the_field", @@ -1429,6 +1475,10 @@ impl S { [ CompletionItem { label: "bar()", + detail_left: None, + detail_right: Some( + "fn(self)", + ), source_range: 94..94, delete: 94..94, insert: "bar();$0", @@ -1460,6 +1510,10 @@ impl S { }, CompletionItem { label: "foo", + detail_left: None, + detail_right: Some( + "{unknown}", + ), source_range: 94..94, delete: 94..94, insert: "foo", @@ -1498,6 +1552,8 @@ use self::E::*; [ CompletionItem { label: "my", + detail_left: None, + detail_right: None, source_range: 10..12, delete: 10..12, insert: "my", @@ -1510,6 +1566,10 @@ use self::E::*; }, CompletionItem { label: "V", + detail_left: None, + detail_right: Some( + "V", + ), source_range: 10..12, delete: 10..12, insert: "V$0", @@ -1524,6 +1584,10 @@ use self::E::*; }, CompletionItem { label: "E", + detail_left: None, + detail_right: Some( + "E", + ), source_range: 10..12, delete: 10..12, insert: "E", @@ -1556,6 +1620,10 @@ fn foo(s: S) { s.$0 } [ CompletionItem { label: "the_method()", + detail_left: None, + detail_right: Some( + "fn(&self)", + ), source_range: 81..81, delete: 81..81, insert: "the_method();$0", @@ -1729,9 +1797,9 @@ fn test(bar: u32) { } fn foo(s: S) { test(s.$0) } "#, expect![[r#" - fd bar [type+name] - fd baz [type] - fd foo [] + fd bar u32 [type+name] + fd baz u32 [type] + fd foo i64 [] "#]], ); } @@ -1745,9 +1813,9 @@ struct B { x: (), y: f32, bar: u32 } fn foo(a: A) { B { bar: a.$0 }; } "#, expect![[r#" - fd bar [type+name] - fd baz [type] - fd foo [] + fd bar u32 [type+name] + fd baz u32 [type] + fd foo i64 [] "#]], ) } @@ -1768,6 +1836,10 @@ fn f() -> i32 { [ CompletionItem { label: "0", + detail_left: None, + detail_right: Some( + "i32", + ), source_range: 56..57, delete: 56..57, insert: "0", @@ -1804,9 +1876,9 @@ fn f(foo: i64) { } fn foo(a: A) { B { bar: f(a.$0) }; } "#, expect![[r#" - fd foo [type+name] - fd bar [] - fd baz [] + fd foo i64 [type+name] + fd bar u32 [] + fd baz u32 [] "#]], ); check_relevance( @@ -1817,9 +1889,9 @@ fn f(foo: i64) { } fn foo(a: A) { f(B { bar: a.$0 }); } "#, expect![[r#" - fd bar [type+name] - fd baz [type] - fd foo [] + fd bar u32 [type+name] + fd baz u32 [type] + fd foo i64 [] "#]], ); } @@ -1832,13 +1904,13 @@ struct WorldSnapshot { _f: () }; fn go(world: &WorldSnapshot) { go(w$0) } "#, expect![[r#" - lc world [type+name+local] - ex world [type] - st WorldSnapshot {…} [] + lc world &WorldSnapshot [type+name+local] + ex world [type] + st WorldSnapshot {…} WorldSnapshot { _f: () } [] st &WorldSnapshot {…} [type] - st WorldSnapshot [] + st WorldSnapshot WorldSnapshot [] st &WorldSnapshot [type] - fn go(…) [] + fn go(…) fn(&WorldSnapshot) [] "#]], ); } @@ -1852,9 +1924,9 @@ struct Foo; fn f(foo: &Foo) { f(foo, w$0) } "#, expect![[r#" - lc foo [local] - st Foo [] - fn f(…) [] + lc foo &Foo [local] + st Foo Foo [] + fn f(…) fn(&Foo) [] "#]], ); } @@ -1869,12 +1941,12 @@ fn bar() -> u8 { 0 } fn f() { A { bar: b$0 }; } "#, expect![[r#" - fn bar() [type+name] - fn baz() [type] - ex bar() [type] - ex baz() [type] - st A [] - fn f() [] + fn bar() fn() -> u8 [type+name] + fn baz() fn() -> u8 [type] + ex bar() [type] + ex baz() [type] + st A A [] + fn f() fn() [] "#]], ); } @@ -1895,9 +1967,9 @@ fn f() { } "#, 
expect![[r#" - me aaa() [type+name] - me bbb() [type] - me ccc() [] + me aaa() fn(&self) -> u32 [type+name] + me bbb() fn(&self) -> u32 [type] + me ccc() fn(&self) -> u64 [] "#]], ); } @@ -1916,7 +1988,7 @@ fn f() { } "#, expect![[r#" - me aaa() [name] + me aaa() fn(&self) -> u64 [name] "#]], ); } @@ -1934,14 +2006,14 @@ fn main() { } "#, expect![[r#" - lc s [name+local] + lc s S [name+local] lc &mut s [type+name+local] - st S [] + st S S [] st &mut S [type] - st S [] + st S S [] st &mut S [type] - fn foo(…) [] - fn main() [] + fn foo(…) fn(&mut S) [] + fn main() fn() [] "#]], ); check_relevance( @@ -1954,13 +2026,13 @@ fn main() { } "#, expect![[r#" - lc s [type+name+local] - st S [type] - st S [type] - ex s [type] - ex S [type] - fn foo(…) [] - fn main() [] + lc s S [type+name+local] + st S S [type] + st S S [type] + ex s [type] + ex S [type] + fn foo(…) fn(&mut S) [] + fn main() fn() [] "#]], ); check_relevance( @@ -1973,13 +2045,13 @@ fn main() { } "#, expect![[r#" - lc ssss [type+local] - st S [type] - st S [type] - ex ssss [type] - ex S [type] - fn foo(…) [] - fn main() [] + lc ssss S [type+local] + st S S [type] + st S S [type] + ex ssss [type] + ex S [type] + fn foo(…) fn(&mut S) [] + fn main() fn() [] "#]], ); } @@ -2010,19 +2082,19 @@ fn main() { } "#, expect![[r#" - ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify] - lc m [local] - lc t [local] + ex core::ops::Deref::deref(&t) [type_could_unify] + lc m i32 [local] + lc t T [local] lc &t [type+local] - st S [] + st S S [] st &S [type] - st S [] + st S S [] st &S [type] - st T [] + st T T [] st &T [type] - fn foo(…) [] - fn main() [] - md core [] + fn foo(…) fn(&S) [] + fn main() fn() [] + md core [] "#]], ) } @@ -2059,19 +2131,19 @@ fn main() { } "#, expect![[r#" - ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify] - lc m [local] - lc t [local] + ex core::ops::DerefMut::deref_mut(&mut t) [type_could_unify] + lc m i32 [local] + lc t T [local] lc &mut t [type+local] - st S [] + st S S [] st &mut S [type] - st S [] + st S S [] st &mut S [type] - st T [] + st T T [] st &mut T [type] - fn foo(…) [] - fn main() [] - md core [] + fn foo(…) fn(&mut S) [] + fn main() fn() [] + md core [] "#]], ) } @@ -2087,9 +2159,9 @@ fn foo(bar: u32) { } "#, expect![[r#" - lc baz [local] - lc bar [local] - fn foo(…) [] + lc baz i32 [local] + lc bar u32 [local] + fn foo(…) fn(u32) [] "#]], ); } @@ -2105,13 +2177,13 @@ fn foo() { fn bar(t: Foo) {} "#, expect![[r#" - ev Foo::A [type] - ev Foo::B [type] - en Foo [type] - ex Foo::A [type] - ex Foo::B [type] - fn bar(…) [] - fn foo() [] + ev Foo::A Foo::A [type] + ev Foo::B Foo::B [type] + en Foo Foo [type] + ex Foo::A [type] + ex Foo::B [type] + fn bar(…) fn(Foo) [] + fn foo() fn() [] "#]], ); } @@ -2127,14 +2199,14 @@ fn foo() { fn bar(t: &Foo) {} "#, expect![[r#" - ev Foo::A [] + ev Foo::A Foo::A [] ev &Foo::A [type] - ev Foo::B [] + ev Foo::B Foo::B [] ev &Foo::B [type] - en Foo [] + en Foo Foo [] en &Foo [type] - fn bar(…) [] - fn foo() [] + fn bar(…) fn(&Foo) [] + fn foo() fn() [] "#]], ); } @@ -2163,18 +2235,18 @@ fn main() { } "#, expect![[r#" - ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify] - st S [] + ex core::ops::Deref::deref(&bar()) [type_could_unify] + st S S [] st &S [type] - st S [] + st S S [] st &S [type] - st T [] + st T T [] st &T [type] - fn bar() [] + fn bar() fn() -> T [] fn &bar() [type] - fn foo(…) [] - fn main() [] - md core [] + fn foo(…) fn(&S) [] + fn main() fn() [] + md core [] "#]], ) } @@ 
-2191,7 +2263,7 @@ impl Sub for u32 {} fn foo(a: u32) { a.$0 } "#, expect![[r#" - me sub(…) (as Sub) [op_method] + me sub(…) fn(self, Self) -> Self [op_method] "#]], ); check_relevance( @@ -2212,9 +2284,9 @@ fn main() { } "#, expect![[r#" - fn new() [] - me eq(…) (as PartialEq) [op_method] - me ne(…) (as PartialEq) [op_method] + fn new() fn() -> Foo [] + me eq(…) fn(&self, &Rhs) -> bool [op_method] + me ne(…) fn(&self, &Rhs) -> bool [op_method] "#]], ); } @@ -2238,9 +2310,9 @@ fn test() { } "#, expect![[r#" - fn fn_ctr() [type_could_unify] - fn fn_ctr_self() [type_could_unify] - fn fn_another(…) [type_could_unify] + fn fn_ctr() fn() -> Foo [type_could_unify] + fn fn_ctr_self() fn() -> Option<Foo> [type_could_unify] + fn fn_another(…) fn(u32) -> Other [type_could_unify] "#]], ); } @@ -2384,12 +2456,12 @@ fn test() { // Constructor // Others expect![[r#" - fn fn_direct_ctr() [type_could_unify] - fn fn_ctr_with_args(…) [type_could_unify] - fn fn_builder() [type_could_unify] - fn fn_ctr() [type_could_unify] - me fn_no_ret(…) [type_could_unify] - fn fn_other() [type_could_unify] + fn fn_direct_ctr() fn() -> Foo [type_could_unify] + fn fn_ctr_with_args(…) fn(u32) -> Foo [type_could_unify] + fn fn_builder() fn() -> FooBuilder [type_could_unify] + fn fn_ctr() fn() -> Result<Foo> [type_could_unify] + me fn_no_ret(…) fn(&self) [type_could_unify] + fn fn_other() fn() -> Result<u32> [type_could_unify] "#]], ); @@ -2420,14 +2492,14 @@ fn test() { } "#, expect![[r#" - fn fn_direct_ctr() [type_could_unify] - fn fn_ctr_with_args(…) [type_could_unify] - fn fn_builder() [type_could_unify] - fn fn_ctr_wrapped() [type_could_unify] - fn fn_ctr_wrapped_2() [type_could_unify] - me fn_returns_unit(…) [type_could_unify] - fn fn_other() [type_could_unify] - "#]], + fn fn_direct_ctr() fn() -> Foo<T> [type_could_unify] + fn fn_ctr_with_args(…) fn(T) -> Foo<T> [type_could_unify] + fn fn_builder() fn() -> FooBuilder [type_could_unify] + fn fn_ctr_wrapped() fn() -> Option<Foo<T>> [type_could_unify] + fn fn_ctr_wrapped_2() fn() -> Result<Foo<T>, u32> [type_could_unify] + me fn_returns_unit(…) fn(&self) [type_could_unify] + fn fn_other() fn() -> Option<u32> [type_could_unify] + "#]], ); } @@ -2456,13 +2528,13 @@ fn test() { } "#, expect![[r#" - fn fn_direct_ctr() [type_could_unify] - fn fn_ctr_with_args(…) [type_could_unify] - fn fn_builder() [type_could_unify] - fn fn_ctr() [type_could_unify] - fn fn_ctr2() [type_could_unify] - me fn_no_ret(…) [type_could_unify] - fn fn_other() [type_could_unify] + fn fn_direct_ctr() fn() -> Foo<T> [type_could_unify] + fn fn_ctr_with_args(…) fn(T) -> Foo<T> [type_could_unify] + fn fn_builder() fn() -> FooBuilder [type_could_unify] + fn fn_ctr() fn() -> Option<Foo<T>> [type_could_unify] + fn fn_ctr2() fn() -> Result<Foo<T>, u32> [type_could_unify] + me fn_no_ret(…) fn(&self) [type_could_unify] + fn fn_other() fn() -> Option<u32> [type_could_unify] "#]], ); } @@ -2484,6 +2556,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } [ CompletionItem { label: "baz()", + detail_left: None, + detail_right: Some( + "fn(&self) -> u32", + ), source_range: 109..110, delete: 109..110, insert: "baz()$0", @@ -2513,6 +2589,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } }, CompletionItem { label: "bar", + detail_left: None, + detail_right: Some( + "u32", + ), source_range: 109..110, delete: 109..110, insert: "bar", @@ -2524,6 +2604,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } }, CompletionItem { label: "qux", + detail_left: None, + detail_right: Some( + "fn()", + ), source_range: 109..110, text_edit: TextEdit { 
indels: [ @@ -2562,6 +2646,10 @@ fn foo() { [ CompletionItem { label: "field", + detail_left: None, + detail_right: Some( + "fn()", + ), source_range: 76..78, delete: 76..78, insert: "field", @@ -2610,6 +2698,10 @@ fn main() { [ CompletionItem { label: "foo()", + detail_left: None, + detail_right: Some( + "fn() -> S", + ), source_range: 95..95, delete: 95..95, insert: "foo()$0", @@ -2661,15 +2753,15 @@ fn foo() { } "#, expect![[r#" - lc foo [type+local] - ex foo [type] - ex Foo::B [type] - ev Foo::A(…) [type_could_unify] - ev Foo::B [type_could_unify] - en Foo [type_could_unify] - fn foo() [] - fn bar() [] - fn baz() [] + lc foo Foo<u32> [type+local] + ex foo [type] + ex Foo::B [type] + ev Foo::A(…) Foo::A(T) [type_could_unify] + ev Foo::B Foo::B [type_could_unify] + en Foo Foo<{unknown}> [type_could_unify] + fn foo() fn() [] + fn bar() fn() -> Foo<u8> [] + fn baz() fn() -> Foo<T> [] "#]], ); } @@ -2697,20 +2789,20 @@ fn main() { "#, &[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)], expect![[r#" - sn not [snippet] - me not() (use ops::Not) [type_could_unify+requires_import] - sn if [] - sn while [] - sn ref [] - sn refm [] - sn deref [] - sn unsafe [] - sn match [] - sn box [] - sn dbg [] - sn dbgr [] - sn call [] - sn return [] + sn not !expr [snippet] + me not() fn(self) -> <Self as Not>::Output [type_could_unify+requires_import] + sn if if expr {} [] + sn while while expr {} [] + sn ref &expr [] + sn refm &mut expr [] + sn deref *expr [] + sn unsafe unsafe {} [] + sn match match expr {} [] + sn box Box::new(expr) [] + sn dbg dbg!(expr) [] + sn dbgr dbg!(&expr) [] + sn call function(expr) [] + sn return return expr [] "#]], ); } @@ -2730,19 +2822,19 @@ fn main() { "#, &[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)], expect![[r#" - me f() [] - sn ref [] - sn refm [] - sn deref [] - sn unsafe [] - sn match [] - sn box [] - sn dbg [] - sn dbgr [] - sn call [] - sn let [] - sn letm [] - sn return [] + me f() fn(&self) [] + sn ref &expr [] + sn refm &mut expr [] + sn deref *expr [] + sn unsafe unsafe {} [] + sn match match expr {} [] + sn box Box::new(expr) [] + sn dbg dbg!(expr) [] + sn dbgr dbg!(&expr) [] + sn call function(expr) [] + sn let let [] + sn letm let mut [] + sn return return expr [] "#]], ); } @@ -2765,12 +2857,12 @@ fn f() { } "#, expect![[r#" - st Buffer [] - fn f() [] - md std [] - tt BufRead (use std::io::BufRead) [requires_import] - st BufReader (use std::io::BufReader) [requires_import] - st BufWriter (use std::io::BufWriter) [requires_import] + st Buffer Buffer [] + fn f() fn() [] + md std [] + tt BufRead [requires_import] + st BufReader BufReader [requires_import] + st BufWriter BufWriter [requires_import] "#]], ); } @@ -2979,6 +3071,12 @@ fn main() { [ CompletionItem { label: "flush()", + detail_left: Some( + "(as Write)", + ), + detail_right: Some( + "fn(&self)", + ), source_range: 193..193, delete: 193..193, insert: "flush();$0", @@ -3006,6 +3104,12 @@ fn main() { }, CompletionItem { label: "write()", + detail_left: Some( + "(as Write)", + ), + detail_right: Some( + "fn(&self)", + ), source_range: 193..193, delete: 193..193, insert: "write();$0", diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index f371012de3f..e01097a9105 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -118,10 +118,16 @@ fn completion_list_with_config_raw( 
let items = get_all_items(config, ra_fixture, trigger_character); items .into_iter() - .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label == "u32") + .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label.primary == "u32") .filter(|it| include_keywords || it.kind != CompletionItemKind::Keyword) .filter(|it| include_keywords || it.kind != CompletionItemKind::Snippet) - .sorted_by_key(|it| (it.kind, it.label.clone(), it.detail.as_ref().map(ToOwned::to_owned))) + .sorted_by_key(|it| { + ( + it.kind, + it.label.primary.clone(), + it.label.detail_left.as_ref().map(ToOwned::to_owned), + ) + }) .collect() } @@ -173,27 +179,30 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String { let label_width = completions .iter() .map(|it| { - monospace_width(&it.label) - + monospace_width(it.label_detail.as_deref().unwrap_or_default()) + monospace_width(&it.label.primary) + + monospace_width(it.label.detail_left.as_deref().unwrap_or_default()) + + monospace_width(it.label.detail_right.as_deref().unwrap_or_default()) + + it.label.detail_left.is_some() as usize + + it.label.detail_right.is_some() as usize }) .max() - .unwrap_or_default() - .min(22); + .unwrap_or_default(); completions .into_iter() .map(|it| { let tag = it.kind.tag(); - let var_name = format!("{tag} {}", it.label); - let mut buf = var_name; - if let Some(ref label_detail) = it.label_detail { - format_to!(buf, "{label_detail}"); + let mut buf = format!("{tag} {}", it.label.primary); + if let Some(label_detail) = &it.label.detail_left { + format_to!(buf, " {label_detail}"); } - if let Some(detail) = it.detail { - let width = label_width.saturating_sub( - monospace_width(&it.label) - + monospace_width(&it.label_detail.unwrap_or_default()), + if let Some(detail_right) = it.label.detail_right { + let pad_with = label_width.saturating_sub( + monospace_width(&it.label.primary) + + monospace_width(it.label.detail_left.as_deref().unwrap_or_default()) + + monospace_width(&detail_right) + + it.label.detail_left.is_some() as usize, ); - format_to!(buf, "{:width$} {}", "", detail, width = width); + format_to!(buf, "{:pad_with$}{detail_right}", "",); } if it.deprecated { format_to!(buf, " DEPRECATED"); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs index 1443ebc6c0c..acafa6518f6 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs @@ -33,7 +33,7 @@ pub struct Foo(#[m$0] i32); at cold at deny(…) at deprecated - at derive macro derive + at derive macro derive at derive(…) at doc = "…" at doc(alias = "…") @@ -367,9 +367,9 @@ struct Foo; at cfg_attr(…) at deny(…) at deprecated - at derive macro derive + at derive macro derive at derive(…) - at derive_const macro derive_const + at derive_const macro derive_const at doc = "…" at doc(alias = "…") at doc(hidden) @@ -790,10 +790,10 @@ mod derive { #[derive($0)] struct Test; "#, expect![[r#" - de Clone macro Clone + de Clone macro Clone de Clone, Copy - de Default macro Default - de PartialEq macro PartialEq + de Default macro Default + de PartialEq macro PartialEq de PartialEq, Eq de PartialEq, Eq, PartialOrd, Ord de PartialEq, PartialOrd @@ -812,9 +812,9 @@ mod derive { #[derive(serde::Serialize, PartialEq, $0)] struct Test; "#, expect![[r#" - de Clone macro Clone + de Clone macro Clone de Clone, Copy - de Default macro Default + de Default macro Default de Eq 
de Eq, PartialOrd, Ord de PartialOrd @@ -833,9 +833,9 @@ mod derive { #[derive($0 serde::Serialize, PartialEq)] struct Test; "#, expect![[r#" - de Clone macro Clone + de Clone macro Clone de Clone, Copy - de Default macro Default + de Default macro Default de Eq de Eq, PartialOrd, Ord de PartialOrd @@ -854,9 +854,9 @@ mod derive { #[derive(PartialEq, Eq, Or$0)] struct Test; "#, expect![[r#" - de Clone macro Clone + de Clone macro Clone de Clone, Copy - de Default macro Default + de Default macro Default de PartialOrd de PartialOrd, Ord md core diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs index 545c2a2a8a0..ea1b7ad7871 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs @@ -26,22 +26,22 @@ fn baz() { "#, // This should not contain `FooDesc {…}`. expect![[r#" - ct CONST Unit - en Enum Enum - fn baz() fn() - fn create_foo(…) fn(&FooDesc) - fn function() fn() - ma makro!(…) macro_rules! makro + ct CONST Unit + en Enum Enum + fn baz() fn() + fn create_foo(…) fn(&FooDesc) + fn function() fn() + ma makro!(…) macro_rules! makro md _69latrick md module - sc STATIC Unit - st FooDesc FooDesc - st Record Record - st Tuple Tuple - st Unit Unit - un Union Union - ev TupleV(…) TupleV(u32) - bt u32 u32 + sc STATIC Unit + st FooDesc FooDesc + st Record Record + st Tuple Tuple + st Unit Unit + un Union Union + ev TupleV(…) TupleV(u32) + bt u32 u32 kw crate:: kw false kw for @@ -76,14 +76,14 @@ fn func(param0 @ (param1, param2): (i32, i32)) { } "#, expect![[r#" - fn func(…) fn((i32, i32)) - lc ifletlocal i32 - lc letlocal i32 - lc matcharm i32 - lc param0 (i32, i32) - lc param1 i32 - lc param2 i32 - bt u32 u32 + fn func(…) fn((i32, i32)) + lc ifletlocal i32 + lc letlocal i32 + lc matcharm i32 + lc param0 (i32, i32) + lc param1 i32 + lc param2 i32 + bt u32 u32 kw crate:: kw false kw for @@ -122,25 +122,25 @@ impl Unit { "#, // `self` is in here twice, once as the module, once as the local expect![[r#" - ct CONST Unit + ct CONST Unit cp CONST_PARAM - en Enum Enum - fn function() fn() - fn local_func() fn() - me self.foo() fn(self) - lc self Unit - ma makro!(…) macro_rules! makro + en Enum Enum + fn function() fn() + fn local_func() fn() + me self.foo() fn(self) + lc self Unit + ma makro!(…) macro_rules! makro md module md qualified - sp Self Unit - sc STATIC Unit - st Record Record - st Tuple Tuple - st Unit Unit + sp Self Unit + sc STATIC Unit + st Record Record + st Tuple Tuple + st Unit Unit tp TypeParam - un Union Union - ev TupleV(…) TupleV(u32) - bt u32 u32 + un Union Union + ev TupleV(…) TupleV(u32) + bt u32 u32 kw async kw const kw crate:: @@ -187,19 +187,19 @@ impl Unit { } "#, expect![[r#" - ct CONST Unit - en Enum Enum - fn function() fn() - ma makro!(…) macro_rules! makro + ct CONST Unit + en Enum Enum + fn function() fn() + ma makro!(…) macro_rules! makro md module md qualified - sc STATIC Unit - st Record Record - st Tuple Tuple - st Unit Unit + sc STATIC Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - ev TupleV(…) TupleV(u32) + un Union Union + ev TupleV(…) TupleV(u32) ?? 
Unresolved "#]], ); @@ -216,8 +216,8 @@ fn complete_in_block() { } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -264,8 +264,8 @@ fn complete_after_if_expr() { } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -313,8 +313,8 @@ fn complete_in_match_arm() { } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw crate:: kw false kw for @@ -337,8 +337,8 @@ fn completes_in_loop_ctx() { check_empty( r"fn my() { loop { $0 } }", expect![[r#" - fn my() fn() - bt u32 u32 + fn my() fn() + bt u32 u32 kw async kw break kw const @@ -376,22 +376,22 @@ fn completes_in_loop_ctx() { check_empty( r"fn my() { loop { foo.$0 } }", expect![[r#" - sn box Box::new(expr) - sn break break expr - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn if if expr {} - sn let let - sn letm let mut - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} - sn while while expr {} + sn box Box::new(expr) + sn break break expr + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn if if expr {} + sn let let + sn letm let mut + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } @@ -401,8 +401,8 @@ fn completes_in_let_initializer() { check_empty( r#"fn main() { let _ = $0 }"#, expect![[r#" - fn main() fn() - bt u32 u32 + fn main() fn() + bt u32 u32 kw crate:: kw false kw for @@ -434,9 +434,9 @@ fn foo() { } "#, expect![[r#" - fn foo() fn() - st Foo Foo - bt u32 u32 + fn foo() fn() + st Foo Foo + bt u32 u32 kw crate:: kw false kw for @@ -469,9 +469,9 @@ fn foo() { } "#, expect![[r#" - fn foo() fn() - lc bar i32 - bt u32 u32 + fn foo() fn() + lc bar i32 + bt u32 u32 kw crate:: kw false kw for @@ -499,10 +499,10 @@ fn quux(x: i32) { } "#, expect![[r#" - fn quux(…) fn(i32) - lc x i32 - ma m!(…) macro_rules! m - bt u32 u32 + fn quux(…) fn(i32) + lc x i32 + ma m!(…) macro_rules! m + bt u32 u32 kw crate:: kw false kw for @@ -526,10 +526,10 @@ fn quux(x: i32) { } ", expect![[r#" - fn quux(…) fn(i32) - lc x i32 - ma m!(…) macro_rules! m - bt u32 u32 + fn quux(…) fn(i32) + lc x i32 + ma m!(…) macro_rules! m + bt u32 u32 kw crate:: kw false kw for @@ -554,11 +554,11 @@ fn quux(x: i32) { } "#, expect![[r#" - fn quux(…) fn(i32) - lc x i32 - lc y i32 - ma m!(…) macro_rules! m - bt u32 u32 + fn quux(…) fn(i32) + lc x i32 + lc y i32 + ma m!(…) macro_rules! 
m + bt u32 u32 kw crate:: kw false kw for @@ -590,12 +590,12 @@ fn func() { } "#, expect![[r#" - ct ASSOC_CONST const ASSOC_CONST: () - fn assoc_fn() fn() - ta AssocType type AssocType = () + ct ASSOC_CONST const ASSOC_CONST: () + fn assoc_fn() fn() + ta AssocType type AssocType = () ev RecordV {…} RecordV { field: u32 } - ev TupleV(…) TupleV(u32) - ev UnitV UnitV + ev TupleV(…) TupleV(u32) + ev UnitV UnitV "#]], ); } @@ -633,7 +633,7 @@ fn func() { "#, expect![[r#" fn variant fn() -> Enum - ev Variant Variant + ev Variant Variant "#]], ); } @@ -650,8 +650,8 @@ fn main() { } ", expect![[r#" - fn foo() fn() -> impl Trait<U> - fn main() fn() + fn foo() fn() -> impl Trait<U> + fn main() fn() tt Trait "#]], ); @@ -670,9 +670,9 @@ fn main() { } "#, expect![[r#" - fn bar() async fn() -> impl Trait<U> - fn foo() async fn() -> u8 - fn main() fn() + fn bar() async fn() -> impl Trait<U> + fn foo() async fn() -> u8 + fn main() fn() tt Trait "#]], ); @@ -692,9 +692,9 @@ fn main() { Foo::$0 } ", - expect![[r" + expect![[r#" fn bar(…) fn(impl Trait<U>) - "]], + "#]], ); } @@ -712,7 +712,7 @@ fn main() { } "#, expect![[r#" - fn test() fn() -> Zulu + fn test() fn() -> Zulu ex Zulu ex Zulu::test() "#]], @@ -736,11 +736,11 @@ fn brr() { } "#, expect![[r#" - en HH HH - fn brr() fn() - st YoloVariant YoloVariant + en HH HH + fn brr() fn() + st YoloVariant YoloVariant st YoloVariant {…} YoloVariant { f: usize } - bt u32 u32 + bt u32 u32 kw crate:: kw false kw for @@ -801,8 +801,8 @@ fn else_completion_after_if() { fn foo() { if foo {} $0 } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -842,8 +842,8 @@ fn foo() { if foo {} $0 } fn foo() { if foo {} el$0 } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -883,8 +883,8 @@ fn foo() { if foo {} el$0 } fn foo() { bar(if foo {} $0) } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw crate:: kw else kw else if @@ -907,8 +907,8 @@ fn foo() { bar(if foo {} $0) } fn foo() { bar(if foo {} el$0) } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw crate:: kw else kw else if @@ -931,8 +931,8 @@ fn foo() { bar(if foo {} el$0) } fn foo() { if foo {} $0 let x = 92; } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -972,8 +972,8 @@ fn foo() { if foo {} $0 let x = 92; } fn foo() { if foo {} el$0 let x = 92; } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -1013,8 +1013,8 @@ fn foo() { if foo {} el$0 let x = 92; } fn foo() { if foo {} el$0 { let x = 92; } } "#, expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -1065,9 +1065,9 @@ fn main() { pub struct UnstableThisShouldNotBeListed; "#, expect![[r#" - fn main() fn() + fn main() fn() md std - bt u32 u32 + bt u32 u32 kw async kw const kw crate:: @@ -1117,10 +1117,10 @@ fn main() { pub struct UnstableButWeAreOnNightlyAnyway; "#, expect![[r#" - fn main() fn() + fn main() fn() md std st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway - bt u32 u32 + bt u32 u32 kw async kw const kw crate:: @@ -1170,17 +1170,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me 
foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); check_empty( @@ -1196,17 +1196,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } @@ -1226,17 +1226,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); check_empty( @@ -1252,17 +1252,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); check_empty( @@ -1278,17 +1278,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); check_empty( @@ -1304,19 +1304,89 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn if if expr {} - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn if if expr {} + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + sn while while expr {} + "#]], + ); +} + +#[test] +fn macro_that_ignores_completion_marker() { + check( + r#" +macro_rules! helper { + ($v:ident) => {}; +} + +macro_rules! m { + ($v:ident) => {{ + helper!($v); + $v + }}; +} + +fn main() { + let variable = "test"; + m!(v$0); +} + "#, + expect![[r#" + ct CONST Unit + en Enum Enum + fn function() fn() + fn main() fn() + lc variable &str + ma helper!(…) macro_rules! helper + ma m!(…) macro_rules! m + ma makro!(…) macro_rules! 
makro + md module + sc STATIC Unit + st Record Record + st Tuple Tuple + st Unit Unit + un Union Union + ev TupleV(…) TupleV(u32) + bt u32 u32 + kw async + kw const + kw crate:: + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 4b949e0d657..447dbc998b5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -139,9 +139,9 @@ fn main() { } "#, expect![[r#" - st Rc (use dep::Rc) Rc - st Rcar (use dep::Rcar) Rcar - st Rc (use dep::some_module::Rc) Rc + st Rc (use dep::Rc) Rc + st Rcar (use dep::Rcar) Rcar + st Rc (use dep::some_module::Rc) Rc st Rcar (use dep::some_module::Rcar) Rcar "#]], ); @@ -165,11 +165,11 @@ fn main() { } "#, expect![[r#" - ct RC (use dep::RC) () - st Rc (use dep::Rc) Rc - st Rcar (use dep::Rcar) Rcar - ct RC (use dep::some_module::RC) () - st Rc (use dep::some_module::Rc) Rc + ct RC (use dep::RC) () + st Rc (use dep::Rc) Rc + st Rcar (use dep::Rcar) Rcar + ct RC (use dep::some_module::RC) () + st Rc (use dep::some_module::Rc) Rc st Rcar (use dep::some_module::Rcar) Rcar "#]], ); @@ -193,7 +193,7 @@ fn main() { } "#, expect![[r#" - ct RC (use dep::RC) () + ct RC (use dep::RC) () ct RC (use dep::some_module::RC) () "#]], ); @@ -227,7 +227,7 @@ fn main() { } "#, expect![[r#" - st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct + st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct st AfterThirdStruct (use dep::some_module::AfterThirdStruct) AfterThirdStruct st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) ThiiiiiirdStruct "#]], @@ -263,8 +263,8 @@ fn trait_function_fuzzy_completion() { check( fixture, expect![[r#" - fn weird_function() (use dep::test_mod::TestTrait) fn() - "#]], + fn weird_function() (use dep::test_mod::TestTrait) fn() + "#]], ); check_edit( @@ -356,8 +356,8 @@ fn trait_method_fuzzy_completion() { check( fixture, expect![[r#" - me random_method() (use dep::test_mod::TestTrait) fn(&self) - "#]], + me random_method() (use dep::test_mod::TestTrait) fn(&self) + "#]], ); check_edit( @@ -401,8 +401,8 @@ fn main() { check( fixture, expect![[r#" - me some_method() (use foo::TestTrait) fn(&self) - "#]], + me some_method() (use foo::TestTrait) fn(&self) + "#]], ); check_edit( @@ -448,8 +448,8 @@ fn main() { check( fixture, expect![[r#" - me some_method() (use foo::TestTrait) fn(&self) - "#]], + me some_method() (use foo::TestTrait) fn(&self) + "#]], ); check_edit( @@ -496,8 +496,8 @@ fn completion<T: Wrapper>(whatever: T) { check( fixture, expect![[r#" - me not_in_scope() (use foo::NotInScope) fn(&self) - "#]], + me not_in_scope() (use foo::NotInScope) fn(&self) + "#]], ); check_edit( @@ -539,8 +539,8 @@ fn main() { check( fixture, expect![[r#" - me into() (use test_trait::TestInto) fn(self) -> T - "#]], + me into() (use test_trait::TestInto) fn(self) -> T + "#]], ); } @@ -568,8 +568,8 @@ fn main() { check( fixture, expect![[r#" - fn random_method() (use dep::test_mod::TestTrait) fn() - "#]], + fn random_method() (use dep::test_mod::TestTrait) fn() + "#]], ); check_edit( @@ -737,8 +737,8 @@ fn main() { } "#, expect![[r#" - me 
random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED - "#]], + me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED + "#]], ); check( @@ -767,8 +767,8 @@ fn main() { } "#, expect![[r#" - ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED - fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED + ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED + fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED "#]], ); @@ -1117,7 +1117,7 @@ fn main() { tes$0 }"#, expect![[r#" - ct TEST_CONST (use foo::TEST_CONST) usize + ct TEST_CONST (use foo::TEST_CONST) usize fn test_function() (use foo::test_function) fn() -> i32 "#]], ); @@ -1175,8 +1175,8 @@ fn main() { } "#, expect![[r#" - fn some_fn() (use m::some_fn) fn() -> i32 - "#]], + fn some_fn() (use m::some_fn) fn() -> i32 + "#]], ); } @@ -1691,7 +1691,7 @@ fn function() { expect![[r#" st FooStruct (use outer::FooStruct) BarStruct md foo (use outer::foo) - fn foo_fun() (use outer::foo_fun) fn() + fn foo_fun() (use outer::foo_fun) fn() "#]], ); } @@ -1720,3 +1720,45 @@ fn function() { "#]], ); } + +#[test] +fn intrinsics() { + check( + r#" + //- /core.rs crate:core + pub mod intrinsics { + extern "rust-intrinsic" { + pub fn transmute<Src, Dst>(src: Src) -> Dst; + } + } + pub mod mem { + pub use crate::intrinsics::transmute; + } + //- /main.rs crate:main deps:core + fn function() { + transmute$0 + } + "#, + expect![[r#" + fn transmute(…) (use core::mem::transmute) unsafe fn(Src) -> Dst + "#]], + ); + check( + r#" +//- /core.rs crate:core +pub mod intrinsics { + extern "rust-intrinsic" { + pub fn transmute<Src, Dst>(src: Src) -> Dst; + } +} +pub mod mem { + pub use crate::intrinsics::transmute; +} +//- /main.rs crate:main deps:core +fn function() { + mem::transmute$0 +} +"#, + expect![""], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs index cce74604c2d..4a89f874e15 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs @@ -140,7 +140,7 @@ fn foo2($0) {} expect![[r#" st Bar bn Bar { bar }: Bar - bn Bar {…} Bar { bar$1 }: Bar$0 + bn Bar {…} Bar { bar$1 }: Bar$0 kw mut kw ref "#]], diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs index 09254aed7cb..f34f3d0fc2f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs @@ -20,15 +20,15 @@ fn target_type_or_trait_in_impl_block() { impl Tra$0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -42,15 +42,15 @@ fn target_type_in_trait_impl_block() { impl Trait for Str$0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! 
makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs index dfef8fa472d..d3d52dc6dfc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs @@ -13,7 +13,7 @@ fn in_mod_item_list() { check( r#"mod tests { $0 }"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro kw async kw const kw crate:: @@ -46,7 +46,7 @@ fn in_source_file_item_list() { check( r#"$0"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw async kw const @@ -79,7 +79,7 @@ fn in_item_list_after_attr() { check( r#"#[attr] $0"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw async kw const @@ -182,7 +182,7 @@ fn in_impl_assoc_item_list() { check( r#"impl Struct { $0 }"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw async kw const @@ -202,7 +202,7 @@ fn in_impl_assoc_item_list_after_attr() { check( r#"impl Struct { #[attr] $0 }"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw async kw const @@ -315,7 +315,7 @@ impl Test for () { fn async fn function2() fn fn function1() fn fn function2() - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module ta type Type1 = kw crate:: @@ -381,7 +381,7 @@ fn after_unit_struct() { check( r#"struct S; f$0"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw async kw const @@ -503,7 +503,7 @@ fn inside_extern_blocks() { check( r#"extern { $0 }"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw crate:: kw fn @@ -520,7 +520,7 @@ fn inside_extern_blocks() { check( r#"unsafe extern { $0 }"#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module kw crate:: kw fn diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs index a5eb0369b14..2f1f555e524 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs @@ -122,15 +122,15 @@ fn foo() { expect![[r#" ct CONST en Enum - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module st Record st Tuple st Unit ev TupleV bn Record {…} Record { field$1 }$0 - bn Tuple(…) Tuple($1)$0 - bn TupleV(…) TupleV($1)$0 + bn Tuple(…) Tuple($1)$0 + bn TupleV(…) TupleV($1)$0 kw mut kw ref "#]], @@ -151,15 +151,15 @@ fn foo() { "#, expect![[r#" en SingleVariantEnum - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module st Record st Tuple st Unit ev Variant - bn Record {…} Record { field$1 }$0 - bn Tuple(…) Tuple($1)$0 - bn Variant Variant$0 + bn Record {…} Record { field$1 }$0 + bn Tuple(…) Tuple($1)$0 + bn Variant Variant$0 kw mut kw ref "#]], @@ -174,13 +174,13 @@ fn foo(a$0) { } "#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! 
makro md module st Record st Tuple st Unit bn Record {…} Record { field$1 }: Record$0 - bn Tuple(…) Tuple($1): Tuple$0 + bn Tuple(…) Tuple($1): Tuple$0 kw mut kw ref "#]], @@ -191,13 +191,13 @@ fn foo(a$0: Tuple) { } "#, expect![[r#" - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module st Record st Tuple st Unit bn Record {…} Record { field$1 }$0 - bn Tuple(…) Tuple($1)$0 + bn Tuple(…) Tuple($1)$0 bn tuple kw mut kw ref @@ -240,7 +240,7 @@ fn foo() { expect![[r#" en E ma m!(…) macro_rules! m - bn E::X E::X$0 + bn E::X E::X$0 kw mut kw ref "#]], @@ -268,7 +268,7 @@ fn outer() { st Record st Tuple bn Record {…} Record { field$1, .. }$0 - bn Tuple(…) Tuple($1, ..)$0 + bn Tuple(…) Tuple($1, ..)$0 kw mut kw ref "#]], @@ -291,7 +291,7 @@ impl Foo { expect![[r#" sp Self st Foo - bn Foo(…) Foo($1)$0 + bn Foo(…) Foo($1)$0 bn Self(…) Self($1)$0 kw mut kw ref @@ -315,8 +315,8 @@ fn func() { expect![[r#" ct ASSOC_CONST const ASSOC_CONST: () bn RecordV {…} RecordV { field$1 }$0 - bn TupleV(…) TupleV($1)$0 - bn UnitV UnitV$0 + bn TupleV(…) TupleV($1)$0 + bn UnitV UnitV$0 "#]], ); } @@ -332,7 +332,7 @@ fn outer(Foo { bar: $0 }: Foo) {} expect![[r#" st Bar st Foo - bn Bar(…) Bar($1)$0 + bn Bar(…) Bar($1)$0 bn Foo {…} Foo { bar$1 }$0 kw mut kw ref @@ -395,7 +395,7 @@ fn foo($0) {} expect![[r#" st Bar st Foo - bn Bar(…) Bar($1): Bar$0 + bn Bar(…) Bar($1): Bar$0 bn Foo {…} Foo { bar$1 }: Foo$0 kw mut kw ref @@ -416,7 +416,7 @@ fn foo() { expect![[r#" st Bar st Foo - bn Bar(…) Bar($1)$0 + bn Bar(…) Bar($1)$0 bn Foo {…} Foo { bar$1 }$0 kw mut kw ref @@ -436,7 +436,7 @@ fn foo() { } "#, expect![[r#" - st Bar Bar + st Bar Bar kw crate:: kw self:: "#]], @@ -451,7 +451,7 @@ fn foo() { } "#, expect![[r#" - st Foo Foo + st Foo Foo kw crate:: kw self:: "#]], @@ -535,10 +535,10 @@ fn foo() { "#, expect![[r#" en Enum - bn Enum::A Enum::A$0 - bn Enum::B {…} Enum::B { r#type$1 }$0 + bn Enum::A Enum::A$0 + bn Enum::B {…} Enum::B { r#type$1 }$0 bn Enum::struct {…} Enum::r#struct { r#type$1 }$0 - bn Enum::type Enum::r#type$0 + bn Enum::type Enum::r#type$0 kw mut kw ref "#]], @@ -559,10 +559,10 @@ fn foo() { } "#, expect![[r#" - bn A A$0 - bn B {…} B { r#type$1 }$0 + bn A A$0 + bn B {…} B { r#type$1 }$0 bn struct {…} r#struct { r#type$1 }$0 - bn type r#type$0 + bn type r#type$0 "#]], ); } @@ -672,8 +672,8 @@ impl Ty { st Ty bn &mut self bn &self - bn Self(…) Self($1): Self$0 - bn Ty(…) Ty($1): Ty$0 + bn Self(…) Self($1): Self$0 + bn Ty(…) Ty($1): Ty$0 bn mut self bn self kw mut @@ -693,8 +693,8 @@ impl Ty { st Ty bn &mut self bn &self - bn Self(…) Self($1): Self$0 - bn Ty(…) Ty($1): Ty$0 + bn Self(…) Self($1): Self$0 + bn Ty(…) Ty($1): Ty$0 bn mut self bn self kw mut @@ -714,8 +714,8 @@ impl Ty { st Ty bn &mut self bn &self - bn Self(…) Self($1): Self$0 - bn Ty(…) Ty($1): Ty$0 + bn Self(…) Self($1): Self$0 + bn Ty(…) Ty($1): Ty$0 bn mut self bn self kw mut @@ -734,7 +734,7 @@ impl Ty { sp Self st Ty bn Self(…) Self($1): Self$0 - bn Ty(…) Ty($1): Ty$0 + bn Ty(…) Ty($1): Ty$0 kw mut kw ref "#]], @@ -763,7 +763,7 @@ fn f(x: EnumAlias<u8>) { "#, expect![[r#" bn Tuple(…) Tuple($1)$0 - bn Unit Unit$0 + bn Unit Unit$0 "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs index 46a3e97d3e9..c1926359efc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs @@ -16,16 +16,16 @@ fn predicate_start() { 
struct Foo<'lt, T, const C: usize> where $0 {} "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<'_, {unknown}, _> - st Record Record - st Tuple Tuple - st Unit Unit + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -89,16 +89,16 @@ fn param_list_for_for_pred() { struct Foo<'lt, T, const C: usize> where for<'a> $0 {} "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<'_, {unknown}, _> - st Record Record - st Tuple Tuple - st Unit Unit + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -114,16 +114,16 @@ impl Record { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self Record - st Record Record - st Tuple Tuple - st Unit Unit + sp Self Record + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs index 613f33309f5..afc286b6fb4 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs @@ -24,19 +24,19 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ) } @@ -57,19 +57,19 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ) } @@ -92,19 +92,19 @@ impl Foo { fn main() {} "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ) } @@ -127,19 +127,19 @@ impl Foo { fn main() {} "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - 
sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs index 56162bb57b8..a9c9f604e07 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs @@ -70,8 +70,8 @@ fn foo(baz: Baz) { ev Ok bn Baz::Bar Baz::Bar$0 bn Baz::Foo Baz::Foo$0 - bn Err(…) Err($1)$0 - bn Ok(…) Ok($1)$0 + bn Err(…) Err($1)$0 + bn Ok(…) Ok($1)$0 kw mut kw ref "#]], @@ -91,20 +91,20 @@ fn foo(baz: Baz) { } "#, expect![[r#" - en Baz - en Result - md core - ev Bar - ev Err - ev Foo - ev Ok - bn Bar Bar$0 - bn Err(…) Err($1)$0 - bn Foo Foo$0 - bn Ok(…) Ok($1)$0 - kw mut - kw ref - "#]], + en Baz + en Result + md core + ev Bar + ev Err + ev Foo + ev Ok + bn Bar Bar$0 + bn Err(…) Err($1)$0 + bn Foo Foo$0 + bn Ok(…) Ok($1)$0 + kw mut + kw ref + "#]], ); } @@ -184,14 +184,14 @@ fn main() { "#, expect![[r#" fd ..Default::default() - fn main() fn() - lc foo Foo - lc thing i32 + fn main() fn() + lc foo Foo + lc thing i32 md core - st Foo Foo - st Foo {…} Foo { foo1: u32, foo2: u32 } + st Foo Foo + st Foo {…} Foo { foo1: u32, foo2: u32 } tt Default - bt u32 u32 + bt u32 u32 kw crate:: kw self:: ex Foo::default() @@ -238,8 +238,8 @@ fn main() { "#, expect![[r#" fd ..Default::default() - fd foo1 u32 - fd foo2 u32 + fd foo1 u32 + fd foo2 u32 "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs index 508f6248dd4..388af48c68b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs @@ -87,7 +87,7 @@ pub mod prelude { expect![[r#" md std st Option Option - bt u32 u32 + bt u32 u32 "#]], ); } @@ -113,10 +113,10 @@ mod macros { } "#, expect![[r#" - fn f() fn() + fn f() fn() ma concat!(…) macro_rules! 
concat md std - bt u32 u32 + bt u32 u32 "#]], ); } @@ -146,7 +146,7 @@ pub mod prelude { md core md std st String String - bt u32 u32 + bt u32 u32 "#]], ); } @@ -174,7 +174,7 @@ pub mod prelude { expect![[r#" fn f() fn() md std - bt u32 u32 + bt u32 u32 "#]], ); } @@ -226,9 +226,9 @@ impl S { fn foo() { let _ = lib::S::$0 } "#, expect![[r#" - ct PUBLIC_CONST pub const PUBLIC_CONST: u32 - fn public_method() fn() - ta PublicType pub type PublicType = u32 + ct PUBLIC_CONST pub const PUBLIC_CONST: u32 + fn public_method() fn() + ta PublicType pub type PublicType = u32 "#]], ); } @@ -317,14 +317,14 @@ trait Sub: Super { fn foo<T: Sub>() { T::$0 } "#, expect![[r#" - ct C2 (as Sub) const C2: () - ct CONST (as Super) const CONST: u8 - fn func() (as Super) fn() - fn subfunc() (as Sub) fn() - me method(…) (as Super) fn(&self) - me submethod(…) (as Sub) fn(&self) + ct C2 (as Sub) const C2: () + ct CONST (as Super) const CONST: u8 + fn func() (as Super) fn() + fn subfunc() (as Sub) fn() + me method(…) (as Super) fn(&self) + me submethod(…) (as Sub) fn(&self) ta SubTy (as Sub) type SubTy - ta Ty (as Super) type Ty + ta Ty (as Super) type Ty "#]], ); } @@ -357,14 +357,14 @@ impl<T> Sub for Wrap<T> { } "#, expect![[r#" - ct C2 (as Sub) const C2: () - ct CONST (as Super) const CONST: u8 - fn func() (as Super) fn() - fn subfunc() (as Sub) fn() - me method(…) (as Super) fn(&self) - me submethod(…) (as Sub) fn(&self) + ct C2 (as Sub) const C2: () + ct CONST (as Super) const CONST: u8 + fn func() (as Super) fn() + fn subfunc() (as Sub) fn() + me method(…) (as Super) fn(&self) + me submethod(…) (as Sub) fn(&self) ta SubTy (as Sub) type SubTy - ta Ty (as Super) type Ty + ta Ty (as Super) type Ty "#]], ); } @@ -381,9 +381,9 @@ impl T { fn bar() {} } fn main() { T::$0; } "#, expect![[r#" - fn bar() fn() - fn foo() fn() - "#]], + fn bar() fn() + fn foo() fn() + "#]], ); } @@ -397,7 +397,7 @@ macro_rules! foo { () => {} } fn main() { let _ = crate::$0 } "#, expect![[r#" - fn main() fn() + fn main() fn() ma foo!(…) macro_rules! 
foo "#]], ); @@ -447,9 +447,9 @@ mod p { } "#, expect![[r#" - ct RIGHT_CONST u32 - fn right_fn() fn() - st RightType WrongType + ct RIGHT_CONST u32 + fn right_fn() fn() + st RightType WrongType "#]], ); @@ -495,9 +495,9 @@ fn main() { m!(self::f$0); } fn foo() {} "#, expect![[r#" - fn foo() fn() - fn main() fn() - "#]], + fn foo() fn() + fn main() fn() + "#]], ); } @@ -513,9 +513,9 @@ mod m { } "#, expect![[r#" - fn z() fn() - md z - "#]], + fn z() fn() + md z + "#]], ); } @@ -534,8 +534,8 @@ fn foo() { } "#, expect![[r#" - fn new() fn() -> HashMap<K, V, RandomState> - "#]], + fn new() fn() -> HashMap<K, V, RandomState> + "#]], ); } @@ -557,8 +557,8 @@ impl Foo { "#, expect![[r#" me foo(…) fn(self) - ev Bar Bar - ev Baz Baz + ev Bar Bar + ev Baz Baz "#]], ); } @@ -578,9 +578,9 @@ fn foo(self) { } "#, expect![[r#" - ev Bar Bar - ev Baz Baz - "#]], + ev Bar Bar + ev Baz Baz + "#]], ); check_no_kw( @@ -598,8 +598,8 @@ enum Foo { } "#, expect![[r#" - ev Baz Baz - "#]], + ev Baz Baz + "#]], ); } @@ -623,9 +623,9 @@ impl u8 { } "#, expect![[r#" - ct MAX pub const MAX: Self - me func(…) fn(self) - "#]], + ct MAX pub const MAX: Self + me func(…) fn(self) + "#]], ); } @@ -643,8 +643,8 @@ fn main() { } "#, expect![[r#" - ev Bar Bar - "#]], + ev Bar Bar + "#]], ); } @@ -723,7 +723,7 @@ fn bar() -> Bar { } "#, expect![[r#" - fn bar() fn() + fn bar() fn() fn foo() (as Foo) fn() -> Self ex Bar ex bar() @@ -787,7 +787,7 @@ fn main() { } "#, expect![[r#" - me by_macro() (as MyTrait) fn(&self) + me by_macro() (as MyTrait) fn(&self) me not_by_macro() (as MyTrait) fn(&self) "#]], ) @@ -827,7 +827,7 @@ fn main() { } "#, expect![[r#" - me by_macro() (as MyTrait) fn(&self) + me by_macro() (as MyTrait) fn(&self) me not_by_macro() (as MyTrait) fn(&self) "#]], ) @@ -885,10 +885,10 @@ fn main() { } "#, expect![[r#" - fn main() fn() - lc foobar i32 - ma x!(…) macro_rules! x - bt u32 u32 + fn main() fn() + lc foobar i32 + ma x!(…) macro_rules! 
x + bt u32 u32 "#]], ) } @@ -1014,7 +1014,7 @@ fn here_we_go() { } "#, expect![[r#" - fn here_we_go() fn() + fn here_we_go() fn() st Foo (alias Bar) Foo bt u32 u32 kw async @@ -1064,9 +1064,9 @@ fn here_we_go() { } "#, expect![[r#" - fn here_we_go() fn() + fn here_we_go() fn() st Foo (alias Bar, Qux, Baz) Foo - bt u32 u32 + bt u32 u32 kw async kw const kw crate:: @@ -1160,20 +1160,20 @@ fn here_we_go() { } "#, expect![[r#" - fd bar u8 + fd bar u8 me baz() (alias qux) fn(&self) -> u8 - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } @@ -1189,7 +1189,7 @@ fn bar() { qu$0 } expect![[r#" fn bar() fn() fn foo() (alias qux) fn() - bt u32 u32 + bt u32 u32 kw async kw const kw crate:: @@ -1277,10 +1277,10 @@ fn here_we_go() { } "#, expect![[r#" - fn here_we_go() fn() + fn here_we_go() fn() md foo st Bar (alias Qux) (use foo::Bar) Bar - bt u32 u32 + bt u32 u32 kw crate:: kw false kw for @@ -1315,10 +1315,9 @@ use krate::e; fn main() { e::$0 }"#, - expect![ - "fn i_am_public() fn() -" - ], + expect![[r#" + fn i_am_public() fn() + "#]], ) } @@ -1444,8 +1443,8 @@ fn foo() { "#, Some('_'), expect![[r#" - fn foo() fn() - bt u32 u32 + fn foo() fn() + bt u32 u32 kw async kw const kw crate:: @@ -1498,7 +1497,7 @@ fn foo(_: a_$0) { } "#, Some('_'), expect![[r#" - bt u32 u32 + bt u32 u32 kw crate:: kw self:: "#]], @@ -1512,7 +1511,7 @@ fn foo<T>() { Some('_'), expect![[r#" tp T - bt u32 u32 + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs index db4ac9381ce..9ea262bcc59 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs @@ -17,18 +17,18 @@ struct Foo<'lt, T, const C: usize> { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self Foo<'_, {unknown}, _> - st Foo<…> Foo<'_, {unknown}, _> - st Record Record - st Tuple Tuple - st Unit Unit + sp Self Foo<'_, {unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -42,18 +42,18 @@ fn tuple_struct_field() { struct Foo<'lt, T, const C: usize>(f$0); "#, expect![[r#" - en Enum Enum - ma makro!(…) macro_rules! makro + en Enum Enum + ma makro!(…) macro_rules! makro md module - sp Self Foo<'_, {unknown}, _> - st Foo<…> Foo<'_, {unknown}, _> - st Record Record - st Tuple Tuple - st Unit Unit + sp Self Foo<'_, {unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw pub kw pub(crate) @@ -70,16 +70,16 @@ fn fn_return_type() { fn x<'lt, T, const C: usize>() -> $0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! 
makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -100,15 +100,15 @@ fn foo() -> B$0 { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it () kw crate:: kw self:: @@ -124,16 +124,16 @@ struct Foo<T>(T); const FOO: $0 = Foo(2); "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<{unknown}> - st Record Record - st Tuple Tuple - st Unit Unit + st Foo<…> Foo<{unknown}> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it Foo<i32> kw crate:: kw self:: @@ -151,15 +151,15 @@ fn f2() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it i32 kw crate:: kw self:: @@ -179,15 +179,15 @@ fn f2() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it u64 kw crate:: kw self:: @@ -204,15 +204,15 @@ fn f2(x: u64) -> $0 { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it u64 kw crate:: kw self:: @@ -230,15 +230,15 @@ fn f2(x: $0) { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it i32 kw crate:: kw self:: @@ -262,17 +262,17 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum Enum - ma makro!(…) macro_rules! makro + en Enum Enum + ma makro!(…) macro_rules! makro md a md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it a::Foo<a::Foo<i32>> kw crate:: kw self:: @@ -291,17 +291,17 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<{unknown}> - st Record Record - st Tuple Tuple - st Unit Unit + st Foo<…> Foo<{unknown}> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 it Foo<i32> kw crate:: kw self:: @@ -319,16 +319,16 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! 
makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -341,14 +341,14 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union + un Union Union "#]], ); } @@ -368,7 +368,7 @@ trait Trait2: Trait1 { fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {} "#, expect![[r#" - ta Foo = (as Trait2) type Foo + ta Foo = (as Trait2) type Foo ta Super = (as Trait1) type Super "#]], ); @@ -384,18 +384,18 @@ trait Trait2<T>: Trait1 { fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {} "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tt Trait1 tt Trait2 tp T - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -409,15 +409,15 @@ trait Trait2<T> { fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {} "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tt Trait2 - un Union Union + un Union Union "#]], ); } @@ -434,18 +434,18 @@ trait Tr<T> { impl Tr<$0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self dyn Tr<{unknown}> - st Record Record - st S S - st Tuple Tuple - st Unit Unit + sp Self dyn Tr<{unknown}> + st Record Record + st S S + st Tuple Tuple + st Unit Unit tt Tr tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -481,16 +481,16 @@ trait MyTrait<T, U> { fn f(t: impl MyTrait<u$0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt MyTrait tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -506,16 +506,16 @@ trait MyTrait<T, U> { fn f(t: impl MyTrait<u8, u$0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt MyTrait tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -549,16 +549,16 @@ trait MyTrait<T, U = u8> { fn f(t: impl MyTrait<u$0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt MyTrait tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -574,18 +574,18 @@ trait MyTrait<T, U = u8> { fn f(t: impl MyTrait<u8, u$0 "#, expect![[r#" - en Enum Enum - ma makro!(…) macro_rules! makro + en Enum Enum + ma makro!(…) macro_rules! 
makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt MyTrait tt Trait ta Item1 = (as MyTrait) type Item1 ta Item2 = (as MyTrait) type Item2 - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -619,16 +619,16 @@ trait MyTrait { fn f(t: impl MyTrait<Item1 = $0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt MyTrait tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -644,16 +644,16 @@ trait MyTrait { fn f(t: impl MyTrait<Item1 = u8, Item2 = $0 "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record Record - st Tuple Tuple - st Unit Unit + st Record Record + st Tuple Tuple + st Unit Unit tt MyTrait tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -668,7 +668,7 @@ trait MyTrait { fn f(t: impl MyTrait<C = $0 "#, expect![[r#" - ct CONST Unit + ct CONST Unit ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -691,9 +691,9 @@ pub struct S; "#, expect![[r#" md std - sp Self Foo - st Foo Foo - bt u32 u32 + sp Self Foo + st Foo Foo + bt u32 u32 kw crate:: kw self:: "#]], @@ -716,10 +716,10 @@ pub struct S; "#, expect![[r#" md std - sp Self Foo - st Foo Foo - st S S - bt u32 u32 + sp Self Foo + st Foo Foo + st S S + bt u32 u32 kw crate:: kw self:: "#]], @@ -739,16 +739,16 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo Foo - st Record Record - st Tuple Tuple - st Unit Unit + st Foo Foo + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -766,8 +766,8 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -785,16 +785,16 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo Foo - st Record Record - st Tuple Tuple - st Unit Unit + st Foo Foo + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -809,8 +809,8 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -828,17 +828,17 @@ fn completes_const_and_type_generics_separately() { fn foo(_: impl Bar<Baz<F$0, 0> = ()>) {} "#, expect![[r#" - en Enum Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo Foo - st Record Record - st Tuple Tuple - st Unit Unit + st Foo Foo + st Record Record + st Tuple Tuple + st Unit Unit tt Bar tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -853,8 +853,8 @@ fn completes_const_and_type_generics_separately() { fn foo<T: Bar<Baz<(), $0> = ()>>() {} "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! 
makro kw crate:: kw self:: @@ -871,8 +871,8 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -890,8 +890,8 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -908,8 +908,8 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -924,8 +924,8 @@ fn completes_const_and_type_generics_separately() { impl Foo<(), $0> for () {} "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -942,8 +942,8 @@ fn completes_const_and_type_generics_separately() { fn foo<T: Bar<X$0, ()>>() {} "#, expect![[r#" - ct CONST Unit - ct X usize + ct CONST Unit + ct X usize ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -957,7 +957,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>); fn foo<'a>() { S::<F$0, _>; } "#, expect![[r#" - ct CONST Unit + ct CONST Unit ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -970,7 +970,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>); fn foo<'a>() { S::<'static, 'static, F$0, _>; } "#, expect![[r#" - ct CONST Unit + ct CONST Unit ma makro!(…) macro_rules! makro kw crate:: kw self:: diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs index f8b76571ca0..2ea2e4e4c96 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs @@ -92,7 +92,7 @@ use self::{foo::*, bar$0}; "#, expect![[r#" md foo - st S S + st S S "#]], ); } @@ -179,7 +179,7 @@ struct Bar; "#, expect![[r#" ma foo macro_rules! foo_ - st Foo Foo + st Foo Foo "#]], ); } @@ -203,8 +203,8 @@ impl Foo { "#, expect![[r#" ev RecordVariant RecordVariant - ev TupleVariant TupleVariant - ev UnitVariant UnitVariant + ev TupleVariant TupleVariant + ev UnitVariant UnitVariant "#]], ); } @@ -257,7 +257,7 @@ mod a { } "#, expect![[r#" - ct A usize + ct A usize md b kw super:: "#]], @@ -450,9 +450,9 @@ pub fn foo() {} marco_rules! 
m { () => {} } "#, expect![[r#" - fn foo fn() + fn foo fn() md simd - st S S + st S S "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 82a182806a4..dab36bf20b9 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -2,10 +2,10 @@ use hir::{ db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig, - ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics, + ItemInNs, ModPath, Module, ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Trait, TyFingerprint, Type, }; -use itertools::{EitherOrBoth, Itertools}; +use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{ ast::{self, make, HasName}, @@ -13,7 +13,6 @@ use syntax::{ }; use crate::{ - helpers::item_name, items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT}, FxIndexSet, RootDatabase, }; @@ -52,7 +51,7 @@ pub struct TraitImportCandidate { #[derive(Debug)] pub struct PathImportCandidate { /// Optional qualifier before name. - pub qualifier: Option<Vec<SmolStr>>, + pub qualifier: Vec<SmolStr>, /// The name the item (struct, trait, enum, etc.) should have. pub name: NameToImport, } @@ -264,7 +263,6 @@ impl ImportAssets { Some(it) => it, None => return <FxIndexSet<_>>::default().into_iter(), }; - let krate = self.module_with_candidate.krate(); let scope_definitions = self.scope_definitions(sema); let mod_path = |item| { @@ -279,11 +277,14 @@ impl ImportAssets { }; match &self.import_candidate { - ImportCandidate::Path(path_candidate) => { - path_applicable_imports(sema, krate, path_candidate, mod_path, |item_to_import| { - !scope_definitions.contains(&ScopeDef::from(item_to_import)) - }) - } + ImportCandidate::Path(path_candidate) => path_applicable_imports( + sema, + &scope, + krate, + path_candidate, + mod_path, + |item_to_import| !scope_definitions.contains(&ScopeDef::from(item_to_import)), + ), ImportCandidate::TraitAssocItem(trait_candidate) | ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items( sema, @@ -315,6 +316,7 @@ impl ImportAssets { fn path_applicable_imports( sema: &Semantics<'_, RootDatabase>, + scope: &SemanticsScope<'_>, current_crate: Crate, path_candidate: &PathImportCandidate, mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy, @@ -322,8 +324,8 @@ fn path_applicable_imports( ) -> FxIndexSet<LocatedImport> { let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered(); - match &path_candidate.qualifier { - None => { + match &*path_candidate.qualifier { + [] => { items_locator::items_with_name( sema, current_crate, @@ -348,89 +350,107 @@ fn path_applicable_imports( .take(DEFAULT_QUERY_SEARCH_LIMIT.inner()) .collect() } - Some(qualifier) => items_locator::items_with_name( + [first_qsegment, qualifier_rest @ ..] 
=> items_locator::items_with_name( sema, current_crate, - path_candidate.name.clone(), - AssocSearchMode::Include, + NameToImport::Exact(first_qsegment.to_string(), true), + AssocSearchMode::Exclude, ) - .filter_map(|item| import_for_item(sema.db, mod_path, qualifier, item, scope_filter)) + .filter_map(|item| { + import_for_item( + sema, + scope, + mod_path, + &path_candidate.name, + item, + qualifier_rest, + scope_filter, + ) + }) .take(DEFAULT_QUERY_SEARCH_LIMIT.inner()) .collect(), } } fn import_for_item( - db: &RootDatabase, + sema: &Semantics<'_, RootDatabase>, + scope: &SemanticsScope<'_>, mod_path: impl Fn(ItemInNs) -> Option<ModPath>, + candidate: &NameToImport, + resolved_qualifier: ItemInNs, unresolved_qualifier: &[SmolStr], - original_item: ItemInNs, scope_filter: impl Fn(ItemInNs) -> bool, ) -> Option<LocatedImport> { let _p = tracing::info_span!("ImportAssets::import_for_item").entered(); - let [first_segment, ..] = unresolved_qualifier else { return None }; - - let item_as_assoc = item_as_assoc(db, original_item); - let (original_item_candidate, trait_item_to_import) = match item_as_assoc { - Some(assoc_item) => match assoc_item.container(db) { - AssocItemContainer::Trait(trait_) => { - let trait_ = ItemInNs::from(ModuleDef::from(trait_)); - (trait_, Some(trait_)) - } - AssocItemContainer::Impl(impl_) => { - (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None) + let qualifier = { + let mut adjusted_resolved_qualifier = resolved_qualifier; + if !unresolved_qualifier.is_empty() { + match resolved_qualifier { + ItemInNs::Types(ModuleDef::Module(module)) => { + adjusted_resolved_qualifier = sema + .resolve_mod_path_relative(module, unresolved_qualifier.iter().cloned())? + .next()?; + } + // can't resolve multiple segments for non-module item path bases + _ => return None, } - }, - None => (original_item, None), - }; - let import_path_candidate = mod_path(original_item_candidate)?; - - let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev(); - let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it { - // segments match, check next one - EitherOrBoth::Both(a, b) if b.as_str() == &**a => None, - // segments mismatch / qualifier is longer than the path, bail out - EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false), - // all segments match and we have exhausted the qualifier, proceed - EitherOrBoth::Right(_) => Some(true), - }; - if item_as_assoc.is_none() { - let item_name = item_name(db, original_item)?; - let last_segment = import_path_candidate_segments.next()?; - if *last_segment != item_name { - return None; } - } - let ends_with = unresolved_qualifier - .iter() - .rev() - .zip_longest(import_path_candidate_segments) - .find_map(predicate) - .unwrap_or(true); - if !ends_with { - return None; - } - let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?; - - Some(match (segment_import == original_item_candidate, trait_item_to_import) { - (true, Some(_)) => { - // FIXME we should be able to import both the trait and the segment, - // but it's unclear what to do with overlapping edits (merge imports?) - // especially in case of lazy completion edit resolutions. 
- return None; + match adjusted_resolved_qualifier { + ItemInNs::Types(def) => def, + _ => return None, } - (false, Some(trait_to_import)) if scope_filter(trait_to_import) => { - LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item) + }; + let import_path_candidate = mod_path(resolved_qualifier)?; + let ty = match qualifier { + ModuleDef::Module(module) => { + return items_locator::items_with_name_in_module( + sema, + module, + candidate.clone(), + AssocSearchMode::Exclude, + ) + .find(|&it| scope_filter(it)) + .map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item)) } - (true, None) if scope_filter(original_item_candidate) => { - LocatedImport::new(import_path_candidate, original_item_candidate, original_item) + // FIXME + ModuleDef::Trait(_) => return None, + // FIXME + ModuleDef::TraitAlias(_) => return None, + ModuleDef::TypeAlias(alias) => alias.ty(sema.db), + ModuleDef::BuiltinType(builtin) => builtin.ty(sema.db), + ModuleDef::Adt(adt) => adt.ty(sema.db), + _ => return None, + }; + ty.iterate_path_candidates(sema.db, scope, &FxHashSet::default(), None, None, |assoc| { + // FIXME: Support extra trait imports + if assoc.container_or_implemented_trait(sema.db).is_some() { + return None; } - (false, None) if scope_filter(segment_import) => { - LocatedImport::new(mod_path(segment_import)?, segment_import, original_item) + let name = assoc.name(sema.db)?; + let is_match = match candidate { + NameToImport::Prefix(text, true) => name.as_str().starts_with(text), + NameToImport::Prefix(text, false) => { + name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| { + name_char.eq_ignore_ascii_case(&candidate_char) + }) + } + NameToImport::Exact(text, true) => name.as_str() == text, + NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text), + NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)), + NameToImport::Fuzzy(text, false) => text + .chars() + .all(|c| name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))), + }; + if !is_match { + return None; } - _ => return None, + Some(LocatedImport::new( + import_path_candidate.clone(), + resolved_qualifier, + assoc_to_item(assoc), + )) }) } @@ -453,45 +473,6 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio }) } -fn find_import_for_segment( - db: &RootDatabase, - original_item: ItemInNs, - unresolved_first_segment: &str, -) -> Option<ItemInNs> { - let segment_is_name = item_name(db, original_item) - .map(|name| name.eq_ident(unresolved_first_segment)) - .unwrap_or(false); - - Some(if segment_is_name { - original_item - } else { - let matching_module = - module_with_segment_name(db, unresolved_first_segment, original_item)?; - ItemInNs::from(ModuleDef::from(matching_module)) - }) -} - -fn module_with_segment_name( - db: &RootDatabase, - segment_name: &str, - candidate: ItemInNs, -) -> Option<Module> { - let mut current_module = match candidate { - ItemInNs::Types(module_def_id) => module_def_id.module(db), - ItemInNs::Values(module_def_id) => module_def_id.module(db), - ItemInNs::Macros(macro_def_id) => ModuleDef::from(macro_def_id).module(db), - }; - while let Some(module) = current_module { - if let Some(module_name) = module.name(db) { - if module_name.eq_ident(segment_name) { - return Some(module); - } - } - current_module = module.parent(db); - } - None -} - fn trait_applicable_items( sema: &Semantics<'_, RootDatabase>, current_crate: Crate, @@ -703,7 +684,7 @@ impl 
ImportCandidate { return None; } Some(ImportCandidate::Path(PathImportCandidate { - qualifier: None, + qualifier: vec![], name: NameToImport::exact_case_sensitive(name.to_string()), })) } @@ -730,7 +711,7 @@ fn path_import_candidate( .segments() .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text()))) .collect::<Option<Vec<_>>>()?; - ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name }) + ImportCandidate::Path(PathImportCandidate { qualifier, name }) } else { return None; } @@ -754,10 +735,10 @@ fn path_import_candidate( } Some(_) => return None, }, - None => ImportCandidate::Path(PathImportCandidate { qualifier: None, name }), + None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name }), }) } fn item_as_assoc(db: &RootDatabase, item: ItemInNs) -> Option<AssocItem> { - item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)) + item.into_module_def().as_assoc_item(db) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs index 47549a1d008..7f66ea0c103 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs @@ -3,10 +3,14 @@ //! The main reason for this module to exist is the fact that project's items and dependencies' items //! are located in different caches, with different APIs. use either::Either; -use hir::{import_map, Crate, ItemInNs, Semantics}; +use hir::{import_map, Crate, ItemInNs, Module, Semantics}; use limit::Limit; -use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase}; +use crate::{ + imports::import_assets::NameToImport, + symbol_index::{self, SymbolsDatabase as _}, + RootDatabase, +}; /// A value to use, when uncertain which limit to pick. pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100); @@ -20,8 +24,7 @@ pub fn items_with_name<'a>( name: NameToImport, assoc_item_search: AssocSearchMode, ) -> impl Iterator<Item = ItemInNs> + 'a { - let krate_name = krate.display_name(sema.db).map(|name| name.to_string()); - let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name) + let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(sema.db).map(|name| name.to_string())) .entered(); let prefix = matches!(name, NameToImport::Prefix(..)); @@ -66,6 +69,54 @@ pub fn items_with_name<'a>( find_items(sema, krate, local_query, external_query) } +/// Searches for importable items with the given name in the crate and its dependencies. 
+pub fn items_with_name_in_module<'a>( + sema: &'a Semantics<'_, RootDatabase>, + module: Module, + name: NameToImport, + assoc_item_search: AssocSearchMode, +) -> impl Iterator<Item = ItemInNs> + 'a { + let _p = tracing::info_span!("items_with_name_in", name = name.text(), assoc_item_search = ?assoc_item_search, ?module) + .entered(); + + let prefix = matches!(name, NameToImport::Prefix(..)); + let local_query = match name { + NameToImport::Prefix(exact_name, case_sensitive) + | NameToImport::Exact(exact_name, case_sensitive) => { + let mut local_query = symbol_index::Query::new(exact_name.clone()); + local_query.assoc_search_mode(assoc_item_search); + if prefix { + local_query.prefix(); + } else { + local_query.exact(); + } + if case_sensitive { + local_query.case_sensitive(); + } + local_query + } + NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => { + let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone()); + local_query.fuzzy(); + local_query.assoc_search_mode(assoc_item_search); + + if case_sensitive { + local_query.case_sensitive(); + } + + local_query + } + }; + let mut local_results = Vec::new(); + local_query.search(&[sema.db.module_symbols(module)], |local_candidate| { + local_results.push(match local_candidate.def { + hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), + def => ItemInNs::from(def), + }) + }); + local_results.into_iter() +} + fn find_items<'a>( sema: &'a Semantics<'_, RootDatabase>, krate: Crate, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs index b3ecc26cb22..1e08e8e3098 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs @@ -377,6 +377,8 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<S return None; } name + } else if let Some(inner_ty) = ty.remove_ref() { + return name_of_type(&inner_ty, db, edition); } else { return None; }; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index dc3dee5c9ce..5f38d13570a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -778,4 +778,20 @@ fn bar(mut v: Union2) { "#, ) } + + #[test] + fn raw_ref_reborrow_is_safe() { + check_diagnostics( + r#" +fn main() { + let ptr: *mut i32; + let _addr = &raw const *ptr; + + let local = 1; + let ptr = &local as *const i32; + let _addr = &raw const *ptr; +} +"#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 656bedff1a8..4accd181ca4 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -90,7 +90,9 @@ fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<A make::ty("()") }; - if !is_editable_crate(target_module.krate(), ctx.sema.db) { + if !is_editable_crate(target_module.krate(), ctx.sema.db) + || SyntaxKind::from_keyword(field_name, ctx.edition).is_some() + { return None; } @@ -501,4 +503,19 @@ fn main() {} "#, ) } + + #[test] + fn regression_18683() { + 
check_diagnostics( + r#" +struct S; +impl S { + fn f(self) { + self.self + // ^^^^ error: no field `self` on type `S` + } +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs index 68f14a97f59..4f64dabeb52 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs @@ -7,20 +7,19 @@ pub(crate) fn unresolved_ident( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedIdent, ) -> Diagnostic { - Diagnostic::new_with_syntax_node_ptr( - ctx, - DiagnosticCode::RustcHardError("E0425"), - "no such value in this scope", - d.expr_or_pat.map(Into::into), - ) - .experimental() + let mut range = + ctx.sema.diagnostics_display_range(d.node.map(|(node, _)| node.syntax_node_ptr())); + if let Some(in_node_range) = d.node.value.1 { + range.range = in_node_range + range.range.start(); + } + Diagnostic::new(DiagnosticCode::RustcHardError("E0425"), "no such value in this scope", range) + .experimental() } #[cfg(test)] mod tests { use crate::tests::check_diagnostics; - // FIXME: This should show a diagnostic #[test] fn feature() { check_diagnostics( @@ -28,6 +27,7 @@ mod tests { //- minicore: fmt fn main() { format_args!("{unresolved}"); + // ^^^^^^^^^^ error: no such value in this scope } "#, ) diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 3bbbd36c1b1..d385e453e21 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -16,7 +16,7 @@ use ide_db::{ }; use itertools::Itertools; use span::{Edition, TextSize}; -use stdx::{always, format_to}; +use stdx::format_to; use syntax::{ ast::{self, AstNode}, SmolStr, SyntaxNode, ToSmolStr, @@ -130,14 +130,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { // In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named runnables. 
let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default(); let mut add_opt = |runnable: Option<Runnable>, def| { - if let Some(runnable) = runnable.filter(|runnable| { - always!( - runnable.nav.file_id == file_id, - "tried adding a runnable pointing to a different file: {:?} for {:?}", - runnable.kind, - file_id - ) - }) { + if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) { if let Some(def) = def { let file_id = match def { Definition::Module(it) => { @@ -161,13 +154,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { Definition::SelfType(impl_) => runnable_impl(&sema, &impl_), _ => None, }; - add_opt( - runnable - .or_else(|| module_def_doctest(sema.db, def)) - // #[macro_export] mbe macros are declared in the root, while their definition may reside in a different module - .filter(|it| it.nav.file_id == file_id), - Some(def), - ); + add_opt(runnable.or_else(|| module_def_doctest(sema.db, def)), Some(def)); if let Definition::SelfType(impl_) = def { impl_.items(db).into_iter().for_each(|assoc| { let runnable = match assoc { diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs index 3d9146cc4c7..d37318ff457 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs @@ -174,6 +174,9 @@ fn on_delimited_node_typed( kinds: &[fn(SyntaxKind) -> bool], ) -> Option<TextEdit> { let t = reparsed.syntax().token_at_offset(offset).right_biased()?; + if t.prev_token().map_or(false, |t| t.kind().is_any_identifier()) { + return None; + } let (filter, node) = t .parent_ancestors() .take_while(|n| n.text_range().start() == offset) @@ -1092,6 +1095,22 @@ fn f() { } #[test] + fn preceding_whitespace_is_significant_for_closing_brackets() { + type_char_noop( + '(', + r#" +fn f() { a.b$0if true {} } +"#, + ); + type_char_noop( + '(', + r#" +fn f() { foo$0{} } +"#, + ); + } + + #[test] fn adds_closing_parenthesis_for_pat() { type_char( '(', diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index aa64f570ed5..1b2162dad0f 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -487,7 +487,7 @@ impl ProcMacroExpander for Expander { match self.0.expand( subtree, attrs, - env.clone(), + env.clone().into(), def_site, call_site, mixed_site, diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs index 8ece5af527d..0ac11371c54 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs @@ -72,8 +72,19 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool, is_in_ext // macro_rules! () // macro_rules! [] if paths::is_use_path_start(p) { - macro_call(p, m); - return; + paths::use_path(p); + // Do not create a MACRO_CALL node here if this isn't a macro call, this causes problems with completion. 
+ + // test_err path_item_without_excl + // foo + if p.at(T![!]) { + macro_call(p, m); + return; + } else { + m.complete(p, ERROR); + p.error("expected an item"); + return; + } } m.abandon(p); @@ -410,8 +421,7 @@ fn fn_(p: &mut Parser<'_>, m: Marker) { } fn macro_call(p: &mut Parser<'_>, m: Marker) { - assert!(paths::is_use_path_start(p)); - paths::use_path(p); + assert!(p.at(T![!])); match macro_call_after_excl(p) { BlockLike::Block => (), BlockLike::NotBlock => { diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs index 7076e03ba4b..6cad71093fd 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs @@ -30,22 +30,20 @@ fn source_file() { TopEntryPoint::SourceFile, "@error@", expect![[r#" - SOURCE_FILE - ERROR - AT "@" - MACRO_CALL - PATH - PATH_SEGMENT - NAME_REF - IDENT "error" - ERROR - AT "@" - error 0: expected an item - error 6: expected BANG - error 6: expected `{`, `[`, `(` - error 6: expected SEMICOLON - error 6: expected an item - "#]], + SOURCE_FILE + ERROR + AT "@" + ERROR + PATH + PATH_SEGMENT + NAME_REF + IDENT "error" + ERROR + AT "@" + error 0: expected an item + error 6: expected an item + error 6: expected an item + "#]], ); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index 003b7fda947..b9f87b6af24 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -775,6 +775,10 @@ mod err { run_and_expect_errors("test_data/parser/inline/err/missing_fn_param_type.rs"); } #[test] + fn path_item_without_excl() { + run_and_expect_errors("test_data/parser/inline/err/path_item_without_excl.rs"); + } + #[test] fn pointer_type_no_mutability() { run_and_expect_errors("test_data/parser/inline/err/pointer_type_no_mutability.rs"); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast index ec6c3151005..3159a15a3b1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast @@ -10,20 +10,20 @@ SOURCE_FILE USE_KW "use" ERROR SLASH "/" - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF IDENT "bin" ERROR SLASH "/" - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF IDENT "env" WHITESPACE " " - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF @@ -33,13 +33,7 @@ error 23: expected `[` error 23: expected an item error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier error 28: expected SEMICOLON -error 31: expected BANG -error 31: expected `{`, `[`, `(` -error 31: expected SEMICOLON error 31: expected an item -error 35: expected BANG -error 35: expected `{`, `[`, `(` -error 35: expected SEMICOLON -error 41: expected BANG -error 41: expected `{`, `[`, `(` -error 41: expected SEMICOLON +error 31: expected an item +error 35: expected an item +error 41: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast index 60b2fe98755..2a296fe4aa0 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast @@ -14,14 +14,15 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n\n" - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF IDENT "bar" - TOKEN_TREE - L_PAREN "(" - R_PAREN ")" + ERROR + L_PAREN "(" + ERROR + R_PAREN ")" WHITESPACE " " ERROR L_CURLY "{" @@ -75,6 +76,7 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 17: expected BANG -error 19: expected SEMICOLON +error 17: expected an item +error 17: expected an item +error 18: expected an item error 20: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast index b485c71ab39..8c8debb8b09 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast @@ -46,7 +46,7 @@ SOURCE_FILE ERROR AT "@" WHITESPACE " " - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF @@ -72,9 +72,7 @@ error 67: expected R_ANGLE error 67: expected R_PAREN error 67: expected SEMICOLON error 67: expected an item -error 72: expected BANG -error 72: expected `{`, `[`, `(` -error 72: expected SEMICOLON +error 72: expected an item error 72: expected an item error 73: expected an item error 79: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast index 76464bf7cc2..d6e3219c395 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast @@ -26,14 +26,15 @@ SOURCE_FILE ERROR FN_KW "fn" WHITESPACE " " - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF IDENT "bar" - TOKEN_TREE - L_PAREN "(" - R_PAREN ")" + ERROR + L_PAREN "(" + ERROR + R_PAREN ")" WHITESPACE " " ERROR L_CURLY "{" @@ -43,6 +44,7 @@ error 6: expected fn, trait or impl error 38: expected a name error 40: missing type for `const` or `static` error 40: expected SEMICOLON -error 44: expected BANG -error 46: expected SEMICOLON +error 44: expected an item +error 44: expected an item +error 45: expected an item error 47: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0055_impl_use.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0055_impl_use.rast index 751f007df94..87a8b519d7e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0055_impl_use.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0055_impl_use.rast @@ -12,15 +12,16 @@ SOURCE_FILE ERROR USE_KW "use" WHITESPACE " " - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF IDENT "std" + ERROR SEMICOLON ";" WHITESPACE "\n" error 8: expected R_ANGLE error 8: expected type error 11: expected `{` -error 15: expected BANG -error 15: expected `{`, `[`, `(` +error 15: expected an item +error 15: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast index 9609ece77df..f8a7d0e552c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast @@ -1,5 +1,5 @@ SOURCE_FILE - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF @@ -22,7 +22,7 @@ SOURCE_FILE ERROR ASYNC_KW "async" WHITESPACE " " - MACRO_CALL + ERROR PATH PATH_SEGMENT NAME_REF @@ -42,10 +42,6 @@ SOURCE_FILE L_CURLY "{" R_CURLY "}" WHITESPACE "\n" -error 3: expected BANG -error 3: expected `{`, `[`, `(` -error 3: expected SEMICOLON +error 3: expected an item error 24: expected fn, trait or impl -error 28: expected BANG -error 28: expected `{`, `[`, `(` -error 28: expected SEMICOLON +error 28: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/path_item_without_excl.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/path_item_without_excl.rast new file mode 100644 index 00000000000..a22dff1a679 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/path_item_without_excl.rast @@ -0,0 +1,8 @@ +SOURCE_FILE + ERROR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + WHITESPACE "\n" +error 3: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/path_item_without_excl.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/path_item_without_excl.rs new file mode 100644 index 00000000000..257cc5642cb --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/path_item_without_excl.rs @@ -0,0 +1 @@ +foo diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml index d4b0a54ed64..f0dafab70c1 100644 --- a/src/tools/rust-analyzer/crates/paths/Cargo.toml +++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml @@ -14,10 +14,9 @@ doctest = false [dependencies] camino.workspace = true -serde = { workspace = true, optional = true } [features] -serde1 = ["camino/serde1", "dep:serde"] +serde1 = ["camino/serde1"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index 84b877f026b..dac8e094357 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] serde.workspace = true +serde_derive.workspace = true serde_json = { workspace = true, features = ["unbounded_depth"] } tracing.workspace = true rustc-hash.workspace = true @@ -23,11 +24,9 @@ indexmap.workspace = true paths = { workspace = true, features = ["serde1"] } tt.workspace = true stdx.workspace = true -# Ideally this crate would not depend on salsa things, but we need span information here which wraps -# InternIds for the syntax context -span.workspace = true -# only here due to the `Env` newtype :/ -base-db.workspace = true +# span = {workspace = true, default-features = false} does not work +span = { path = "../span", version = "0.0.0", default-features = false} + intern.workspace = true [lints] diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 011baad65f7..e54d501b94c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -9,7 +9,6 @@ pub mod json; pub mod msg; mod process; -use base_db::Env; use paths::{AbsPath, AbsPathBuf}; use span::Span; use std::{fmt, io, sync::Arc}; @@ -148,7 +147,7 @@ impl ProcMacro { &self, subtree: &tt::Subtree<Span>, 
attr: Option<&tt::Subtree<Span>>, - env: Env, + env: Vec<(String, String)>, def_site: Span, call_site: Span, mixed_site: Span, @@ -179,7 +178,7 @@ impl ProcMacro { }, }, lib: self.dylib_path.to_path_buf().into(), - env: env.into(), + env, current_dir, }; diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index 883528558d9..bbd9f582df9 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -4,7 +4,8 @@ pub(crate) mod flat; use std::io::{self, BufRead, Write}; use paths::Utf8PathBuf; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::de::DeserializeOwned; +use serde_derive::{Deserialize, Serialize}; use crate::ProcMacroKind; @@ -123,7 +124,7 @@ impl ExpnGlobals { } } -pub trait Message: Serialize + DeserializeOwned { +pub trait Message: serde::Serialize + DeserializeOwned { fn read<R: BufRead>( from_proto: ProtocolRead<R>, inp: &mut R, diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index af3412e90e4..ce4b060fca5 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -39,10 +39,10 @@ use std::collections::VecDeque; use intern::Symbol; use rustc_hash::FxHashMap; -use serde::{Deserialize, Serialize}; +use serde_derive::{Deserialize, Serialize}; use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange}; -use crate::msg::EXTENDED_LEAF_DATA; +use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; pub type SpanDataIndexMap = indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; @@ -145,7 +145,11 @@ impl FlatTree { w.write(subtree); FlatTree { - subtree: write_vec(w.subtree, SubtreeRepr::write), + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + write_vec(w.subtree, SubtreeRepr::write_with_close_span) + } else { + write_vec(w.subtree, SubtreeRepr::write) + }, literal: if version >= EXTENDED_LEAF_DATA { write_vec(w.literal, LiteralRepr::write_with_kind) } else { @@ -179,7 +183,11 @@ impl FlatTree { w.write(subtree); FlatTree { - subtree: write_vec(w.subtree, SubtreeRepr::write), + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + write_vec(w.subtree, SubtreeRepr::write_with_close_span) + } else { + write_vec(w.subtree, SubtreeRepr::write) + }, literal: if version >= EXTENDED_LEAF_DATA { write_vec(w.literal, LiteralRepr::write_with_kind) } else { @@ -202,7 +210,11 @@ impl FlatTree { span_data_table: &SpanDataIndexMap, ) -> tt::Subtree<Span> { Reader { - subtree: read_vec(self.subtree, SubtreeRepr::read), + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + read_vec(self.subtree, SubtreeRepr::read_with_close_span) + } else { + read_vec(self.subtree, SubtreeRepr::read) + }, literal: if version >= EXTENDED_LEAF_DATA { read_vec(self.literal, LiteralRepr::read_with_kind) } else { @@ -224,7 +236,11 @@ impl FlatTree { pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> { Reader { - subtree: read_vec(self.subtree, SubtreeRepr::read), + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + read_vec(self.subtree, SubtreeRepr::read_with_close_span) + } else { + read_vec(self.subtree, SubtreeRepr::read) + }, literal: if version >= EXTENDED_LEAF_DATA { read_vec(self.literal, LiteralRepr::read_with_kind) } else { @@ -257,7 +273,26 @@ fn write_vec<T, F: Fn(T) -> 
[u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u } impl SubtreeRepr { - fn write(self) -> [u32; 5] { + fn write(self) -> [u32; 4] { + let kind = match self.kind { + tt::DelimiterKind::Invisible => 0, + tt::DelimiterKind::Parenthesis => 1, + tt::DelimiterKind::Brace => 2, + tt::DelimiterKind::Bracket => 3, + }; + [self.open.0, kind, self.tt[0], self.tt[1]] + } + fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr { + let kind = match kind { + 0 => tt::DelimiterKind::Invisible, + 1 => tt::DelimiterKind::Parenthesis, + 2 => tt::DelimiterKind::Brace, + 3 => tt::DelimiterKind::Bracket, + other => panic!("bad kind {other}"), + }; + SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] } + } + fn write_with_close_span(self) -> [u32; 5] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, @@ -266,7 +301,7 @@ impl SubtreeRepr { }; [self.open.0, self.close.0, kind, self.tt[0], self.tt[1]] } - fn read([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr { + fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index b1e35b7a08b..4045e25fdf1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -56,25 +56,8 @@ impl ProcMacroProcessSrv { match srv.version_check() { Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( io::ErrorKind::Other, - format!( - "The version of the proc-macro server ({v}) in your Rust toolchain \ - is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}). -\ - This will prevent proc-macro expansion from working. \ - Please consider updating your rust-analyzer to ensure compatibility with your \ - current toolchain." - ), - )), - Ok(v) if v < RUST_ANALYZER_SPAN_SUPPORT => Err(io::Error::new( - io::ErrorKind::Other, - format!( - "The version of the proc-macro server ({v}) in your Rust toolchain \ - is too old and no longer supported by your rust-analyzer which requires\ - version {RUST_ANALYZER_SPAN_SUPPORT} or higher. -\ - This will prevent proc-macro expansion from working. \ - Please consider updating your toolchain or downgrading your rust-analyzer \ - to ensure compatibility with your current toolchain." + format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}). + This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain." 
), )), Ok(v) => { @@ -89,10 +72,10 @@ impl ProcMacroProcessSrv { tracing::info!("Proc-macro server span mode: {:?}", srv.mode); Ok(srv) } - Err(e) => Err(io::Error::new( - io::ErrorKind::Other, - format!("Failed to fetch proc-macro server version: {e}"), - )), + Err(e) => { + tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0"); + create_srv(false) + } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 4fabcc90067..98385969459 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -21,8 +21,8 @@ stdx.workspace = true tt.workspace = true syntax-bridge.workspace = true paths.workspace = true -base-db.workspace = true -span.workspace = true +# span = {workspace = true, default-features = false} does not work +span = { path = "../span", version = "0.0.0", default-features = false} proc-macro-api.workspace = true intern.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 68e0e1ba554..ed647950e66 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -19,6 +19,7 @@ rustc-hash.workspace = true semver.workspace = true serde_json.workspace = true serde.workspace = true +serde_derive.workspace = true tracing.workspace = true triomphe.workspace = true la-arena.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index dc71b13eeec..524323b9736 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -15,7 +15,7 @@ use itertools::Itertools; use la_arena::ArenaMap; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; -use serde::Deserialize; +use serde::Deserialize as _; use toolchain::Tool; use crate::{ diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 4ae3426ed97..ba4946bf0b9 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -8,7 +8,7 @@ use cargo_metadata::{CargoOpt, MetadataCommand}; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; -use serde::Deserialize; +use serde_derive::Deserialize; use serde_json::from_value; use span::Edition; use toolchain::Tool; diff --git a/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml b/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml index f8a3156fe40..57a20be0cad 100644 --- a/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml @@ -20,7 +20,7 @@ parking_lot = "0.12.1" rustc-hash = "2.0.0" smallvec = "1.0.0" oorandom = "11" -triomphe = "0.1.11" +triomphe.workspace = true itertools.workspace = true ra-salsa-macros = { version = "0.0.0", path = "ra-salsa-macros", package = "salsa-macros" } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index 7c8610280b3..fa9ff6b56df 100644 --- 
a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -36,6 +36,7 @@ rayon.workspace = true rustc-hash.workspace = true serde_json = { workspace = true, features = ["preserve_order"] } serde.workspace = true +serde_derive.workspace = true tenthash = "0.4.0" num_cpus = "1.15.0" mimalloc = { version = "0.1.30", default-features = false, optional = true } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 40fd294e72a..b06117f7383 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -644,7 +644,8 @@ config_data! { /// Aliased as `"checkOnSave.targets"`. check_targets | checkOnSave_targets | checkOnSave_target: Option<CheckOnSaveTargets> = None, /// Whether `--workspace` should be passed to `cargo check`. - /// If false, `-p <package>` will be passed instead. + /// If false, `-p <package>` will be passed instead if applicable. In case it is not, no + /// check will be performed. check_workspace: bool = true, /// These proc-macros will be ignored when trying to expand them. diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index 22910ee4c68..0b51dd87fea 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -3,6 +3,7 @@ pub(crate) mod to_proto; use std::mem; +use cargo_metadata::PackageId; use ide::FileId; use ide_db::FxHashMap; use itertools::Itertools; @@ -13,7 +14,8 @@ use triomphe::Arc; use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind}; -pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>; +pub(crate) type CheckFixes = + Arc<IntMap<usize, FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<Fix>>>>>; #[derive(Debug, Default, Clone)] pub struct DiagnosticsMapConfig { @@ -31,7 +33,10 @@ pub(crate) struct DiagnosticCollection { pub(crate) native_syntax: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, pub(crate) native_semantic: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, // FIXME: should be Vec<flycheck::Diagnostic> - pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>, + pub(crate) check: IntMap< + usize, + FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<lsp_types::Diagnostic>>>, + >, pub(crate) check_fixes: CheckFixes, changes: IntSet<FileId>, /// Counter for supplying a new generation number for diagnostics. 
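For orientation, a minimal standalone sketch of the nested per-package layout that `check` and `CheckFixes` take after this change; plain `HashMap`, `String`, and `u32` stand in for `IntMap`, `FxHashMap`, `Arc<PackageId>`, `FileId`, and the LSP diagnostic types, so every name below is illustrative rather than taken from the patch:

use std::collections::HashMap;

type FlycheckId = usize;
type PackageKey = Option<String>; // None: diagnostics that carry no package id (e.g. plain rustc JSON)
type FileKey = u32;
type Diag = String;

#[derive(Default)]
struct CheckDiagnostics {
    // flycheck instance -> package -> file -> diagnostics, matching the shape of
    // `DiagnosticCollection::check` / `CheckFixes` in the hunk above.
    by_flycheck: HashMap<FlycheckId, HashMap<PackageKey, HashMap<FileKey, Vec<Diag>>>>,
}

impl CheckDiagnostics {
    fn add(&mut self, id: FlycheckId, package: PackageKey, file: FileKey, diag: Diag) {
        self.by_flycheck
            .entry(id)
            .or_default()
            .entry(package)
            .or_default()
            .entry(file)
            .or_default()
            .push(diag);
    }

    // Dropping one package's diagnostics leaves every other package checked by the
    // same flycheck instance untouched; the returned files are the ones to re-publish.
    fn clear_for_package(&mut self, id: FlycheckId, package: &PackageKey) -> Vec<FileKey> {
        self.by_flycheck
            .get_mut(&id)
            .and_then(|per_package| per_package.remove(package))
            .map(|per_file| per_file.into_keys().collect())
            .unwrap_or_default()
    }
}

fn main() {
    let mut diags = CheckDiagnostics::default();
    diags.add(0, Some("foo".to_owned()), 1, "warning: unused variable".to_owned());
    diags.add(0, Some("bar".to_owned()), 2, "error: type mismatch".to_owned());
    let changed = diags.clear_for_package(0, &Some("foo".to_owned()));
    assert_eq!(changed, vec![1]); // only `foo`'s file is marked as changed
}
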
@@ -50,18 +55,37 @@ pub(crate) struct Fix { impl DiagnosticCollection { pub(crate) fn clear_check(&mut self, flycheck_id: usize) { - if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) { - it.clear(); - } - if let Some(it) = self.check.get_mut(&flycheck_id) { - self.changes.extend(it.drain().map(|(key, _value)| key)); + let Some(check) = self.check.get_mut(&flycheck_id) else { + return; + }; + self.changes.extend(check.drain().flat_map(|(_, v)| v.into_keys())); + if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) { + fixes.clear(); } } pub(crate) fn clear_check_all(&mut self) { Arc::make_mut(&mut self.check_fixes).clear(); - self.changes - .extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key))) + self.changes.extend( + self.check.values_mut().flat_map(|it| it.drain().flat_map(|(_, v)| v.into_keys())), + ) + } + + pub(crate) fn clear_check_for_package( + &mut self, + flycheck_id: usize, + package_id: Arc<PackageId>, + ) { + let Some(check) = self.check.get_mut(&flycheck_id) else { + return; + }; + let package_id = Some(package_id); + if let Some(checks) = check.remove(&package_id) { + self.changes.extend(checks.into_keys()); + } + if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) { + fixes.remove(&package_id); + } } pub(crate) fn clear_native_for(&mut self, file_id: FileId) { @@ -73,11 +97,19 @@ impl DiagnosticCollection { pub(crate) fn add_check_diagnostic( &mut self, flycheck_id: usize, + package_id: &Option<Arc<PackageId>>, file_id: FileId, diagnostic: lsp_types::Diagnostic, fix: Option<Box<Fix>>, ) { - let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default(); + let diagnostics = self + .check + .entry(flycheck_id) + .or_default() + .entry(package_id.clone()) + .or_default() + .entry(file_id) + .or_default(); for existing_diagnostic in diagnostics.iter() { if are_diagnostics_equal(existing_diagnostic, &diagnostic) { return; @@ -86,7 +118,14 @@ impl DiagnosticCollection { if let Some(fix) = fix { let check_fixes = Arc::make_mut(&mut self.check_fixes); - check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().push(*fix); + check_fixes + .entry(flycheck_id) + .or_default() + .entry(package_id.clone()) + .or_default() + .entry(file_id) + .or_default() + .push(*fix); } diagnostics.push(diagnostic); self.changes.insert(file_id); @@ -135,7 +174,12 @@ impl DiagnosticCollection { ) -> impl Iterator<Item = &lsp_types::Diagnostic> { let native_syntax = self.native_syntax.get(&file_id).into_iter().flat_map(|(_, d)| d); let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d); - let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten(); + let check = self + .check + .values() + .flat_map(|it| it.values()) + .filter_map(move |it| it.get(&file_id)) + .flatten(); native_syntax.chain(native_semantic).chain(check) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index b035d779a7d..53c145f884e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -1,17 +1,20 @@ //! Flycheck provides the functionality needed to run `cargo check` to provide //! LSP diagnostics based on the output of the command. 
-use std::{fmt, io, process::Command, time::Duration}; +use std::{fmt, io, mem, process::Command, time::Duration}; +use cargo_metadata::PackageId; use crossbeam_channel::{select_biased, unbounded, Receiver, Sender}; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; -use serde::Deserialize; +use serde::Deserialize as _; +use serde_derive::Deserialize; pub(crate) use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, }; use toolchain::Tool; +use triomphe::Arc; use crate::command::{CommandHandle, ParseFromLine}; @@ -150,10 +153,19 @@ impl FlycheckHandle { pub(crate) enum FlycheckMessage { /// Request adding a diagnostic with fixes included to a file - AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic }, + AddDiagnostic { + id: usize, + workspace_root: Arc<AbsPathBuf>, + diagnostic: Diagnostic, + package_id: Option<Arc<PackageId>>, + }, - /// Request clearing all previous diagnostics - ClearDiagnostics { id: usize }, + /// Request clearing all outdated diagnostics. + ClearDiagnostics { + id: usize, + /// The package whose diagnostics to clear, or if unspecified, all diagnostics. + package_id: Option<Arc<PackageId>>, + }, /// Request check progress notification to client Progress { @@ -166,15 +178,18 @@ pub(crate) enum FlycheckMessage { impl fmt::Debug for FlycheckMessage { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => f + FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => f .debug_struct("AddDiagnostic") .field("id", id) .field("workspace_root", workspace_root) + .field("package_id", package_id) .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code)) .finish(), - FlycheckMessage::ClearDiagnostics { id } => { - f.debug_struct("ClearDiagnostics").field("id", id).finish() - } + FlycheckMessage::ClearDiagnostics { id, package_id } => f + .debug_struct("ClearDiagnostics") + .field("id", id) + .field("package_id", package_id) + .finish(), FlycheckMessage::Progress { id, progress } => { f.debug_struct("Progress").field("id", id).field("progress", progress).finish() } @@ -200,12 +215,13 @@ enum StateChange { struct FlycheckActor { /// The workspace id of this flycheck instance. id: usize, + sender: Sender<FlycheckMessage>, config: FlycheckConfig, manifest_path: Option<AbsPathBuf>, /// Either the workspace root of the workspace we are flychecking, /// or the project root of the project. - root: AbsPathBuf, + root: Arc<AbsPathBuf>, sysroot_root: Option<AbsPathBuf>, /// CargoHandle exists to wrap around the communication needed to be able to /// run `cargo check` without blocking. Currently the Rust standard library @@ -215,8 +231,13 @@ struct FlycheckActor { command_handle: Option<CommandHandle<CargoCheckMessage>>, /// The receiver side of the channel mentioned above. 
command_receiver: Option<Receiver<CargoCheckMessage>>, + package_status: FxHashMap<Arc<PackageId>, DiagnosticReceived>, +} - status: FlycheckStatus, +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +enum DiagnosticReceived { + Yes, + No, } #[allow(clippy::large_enum_variant)] @@ -225,13 +246,6 @@ enum Event { CheckEvent(Option<CargoCheckMessage>), } -#[derive(PartialEq)] -enum FlycheckStatus { - Started, - DiagnosticSent, - Finished, -} - pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; impl FlycheckActor { @@ -249,11 +263,11 @@ impl FlycheckActor { sender, config, sysroot_root, - root: workspace_root, + root: Arc::new(workspace_root), manifest_path, command_handle: None, command_receiver: None, - status: FlycheckStatus::Finished, + package_status: FxHashMap::default(), } } @@ -306,13 +320,11 @@ impl FlycheckActor { self.command_handle = Some(command_handle); self.command_receiver = Some(receiver); self.report_progress(Progress::DidStart); - self.status = FlycheckStatus::Started; } Err(error) => { self.report_progress(Progress::DidFailToRestart(format!( "Failed to run the following command: {formatted_command} error={error}" ))); - self.status = FlycheckStatus::Finished; } } } @@ -332,37 +344,62 @@ impl FlycheckActor { error ); } - if self.status == FlycheckStatus::Started { - self.send(FlycheckMessage::ClearDiagnostics { id: self.id }); + if self.package_status.is_empty() { + // We finished without receiving any diagnostics. + // That means all of them are stale. + self.send(FlycheckMessage::ClearDiagnostics { + id: self.id, + package_id: None, + }); + } else { + for (package_id, status) in mem::take(&mut self.package_status) { + if let DiagnosticReceived::No = status { + self.send(FlycheckMessage::ClearDiagnostics { + id: self.id, + package_id: Some(package_id), + }); + } + } } + self.report_progress(Progress::DidFinish(res)); - self.status = FlycheckStatus::Finished; } Event::CheckEvent(Some(message)) => match message { CargoCheckMessage::CompilerArtifact(msg) => { tracing::trace!( flycheck_id = self.id, artifact = msg.target.name, + package_id = msg.package_id.repr, "artifact received" ); self.report_progress(Progress::DidCheckCrate(msg.target.name)); + self.package_status + .entry(Arc::new(msg.package_id)) + .or_insert(DiagnosticReceived::No); } - - CargoCheckMessage::Diagnostic(msg) => { + CargoCheckMessage::Diagnostic { diagnostic, package_id } => { tracing::trace!( flycheck_id = self.id, - message = msg.message, + message = diagnostic.message, + package_id = package_id.as_ref().map(|it| &it.repr), "diagnostic received" ); - if self.status == FlycheckStatus::Started { - self.send(FlycheckMessage::ClearDiagnostics { id: self.id }); + if let Some(package_id) = &package_id { + if !self.package_status.contains_key(package_id) { + self.package_status + .insert(package_id.clone(), DiagnosticReceived::Yes); + self.send(FlycheckMessage::ClearDiagnostics { + id: self.id, + package_id: Some(package_id.clone()), + }); + } } self.send(FlycheckMessage::AddDiagnostic { id: self.id, + package_id, workspace_root: self.root.clone(), - diagnostic: msg, + diagnostic, }); - self.status = FlycheckStatus::DiagnosticSent; } }, } @@ -380,7 +417,7 @@ impl FlycheckActor { command_handle.cancel(); self.command_receiver.take(); self.report_progress(Progress::DidCancel); - self.status = FlycheckStatus::Finished; + self.package_status.clear(); } } @@ -400,7 +437,7 @@ impl FlycheckActor { cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root)); } cmd.arg(command); - 
cmd.current_dir(&self.root); + cmd.current_dir(&*self.root); match package { Some(pkg) => cmd.arg("-p").arg(pkg), @@ -442,11 +479,11 @@ impl FlycheckActor { match invocation_strategy { InvocationStrategy::Once => { - cmd.current_dir(&self.root); + cmd.current_dir(&*self.root); } InvocationStrategy::PerWorkspace => { // FIXME: cmd.current_dir(&affected_workspace); - cmd.current_dir(&self.root); + cmd.current_dir(&*self.root); } } @@ -486,7 +523,7 @@ impl FlycheckActor { #[allow(clippy::large_enum_variant)] enum CargoCheckMessage { CompilerArtifact(cargo_metadata::Artifact), - Diagnostic(Diagnostic), + Diagnostic { diagnostic: Diagnostic, package_id: Option<Arc<PackageId>> }, } impl ParseFromLine for CargoCheckMessage { @@ -501,11 +538,16 @@ impl ParseFromLine for CargoCheckMessage { Some(CargoCheckMessage::CompilerArtifact(artifact)) } cargo_metadata::Message::CompilerMessage(msg) => { - Some(CargoCheckMessage::Diagnostic(msg.message)) + Some(CargoCheckMessage::Diagnostic { + diagnostic: msg.message, + package_id: Some(Arc::new(msg.package_id)), + }) } _ => None, }, - JsonMessage::Rustc(message) => Some(CargoCheckMessage::Diagnostic(message)), + JsonMessage::Rustc(message) => { + Some(CargoCheckMessage::Diagnostic { diagnostic: message, package_id: None }) + } }; } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 29be53cee1d..dd13bdba4cb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -92,7 +92,7 @@ pub(crate) struct GlobalState { // status pub(crate) shutdown_requested: bool, - pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>, + pub(crate) last_reported_status: lsp_ext::ServerStatusParams, // proc macros pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroServer>]>, @@ -238,7 +238,11 @@ impl GlobalState { mem_docs: MemDocs::default(), semantic_tokens_cache: Arc::new(Default::default()), shutdown_requested: false, - last_reported_status: None, + last_reported_status: lsp_ext::ServerStatusParams { + health: lsp_ext::Health::Ok, + quiescent: true, + message: None, + }, source_root_config: SourceRootConfig::default(), local_roots_parent_map: Arc::new(FxHashMap::default()), config_errors: Default::default(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index 03759b036b4..2aa4ffbe1dc 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -126,7 +126,7 @@ impl RequestDispatcher<'_> { /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not /// ready this will return a `default` constructed [`R::Result`]. 
- pub(crate) fn on_with<R>( + pub(crate) fn on_with_vfs_default<R>( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, default: impl FnOnce() -> R::Result, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 5e7262b14ca..c0231fd04e5 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -189,7 +189,7 @@ pub(crate) fn handle_did_save_text_document( if !state.config.check_on_save(Some(sr)) || run_flycheck(state, vfs_path) { return Ok(()); } - } else if state.config.check_on_save(None) { + } else if state.config.check_on_save(None) && state.config.flycheck_workspace(None) { // No specific flycheck was triggered, so let's trigger all of them. for flycheck in state.flycheck.iter() { flycheck.restart_workspace(None); @@ -293,7 +293,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let file_id = state.vfs.read().0.file_id(&vfs_path); if let Some(file_id) = file_id { let world = state.snapshot(); - let source_root_id = world.analysis.source_root_id(file_id).ok(); + let may_flycheck_workspace = state.config.flycheck_workspace(None); let mut updated = false; let task = move || -> std::result::Result<(), ide::Cancelled> { // Is the target binary? If so we let flycheck run only for the workspace that contains the crate. @@ -375,21 +375,22 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let saved_file = vfs_path.as_path().map(|p| p.to_owned()); // Find and trigger corresponding flychecks - for flycheck in world.flycheck.iter() { + 'flychecks: for flycheck in world.flycheck.iter() { for (id, package) in workspace_ids.clone() { if id == flycheck.id() { updated = true; - match package.filter(|_| !world.config.flycheck_workspace(source_root_id)) { - Some(package) => flycheck - .restart_for_package(package, target.clone().map(TupleExt::head)), - None => flycheck.restart_workspace(saved_file.clone()), + if may_flycheck_workspace { + flycheck.restart_workspace(saved_file.clone()) + } else if let Some(package) = package { + flycheck + .restart_for_package(package, target.clone().map(TupleExt::head)) } - continue; + continue 'flychecks; } } } // No specific flycheck was triggered, so let's trigger all of them. - if !updated { + if !updated && may_flycheck_workspace { for flycheck in world.flycheck.iter() { flycheck.restart_workspace(saved_file.clone()); } @@ -432,8 +433,10 @@ pub(crate) fn handle_run_flycheck( } } // No specific flycheck was triggered, so let's trigger all of them. 
- for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(None); + if state.config.flycheck_workspace(None) { + for flycheck in state.flycheck.iter() { + flycheck.restart_workspace(None); + } } Ok(()) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index fa78be5cb60..8f2bf80ea26 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -481,27 +481,28 @@ pub(crate) fn handle_document_diagnostics( snap: GlobalStateSnapshot, params: lsp_types::DocumentDiagnosticParams, ) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> { - const EMPTY: lsp_types::DocumentDiagnosticReportResult = + let empty = || { lsp_types::DocumentDiagnosticReportResult::Report( lsp_types::DocumentDiagnosticReport::Full( lsp_types::RelatedFullDocumentDiagnosticReport { related_documents: None, full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { - result_id: None, + result_id: Some("rust-analyzer".to_owned()), items: vec![], }, }, ), - ); + ) + }; let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let source_root = snap.analysis.source_root_id(file_id)?; if !snap.analysis.is_local_source_root(source_root)? { - return Ok(EMPTY); + return Ok(empty()); } let config = snap.config.diagnostics(Some(source_root)); if !config.enabled { - return Ok(EMPTY); + return Ok(empty()); } let line_index = snap.file_line_index(file_id)?; let supports_related = snap.config.text_document_diagnostic_related_document_support(); @@ -529,7 +530,7 @@ pub(crate) fn handle_document_diagnostics( Ok(lsp_types::DocumentDiagnosticReportResult::Report( lsp_types::DocumentDiagnosticReport::Full(lsp_types::RelatedFullDocumentDiagnosticReport { full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { - result_id: None, + result_id: Some("rust-analyzer".to_owned()), items: diagnostics.collect(), }, related_documents: related_documents.is_empty().not().then(|| { @@ -539,7 +540,10 @@ pub(crate) fn handle_document_diagnostics( ( to_proto::url(&snap, id), lsp_types::DocumentDiagnosticReportKind::Full( - lsp_types::FullDocumentDiagnosticReport { result_id: None, items }, + lsp_types::FullDocumentDiagnosticReport { + result_id: Some("rust-analyzer".to_owned()), + items, + }, ), ) }) @@ -1144,7 +1148,7 @@ pub(crate) fn handle_completion_resolve( let Some(corresponding_completion) = completions.into_iter().find(|completion_item| { // Avoid computing hashes for items that obviously do not match // r-a might append a detail-based suffix to the label, so we cannot check for equality - original_completion.label.starts_with(completion_item.label.as_str()) + original_completion.label.starts_with(completion_item.label.primary.as_str()) && resolve_data_hash == completion_item_hash(completion_item, resolve_data.for_ref) }) else { return Ok(original_completion); @@ -1441,7 +1445,13 @@ pub(crate) fn handle_code_action( } // Fixes from `cargo check`. - for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() { + for fix in snap + .check_fixes + .values() + .flat_map(|it| it.values()) + .filter_map(|it| it.get(&frange.file_id)) + .flatten() + { // FIXME: this mapping is awkward and shouldn't exist. Refactor // `snap.check_fixes` to not convert to LSP prematurely. 
let intersect_fix_range = fix diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index 15d60c873fb..e7f5a7f5e78 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -114,8 +114,11 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8; u8::from(item.deprecated), u8::from(item.trigger_call_info), ]); - hasher.update(&item.label); - if let Some(label_detail) = &item.label_detail { + hasher.update(&item.label.primary); + if let Some(label_detail) = &item.label.detail_left { + hasher.update(label_detail); + } + if let Some(label_detail) = &item.label.detail_right { hasher.update(label_detail); } // NB: do not hash edits or source range, as those may change between the time the client sends the resolve request diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index df06270a8b1..c0173d9c247 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -823,8 +823,11 @@ impl Request for OnTypeFormatting { #[derive(Debug, Serialize, Deserialize)] pub struct CompletionResolveData { pub position: lsp_types::TextDocumentPositionParams, + #[serde(skip_serializing_if = "Vec::is_empty", default)] pub imports: Vec<CompletionImport>, + #[serde(skip_serializing_if = "Option::is_none", default)] pub version: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none", default)] pub trigger_character: Option<char>, pub for_ref: bool, pub hash: String, @@ -836,6 +839,7 @@ pub struct InlayHintResolveData { // This is a string instead of a u64 as javascript can't represent u64 fully pub hash: String, pub resolve_range: lsp_types::Range, + #[serde(skip_serializing_if = "Option::is_none", default)] pub version: Option<i32>, } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index 612cb547b41..05e93b4e6ac 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -2,6 +2,7 @@ use std::{ iter::once, mem, + ops::Not as _, sync::atomic::{AtomicU32, Ordering}, }; @@ -353,14 +354,17 @@ fn completion_item( }; let mut lsp_item = lsp_types::CompletionItem { - label: item.label.to_string(), + label: item.label.primary.to_string(), detail, filter_text, kind: Some(completion_item_kind(item.kind)), text_edit, - additional_text_edits: Some(additional_text_edits), + additional_text_edits: additional_text_edits + .is_empty() + .not() + .then_some(additional_text_edits), documentation, - deprecated: Some(item.deprecated), + deprecated: item.deprecated.then_some(item.deprecated), tags, command, insert_text_format, @@ -368,15 +372,17 @@ fn completion_item( }; if config.completion_label_details_support() { + let has_label_details = + item.label.detail_left.is_some() || item.label.detail_right.is_some(); if fields_to_resolve.resolve_label_details { - something_to_resolve |= true; - } else { + something_to_resolve |= has_label_details; + } else if has_label_details { lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails { - detail: item.label_detail.as_ref().map(ToString::to_string), - description: item.detail.clone(), + detail: item.label.detail_left.clone(), + description: 
item.label.detail_right.clone(), }); } - } else if let Some(label_detail) = &item.label_detail { + } else if let Some(label_detail) = &item.label.detail_left { lsp_item.label.push_str(label_detail.as_str()); } @@ -1578,22 +1584,26 @@ pub(crate) fn code_lens( }; let lens_config = snap.config.lens(); - if lens_config.run && client_commands_config.run_single && has_root { - let command = command::run_single(&r, &title); - acc.push(lsp_types::CodeLens { - range: annotation_range, - command: Some(command), - data: None, - }) - } - if lens_config.debug && can_debug && client_commands_config.debug_single { - let command = command::debug_single(&r); - acc.push(lsp_types::CodeLens { - range: annotation_range, - command: Some(command), - data: None, - }) + + if has_root { + if lens_config.run && client_commands_config.run_single { + let command = command::run_single(&r, &title); + acc.push(lsp_types::CodeLens { + range: annotation_range, + command: Some(command), + data: None, + }) + } + if lens_config.debug && can_debug && client_commands_config.debug_single { + let command = command::debug_single(&r); + acc.push(lsp_types::CodeLens { + range: annotation_range, + command: Some(command), + data: None, + }) + } } + if lens_config.interpret { let command = command::interpret_single(&r); acc.push(lsp_types::CodeLens { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index a34f0a3c929..d97d96d54a0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -408,7 +408,10 @@ impl GlobalState { if self.is_quiescent() { let became_quiescent = !was_quiescent; if became_quiescent { - if self.config.check_on_save(None) { + if self.config.check_on_save(None) + && self.config.flycheck_workspace(None) + && !self.fetch_build_data_queue.op_requested() + { // Project has loaded properly, kick off initial flycheck self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None)); } @@ -656,8 +659,8 @@ impl GlobalState { fn update_status_or_notify(&mut self) { let status = self.current_status(); - if self.last_reported_status.as_ref() != Some(&status) { - self.last_reported_status = Some(status.clone()); + if self.last_reported_status != status { + self.last_reported_status = status.clone(); if self.config.server_status_notification() { self.send_notification::<lsp_ext::ServerStatusNotification>(status); @@ -715,6 +718,7 @@ impl GlobalState { error!("FetchWorkspaceError: {e}"); } self.wants_to_switch = Some("fetched workspace".to_owned()); + self.diagnostics.clear_check_all(); (Progress::End, None) } }; @@ -956,7 +960,7 @@ impl GlobalState { fn handle_flycheck_msg(&mut self, message: FlycheckMessage) { match message { - FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => { + FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => { let snap = self.snapshot(); let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( &self.config.diagnostics_map(None), @@ -968,6 +972,7 @@ impl GlobalState { match url_to_file_id(&self.vfs.read().0, &diag.url) { Ok(file_id) => self.diagnostics.add_check_diagnostic( id, + &package_id, file_id, diag.diagnostic, diag.fix, @@ -981,9 +986,12 @@ impl GlobalState { }; } } - - FlycheckMessage::ClearDiagnostics { id } => self.diagnostics.clear_check(id), - + FlycheckMessage::ClearDiagnostics { id, package_id: None } => { + self.diagnostics.clear_check(id) 
+ } + FlycheckMessage::ClearDiagnostics { id, package_id: Some(package_id) } => { + self.diagnostics.clear_check_for_package(id, package_id) + } FlycheckMessage::Progress { id, progress } => { let (state, message) = match progress { flycheck::Progress::DidStart => (Progress::Begin, None), @@ -1090,12 +1098,12 @@ impl GlobalState { .on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range) // FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change. // All other request handlers - .on_with::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report( + .on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report( lsp_types::DocumentDiagnosticReport::Full( lsp_types::RelatedFullDocumentDiagnosticReport { related_documents: None, full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { - result_id: None, + result_id: Some("rust-analyzer".to_owned()), items: vec![], }, }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 4549735fef8..3444773695b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -70,7 +70,6 @@ impl GlobalState { /// are ready to do semantic work. pub(crate) fn is_quiescent(&self) -> bool { self.vfs_done - && self.last_reported_status.is_some() && !self.fetch_workspaces_queue.op_in_progress() && !self.fetch_build_data_queue.op_in_progress() && !self.fetch_proc_macros_queue.op_in_progress() diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs index 5e43a3c60d8..2fd52547336 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs @@ -5,7 +5,8 @@ use std::process::Command; use crossbeam_channel::Sender; use paths::AbsPath; -use serde::Deserialize; +use serde::Deserialize as _; +use serde_derive::Deserialize; use toolchain::Tool; use crate::{ diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs index f540a33b451..9e35990a5bc 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs @@ -54,7 +54,7 @@ where fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {} fn on_close(&self, id: Id, ctx: Context<'_, S>) { - #[derive(serde::Serialize)] + #[derive(serde_derive::Serialize)] struct JsonDataInner { name: &'static str, elapsed_ms: u128, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 5a88a5515c7..1f52f366c54 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -1,7 +1,7 @@ use std::{ cell::{Cell, RefCell}, env, fs, - sync::{Once, OnceLock}, + sync::Once, time::Duration, }; @@ -141,34 +141,15 @@ impl Project<'_> { /// file in the config dir after server is run, something where our naive approach comes short. 
/// Using a `prelock` allows us to force a lock when we know we need it. pub(crate) fn server_with_lock(self, config_lock: bool) -> Server { - static CONFIG_DIR_LOCK: OnceLock<(Utf8PathBuf, Mutex<()>)> = OnceLock::new(); + static CONFIG_DIR_LOCK: Mutex<()> = Mutex::new(()); let config_dir_guard = if config_lock { Some({ - let (path, mutex) = CONFIG_DIR_LOCK.get_or_init(|| { - let value = TestDir::new().keep().path().to_owned(); - env::set_var("__TEST_RA_USER_CONFIG_DIR", &value); - (value, Mutex::new(())) - }); - #[allow(dyn_drop)] - (mutex.lock(), { - Box::new({ - struct Dropper(Utf8PathBuf); - impl Drop for Dropper { - fn drop(&mut self) { - for entry in fs::read_dir(&self.0).unwrap() { - let path = entry.unwrap().path(); - if path.is_file() { - fs::remove_file(path).unwrap(); - } else if path.is_dir() { - fs::remove_dir_all(path).unwrap(); - } - } - } - } - Dropper(path.clone()) - }) as Box<dyn Drop> - }) + let guard = CONFIG_DIR_LOCK.lock(); + let test_dir = TestDir::new(); + let value = test_dir.path().to_owned(); + env::set_var("__TEST_RA_USER_CONFIG_DIR", &value); + (guard, test_dir) }) } else { None @@ -311,14 +292,12 @@ pub(crate) struct Server { client: Connection, /// XXX: remove the tempdir last dir: TestDir, - #[allow(dyn_drop)] - _config_dir_guard: Option<(MutexGuard<'static, ()>, Box<dyn Drop>)>, + _config_dir_guard: Option<(MutexGuard<'static, ()>, TestDir)>, } impl Server { - #[allow(dyn_drop)] fn new( - config_dir_guard: Option<(MutexGuard<'static, ()>, Box<dyn Drop>)>, + config_dir_guard: Option<(MutexGuard<'static, ()>, TestDir)>, dir: TestDir, config: Config, ) -> Server { diff --git a/src/tools/rust-analyzer/crates/span/Cargo.toml b/src/tools/rust-analyzer/crates/span/Cargo.toml index 569da8082a8..097a056c99a 100644 --- a/src/tools/rust-analyzer/crates/span/Cargo.toml +++ b/src/tools/rust-analyzer/crates/span/Cargo.toml @@ -12,7 +12,7 @@ authors.workspace = true [dependencies] la-arena.workspace = true -ra-salsa.workspace = true +ra-salsa = { workspace = true, optional = true } rustc-hash.workspace = true hashbrown.workspace = true text-size.workspace = true @@ -22,5 +22,8 @@ vfs.workspace = true syntax.workspace = true stdx.workspace = true +[features] +default = ["ra-salsa"] + [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs index 67d7bb9a0de..87a948df550 100644 --- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs @@ -21,6 +21,9 @@ //! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer. use std::fmt; +#[cfg(not(feature = "ra-salsa"))] +use crate::InternId; +#[cfg(feature = "ra-salsa")] use ra_salsa::{InternId, InternValue}; use crate::MacroCallId; @@ -39,6 +42,7 @@ impl fmt::Debug for SyntaxContextId { } } +#[cfg(feature = "ra-salsa")] impl ra_salsa::InternKey for SyntaxContextId { fn from_intern_id(v: ra_salsa::InternId) -> Self { SyntaxContextId(v) @@ -92,6 +96,7 @@ pub struct SyntaxContextData { pub opaque_and_semitransparent: SyntaxContextId, } +#[cfg(feature = "ra-salsa")] impl InternValue for SyntaxContextData { type Key = (SyntaxContextId, Option<MacroCallId>, Transparency); diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index bd270bfe2b1..20c3b087af5 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -1,6 +1,7 @@ //! File and span related types. 
use std::fmt::{self, Write}; +#[cfg(feature = "ra-salsa")] use ra_salsa::InternId; mod ast_id; @@ -261,8 +262,9 @@ pub struct MacroFileId { /// `MacroCallId` identifies a particular macro invocation, like /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct MacroCallId(ra_salsa::InternId); +pub struct MacroCallId(InternId); +#[cfg(feature = "ra-salsa")] impl ra_salsa::InternKey for MacroCallId { fn from_intern_id(v: ra_salsa::InternId) -> Self { MacroCallId(v) @@ -355,3 +357,72 @@ impl HirFileId { } } } + +#[cfg(not(feature = "ra-salsa"))] +mod intern_id_proxy { + use std::fmt; + use std::num::NonZeroU32; + + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub(super) struct InternId { + value: NonZeroU32, + } + + impl InternId { + pub(super) const MAX: u32 = 0xFFFF_FF00; + + pub(super) const unsafe fn new_unchecked(value: u32) -> Self { + debug_assert!(value < InternId::MAX); + let value = unsafe { NonZeroU32::new_unchecked(value + 1) }; + InternId { value } + } + + pub(super) fn as_u32(self) -> u32 { + self.value.get() - 1 + } + + pub(super) fn as_usize(self) -> usize { + self.as_u32() as usize + } + } + + impl From<InternId> for u32 { + fn from(raw: InternId) -> u32 { + raw.as_u32() + } + } + + impl From<InternId> for usize { + fn from(raw: InternId) -> usize { + raw.as_usize() + } + } + + impl From<u32> for InternId { + fn from(id: u32) -> InternId { + assert!(id < InternId::MAX); + unsafe { InternId::new_unchecked(id) } + } + } + + impl From<usize> for InternId { + fn from(id: usize) -> InternId { + assert!(id < (InternId::MAX as usize)); + unsafe { InternId::new_unchecked(id as u32) } + } + } + + impl fmt::Debug for InternId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_usize().fmt(f) + } + } + + impl fmt::Display for InternId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_usize().fmt(f) + } + } +} +#[cfg(not(feature = "ra-salsa"))] +use intern_id_proxy::InternId; diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml index e995ff3b55b..f9a9f40541d 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml @@ -21,7 +21,8 @@ syntax.workspace = true parser.workspace = true tt.workspace = true stdx.workspace = true -span.workspace = true +# span = {workspace = true, default-features = false} does not work +span = { path = "../span", version = "0.0.0", default-features = false} intern.workspace = true [dev-dependencies] diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 30428329dd9..4e2a70d6cd9 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -414,7 +414,7 @@ AsmClobberAbi = 'clobber_abi' '(' ('@string' (',' '@string')* ','?) ')' // option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax" / "raw" AsmOption = 'pure' | 'nomem' | 'readonly' | 'preserves_flags' | 'noreturn' | 'nostack' | 'att_syntax' | 'raw' | 'may_unwind' // options := "options(" option *("," option) [","] ")" -AsmOptions = 'options' '(' AsmOption *(',' AsmOption) ','? ')' +AsmOptions = 'options' '(' (AsmOption (',' AsmOption)*) ','? 
')' AsmLabel = 'label' BlockExpr AsmSym = 'sym' Path AsmConst = 'const' Expr diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 01dcb646b37..3876ef71a07 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -212,8 +212,6 @@ pub struct AsmOptions { } impl AsmOptions { #[inline] - pub fn asm_option(&self) -> Option<AsmOption> { support::child(&self.syntax) } - #[inline] pub fn asm_options(&self) -> AstChildren<AsmOption> { support::children(&self.syntax) } #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index 2aad2cfa361..0e37611a549 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp/ext.rs hash: 14b7fb1309f5bb00 +lsp/ext.rs hash: 9790509d87670c22 If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index 1195a85cf70..142aa22a704 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -270,7 +270,8 @@ Aliased as `"checkOnSave.targets"`. + -- Whether `--workspace` should be passed to `cargo check`. -If false, `-p <package>` will be passed instead. +If false, `-p <package>` will be passed instead if applicable. In case it is not, no +check will be performed. -- [[rust-analyzer.completion.addSemicolonToUnit]]rust-analyzer.completion.addSemicolonToUnit (default: `true`):: + diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 469c1b458d5..df97efaae73 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -1098,7 +1098,7 @@ "title": "check", "properties": { "rust-analyzer.check.workspace": { - "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead.", + "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead if applicable. 
In case it is not, no\ncheck will be performed.", "default": true, "type": "boolean" } diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index 4a3f66b00d0..459754b1d1c 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -347,6 +347,8 @@ export class Ctx implements RustAnalyzerExtensionApi { } log.info("Disposing language client"); this.updateCommands("disable"); + // we give the server 100ms to stop gracefully + await this.client?.stop(100).catch((_) => {}); await this.disposeClient(); } diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml index cce007ae54c..2fa3272e659 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml +++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lsp-server" -version = "0.7.7" +version = "0.7.8" description = "Generic LSP server scaffold." license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server" @@ -9,7 +9,8 @@ edition = "2021" [dependencies] log = "0.4.17" serde_json = "1.0.108" -serde = { version = "1.0.192", features = ["derive"] } +serde = { version = "1.0.216" } +serde_derive = { version = "1.0.216" } crossbeam-channel.workspace = true [dev-dependencies] diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs index 53c64796f28..11f98f50790 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs +++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs @@ -3,7 +3,8 @@ use std::{ io::{self, BufRead, Write}, }; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::de::DeserializeOwned; +use serde_derive::{Deserialize, Serialize}; use crate::error::ExtractError; @@ -196,7 +197,7 @@ impl Message { } impl Response { - pub fn new_ok<R: Serialize>(id: RequestId, result: R) -> Response { + pub fn new_ok<R: serde::Serialize>(id: RequestId, result: R) -> Response { Response { id, result: Some(serde_json::to_value(result).unwrap()), error: None } } pub fn new_err(id: RequestId, code: i32, message: String) -> Response { @@ -206,7 +207,7 @@ impl Response { } impl Request { - pub fn new<P: Serialize>(id: RequestId, method: String, params: P) -> Request { + pub fn new<P: serde::Serialize>(id: RequestId, method: String, params: P) -> Request { Request { id, method, params: serde_json::to_value(params).unwrap() } } pub fn extract<P: DeserializeOwned>( @@ -231,7 +232,7 @@ impl Request { } impl Notification { - pub fn new(method: String, params: impl Serialize) -> Notification { + pub fn new(method: String, params: impl serde::Serialize) -> Notification { Notification { method, params: serde_json::to_value(params).unwrap() } } pub fn extract<P: DeserializeOwned>( diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs b/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs index 347a9fb6fb9..c216864bee8 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs +++ b/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs @@ -1,7 +1,5 @@ use std::collections::HashMap; -use serde::Serialize; - use crate::{ErrorCode, Request, RequestId, Response, ResponseError}; /// Manages the set of pending requests, both incoming and outgoing. 
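The manifests above drop serde's "derive" feature and add `serde_derive` as a separate dependency, so derive macros come from `serde_derive` while trait bounds spell out `serde::Serialize` by path. Below is a small self-contained sketch of that pattern; the Cargo.toml wiring it assumes (serde = "1", serde_derive = "1", serde_json = "1") and the example type are illustrations, not code from the patch:

// Assumed dependencies: serde (without "derive"), serde_derive, serde_json.
use serde_derive::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Notification {
    method: String,
    params: serde_json::Value,
}

// Name the trait via its full path instead of `use serde::Serialize`, mirroring
// the `P: serde::Serialize` bounds used in msg.rs and req_queue.rs.
fn to_params<P: serde::Serialize>(params: P) -> serde_json::Value {
    serde_json::to_value(params).expect("serialization of plain data should not fail")
}

fn main() {
    let n = Notification { method: "exit".to_owned(), params: to_params(()) };
    println!("{}", serde_json::to_string(&n).unwrap());
}
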
@@ -56,7 +54,7 @@ impl<I> Incoming<I> { } impl<O> Outgoing<O> { - pub fn register<P: Serialize>(&mut self, method: String, params: P, data: O) -> Request { + pub fn register<P: serde::Serialize>(&mut self, method: String, params: P, data: O) -> Request { let id = RequestId::from(self.next_id); self.pending.insert(id.clone(), data); self.next_id += 1; diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 7d60fa6cb76..3be63741c00 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -5a6036a1802262f8cf02192b02026688d396f1d7 +0eca4dd3205a01dba4bd7b7c140ec370aff03440 diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml index 01ad3336311..b505ee835b2 100644 --- a/src/tools/rust-analyzer/xtask/Cargo.toml +++ b/src/tools/rust-analyzer/xtask/Cargo.toml @@ -16,7 +16,8 @@ xflags = "0.3.0" time = { version = "0.3", default-features = false } zip = { version = "0.6", default-features = false, features = ["deflate", "time"] } stdx.workspace = true -proc-macro2 = "1.0.47" +# https://github.com/dtolnay/proc-macro2/issues/475 +proc-macro2 = "=1.0.86" quote = "1.0.20" ungrammar = "1.16.1" either.workspace = true diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock index c2f4ba161b7..68fb9895ecd 100644 --- a/src/tools/rustbook/Cargo.lock +++ b/src/tools/rustbook/Cargo.lock @@ -161,9 +161,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" -version = "1.2.0" +version = "1.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" +checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" dependencies = [ "shlex", ] @@ -213,9 +213,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.39" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4db298d517d5fa00b2b84bbe044efd3fde43874a41db0d46f91994646a2da4" +checksum = "ac2e663e3e3bed2d32d065a8404024dad306e699a04263ec59919529f803aee9" dependencies = [ "clap", ] @@ -342,9 +342,9 @@ dependencies = [ [[package]] name = "env_filter" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" dependencies = [ "log", "regex", @@ -352,9 +352,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "anstream", "anstyle", @@ -714,9 +714,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.168" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libdbus-sys" @@ -878,9 +878,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miniz_oxide" -version = "0.8.1" +version = "0.8.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ef2593ffb6958c941575cee70c8e257438749971869c4ae5acf6f91a168a61" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" dependencies = [ "adler2", ] @@ -1006,7 +1006,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 2.0.7", + "thiserror 2.0.9", "ucd-trie", ] @@ -1358,9 +1358,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" dependencies = [ "itoa", "memchr", @@ -1537,11 +1537,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.7" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767" +checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" dependencies = [ - "thiserror-impl 2.0.7", + "thiserror-impl 2.0.9", ] [[package]] @@ -1557,9 +1557,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.7" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36" +checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" dependencies = [ "proc-macro2", "quote", diff --git a/src/tools/rustfmt/src/items.rs b/src/tools/rustfmt/src/items.rs index c3debc2f4f0..e7d0fba048b 100644 --- a/src/tools/rustfmt/src/items.rs +++ b/src/tools/rustfmt/src/items.rs @@ -1944,6 +1944,11 @@ pub(crate) fn rewrite_struct_field( shape: Shape, lhs_max_width: usize, ) -> RewriteResult { + // FIXME(default_field_values): Implement formatting. + if field.default.is_some() { + return Err(RewriteError::Unknown); + } + if contains_skip(&field.attrs) { return Ok(context.snippet(field.span()).to_owned()); } diff --git a/src/tools/rustfmt/src/spanned.rs b/src/tools/rustfmt/src/spanned.rs index db7c3486e71..6b3e40b9115 100644 --- a/src/tools/rustfmt/src/spanned.rs +++ b/src/tools/rustfmt/src/spanned.rs @@ -144,6 +144,7 @@ impl Spanned for ast::GenericParam { impl Spanned for ast::FieldDef { fn span(&self) -> Span { + // FIXME(default_field_values): This needs to be adjusted. span_with_attrs_lo_hi!(self, self.span.lo(), self.ty.span.hi()) } } diff --git a/src/tools/rustfmt/tests/source/default-field-values.rs b/src/tools/rustfmt/tests/source/default-field-values.rs new file mode 100644 index 00000000000..622f9640d0d --- /dev/null +++ b/src/tools/rustfmt/tests/source/default-field-values.rs @@ -0,0 +1,18 @@ +#![feature(default_struct_values)] + +// Test for now that nightly default field values are left alone for now. 
+ +struct Foo { + default_field: Spacing = /* uwu */ 0, +} + +struct Foo2 { + #[rustfmt::skip] + default_field: Spacing = /* uwu */ 0, +} + +a_macro!( + struct Foo2 { + default_field: Spacing = /* uwu */ 0, + } +); diff --git a/src/tools/rustfmt/tests/target/default-field-values.rs b/src/tools/rustfmt/tests/target/default-field-values.rs new file mode 100644 index 00000000000..622f9640d0d --- /dev/null +++ b/src/tools/rustfmt/tests/target/default-field-values.rs @@ -0,0 +1,18 @@ +#![feature(default_struct_values)] + +// Test for now that nightly default field values are left alone for now. + +struct Foo { + default_field: Spacing = /* uwu */ 0, +} + +struct Foo2 { + #[rustfmt::skip] + default_field: Spacing = /* uwu */ 0, +} + +a_macro!( + struct Foo2 { + default_field: Spacing = /* uwu */ 0, + } +); diff --git a/tests/codegen/slice-is-ascii.rs b/tests/codegen/slice-is-ascii.rs new file mode 100644 index 00000000000..b1e97154609 --- /dev/null +++ b/tests/codegen/slice-is-ascii.rs @@ -0,0 +1,16 @@ +//@ only-x86_64 +//@ compile-flags: -C opt-level=3 +#![crate_type = "lib"] + +/// Check that the fast-path of `is_ascii` uses a `pmovmskb` instruction. +/// Platforms lacking an equivalent instruction use other techniques for +/// optimizing `is_ascii`. +// CHECK-LABEL: @is_ascii_autovectorized +#[no_mangle] +pub fn is_ascii_autovectorized(s: &[u8]) -> bool { + // CHECK: load <32 x i8> + // CHECK-NEXT: icmp slt <32 x i8> + // CHECK-NEXT: bitcast <32 x i1> + // CHECK-NEXT: icmp eq i32 + s.is_ascii() +} diff --git a/tests/codegen/vec-in-place.rs b/tests/codegen/vec-in-place.rs index 5d05f242617..33de0913f77 100644 --- a/tests/codegen/vec-in-place.rs +++ b/tests/codegen/vec-in-place.rs @@ -37,6 +37,9 @@ pub struct Baz { #[no_mangle] pub fn vec_iterator_cast_primitive(vec: Vec<i8>) -> Vec<u8> { // CHECK-NOT: loop + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: loop // CHECK-NOT: call vec.into_iter().map(|e| e as u8).collect() } @@ -45,14 +48,37 @@ pub fn vec_iterator_cast_primitive(vec: Vec<i8>) -> Vec<u8> { #[no_mangle] pub fn vec_iterator_cast_wrapper(vec: Vec<u8>) -> Vec<Wrapper<u8>> { // CHECK-NOT: loop + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: loop // CHECK-NOT: call vec.into_iter().map(|e| Wrapper(e)).collect() } +// CHECK-LABEL: @vec_iterator_cast_signed +#[no_mangle] +pub fn vec_iterator_cast_signed(vec: Vec<i32>) -> Vec<u32> { + // CHECK-NOT: and i{{[0-9]+}} %{{.*}}, {{[0-9]+}} + vec.into_iter().map(|e| u32::from_ne_bytes(e.to_ne_bytes())).collect() +} + +// CHECK-LABEL: @vec_iterator_cast_signed_nested +#[no_mangle] +pub fn vec_iterator_cast_signed_nested(vec: Vec<Vec<i32>>) -> Vec<Vec<u32>> { + // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}} + // CHECK-NOT: %{{.*}} = udiv + vec.into_iter() + .map(|e| e.into_iter().map(|e| u32::from_ne_bytes(e.to_ne_bytes())).collect()) + .collect() +} + // CHECK-LABEL: @vec_iterator_cast_unwrap #[no_mangle] pub fn vec_iterator_cast_unwrap(vec: Vec<Wrapper<u8>>) -> Vec<u8> { // CHECK-NOT: loop + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: loop // CHECK-NOT: call vec.into_iter().map(|e| e.0).collect() } @@ -61,6 +87,9 @@ pub fn vec_iterator_cast_unwrap(vec: Vec<Wrapper<u8>>) -> Vec<u8> { #[no_mangle] pub fn vec_iterator_cast_aggregate(vec: Vec<[u64; 4]>) -> Vec<Foo> { // CHECK-NOT: loop + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: loop // CHECK-NOT: call vec.into_iter().map(|e| unsafe { std::mem::transmute(e) 
}).collect() } @@ -69,6 +98,9 @@ pub fn vec_iterator_cast_aggregate(vec: Vec<[u64; 4]>) -> Vec<Foo> { #[no_mangle] pub fn vec_iterator_cast_deaggregate_tra(vec: Vec<Bar>) -> Vec<[u64; 4]> { // CHECK-NOT: loop + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: loop // CHECK-NOT: call // Safety: For the purpose of this test we assume that Bar layout matches [u64; 4]. @@ -82,6 +114,9 @@ pub fn vec_iterator_cast_deaggregate_tra(vec: Vec<Bar>) -> Vec<[u64; 4]> { #[no_mangle] pub fn vec_iterator_cast_deaggregate_fold(vec: Vec<Baz>) -> Vec<[u64; 4]> { // CHECK-NOT: loop + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: loop // CHECK-NOT: call // Safety: For the purpose of this test we assume that Bar layout matches [u64; 4]. @@ -95,6 +130,11 @@ pub fn vec_iterator_cast_deaggregate_fold(vec: Vec<Baz>) -> Vec<[u64; 4]> { #[no_mangle] pub fn vec_iterator_cast_unwrap_drop(vec: Vec<Wrapper<String>>) -> Vec<String> { // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}} + // CHECK-NOT: %{{.*}} = mul + // CHECK-NOT: %{{.*}} = udiv + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}} // CHECK-NOT: call // CHECK-NOT: %{{.*}} = mul // CHECK-NOT: %{{.*}} = udiv @@ -106,9 +146,15 @@ pub fn vec_iterator_cast_unwrap_drop(vec: Vec<Wrapper<String>>) -> Vec<String> { #[no_mangle] pub fn vec_iterator_cast_wrap_drop(vec: Vec<String>) -> Vec<Wrapper<String>> { // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}} + // CHECK-NOT: %{{.*}} = mul + // CHECK-NOT: %{{.*}} = udiv + // CHECK: call + // CHECK-SAME: void @llvm.assume(i1 %{{.+}}) + // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}} // CHECK-NOT: call // CHECK-NOT: %{{.*}} = mul // CHECK-NOT: %{{.*}} = udiv + // CHECK: ret void vec.into_iter().map(Wrapper).collect() } diff --git a/tests/codegen/vec_pop_push_noop.rs b/tests/codegen/vec_pop_push_noop.rs index 4d76c24a9d9..a8ad5b6f1a3 100644 --- a/tests/codegen/vec_pop_push_noop.rs +++ b/tests/codegen/vec_pop_push_noop.rs @@ -1,3 +1,6 @@ +//@ revisions: llvm-pre-19 llvm-19 +//@ [llvm-19] min-llvm-version: 19 +//@ [llvm-pre-19] max-llvm-major-version: 18 //@ compile-flags: -O #![crate_type = "lib"] @@ -9,8 +12,11 @@ pub fn noop(v: &mut Vec<u8>) { // CHECK-NOT: call // CHECK: tail call void @llvm.assume // CHECK-NOT: grow_one + // llvm-pre-19: call + // llvm-pre-19-same: void @llvm.assume + // llvm-pre-19-NOT: grow_one // CHECK-NOT: call - // CHECK: ret + // CHECK: {{ret|[}]}} if let Some(x) = v.pop() { v.push(x) } diff --git a/tests/coverage/async.cov-map b/tests/coverage/async.cov-map index 9c6f4bd385f..d3eed6c4f2a 100644 --- a/tests/coverage/async.cov-map +++ b/tests/coverage/async.cov-map @@ -1,20 +1,20 @@ Function name: async::c -Raw bytes (9): 0x[01, 01, 00, 01, 01, 0a, 01, 00, 19] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 0b, 01, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 10, 1) to (start + 0, 25) +- Code(Counter(0)) at (prev + 11, 1) to (start + 0, 25) Highest counter ID seen: c0 Function name: async::c::{closure#0} -Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 0a, 19, 01, 0e, 05, 02, 09, 00, 0a, 02, 02, 09, 00, 0a, 01, 02, 01, 00, 02] +Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 0b, 19, 01, 0e, 05, 02, 09, 00, 0a, 02, 02, 09, 00, 0a, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = 
Counter(1) Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 10, 25) to (start + 1, 14) +- Code(Counter(0)) at (prev + 11, 25) to (start + 1, 14) - Code(Counter(1)) at (prev + 2, 9) to (start + 0, 10) - Code(Expression(0, Sub)) at (prev + 2, 9) to (start + 0, 10) = (c0 - c1) @@ -22,93 +22,93 @@ Number of file 0 mappings: 4 Highest counter ID seen: c1 Function name: async::d -Raw bytes (9): 0x[01, 01, 00, 01, 01, 12, 01, 00, 14] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 13, 01, 00, 14] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 18, 1) to (start + 0, 20) +- Code(Counter(0)) at (prev + 19, 1) to (start + 0, 20) Highest counter ID seen: c0 Function name: async::d::{closure#0} -Raw bytes (9): 0x[01, 01, 00, 01, 01, 12, 14, 00, 19] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 13, 14, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 18, 20) to (start + 0, 25) +- Code(Counter(0)) at (prev + 19, 20) to (start + 0, 25) Highest counter ID seen: c0 Function name: async::e (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 14, 01, 00, 14] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 15, 01, 00, 14] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 20, 1) to (start + 0, 20) +- Code(Zero) at (prev + 21, 1) to (start + 0, 20) Highest counter ID seen: (none) Function name: async::e::{closure#0} (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 14, 14, 00, 19] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 15, 14, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 20, 20) to (start + 0, 25) +- Code(Zero) at (prev + 21, 20) to (start + 0, 25) Highest counter ID seen: (none) Function name: async::f -Raw bytes (9): 0x[01, 01, 00, 01, 01, 16, 01, 00, 14] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 17, 01, 00, 14] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 22, 1) to (start + 0, 20) +- Code(Counter(0)) at (prev + 23, 1) to (start + 0, 20) Highest counter ID seen: c0 Function name: async::f::{closure#0} -Raw bytes (9): 0x[01, 01, 00, 01, 01, 16, 14, 00, 19] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 17, 14, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 22, 20) to (start + 0, 25) +- Code(Counter(0)) at (prev + 23, 20) to (start + 0, 25) Highest counter ID seen: c0 Function name: async::foo (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 18, 01, 00, 1e] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 19, 01, 00, 1e] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 24, 1) to (start + 0, 30) +- Code(Zero) at (prev + 25, 1) to (start + 0, 30) Highest counter ID seen: (none) Function name: async::foo::{closure#0} (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 18, 1e, 00, 2d] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 19, 1e, 00, 2d] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 24, 30) to (start + 0, 45) +- Code(Zero) at (prev + 25, 30) to (start + 0, 45) Highest counter ID seen: (none) Function name: async::g -Raw bytes (9): 0x[01, 01, 00, 01, 01, 1a, 01, 00, 17] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 1b, 
01, 00, 17] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 26, 1) to (start + 0, 23) +- Code(Counter(0)) at (prev + 27, 1) to (start + 0, 23) Highest counter ID seen: c0 Function name: async::g::{closure#0} (unused) -Raw bytes (59): 0x[01, 01, 00, 0b, 00, 1a, 17, 01, 0c, 00, 02, 09, 00, 0a, 00, 00, 0e, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 09, 00, 0a, 00, 00, 0e, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02] +Raw bytes (59): 0x[01, 01, 00, 0b, 00, 1b, 17, 01, 0c, 00, 02, 09, 00, 0a, 00, 00, 0e, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 09, 00, 0a, 00, 00, 0e, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 11 -- Code(Zero) at (prev + 26, 23) to (start + 1, 12) +- Code(Zero) at (prev + 27, 23) to (start + 1, 12) - Code(Zero) at (prev + 2, 9) to (start + 0, 10) - Code(Zero) at (prev + 0, 14) to (start + 0, 23) - Code(Zero) at (prev + 0, 27) to (start + 0, 28) @@ -122,21 +122,21 @@ Number of file 0 mappings: 11 Highest counter ID seen: (none) Function name: async::h -Raw bytes (9): 0x[01, 01, 00, 01, 01, 22, 01, 00, 16] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 23, 01, 00, 16] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 34, 1) to (start + 0, 22) +- Code(Counter(0)) at (prev + 35, 1) to (start + 0, 22) Highest counter ID seen: c0 Function name: async::h::{closure#0} (unused) -Raw bytes (39): 0x[01, 01, 00, 07, 00, 22, 16, 03, 0c, 00, 04, 09, 00, 0a, 00, 00, 0e, 00, 19, 00, 00, 1a, 00, 1b, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02] +Raw bytes (39): 0x[01, 01, 00, 07, 00, 23, 16, 03, 0c, 00, 04, 09, 00, 0a, 00, 00, 0e, 00, 19, 00, 00, 1a, 00, 1b, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 7 -- Code(Zero) at (prev + 34, 22) to (start + 3, 12) +- Code(Zero) at (prev + 35, 22) to (start + 3, 12) - Code(Zero) at (prev + 4, 9) to (start + 0, 10) - Code(Zero) at (prev + 0, 14) to (start + 0, 25) - Code(Zero) at (prev + 0, 26) to (start + 0, 27) @@ -146,23 +146,23 @@ Number of file 0 mappings: 7 Highest counter ID seen: (none) Function name: async::i -Raw bytes (9): 0x[01, 01, 00, 01, 01, 2b, 01, 00, 13] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 2c, 01, 00, 13] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 43, 1) to (start + 0, 19) +- Code(Counter(0)) at (prev + 44, 1) to (start + 0, 19) Highest counter ID seen: c0 Function name: async::i::{closure#0} -Raw bytes (63): 0x[01, 01, 02, 07, 15, 0d, 11, 0b, 01, 2b, 13, 04, 0c, 09, 05, 09, 00, 0a, 01, 00, 0e, 00, 18, 05, 00, 1c, 00, 21, 09, 00, 27, 00, 30, 11, 01, 09, 00, 0a, 19, 00, 0e, 00, 17, 1d, 00, 1b, 00, 20, 11, 00, 24, 00, 26, 15, 01, 0e, 00, 10, 03, 02, 01, 00, 02] +Raw bytes (63): 0x[01, 01, 02, 07, 15, 0d, 11, 0b, 01, 2c, 13, 04, 0c, 09, 05, 09, 00, 0a, 01, 00, 0e, 00, 18, 05, 00, 1c, 00, 21, 09, 00, 27, 00, 30, 11, 01, 09, 00, 0a, 19, 00, 0e, 00, 17, 1d, 00, 1b, 00, 20, 11, 00, 24, 00, 26, 15, 01, 0e, 00, 10, 03, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Expression(1, Add), rhs = Counter(5) - 
expression 1 operands: lhs = Counter(3), rhs = Counter(4) Number of file 0 mappings: 11 -- Code(Counter(0)) at (prev + 43, 19) to (start + 4, 12) +- Code(Counter(0)) at (prev + 44, 19) to (start + 4, 12) - Code(Counter(2)) at (prev + 5, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 14) to (start + 0, 24) - Code(Counter(1)) at (prev + 0, 28) to (start + 0, 33) @@ -177,14 +177,14 @@ Number of file 0 mappings: 11 Highest counter ID seen: c7 Function name: async::j -Raw bytes (58): 0x[01, 01, 02, 07, 0d, 05, 09, 0a, 01, 36, 01, 00, 0d, 01, 0b, 0b, 00, 0c, 05, 01, 09, 00, 0a, 01, 00, 0e, 00, 1b, 05, 00, 1f, 00, 27, 09, 01, 09, 00, 0a, 11, 00, 0e, 00, 1a, 09, 00, 1e, 00, 20, 0d, 01, 0e, 00, 10, 03, 02, 01, 00, 02] +Raw bytes (58): 0x[01, 01, 02, 07, 0d, 05, 09, 0a, 01, 37, 01, 00, 0d, 01, 0b, 0b, 00, 0c, 05, 01, 09, 00, 0a, 01, 00, 0e, 00, 1b, 05, 00, 1f, 00, 27, 09, 01, 09, 00, 0a, 11, 00, 0e, 00, 1a, 09, 00, 1e, 00, 20, 0d, 01, 0e, 00, 10, 03, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 10 -- Code(Counter(0)) at (prev + 54, 1) to (start + 0, 13) +- Code(Counter(0)) at (prev + 55, 1) to (start + 0, 13) - Code(Counter(0)) at (prev + 11, 11) to (start + 0, 12) - Code(Counter(1)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 14) to (start + 0, 27) @@ -198,13 +198,13 @@ Number of file 0 mappings: 10 Highest counter ID seen: c4 Function name: async::j::c -Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 38, 05, 01, 12, 05, 02, 0d, 00, 0e, 02, 02, 0d, 00, 0e, 01, 02, 05, 00, 06] +Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 39, 05, 01, 12, 05, 02, 0d, 00, 0e, 02, 02, 0d, 00, 0e, 01, 02, 05, 00, 06] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 56, 5) to (start + 1, 18) +- Code(Counter(0)) at (prev + 57, 5) to (start + 1, 18) - Code(Counter(1)) at (prev + 2, 13) to (start + 0, 14) - Code(Expression(0, Sub)) at (prev + 2, 13) to (start + 0, 14) = (c0 - c1) @@ -212,30 +212,30 @@ Number of file 0 mappings: 4 Highest counter ID seen: c1 Function name: async::j::d -Raw bytes (9): 0x[01, 01, 00, 01, 01, 3f, 05, 00, 17] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 40, 05, 00, 17] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 63, 5) to (start + 0, 23) +- Code(Counter(0)) at (prev + 64, 5) to (start + 0, 23) Highest counter ID seen: c0 Function name: async::j::f -Raw bytes (9): 0x[01, 01, 00, 01, 01, 40, 05, 00, 17] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 41, 05, 00, 17] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 64, 5) to (start + 0, 23) +- Code(Counter(0)) at (prev + 65, 5) to (start + 0, 23) Highest counter ID seen: c0 Function name: async::k (unused) -Raw bytes (29): 0x[01, 01, 00, 05, 00, 48, 01, 01, 0c, 00, 02, 0e, 00, 10, 00, 01, 0e, 00, 10, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02] +Raw bytes (29): 0x[01, 01, 00, 05, 00, 49, 01, 01, 0c, 00, 02, 0e, 00, 10, 00, 01, 0e, 00, 10, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 5 -- Code(Zero) at (prev + 72, 1) to (start + 1, 12) +- Code(Zero) 
at (prev + 73, 1) to (start + 1, 12) - Code(Zero) at (prev + 2, 14) to (start + 0, 16) - Code(Zero) at (prev + 1, 14) to (start + 0, 16) - Code(Zero) at (prev + 1, 14) to (start + 0, 16) @@ -243,14 +243,14 @@ Number of file 0 mappings: 5 Highest counter ID seen: (none) Function name: async::l -Raw bytes (33): 0x[01, 01, 02, 01, 07, 05, 09, 05, 01, 50, 01, 01, 0c, 02, 02, 0e, 00, 10, 09, 01, 0e, 00, 10, 05, 01, 0e, 00, 10, 01, 02, 01, 00, 02] +Raw bytes (33): 0x[01, 01, 02, 01, 07, 05, 09, 05, 01, 51, 01, 01, 0c, 02, 02, 0e, 00, 10, 09, 01, 0e, 00, 10, 05, 01, 0e, 00, 10, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Expression(1, Add) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 5 -- Code(Counter(0)) at (prev + 80, 1) to (start + 1, 12) +- Code(Counter(0)) at (prev + 81, 1) to (start + 1, 12) - Code(Expression(0, Sub)) at (prev + 2, 14) to (start + 0, 16) = (c0 - (c1 + c2)) - Code(Counter(2)) at (prev + 1, 14) to (start + 0, 16) @@ -259,29 +259,29 @@ Number of file 0 mappings: 5 Highest counter ID seen: c2 Function name: async::m -Raw bytes (9): 0x[01, 01, 00, 01, 01, 58, 01, 00, 19] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 59, 01, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 88, 1) to (start + 0, 25) +- Code(Counter(0)) at (prev + 89, 1) to (start + 0, 25) Highest counter ID seen: c0 Function name: async::m::{closure#0} (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 58, 19, 00, 22] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 59, 19, 00, 22] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 88, 25) to (start + 0, 34) +- Code(Zero) at (prev + 89, 25) to (start + 0, 34) Highest counter ID seen: (none) Function name: async::main -Raw bytes (9): 0x[01, 01, 00, 01, 01, 5a, 01, 08, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 5b, 01, 08, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 90, 1) to (start + 8, 2) +- Code(Counter(0)) at (prev + 91, 1) to (start + 8, 2) Highest counter ID seen: c0 diff --git a/tests/coverage/async.coverage b/tests/coverage/async.coverage index cee0e1a0a85..aee76b05fb7 100644 --- a/tests/coverage/async.coverage +++ b/tests/coverage/async.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |#![feature(custom_inner_attributes)] // for #![rustfmt::skip] LL| |#![allow(unused_assignments, dead_code)] LL| |#![rustfmt::skip] diff --git a/tests/coverage/async.rs b/tests/coverage/async.rs index 801c98c52df..da0a1c0b6f0 100644 --- a/tests/coverage/async.rs +++ b/tests/coverage/async.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] #![feature(custom_inner_attributes)] // for #![rustfmt::skip] #![allow(unused_assignments, dead_code)] #![rustfmt::skip] diff --git a/tests/coverage/async2.cov-map b/tests/coverage/async2.cov-map index 926124fdc76..7660f917b65 100644 --- a/tests/coverage/async2.cov-map +++ b/tests/coverage/async2.cov-map @@ -1,58 +1,58 @@ Function name: async2::async_func -Raw bytes (9): 0x[01, 01, 00, 01, 01, 0e, 01, 00, 17] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 0f, 01, 00, 17] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 14, 1) to (start + 0, 23) +- Code(Counter(0)) at (prev + 15, 1) to 
(start + 0, 23) Highest counter ID seen: c0 Function name: async2::async_func::{closure#0} -Raw bytes (24): 0x[01, 01, 00, 04, 01, 0e, 17, 03, 09, 05, 03, 0a, 02, 06, 00, 02, 05, 00, 06, 01, 01, 01, 00, 02] +Raw bytes (24): 0x[01, 01, 00, 04, 01, 0f, 17, 03, 09, 05, 03, 0a, 02, 06, 00, 02, 05, 00, 06, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 14, 23) to (start + 3, 9) +- Code(Counter(0)) at (prev + 15, 23) to (start + 3, 9) - Code(Counter(1)) at (prev + 3, 10) to (start + 2, 6) - Code(Zero) at (prev + 2, 5) to (start + 0, 6) - Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) Highest counter ID seen: c1 Function name: async2::async_func_just_println -Raw bytes (9): 0x[01, 01, 00, 01, 01, 16, 01, 00, 24] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 17, 01, 00, 24] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 22, 1) to (start + 0, 36) +- Code(Counter(0)) at (prev + 23, 1) to (start + 0, 36) Highest counter ID seen: c0 Function name: async2::async_func_just_println::{closure#0} -Raw bytes (9): 0x[01, 01, 00, 01, 01, 16, 24, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 17, 24, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 22, 36) to (start + 2, 2) +- Code(Counter(0)) at (prev + 23, 36) to (start + 2, 2) Highest counter ID seen: c0 Function name: async2::main -Raw bytes (9): 0x[01, 01, 00, 01, 01, 1a, 01, 07, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 1b, 01, 07, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 26, 1) to (start + 7, 2) +- Code(Counter(0)) at (prev + 27, 1) to (start + 7, 2) Highest counter ID seen: c0 Function name: async2::non_async_func -Raw bytes (24): 0x[01, 01, 00, 04, 01, 06, 01, 03, 09, 05, 03, 0a, 02, 06, 00, 02, 05, 00, 06, 01, 01, 01, 00, 02] +Raw bytes (24): 0x[01, 01, 00, 04, 01, 07, 01, 03, 09, 05, 03, 0a, 02, 06, 00, 02, 05, 00, 06, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 6, 1) to (start + 3, 9) +- Code(Counter(0)) at (prev + 7, 1) to (start + 3, 9) - Code(Counter(1)) at (prev + 3, 10) to (start + 2, 6) - Code(Zero) at (prev + 2, 5) to (start + 0, 6) - Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) diff --git a/tests/coverage/async2.coverage b/tests/coverage/async2.coverage index 0e91fa975f5..fa56072924b 100644 --- a/tests/coverage/async2.coverage +++ b/tests/coverage/async2.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2018 LL| | LL| |//@ aux-build: executor.rs diff --git a/tests/coverage/async2.rs b/tests/coverage/async2.rs index 64e85f1b6bd..9bd4821518a 100644 --- a/tests/coverage/async2.rs +++ b/tests/coverage/async2.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2018 //@ aux-build: executor.rs diff --git a/tests/coverage/async_block.cov-map b/tests/coverage/async_block.cov-map index e9e7e9cd2c3..14ed4850d4a 100644 --- a/tests/coverage/async_block.cov-map +++ b/tests/coverage/async_block.cov-map @@ -1,11 +1,11 @@ Function name: async_block::main -Raw bytes (36): 0x[01, 01, 01, 01, 05, 06, 01, 06, 01, 00, 0b, 05, 01, 09, 00, 0a, 03, 00, 0e, 00, 13, 05, 00, 14, 01, 16, 05, 07, 0a, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (36): 0x[01, 01, 
01, 01, 05, 06, 01, 07, 01, 00, 0b, 05, 01, 09, 00, 0a, 03, 00, 0e, 00, 13, 05, 00, 14, 01, 16, 05, 07, 0a, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 6, 1) to (start + 0, 11) +- Code(Counter(0)) at (prev + 7, 1) to (start + 0, 11) - Code(Counter(1)) at (prev + 1, 9) to (start + 0, 10) - Code(Expression(0, Add)) at (prev + 0, 14) to (start + 0, 19) = (c0 + c1) @@ -15,13 +15,13 @@ Number of file 0 mappings: 6 Highest counter ID seen: c1 Function name: async_block::main::{closure#0} -Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 08, 1c, 01, 17, 05, 01, 18, 02, 0e, 02, 02, 14, 02, 0e, 01, 03, 09, 00, 0a] +Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 09, 1c, 01, 17, 05, 01, 18, 02, 0e, 02, 02, 14, 02, 0e, 01, 03, 09, 00, 0a] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 8, 28) to (start + 1, 23) +- Code(Counter(0)) at (prev + 9, 28) to (start + 1, 23) - Code(Counter(1)) at (prev + 1, 24) to (start + 2, 14) - Code(Expression(0, Sub)) at (prev + 2, 20) to (start + 2, 14) = (c0 - c1) diff --git a/tests/coverage/async_block.coverage b/tests/coverage/async_block.coverage index 7ccc83499e6..9e3294492cd 100644 --- a/tests/coverage/async_block.coverage +++ b/tests/coverage/async_block.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |//@ aux-build: executor.rs diff --git a/tests/coverage/async_block.rs b/tests/coverage/async_block.rs index 05a105224bb..d1e37ab7505 100644 --- a/tests/coverage/async_block.rs +++ b/tests/coverage/async_block.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ aux-build: executor.rs diff --git a/tests/coverage/attr/impl.cov-map b/tests/coverage/attr/impl.cov-map index 4d068c290f4..8a23c082082 100644 --- a/tests/coverage/attr/impl.cov-map +++ b/tests/coverage/attr/impl.cov-map @@ -1,27 +1,27 @@ Function name: <impl::MyStruct>::off_on (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 0e, 05, 00, 13] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 0f, 05, 00, 13] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 14, 5) to (start + 0, 19) +- Code(Zero) at (prev + 15, 5) to (start + 0, 19) Highest counter ID seen: (none) Function name: <impl::MyStruct>::on_inherit (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 16, 05, 00, 17] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 17, 05, 00, 17] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 22, 5) to (start + 0, 23) +- Code(Zero) at (prev + 23, 5) to (start + 0, 23) Highest counter ID seen: (none) Function name: <impl::MyStruct>::on_on (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 19, 05, 00, 12] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 1a, 05, 00, 12] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 25, 5) to (start + 0, 18) +- Code(Zero) at (prev + 26, 5) to (start + 0, 18) Highest counter ID seen: (none) diff --git a/tests/coverage/attr/impl.coverage b/tests/coverage/attr/impl.coverage index af00df5d743..670c1c36a96 100644 --- a/tests/coverage/attr/impl.coverage +++ b/tests/coverage/attr/impl.coverage @@ -1,3 +1,4 @@ + LL| 
|#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ reference: attributes.coverage.nesting LL| | diff --git a/tests/coverage/attr/impl.rs b/tests/coverage/attr/impl.rs index db08fdc4179..c720a4cd6b2 100644 --- a/tests/coverage/attr/impl.rs +++ b/tests/coverage/attr/impl.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.nesting diff --git a/tests/coverage/attr/module.cov-map b/tests/coverage/attr/module.cov-map index b318ac85a6c..81e20a2c264 100644 --- a/tests/coverage/attr/module.cov-map +++ b/tests/coverage/attr/module.cov-map @@ -1,27 +1,27 @@ Function name: module::off::on (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 0c, 05, 00, 0f] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 0d, 05, 00, 0f] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 12, 5) to (start + 0, 15) +- Code(Zero) at (prev + 13, 5) to (start + 0, 15) Highest counter ID seen: (none) Function name: module::on::inherit (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 14, 05, 00, 14] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 15, 05, 00, 14] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 20, 5) to (start + 0, 20) +- Code(Zero) at (prev + 21, 5) to (start + 0, 20) Highest counter ID seen: (none) Function name: module::on::on (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 17, 05, 00, 0f] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 18, 05, 00, 0f] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 23, 5) to (start + 0, 15) +- Code(Zero) at (prev + 24, 5) to (start + 0, 15) Highest counter ID seen: (none) diff --git a/tests/coverage/attr/module.coverage b/tests/coverage/attr/module.coverage index 732850fb04a..bba021b9b47 100644 --- a/tests/coverage/attr/module.coverage +++ b/tests/coverage/attr/module.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ reference: attributes.coverage.nesting LL| | diff --git a/tests/coverage/attr/module.rs b/tests/coverage/attr/module.rs index c0ec5bc1d62..1d17ac7d503 100644 --- a/tests/coverage/attr/module.rs +++ b/tests/coverage/attr/module.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.nesting diff --git a/tests/coverage/attr/nested.coverage b/tests/coverage/attr/nested.coverage index 13e9aa0a8e8..6bd24d67936 100644 --- a/tests/coverage/attr/nested.coverage +++ b/tests/coverage/attr/nested.coverage @@ -1,4 +1,4 @@ - LL| |#![feature(stmt_expr_attributes)] + LL| |#![feature(coverage_attribute, stmt_expr_attributes)] LL| |//@ edition: 2021 LL| |//@ reference: attributes.coverage.nesting LL| | diff --git a/tests/coverage/attr/nested.rs b/tests/coverage/attr/nested.rs index 184fa54c066..042fcc5319a 100644 --- a/tests/coverage/attr/nested.rs +++ b/tests/coverage/attr/nested.rs @@ -1,4 +1,4 @@ -#![feature(stmt_expr_attributes)] +#![feature(coverage_attribute, stmt_expr_attributes)] //@ edition: 2021 //@ reference: attributes.coverage.nesting diff --git a/tests/coverage/attr/off-on-sandwich.cov-map b/tests/coverage/attr/off-on-sandwich.cov-map index ae5c9bd19a2..ef6f5a9dc42 100644 --- a/tests/coverage/attr/off-on-sandwich.cov-map +++ b/tests/coverage/attr/off-on-sandwich.cov-map @@ -1,30 +1,30 @@ Function name: off_on_sandwich::dense_a::dense_b -Raw bytes (14): 0x[01, 01, 00, 02, 01, 0f, 05, 02, 12, 01, 07, 05, 00, 06] +Raw 
bytes (14): 0x[01, 01, 00, 02, 01, 10, 05, 02, 12, 01, 07, 05, 00, 06] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 15, 5) to (start + 2, 18) +- Code(Counter(0)) at (prev + 16, 5) to (start + 2, 18) - Code(Counter(0)) at (prev + 7, 5) to (start + 0, 6) Highest counter ID seen: c0 Function name: off_on_sandwich::sparse_a::sparse_b::sparse_c -Raw bytes (14): 0x[01, 01, 00, 02, 01, 21, 09, 02, 17, 01, 0b, 09, 00, 0a] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 22, 09, 02, 17, 01, 0b, 09, 00, 0a] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 33, 9) to (start + 2, 23) +- Code(Counter(0)) at (prev + 34, 9) to (start + 2, 23) - Code(Counter(0)) at (prev + 11, 9) to (start + 0, 10) Highest counter ID seen: c0 Function name: off_on_sandwich::sparse_a::sparse_b::sparse_c::sparse_d -Raw bytes (14): 0x[01, 01, 00, 02, 01, 24, 0d, 02, 1b, 01, 07, 0d, 00, 0e] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 25, 0d, 02, 1b, 01, 07, 0d, 00, 0e] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 36, 13) to (start + 2, 27) +- Code(Counter(0)) at (prev + 37, 13) to (start + 2, 27) - Code(Counter(0)) at (prev + 7, 13) to (start + 0, 14) Highest counter ID seen: c0 diff --git a/tests/coverage/attr/off-on-sandwich.coverage b/tests/coverage/attr/off-on-sandwich.coverage index 7a8c01b31eb..4fbc3884903 100644 --- a/tests/coverage/attr/off-on-sandwich.coverage +++ b/tests/coverage/attr/off-on-sandwich.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ reference: attributes.coverage.nesting LL| | diff --git a/tests/coverage/attr/off-on-sandwich.rs b/tests/coverage/attr/off-on-sandwich.rs index 6603e071dee..3d914c99b62 100644 --- a/tests/coverage/attr/off-on-sandwich.rs +++ b/tests/coverage/attr/off-on-sandwich.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.nesting diff --git a/tests/coverage/attr/trait-impl-inherit.cov-map b/tests/coverage/attr/trait-impl-inherit.cov-map new file mode 100644 index 00000000000..eab9f926bb7 --- /dev/null +++ b/tests/coverage/attr/trait-impl-inherit.cov-map @@ -0,0 +1,9 @@ +Function name: <trait_impl_inherit::S as trait_impl_inherit::T>::f +Raw bytes (9): 0x[01, 01, 00, 01, 01, 11, 05, 02, 06] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 0 +Number of file 0 mappings: 1 +- Code(Counter(0)) at (prev + 17, 5) to (start + 2, 6) +Highest counter ID seen: c0 + diff --git a/tests/coverage/attr/trait-impl-inherit.coverage b/tests/coverage/attr/trait-impl-inherit.coverage new file mode 100644 index 00000000000..b92d82aefbc --- /dev/null +++ b/tests/coverage/attr/trait-impl-inherit.coverage @@ -0,0 +1,26 @@ + LL| |#![feature(coverage_attribute)] + LL| |// Checks that `#[coverage(..)]` in a trait method is not inherited in an + LL| |// implementation. 
+ LL| |//@ edition: 2021 + LL| |//@ reference: attributes.coverage.trait-impl-inherit + LL| | + LL| |trait T { + LL| | #[coverage(off)] + LL| | fn f(&self) { + LL| | println!("default"); + LL| | } + LL| |} + LL| | + LL| |struct S; + LL| | + LL| |impl T for S { + LL| 1| fn f(&self) { + LL| 1| println!("impl S"); + LL| 1| } + LL| |} + LL| | + LL| |#[coverage(off)] + LL| |fn main() { + LL| | S.f(); + LL| |} + diff --git a/tests/coverage/attr/trait-impl-inherit.rs b/tests/coverage/attr/trait-impl-inherit.rs new file mode 100644 index 00000000000..951fecce90a --- /dev/null +++ b/tests/coverage/attr/trait-impl-inherit.rs @@ -0,0 +1,25 @@ +#![feature(coverage_attribute)] +// Checks that `#[coverage(..)]` in a trait method is not inherited in an +// implementation. +//@ edition: 2021 +//@ reference: attributes.coverage.trait-impl-inherit + +trait T { + #[coverage(off)] + fn f(&self) { + println!("default"); + } +} + +struct S; + +impl T for S { + fn f(&self) { + println!("impl S"); + } +} + +#[coverage(off)] +fn main() { + S.f(); +} diff --git a/tests/coverage/auxiliary/executor.rs b/tests/coverage/auxiliary/executor.rs index ed1fe032ef4..c282414fb8e 100644 --- a/tests/coverage/auxiliary/executor.rs +++ b/tests/coverage/auxiliary/executor.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 use core::future::Future; diff --git a/tests/coverage/await_ready.cov-map b/tests/coverage/await_ready.cov-map index ea16b36b616..bc1af4e42e8 100644 --- a/tests/coverage/await_ready.cov-map +++ b/tests/coverage/await_ready.cov-map @@ -1,19 +1,19 @@ Function name: await_ready::await_ready -Raw bytes (9): 0x[01, 01, 00, 01, 01, 0d, 01, 00, 1e] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 0e, 01, 00, 1e] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 13, 1) to (start + 0, 30) +- Code(Counter(0)) at (prev + 14, 1) to (start + 0, 30) Highest counter ID seen: c0 Function name: await_ready::await_ready::{closure#0} -Raw bytes (14): 0x[01, 01, 00, 02, 01, 0d, 1e, 03, 0f, 05, 04, 01, 00, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 0e, 1e, 03, 0f, 05, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 13, 30) to (start + 3, 15) +- Code(Counter(0)) at (prev + 14, 30) to (start + 3, 15) - Code(Counter(1)) at (prev + 4, 1) to (start + 0, 2) Highest counter ID seen: c1 diff --git a/tests/coverage/await_ready.coverage b/tests/coverage/await_ready.coverage index 40107a92e41..1150d807e76 100644 --- a/tests/coverage/await_ready.coverage +++ b/tests/coverage/await_ready.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |#![coverage(off)] LL| |//@ edition: 2021 LL| | diff --git a/tests/coverage/await_ready.rs b/tests/coverage/await_ready.rs index 8fbdf7b8004..9eaa31dedda 100644 --- a/tests/coverage/await_ready.rs +++ b/tests/coverage/await_ready.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] #![coverage(off)] //@ edition: 2021 diff --git a/tests/coverage/bad_counter_ids.cov-map b/tests/coverage/bad_counter_ids.cov-map index ae9db139e3d..2b5399f33bb 100644 --- a/tests/coverage/bad_counter_ids.cov-map +++ b/tests/coverage/bad_counter_ids.cov-map @@ -1,88 +1,88 @@ Function name: bad_counter_ids::eq_bad -Raw bytes (14): 0x[01, 01, 00, 02, 01, 23, 01, 02, 1f, 00, 03, 01, 00, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 24, 01, 02, 1f, 00, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 
Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 35, 1) to (start + 2, 31) +- Code(Counter(0)) at (prev + 36, 1) to (start + 2, 31) - Code(Zero) at (prev + 3, 1) to (start + 0, 2) Highest counter ID seen: c0 Function name: bad_counter_ids::eq_bad_message -Raw bytes (21): 0x[01, 01, 01, 01, 00, 03, 01, 28, 01, 02, 0f, 02, 02, 20, 00, 2b, 00, 01, 01, 00, 02] +Raw bytes (21): 0x[01, 01, 01, 01, 00, 03, 01, 29, 01, 02, 0f, 02, 02, 20, 00, 2b, 00, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Zero Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 40, 1) to (start + 2, 15) +- Code(Counter(0)) at (prev + 41, 1) to (start + 2, 15) - Code(Expression(0, Sub)) at (prev + 2, 32) to (start + 0, 43) = (c0 - Zero) - Code(Zero) at (prev + 1, 1) to (start + 0, 2) Highest counter ID seen: c0 Function name: bad_counter_ids::eq_good -Raw bytes (14): 0x[01, 01, 00, 02, 01, 0f, 01, 02, 1f, 05, 03, 01, 00, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 10, 01, 02, 1f, 05, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 15, 1) to (start + 2, 31) +- Code(Counter(0)) at (prev + 16, 1) to (start + 2, 31) - Code(Counter(1)) at (prev + 3, 1) to (start + 0, 2) Highest counter ID seen: c1 Function name: bad_counter_ids::eq_good_message -Raw bytes (19): 0x[01, 01, 00, 03, 01, 14, 01, 02, 0f, 00, 02, 20, 00, 2b, 05, 01, 01, 00, 02] +Raw bytes (19): 0x[01, 01, 00, 03, 01, 15, 01, 02, 0f, 00, 02, 20, 00, 2b, 05, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 20, 1) to (start + 2, 15) +- Code(Counter(0)) at (prev + 21, 1) to (start + 2, 15) - Code(Zero) at (prev + 2, 32) to (start + 0, 43) - Code(Counter(1)) at (prev + 1, 1) to (start + 0, 2) Highest counter ID seen: c1 Function name: bad_counter_ids::ne_bad -Raw bytes (14): 0x[01, 01, 00, 02, 01, 2d, 01, 02, 1f, 00, 03, 01, 00, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 2e, 01, 02, 1f, 00, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 45, 1) to (start + 2, 31) +- Code(Counter(0)) at (prev + 46, 1) to (start + 2, 31) - Code(Zero) at (prev + 3, 1) to (start + 0, 2) Highest counter ID seen: c0 Function name: bad_counter_ids::ne_bad_message -Raw bytes (19): 0x[01, 01, 00, 03, 01, 32, 01, 02, 0f, 05, 02, 20, 00, 2b, 00, 01, 01, 00, 02] +Raw bytes (19): 0x[01, 01, 00, 03, 01, 33, 01, 02, 0f, 05, 02, 20, 00, 2b, 00, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 50, 1) to (start + 2, 15) +- Code(Counter(0)) at (prev + 51, 1) to (start + 2, 15) - Code(Counter(1)) at (prev + 2, 32) to (start + 0, 43) - Code(Zero) at (prev + 1, 1) to (start + 0, 2) Highest counter ID seen: c1 Function name: bad_counter_ids::ne_good -Raw bytes (16): 0x[01, 01, 01, 01, 00, 02, 01, 19, 01, 02, 1f, 02, 03, 01, 00, 02] +Raw bytes (16): 0x[01, 01, 01, 01, 00, 02, 01, 1a, 01, 02, 1f, 02, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Zero Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 25, 1) to (start + 2, 31) +- Code(Counter(0)) at (prev + 26, 1) to (start + 2, 31) - Code(Expression(0, Sub)) at (prev + 3, 1) to 
(start + 0, 2) = (c0 - Zero) Highest counter ID seen: c0 Function name: bad_counter_ids::ne_good_message -Raw bytes (21): 0x[01, 01, 01, 01, 00, 03, 01, 1e, 01, 02, 0f, 00, 02, 20, 00, 2b, 02, 01, 01, 00, 02] +Raw bytes (21): 0x[01, 01, 01, 01, 00, 03, 01, 1f, 01, 02, 0f, 00, 02, 20, 00, 2b, 02, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Zero Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 30, 1) to (start + 2, 15) +- Code(Counter(0)) at (prev + 31, 1) to (start + 2, 15) - Code(Zero) at (prev + 2, 32) to (start + 0, 43) - Code(Expression(0, Sub)) at (prev + 1, 1) to (start + 0, 2) = (c0 - Zero) diff --git a/tests/coverage/bad_counter_ids.coverage b/tests/coverage/bad_counter_ids.coverage index eede634923d..f6c69913cdd 100644 --- a/tests/coverage/bad_counter_ids.coverage +++ b/tests/coverage/bad_counter_ids.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Copt-level=0 -Zmir-opt-level=3 LL| | diff --git a/tests/coverage/bad_counter_ids.rs b/tests/coverage/bad_counter_ids.rs index 8fa0d83bf20..ef31d682e4f 100644 --- a/tests/coverage/bad_counter_ids.rs +++ b/tests/coverage/bad_counter_ids.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Copt-level=0 -Zmir-opt-level=3 diff --git a/tests/coverage/branch/generics.cov-map b/tests/coverage/branch/generics.cov-map index 9ff8e29f9e7..656890634ff 100644 --- a/tests/coverage/branch/generics.cov-map +++ b/tests/coverage/branch/generics.cov-map @@ -1,11 +1,11 @@ Function name: generics::print_size::<()> -Raw bytes (33): 0x[01, 01, 01, 01, 05, 05, 01, 05, 01, 01, 24, 20, 05, 02, 01, 08, 00, 24, 05, 00, 25, 02, 06, 02, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (33): 0x[01, 01, 01, 01, 05, 05, 01, 06, 01, 01, 24, 20, 05, 02, 01, 08, 00, 24, 05, 00, 25, 02, 06, 02, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 5 -- Code(Counter(0)) at (prev + 5, 1) to (start + 1, 36) +- Code(Counter(0)) at (prev + 6, 1) to (start + 1, 36) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 1, 8) to (start + 0, 36) true = c1 false = (c0 - c1) @@ -16,13 +16,13 @@ Number of file 0 mappings: 5 Highest counter ID seen: c1 Function name: generics::print_size::<u32> -Raw bytes (33): 0x[01, 01, 01, 01, 05, 05, 01, 05, 01, 01, 24, 20, 05, 02, 01, 08, 00, 24, 05, 00, 25, 02, 06, 02, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (33): 0x[01, 01, 01, 01, 05, 05, 01, 06, 01, 01, 24, 20, 05, 02, 01, 08, 00, 24, 05, 00, 25, 02, 06, 02, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 5 -- Code(Counter(0)) at (prev + 5, 1) to (start + 1, 36) +- Code(Counter(0)) at (prev + 6, 1) to (start + 1, 36) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 1, 8) to (start + 0, 36) true = c1 false = (c0 - c1) @@ -33,13 +33,13 @@ Number of file 0 mappings: 5 Highest counter ID seen: c1 Function name: generics::print_size::<u64> -Raw bytes (33): 0x[01, 01, 01, 01, 05, 05, 01, 05, 01, 01, 24, 20, 05, 02, 01, 08, 00, 24, 05, 00, 25, 02, 06, 02, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (33): 0x[01, 01, 01, 01, 05, 05, 01, 06, 01, 01, 24, 20, 05, 02, 01, 08, 00, 24, 05, 00, 
25, 02, 06, 02, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 5 -- Code(Counter(0)) at (prev + 5, 1) to (start + 1, 36) +- Code(Counter(0)) at (prev + 6, 1) to (start + 1, 36) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 1, 8) to (start + 0, 36) true = c1 false = (c0 - c1) diff --git a/tests/coverage/branch/generics.coverage b/tests/coverage/branch/generics.coverage index 849ddfa7a72..85f73d45f65 100644 --- a/tests/coverage/branch/generics.coverage +++ b/tests/coverage/branch/generics.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/generics.rs b/tests/coverage/branch/generics.rs index 24bfdaaa687..d870ace7006 100644 --- a/tests/coverage/branch/generics.rs +++ b/tests/coverage/branch/generics.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/guard.cov-map b/tests/coverage/branch/guard.cov-map index 9e02240f1a2..7ca499bd847 100644 --- a/tests/coverage/branch/guard.cov-map +++ b/tests/coverage/branch/guard.cov-map @@ -1,5 +1,5 @@ Function name: guard::branch_match_guard -Raw bytes (89): 0x[01, 01, 08, 05, 0d, 05, 17, 0d, 11, 1f, 17, 05, 09, 0d, 11, 1f, 15, 05, 09, 0d, 01, 0b, 01, 01, 10, 02, 03, 0b, 00, 0c, 15, 01, 14, 02, 0a, 0d, 03, 0e, 00, 0f, 05, 00, 14, 00, 19, 20, 0d, 02, 00, 14, 00, 1e, 0d, 00, 1d, 02, 0a, 11, 03, 0e, 00, 0f, 02, 00, 14, 00, 19, 20, 11, 06, 00, 14, 00, 1e, 11, 00, 1d, 02, 0a, 0e, 03, 0e, 02, 0a, 1b, 04, 01, 00, 02] +Raw bytes (89): 0x[01, 01, 08, 05, 0d, 05, 17, 0d, 11, 1f, 17, 05, 09, 0d, 11, 1f, 15, 05, 09, 0d, 01, 0c, 01, 01, 10, 02, 03, 0b, 00, 0c, 15, 01, 14, 02, 0a, 0d, 03, 0e, 00, 0f, 05, 00, 14, 00, 19, 20, 0d, 02, 00, 14, 00, 1e, 0d, 00, 1d, 02, 0a, 11, 03, 0e, 00, 0f, 02, 00, 14, 00, 19, 20, 11, 06, 00, 14, 00, 1e, 11, 00, 1d, 02, 0a, 0e, 03, 0e, 02, 0a, 1b, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 8 @@ -12,7 +12,7 @@ Number of expressions: 8 - expression 6 operands: lhs = Expression(7, Add), rhs = Counter(5) - expression 7 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 13 -- Code(Counter(0)) at (prev + 11, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) - Code(Expression(0, Sub)) at (prev + 3, 11) to (start + 0, 12) = (c1 - c3) - Code(Counter(5)) at (prev + 1, 20) to (start + 2, 10) diff --git a/tests/coverage/branch/guard.coverage b/tests/coverage/branch/guard.coverage index 3376209d373..f89b965b5d0 100644 --- a/tests/coverage/branch/guard.coverage +++ b/tests/coverage/branch/guard.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/guard.rs b/tests/coverage/branch/guard.rs index 78b79a62946..fa049e6206d 100644 --- a/tests/coverage/branch/guard.rs +++ b/tests/coverage/branch/guard.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/if-let.coverage 
b/tests/coverage/branch/if-let.coverage index 368597f1daa..9a3f0113f75 100644 --- a/tests/coverage/branch/if-let.coverage +++ b/tests/coverage/branch/if-let.coverage @@ -1,4 +1,4 @@ - LL| |#![feature(let_chains)] + LL| |#![feature(coverage_attribute, let_chains)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/if-let.rs b/tests/coverage/branch/if-let.rs index 1ac506964b1..13db00a82b1 100644 --- a/tests/coverage/branch/if-let.rs +++ b/tests/coverage/branch/if-let.rs @@ -1,4 +1,4 @@ -#![feature(let_chains)] +#![feature(coverage_attribute, let_chains)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/if.cov-map b/tests/coverage/branch/if.cov-map index bd507c5a324..3d9a1d2e1ab 100644 --- a/tests/coverage/branch/if.cov-map +++ b/tests/coverage/branch/if.cov-map @@ -1,5 +1,5 @@ Function name: if::branch_and -Raw bytes (54): 0x[01, 01, 03, 05, 09, 09, 0d, 05, 0d, 08, 01, 2a, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 09, 00, 0d, 00, 0e, 20, 0d, 06, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (54): 0x[01, 01, 03, 05, 09, 09, 0d, 05, 0d, 08, 01, 2b, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 09, 00, 0d, 00, 0e, 20, 0d, 06, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 05, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -7,7 +7,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(2), rhs = Counter(3) - expression 2 operands: lhs = Counter(1), rhs = Counter(3) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 42, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 43, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 8) to (start + 0, 9) - Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) true = c2 @@ -23,7 +23,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c3 Function name: if::branch_not -Raw bytes (116): 0x[01, 01, 07, 05, 09, 05, 0d, 05, 0d, 05, 11, 05, 11, 05, 15, 05, 15, 12, 01, 0b, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 09, 01, 09, 00, 11, 02, 01, 05, 00, 06, 05, 01, 08, 00, 0a, 20, 0a, 0d, 00, 08, 00, 0a, 0a, 00, 0b, 02, 06, 0d, 02, 05, 00, 06, 05, 01, 08, 00, 0b, 20, 11, 12, 00, 08, 00, 0b, 11, 00, 0c, 02, 06, 12, 02, 05, 00, 06, 05, 01, 08, 00, 0c, 20, 1a, 15, 00, 08, 00, 0c, 1a, 00, 0d, 02, 06, 15, 02, 05, 00, 06, 05, 01, 01, 00, 02] +Raw bytes (116): 0x[01, 01, 07, 05, 09, 05, 0d, 05, 0d, 05, 11, 05, 11, 05, 15, 05, 15, 12, 01, 0c, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 09, 01, 09, 00, 11, 02, 01, 05, 00, 06, 05, 01, 08, 00, 0a, 20, 0a, 0d, 00, 08, 00, 0a, 0a, 00, 0b, 02, 06, 0d, 02, 05, 00, 06, 05, 01, 08, 00, 0b, 20, 11, 12, 00, 08, 00, 0b, 11, 00, 0c, 02, 06, 12, 02, 05, 00, 06, 05, 01, 08, 00, 0c, 20, 1a, 15, 00, 08, 00, 0c, 1a, 00, 0d, 02, 06, 15, 02, 05, 00, 06, 05, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 7 @@ -35,7 +35,7 @@ Number of expressions: 7 - expression 5 operands: lhs = Counter(1), rhs = Counter(5) - expression 6 operands: lhs = Counter(1), rhs = Counter(5) Number of file 0 mappings: 18 -- Code(Counter(0)) at (prev + 11, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 8) to (start + 0, 9) - Branch { true: Counter(2), false: 
Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) true = c2 @@ -68,7 +68,7 @@ Number of file 0 mappings: 18 Highest counter ID seen: c5 Function name: if::branch_not_as -Raw bytes (90): 0x[01, 01, 05, 05, 09, 05, 0d, 05, 0d, 05, 11, 05, 11, 0e, 01, 1c, 01, 01, 10, 05, 03, 08, 00, 14, 20, 02, 09, 00, 08, 00, 14, 02, 00, 15, 02, 06, 09, 02, 05, 00, 06, 05, 01, 08, 00, 15, 20, 0d, 0a, 00, 08, 00, 15, 0d, 00, 16, 02, 06, 0a, 02, 05, 00, 06, 05, 01, 08, 00, 16, 20, 12, 11, 00, 08, 00, 16, 12, 00, 17, 02, 06, 11, 02, 05, 00, 06, 05, 01, 01, 00, 02] +Raw bytes (90): 0x[01, 01, 05, 05, 09, 05, 0d, 05, 0d, 05, 11, 05, 11, 0e, 01, 1d, 01, 01, 10, 05, 03, 08, 00, 14, 20, 02, 09, 00, 08, 00, 14, 02, 00, 15, 02, 06, 09, 02, 05, 00, 06, 05, 01, 08, 00, 15, 20, 0d, 0a, 00, 08, 00, 15, 0d, 00, 16, 02, 06, 0a, 02, 05, 00, 06, 05, 01, 08, 00, 16, 20, 12, 11, 00, 08, 00, 16, 12, 00, 17, 02, 06, 11, 02, 05, 00, 06, 05, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 @@ -78,7 +78,7 @@ Number of expressions: 5 - expression 3 operands: lhs = Counter(1), rhs = Counter(4) - expression 4 operands: lhs = Counter(1), rhs = Counter(4) Number of file 0 mappings: 14 -- Code(Counter(0)) at (prev + 28, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 29, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 8) to (start + 0, 20) - Branch { true: Expression(0, Sub), false: Counter(2) } at (prev + 0, 8) to (start + 0, 20) true = (c1 - c2) @@ -104,7 +104,7 @@ Number of file 0 mappings: 14 Highest counter ID seen: c4 Function name: if::branch_or -Raw bytes (60): 0x[01, 01, 06, 05, 09, 05, 17, 09, 0d, 09, 0d, 05, 17, 09, 0d, 08, 01, 34, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 20, 0d, 12, 00, 0d, 00, 0e, 17, 00, 0f, 02, 06, 12, 02, 0c, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (60): 0x[01, 01, 06, 05, 09, 05, 17, 09, 0d, 09, 0d, 05, 17, 09, 0d, 08, 01, 35, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 20, 0d, 12, 00, 0d, 00, 0e, 17, 00, 0f, 02, 06, 12, 02, 0c, 02, 06, 05, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 6 @@ -115,7 +115,7 @@ Number of expressions: 6 - expression 4 operands: lhs = Counter(1), rhs = Expression(5, Add) - expression 5 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 52, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 53, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 8) to (start + 0, 9) - Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) true = c2 diff --git a/tests/coverage/branch/if.coverage b/tests/coverage/branch/if.coverage index fd0a3d87a8d..3d107188ca6 100644 --- a/tests/coverage/branch/if.coverage +++ b/tests/coverage/branch/if.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/if.rs b/tests/coverage/branch/if.rs index 9e06ffc1aa5..151eede75bb 100644 --- a/tests/coverage/branch/if.rs +++ b/tests/coverage/branch/if.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/lazy-boolean.cov-map b/tests/coverage/branch/lazy-boolean.cov-map index 70819505485..94522734bcd 100644 --- a/tests/coverage/branch/lazy-boolean.cov-map +++ 
b/tests/coverage/branch/lazy-boolean.cov-map @@ -1,11 +1,11 @@ Function name: lazy_boolean::branch_and -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 12, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 0e, 20, 09, 02, 00, 0d, 00, 0e, 09, 00, 12, 00, 13, 05, 01, 05, 01, 02] +Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 13, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 0e, 20, 09, 02, 00, 0d, 00, 0e, 09, 00, 12, 00, 13, 05, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 18, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 19, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) - Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14) - Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) @@ -16,13 +16,13 @@ Number of file 0 mappings: 6 Highest counter ID seen: c2 Function name: lazy_boolean::branch_or -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 1a, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 0e, 20, 09, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 05, 01, 05, 01, 02] +Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 1b, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 0e, 20, 09, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 05, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 26, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 27, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) - Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14) - Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) @@ -34,7 +34,7 @@ Number of file 0 mappings: 6 Highest counter ID seen: c2 Function name: lazy_boolean::chain -Raw bytes (141): 0x[01, 01, 0f, 05, 09, 09, 0d, 0d, 11, 05, 15, 05, 15, 05, 3b, 15, 19, 05, 3b, 15, 19, 05, 37, 3b, 1d, 15, 19, 05, 37, 3b, 1d, 15, 19, 13, 01, 23, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 12, 20, 09, 02, 00, 0d, 00, 12, 09, 00, 16, 00, 1b, 20, 0d, 06, 00, 16, 00, 1b, 0d, 00, 1f, 00, 24, 20, 11, 0a, 00, 1f, 00, 24, 11, 00, 28, 00, 2d, 05, 01, 05, 00, 11, 05, 03, 09, 00, 0a, 05, 00, 0d, 00, 12, 20, 15, 12, 00, 0d, 00, 12, 12, 00, 16, 00, 1b, 20, 19, 1e, 00, 16, 00, 1b, 1e, 00, 1f, 00, 24, 20, 1d, 32, 00, 1f, 00, 24, 32, 00, 28, 00, 2d, 05, 01, 05, 01, 02] +Raw bytes (141): 0x[01, 01, 0f, 05, 09, 09, 0d, 0d, 11, 05, 15, 05, 15, 05, 3b, 15, 19, 05, 3b, 15, 19, 05, 37, 3b, 1d, 15, 19, 05, 37, 3b, 1d, 15, 19, 13, 01, 24, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 12, 20, 09, 02, 00, 0d, 00, 12, 09, 00, 16, 00, 1b, 20, 0d, 06, 00, 16, 00, 1b, 0d, 00, 1f, 00, 24, 20, 11, 0a, 00, 1f, 00, 24, 11, 00, 28, 00, 2d, 05, 01, 05, 00, 11, 05, 03, 09, 00, 0a, 05, 00, 0d, 00, 12, 20, 15, 12, 00, 0d, 00, 12, 12, 00, 16, 00, 1b, 20, 19, 1e, 00, 16, 00, 1b, 1e, 00, 1f, 00, 24, 20, 1d, 32, 00, 1f, 00, 24, 32, 00, 28, 00, 2d, 05, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 15 @@ -54,7 +54,7 @@ Number of expressions: 15 - expression 13 operands: lhs = Expression(14, Add), rhs = Counter(7) - expression 14 operands: lhs = Counter(5), rhs = Counter(6) Number of file 0 mappings: 19 -- Code(Counter(0)) at (prev + 35, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 36, 1) to (start + 1, 16) - Code(Counter(1)) at 
(prev + 4, 9) to (start + 0, 10) - Code(Counter(1)) at (prev + 0, 13) to (start + 0, 18) - Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 18) @@ -91,7 +91,7 @@ Number of file 0 mappings: 19 Highest counter ID seen: c7 Function name: lazy_boolean::nested_mixed -Raw bytes (137): 0x[01, 01, 0d, 05, 09, 05, 1f, 09, 0d, 09, 0d, 1f, 11, 09, 0d, 1f, 11, 09, 0d, 05, 15, 15, 19, 05, 19, 05, 33, 19, 1d, 13, 01, 30, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0e, 00, 13, 20, 09, 02, 00, 0e, 00, 13, 02, 00, 17, 00, 1d, 20, 0d, 06, 00, 17, 00, 1d, 1f, 00, 23, 00, 28, 20, 11, 1a, 00, 23, 00, 28, 1a, 00, 2c, 00, 33, 05, 01, 05, 00, 11, 05, 03, 09, 00, 0a, 05, 00, 0e, 00, 13, 20, 15, 22, 00, 0e, 00, 13, 15, 00, 17, 00, 1c, 20, 19, 26, 00, 17, 00, 1c, 2a, 00, 22, 00, 28, 20, 1d, 2e, 00, 22, 00, 28, 1d, 00, 2c, 00, 33, 05, 01, 05, 01, 02] +Raw bytes (137): 0x[01, 01, 0d, 05, 09, 05, 1f, 09, 0d, 09, 0d, 1f, 11, 09, 0d, 1f, 11, 09, 0d, 05, 15, 15, 19, 05, 19, 05, 33, 19, 1d, 13, 01, 31, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0e, 00, 13, 20, 09, 02, 00, 0e, 00, 13, 02, 00, 17, 00, 1d, 20, 0d, 06, 00, 17, 00, 1d, 1f, 00, 23, 00, 28, 20, 11, 1a, 00, 23, 00, 28, 1a, 00, 2c, 00, 33, 05, 01, 05, 00, 11, 05, 03, 09, 00, 0a, 05, 00, 0e, 00, 13, 20, 15, 22, 00, 0e, 00, 13, 15, 00, 17, 00, 1c, 20, 19, 26, 00, 17, 00, 1c, 2a, 00, 22, 00, 28, 20, 1d, 2e, 00, 22, 00, 28, 1d, 00, 2c, 00, 33, 05, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 13 @@ -109,7 +109,7 @@ Number of expressions: 13 - expression 11 operands: lhs = Counter(1), rhs = Expression(12, Add) - expression 12 operands: lhs = Counter(6), rhs = Counter(7) Number of file 0 mappings: 19 -- Code(Counter(0)) at (prev + 48, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 49, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) - Code(Counter(1)) at (prev + 0, 14) to (start + 0, 19) - Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 14) to (start + 0, 19) diff --git a/tests/coverage/branch/lazy-boolean.coverage b/tests/coverage/branch/lazy-boolean.coverage index 6e5dfbd19f3..f6aba1da46e 100644 --- a/tests/coverage/branch/lazy-boolean.coverage +++ b/tests/coverage/branch/lazy-boolean.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/lazy-boolean.rs b/tests/coverage/branch/lazy-boolean.rs index 68267bf56ed..3c73fc1a87d 100644 --- a/tests/coverage/branch/lazy-boolean.rs +++ b/tests/coverage/branch/lazy-boolean.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/let-else.cov-map b/tests/coverage/branch/let-else.cov-map index 466de5d5de3..e6bf7ed6a92 100644 --- a/tests/coverage/branch/let-else.cov-map +++ b/tests/coverage/branch/let-else.cov-map @@ -1,11 +1,11 @@ Function name: let_else::let_else -Raw bytes (43): 0x[01, 01, 01, 05, 09, 07, 01, 0b, 01, 01, 10, 20, 02, 09, 03, 09, 00, 10, 02, 00, 0e, 00, 0f, 05, 00, 13, 00, 18, 09, 01, 09, 01, 0f, 02, 04, 05, 00, 0b, 05, 01, 01, 00, 02] +Raw bytes (43): 0x[01, 01, 01, 05, 09, 07, 01, 0c, 01, 01, 10, 20, 02, 09, 03, 09, 00, 10, 02, 00, 0e, 00, 0f, 05, 00, 13, 00, 18, 09, 01, 09, 01, 0f, 02, 04, 05, 00, 0b, 05, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - 
expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 7 -- Code(Counter(0)) at (prev + 11, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) - Branch { true: Expression(0, Sub), false: Counter(2) } at (prev + 3, 9) to (start + 0, 16) true = (c1 - c2) false = c2 diff --git a/tests/coverage/branch/let-else.coverage b/tests/coverage/branch/let-else.coverage index f0549205590..22ad8f2b0e1 100644 --- a/tests/coverage/branch/let-else.coverage +++ b/tests/coverage/branch/let-else.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/let-else.rs b/tests/coverage/branch/let-else.rs index 0d23d956541..af0665d8241 100644 --- a/tests/coverage/branch/let-else.rs +++ b/tests/coverage/branch/let-else.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/match-arms.cov-map b/tests/coverage/branch/match-arms.cov-map index 5d9f94923bc..53d0a4edbd0 100644 --- a/tests/coverage/branch/match-arms.cov-map +++ b/tests/coverage/branch/match-arms.cov-map @@ -1,5 +1,5 @@ Function name: match_arms::guards -Raw bytes (98): 0x[01, 01, 0d, 11, 19, 27, 19, 2b, 00, 2f, 11, 33, 0d, 05, 09, 1f, 25, 23, 21, 27, 1d, 2b, 00, 2f, 11, 33, 0d, 05, 09, 0c, 01, 2f, 01, 01, 10, 11, 03, 0b, 00, 10, 1d, 01, 11, 00, 29, 20, 1d, 05, 00, 17, 00, 1b, 21, 01, 11, 00, 29, 20, 21, 09, 00, 17, 00, 1b, 25, 01, 11, 00, 29, 20, 25, 0d, 00, 17, 00, 1b, 19, 01, 11, 00, 29, 20, 19, 02, 00, 17, 00, 1b, 06, 01, 0e, 00, 18, 1b, 03, 05, 01, 02] +Raw bytes (98): 0x[01, 01, 0d, 11, 19, 27, 19, 2b, 00, 2f, 11, 33, 0d, 05, 09, 1f, 25, 23, 21, 27, 1d, 2b, 00, 2f, 11, 33, 0d, 05, 09, 0c, 01, 30, 01, 01, 10, 11, 03, 0b, 00, 10, 1d, 01, 11, 00, 29, 20, 1d, 05, 00, 17, 00, 1b, 21, 01, 11, 00, 29, 20, 21, 09, 00, 17, 00, 1b, 25, 01, 11, 00, 29, 20, 25, 0d, 00, 17, 00, 1b, 19, 01, 11, 00, 29, 20, 19, 02, 00, 17, 00, 1b, 06, 01, 0e, 00, 18, 1b, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 13 @@ -17,7 +17,7 @@ Number of expressions: 13 - expression 11 operands: lhs = Expression(12, Add), rhs = Counter(3) - expression 12 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 12 -- Code(Counter(0)) at (prev + 47, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 48, 1) to (start + 1, 16) - Code(Counter(4)) at (prev + 3, 11) to (start + 0, 16) - Code(Counter(7)) at (prev + 1, 17) to (start + 0, 41) - Branch { true: Counter(7), false: Counter(1) } at (prev + 0, 23) to (start + 0, 27) @@ -42,7 +42,7 @@ Number of file 0 mappings: 12 Highest counter ID seen: c9 Function name: match_arms::match_arms -Raw bytes (45): 0x[01, 01, 03, 05, 07, 0b, 11, 09, 0d, 07, 01, 17, 01, 01, 10, 05, 03, 0b, 00, 10, 09, 01, 11, 00, 21, 0d, 01, 11, 00, 21, 11, 01, 11, 00, 21, 02, 01, 11, 00, 21, 05, 03, 05, 01, 02] +Raw bytes (45): 0x[01, 01, 03, 05, 07, 0b, 11, 09, 0d, 07, 01, 18, 01, 01, 10, 05, 03, 0b, 00, 10, 09, 01, 11, 00, 21, 0d, 01, 11, 00, 21, 11, 01, 11, 00, 21, 02, 01, 11, 00, 21, 05, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -50,7 +50,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Expression(2, Add), rhs = Counter(4) - expression 2 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 7 -- 
Code(Counter(0)) at (prev + 23, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 24, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 11) to (start + 0, 16) - Code(Counter(2)) at (prev + 1, 17) to (start + 0, 33) - Code(Counter(3)) at (prev + 1, 17) to (start + 0, 33) @@ -61,7 +61,7 @@ Number of file 0 mappings: 7 Highest counter ID seen: c4 Function name: match_arms::or_patterns -Raw bytes (57): 0x[01, 01, 04, 09, 0d, 05, 0b, 03, 11, 05, 03, 09, 01, 24, 01, 01, 10, 05, 03, 0b, 00, 10, 09, 01, 11, 00, 12, 0d, 00, 1e, 00, 1f, 03, 00, 24, 00, 2e, 11, 01, 11, 00, 12, 06, 00, 1e, 00, 1f, 0e, 00, 24, 00, 2e, 05, 03, 05, 01, 02] +Raw bytes (57): 0x[01, 01, 04, 09, 0d, 05, 0b, 03, 11, 05, 03, 09, 01, 25, 01, 01, 10, 05, 03, 0b, 00, 10, 09, 01, 11, 00, 12, 0d, 00, 1e, 00, 1f, 03, 00, 24, 00, 2e, 11, 01, 11, 00, 12, 06, 00, 1e, 00, 1f, 0e, 00, 24, 00, 2e, 05, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 4 @@ -70,7 +70,7 @@ Number of expressions: 4 - expression 2 operands: lhs = Expression(0, Add), rhs = Counter(4) - expression 3 operands: lhs = Counter(1), rhs = Expression(0, Add) Number of file 0 mappings: 9 -- Code(Counter(0)) at (prev + 36, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 37, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 11) to (start + 0, 16) - Code(Counter(2)) at (prev + 1, 17) to (start + 0, 18) - Code(Counter(3)) at (prev + 0, 30) to (start + 0, 31) diff --git a/tests/coverage/branch/match-arms.coverage b/tests/coverage/branch/match-arms.coverage index bc797d55a53..ea8a6f97ab1 100644 --- a/tests/coverage/branch/match-arms.coverage +++ b/tests/coverage/branch/match-arms.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/match-arms.rs b/tests/coverage/branch/match-arms.rs index 6292a9c2050..63151f59ffe 100644 --- a/tests/coverage/branch/match-arms.rs +++ b/tests/coverage/branch/match-arms.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/match-trivial.cov-map b/tests/coverage/branch/match-trivial.cov-map index 0a1d8cef050..6af8ce46f5f 100644 --- a/tests/coverage/branch/match-trivial.cov-map +++ b/tests/coverage/branch/match-trivial.cov-map @@ -1,19 +1,19 @@ Function name: match_trivial::_uninhabited (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 15, 01, 01, 10] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 16, 01, 01, 10] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 21, 1) to (start + 1, 16) +- Code(Zero) at (prev + 22, 1) to (start + 1, 16) Highest counter ID seen: (none) Function name: match_trivial::trivial -Raw bytes (14): 0x[01, 01, 00, 02, 01, 1d, 01, 01, 10, 05, 03, 0b, 05, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 1e, 01, 01, 10, 05, 03, 0b, 05, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 29, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 30, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 11) to (start + 5, 2) Highest counter ID seen: c1 diff --git a/tests/coverage/branch/match-trivial.coverage b/tests/coverage/branch/match-trivial.coverage index bd6be9ea3b5..4ffb172e1b6 100644 --- a/tests/coverage/branch/match-trivial.coverage 
+++ b/tests/coverage/branch/match-trivial.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/match-trivial.rs b/tests/coverage/branch/match-trivial.rs index 62680916d5c..db8887a26b7 100644 --- a/tests/coverage/branch/match-trivial.rs +++ b/tests/coverage/branch/match-trivial.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/no-mir-spans.cov-map b/tests/coverage/branch/no-mir-spans.cov-map index 15ead0726e1..6003efc36ca 100644 --- a/tests/coverage/branch/no-mir-spans.cov-map +++ b/tests/coverage/branch/no-mir-spans.cov-map @@ -1,35 +1,35 @@ Function name: no_mir_spans::while_cond -Raw bytes (16): 0x[01, 01, 00, 02, 01, 0f, 01, 00, 11, 20, 05, 09, 04, 0b, 00, 10] +Raw bytes (16): 0x[01, 01, 00, 02, 01, 10, 01, 00, 11, 20, 05, 09, 04, 0b, 00, 10] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 15, 1) to (start + 0, 17) +- Code(Counter(0)) at (prev + 16, 1) to (start + 0, 17) - Branch { true: Counter(1), false: Counter(2) } at (prev + 4, 11) to (start + 0, 16) true = c1 false = c2 Highest counter ID seen: c2 Function name: no_mir_spans::while_cond_not -Raw bytes (16): 0x[01, 01, 00, 02, 01, 18, 01, 00, 15, 20, 09, 05, 04, 0b, 00, 14] +Raw bytes (16): 0x[01, 01, 00, 02, 01, 19, 01, 00, 15, 20, 09, 05, 04, 0b, 00, 14] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 24, 1) to (start + 0, 21) +- Code(Counter(0)) at (prev + 25, 1) to (start + 0, 21) - Branch { true: Counter(2), false: Counter(1) } at (prev + 4, 11) to (start + 0, 20) true = c2 false = c1 Highest counter ID seen: c2 Function name: no_mir_spans::while_op_and -Raw bytes (25): 0x[01, 01, 01, 05, 09, 03, 01, 21, 01, 00, 13, 20, 05, 0d, 05, 0b, 00, 10, 20, 02, 09, 00, 14, 00, 19] +Raw bytes (25): 0x[01, 01, 01, 05, 09, 03, 01, 22, 01, 00, 13, 20, 05, 0d, 05, 0b, 00, 10, 20, 02, 09, 00, 14, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 33, 1) to (start + 0, 19) +- Code(Counter(0)) at (prev + 34, 1) to (start + 0, 19) - Branch { true: Counter(1), false: Counter(3) } at (prev + 5, 11) to (start + 0, 16) true = c1 false = c3 @@ -39,13 +39,13 @@ Number of file 0 mappings: 3 Highest counter ID seen: c3 Function name: no_mir_spans::while_op_or -Raw bytes (25): 0x[01, 01, 01, 09, 0d, 03, 01, 2c, 01, 00, 12, 20, 05, 09, 05, 0b, 00, 10, 20, 0d, 02, 00, 14, 00, 19] +Raw bytes (25): 0x[01, 01, 01, 09, 0d, 03, 01, 2d, 01, 00, 12, 20, 05, 09, 05, 0b, 00, 10, 20, 0d, 02, 00, 14, 00, 19] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 44, 1) to (start + 0, 18) +- Code(Counter(0)) at (prev + 45, 1) to (start + 0, 18) - Branch { true: Counter(1), false: Counter(2) } at (prev + 5, 11) to (start + 0, 16) true = c1 false = c2 diff --git a/tests/coverage/branch/no-mir-spans.coverage b/tests/coverage/branch/no-mir-spans.coverage index be5a1ef3442..2cae98ed3ff 100644 --- 
a/tests/coverage/branch/no-mir-spans.coverage +++ b/tests/coverage/branch/no-mir-spans.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch,no-mir-spans LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/no-mir-spans.rs b/tests/coverage/branch/no-mir-spans.rs index 47b4d1eff58..acb268f2d45 100644 --- a/tests/coverage/branch/no-mir-spans.rs +++ b/tests/coverage/branch/no-mir-spans.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch,no-mir-spans //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/while.cov-map b/tests/coverage/branch/while.cov-map index f2956efade1..5eb08a42803 100644 --- a/tests/coverage/branch/while.cov-map +++ b/tests/coverage/branch/while.cov-map @@ -1,11 +1,11 @@ Function name: while::while_cond -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 0b, 01, 01, 10, 05, 03, 09, 00, 12, 03, 01, 0b, 00, 10, 20, 09, 05, 00, 0b, 00, 10, 09, 00, 11, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 0c, 01, 01, 10, 05, 03, 09, 00, 12, 03, 01, 0b, 00, 10, 20, 09, 05, 00, 0b, 00, 10, 09, 00, 11, 02, 06, 05, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 11, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 9) to (start + 0, 18) - Code(Expression(0, Add)) at (prev + 1, 11) to (start + 0, 16) = (c1 + c2) @@ -17,13 +17,13 @@ Number of file 0 mappings: 6 Highest counter ID seen: c2 Function name: while::while_cond_not -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 14, 01, 01, 10, 05, 03, 09, 00, 12, 03, 01, 0b, 00, 14, 20, 09, 05, 00, 0b, 00, 14, 09, 00, 15, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 15, 01, 01, 10, 05, 03, 09, 00, 12, 03, 01, 0b, 00, 14, 20, 09, 05, 00, 0b, 00, 14, 09, 00, 15, 02, 06, 05, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 20, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 21, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 9) to (start + 0, 18) - Code(Expression(0, Add)) at (prev + 1, 11) to (start + 0, 20) = (c1 + c2) @@ -35,7 +35,7 @@ Number of file 0 mappings: 6 Highest counter ID seen: c2 Function name: while::while_op_and -Raw bytes (56): 0x[01, 01, 04, 05, 09, 03, 0d, 03, 0d, 05, 0d, 08, 01, 1d, 01, 01, 10, 05, 03, 09, 01, 12, 03, 02, 0b, 00, 10, 20, 0a, 0d, 00, 0b, 00, 10, 0a, 00, 14, 00, 19, 20, 09, 0e, 00, 14, 00, 19, 09, 00, 1a, 03, 06, 05, 04, 01, 00, 02] +Raw bytes (56): 0x[01, 01, 04, 05, 09, 03, 0d, 03, 0d, 05, 0d, 08, 01, 1e, 01, 01, 10, 05, 03, 09, 01, 12, 03, 02, 0b, 00, 10, 20, 0a, 0d, 00, 0b, 00, 10, 0a, 00, 14, 00, 19, 20, 09, 0e, 00, 14, 00, 19, 09, 00, 1a, 03, 06, 05, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 4 @@ -44,7 +44,7 @@ Number of expressions: 4 - expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) - expression 3 operands: lhs = Counter(1), rhs = Counter(3) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 29, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 30, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 
9) to (start + 1, 18) - Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 16) = (c1 + c2) @@ -61,7 +61,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c3 Function name: while::while_op_or -Raw bytes (58): 0x[01, 01, 05, 07, 0d, 05, 09, 05, 0d, 05, 0d, 09, 0d, 08, 01, 28, 01, 01, 10, 05, 03, 09, 01, 12, 03, 02, 0b, 00, 10, 20, 09, 0f, 00, 0b, 00, 10, 0f, 00, 14, 00, 19, 20, 0d, 05, 00, 14, 00, 19, 13, 00, 1a, 03, 06, 05, 04, 01, 00, 02] +Raw bytes (58): 0x[01, 01, 05, 07, 0d, 05, 09, 05, 0d, 05, 0d, 09, 0d, 08, 01, 29, 01, 01, 10, 05, 03, 09, 01, 12, 03, 02, 0b, 00, 10, 20, 09, 0f, 00, 0b, 00, 10, 0f, 00, 14, 00, 19, 20, 0d, 05, 00, 14, 00, 19, 13, 00, 1a, 03, 06, 05, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 @@ -71,7 +71,7 @@ Number of expressions: 5 - expression 3 operands: lhs = Counter(1), rhs = Counter(3) - expression 4 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 40, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 41, 1) to (start + 1, 16) - Code(Counter(1)) at (prev + 3, 9) to (start + 1, 18) - Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 16) = ((c1 + c2) + c3) diff --git a/tests/coverage/branch/while.coverage b/tests/coverage/branch/while.coverage index b16c8d7defd..8d9a6c3bc68 100644 --- a/tests/coverage/branch/while.coverage +++ b/tests/coverage/branch/while.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=branch LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/branch/while.rs b/tests/coverage/branch/while.rs index e7180c43a5e..507815fbecb 100644 --- a/tests/coverage/branch/while.rs +++ b/tests/coverage/branch/while.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/closure_macro_async.cov-map b/tests/coverage/closure_macro_async.cov-map index 6a36ce2e5fe..1bd1460a147 100644 --- a/tests/coverage/closure_macro_async.cov-map +++ b/tests/coverage/closure_macro_async.cov-map @@ -1,29 +1,29 @@ Function name: closure_macro_async::load_configuration_files -Raw bytes (9): 0x[01, 01, 00, 01, 01, 20, 01, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 21, 01, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 32, 1) to (start + 2, 2) +- Code(Counter(0)) at (prev + 33, 1) to (start + 2, 2) Highest counter ID seen: c0 Function name: closure_macro_async::test -Raw bytes (9): 0x[01, 01, 00, 01, 01, 24, 01, 00, 2b] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 25, 01, 00, 2b] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 36, 1) to (start + 0, 43) +- Code(Counter(0)) at (prev + 37, 1) to (start + 0, 43) Highest counter ID seen: c0 Function name: closure_macro_async::test::{closure#0} -Raw bytes (36): 0x[01, 01, 01, 01, 05, 06, 01, 24, 2b, 01, 21, 02, 02, 09, 00, 0f, 01, 00, 12, 00, 54, 05, 00, 54, 00, 55, 02, 02, 09, 02, 0b, 01, 03, 01, 00, 02] +Raw bytes (36): 0x[01, 01, 01, 01, 05, 06, 01, 25, 2b, 01, 21, 02, 02, 09, 00, 0f, 01, 00, 12, 00, 54, 05, 00, 54, 00, 55, 02, 02, 09, 02, 0b, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 6 
-- Code(Counter(0)) at (prev + 36, 43) to (start + 1, 33) +- Code(Counter(0)) at (prev + 37, 43) to (start + 1, 33) - Code(Expression(0, Sub)) at (prev + 2, 9) to (start + 0, 15) = (c0 - c1) - Code(Counter(0)) at (prev + 0, 18) to (start + 0, 84) @@ -34,7 +34,7 @@ Number of file 0 mappings: 6 Highest counter ID seen: c1 Function name: closure_macro_async::test::{closure#0}::{closure#0} -Raw bytes (35): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 05, 01, 13, 1c, 03, 21, 05, 04, 11, 01, 27, 02, 03, 11, 00, 16, 06, 00, 17, 00, 1e, 01, 02, 09, 00, 0a] +Raw bytes (35): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 05, 01, 14, 1c, 03, 21, 05, 04, 11, 01, 27, 02, 03, 11, 00, 16, 06, 00, 17, 00, 1e, 01, 02, 09, 00, 0a] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -42,7 +42,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(0), rhs = Expression(2, Add) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 5 -- Code(Counter(0)) at (prev + 19, 28) to (start + 3, 33) +- Code(Counter(0)) at (prev + 20, 28) to (start + 3, 33) - Code(Counter(1)) at (prev + 4, 17) to (start + 1, 39) - Code(Expression(0, Sub)) at (prev + 3, 17) to (start + 0, 22) = (c0 - c1) diff --git a/tests/coverage/closure_macro_async.coverage b/tests/coverage/closure_macro_async.coverage index efa40489bcf..1e1ffec9f76 100644 --- a/tests/coverage/closure_macro_async.coverage +++ b/tests/coverage/closure_macro_async.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2018 LL| | LL| |//@ aux-build: executor.rs diff --git a/tests/coverage/closure_macro_async.rs b/tests/coverage/closure_macro_async.rs index 1f67f2623a1..5dbb438424d 100644 --- a/tests/coverage/closure_macro_async.rs +++ b/tests/coverage/closure_macro_async.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2018 //@ aux-build: executor.rs diff --git a/tests/coverage/closure_unit_return.cov-map b/tests/coverage/closure_unit_return.cov-map index 0d108b3dcc7..9a66e0b0e77 100644 --- a/tests/coverage/closure_unit_return.cov-map +++ b/tests/coverage/closure_unit_return.cov-map @@ -1,38 +1,38 @@ Function name: closure_unit_return::explicit_unit -Raw bytes (14): 0x[01, 01, 00, 02, 01, 06, 01, 01, 10, 01, 05, 05, 02, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 07, 01, 01, 10, 01, 05, 05, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 6, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 7, 1) to (start + 1, 16) - Code(Counter(0)) at (prev + 5, 5) to (start + 2, 2) Highest counter ID seen: c0 Function name: closure_unit_return::explicit_unit::{closure#0} (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 07, 16, 02, 06] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 08, 16, 02, 06] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 7, 22) to (start + 2, 6) +- Code(Zero) at (prev + 8, 22) to (start + 2, 6) Highest counter ID seen: (none) Function name: closure_unit_return::implicit_unit -Raw bytes (14): 0x[01, 01, 00, 02, 01, 0f, 01, 01, 10, 01, 05, 05, 02, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 10, 01, 01, 10, 01, 05, 05, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 15, 1) to (start + 1, 16) +- Code(Counter(0)) at (prev + 16, 1) to (start + 1, 16) - Code(Counter(0)) at (prev + 5, 5) to (start + 2, 2) Highest counter ID seen: c0 
Function name: closure_unit_return::implicit_unit::{closure#0} (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 10, 16, 02, 06] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 11, 16, 02, 06] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 16, 22) to (start + 2, 6) +- Code(Zero) at (prev + 17, 22) to (start + 2, 6) Highest counter ID seen: (none) diff --git a/tests/coverage/closure_unit_return.coverage b/tests/coverage/closure_unit_return.coverage index 131fab993f0..5e57e0db160 100644 --- a/tests/coverage/closure_unit_return.coverage +++ b/tests/coverage/closure_unit_return.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |// Regression test for an inconsistency between functions that return the value diff --git a/tests/coverage/closure_unit_return.rs b/tests/coverage/closure_unit_return.rs index 74334f32f6e..d4f139dd363 100644 --- a/tests/coverage/closure_unit_return.rs +++ b/tests/coverage/closure_unit_return.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 // Regression test for an inconsistency between functions that return the value diff --git a/tests/coverage/condition/conditions.cov-map b/tests/coverage/condition/conditions.cov-map index d437c91b2b0..417637f2d2e 100644 --- a/tests/coverage/condition/conditions.cov-map +++ b/tests/coverage/condition/conditions.cov-map @@ -1,5 +1,5 @@ Function name: conditions::assign_3_and_or -Raw bytes (65): 0x[01, 01, 05, 01, 05, 05, 09, 01, 09, 01, 13, 09, 0d, 09, 01, 1b, 01, 00, 2f, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 0a, 00, 17, 00, 18, 20, 0d, 0e, 00, 17, 00, 18, 01, 01, 05, 01, 02] +Raw bytes (65): 0x[01, 01, 05, 01, 05, 05, 09, 01, 09, 01, 13, 09, 0d, 09, 01, 1c, 01, 00, 2f, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 0a, 00, 17, 00, 18, 20, 0d, 0e, 00, 17, 00, 18, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 @@ -9,7 +9,7 @@ Number of expressions: 5 - expression 3 operands: lhs = Counter(0), rhs = Expression(4, Add) - expression 4 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 9 -- Code(Counter(0)) at (prev + 27, 1) to (start + 0, 47) +- Code(Counter(0)) at (prev + 28, 1) to (start + 0, 47) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) @@ -28,7 +28,7 @@ Number of file 0 mappings: 9 Highest counter ID seen: c3 Function name: conditions::assign_3_or_and -Raw bytes (63): 0x[01, 01, 04, 01, 05, 01, 0b, 05, 09, 09, 0d, 09, 01, 16, 01, 00, 2f, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 09, 00, 17, 00, 18, 20, 0d, 0e, 00, 17, 00, 18, 01, 01, 05, 01, 02] +Raw bytes (63): 0x[01, 01, 04, 01, 05, 01, 0b, 05, 09, 09, 0d, 09, 01, 17, 01, 00, 2f, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 09, 00, 17, 00, 18, 20, 0d, 0e, 00, 17, 00, 18, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 4 @@ -37,7 +37,7 @@ Number of expressions: 4 - expression 2 operands: lhs = Counter(1), rhs = Counter(2) - expression 3 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 
mappings: 9 -- Code(Counter(0)) at (prev + 22, 1) to (start + 0, 47) +- Code(Counter(0)) at (prev + 23, 1) to (start + 0, 47) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) @@ -56,14 +56,14 @@ Number of file 0 mappings: 9 Highest counter ID seen: c3 Function name: conditions::assign_and -Raw bytes (47): 0x[01, 01, 02, 01, 05, 05, 09, 07, 01, 0c, 01, 00, 21, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 01, 01, 05, 01, 02] +Raw bytes (47): 0x[01, 01, 02, 01, 05, 05, 09, 07, 01, 0d, 01, 00, 21, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 7 -- Code(Counter(0)) at (prev + 12, 1) to (start + 0, 33) +- Code(Counter(0)) at (prev + 13, 1) to (start + 0, 33) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) @@ -77,7 +77,7 @@ Number of file 0 mappings: 7 Highest counter ID seen: c2 Function name: conditions::assign_or -Raw bytes (49): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 07, 01, 11, 01, 00, 20, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 01, 01, 05, 01, 02] +Raw bytes (49): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 07, 01, 12, 01, 00, 20, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 20, 09, 06, 00, 12, 00, 13, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -85,7 +85,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(0), rhs = Expression(2, Add) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 7 -- Code(Counter(0)) at (prev + 17, 1) to (start + 0, 32) +- Code(Counter(0)) at (prev + 18, 1) to (start + 0, 32) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) @@ -100,23 +100,23 @@ Number of file 0 mappings: 7 Highest counter ID seen: c2 Function name: conditions::foo -Raw bytes (9): 0x[01, 01, 00, 01, 01, 20, 01, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 21, 01, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 32, 1) to (start + 2, 2) +- Code(Counter(0)) at (prev + 33, 1) to (start + 2, 2) Highest counter ID seen: c0 Function name: conditions::func_call -Raw bytes (37): 0x[01, 01, 02, 01, 05, 05, 09, 05, 01, 24, 01, 01, 0a, 20, 05, 02, 01, 09, 00, 0a, 05, 00, 0e, 00, 0f, 20, 09, 06, 00, 0e, 00, 0f, 01, 01, 01, 00, 02] +Raw bytes (37): 0x[01, 01, 02, 01, 05, 05, 09, 05, 01, 25, 01, 01, 0a, 20, 05, 02, 01, 09, 00, 0a, 05, 00, 0e, 00, 0f, 20, 09, 06, 00, 0e, 00, 0f, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) 
Number of file 0 mappings: 5 -- Code(Counter(0)) at (prev + 36, 1) to (start + 1, 10) +- Code(Counter(0)) at (prev + 37, 1) to (start + 1, 10) - Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 1, 9) to (start + 0, 10) true = c1 false = (c0 - c1) @@ -128,11 +128,11 @@ Number of file 0 mappings: 5 Highest counter ID seen: c2 Function name: conditions::simple_assign -Raw bytes (9): 0x[01, 01, 00, 01, 01, 07, 01, 03, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 08, 01, 03, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 7, 1) to (start + 3, 2) +- Code(Counter(0)) at (prev + 8, 1) to (start + 3, 2) Highest counter ID seen: c0 diff --git a/tests/coverage/condition/conditions.coverage b/tests/coverage/condition/conditions.coverage index 117e9aabb5b..3215b391d62 100644 --- a/tests/coverage/condition/conditions.coverage +++ b/tests/coverage/condition/conditions.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ compile-flags: -Zcoverage-options=condition LL| |//@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/condition/conditions.rs b/tests/coverage/condition/conditions.rs index 63fa962ce5f..3d658dc93e0 100644 --- a/tests/coverage/condition/conditions.rs +++ b/tests/coverage/condition/conditions.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=condition //@ llvm-cov-flags: --show-branches=count diff --git a/tests/coverage/coverage_attr_closure.coverage b/tests/coverage/coverage_attr_closure.coverage index 31898786afc..7bdb96bdab8 100644 --- a/tests/coverage/coverage_attr_closure.coverage +++ b/tests/coverage/coverage_attr_closure.coverage @@ -1,4 +1,4 @@ - LL| |#![feature(stmt_expr_attributes)] + LL| |#![feature(coverage_attribute, stmt_expr_attributes)] LL| |#![allow(dead_code)] LL| |//@ edition: 2021 LL| | diff --git a/tests/coverage/coverage_attr_closure.rs b/tests/coverage/coverage_attr_closure.rs index c66ccb7f5a5..4341a868ab8 100644 --- a/tests/coverage/coverage_attr_closure.rs +++ b/tests/coverage/coverage_attr_closure.rs @@ -1,4 +1,4 @@ -#![feature(stmt_expr_attributes)] +#![feature(coverage_attribute, stmt_expr_attributes)] #![allow(dead_code)] //@ edition: 2021 diff --git a/tests/coverage/fn_sig_into_try.cov-map b/tests/coverage/fn_sig_into_try.cov-map index cd8726fe1c3..374811dba9e 100644 --- a/tests/coverage/fn_sig_into_try.cov-map +++ b/tests/coverage/fn_sig_into_try.cov-map @@ -1,20 +1,20 @@ Function name: fn_sig_into_try::a -Raw bytes (9): 0x[01, 01, 00, 01, 01, 09, 01, 05, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 0a, 01, 05, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 9, 1) to (start + 5, 2) +- Code(Counter(0)) at (prev + 10, 1) to (start + 5, 2) Highest counter ID seen: c0 Function name: fn_sig_into_try::b -Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 10, 01, 03, 0f, 00, 03, 0f, 00, 10, 02, 01, 05, 00, 0c, 01, 01, 01, 00, 02] +Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 11, 01, 03, 0f, 00, 03, 0f, 00, 10, 02, 01, 05, 00, 0c, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Zero Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 16, 1) to (start + 3, 15) +- Code(Counter(0)) at (prev + 17, 1) to (start + 3, 15) - Code(Zero) at (prev + 3, 15) to (start + 0, 16) - 
Code(Expression(0, Sub)) at (prev + 1, 5) to (start + 0, 12) = (c0 - Zero) @@ -22,13 +22,13 @@ Number of file 0 mappings: 4 Highest counter ID seen: c0 Function name: fn_sig_into_try::c -Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 17, 01, 03, 17, 00, 03, 17, 00, 18, 02, 01, 05, 00, 0c, 01, 01, 01, 00, 02] +Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 18, 01, 03, 17, 00, 03, 17, 00, 18, 02, 01, 05, 00, 0c, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Zero Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 23, 1) to (start + 3, 23) +- Code(Counter(0)) at (prev + 24, 1) to (start + 3, 23) - Code(Zero) at (prev + 3, 23) to (start + 0, 24) - Code(Expression(0, Sub)) at (prev + 1, 5) to (start + 0, 12) = (c0 - Zero) @@ -36,13 +36,13 @@ Number of file 0 mappings: 4 Highest counter ID seen: c0 Function name: fn_sig_into_try::d -Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 1e, 01, 04, 0f, 00, 04, 0f, 00, 10, 02, 01, 05, 00, 0c, 01, 01, 01, 00, 02] +Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 1f, 01, 04, 0f, 00, 04, 0f, 00, 10, 02, 01, 05, 00, 0c, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Zero Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 30, 1) to (start + 4, 15) +- Code(Counter(0)) at (prev + 31, 1) to (start + 4, 15) - Code(Zero) at (prev + 4, 15) to (start + 0, 16) - Code(Expression(0, Sub)) at (prev + 1, 5) to (start + 0, 12) = (c0 - Zero) diff --git a/tests/coverage/fn_sig_into_try.coverage b/tests/coverage/fn_sig_into_try.coverage index 05b8edf15a4..cabe747ce5a 100644 --- a/tests/coverage/fn_sig_into_try.coverage +++ b/tests/coverage/fn_sig_into_try.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |// Regression test for inconsistent handling of function signature spans that diff --git a/tests/coverage/fn_sig_into_try.rs b/tests/coverage/fn_sig_into_try.rs index fd3e0c3f7c6..cda5e716edf 100644 --- a/tests/coverage/fn_sig_into_try.rs +++ b/tests/coverage/fn_sig_into_try.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 // Regression test for inconsistent handling of function signature spans that diff --git a/tests/coverage/if_not.cov-map b/tests/coverage/if_not.cov-map index 6f366796722..f47139ce5a4 100644 --- a/tests/coverage/if_not.cov-map +++ b/tests/coverage/if_not.cov-map @@ -1,5 +1,5 @@ Function name: if_not::if_not -Raw bytes (60): 0x[01, 01, 03, 01, 05, 01, 09, 01, 0d, 0a, 01, 04, 01, 03, 0d, 02, 04, 05, 02, 06, 05, 02, 05, 00, 06, 01, 03, 09, 01, 0d, 06, 02, 05, 02, 06, 09, 02, 05, 00, 06, 01, 03, 09, 01, 0d, 0a, 02, 05, 02, 06, 0d, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (60): 0x[01, 01, 03, 01, 05, 01, 09, 01, 0d, 0a, 01, 05, 01, 03, 0d, 02, 04, 05, 02, 06, 05, 02, 05, 00, 06, 01, 03, 09, 01, 0d, 06, 02, 05, 02, 06, 09, 02, 05, 00, 06, 01, 03, 09, 01, 0d, 0a, 02, 05, 02, 06, 0d, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -7,7 +7,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(0), rhs = Counter(2) - expression 2 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 10 -- Code(Counter(0)) at (prev + 4, 1) to (start + 3, 13) +- Code(Counter(0)) at (prev + 5, 1) to (start + 3, 13) - Code(Expression(0, Sub)) at (prev + 4, 5) to (start + 2, 6) = (c0 - c1) - Code(Counter(1)) at (prev + 2, 5) 
to (start + 0, 6) diff --git a/tests/coverage/if_not.coverage b/tests/coverage/if_not.coverage index c96627d88ae..678ccf9f2f8 100644 --- a/tests/coverage/if_not.coverage +++ b/tests/coverage/if_not.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |#[rustfmt::skip] diff --git a/tests/coverage/if_not.rs b/tests/coverage/if_not.rs index d1c2b5fc982..69283ef2527 100644 --- a/tests/coverage/if_not.rs +++ b/tests/coverage/if_not.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 #[rustfmt::skip] diff --git a/tests/coverage/let_else_loop.cov-map b/tests/coverage/let_else_loop.cov-map index 5a3ccff87c3..7789114c239 100644 --- a/tests/coverage/let_else_loop.cov-map +++ b/tests/coverage/let_else_loop.cov-map @@ -1,32 +1,32 @@ Function name: let_else_loop::_if (unused) -Raw bytes (19): 0x[01, 01, 00, 03, 00, 15, 01, 01, 0c, 00, 01, 0f, 00, 16, 00, 00, 20, 00, 27] +Raw bytes (19): 0x[01, 01, 00, 03, 00, 16, 01, 01, 0c, 00, 01, 0f, 00, 16, 00, 00, 20, 00, 27] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 3 -- Code(Zero) at (prev + 21, 1) to (start + 1, 12) +- Code(Zero) at (prev + 22, 1) to (start + 1, 12) - Code(Zero) at (prev + 1, 15) to (start + 0, 22) - Code(Zero) at (prev + 0, 32) to (start + 0, 39) Highest counter ID seen: (none) Function name: let_else_loop::_loop_either_way (unused) -Raw bytes (19): 0x[01, 01, 00, 03, 00, 0e, 01, 01, 14, 00, 01, 1c, 00, 23, 00, 01, 05, 00, 0c] +Raw bytes (19): 0x[01, 01, 00, 03, 00, 0f, 01, 01, 14, 00, 01, 1c, 00, 23, 00, 01, 05, 00, 0c] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 3 -- Code(Zero) at (prev + 14, 1) to (start + 1, 20) +- Code(Zero) at (prev + 15, 1) to (start + 1, 20) - Code(Zero) at (prev + 1, 28) to (start + 0, 35) - Code(Zero) at (prev + 1, 5) to (start + 0, 12) Highest counter ID seen: (none) Function name: let_else_loop::loopy -Raw bytes (19): 0x[01, 01, 00, 03, 01, 08, 01, 01, 14, 09, 01, 1c, 00, 23, 05, 01, 01, 00, 02] +Raw bytes (19): 0x[01, 01, 00, 03, 01, 09, 01, 01, 14, 09, 01, 1c, 00, 23, 05, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 8, 1) to (start + 1, 20) +- Code(Counter(0)) at (prev + 9, 1) to (start + 1, 20) - Code(Counter(2)) at (prev + 1, 28) to (start + 0, 35) - Code(Counter(1)) at (prev + 1, 1) to (start + 0, 2) Highest counter ID seen: c2 diff --git a/tests/coverage/let_else_loop.coverage b/tests/coverage/let_else_loop.coverage index b42e1e144ae..bd13f6e5650 100644 --- a/tests/coverage/let_else_loop.coverage +++ b/tests/coverage/let_else_loop.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |// Regression test for <https://github.com/rust-lang/rust/issues/122738>. diff --git a/tests/coverage/let_else_loop.rs b/tests/coverage/let_else_loop.rs index 83571287859..8217c0d072a 100644 --- a/tests/coverage/let_else_loop.rs +++ b/tests/coverage/let_else_loop.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 // Regression test for <https://github.com/rust-lang/rust/issues/122738>. 
diff --git a/tests/coverage/macro_in_closure.cov-map b/tests/coverage/macro_in_closure.cov-map index 38ce58d9ea5..9614154a366 100644 --- a/tests/coverage/macro_in_closure.cov-map +++ b/tests/coverage/macro_in_closure.cov-map @@ -1,18 +1,18 @@ Function name: macro_in_closure::NO_BLOCK::{closure#0} -Raw bytes (9): 0x[01, 01, 00, 01, 01, 06, 1c, 00, 2d] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 07, 1c, 00, 2d] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 6, 28) to (start + 0, 45) +- Code(Counter(0)) at (prev + 7, 28) to (start + 0, 45) Highest counter ID seen: c0 Function name: macro_in_closure::WITH_BLOCK::{closure#0} -Raw bytes (9): 0x[01, 01, 00, 01, 01, 08, 1e, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 09, 1e, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 8, 30) to (start + 2, 2) +- Code(Counter(0)) at (prev + 9, 30) to (start + 2, 2) Highest counter ID seen: c0 diff --git a/tests/coverage/macro_in_closure.coverage b/tests/coverage/macro_in_closure.coverage index c829c512cb8..a23ad2c37ec 100644 --- a/tests/coverage/macro_in_closure.coverage +++ b/tests/coverage/macro_in_closure.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |// If a closure body consists entirely of a single bang-macro invocation, the diff --git a/tests/coverage/macro_in_closure.rs b/tests/coverage/macro_in_closure.rs index 251fbf04ee3..3d62b54073f 100644 --- a/tests/coverage/macro_in_closure.rs +++ b/tests/coverage/macro_in_closure.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 // If a closure body consists entirely of a single bang-macro invocation, the diff --git a/tests/coverage/mcdc/condition-limit.cov-map b/tests/coverage/mcdc/condition-limit.cov-map index befe8866a59..8ff5d6360f6 100644 --- a/tests/coverage/mcdc/condition-limit.cov-map +++ b/tests/coverage/mcdc/condition-limit.cov-map @@ -1,5 +1,5 @@ Function name: condition_limit::accept_7_conditions -Raw bytes (147): 0x[01, 01, 08, 01, 05, 05, 09, 09, 0d, 0d, 11, 11, 15, 15, 19, 19, 1d, 01, 1d, 12, 01, 06, 01, 02, 09, 28, 08, 07, 02, 08, 00, 27, 30, 05, 02, 01, 07, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 07, 06, 00, 00, 0d, 00, 0e, 09, 00, 12, 00, 13, 30, 0d, 0a, 06, 05, 00, 00, 12, 00, 13, 0d, 00, 17, 00, 18, 30, 11, 0e, 05, 04, 00, 00, 17, 00, 18, 11, 00, 1c, 00, 1d, 30, 15, 12, 04, 03, 00, 00, 1c, 00, 1d, 15, 00, 21, 00, 22, 30, 19, 16, 03, 02, 00, 00, 21, 00, 22, 19, 00, 26, 00, 27, 30, 1d, 1a, 02, 00, 00, 00, 26, 00, 27, 1d, 00, 28, 02, 06, 1e, 02, 05, 00, 06, 01, 01, 01, 00, 02] +Raw bytes (147): 0x[01, 01, 08, 01, 05, 05, 09, 09, 0d, 0d, 11, 11, 15, 15, 19, 19, 1d, 01, 1d, 12, 01, 07, 01, 02, 09, 28, 08, 07, 02, 08, 00, 27, 30, 05, 02, 01, 07, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 07, 06, 00, 00, 0d, 00, 0e, 09, 00, 12, 00, 13, 30, 0d, 0a, 06, 05, 00, 00, 12, 00, 13, 0d, 00, 17, 00, 18, 30, 11, 0e, 05, 04, 00, 00, 17, 00, 18, 11, 00, 1c, 00, 1d, 30, 15, 12, 04, 03, 00, 00, 1c, 00, 1d, 15, 00, 21, 00, 22, 30, 19, 16, 03, 02, 00, 00, 21, 00, 22, 19, 00, 26, 00, 27, 30, 1d, 1a, 02, 00, 00, 00, 26, 00, 27, 1d, 00, 28, 02, 06, 1e, 02, 05, 00, 06, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 8 @@ -12,7 +12,7 @@ Number of expressions: 8 - expression 6 operands: lhs = Counter(6), rhs = Counter(7) - expression 7 operands: lhs = Counter(0), rhs = 
Counter(7) Number of file 0 mappings: 18 -- Code(Counter(0)) at (prev + 6, 1) to (start + 2, 9) +- Code(Counter(0)) at (prev + 7, 1) to (start + 2, 9) - MCDCDecision { bitmap_idx: 8, conditions_num: 7 } at (prev + 2, 8) to (start + 0, 39) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 7, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 diff --git a/tests/coverage/mcdc/condition-limit.coverage b/tests/coverage/mcdc/condition-limit.coverage index 1a990f27ac2..d11b8a17710 100644 --- a/tests/coverage/mcdc/condition-limit.coverage +++ b/tests/coverage/mcdc/condition-limit.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ min-llvm-version: 19 LL| |//@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/condition-limit.rs b/tests/coverage/mcdc/condition-limit.rs index 520a9f44e08..2e8f1619379 100644 --- a/tests/coverage/mcdc/condition-limit.rs +++ b/tests/coverage/mcdc/condition-limit.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ min-llvm-version: 19 //@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/if.cov-map b/tests/coverage/mcdc/if.cov-map index 1b038f48429..771351f649f 100644 --- a/tests/coverage/mcdc/if.cov-map +++ b/tests/coverage/mcdc/if.cov-map @@ -1,5 +1,5 @@ Function name: if::mcdc_check_a -Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 0e, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 0f, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -7,7 +7,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(1), rhs = Counter(2) - expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 14, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 15, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -23,7 +23,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c2 Function name: if::mcdc_check_b -Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 16, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 17, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -31,7 +31,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(1), rhs = Counter(2) - expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 22, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 23, 1) to (start 
+ 1, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -47,7 +47,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c2 Function name: if::mcdc_check_both -Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 1e, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 1f, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -55,7 +55,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(1), rhs = Counter(2) - expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 30, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 31, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -71,7 +71,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c2 Function name: if::mcdc_check_neither -Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 06, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (62): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 07, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 09, 06, 02, 00, 00, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -79,7 +79,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(1), rhs = Counter(2) - expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 6, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 7, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -95,7 +95,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c2 Function name: if::mcdc_check_not_tree_decision -Raw bytes (85): 0x[01, 01, 07, 01, 05, 01, 17, 05, 09, 05, 09, 17, 0d, 05, 09, 01, 0d, 0a, 01, 30, 01, 03, 0a, 28, 05, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 03, 00, 09, 00, 0a, 02, 00, 0e, 00, 0f, 30, 09, 06, 03, 02, 00, 00, 0e, 00, 0f, 17, 00, 14, 00, 15, 30, 0d, 12, 02, 00, 00, 00, 14, 00, 15, 0d, 00, 16, 02, 06, 1a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (85): 0x[01, 01, 07, 01, 05, 01, 17, 05, 09, 05, 09, 17, 0d, 05, 09, 01, 0d, 0a, 01, 31, 01, 03, 0a, 28, 05, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 03, 00, 09, 00, 0a, 02, 00, 0e, 00, 0f, 30, 09, 06, 03, 02, 00, 00, 0e, 00, 0f, 17, 00, 14, 00, 15, 30, 0d, 12, 02, 00, 00, 
00, 14, 00, 15, 0d, 00, 16, 02, 06, 1a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 7 @@ -107,7 +107,7 @@ Number of expressions: 7 - expression 5 operands: lhs = Counter(1), rhs = Counter(2) - expression 6 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 10 -- Code(Counter(0)) at (prev + 48, 1) to (start + 3, 10) +- Code(Counter(0)) at (prev + 49, 1) to (start + 3, 10) - MCDCDecision { bitmap_idx: 5, conditions_num: 3 } at (prev + 3, 8) to (start + 0, 21) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 3 } at (prev + 0, 9) to (start + 0, 10) true = c1 @@ -129,7 +129,7 @@ Number of file 0 mappings: 10 Highest counter ID seen: c3 Function name: if::mcdc_check_tree_decision -Raw bytes (87): 0x[01, 01, 08, 01, 05, 05, 09, 05, 09, 05, 1f, 09, 0d, 09, 0d, 01, 1f, 09, 0d, 0a, 01, 26, 01, 03, 09, 28, 04, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0e, 00, 0f, 30, 09, 0a, 02, 00, 03, 00, 0e, 00, 0f, 0a, 00, 13, 00, 14, 30, 0d, 0e, 03, 00, 00, 00, 13, 00, 14, 1f, 00, 16, 02, 06, 1a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (87): 0x[01, 01, 08, 01, 05, 05, 09, 05, 09, 05, 1f, 09, 0d, 09, 0d, 01, 1f, 09, 0d, 0a, 01, 27, 01, 03, 09, 28, 04, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0e, 00, 0f, 30, 09, 0a, 02, 00, 03, 00, 0e, 00, 0f, 0a, 00, 13, 00, 14, 30, 0d, 0e, 03, 00, 00, 00, 13, 00, 14, 1f, 00, 16, 02, 06, 1a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 8 @@ -142,7 +142,7 @@ Number of expressions: 8 - expression 6 operands: lhs = Counter(0), rhs = Expression(7, Add) - expression 7 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 10 -- Code(Counter(0)) at (prev + 38, 1) to (start + 3, 9) +- Code(Counter(0)) at (prev + 39, 1) to (start + 3, 9) - MCDCDecision { bitmap_idx: 4, conditions_num: 3 } at (prev + 3, 8) to (start + 0, 21) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -164,7 +164,7 @@ Number of file 0 mappings: 10 Highest counter ID seen: c3 Function name: if::mcdc_nested_if -Raw bytes (120): 0x[01, 01, 0b, 01, 05, 01, 2b, 05, 09, 05, 09, 2b, 0d, 05, 09, 0d, 11, 2b, 11, 05, 09, 01, 2b, 05, 09, 0e, 01, 3a, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 00, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 30, 09, 26, 02, 00, 00, 00, 0d, 00, 0e, 2b, 01, 09, 01, 0d, 28, 06, 02, 01, 0c, 00, 12, 30, 0d, 12, 01, 02, 00, 00, 0c, 00, 0d, 0d, 00, 11, 00, 12, 30, 11, 1a, 02, 00, 00, 00, 11, 00, 12, 11, 00, 13, 02, 0a, 1e, 02, 09, 00, 0a, 26, 01, 0c, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (120): 0x[01, 01, 0b, 01, 05, 01, 2b, 05, 09, 05, 09, 2b, 0d, 05, 09, 0d, 11, 2b, 11, 05, 09, 01, 2b, 05, 09, 0e, 01, 3b, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 00, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 30, 09, 26, 02, 00, 00, 00, 0d, 00, 0e, 2b, 01, 09, 01, 0d, 28, 06, 02, 01, 0c, 00, 12, 30, 0d, 12, 01, 02, 00, 00, 0c, 00, 0d, 0d, 00, 11, 00, 12, 30, 11, 1a, 02, 00, 00, 00, 11, 00, 12, 11, 00, 13, 02, 0a, 1e, 02, 09, 00, 0a, 26, 01, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 11 @@ -180,7 +180,7 @@ Number of expressions: 11 - expression 9 operands: lhs = Counter(0), rhs = Expression(10, Add) - expression 10 operands: lhs = Counter(1), rhs = Counter(2) 
Number of file 0 mappings: 14 -- Code(Counter(0)) at (prev + 58, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 59, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 8) to (start + 0, 9) true = c1 diff --git a/tests/coverage/mcdc/if.coverage b/tests/coverage/mcdc/if.coverage index cee74de3c5f..b000c7d5d2f 100644 --- a/tests/coverage/mcdc/if.coverage +++ b/tests/coverage/mcdc/if.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ min-llvm-version: 19 LL| |//@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/if.rs b/tests/coverage/mcdc/if.rs index 895b736d066..a2abb2edf11 100644 --- a/tests/coverage/mcdc/if.rs +++ b/tests/coverage/mcdc/if.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ min-llvm-version: 19 //@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/inlined_expressions.cov-map b/tests/coverage/mcdc/inlined_expressions.cov-map index 7d78e572a3b..6a112b66e88 100644 --- a/tests/coverage/mcdc/inlined_expressions.cov-map +++ b/tests/coverage/mcdc/inlined_expressions.cov-map @@ -1,12 +1,12 @@ Function name: inlined_expressions::inlined_instance -Raw bytes (50): 0x[01, 01, 02, 01, 05, 05, 09, 06, 01, 07, 01, 01, 06, 28, 03, 02, 01, 05, 00, 0b, 30, 05, 02, 01, 02, 00, 00, 05, 00, 06, 05, 00, 0a, 00, 0b, 30, 09, 06, 02, 00, 00, 00, 0a, 00, 0b, 01, 01, 01, 00, 02] +Raw bytes (50): 0x[01, 01, 02, 01, 05, 05, 09, 06, 01, 08, 01, 01, 06, 28, 03, 02, 01, 05, 00, 0b, 30, 05, 02, 01, 02, 00, 00, 05, 00, 06, 05, 00, 0a, 00, 0b, 30, 09, 06, 02, 00, 00, 00, 0a, 00, 0b, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 7, 1) to (start + 1, 6) +- Code(Counter(0)) at (prev + 8, 1) to (start + 1, 6) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 5) to (start + 0, 11) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 5) to (start + 0, 6) true = c1 diff --git a/tests/coverage/mcdc/inlined_expressions.coverage b/tests/coverage/mcdc/inlined_expressions.coverage index 12bf55d6460..57c655a2054 100644 --- a/tests/coverage/mcdc/inlined_expressions.coverage +++ b/tests/coverage/mcdc/inlined_expressions.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ min-llvm-version: 19 LL| |//@ compile-flags: -Zcoverage-options=mcdc -Copt-level=z -Cllvm-args=--inline-threshold=0 diff --git a/tests/coverage/mcdc/inlined_expressions.rs b/tests/coverage/mcdc/inlined_expressions.rs index dbab0b8a662..651e2fe8438 100644 --- a/tests/coverage/mcdc/inlined_expressions.rs +++ b/tests/coverage/mcdc/inlined_expressions.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ min-llvm-version: 19 //@ compile-flags: -Zcoverage-options=mcdc -Copt-level=z -Cllvm-args=--inline-threshold=0 diff --git a/tests/coverage/mcdc/nested_if.cov-map b/tests/coverage/mcdc/nested_if.cov-map index 59564404481..72c7d68840d 100644 --- a/tests/coverage/mcdc/nested_if.cov-map +++ b/tests/coverage/mcdc/nested_if.cov-map @@ -1,5 +1,5 @@ Function name: 
nested_if::doubly_nested_if_in_condition -Raw bytes (168): 0x[01, 01, 0e, 01, 05, 05, 09, 05, 09, 05, 13, 09, 19, 19, 1d, 05, 1f, 09, 1d, 09, 0d, 2b, 05, 01, 15, 33, 05, 37, 15, 01, 11, 14, 01, 0e, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 15, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 06, 02, 00, 10, 00, 36, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 30, 0d, 21, 02, 00, 00, 00, 15, 00, 36, 0a, 00, 18, 00, 19, 28, 03, 02, 00, 18, 00, 1e, 30, 19, 0e, 01, 02, 00, 00, 18, 00, 19, 19, 00, 1d, 00, 1e, 30, 1d, 16, 02, 00, 00, 00, 1d, 00, 1e, 1d, 00, 21, 00, 25, 1a, 00, 2f, 00, 34, 23, 00, 39, 00, 3e, 21, 00, 48, 00, 4c, 11, 00, 4f, 02, 06, 26, 02, 0c, 02, 06, 2e, 03, 01, 00, 02] +Raw bytes (168): 0x[01, 01, 0e, 01, 05, 05, 09, 05, 09, 05, 13, 09, 19, 19, 1d, 05, 1f, 09, 1d, 09, 0d, 2b, 05, 01, 15, 33, 05, 37, 15, 01, 11, 14, 01, 0f, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 15, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 06, 02, 00, 10, 00, 36, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 30, 0d, 21, 02, 00, 00, 00, 15, 00, 36, 0a, 00, 18, 00, 19, 28, 03, 02, 00, 18, 00, 1e, 30, 19, 0e, 01, 02, 00, 00, 18, 00, 19, 19, 00, 1d, 00, 1e, 30, 1d, 16, 02, 00, 00, 00, 1d, 00, 1e, 1d, 00, 21, 00, 25, 1a, 00, 2f, 00, 34, 23, 00, 39, 00, 3e, 21, 00, 48, 00, 4c, 11, 00, 4f, 02, 06, 26, 02, 0c, 02, 06, 2e, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 14 @@ -18,7 +18,7 @@ Number of expressions: 14 - expression 12 operands: lhs = Expression(13, Add), rhs = Counter(5) - expression 13 operands: lhs = Counter(0), rhs = Counter(4) Number of file 0 mappings: 20 -- Code(Counter(0)) at (prev + 14, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 15, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 9, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 78) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -58,7 +58,7 @@ Number of file 0 mappings: 20 Highest counter ID seen: c8 Function name: nested_if::nested_if_in_condition -Raw bytes (124): 0x[01, 01, 0d, 01, 05, 05, 09, 05, 09, 05, 1f, 09, 0d, 09, 0d, 05, 1f, 09, 0d, 27, 05, 01, 15, 2f, 05, 33, 15, 01, 11, 0e, 01, 06, 01, 01, 09, 28, 06, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 15, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 0a, 00, 15, 00, 16, 30, 0d, 1a, 02, 00, 00, 00, 15, 00, 16, 1f, 00, 19, 00, 1d, 1a, 00, 27, 00, 2c, 11, 00, 2f, 02, 06, 22, 02, 0c, 02, 06, 2a, 03, 01, 00, 02] +Raw bytes (124): 0x[01, 01, 0d, 01, 05, 05, 09, 05, 09, 05, 1f, 09, 0d, 09, 0d, 05, 1f, 09, 0d, 27, 05, 01, 15, 2f, 05, 33, 15, 01, 11, 0e, 01, 07, 01, 01, 09, 28, 06, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 15, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 0a, 00, 15, 00, 16, 30, 0d, 1a, 02, 00, 00, 00, 15, 00, 16, 1f, 00, 19, 00, 1d, 1a, 00, 27, 00, 2c, 11, 00, 2f, 02, 06, 22, 02, 0c, 02, 06, 2a, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 13 @@ -76,7 +76,7 @@ Number of expressions: 13 - expression 11 operands: lhs = Expression(12, Add), rhs = Counter(5) - expression 12 operands: lhs = Counter(0), rhs = Counter(4) Number of file 0 mappings: 14 -- Code(Counter(0)) at (prev + 6, 1) to (start + 1, 9) +- 
Code(Counter(0)) at (prev + 7, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 46) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -106,7 +106,7 @@ Number of file 0 mappings: 14 Highest counter ID seen: c5 Function name: nested_if::nested_in_then_block_in_condition -Raw bytes (176): 0x[01, 01, 12, 01, 05, 05, 09, 05, 09, 05, 33, 09, 0d, 09, 0d, 33, 11, 09, 0d, 11, 15, 33, 15, 09, 0d, 05, 33, 09, 0d, 3b, 05, 01, 1d, 43, 05, 47, 1d, 01, 19, 14, 01, 21, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 19, 1d, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 0a, 00, 15, 00, 16, 30, 0d, 2e, 02, 00, 00, 00, 15, 00, 16, 33, 00, 1c, 00, 1d, 28, 06, 02, 00, 1c, 00, 22, 30, 11, 1a, 01, 02, 00, 00, 1c, 00, 1d, 11, 00, 21, 00, 22, 30, 15, 22, 02, 00, 00, 00, 21, 00, 22, 15, 00, 25, 00, 29, 26, 00, 33, 00, 38, 2e, 00, 44, 00, 49, 19, 00, 4c, 02, 06, 36, 02, 0c, 02, 06, 3e, 03, 01, 00, 02] +Raw bytes (176): 0x[01, 01, 12, 01, 05, 05, 09, 05, 09, 05, 33, 09, 0d, 09, 0d, 33, 11, 09, 0d, 11, 15, 33, 15, 09, 0d, 05, 33, 09, 0d, 3b, 05, 01, 1d, 43, 05, 47, 1d, 01, 19, 14, 01, 22, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 19, 1d, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 0a, 00, 15, 00, 16, 30, 0d, 2e, 02, 00, 00, 00, 15, 00, 16, 33, 00, 1c, 00, 1d, 28, 06, 02, 00, 1c, 00, 22, 30, 11, 1a, 01, 02, 00, 00, 1c, 00, 1d, 11, 00, 21, 00, 22, 30, 15, 22, 02, 00, 00, 00, 21, 00, 22, 15, 00, 25, 00, 29, 26, 00, 33, 00, 38, 2e, 00, 44, 00, 49, 19, 00, 4c, 02, 06, 36, 02, 0c, 02, 06, 3e, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 18 @@ -129,7 +129,7 @@ Number of expressions: 18 - expression 16 operands: lhs = Expression(17, Add), rhs = Counter(7) - expression 17 operands: lhs = Counter(0), rhs = Counter(6) Number of file 0 mappings: 20 -- Code(Counter(0)) at (prev + 33, 1) to (start + 1, 9) +- Code(Counter(0)) at (prev + 34, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 9, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 75) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 @@ -170,7 +170,7 @@ Number of file 0 mappings: 20 Highest counter ID seen: c7 Function name: nested_if::nested_single_condition_decision -Raw bytes (89): 0x[01, 01, 08, 01, 05, 05, 09, 05, 09, 13, 05, 01, 11, 1b, 05, 1f, 11, 01, 0d, 0b, 01, 16, 01, 04, 09, 28, 03, 02, 04, 08, 00, 29, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 11, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 09, 0a, 00, 10, 00, 11, 09, 00, 14, 00, 19, 0a, 00, 23, 00, 27, 0d, 00, 2a, 02, 06, 0e, 02, 0c, 02, 06, 16, 03, 01, 00, 02] +Raw bytes (89): 0x[01, 01, 08, 01, 05, 05, 09, 05, 09, 13, 05, 01, 11, 1b, 05, 1f, 11, 01, 0d, 0b, 01, 17, 01, 04, 09, 28, 03, 02, 04, 08, 00, 29, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 11, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 09, 0a, 00, 10, 00, 11, 09, 00, 14, 00, 19, 0a, 00, 23, 00, 27, 0d, 00, 2a, 02, 06, 0e, 02, 0c, 02, 06, 16, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 8 @@ -183,7 +183,7 @@ Number of expressions: 8 - expression 6 operands: lhs 
= Expression(7, Add), rhs = Counter(4) - expression 7 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 11 -- Code(Counter(0)) at (prev + 22, 1) to (start + 4, 9) +- Code(Counter(0)) at (prev + 23, 1) to (start + 4, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 4, 8) to (start + 0, 41) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 diff --git a/tests/coverage/mcdc/nested_if.coverage b/tests/coverage/mcdc/nested_if.coverage index 4c872708a6e..ca0cb54d581 100644 --- a/tests/coverage/mcdc/nested_if.coverage +++ b/tests/coverage/mcdc/nested_if.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ min-llvm-version: 19 LL| |//@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/nested_if.rs b/tests/coverage/mcdc/nested_if.rs index 3356a768a69..83f188ea47e 100644 --- a/tests/coverage/mcdc/nested_if.rs +++ b/tests/coverage/mcdc/nested_if.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ min-llvm-version: 19 //@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/non_control_flow.cov-map b/tests/coverage/mcdc/non_control_flow.cov-map index ee128d997c0..c282d53c5ac 100644 --- a/tests/coverage/mcdc/non_control_flow.cov-map +++ b/tests/coverage/mcdc/non_control_flow.cov-map @@ -1,5 +1,5 @@ Function name: non_control_flow::assign_3 -Raw bytes (79): 0x[01, 01, 04, 01, 05, 01, 0b, 05, 09, 09, 0d, 0a, 01, 15, 01, 00, 28, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 04, 03, 00, 0d, 00, 18, 30, 05, 02, 01, 00, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 30, 09, 06, 02, 03, 00, 00, 12, 00, 13, 09, 00, 17, 00, 18, 30, 0d, 0e, 03, 00, 00, 00, 17, 00, 18, 01, 01, 05, 01, 02] +Raw bytes (79): 0x[01, 01, 04, 01, 05, 01, 0b, 05, 09, 09, 0d, 0a, 01, 16, 01, 00, 28, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 04, 03, 00, 0d, 00, 18, 30, 05, 02, 01, 00, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 30, 09, 06, 02, 03, 00, 00, 12, 00, 13, 09, 00, 17, 00, 18, 30, 0d, 0e, 03, 00, 00, 00, 17, 00, 18, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 4 @@ -8,7 +8,7 @@ Number of expressions: 4 - expression 2 operands: lhs = Counter(1), rhs = Counter(2) - expression 3 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 10 -- Code(Counter(0)) at (prev + 21, 1) to (start + 0, 40) +- Code(Counter(0)) at (prev + 22, 1) to (start + 0, 40) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - MCDCDecision { bitmap_idx: 4, conditions_num: 3 } at (prev + 0, 13) to (start + 0, 24) @@ -28,7 +28,7 @@ Number of file 0 mappings: 10 Highest counter ID seen: c3 Function name: non_control_flow::assign_3_bis -Raw bytes (81): 0x[01, 01, 05, 01, 05, 05, 09, 01, 09, 01, 13, 09, 0d, 0a, 01, 1a, 01, 00, 2c, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 05, 03, 00, 0d, 00, 18, 30, 05, 02, 01, 03, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 06, 03, 00, 02, 00, 12, 00, 13, 0a, 00, 17, 00, 18, 30, 0d, 0e, 02, 00, 00, 00, 17, 00, 18, 01, 01, 05, 01, 02] +Raw bytes (81): 0x[01, 01, 05, 01, 05, 05, 09, 01, 09, 01, 13, 09, 0d, 0a, 01, 1b, 01, 00, 2c, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 05, 03, 00, 0d, 00, 18, 30, 05, 02, 01, 03, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 06, 03, 00, 02, 00, 12, 00, 13, 0a, 00, 17, 00, 18, 30, 0d, 0e, 02, 00, 00, 00, 17, 00, 18, 01, 01, 05, 01, 
02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 @@ -38,7 +38,7 @@ Number of expressions: 5 - expression 3 operands: lhs = Counter(0), rhs = Expression(4, Add) - expression 4 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 10 -- Code(Counter(0)) at (prev + 26, 1) to (start + 0, 44) +- Code(Counter(0)) at (prev + 27, 1) to (start + 0, 44) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - MCDCDecision { bitmap_idx: 5, conditions_num: 3 } at (prev + 0, 13) to (start + 0, 24) @@ -58,14 +58,14 @@ Number of file 0 mappings: 10 Highest counter ID seen: c3 Function name: non_control_flow::assign_and -Raw bytes (60): 0x[01, 01, 02, 01, 05, 05, 09, 08, 01, 0b, 01, 00, 21, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 03, 02, 00, 0d, 00, 13, 30, 05, 02, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 06, 02, 00, 00, 00, 12, 00, 13, 01, 01, 05, 01, 02] +Raw bytes (60): 0x[01, 01, 02, 01, 05, 05, 09, 08, 01, 0c, 01, 00, 21, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 03, 02, 00, 0d, 00, 13, 30, 05, 02, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 06, 02, 00, 00, 00, 12, 00, 13, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 11, 1) to (start + 0, 33) +- Code(Counter(0)) at (prev + 12, 1) to (start + 0, 33) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 13) to (start + 0, 19) @@ -80,7 +80,7 @@ Number of file 0 mappings: 8 Highest counter ID seen: c2 Function name: non_control_flow::assign_or -Raw bytes (62): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 08, 01, 10, 01, 00, 20, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 03, 02, 00, 0d, 00, 13, 30, 05, 02, 01, 00, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 30, 09, 06, 02, 00, 00, 00, 12, 00, 13, 01, 01, 05, 01, 02] +Raw bytes (62): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 08, 01, 11, 01, 00, 20, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 03, 02, 00, 0d, 00, 13, 30, 05, 02, 01, 00, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 30, 09, 06, 02, 00, 00, 00, 12, 00, 13, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 @@ -88,7 +88,7 @@ Number of expressions: 3 - expression 1 operands: lhs = Counter(0), rhs = Expression(2, Add) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 8 -- Code(Counter(0)) at (prev + 16, 1) to (start + 0, 32) +- Code(Counter(0)) at (prev + 17, 1) to (start + 0, 32) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 13) to (start + 0, 19) @@ -104,23 +104,23 @@ Number of file 0 mappings: 8 Highest counter ID seen: c2 Function name: non_control_flow::foo -Raw bytes (9): 0x[01, 01, 00, 01, 01, 24, 01, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 25, 01, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 36, 1) to (start + 2, 2) +- Code(Counter(0)) at (prev + 37, 1) to (start + 2, 2) Highest counter ID seen: c0 Function name: non_control_flow::func_call -Raw bytes (50): 0x[01, 01, 02, 01, 05, 05, 
09, 06, 01, 28, 01, 01, 0a, 28, 03, 02, 01, 09, 00, 0f, 30, 05, 02, 01, 02, 00, 00, 09, 00, 0a, 05, 00, 0e, 00, 0f, 30, 09, 06, 02, 00, 00, 00, 0e, 00, 0f, 01, 01, 01, 00, 02] +Raw bytes (50): 0x[01, 01, 02, 01, 05, 05, 09, 06, 01, 29, 01, 01, 0a, 28, 03, 02, 01, 09, 00, 0f, 30, 05, 02, 01, 02, 00, 00, 09, 00, 0a, 05, 00, 0e, 00, 0f, 30, 09, 06, 02, 00, 00, 00, 0e, 00, 0f, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 6 -- Code(Counter(0)) at (prev + 40, 1) to (start + 1, 10) +- Code(Counter(0)) at (prev + 41, 1) to (start + 1, 10) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 9) to (start + 0, 15) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 9) to (start + 0, 10) true = c1 @@ -133,7 +133,7 @@ Number of file 0 mappings: 6 Highest counter ID seen: c2 Function name: non_control_flow::right_comb_tree -Raw bytes (111): 0x[01, 01, 05, 01, 05, 05, 09, 09, 0d, 0d, 11, 11, 15, 0e, 01, 1f, 01, 00, 41, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 06, 05, 00, 0d, 00, 2a, 30, 05, 02, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 13, 00, 14, 30, 09, 06, 02, 03, 00, 00, 13, 00, 14, 09, 00, 19, 00, 1a, 30, 0d, 0a, 03, 04, 00, 00, 19, 00, 1a, 0d, 00, 1f, 00, 20, 30, 11, 0e, 04, 05, 00, 00, 1f, 00, 20, 11, 00, 24, 00, 27, 30, 15, 12, 05, 00, 00, 00, 24, 00, 27, 01, 01, 05, 01, 02] +Raw bytes (111): 0x[01, 01, 05, 01, 05, 05, 09, 09, 0d, 0d, 11, 11, 15, 0e, 01, 20, 01, 00, 41, 01, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 06, 05, 00, 0d, 00, 2a, 30, 05, 02, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 13, 00, 14, 30, 09, 06, 02, 03, 00, 00, 13, 00, 14, 09, 00, 19, 00, 1a, 30, 0d, 0a, 03, 04, 00, 00, 19, 00, 1a, 0d, 00, 1f, 00, 20, 30, 11, 0e, 04, 05, 00, 00, 1f, 00, 20, 11, 00, 24, 00, 27, 30, 15, 12, 05, 00, 00, 00, 24, 00, 27, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 @@ -143,7 +143,7 @@ Number of expressions: 5 - expression 3 operands: lhs = Counter(3), rhs = Counter(4) - expression 4 operands: lhs = Counter(4), rhs = Counter(5) Number of file 0 mappings: 14 -- Code(Counter(0)) at (prev + 31, 1) to (start + 0, 65) +- Code(Counter(0)) at (prev + 32, 1) to (start + 0, 65) - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) - MCDCDecision { bitmap_idx: 6, conditions_num: 5 } at (prev + 0, 13) to (start + 0, 42) diff --git a/tests/coverage/mcdc/non_control_flow.coverage b/tests/coverage/mcdc/non_control_flow.coverage index 204c46dc7b5..cead419fbdf 100644 --- a/tests/coverage/mcdc/non_control_flow.coverage +++ b/tests/coverage/mcdc/non_control_flow.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| |//@ min-llvm-version: 19 LL| |//@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/mcdc/non_control_flow.rs b/tests/coverage/mcdc/non_control_flow.rs index a836d8b55c0..6cfce6fae93 100644 --- a/tests/coverage/mcdc/non_control_flow.rs +++ b/tests/coverage/mcdc/non_control_flow.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ min-llvm-version: 19 //@ compile-flags: -Zcoverage-options=mcdc diff --git a/tests/coverage/no_cov_crate.cov-map b/tests/coverage/no_cov_crate.cov-map index 0eb86ef9366..04171fdb79b 100644 --- a/tests/coverage/no_cov_crate.cov-map +++ 
b/tests/coverage/no_cov_crate.cov-map @@ -1,67 +1,67 @@ Function name: no_cov_crate::add_coverage_1 -Raw bytes (9): 0x[01, 01, 00, 01, 01, 15, 01, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 16, 01, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 21, 1) to (start + 2, 2) +- Code(Counter(0)) at (prev + 22, 1) to (start + 2, 2) Highest counter ID seen: c0 Function name: no_cov_crate::add_coverage_2 -Raw bytes (9): 0x[01, 01, 00, 01, 01, 19, 01, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 1a, 01, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 25, 1) to (start + 2, 2) +- Code(Counter(0)) at (prev + 26, 1) to (start + 2, 2) Highest counter ID seen: c0 Function name: no_cov_crate::add_coverage_not_called (unused) -Raw bytes (9): 0x[01, 01, 00, 01, 00, 1e, 01, 02, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 00, 1f, 01, 02, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Zero) at (prev + 30, 1) to (start + 2, 2) +- Code(Zero) at (prev + 31, 1) to (start + 2, 2) Highest counter ID seen: (none) Function name: no_cov_crate::main -Raw bytes (9): 0x[01, 01, 00, 01, 01, 4e, 01, 0b, 02] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 4f, 01, 0b, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 78, 1) to (start + 11, 2) +- Code(Counter(0)) at (prev + 79, 1) to (start + 11, 2) Highest counter ID seen: c0 Function name: no_cov_crate::nested_fns::outer -Raw bytes (14): 0x[01, 01, 00, 02, 01, 32, 05, 02, 23, 01, 0c, 05, 00, 06] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 33, 05, 02, 23, 01, 0c, 05, 00, 06] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 50, 5) to (start + 2, 35) +- Code(Counter(0)) at (prev + 51, 5) to (start + 2, 35) - Code(Counter(0)) at (prev + 12, 5) to (start + 0, 6) Highest counter ID seen: c0 Function name: no_cov_crate::nested_fns::outer_both_covered -Raw bytes (14): 0x[01, 01, 00, 02, 01, 40, 05, 02, 17, 01, 0b, 05, 00, 06] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 41, 05, 02, 17, 01, 0b, 05, 00, 06] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 64, 5) to (start + 2, 23) +- Code(Counter(0)) at (prev + 65, 5) to (start + 2, 23) - Code(Counter(0)) at (prev + 11, 5) to (start + 0, 6) Highest counter ID seen: c0 Function name: no_cov_crate::nested_fns::outer_both_covered::inner -Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 44, 09, 01, 17, 05, 01, 18, 02, 0e, 02, 02, 14, 02, 0e, 01, 03, 09, 00, 0a] +Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 45, 09, 01, 17, 05, 01, 18, 02, 0e, 02, 02, 14, 02, 0e, 01, 03, 09, 00, 0a] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 4 -- Code(Counter(0)) at (prev + 68, 9) to (start + 1, 23) +- Code(Counter(0)) at (prev + 69, 9) to (start + 1, 23) - Code(Counter(1)) at (prev + 1, 24) to (start + 2, 14) - Code(Expression(0, Sub)) at (prev + 2, 20) to (start + 2, 14) = (c0 - c1) diff --git a/tests/coverage/no_cov_crate.coverage b/tests/coverage/no_cov_crate.coverage index a75057287bc..2a8961e6c93 100644 --- a/tests/coverage/no_cov_crate.coverage +++ 
b/tests/coverage/no_cov_crate.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |// Enables `coverage(off)` on the entire crate LL| |//@ reference: attributes.coverage.intro LL| |//@ reference: attributes.coverage.nesting diff --git a/tests/coverage/no_cov_crate.rs b/tests/coverage/no_cov_crate.rs index df8594e9790..72fd6317a17 100644 --- a/tests/coverage/no_cov_crate.rs +++ b/tests/coverage/no_cov_crate.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] // Enables `coverage(off)` on the entire crate //@ reference: attributes.coverage.intro //@ reference: attributes.coverage.nesting diff --git a/tests/coverage/no_spans.cov-map b/tests/coverage/no_spans.cov-map index c6178fc41cf..7f43b68fa90 100644 --- a/tests/coverage/no_spans.cov-map +++ b/tests/coverage/no_spans.cov-map @@ -1,18 +1,18 @@ Function name: no_spans::affected_function -Raw bytes (9): 0x[01, 01, 00, 01, 01, 19, 1c, 00, 1d] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 1a, 1c, 00, 1d] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 25, 28) to (start + 0, 29) +- Code(Counter(0)) at (prev + 26, 28) to (start + 0, 29) Highest counter ID seen: c0 Function name: no_spans::affected_function::{closure#0} -Raw bytes (9): 0x[01, 01, 00, 01, 01, 1a, 0c, 00, 0e] +Raw bytes (9): 0x[01, 01, 00, 01, 01, 1b, 0c, 00, 0e] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 1 -- Code(Counter(0)) at (prev + 26, 12) to (start + 0, 14) +- Code(Counter(0)) at (prev + 27, 12) to (start + 0, 14) Highest counter ID seen: c0 diff --git a/tests/coverage/no_spans.coverage b/tests/coverage/no_spans.coverage index c722210e35f..19e8c2fe5b6 100644 --- a/tests/coverage/no_spans.coverage +++ b/tests/coverage/no_spans.coverage @@ -1,3 +1,4 @@ + LL| |#![feature(coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |// If the span extractor can't find any relevant spans for a function, the diff --git a/tests/coverage/no_spans.rs b/tests/coverage/no_spans.rs index db28bfd0590..e5312406f8a 100644 --- a/tests/coverage/no_spans.rs +++ b/tests/coverage/no_spans.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 // If the span extractor can't find any relevant spans for a function, the diff --git a/tests/coverage/unreachable.coverage b/tests/coverage/unreachable.coverage index 6f9f45dce1e..aa37ccc7d33 100644 --- a/tests/coverage/unreachable.coverage +++ b/tests/coverage/unreachable.coverage @@ -1,4 +1,4 @@ - LL| |#![feature(core_intrinsics)] + LL| |#![feature(core_intrinsics, coverage_attribute)] LL| |//@ edition: 2021 LL| | LL| |// <https://github.com/rust-lang/rust/issues/116171> diff --git a/tests/coverage/unreachable.rs b/tests/coverage/unreachable.rs index d6082f85a36..443e1c8ccab 100644 --- a/tests/coverage/unreachable.rs +++ b/tests/coverage/unreachable.rs @@ -1,4 +1,4 @@ -#![feature(core_intrinsics)] +#![feature(core_intrinsics, coverage_attribute)] //@ edition: 2021 // <https://github.com/rust-lang/rust/issues/116171> diff --git a/tests/crashes/132127.rs b/tests/crashes/132127.rs deleted file mode 100644 index cca354b9876..00000000000 --- a/tests/crashes/132127.rs +++ /dev/null @@ -1,9 +0,0 @@ -//@ known-bug: #132127 -#![feature(dyn_star)] - -trait Trait {} - -fn main() { - let x: dyn* Trait + Send = 1usize; - x as dyn* Trait; -} diff --git a/tests/debuginfo/function-names.rs b/tests/debuginfo/function-names.rs index d9b61e73621..c51884451e5 100644 --- a/tests/debuginfo/function-names.rs +++ 
b/tests/debuginfo/function-names.rs @@ -37,7 +37,7 @@ // Const generic parameter // gdb-command:info functions -q function_names::const_generic_fn.* // gdb-check:[...]static fn function_names::const_generic_fn_bool<false>(); -// gdb-check:[...]static fn function_names::const_generic_fn_non_int<{CONST#a70c39591cb5f53d}>(); +// gdb-check:[...]static fn function_names::const_generic_fn_non_int<{CONST#ffa3db4ca1d52dce}>(); // gdb-check:[...]static fn function_names::const_generic_fn_signed_int<-7>(); // gdb-check:[...]static fn function_names::const_generic_fn_unsigned_int<14>(); diff --git a/tests/mir-opt/building/custom/arrays.arrays.built.after.mir b/tests/mir-opt/building/custom/arrays.arrays.built.after.mir deleted file mode 100644 index 30d11e31e4d..00000000000 --- a/tests/mir-opt/building/custom/arrays.arrays.built.after.mir +++ /dev/null @@ -1,14 +0,0 @@ -// MIR for `arrays` after built - -fn arrays() -> usize { - let mut _0: usize; - let mut _1: [i32; C]; - let mut _2: usize; - - bb0: { - _1 = [const 5_i32; C]; - _2 = Len(_1); - _0 = copy _2; - return; - } -} diff --git a/tests/mir-opt/building/custom/arrays.rs b/tests/mir-opt/building/custom/arrays.rs deleted file mode 100644 index 4bd6f93e113..00000000000 --- a/tests/mir-opt/building/custom/arrays.rs +++ /dev/null @@ -1,22 +0,0 @@ -// skip-filecheck -#![feature(custom_mir, core_intrinsics)] - -extern crate core; -use core::intrinsics::mir::*; - -// EMIT_MIR arrays.arrays.built.after.mir -#[custom_mir(dialect = "built")] -fn arrays<const C: usize>() -> usize { - mir! { - { - let x = [5_i32; C]; - let c = Len(x); - RET = c; - Return() - } - } -} - -fn main() { - assert_eq!(arrays::<20>(), 20); -} diff --git a/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff index 69ef6016d25..138586300ce 100644 --- a/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff +++ b/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff @@ -26,16 +26,16 @@ debug a => _9; } -+ coverage body span: $DIR/branch_match_arms.rs:13:11: 20:2 (#0) ++ coverage body span: $DIR/branch_match_arms.rs:14:11: 21:2 (#0) + coverage ExpressionId(0) => Expression { lhs: Counter(1), op: Add, rhs: Counter(2) }; + coverage ExpressionId(1) => Expression { lhs: Expression(0), op: Add, rhs: Counter(3) }; + coverage ExpressionId(2) => Expression { lhs: Counter(0), op: Subtract, rhs: Expression(1) }; -+ coverage Code(Counter(0)) => $DIR/branch_match_arms.rs:13:1: 14:21 (#0); -+ coverage Code(Counter(1)) => $DIR/branch_match_arms.rs:15:17: 15:33 (#0); -+ coverage Code(Counter(2)) => $DIR/branch_match_arms.rs:16:17: 16:33 (#0); -+ coverage Code(Counter(3)) => $DIR/branch_match_arms.rs:17:17: 17:33 (#0); -+ coverage Code(Expression(2)) => $DIR/branch_match_arms.rs:18:17: 18:33 (#0); -+ coverage Code(Counter(0)) => $DIR/branch_match_arms.rs:20:2: 20:2 (#0); ++ coverage Code(Counter(0)) => $DIR/branch_match_arms.rs:14:1: 15:21 (#0); ++ coverage Code(Counter(1)) => $DIR/branch_match_arms.rs:16:17: 16:33 (#0); ++ coverage Code(Counter(2)) => $DIR/branch_match_arms.rs:17:17: 17:33 (#0); ++ coverage Code(Counter(3)) => $DIR/branch_match_arms.rs:18:17: 18:33 (#0); ++ coverage Code(Expression(2)) => $DIR/branch_match_arms.rs:19:17: 19:33 (#0); ++ coverage Code(Counter(0)) => $DIR/branch_match_arms.rs:21:2: 21:2 (#0); + bb0: { + Coverage::CounterIncrement(0); diff --git a/tests/mir-opt/coverage/branch_match_arms.rs b/tests/mir-opt/coverage/branch_match_arms.rs index 
84ffddcb289..18764b38d6e 100644 --- a/tests/mir-opt/coverage/branch_match_arms.rs +++ b/tests/mir-opt/coverage/branch_match_arms.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ test-mir-pass: InstrumentCoverage //@ compile-flags: -Cinstrument-coverage -Zno-profiler-runtime -Zcoverage-options=branch // skip-filecheck diff --git a/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-abort.diff b/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-abort.diff index 60742ef0e9a..b4197c09ac9 100644 --- a/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-abort.diff +++ b/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-abort.diff @@ -22,14 +22,14 @@ bb1: { StorageDead(_3); - _4 = Len((*_2)); + _4 = PtrMetadata(copy _2); _5 = const 4_usize; _6 = Ge(move _4, move _5); switchInt(move _6) -> [0: bb2, otherwise: bb3]; } bb2: { - _7 = Len((*_2)); + _7 = PtrMetadata(copy _2); _8 = const 3_usize; _9 = Ge(move _7, move _8); - switchInt(move _9) -> [0: bb7, otherwise: bb8]; diff --git a/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-unwind.diff b/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-unwind.diff index 7337a32f525..4bcb13ca49c 100644 --- a/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-unwind.diff +++ b/tests/mir-opt/deduplicate_blocks.is_line_doc_comment_2.DeduplicateBlocks.panic-unwind.diff @@ -22,14 +22,14 @@ bb1: { StorageDead(_3); - _4 = Len((*_2)); + _4 = PtrMetadata(copy _2); _5 = const 4_usize; _6 = Ge(move _4, move _5); switchInt(move _6) -> [0: bb2, otherwise: bb3]; } bb2: { - _7 = Len((*_2)); + _7 = PtrMetadata(copy _2); _8 = const 3_usize; _9 = Ge(move _7, move _8); - switchInt(move _9) -> [0: bb7, otherwise: bb8]; diff --git a/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-abort.diff b/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-abort.diff index c02bab3524b..94ba7082c66 100644 --- a/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-abort.diff +++ b/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-abort.diff @@ -7,18 +7,16 @@ let _2: &[T]; let _3: &[T; 3]; let _4: [T; 3]; - let mut _5: usize; - let mut _6: bool; - let mut _10: !; + let mut _8: !; scope 1 { debug v => _2; + let _5: &T; + let _6: &T; let _7: &T; - let _8: &T; - let _9: &T; scope 2 { - debug v1 => _7; - debug v2 => _8; - debug v3 => _9; + debug v1 => _5; + debug v2 => _6; + debug v3 => _7; } } @@ -27,25 +25,23 @@ _4 = [copy _1, copy _1, copy _1]; _3 = &_4; _2 = copy _3 as &[T] (PointerCoercion(Unsize, Implicit)); - nop; - nop; goto -> bb2; } bb1: { - _10 = core::panicking::panic(const "internal error: entered unreachable code") -> unwind unreachable; + _8 = core::panicking::panic(const "internal error: entered unreachable code") -> unwind unreachable; } bb2: { + StorageLive(_5); + _5 = &(*_2)[0 of 3]; + StorageLive(_6); + _6 = &(*_2)[1 of 3]; StorageLive(_7); - _7 = &(*_2)[0 of 3]; - StorageLive(_8); - _8 = &(*_2)[1 of 3]; - StorageLive(_9); - _9 = &(*_2)[2 of 3]; - StorageDead(_9); - StorageDead(_8); + _7 = &(*_2)[2 of 3]; StorageDead(_7); + StorageDead(_6); + StorageDead(_5); StorageDead(_4); return; } diff --git a/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-unwind.diff b/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-unwind.diff index 49be042588c..0455b2c326e 100644 --- 
a/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-unwind.diff +++ b/tests/mir-opt/issue_76432.test.SimplifyComparisonIntegral.panic-unwind.diff @@ -7,18 +7,16 @@ let _2: &[T]; let _3: &[T; 3]; let _4: [T; 3]; - let mut _5: usize; - let mut _6: bool; - let mut _10: !; + let mut _8: !; scope 1 { debug v => _2; + let _5: &T; + let _6: &T; let _7: &T; - let _8: &T; - let _9: &T; scope 2 { - debug v1 => _7; - debug v2 => _8; - debug v3 => _9; + debug v1 => _5; + debug v2 => _6; + debug v3 => _7; } } @@ -27,25 +25,23 @@ _4 = [copy _1, copy _1, copy _1]; _3 = &_4; _2 = copy _3 as &[T] (PointerCoercion(Unsize, Implicit)); - nop; - nop; goto -> bb2; } bb1: { - _10 = core::panicking::panic(const "internal error: entered unreachable code") -> unwind continue; + _8 = core::panicking::panic(const "internal error: entered unreachable code") -> unwind continue; } bb2: { + StorageLive(_5); + _5 = &(*_2)[0 of 3]; + StorageLive(_6); + _6 = &(*_2)[1 of 3]; StorageLive(_7); - _7 = &(*_2)[0 of 3]; - StorageLive(_8); - _8 = &(*_2)[1 of 3]; - StorageLive(_9); - _9 = &(*_2)[2 of 3]; - StorageDead(_9); - StorageDead(_8); + _7 = &(*_2)[2 of 3]; StorageDead(_7); + StorageDead(_6); + StorageDead(_5); StorageDead(_4); return; } diff --git a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-abort.mir b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-abort.mir index 573c0a12bc1..5876c55c52b 100644 --- a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-abort.mir @@ -25,12 +25,12 @@ fn num_to_digit(_1: char) -> u32 { bb1: { StorageLive(_3); _3 = discriminant(_2); - switchInt(move _3) -> [1: bb2, 0: bb6, otherwise: bb8]; + StorageDead(_2); + switchInt(move _3) -> [1: bb2, otherwise: bb7]; } bb2: { StorageDead(_3); - StorageDead(_2); StorageLive(_4); _4 = char::methods::<impl char>::to_digit(move _1, const 8_u32) -> [return: bb3, unwind unreachable]; } @@ -38,7 +38,7 @@ fn num_to_digit(_1: char) -> u32 { bb3: { StorageLive(_5); _5 = discriminant(_4); - switchInt(move _5) -> [0: bb4, 1: bb5, otherwise: bb8]; + switchInt(move _5) -> [0: bb4, 1: bb5, otherwise: bb6]; } bb4: { @@ -49,21 +49,20 @@ fn num_to_digit(_1: char) -> u32 { _0 = move ((_4 as Some).0: u32); StorageDead(_5); StorageDead(_4); - goto -> bb7; + goto -> bb8; } bb6: { - StorageDead(_3); - StorageDead(_2); - _0 = const 0_u32; - goto -> bb7; + unreachable; } bb7: { - return; + StorageDead(_3); + _0 = const 0_u32; + goto -> bb8; } bb8: { - unreachable; + return; } } diff --git a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-unwind.mir index 049803041d4..f1185353a43 100644 --- a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.panic-unwind.mir @@ -25,12 +25,12 @@ fn num_to_digit(_1: char) -> u32 { bb1: { StorageLive(_3); _3 = discriminant(_2); - switchInt(move _3) -> [1: bb2, 0: bb6, otherwise: bb8]; + StorageDead(_2); + switchInt(move _3) -> [1: bb2, otherwise: bb7]; } bb2: { StorageDead(_3); - StorageDead(_2); StorageLive(_4); _4 = char::methods::<impl char>::to_digit(move _1, const 8_u32) -> [return: bb3, unwind continue]; } @@ -38,7 +38,7 @@ fn num_to_digit(_1: char) -> u32 { bb3: { StorageLive(_5); _5 = discriminant(_4); - switchInt(move _5) -> [0: bb4, 1: bb5, otherwise: bb8]; + switchInt(move _5) -> 
[0: bb4, 1: bb5, otherwise: bb6]; } bb4: { @@ -49,21 +49,20 @@ fn num_to_digit(_1: char) -> u32 { _0 = move ((_4 as Some).0: u32); StorageDead(_5); StorageDead(_4); - goto -> bb7; + goto -> bb8; } bb6: { - StorageDead(_3); - StorageDead(_2); - _0 = const 0_u32; - goto -> bb7; + unreachable; } bb7: { - return; + StorageDead(_3); + _0 = const 0_u32; + goto -> bb8; } bb8: { - unreachable; + return; } } diff --git a/tests/mir-opt/matches_reduce_branches.my_is_some.MatchBranchSimplification.diff b/tests/mir-opt/matches_reduce_branches.my_is_some.MatchBranchSimplification.diff new file mode 100644 index 00000000000..d255278ed30 --- /dev/null +++ b/tests/mir-opt/matches_reduce_branches.my_is_some.MatchBranchSimplification.diff @@ -0,0 +1,37 @@ +- // MIR for `my_is_some` before MatchBranchSimplification ++ // MIR for `my_is_some` after MatchBranchSimplification + + fn my_is_some(_1: Option<()>) -> bool { + debug bar => _1; + let mut _0: bool; + let mut _2: isize; ++ let mut _3: isize; + + bb0: { + _2 = discriminant(_1); +- switchInt(move _2) -> [0: bb2, 1: bb3, otherwise: bb1]; +- } +- +- bb1: { +- unreachable; +- } +- +- bb2: { +- _0 = const false; +- goto -> bb4; +- } +- +- bb3: { +- _0 = const true; +- goto -> bb4; +- } +- +- bb4: { ++ StorageLive(_3); ++ _3 = move _2; ++ _0 = Ne(copy _3, const 0_isize); ++ StorageDead(_3); + return; + } + } + diff --git a/tests/mir-opt/matches_reduce_branches.rs b/tests/mir-opt/matches_reduce_branches.rs index 6787e5816a3..3372ae2f2a6 100644 --- a/tests/mir-opt/matches_reduce_branches.rs +++ b/tests/mir-opt/matches_reduce_branches.rs @@ -19,6 +19,18 @@ fn foo(bar: Option<()>) { } } +// EMIT_MIR matches_reduce_branches.my_is_some.MatchBranchSimplification.diff +// Test for #131219. +fn my_is_some(bar: Option<()>) -> bool { + // CHECK-LABEL: fn my_is_some( + // CHECK: = Ne + // CHECK: return + match bar { + Some(_) => true, + None => false, + } +} + // EMIT_MIR matches_reduce_branches.bar.MatchBranchSimplification.diff fn bar(i: i32) -> (bool, bool, bool, bool) { // CHECK-LABEL: fn bar( @@ -651,4 +663,6 @@ fn main() { let _: u8 = match_trunc_u16_u8_failed(EnumAu16::u0_0x0000); let _ = match_i128_u128(EnumAi128::A); + + let _ = my_is_some(None); } diff --git a/tests/mir-opt/reference_prop.debuginfo.ReferencePropagation.diff b/tests/mir-opt/reference_prop.debuginfo.ReferencePropagation.diff index 05ad9dbf3cc..e9eea69377f 100644 --- a/tests/mir-opt/reference_prop.debuginfo.ReferencePropagation.diff +++ b/tests/mir-opt/reference_prop.debuginfo.ReferencePropagation.diff @@ -92,7 +92,7 @@ StorageDead(_7); - StorageDead(_6); - StorageLive(_10); -- StorageLive(_11); + StorageLive(_11); - StorageLive(_12); StorageLive(_13); _26 = const debuginfo::promoted[0]; @@ -105,9 +105,8 @@ bb5: { StorageDead(_15); StorageDead(_13); -- _11 = &(*_12); -- _16 = Len((*_11)); -+ _16 = Len((*_12)); + _11 = &(*_12); + _16 = PtrMetadata(copy _11); _17 = const 3_usize; _18 = Ge(move _16, move _17); switchInt(move _18) -> [0: bb7, otherwise: bb6]; @@ -137,7 +136,7 @@ bb8: { - StorageDead(_12); -- StorageDead(_11); + StorageDead(_11); - StorageDead(_10); StorageLive(_22); StorageLive(_23); diff --git a/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String;42].AddMovesForPackedDrops.before.mir b/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String;42].AddMovesForPackedDrops.before.mir new file mode 100644 index 00000000000..13df2195ab0 --- /dev/null +++ b/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String;42].AddMovesForPackedDrops.before.mir @@ -0,0 +1,63 @@ +// MIR 
for `std::ptr::drop_in_place` before AddMovesForPackedDrops + +fn std::ptr::drop_in_place(_1: *mut [String; 42]) -> () { + let mut _0: (); + let mut _2: *mut [std::string::String; 42]; + let mut _3: *mut [std::string::String]; + let mut _4: usize; + let mut _5: usize; + let mut _6: *mut std::string::String; + let mut _7: bool; + let mut _8: *mut std::string::String; + let mut _9: bool; + + bb0: { + goto -> bb9; + } + + bb1: { + return; + } + + bb2 (cleanup): { + resume; + } + + bb3 (cleanup): { + _6 = &raw mut (*_3)[_5]; + _5 = Add(move _5, const 1_usize); + drop((*_6)) -> [return: bb4, unwind terminate(cleanup)]; + } + + bb4 (cleanup): { + _7 = Eq(copy _5, copy _4); + switchInt(move _7) -> [0: bb3, otherwise: bb2]; + } + + bb5: { + _8 = &raw mut (*_3)[_5]; + _5 = Add(move _5, const 1_usize); + drop((*_8)) -> [return: bb6, unwind: bb4]; + } + + bb6: { + _9 = Eq(copy _5, copy _4); + switchInt(move _9) -> [0: bb5, otherwise: bb1]; + } + + bb7: { + _4 = PtrMetadata(copy _3); + _5 = const 0_usize; + goto -> bb6; + } + + bb8: { + goto -> bb7; + } + + bb9: { + _2 = &raw mut (*_1); + _3 = move _2 as *mut [std::string::String] (PointerCoercion(Unsize, Implicit)); + goto -> bb8; + } +} diff --git a/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String].AddMovesForPackedDrops.before.mir b/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String].AddMovesForPackedDrops.before.mir index 4d1eaa6ffe3..0633b765644 100644 --- a/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String].AddMovesForPackedDrops.before.mir +++ b/tests/mir-opt/slice_drop_shim.core.ptr-drop_in_place.[String].AddMovesForPackedDrops.before.mir @@ -44,7 +44,7 @@ fn std::ptr::drop_in_place(_1: *mut [String]) -> () { } bb7: { - _2 = Len((*_1)); + _2 = PtrMetadata(copy _1); _3 = const 0_usize; goto -> bb6; } diff --git a/tests/mir-opt/slice_drop_shim.rs b/tests/mir-opt/slice_drop_shim.rs index c2f4c82ecc8..f34c34855a1 100644 --- a/tests/mir-opt/slice_drop_shim.rs +++ b/tests/mir-opt/slice_drop_shim.rs @@ -5,6 +5,8 @@ // if we use -Clink-dead-code. 
// EMIT_MIR core.ptr-drop_in_place.[String].AddMovesForPackedDrops.before.mir +// EMIT_MIR core.ptr-drop_in_place.[String;42].AddMovesForPackedDrops.before.mir fn main() { let _fn = std::ptr::drop_in_place::<[String]> as unsafe fn(_); + let _fn = std::ptr::drop_in_place::<[String; 42]> as unsafe fn(_); } diff --git a/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-disabled.stderr b/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-disabled.stderr index 596f7c510be..82f57864d85 100644 --- a/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-disabled.stderr +++ b/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-disabled.stderr @@ -52,7 +52,7 @@ help: enable `#![feature(const_trait_impl)]` in your crate and mark `Bar` as `#[ LL | #[const_trait] trait Bar: ~const Foo {} | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> const-super-trait.rs:10:7 | LL | x.a(); diff --git a/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-enabled.stderr b/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-enabled.stderr index 7235278d1bd..8f4c78ccfa4 100644 --- a/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-enabled.stderr +++ b/tests/run-make/const-trait-stable-toolchain/const-super-trait-nightly-enabled.stderr @@ -32,7 +32,7 @@ help: mark `Bar` as `#[const_trait]` to allow it to have `const` implementations LL | #[const_trait] trait Bar: ~const Foo {} | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> const-super-trait.rs:10:7 | LL | x.a(); diff --git a/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-disabled.stderr b/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-disabled.stderr index eacdaf5e369..b7cd7097f44 100644 --- a/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-disabled.stderr +++ b/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-disabled.stderr @@ -50,7 +50,7 @@ note: `Bar` can't be used with `~const` because it isn't annotated with `#[const 7 | trait Bar: ~const Foo {} | ^^^^^^^^^^^^^^^^^^^^^ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> const-super-trait.rs:10:7 | 10 | x.a(); diff --git a/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-enabled.stderr b/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-enabled.stderr index 9ddec6e422c..4c59d870671 100644 --- a/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-enabled.stderr +++ b/tests/run-make/const-trait-stable-toolchain/const-super-trait-stable-enabled.stderr @@ -40,7 +40,7 @@ note: `Bar` can't be used with `~const` because it isn't annotated with `#[const 7 | trait Bar: ~const Foo {} | ^^^^^^^^^^^^^^^^^^^^^ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> const-super-trait.rs:10:7 | 10 | x.a(); diff --git a/tests/run-make/dump-ice-to-disk/rmake.rs b/tests/run-make/dump-ice-to-disk/rmake.rs index 15f35eb2d3d..a7a98d31f50 100644 --- a/tests/run-make/dump-ice-to-disk/rmake.rs +++ 
b/tests/run-make/dump-ice-to-disk/rmake.rs @@ -83,7 +83,7 @@ fn extract_exactly_one_ice_file<P: AsRef<Path>>(name: &'static str, dir: P) -> I fn main() { // Establish baseline ICE message. - let mut default_ice_dump = OnceCell::new(); + let default_ice_dump = OnceCell::new(); run_in_tmpdir(|| { rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); let dump = extract_exactly_one_ice_file("baseline", cwd()); diff --git a/tests/run-make/embed-source-dwarf/rmake.rs b/tests/run-make/embed-source-dwarf/rmake.rs index c7106967a85..0aae07ff2e6 100644 --- a/tests/run-make/embed-source-dwarf/rmake.rs +++ b/tests/run-make/embed-source-dwarf/rmake.rs @@ -10,7 +10,7 @@ use std::collections::HashMap; use std::path::PathBuf; use std::rc::Rc; -use gimli::{AttributeValue, EndianRcSlice, Reader, RunTimeEndian}; +use gimli::{EndianRcSlice, Reader, RunTimeEndian}; use object::{Object, ObjectSection}; use run_make_support::{gimli, object, rfs, rustc}; diff --git a/tests/run-make/import-macro-verbatim/verbatim.rs b/tests/run-make/import-macro-verbatim/verbatim.rs index 56a83673c1f..0123a4a7e22 100644 --- a/tests/run-make/import-macro-verbatim/verbatim.rs +++ b/tests/run-make/import-macro-verbatim/verbatim.rs @@ -1,4 +1,4 @@ -//! Include a file by concating the verbatim path using `/` instead of `\` +//! Include a file by concatenating the verbatim path using `/` instead of `\` include!(concat!(env!("VERBATIM_DIR"), "/include/include.txt")); fn main() { diff --git a/tests/run-make/libstd-no-protected/rmake.rs b/tests/run-make/libstd-no-protected/rmake.rs index 3bba59a8f4d..4091406d46e 100644 --- a/tests/run-make/libstd-no-protected/rmake.rs +++ b/tests/run-make/libstd-no-protected/rmake.rs @@ -7,7 +7,7 @@ use run_make_support::object::Endianness; use run_make_support::object::read::archive::ArchiveFile; use run_make_support::object::read::elf::{FileHeader as _, SectionHeader as _}; -use run_make_support::rfs::{read, read_dir}; +use run_make_support::rfs::read; use run_make_support::{has_prefix, has_suffix, object, path, rustc, shallow_find_files, target}; type FileHeader = run_make_support::object::elf::FileHeader64<Endianness>; diff --git a/tests/run-make/libtest-thread-limit/rmake.rs b/tests/run-make/libtest-thread-limit/rmake.rs index fe14d2c046c..817328cd3c3 100644 --- a/tests/run-make/libtest-thread-limit/rmake.rs +++ b/tests/run-make/libtest-thread-limit/rmake.rs @@ -15,10 +15,7 @@ // Reason: this should be ignored in cg_clif (Cranelift) CI and anywhere // else that uses panic=abort. 
-use std::ffi::{self, CStr, CString}; -use std::path::PathBuf; - -use run_make_support::{libc, run, rustc}; +use run_make_support::{libc, rustc}; fn main() { rustc().input("test.rs").arg("--test").run(); diff --git a/tests/run-make/llvm-outputs/rmake.rs b/tests/run-make/llvm-outputs/rmake.rs index 632e9a09ba5..2ce31b260a1 100644 --- a/tests/run-make/llvm-outputs/rmake.rs +++ b/tests/run-make/llvm-outputs/rmake.rs @@ -9,8 +9,8 @@ fn main() { let mut path_ir = PathBuf::new(); run_in_tmpdir(|| { let p = cwd(); - path_bc = p.join("nonexistant_dir_bc"); - path_ir = p.join("nonexistant_dir_ir"); + path_bc = p.join("nonexistent_dir_bc"); + path_ir = p.join("nonexistent_dir_ir"); rustc().input("-").stdin_buf("fn main() {}").out_dir(&path_bc).emit("llvm-bc").run(); rustc().input("-").stdin_buf("fn main() {}").out_dir(&path_ir).emit("llvm-ir").run(); assert!(path_bc.exists()); diff --git a/tests/run-make/missing-unstable-trait-bound/rmake.rs b/tests/run-make/missing-unstable-trait-bound/rmake.rs index 20f77f7c9aa..3f76c65247d 100644 --- a/tests/run-make/missing-unstable-trait-bound/rmake.rs +++ b/tests/run-make/missing-unstable-trait-bound/rmake.rs @@ -6,7 +6,7 @@ // Ensure that on stable we don't suggest restricting with an unsafe trait and we continue // mentioning the rest of the obligation chain. -use run_make_support::{diff, rust_lib_name, rustc}; +use run_make_support::{diff, rustc}; fn main() { let out = rustc() diff --git a/tests/run-make/musl-default-linking/rmake.rs b/tests/run-make/musl-default-linking/rmake.rs index b6d428d3f27..d203595a447 100644 --- a/tests/run-make/musl-default-linking/rmake.rs +++ b/tests/run-make/musl-default-linking/rmake.rs @@ -48,7 +48,7 @@ fn main() { let default = &target_spec["crt-static-default"]; // If the value is `null`, then the default to dynamically link from - // musl_base was not overriden. + // musl_base was not overridden. if default.is_null() { continue; } diff --git a/tests/run-make/no-alloc-shim/rmake.rs b/tests/run-make/no-alloc-shim/rmake.rs index c398a3177df..d61ef5de8c5 100644 --- a/tests/run-make/no-alloc-shim/rmake.rs +++ b/tests/run-make/no-alloc-shim/rmake.rs @@ -13,7 +13,7 @@ // Tracking issue: https://github.com/rust-lang/rust/issues/128602 // Discussion: https://github.com/rust-lang/rust/pull/128407#discussion_r1702439172 -use run_make_support::{cc, cwd, has_extension, has_prefix, run, rustc, shallow_find_files}; +use run_make_support::{cc, has_extension, has_prefix, run, rustc, shallow_find_files}; fn main() { rustc().input("foo.rs").crate_type("bin").emit("obj").panic("abort").run(); diff --git a/tests/run-make/no-builtins-lto/rmake.rs b/tests/run-make/no-builtins-lto/rmake.rs index 8e0c3a63649..56fdfde42f0 100644 --- a/tests/run-make/no-builtins-lto/rmake.rs +++ b/tests/run-make/no-builtins-lto/rmake.rs @@ -1,4 +1,4 @@ -// The rlib produced by a no_builtins crate should be explicitely linked +// The rlib produced by a no_builtins crate should be explicitly linked // during compilation, and as a result be present in the linker arguments. // See the comments inside this file for more details. 
// See https://github.com/rust-lang/rust/pull/35637 diff --git a/tests/run-make/remove-dir-all-race/rmake.rs b/tests/run-make/remove-dir-all-race/rmake.rs index 03c94b76127..32abca92424 100644 --- a/tests/run-make/remove-dir-all-race/rmake.rs +++ b/tests/run-make/remove-dir-all-race/rmake.rs @@ -1,13 +1,13 @@ //@ ignore-windows // This test attempts to make sure that running `remove_dir_all` -// doesn't result in a NotFound error one of the files it +// doesn't result in a NotFound error if one of the files it // is deleting is deleted concurrently. // // The windows implementation for `remove_dir_all` is significantly // more complicated, and has not yet been brought up to par with // the implementation on other platforms, so this test is marked as -// `ignore-windows` until someone more expirenced with windows can +// `ignore-windows` until someone more experienced with windows can // sort that out. use std::fs::remove_dir_all; @@ -27,13 +27,12 @@ fn main() { write("outer/inner.txt", b"sometext"); thread::scope(|scope| { - let t1 = scope.spawn(|| { + scope.spawn(|| { thread::sleep(Duration::from_nanos(i)); remove_dir_all("outer").unwrap(); }); - let race_happened_ref = &race_happened; - let t2 = scope.spawn(|| { + scope.spawn(|| { let r1 = remove_dir_all("outer/inner"); let r2 = remove_dir_all("outer/inner.txt"); if r1.is_ok() && r2.is_err() { @@ -44,10 +43,10 @@ fn main() { assert!(!Path::new("outer").exists()); - // trying to remove a nonexistant top-level directory should + // trying to remove a nonexistent top-level directory should // still result in an error. let Err(err) = remove_dir_all("outer") else { - panic!("removing nonexistant dir did not result in an error"); + panic!("removing nonexistent dir did not result in an error"); }; assert_eq!(err.kind(), std::io::ErrorKind::NotFound); } diff --git a/tests/run-make/rustdoc-map-file/rmake.rs b/tests/run-make/rustdoc-map-file/rmake.rs index d7e3510fe31..50dcc603c02 100644 --- a/tests/run-make/rustdoc-map-file/rmake.rs +++ b/tests/run-make/rustdoc-map-file/rmake.rs @@ -1,8 +1,6 @@ // This test ensures that all items from `foo` are correctly generated into the `redirect-map.json` // file with `--generate-redirect-map` rustdoc option. -use std::path::Path; - use run_make_support::rfs::read_to_string; use run_make_support::{path, rustdoc, serde_json}; diff --git a/tests/run-make/rustdoc-output-stdout/rmake.rs b/tests/run-make/rustdoc-output-stdout/rmake.rs index bcf5e4d9723..d2fd0451163 100644 --- a/tests/run-make/rustdoc-output-stdout/rmake.rs +++ b/tests/run-make/rustdoc-output-stdout/rmake.rs @@ -1,8 +1,6 @@ // This test verifies that rustdoc `-o -` prints JSON on stdout and doesn't generate // a JSON file. -use std::path::PathBuf; - use run_make_support::path_helpers::{cwd, has_extension, read_dir_entries_recursive}; use run_make_support::{rustdoc, serde_json}; diff --git a/tests/run-make/symbol-visibility/rmake.rs b/tests/run-make/symbol-visibility/rmake.rs index f84e63ef74e..ec936bc3b07 100644 --- a/tests/run-make/symbol-visibility/rmake.rs +++ b/tests/run-make/symbol-visibility/rmake.rs @@ -1,7 +1,7 @@ // Dynamic libraries on Rust used to export a very high amount of symbols, // going as far as filling the output with mangled names and generic function // names. After the rework of #38117, this test checks that no mangled Rust symbols -// are exported, and that generics are only shown if explicitely requested. +// are exported, and that generics are only shown if explicitly requested. 
// See https://github.com/rust-lang/rust/issues/37530 use run_make_support::object::read::Object; diff --git a/tests/ui-fulldeps/auxiliary/parser.rs b/tests/ui-fulldeps/auxiliary/parser.rs new file mode 100644 index 00000000000..4ea0d814b1f --- /dev/null +++ b/tests/ui-fulldeps/auxiliary/parser.rs @@ -0,0 +1,51 @@ +#![feature(rustc_private)] + +extern crate rustc_ast; +extern crate rustc_driver; +extern crate rustc_errors; +extern crate rustc_parse; +extern crate rustc_session; +extern crate rustc_span; + +use rustc_ast::ast::{DUMMY_NODE_ID, Expr}; +use rustc_ast::mut_visit::MutVisitor; +use rustc_ast::node_id::NodeId; +use rustc_ast::ptr::P; +use rustc_ast::token; +use rustc_errors::Diag; +use rustc_parse::parser::Recovery; +use rustc_session::parse::ParseSess; +use rustc_span::{DUMMY_SP, FileName, Span}; + +pub fn parse_expr(psess: &ParseSess, source_code: &str) -> Option<P<Expr>> { + let parser = rustc_parse::unwrap_or_emit_fatal(rustc_parse::new_parser_from_source_str( + psess, + FileName::anon_source_code(source_code), + source_code.to_owned(), + )); + + let mut parser = parser.recovery(Recovery::Forbidden); + let mut expr = parser.parse_expr().map_err(Diag::cancel).ok()?; + if parser.token != token::Eof { + return None; + } + + Normalize.visit_expr(&mut expr); + Some(expr) +} + +// Erase Span information that could distinguish between identical expressions +// parsed from different source strings. +struct Normalize; + +impl MutVisitor for Normalize { + const VISIT_TOKENS: bool = true; + + fn visit_id(&mut self, id: &mut NodeId) { + *id = DUMMY_NODE_ID; + } + + fn visit_span(&mut self, span: &mut Span) { + *span = DUMMY_SP; + } +} diff --git a/tests/ui-fulldeps/internal-lints/ty_tykind_usage.rs b/tests/ui-fulldeps/internal-lints/ty_tykind_usage.rs index f77b318039d..91998a8ec45 100644 --- a/tests/ui-fulldeps/internal-lints/ty_tykind_usage.rs +++ b/tests/ui-fulldeps/internal-lints/ty_tykind_usage.rs @@ -13,34 +13,8 @@ fn main() { let kind = TyKind::Bool; //~ ERROR usage of `ty::TyKind::<kind>` match kind { - TyKind::Bool => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Char => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Int(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Uint(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Float(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Adt(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Foreign(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Str => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Array(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Pat(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Slice(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::RawPtr(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Ref(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::FnDef(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::FnPtr(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Dynamic(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Closure(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::CoroutineClosure(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Coroutine(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::CoroutineWitness(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Never => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Tuple(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Alias(..) 
=> (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Param(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Bound(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Placeholder(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Infer(..) => (), //~ ERROR usage of `ty::TyKind::<kind>` - TyKind::Error(_) => (), //~ ERROR usage of `ty::TyKind::<kind>` + TyKind::Bool => {}, //~ ERROR usage of `ty::TyKind::<kind>` + _ => {} } if let ty::Int(int_ty) = kind {} diff --git a/tests/ui-fulldeps/internal-lints/ty_tykind_usage.stderr b/tests/ui-fulldeps/internal-lints/ty_tykind_usage.stderr index 53bf5cb1a82..19a73b36bfe 100644 --- a/tests/ui-fulldeps/internal-lints/ty_tykind_usage.stderr +++ b/tests/ui-fulldeps/internal-lints/ty_tykind_usage.stderr @@ -13,179 +13,17 @@ LL | #[deny(rustc::usage_of_ty_tykind)] error: usage of `ty::TyKind::<kind>` --> $DIR/ty_tykind_usage.rs:16:9 | -LL | TyKind::Bool => (), +LL | TyKind::Bool => {}, | ^^^^^^ help: try using `ty::<kind>` directly: `ty` error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:17:9 - | -LL | TyKind::Char => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:18:9 - | -LL | TyKind::Int(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:19:9 - | -LL | TyKind::Uint(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:20:9 - | -LL | TyKind::Float(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:21:9 - | -LL | TyKind::Adt(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:22:9 - | -LL | TyKind::Foreign(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:23:9 - | -LL | TyKind::Str => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:24:9 - | -LL | TyKind::Array(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:25:9 - | -LL | TyKind::Pat(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:26:9 - | -LL | TyKind::Slice(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:27:9 - | -LL | TyKind::RawPtr(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:28:9 - | -LL | TyKind::Ref(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:29:9 - | -LL | TyKind::FnDef(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:30:9 - | -LL | TyKind::FnPtr(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:31:9 - | -LL | TyKind::Dynamic(..) 
=> (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:32:9 - | -LL | TyKind::Closure(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:33:9 - | -LL | TyKind::CoroutineClosure(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:34:9 - | -LL | TyKind::Coroutine(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:35:9 - | -LL | TyKind::CoroutineWitness(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:36:9 - | -LL | TyKind::Never => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:37:9 - | -LL | TyKind::Tuple(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:38:9 - | -LL | TyKind::Alias(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:39:9 - | -LL | TyKind::Param(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:40:9 - | -LL | TyKind::Bound(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:41:9 - | -LL | TyKind::Placeholder(..) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:42:9 - | -LL | TyKind::Infer(..) 
=> (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:43:9 - | -LL | TyKind::Error(_) => (), - | ^^^^^^ help: try using `ty::<kind>` directly: `ty` - -error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:48:12 + --> $DIR/ty_tykind_usage.rs:22:12 | LL | if let TyKind::Int(int_ty) = kind {} | ^^^^^^ help: try using `ty::<kind>` directly: `ty` error: usage of `ty::TyKind` - --> $DIR/ty_tykind_usage.rs:50:24 + --> $DIR/ty_tykind_usage.rs:24:24 | LL | fn ty_kind(ty_bad: TyKind<'_>, ty_good: Ty<'_>) {} | ^^^^^^^^^^ @@ -193,7 +31,7 @@ LL | fn ty_kind(ty_bad: TyKind<'_>, ty_good: Ty<'_>) {} = help: try using `Ty` instead error: usage of `ty::TyKind` - --> $DIR/ty_tykind_usage.rs:52:37 + --> $DIR/ty_tykind_usage.rs:26:37 | LL | fn ir_ty_kind<I: Interner>(bad: IrTyKind<I>) -> IrTyKind<I> { | ^^^^^^^^^^^ @@ -201,7 +39,7 @@ LL | fn ir_ty_kind<I: Interner>(bad: IrTyKind<I>) -> IrTyKind<I> { = help: try using `Ty` instead error: usage of `ty::TyKind` - --> $DIR/ty_tykind_usage.rs:52:53 + --> $DIR/ty_tykind_usage.rs:26:53 | LL | fn ir_ty_kind<I: Interner>(bad: IrTyKind<I>) -> IrTyKind<I> { | ^^^^^^^^^^^ @@ -209,12 +47,12 @@ LL | fn ir_ty_kind<I: Interner>(bad: IrTyKind<I>) -> IrTyKind<I> { = help: try using `Ty` instead error: usage of `ty::TyKind::<kind>` - --> $DIR/ty_tykind_usage.rs:55:9 + --> $DIR/ty_tykind_usage.rs:29:9 | LL | IrTyKind::Bool | --------^^^^^^ | | | help: try using `ty::<kind>` directly: `ty` -error: aborting due to 34 previous errors +error: aborting due to 7 previous errors diff --git a/tests/ui-fulldeps/pprust-expr-roundtrip.rs b/tests/ui-fulldeps/pprust-expr-roundtrip.rs index 8379ca86494..37e328a315f 100644 --- a/tests/ui-fulldeps/pprust-expr-roundtrip.rs +++ b/tests/ui-fulldeps/pprust-expr-roundtrip.rs @@ -1,5 +1,7 @@ //@ run-pass //@ ignore-cross-compile +//@ aux-crate: parser=parser.rs +//@ edition: 2021 // The general idea of this test is to enumerate all "interesting" expressions and check that // `parse(print(e)) == e` for all `e`. 
Here's what's interesting, for the purposes of this test: @@ -21,7 +23,6 @@ extern crate rustc_ast; extern crate rustc_ast_pretty; -extern crate rustc_data_structures; extern crate rustc_parse; extern crate rustc_session; extern crate rustc_span; @@ -32,28 +33,17 @@ extern crate thin_vec; #[allow(unused_extern_crates)] extern crate rustc_driver; +use parser::parse_expr; use rustc_ast::mut_visit::{visit_clobber, MutVisitor}; use rustc_ast::ptr::P; use rustc_ast::*; use rustc_ast_pretty::pprust; -use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal}; use rustc_session::parse::ParseSess; use rustc_span::source_map::Spanned; use rustc_span::symbol::Ident; -use rustc_span::{FileName, DUMMY_SP}; +use rustc_span::DUMMY_SP; use thin_vec::{thin_vec, ThinVec}; -fn parse_expr(psess: &ParseSess, src: &str) -> Option<P<Expr>> { - let src_as_string = src.to_string(); - - let mut p = unwrap_or_emit_fatal(new_parser_from_source_str( - psess, - FileName::Custom(src_as_string.clone()), - src_as_string, - )); - p.parse_expr().map_err(|e| e.cancel()).ok() -} - // Helper functions for building exprs fn expr(kind: ExprKind) -> P<Expr> { P(Expr { id: DUMMY_NODE_ID, kind, span: DUMMY_SP, attrs: AttrVec::new(), tokens: None }) diff --git a/tests/ui-fulldeps/pprust-parenthesis-insertion.rs b/tests/ui-fulldeps/pprust-parenthesis-insertion.rs index 184458bad55..94c7964392d 100644 --- a/tests/ui-fulldeps/pprust-parenthesis-insertion.rs +++ b/tests/ui-fulldeps/pprust-parenthesis-insertion.rs @@ -1,5 +1,7 @@ //@ run-pass //@ ignore-cross-compile +//@ aux-crate: parser=parser.rs +//@ edition: 2021 // This test covers the AST pretty-printer's automatic insertion of parentheses // into unparenthesized syntax trees according to precedence and various grammar @@ -31,8 +33,6 @@ extern crate rustc_ast; extern crate rustc_ast_pretty; -extern crate rustc_driver; -extern crate rustc_errors; extern crate rustc_parse; extern crate rustc_session; extern crate rustc_span; @@ -40,15 +40,12 @@ extern crate rustc_span; use std::mem; use std::process::ExitCode; -use rustc_ast::ast::{DUMMY_NODE_ID, Expr, ExprKind}; +use parser::parse_expr; +use rustc_ast::ast::{Expr, ExprKind}; use rustc_ast::mut_visit::{self, DummyAstNode as _, MutVisitor}; -use rustc_ast::node_id::NodeId; use rustc_ast::ptr::P; use rustc_ast_pretty::pprust; -use rustc_errors::Diag; -use rustc_parse::parser::Recovery; use rustc_session::parse::ParseSess; -use rustc_span::{DUMMY_SP, FileName, Span}; // Every parenthesis in the following expressions is re-inserted by the // pretty-printer. @@ -61,6 +58,9 @@ static EXPRS: &[&str] = &[ "(2 + 2) * 2", "2 * (2 + 2)", "2 + 2 + 2", + // Right-associative operator. + "2 += 2 += 2", + "(2 += 2) += 2", // Return has lower precedence than a binary operator. "(return 2) + 2", "2 + (return 2)", // FIXME: no parenthesis needed. @@ -89,6 +89,13 @@ static EXPRS: &[&str] = &[ // allowed, except if the break is also labeled. "break 'outer 'inner: loop {} + 2", "break ('inner: loop {} + 2)", + // Grammar restriction: ranges cannot be the endpoint of another range. + "(2..2)..2", + "2..(2..2)", + "(2..2)..", + "..(2..2)", + // Grammar restriction: comparison operators cannot be chained (1 < 2 == false). + "((1 < 2) == false) as usize", // Grammar restriction: the value in let-else is not allowed to end in a // curly brace. "{ let _ = 1 + 1 else {}; }", @@ -113,10 +120,6 @@ static EXPRS: &[&str] = &[ "if let _ = () && (Struct {}).x {}", */ /* - // FIXME: pretty-printer produces invalid syntax. 
`(1 < 2 == false) as usize` - "((1 < 2) == false) as usize", - */ - /* // FIXME: pretty-printer produces invalid syntax. `for _ in 1..{ 2 } {}` "for _ in (1..{ 2 }) {}", */ @@ -129,10 +132,6 @@ static EXPRS: &[&str] = &[ "(0.).to_string()", "0. .. 1.", */ - /* - // FIXME: pretty-printer loses the dyn*. `i as Trait` - "i as dyn* Trait", - */ ]; // Flatten the content of parenthesis nodes into their parent node. For example @@ -154,34 +153,6 @@ impl MutVisitor for Unparenthesize { } } -// Erase Span information that could distinguish between identical expressions -// parsed from different source strings. -struct Normalize; - -impl MutVisitor for Normalize { - const VISIT_TOKENS: bool = true; - - fn visit_id(&mut self, id: &mut NodeId) { - *id = DUMMY_NODE_ID; - } - - fn visit_span(&mut self, span: &mut Span) { - *span = DUMMY_SP; - } -} - -fn parse_expr(psess: &ParseSess, source_code: &str) -> Option<P<Expr>> { - let parser = rustc_parse::unwrap_or_emit_fatal(rustc_parse::new_parser_from_source_str( - psess, - FileName::anon_source_code(source_code), - source_code.to_owned(), - )); - - let mut expr = parser.recovery(Recovery::Forbidden).parse_expr().map_err(Diag::cancel).ok()?; - Normalize.visit_expr(&mut expr); - Some(expr) -} - fn main() -> ExitCode { let mut status = ExitCode::SUCCESS; let mut fail = |description: &str, before: &str, after: &str| { @@ -197,7 +168,9 @@ fn main() -> ExitCode { let psess = &ParseSess::new(vec![rustc_parse::DEFAULT_LOCALE_RESOURCE]); for &source_code in EXPRS { - let expr = parse_expr(psess, source_code).unwrap(); + let Some(expr) = parse_expr(psess, source_code) else { + panic!("Failed to parse original test case: {source_code}"); + }; // Check for FALSE POSITIVE: pretty-printer inserting parentheses where not needed. // Pseudocode: diff --git a/tests/ui/asm/non-const.rs b/tests/ui/asm/non-const.rs index 63c46563226..dc9317b90b1 100644 --- a/tests/ui/asm/non-const.rs +++ b/tests/ui/asm/non-const.rs @@ -8,4 +8,4 @@ fn main() {} fn non_const_fn(x: i32) -> i32 { x } global_asm!("/* {} */", const non_const_fn(0)); -//~^ERROR: cannot call non-const fn +//~^ERROR: cannot call non-const function diff --git a/tests/ui/asm/non-const.stderr b/tests/ui/asm/non-const.stderr index 5fae2ac9843..eac4fe841bf 100644 --- a/tests/ui/asm/non-const.stderr +++ b/tests/ui/asm/non-const.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `non_const_fn` in constants +error[E0015]: cannot call non-const function `non_const_fn` in constants --> $DIR/non-const.rs:10:31 | LL | global_asm!("/* {} */", const non_const_fn(0)); diff --git a/tests/ui/associated-consts/issue-93775.rs b/tests/ui/associated-consts/issue-93775.rs index d7416d03707..88e88b55987 100644 --- a/tests/ui/associated-consts/issue-93775.rs +++ b/tests/ui/associated-consts/issue-93775.rs @@ -1,6 +1,6 @@ -//@ ignore-windows-msvc -// FIXME(#132111, #133432): this test is flaky on windows msvc, it sometimes fail but it sometimes -// passes. +//@ ignore-rustc-debug-assertions +// Similar to stress testing, the test case requires a larger call stack, +// so we ignore rustc's debug assertions. 
//@ build-pass // ignore-tidy-linelength diff --git a/tests/ui/async-await/async-closures/def-path.stderr b/tests/ui/async-await/async-closures/def-path.stderr index cf25b2d2d23..13ebaf67e54 100644 --- a/tests/ui/async-await/async-closures/def-path.stderr +++ b/tests/ui/async-await/async-closures/def-path.stderr @@ -5,11 +5,11 @@ LL | let x = async || {}; | -- the expected `async` closure body LL | LL | let () = x(); - | ^^ --- this expression has type `{static main::{closure#0}::{closure#0}<?17t> upvar_tys=?16t witness=?6t}` + | ^^ --- this expression has type `{static main::{closure#0}::{closure#0}<?17t> upvar_tys=?16t resume_ty=ResumeTy yield_ty=() return_ty=() witness=?6t}` | | | expected `async` closure body, found `()` | - = note: expected `async` closure body `{static main::{closure#0}::{closure#0}<?17t> upvar_tys=?16t witness=?6t}` + = note: expected `async` closure body `{static main::{closure#0}::{closure#0}<?17t> upvar_tys=?16t resume_ty=ResumeTy yield_ty=() return_ty=() witness=?6t}` found unit type `()` error: aborting due to 1 previous error diff --git a/tests/ui/backtrace/std-backtrace.rs b/tests/ui/backtrace/std-backtrace.rs index b4806457877..57d953a8640 100644 --- a/tests/ui/backtrace/std-backtrace.rs +++ b/tests/ui/backtrace/std-backtrace.rs @@ -3,7 +3,6 @@ //@ ignore-wasm32 spawning processes is not supported //@ ignore-openbsd no support for libbacktrace without filename //@ ignore-sgx no processes -//@ ignore-msvc see #62897 and `backtrace-debuginfo.rs` test //@ ignore-fuchsia Backtraces not symbolized //@ compile-flags:-g //@ compile-flags:-Cstrip=none diff --git a/tests/ui/bootstrap/rustc_bootstap.force_stable.stderr b/tests/ui/bootstrap/rustc_bootstrap.force_stable.stderr index f378f3c70dd..f378f3c70dd 100644 --- a/tests/ui/bootstrap/rustc_bootstap.force_stable.stderr +++ b/tests/ui/bootstrap/rustc_bootstrap.force_stable.stderr diff --git a/tests/ui/bootstrap/rustc_bootstap.rs b/tests/ui/bootstrap/rustc_bootstrap.rs index 3d792ef4be4..daa28e0cdf2 100644 --- a/tests/ui/bootstrap/rustc_bootstap.rs +++ b/tests/ui/bootstrap/rustc_bootstrap.rs @@ -1,5 +1,5 @@ -//! Check `RUSTC_BOOTSTRAP`'s behavior in relation to feature stability and what rustc considers -//! itself to be (stable vs non-stable ). +//! Check the compiler's behavior when the perma-unstable env var `RUSTC_BOOTSTRAP` is set in the +//! environment in relation to feature stability and which channel rustc considers itself to be. //! //! `RUSTC_BOOTSTRAP` accepts: //! 
diff --git a/tests/ui/borrowck/issue-64453.rs b/tests/ui/borrowck/issue-64453.rs index 5f1f35d6ca9..587bf0e80f5 100644 --- a/tests/ui/borrowck/issue-64453.rs +++ b/tests/ui/borrowck/issue-64453.rs @@ -2,7 +2,7 @@ struct Project; struct Value; static settings_dir: String = format!(""); -//~^ ERROR cannot call non-const fn +//~^ ERROR cannot call non-const function fn from_string(_: String) -> Value { Value diff --git a/tests/ui/borrowck/issue-64453.stderr b/tests/ui/borrowck/issue-64453.stderr index 98b05ead649..8ec9a10f09f 100644 --- a/tests/ui/borrowck/issue-64453.stderr +++ b/tests/ui/borrowck/issue-64453.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `format` in statics +error[E0015]: cannot call non-const function `format` in statics --> $DIR/issue-64453.rs:4:31 | LL | static settings_dir: String = format!(""); diff --git a/tests/ui/const-generics/nested-type.full.stderr b/tests/ui/const-generics/nested-type.full.stderr index 04dc84ea3cf..e5a1f230380 100644 --- a/tests/ui/const-generics/nested-type.full.stderr +++ b/tests/ui/const-generics/nested-type.full.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `Foo::{constant#0}::Foo::<17>::value` in constants +error[E0015]: cannot call non-const associated function `Foo::{constant#0}::Foo::<17>::value` in constants --> $DIR/nested-type.rs:15:5 | LL | Foo::<17>::value() diff --git a/tests/ui/const-generics/nested-type.min.stderr b/tests/ui/const-generics/nested-type.min.stderr index cdc0b9807d9..8282acd4ea7 100644 --- a/tests/ui/const-generics/nested-type.min.stderr +++ b/tests/ui/const-generics/nested-type.min.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `Foo::{constant#0}::Foo::<17>::value` in constants +error[E0015]: cannot call non-const associated function `Foo::{constant#0}::Foo::<17>::value` in constants --> $DIR/nested-type.rs:15:5 | LL | Foo::<17>::value() diff --git a/tests/ui/const-generics/nested-type.rs b/tests/ui/const-generics/nested-type.rs index a9d106237b3..ddd84d472bf 100644 --- a/tests/ui/const-generics/nested-type.rs +++ b/tests/ui/const-generics/nested-type.rs @@ -13,7 +13,7 @@ struct Foo<const N: [u8; { } Foo::<17>::value() - //~^ ERROR cannot call non-const fn + //~^ ERROR cannot call non-const associated function }]>; //[min]~^^^^^^^^^^^^ ERROR `[u8; { diff --git a/tests/ui/consts/const-call.rs b/tests/ui/consts/const-call.rs index 28e89559fe5..851b66f7623 100644 --- a/tests/ui/consts/const-call.rs +++ b/tests/ui/consts/const-call.rs @@ -4,5 +4,5 @@ fn f(x: usize) -> usize { fn main() { let _ = [0; f(2)]; - //~^ ERROR cannot call non-const fn + //~^ ERROR cannot call non-const function } diff --git a/tests/ui/consts/const-call.stderr b/tests/ui/consts/const-call.stderr index 4e7098a5c8f..b9dcf5addb5 100644 --- a/tests/ui/consts/const-call.stderr +++ b/tests/ui/consts/const-call.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `f` in constants +error[E0015]: cannot call non-const function `f` in constants --> $DIR/const-call.rs:6:17 | LL | let _ = [0; f(2)]; diff --git a/tests/ui/consts/const-eval/format.rs b/tests/ui/consts/const-eval/format.rs index b12df824a33..e56d15e935b 100644 --- a/tests/ui/consts/const-eval/format.rs +++ b/tests/ui/consts/const-eval/format.rs @@ -1,14 +1,14 @@ const fn failure() { panic!("{:?}", 0); //~^ ERROR cannot call non-const formatting macro in constant functions - //~| ERROR cannot call non-const fn `Arguments::<'_>::new_v1::<1, 1>` in constant functions + //~| ERROR cannot call non-const associated function 
`Arguments::<'_>::new_v1::<1, 1>` in constant functions } const fn print() { println!("{:?}", 0); //~^ ERROR cannot call non-const formatting macro in constant functions - //~| ERROR cannot call non-const fn `Arguments::<'_>::new_v1::<2, 1>` in constant functions - //~| ERROR cannot call non-const fn `_print` in constant functions + //~| ERROR cannot call non-const associated function `Arguments::<'_>::new_v1::<2, 1>` in constant functions + //~| ERROR cannot call non-const function `_print` in constant functions } fn main() {} diff --git a/tests/ui/consts/const-eval/format.stderr b/tests/ui/consts/const-eval/format.stderr index ce3f9f2190e..25ed44e0f33 100644 --- a/tests/ui/consts/const-eval/format.stderr +++ b/tests/ui/consts/const-eval/format.stderr @@ -7,7 +7,7 @@ LL | panic!("{:?}", 0); = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants = note: this error originates in the macro `$crate::const_format_args` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `Arguments::<'_>::new_v1::<1, 1>` in constant functions +error[E0015]: cannot call non-const associated function `Arguments::<'_>::new_v1::<1, 1>` in constant functions --> $DIR/format.rs:2:5 | LL | panic!("{:?}", 0); @@ -25,7 +25,7 @@ LL | println!("{:?}", 0); = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants = note: this error originates in the macro `$crate::format_args_nl` which comes from the expansion of the macro `println` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `Arguments::<'_>::new_v1::<2, 1>` in constant functions +error[E0015]: cannot call non-const associated function `Arguments::<'_>::new_v1::<2, 1>` in constant functions --> $DIR/format.rs:8:5 | LL | println!("{:?}", 0); @@ -34,7 +34,7 @@ LL | println!("{:?}", 0); = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants = note: this error originates in the macro `$crate::format_args_nl` which comes from the expansion of the macro `println` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `_print` in constant functions +error[E0015]: cannot call non-const function `_print` in constant functions --> $DIR/format.rs:8:5 | LL | println!("{:?}", 0); diff --git a/tests/ui/consts/const-eval/ub-slice-get-unchecked.stderr b/tests/ui/consts/const-eval/ub-slice-get-unchecked.stderr index 94aa3ee4d7a..6e428079afe 100644 --- a/tests/ui/consts/const-eval/ub-slice-get-unchecked.stderr +++ b/tests/ui/consts/const-eval/ub-slice-get-unchecked.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `core::slice::<impl [()]>::get_unchecked::<std::ops::Range<usize>>` in constants +error[E0015]: cannot call non-const method `core::slice::<impl [()]>::get_unchecked::<std::ops::Range<usize>>` in constants --> $DIR/ub-slice-get-unchecked.rs:7:29 | LL | const B: &[()] = unsafe { A.get_unchecked(3..1) }; diff --git a/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs b/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs index 31c15400f84..0b475087d55 100644 --- a/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs +++ b/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs @@ -5,7 +5,7 @@ extern "C" { const extern "C" fn bar() { unsafe { regular_in_block(); - //~^ 
ERROR: cannot call non-const fn + //~^ ERROR: cannot call non-const function } } @@ -14,7 +14,7 @@ extern "C" fn regular() {} const extern "C" fn foo() { unsafe { regular(); - //~^ ERROR: cannot call non-const fn + //~^ ERROR: cannot call non-const function } } diff --git a/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr b/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr index 5d37f524e03..1fa881cf42b 100644 --- a/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr +++ b/tests/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `regular_in_block` in constant functions +error[E0015]: cannot call non-const function `regular_in_block` in constant functions --> $DIR/const-extern-fn-call-extern-fn.rs:7:9 | LL | regular_in_block(); @@ -6,7 +6,7 @@ LL | regular_in_block(); | = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `regular` in constant functions +error[E0015]: cannot call non-const function `regular` in constant functions --> $DIR/const-extern-fn-call-extern-fn.rs:16:9 | LL | regular(); diff --git a/tests/ui/consts/const-fn-not-safe-for-const.stderr b/tests/ui/consts/const-fn-not-safe-for-const.stderr index 674e05a0ba9..e8f0566e73d 100644 --- a/tests/ui/consts/const-fn-not-safe-for-const.stderr +++ b/tests/ui/consts/const-fn-not-safe-for-const.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `random` in constant functions +error[E0015]: cannot call non-const function `random` in constant functions --> $DIR/const-fn-not-safe-for-const.rs:14:5 | LL | random() diff --git a/tests/ui/consts/control-flow/issue-46843.rs b/tests/ui/consts/control-flow/issue-46843.rs index ddddc8505c6..fd6366d3c18 100644 --- a/tests/ui/consts/control-flow/issue-46843.rs +++ b/tests/ui/consts/control-flow/issue-46843.rs @@ -8,7 +8,7 @@ fn non_const() -> Thing { } pub const Q: i32 = match non_const() { - //~^ ERROR cannot call non-const fn + //~^ ERROR cannot call non-const function Thing::This => 1, Thing::That => 0 }; diff --git a/tests/ui/consts/control-flow/issue-46843.stderr b/tests/ui/consts/control-flow/issue-46843.stderr index 69bf78839be..42eb035647c 100644 --- a/tests/ui/consts/control-flow/issue-46843.stderr +++ b/tests/ui/consts/control-flow/issue-46843.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `non_const` in constants +error[E0015]: cannot call non-const function `non_const` in constants --> $DIR/issue-46843.rs:10:26 | LL | pub const Q: i32 = match non_const() { diff --git a/tests/ui/consts/issue-16538.stderr b/tests/ui/consts/issue-16538.stderr index c4f5364b4d7..8bd11541a7d 100644 --- a/tests/ui/consts/issue-16538.stderr +++ b/tests/ui/consts/issue-16538.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `Y::foo` in statics +error[E0015]: cannot call non-const function `Y::foo` in statics --> $DIR/issue-16538.rs:11:23 | LL | static foo: &Y::X = &*Y::foo(Y::x as *const Y::X); diff --git a/tests/ui/consts/issue-32829-2.rs b/tests/ui/consts/issue-32829-2.rs index d70b5a8c4e1..c7ad0979293 100644 --- a/tests/ui/consts/issue-32829-2.rs +++ b/tests/ui/consts/issue-32829-2.rs @@ -8,7 +8,7 @@ const bad : u32 = { const bad_two : u32 = { { invalid(); - //~^ ERROR: cannot call non-const fn `invalid` + //~^ ERROR: cannot call non-const function `invalid` 0 } }; @@ -30,7 +30,7 @@ static bad_four : u32 = { static bad_five : u32 = { { invalid(); - 
//~^ ERROR: cannot call non-const fn `invalid` + //~^ ERROR: cannot call non-const function `invalid` 0 } }; @@ -52,7 +52,7 @@ static mut bad_seven : u32 = { static mut bad_eight : u32 = { { invalid(); - //~^ ERROR: cannot call non-const fn `invalid` + //~^ ERROR: cannot call non-const function `invalid` 0 } }; diff --git a/tests/ui/consts/issue-32829-2.stderr b/tests/ui/consts/issue-32829-2.stderr index bd0b8c15b55..eedd9d34e55 100644 --- a/tests/ui/consts/issue-32829-2.stderr +++ b/tests/ui/consts/issue-32829-2.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `invalid` in constants +error[E0015]: cannot call non-const function `invalid` in constants --> $DIR/issue-32829-2.rs:10:9 | LL | invalid(); @@ -6,7 +6,7 @@ LL | invalid(); | = note: calls in constants are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `invalid` in statics +error[E0015]: cannot call non-const function `invalid` in statics --> $DIR/issue-32829-2.rs:32:9 | LL | invalid(); @@ -15,7 +15,7 @@ LL | invalid(); = note: calls in statics are limited to constant functions, tuple structs and tuple variants = note: consider wrapping this expression in `std::sync::LazyLock::new(|| ...)` -error[E0015]: cannot call non-const fn `invalid` in statics +error[E0015]: cannot call non-const function `invalid` in statics --> $DIR/issue-32829-2.rs:54:9 | LL | invalid(); diff --git a/tests/ui/consts/issue-43105.rs b/tests/ui/consts/issue-43105.rs index a4ee34c0532..738b73416e0 100644 --- a/tests/ui/consts/issue-43105.rs +++ b/tests/ui/consts/issue-43105.rs @@ -1,7 +1,7 @@ fn xyz() -> u8 { 42 } const NUM: u8 = xyz(); -//~^ ERROR cannot call non-const fn +//~^ ERROR cannot call non-const function fn main() { match 1 { diff --git a/tests/ui/consts/issue-43105.stderr b/tests/ui/consts/issue-43105.stderr index 0e08feb58de..c030c0f5fcd 100644 --- a/tests/ui/consts/issue-43105.stderr +++ b/tests/ui/consts/issue-43105.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `xyz` in constants +error[E0015]: cannot call non-const function `xyz` in constants --> $DIR/issue-43105.rs:3:17 | LL | const NUM: u8 = xyz(); diff --git a/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.rs b/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.rs index 258997597ea..6a6b0e666e1 100644 --- a/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.rs +++ b/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.rs @@ -1,7 +1,7 @@ const fn foo(a: i32) -> Vec<i32> { vec![1, 2, 3] //~^ ERROR allocations are not allowed - //~| ERROR cannot call non-const fn + //~| ERROR cannot call non-const method } fn main() {} diff --git a/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.stderr b/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.stderr index 74234108911..8e52a7aa35e 100644 --- a/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.stderr +++ b/tests/ui/consts/min_const_fn/bad_const_fn_body_ice.stderr @@ -6,7 +6,7 @@ LL | vec![1, 2, 3] | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [i32]>::into_vec::<std::alloc::Global>` in constant functions +error[E0015]: cannot call non-const method `slice::<impl [i32]>::into_vec::<std::alloc::Global>` in constant functions --> $DIR/bad_const_fn_body_ice.rs:2:5 | LL | vec![1, 2, 3] diff --git a/tests/ui/consts/mir_check_nonconst.rs b/tests/ui/consts/mir_check_nonconst.rs index b6f34b922fa..eede36bd6ed 100644 --- 
a/tests/ui/consts/mir_check_nonconst.rs +++ b/tests/ui/consts/mir_check_nonconst.rs @@ -6,6 +6,6 @@ fn bar() -> Foo { } static foo: Foo = bar(); -//~^ ERROR cannot call non-const fn +//~^ ERROR cannot call non-const function fn main() {} diff --git a/tests/ui/consts/mir_check_nonconst.stderr b/tests/ui/consts/mir_check_nonconst.stderr index 95d64622ad7..e930fa2103d 100644 --- a/tests/ui/consts/mir_check_nonconst.stderr +++ b/tests/ui/consts/mir_check_nonconst.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `bar` in statics +error[E0015]: cannot call non-const function `bar` in statics --> $DIR/mir_check_nonconst.rs:8:19 | LL | static foo: Foo = bar(); diff --git a/tests/ui/consts/missing_span_in_backtrace.stderr b/tests/ui/consts/missing_span_in_backtrace.stderr index 72d15702e89..05ae7305dbc 100644 --- a/tests/ui/consts/missing_span_in_backtrace.stderr +++ b/tests/ui/consts/missing_span_in_backtrace.stderr @@ -7,6 +7,8 @@ note: inside `std::ptr::read::<MaybeUninit<MaybeUninit<u8>>>` --> $SRC_DIR/core/src/ptr/mod.rs:LL:COL note: inside `std::ptr::swap_nonoverlapping_simple_untyped::<MaybeUninit<u8>>` --> $SRC_DIR/core/src/ptr/mod.rs:LL:COL +note: inside `swap_nonoverlapping::compiletime::<MaybeUninit<u8>>` + --> $SRC_DIR/core/src/ptr/mod.rs:LL:COL note: inside `swap_nonoverlapping::<MaybeUninit<u8>>` --> $SRC_DIR/core/src/ptr/mod.rs:LL:COL note: inside `X` @@ -20,6 +22,7 @@ note: inside `X` | |_________^ = help: this code performed an operation that depends on the underlying bytes representing a pointer = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported + = note: this error originates in the macro `$crate::intrinsics::const_eval_select` which comes from the expansion of the macro `const_eval_select` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/tests/ui/coroutine/clone-impl-static.rs b/tests/ui/coroutine/clone-impl-static.rs index 62d4392e30c..f6fadff7faf 100644 --- a/tests/ui/coroutine/clone-impl-static.rs +++ b/tests/ui/coroutine/clone-impl-static.rs @@ -1,6 +1,8 @@ //@compile-flags: --diagnostic-width=300 // gate-test-coroutine_clone // Verifies that static coroutines cannot be cloned/copied. +// This is important: the cloned coroutine would reference state of the original +// coroutine, leading to semantic nonsense. 
#![feature(coroutines, coroutine_clone, stmt_expr_attributes)] diff --git a/tests/ui/coroutine/clone-impl-static.stderr b/tests/ui/coroutine/clone-impl-static.stderr index bf16b166960..db1d2770346 100644 --- a/tests/ui/coroutine/clone-impl-static.stderr +++ b/tests/ui/coroutine/clone-impl-static.stderr @@ -1,27 +1,27 @@ -error[E0277]: the trait bound `{static coroutine@$DIR/clone-impl-static.rs:9:5: 9:19}: Copy` is not satisfied - --> $DIR/clone-impl-static.rs:12:16 +error[E0277]: the trait bound `{static coroutine@$DIR/clone-impl-static.rs:11:5: 11:19}: Copy` is not satisfied + --> $DIR/clone-impl-static.rs:14:16 | LL | check_copy(&gen); - | ---------- ^^^^ the trait `Copy` is not implemented for `{static coroutine@$DIR/clone-impl-static.rs:9:5: 9:19}` + | ---------- ^^^^ the trait `Copy` is not implemented for `{static coroutine@$DIR/clone-impl-static.rs:11:5: 11:19}` | | | required by a bound introduced by this call | note: required by a bound in `check_copy` - --> $DIR/clone-impl-static.rs:18:18 + --> $DIR/clone-impl-static.rs:20:18 | LL | fn check_copy<T: Copy>(_x: &T) {} | ^^^^ required by this bound in `check_copy` -error[E0277]: the trait bound `{static coroutine@$DIR/clone-impl-static.rs:9:5: 9:19}: Clone` is not satisfied - --> $DIR/clone-impl-static.rs:14:17 +error[E0277]: the trait bound `{static coroutine@$DIR/clone-impl-static.rs:11:5: 11:19}: Clone` is not satisfied + --> $DIR/clone-impl-static.rs:16:17 | LL | check_clone(&gen); - | ----------- ^^^^ the trait `Clone` is not implemented for `{static coroutine@$DIR/clone-impl-static.rs:9:5: 9:19}` + | ----------- ^^^^ the trait `Clone` is not implemented for `{static coroutine@$DIR/clone-impl-static.rs:11:5: 11:19}` | | | required by a bound introduced by this call | note: required by a bound in `check_clone` - --> $DIR/clone-impl-static.rs:19:19 + --> $DIR/clone-impl-static.rs:21:19 | LL | fn check_clone<T: Clone>(_x: &T) {} | ^^^^^ required by this bound in `check_clone` diff --git a/tests/ui/coroutine/print/coroutine-print-verbose-2.stderr b/tests/ui/coroutine/print/coroutine-print-verbose-2.stderr index 2ab9d35f05a..8877d45ddda 100644 --- a/tests/ui/coroutine/print/coroutine-print-verbose-2.stderr +++ b/tests/ui/coroutine/print/coroutine-print-verbose-2.stderr @@ -9,7 +9,7 @@ LL | | drop(a); LL | | }); | |______^ coroutine is not `Sync` | - = help: within `{main::{closure#0} upvar_tys=() witness={main::{closure#0}}}`, the trait `Sync` is not implemented for `NotSync` + = help: within `{main::{closure#0} upvar_tys=() resume_ty=() yield_ty=() return_ty=() witness={main::{closure#0}}}`, the trait `Sync` is not implemented for `NotSync` note: coroutine is not `Sync` as this value is used across a yield --> $DIR/coroutine-print-verbose-2.rs:20:9 | @@ -34,7 +34,7 @@ LL | | drop(a); LL | | }); | |______^ coroutine is not `Send` | - = help: within `{main::{closure#1} upvar_tys=() witness={main::{closure#1}}}`, the trait `Send` is not implemented for `NotSend` + = help: within `{main::{closure#1} upvar_tys=() resume_ty=() yield_ty=() return_ty=() witness={main::{closure#1}}}`, the trait `Send` is not implemented for `NotSend` note: coroutine is not `Send` as this value is used across a yield --> $DIR/coroutine-print-verbose-2.rs:27:9 | diff --git a/tests/ui/coroutine/print/coroutine-print-verbose-3.stderr b/tests/ui/coroutine/print/coroutine-print-verbose-3.stderr index dce45aeae56..2f9f20cf1ff 100644 --- a/tests/ui/coroutine/print/coroutine-print-verbose-3.stderr +++ b/tests/ui/coroutine/print/coroutine-print-verbose-3.stderr @@ 
-11,7 +11,7 @@ LL | | }; | |_____^ expected `()`, found coroutine | = note: expected unit type `()` - found coroutine `{main::{closure#0} upvar_tys=?4t witness=?6t}` + found coroutine `{main::{closure#0} upvar_tys=?4t resume_ty=() yield_ty=i32 return_ty=&'?1 str witness=?6t}` error: aborting due to 1 previous error diff --git a/tests/ui/coverage-attr/allowed-positions.rs b/tests/ui/coverage-attr/allowed-positions.rs new file mode 100644 index 00000000000..f1169fa6570 --- /dev/null +++ b/tests/ui/coverage-attr/allowed-positions.rs @@ -0,0 +1,116 @@ +//! Tests where the `#[coverage(..)]` attribute can and cannot be used. + +//@ reference: attributes.coverage.allowed-positions + +#![feature(coverage_attribute)] +#![feature(extern_types)] +#![feature(impl_trait_in_assoc_type)] +#![warn(unused_attributes)] +#![coverage(off)] + +#[coverage(off)] +mod submod {} + +#[coverage(off)] //~ ERROR coverage attribute not allowed here [E0788] +type MyTypeAlias = (); + +#[coverage(off)] //~ ERROR [E0788] +trait MyTrait { + #[coverage(off)] //~ ERROR [E0788] + const TRAIT_ASSOC_CONST: u32; + + #[coverage(off)] //~ ERROR [E0788] + type TraitAssocType; + + #[coverage(off)] //~ ERROR [E0788] + fn trait_method(&self); + + #[coverage(off)] + fn trait_method_with_default(&self) {} + + #[coverage(off)] //~ ERROR [E0788] + fn trait_assoc_fn(); +} + +#[coverage(off)] +impl MyTrait for () { + const TRAIT_ASSOC_CONST: u32 = 0; + + #[coverage(off)] //~ ERROR [E0788] + type TraitAssocType = Self; + + #[coverage(off)] + fn trait_method(&self) {} + #[coverage(off)] + fn trait_method_with_default(&self) {} + #[coverage(off)] + fn trait_assoc_fn() {} +} + +trait HasAssocType { + type T; + fn constrain_assoc_type() -> Self::T; +} + +impl HasAssocType for () { + #[coverage(off)] //~ ERROR [E0788] + type T = impl Copy; + fn constrain_assoc_type() -> Self::T {} +} + +#[coverage(off)] //~ ERROR [E0788] +struct MyStruct { + #[coverage(off)] //~ ERROR [E0788] + field: u32, +} + +#[coverage(off)] +impl MyStruct { + #[coverage(off)] + fn method(&self) {} + #[coverage(off)] + fn assoc_fn() {} +} + +extern "C" { + #[coverage(off)] //~ ERROR [E0788] + static X: u32; + + #[coverage(off)] //~ ERROR [E0788] + type T; + + #[coverage(off)] //~ ERROR [E0788] + fn foreign_fn(); +} + +#[coverage(off)] +fn main() { + #[coverage(off)] //~ ERROR [E0788] + let _ = (); + + // Currently not allowed on let statements, even if they bind to a closure. + // It might be nice to support this as a special case someday, but trying + // to define the precise boundaries of that special case might be tricky. + #[coverage(off)] //~ ERROR [E0788] + let _let_closure = || (); + + // In situations where attributes can already be applied to expressions, + // the coverage attribute is allowed on closure expressions. + let _closure_tail_expr = { + #[coverage(off)] + || () + }; + + // Applying attributes to arbitrary expressions requires an unstable + // feature, but if that feature were enabled then this would be allowed. 
+ let _closure_expr = #[coverage(off)] || (); + //~^ ERROR attributes on expressions are experimental [E0658] + + match () { + #[coverage(off)] //~ ERROR [E0788] + () => (), + } + + #[coverage(off)] //~ ERROR [E0788] + return (); +} diff --git a/tests/ui/coverage-attr/allowed-positions.stderr b/tests/ui/coverage-attr/allowed-positions.stderr new file mode 100644 index 00000000000..34562a4da1b --- /dev/null +++ b/tests/ui/coverage-attr/allowed-positions.stderr @@ -0,0 +1,192 @@ +error[E0658]: attributes on expressions are experimental + --> $DIR/allowed-positions.rs:106:25 + | +LL | let _closure_expr = #[coverage(off)] || (); + | ^^^^^^^^^^^^^^^^ + | + = note: see issue #15701 <https://github.com/rust-lang/rust/issues/15701> for more information + = help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:14:1 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | type MyTypeAlias = (); + | ---------------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:17:1 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | / trait MyTrait { +LL | | #[coverage(off)] +LL | | const TRAIT_ASSOC_CONST: u32; +... | +LL | | fn trait_assoc_fn(); +LL | | } + | |_- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:61:1 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | / struct MyStruct { +LL | | #[coverage(off)] +LL | | field: u32, +LL | | } + | |_- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:63:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | field: u32, + | ---------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:88:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | let _ = (); + | ----------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:94:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | let _let_closure = || (); + | ------------------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:110:9 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | () => (), + | -------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:114:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | return (); + | --------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a 
function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:19:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | const TRAIT_ASSOC_CONST: u32; + | ----------------------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:22:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | type TraitAssocType; + | -------------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:25:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | fn trait_method(&self); + | ----------------------- function has no body + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:31:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | fn trait_assoc_fn(); + | -------------------- function has no body + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:39:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | type TraitAssocType = Self; + | --------------------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:56:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | type T = impl Copy; + | ------------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:76:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | static X: u32; + | -------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:79:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | type T; + | ------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error[E0788]: coverage attribute not allowed here + --> $DIR/allowed-positions.rs:82:5 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ +LL | fn foreign_fn(); + | ---------------- function has no body + | + = help: coverage attribute can be applied to a function (with body), impl block, or module + +error: aborting due to 18 previous errors + +Some errors have detailed explanations: E0658, E0788. +For more information about an error, try `rustc --explain E0658`. 
diff --git a/tests/ui/coverage-attr/bad-attr-ice.stderr b/tests/ui/coverage-attr/bad-attr-ice.feat.stderr index dc88bb8d1a4..50e1c39d4f8 100644 --- a/tests/ui/coverage-attr/bad-attr-ice.stderr +++ b/tests/ui/coverage-attr/bad-attr-ice.feat.stderr @@ -1,5 +1,5 @@ error: malformed `coverage` attribute input - --> $DIR/bad-attr-ice.rs:9:1 + --> $DIR/bad-attr-ice.rs:11:1 | LL | #[coverage] | ^^^^^^^^^^^ diff --git a/tests/ui/coverage-attr/bad-attr-ice.nofeat.stderr b/tests/ui/coverage-attr/bad-attr-ice.nofeat.stderr new file mode 100644 index 00000000000..e8bdd99c9b9 --- /dev/null +++ b/tests/ui/coverage-attr/bad-attr-ice.nofeat.stderr @@ -0,0 +1,26 @@ +error: malformed `coverage` attribute input + --> $DIR/bad-attr-ice.rs:11:1 + | +LL | #[coverage] + | ^^^^^^^^^^^ + | +help: the following are the possible correct uses + | +LL | #[coverage(off)] + | +LL | #[coverage(on)] + | + +error[E0658]: the `#[coverage]` attribute is an experimental feature + --> $DIR/bad-attr-ice.rs:11:1 + | +LL | #[coverage] + | ^^^^^^^^^^^ + | + = note: see issue #84605 <https://github.com/rust-lang/rust/issues/84605> for more information + = help: add `#![feature(coverage_attribute)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0658`. diff --git a/tests/ui/coverage-attr/bad-attr-ice.rs b/tests/ui/coverage-attr/bad-attr-ice.rs index 4b58989e3a2..8d57bbbf49a 100644 --- a/tests/ui/coverage-attr/bad-attr-ice.rs +++ b/tests/ui/coverage-attr/bad-attr-ice.rs @@ -1,3 +1,5 @@ +#![cfg_attr(feat, feature(coverage_attribute))] +//@ revisions: feat nofeat //@ compile-flags: -Cinstrument-coverage //@ needs-profiler-runtime //@ reference: attributes.coverage.syntax @@ -8,4 +10,8 @@ #[coverage] //~^ ERROR malformed `coverage` attribute input +//[nofeat]~| the `#[coverage]` attribute is an experimental feature fn main() {} + +// FIXME(#130766): When the `#[coverage(..)]` attribute is stabilized, +// get rid of the revisions and just make this a normal test. 
diff --git a/tests/ui/coverage-attr/bad-syntax.rs b/tests/ui/coverage-attr/bad-syntax.rs index ad6c5dc03f1..062e82ee4b6 100644 --- a/tests/ui/coverage-attr/bad-syntax.rs +++ b/tests/ui/coverage-attr/bad-syntax.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.syntax //@ reference: attributes.coverage.duplicates diff --git a/tests/ui/coverage-attr/bad-syntax.stderr b/tests/ui/coverage-attr/bad-syntax.stderr index 072a8c4ca94..5592e89070d 100644 --- a/tests/ui/coverage-attr/bad-syntax.stderr +++ b/tests/ui/coverage-attr/bad-syntax.stderr @@ -1,5 +1,5 @@ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:16:1 + --> $DIR/bad-syntax.rs:17:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -12,7 +12,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:19:1 + --> $DIR/bad-syntax.rs:20:1 | LL | #[coverage = true] | ^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:22:1 + --> $DIR/bad-syntax.rs:23:1 | LL | #[coverage()] | ^^^^^^^^^^^^^ @@ -38,7 +38,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:25:1 + --> $DIR/bad-syntax.rs:26:1 | LL | #[coverage(off, off)] | ^^^^^^^^^^^^^^^^^^^^^ @@ -51,7 +51,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:28:1 + --> $DIR/bad-syntax.rs:29:1 | LL | #[coverage(off, on)] | ^^^^^^^^^^^^^^^^^^^^ @@ -64,7 +64,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:31:1 + --> $DIR/bad-syntax.rs:32:1 | LL | #[coverage(bogus)] | ^^^^^^^^^^^^^^^^^^ @@ -77,7 +77,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:34:1 + --> $DIR/bad-syntax.rs:35:1 | LL | #[coverage(bogus, off)] | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -90,7 +90,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/bad-syntax.rs:37:1 + --> $DIR/bad-syntax.rs:38:1 | LL | #[coverage(off, bogus)] | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -103,7 +103,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: expected identifier, found `,` - --> $DIR/bad-syntax.rs:43:12 + --> $DIR/bad-syntax.rs:44:12 | LL | #[coverage(,off)] | ^ expected identifier @@ -115,25 +115,25 @@ LL + #[coverage(off)] | error: multiple `coverage` attributes - --> $DIR/bad-syntax.rs:8:1 + --> $DIR/bad-syntax.rs:9:1 | LL | #[coverage(off)] | ^^^^^^^^^^^^^^^^ help: remove this attribute | note: attribute also specified here - --> $DIR/bad-syntax.rs:9:1 + --> $DIR/bad-syntax.rs:10:1 | LL | #[coverage(off)] | ^^^^^^^^^^^^^^^^ error: multiple `coverage` attributes - --> $DIR/bad-syntax.rs:12:1 + --> $DIR/bad-syntax.rs:13:1 | LL | #[coverage(off)] | ^^^^^^^^^^^^^^^^ help: remove this attribute | note: attribute also specified here - --> $DIR/bad-syntax.rs:13:1 + --> $DIR/bad-syntax.rs:14:1 | LL | #[coverage(on)] | ^^^^^^^^^^^^^^^ diff --git a/tests/ui/coverage-attr/name-value.rs b/tests/ui/coverage-attr/name-value.rs index cdb2f6490f2..ffd9afe2ce1 100644 --- a/tests/ui/coverage-attr/name-value.rs +++ b/tests/ui/coverage-attr/name-value.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.syntax @@ -19,7 +20,7 @@ mod my_mod_inner { #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input -//~| ERROR attribute should be 
applied to a function definition or closure +//~| ERROR [E0788] struct MyStruct; #[coverage = "off"] @@ -27,22 +28,22 @@ struct MyStruct; impl MyStruct { #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] const X: u32 = 7; } #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input -//~| ERROR attribute should be applied to a function definition or closure +//~| ERROR [E0788] trait MyTrait { #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] const X: u32; #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] type T; } @@ -51,12 +52,12 @@ trait MyTrait { impl MyTrait for MyStruct { #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] const X: u32 = 8; #[coverage = "off"] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] type T = (); } diff --git a/tests/ui/coverage-attr/name-value.stderr b/tests/ui/coverage-attr/name-value.stderr index 38101764d6f..bfd22ed5451 100644 --- a/tests/ui/coverage-attr/name-value.stderr +++ b/tests/ui/coverage-attr/name-value.stderr @@ -1,5 +1,5 @@ error: malformed `coverage` attribute input - --> $DIR/name-value.rs:11:1 + --> $DIR/name-value.rs:12:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -12,7 +12,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:16:5 + --> $DIR/name-value.rs:17:5 | LL | #![coverage = "off"] | ^^^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | #![coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:20:1 + --> $DIR/name-value.rs:21:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -38,7 +38,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:28:5 + --> $DIR/name-value.rs:29:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -51,7 +51,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:25:1 + --> $DIR/name-value.rs:26:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -64,7 +64,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:38:5 + --> $DIR/name-value.rs:39:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -77,7 +77,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:43:5 + --> $DIR/name-value.rs:44:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -90,7 +90,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:34:1 + --> $DIR/name-value.rs:35:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -103,7 +103,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:52:5 + --> $DIR/name-value.rs:53:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -116,7 +116,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:57:5 + --> $DIR/name-value.rs:58:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -129,7 +129,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> 
$DIR/name-value.rs:49:1 + --> $DIR/name-value.rs:50:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -142,7 +142,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/name-value.rs:63:1 + --> $DIR/name-value.rs:64:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -154,17 +154,19 @@ LL | #[coverage(off)] LL | #[coverage(on)] | -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:20:1 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:21:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ ... LL | struct MyStruct; - | ---------------- not a function or closure + | ---------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:34:1 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:35:1 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ @@ -174,52 +176,64 @@ LL | | #[coverage = "off"] ... | LL | | type T; LL | | } - | |_- not a function or closure + | |_- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:38:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:39:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ ... LL | const X: u32; - | ------------- not a function or closure + | ------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:43:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:44:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ ... LL | type T; - | ------- not a function or closure + | ------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:28:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:29:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ ... LL | const X: u32 = 7; - | ----------------- not a function or closure + | ----------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:52:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:53:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ ... LL | const X: u32 = 8; - | ----------------- not a function or closure + | ----------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/name-value.rs:57:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/name-value.rs:58:5 | LL | #[coverage = "off"] | ^^^^^^^^^^^^^^^^^^^ ... 
LL | type T = (); - | ------------ not a function or closure + | ------------ not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module error: aborting due to 19 previous errors diff --git a/tests/ui/coverage-attr/no-coverage.rs b/tests/ui/coverage-attr/no-coverage.rs deleted file mode 100644 index 691456aee40..00000000000 --- a/tests/ui/coverage-attr/no-coverage.rs +++ /dev/null @@ -1,50 +0,0 @@ -//@ reference: attributes.coverage.allowed-positions - -#![feature(extern_types)] -#![feature(impl_trait_in_assoc_type)] -#![warn(unused_attributes)] -#![coverage(off)] - -#[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure -trait Trait { - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - const X: u32; - - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - type T; - - type U; -} - -#[coverage(off)] -impl Trait for () { - const X: u32 = 0; - - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - type T = Self; - - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - type U = impl Trait; //~ ERROR unconstrained opaque type -} - -extern "C" { - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - static X: u32; - - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - type T; -} - -#[coverage(off)] -fn main() { - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - let _ = (); - - match () { - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - () => (), - } - - #[coverage(off)] //~ ERROR attribute should be applied to a function definition or closure - return (); -} diff --git a/tests/ui/coverage-attr/no-coverage.stderr b/tests/ui/coverage-attr/no-coverage.stderr deleted file mode 100644 index 2421d2771f5..00000000000 --- a/tests/ui/coverage-attr/no-coverage.stderr +++ /dev/null @@ -1,96 +0,0 @@ -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:8:1 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | / trait Trait { -LL | | #[coverage(off)] -LL | | const X: u32; -... 
| -LL | | type U; -LL | | } - | |_- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:40:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | let _ = (); - | ----------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:44:9 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | () => (), - | -------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:48:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | return (); - | --------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:10:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | const X: u32; - | ------------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:13:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | type T; - | ------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:23:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | type T = Self; - | -------------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:26:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | type U = impl Trait; - | -------------------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:31:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | static X: u32; - | -------------- not a function or closure - -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/no-coverage.rs:34:5 - | -LL | #[coverage(off)] - | ^^^^^^^^^^^^^^^^ -LL | type T; - | ------- not a function or closure - -error: unconstrained opaque type - --> $DIR/no-coverage.rs:27:14 - | -LL | type U = impl Trait; - | ^^^^^^^^^^ - | - = note: `U` must be used in combination with a concrete type within the same impl - -error: aborting due to 11 previous errors - -For more information about this error, try `rustc --explain E0788`. 
diff --git a/tests/ui/coverage-attr/subword.rs b/tests/ui/coverage-attr/subword.rs index ff5b750e70e..84a5132ac4a 100644 --- a/tests/ui/coverage-attr/subword.rs +++ b/tests/ui/coverage-attr/subword.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.syntax diff --git a/tests/ui/coverage-attr/subword.stderr b/tests/ui/coverage-attr/subword.stderr index 3a106898f8b..a672ff4ac41 100644 --- a/tests/ui/coverage-attr/subword.stderr +++ b/tests/ui/coverage-attr/subword.stderr @@ -1,5 +1,5 @@ error: malformed `coverage` attribute input - --> $DIR/subword.rs:7:1 + --> $DIR/subword.rs:8:1 | LL | #[coverage(yes(milord))] | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -12,7 +12,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/subword.rs:10:1 + --> $DIR/subword.rs:11:1 | LL | #[coverage(no(milord))] | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/subword.rs:13:1 + --> $DIR/subword.rs:14:1 | LL | #[coverage(yes = "milord")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -38,7 +38,7 @@ LL | #[coverage(on)] | ~~~~~~~~~~~~~~~ error: malformed `coverage` attribute input - --> $DIR/subword.rs:16:1 + --> $DIR/subword.rs:17:1 | LL | #[coverage(no = "milord")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/coverage-attr/word-only.rs b/tests/ui/coverage-attr/word-only.rs index 496268fd8c8..d0f743938f3 100644 --- a/tests/ui/coverage-attr/word-only.rs +++ b/tests/ui/coverage-attr/word-only.rs @@ -1,3 +1,4 @@ +#![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.syntax @@ -19,7 +20,7 @@ mod my_mod_inner { #[coverage] //~^ ERROR malformed `coverage` attribute input -//~| ERROR attribute should be applied to a function definition or closure +//~| ERROR [E0788] struct MyStruct; #[coverage] @@ -27,22 +28,22 @@ struct MyStruct; impl MyStruct { #[coverage] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] const X: u32 = 7; } #[coverage] //~^ ERROR malformed `coverage` attribute input -//~| ERROR attribute should be applied to a function definition or closure +//~| ERROR [E0788] trait MyTrait { #[coverage] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] const X: u32; #[coverage] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] type T; } @@ -51,12 +52,12 @@ trait MyTrait { impl MyTrait for MyStruct { #[coverage] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] const X: u32 = 8; #[coverage] //~^ ERROR malformed `coverage` attribute input - //~| ERROR attribute should be applied to a function definition or closure + //~| ERROR [E0788] type T = (); } diff --git a/tests/ui/coverage-attr/word-only.stderr b/tests/ui/coverage-attr/word-only.stderr index 154ea61f3a3..bad50b0c961 100644 --- a/tests/ui/coverage-attr/word-only.stderr +++ b/tests/ui/coverage-attr/word-only.stderr @@ -1,5 +1,5 @@ error: malformed `coverage` attribute input - --> $DIR/word-only.rs:11:1 + --> $DIR/word-only.rs:12:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -12,7 +12,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:16:5 + --> 
$DIR/word-only.rs:17:5 | LL | #![coverage] | ^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | #![coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:20:1 + --> $DIR/word-only.rs:21:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -38,7 +38,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:28:5 + --> $DIR/word-only.rs:29:5 | LL | #[coverage] | ^^^^^^^^^^^ @@ -51,7 +51,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:25:1 + --> $DIR/word-only.rs:26:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -64,7 +64,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:38:5 + --> $DIR/word-only.rs:39:5 | LL | #[coverage] | ^^^^^^^^^^^ @@ -77,7 +77,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:43:5 + --> $DIR/word-only.rs:44:5 | LL | #[coverage] | ^^^^^^^^^^^ @@ -90,7 +90,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:34:1 + --> $DIR/word-only.rs:35:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -103,7 +103,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:52:5 + --> $DIR/word-only.rs:53:5 | LL | #[coverage] | ^^^^^^^^^^^ @@ -116,7 +116,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:57:5 + --> $DIR/word-only.rs:58:5 | LL | #[coverage] | ^^^^^^^^^^^ @@ -129,7 +129,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:49:1 + --> $DIR/word-only.rs:50:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -142,7 +142,7 @@ LL | #[coverage(on)] | error: malformed `coverage` attribute input - --> $DIR/word-only.rs:63:1 + --> $DIR/word-only.rs:64:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -154,17 +154,19 @@ LL | #[coverage(off)] LL | #[coverage(on)] | -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:20:1 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:21:1 | LL | #[coverage] | ^^^^^^^^^^^ ... LL | struct MyStruct; - | ---------------- not a function or closure + | ---------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:34:1 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:35:1 | LL | #[coverage] | ^^^^^^^^^^^ @@ -174,52 +176,64 @@ LL | | #[coverage] ... | LL | | type T; LL | | } - | |_- not a function or closure + | |_- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:38:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:39:5 | LL | #[coverage] | ^^^^^^^^^^^ ... LL | const X: u32; - | ------------- not a function or closure + | ------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:43:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:44:5 | LL | #[coverage] | ^^^^^^^^^^^ ... 
LL | type T; - | ------- not a function or closure + | ------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:28:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:29:5 | LL | #[coverage] | ^^^^^^^^^^^ ... LL | const X: u32 = 7; - | ----------------- not a function or closure + | ----------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:52:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:53:5 | LL | #[coverage] | ^^^^^^^^^^^ ... LL | const X: u32 = 8; - | ----------------- not a function or closure + | ----------------- not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module -error[E0788]: attribute should be applied to a function definition or closure - --> $DIR/word-only.rs:57:5 +error[E0788]: coverage attribute not allowed here + --> $DIR/word-only.rs:58:5 | LL | #[coverage] | ^^^^^^^^^^^ ... LL | type T = (); - | ------------ not a function or closure + | ------------ not a function, impl block, or module + | + = help: coverage attribute can be applied to a function (with body), impl block, or module error: aborting due to 19 previous errors diff --git a/tests/ui/drop/lint-tail-expr-drop-order.rs b/tests/ui/drop/lint-tail-expr-drop-order.rs index cc7c081740d..b2a5db0d871 100644 --- a/tests/ui/drop/lint-tail-expr-drop-order.rs +++ b/tests/ui/drop/lint-tail-expr-drop-order.rs @@ -2,7 +2,6 @@ // This lint is to capture potential change in program semantics // due to implementation of RFC 3606 <https://github.com/rust-lang/rfcs/pull/3606> //@ edition: 2021 -//@ build-fail #![deny(tail_expr_drop_order)] //~ NOTE: the lint level is defined here #![allow(dropping_copy_types)] diff --git a/tests/ui/drop/lint-tail-expr-drop-order.stderr b/tests/ui/drop/lint-tail-expr-drop-order.stderr index b6cf5f40b6e..92afae5af67 100644 --- a/tests/ui/drop/lint-tail-expr-drop-order.stderr +++ b/tests/ui/drop/lint-tail-expr-drop-order.stderr @@ -1,5 +1,5 @@ error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:42:15 + --> $DIR/lint-tail-expr-drop-order.rs:41:15 | LL | let x = LoudDropper; | - @@ -19,14 +19,14 @@ LL | } = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | LL | | } | |_^ note: `x` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... 
| @@ -34,13 +34,13 @@ LL | | } | |_^ = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages note: the lint level is defined here - --> $DIR/lint-tail-expr-drop-order.rs:7:9 + --> $DIR/lint-tail-expr-drop-order.rs:6:9 | LL | #![deny(tail_expr_drop_order)] | ^^^^^^^^^^^^^^^^^^^^ error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:67:19 + --> $DIR/lint-tail-expr-drop-order.rs:66:19 | LL | let x = LoudDropper; | - @@ -60,14 +60,14 @@ LL | } = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | LL | | } | |_^ note: `x` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | @@ -76,7 +76,7 @@ LL | | } = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:94:7 + --> $DIR/lint-tail-expr-drop-order.rs:93:7 | LL | let x = LoudDropper; | - @@ -96,14 +96,14 @@ LL | } = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | LL | | } | |_^ note: `x` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | @@ -112,7 +112,7 @@ LL | | } = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:147:5 + --> $DIR/lint-tail-expr-drop-order.rs:146:5 | LL | let future = f(); | ------ @@ -132,14 +132,14 @@ LL | } = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | LL | | } | |_^ note: `future` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... 
| @@ -148,7 +148,7 @@ LL | | } = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:164:14 + --> $DIR/lint-tail-expr-drop-order.rs:163:14 | LL | let x = T::default(); | - @@ -170,7 +170,7 @@ LL | } = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:178:5 + --> $DIR/lint-tail-expr-drop-order.rs:177:5 | LL | let x: Result<LoudDropper, ()> = Ok(LoudDropper); | - @@ -190,14 +190,14 @@ LL | } = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | LL | | } | |_^ note: `x` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | @@ -206,7 +206,7 @@ LL | | } = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:222:5 + --> $DIR/lint-tail-expr-drop-order.rs:221:5 | LL | let x = LoudDropper2; | - @@ -226,7 +226,7 @@ LL | } = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:195:5 + --> $DIR/lint-tail-expr-drop-order.rs:194:5 | LL | / impl Drop for LoudDropper3 { LL | | @@ -236,7 +236,7 @@ LL | | } LL | | } | |_____^ note: `x` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:207:5 + --> $DIR/lint-tail-expr-drop-order.rs:206:5 | LL | / impl Drop for LoudDropper2 { LL | | @@ -248,7 +248,7 @@ LL | | } = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages error: relative drop order changing in Rust 2024 - --> $DIR/lint-tail-expr-drop-order.rs:235:13 + --> $DIR/lint-tail-expr-drop-order.rs:234:13 | LL | LoudDropper.get() | ^^^^^^^^^^^ @@ -268,14 +268,14 @@ LL | )); = warning: this changes meaning in Rust 2024 = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> note: `#1` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | LL | | } | |_^ note: `_x` invokes this custom destructor - --> $DIR/lint-tail-expr-drop-order.rs:11:1 + --> $DIR/lint-tail-expr-drop-order.rs:10:1 | LL | / impl Drop for LoudDropper { ... | diff --git a/tests/ui/drop/tail_expr_drop_order-on-coroutine-unwind.rs b/tests/ui/drop/tail_expr_drop_order-on-coroutine-unwind.rs new file mode 100644 index 00000000000..5b9c24978b4 --- /dev/null +++ b/tests/ui/drop/tail_expr_drop_order-on-coroutine-unwind.rs @@ -0,0 +1,28 @@ +//@ edition: 2021 + +// Make sure we don't ICE when emitting the "lint" drop statement +// used for tail_expr_drop_order. 
+ +#![deny(tail_expr_drop_order)] + +struct Drop; +impl std::ops::Drop for Drop { + fn drop(&mut self) {} +} + +async fn func() -> Result<(), Drop> { + todo!() +} + +async fn retry_db() -> Result<(), Drop> { + loop { + match func().await { + //~^ ERROR relative drop order changing in Rust 2024 + //~| WARNING this changes meaning in Rust 2024 + Ok(()) => return Ok(()), + Err(e) => {} + } + } +} + +fn main() {} diff --git a/tests/ui/drop/tail_expr_drop_order-on-coroutine-unwind.stderr b/tests/ui/drop/tail_expr_drop_order-on-coroutine-unwind.stderr new file mode 100644 index 00000000000..d98100bc1b0 --- /dev/null +++ b/tests/ui/drop/tail_expr_drop_order-on-coroutine-unwind.stderr @@ -0,0 +1,52 @@ +error: relative drop order changing in Rust 2024 + --> $DIR/tail_expr_drop_order-on-coroutine-unwind.rs:19:15 + | +LL | match func().await { + | ^^^^^^^----- + | | | + | | this value will be stored in a temporary; let us call it `#1` + | | `#1` will be dropped later as of Edition 2024 + | this value will be stored in a temporary; let us call it `#2` + | up until Edition 2021 `#2` is dropped last but will be dropped earlier in Edition 2024 +... +LL | Err(e) => {} + | - + | | + | `e` calls a custom destructor + | `e` will be dropped later as of Edition 2024 +LL | } +LL | } + | - now the temporary value is dropped here, before the local variables in the block or statement + | + = warning: this changes meaning in Rust 2024 + = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/temporary-tail-expr-scope.html> +note: `#2` invokes this custom destructor + --> $DIR/tail_expr_drop_order-on-coroutine-unwind.rs:9:1 + | +LL | / impl std::ops::Drop for Drop { +LL | | fn drop(&mut self) {} +LL | | } + | |_^ +note: `#1` invokes this custom destructor + --> $DIR/tail_expr_drop_order-on-coroutine-unwind.rs:9:1 + | +LL | / impl std::ops::Drop for Drop { +LL | | fn drop(&mut self) {} +LL | | } + | |_^ +note: `e` invokes this custom destructor + --> $DIR/tail_expr_drop_order-on-coroutine-unwind.rs:9:1 + | +LL | / impl std::ops::Drop for Drop { +LL | | fn drop(&mut self) {} +LL | | } + | |_^ + = note: most of the time, changing drop order is harmless; inspect the `impl Drop`s for side effects like releasing locks or sending messages +note: the lint level is defined here + --> $DIR/tail_expr_drop_order-on-coroutine-unwind.rs:6:9 + | +LL | #![deny(tail_expr_drop_order)] + | ^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 1 previous error + diff --git a/tests/ui/dyn-star/cell.rs b/tests/ui/dyn-star/cell.rs new file mode 100644 index 00000000000..f4c7927a39d --- /dev/null +++ b/tests/ui/dyn-star/cell.rs @@ -0,0 +1,34 @@ +// This test with Cell also indirectly exercises UnsafeCell in dyn*. 
+// +//@ run-pass + +#![feature(dyn_star)] +#![allow(incomplete_features)] + +use std::cell::Cell; + +trait Rw<T> { + fn read(&self) -> T; + fn write(&self, v: T); +} + +impl<T: Copy> Rw<T> for Cell<T> { + fn read(&self) -> T { + self.get() + } + fn write(&self, v: T) { + self.set(v) + } +} + +fn make_dyn_star() -> dyn* Rw<usize> { + Cell::new(42usize) as dyn* Rw<usize> +} + +fn main() { + let x = make_dyn_star(); + + assert_eq!(x.read(), 42); + x.write(24); + assert_eq!(x.read(), 24); +} diff --git a/tests/ui/dyn-star/error.rs b/tests/ui/dyn-star/error.rs index 7288596f3fa..1d252d2ce42 100644 --- a/tests/ui/dyn-star/error.rs +++ b/tests/ui/dyn-star/error.rs @@ -6,7 +6,7 @@ use std::fmt::Debug; trait Foo {} fn make_dyn_star() { - let i = 42; + let i = 42usize; let dyn_i: dyn* Foo = i; //~ ERROR trait bound `usize: Foo` is not satisfied } diff --git a/tests/ui/dyn-star/float-as-dyn-star.stderr b/tests/ui/dyn-star/float-as-dyn-star.stderr index 9caba512e5f..06071a27afc 100644 --- a/tests/ui/dyn-star/float-as-dyn-star.stderr +++ b/tests/ui/dyn-star/float-as-dyn-star.stderr @@ -14,7 +14,9 @@ LL | f32::from_bits(0x1) as f64 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ `f64` needs to be a pointer-like type | = help: the trait `PointerLike` is not implemented for `f64` - = help: the trait `PointerLike` is implemented for `usize` + = help: the following other types implement trait `PointerLike`: + isize + usize error: aborting due to 1 previous error; 1 warning emitted diff --git a/tests/ui/dyn-star/illegal.rs b/tests/ui/dyn-star/illegal.rs new file mode 100644 index 00000000000..ce0d784fcd2 --- /dev/null +++ b/tests/ui/dyn-star/illegal.rs @@ -0,0 +1,16 @@ +#![feature(dyn_star)] +//~^ WARN the feature `dyn_star` is incomplete + +trait Foo {} + +pub fn lol(x: dyn* Foo + Send) { + x as dyn* Foo; + //~^ ERROR casting `(dyn* Foo + Send + 'static)` as `dyn* Foo` is invalid +} + +fn lol2(x: &dyn Foo) { + *x as dyn* Foo; + //~^ ERROR `dyn Foo` needs to have the same ABI as a pointer +} + +fn main() {} diff --git a/tests/ui/dyn-star/illegal.stderr b/tests/ui/dyn-star/illegal.stderr new file mode 100644 index 00000000000..fdf3c813a23 --- /dev/null +++ b/tests/ui/dyn-star/illegal.stderr @@ -0,0 +1,27 @@ +warning: the feature `dyn_star` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/illegal.rs:1:12 + | +LL | #![feature(dyn_star)] + | ^^^^^^^^ + | + = note: see issue #102425 <https://github.com/rust-lang/rust/issues/102425> for more information + = note: `#[warn(incomplete_features)]` on by default + +error[E0606]: casting `(dyn* Foo + Send + 'static)` as `dyn* Foo` is invalid + --> $DIR/illegal.rs:7:5 + | +LL | x as dyn* Foo; + | ^^^^^^^^^^^^^ + +error[E0277]: `dyn Foo` needs to have the same ABI as a pointer + --> $DIR/illegal.rs:12:5 + | +LL | *x as dyn* Foo; + | ^^ `dyn Foo` needs to be a pointer-like type + | + = help: the trait `PointerLike` is not implemented for `dyn Foo` + +error: aborting due to 2 previous errors; 1 warning emitted + +Some errors have detailed explanations: E0277, E0606. +For more information about an error, try `rustc --explain E0277`. 
diff --git a/tests/ui/dyn-star/pointer-like-impl-rules.rs b/tests/ui/dyn-star/pointer-like-impl-rules.rs new file mode 100644 index 00000000000..c234e86e09a --- /dev/null +++ b/tests/ui/dyn-star/pointer-like-impl-rules.rs @@ -0,0 +1,82 @@ +//@ check-fail + +#![feature(extern_types)] +#![feature(pointer_like_trait)] + +use std::marker::PointerLike; + +struct NotReprTransparent; +impl PointerLike for NotReprTransparent {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: the struct `NotReprTransparent` is not `repr(transparent)` + +#[repr(transparent)] +struct FieldIsPl(usize); +impl PointerLike for FieldIsPl {} + +#[repr(transparent)] +struct FieldIsPlAndHasOtherField(usize, ()); +impl PointerLike for FieldIsPlAndHasOtherField {} + +#[repr(transparent)] +struct FieldIsNotPl(u8); +impl PointerLike for FieldIsNotPl {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: the field `0` of struct `FieldIsNotPl` does not implement `PointerLike` + +#[repr(transparent)] +struct GenericFieldIsNotPl<T>(T); +impl<T> PointerLike for GenericFieldIsNotPl<T> {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: the field `0` of struct `GenericFieldIsNotPl<T>` does not implement `PointerLike` + +#[repr(transparent)] +struct GenericFieldIsPl<T>(T); +impl<T: PointerLike> PointerLike for GenericFieldIsPl<T> {} + +#[repr(transparent)] +struct IsZeroSized(()); +impl PointerLike for IsZeroSized {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: the struct `IsZeroSized` is `repr(transparent)`, but does not have a non-trivial field + +trait SomeTrait {} +impl PointerLike for dyn SomeTrait {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: types of dynamic or unknown size + +extern "C" { + type ExternType; +} +impl PointerLike for ExternType {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: types of dynamic or unknown size + +struct LocalSizedType(&'static str); +struct LocalUnsizedType(str); + +// This is not a special error but a normal coherence error, +// which should still happen. 
+impl PointerLike for &LocalSizedType {} +//~^ ERROR: conflicting implementations of trait `PointerLike` +//~| NOTE: conflicting implementation in crate `core` + +impl PointerLike for &LocalUnsizedType {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: references to dynamically-sized types are too large to be `PointerLike` + +impl PointerLike for Box<LocalSizedType> {} +//~^ ERROR: conflicting implementations of trait `PointerLike` +//~| NOTE: conflicting implementation in crate `alloc` + +impl PointerLike for Box<LocalUnsizedType> {} +//~^ ERROR: implementation must be applied to type that +//~| NOTE: boxes of dynamically-sized types are too large to be `PointerLike` + +fn expects_pointer_like(x: impl PointerLike) {} + +fn main() { + expects_pointer_like(FieldIsPl(1usize)); + expects_pointer_like(FieldIsPlAndHasOtherField(1usize, ())); + expects_pointer_like(GenericFieldIsPl(1usize)); +} diff --git a/tests/ui/dyn-star/pointer-like-impl-rules.stderr b/tests/ui/dyn-star/pointer-like-impl-rules.stderr new file mode 100644 index 00000000000..39f08f442c4 --- /dev/null +++ b/tests/ui/dyn-star/pointer-like-impl-rules.stderr @@ -0,0 +1,85 @@ +error[E0119]: conflicting implementations of trait `PointerLike` for type `&LocalSizedType` + --> $DIR/pointer-like-impl-rules.rs:60:1 + | +LL | impl PointerLike for &LocalSizedType {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: conflicting implementation in crate `core`: + - impl<T> PointerLike for &T; + +error[E0119]: conflicting implementations of trait `PointerLike` for type `Box<LocalSizedType>` + --> $DIR/pointer-like-impl-rules.rs:68:1 + | +LL | impl PointerLike for Box<LocalSizedType> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: conflicting implementation in crate `alloc`: + - impl<T> PointerLike for Box<T>; + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:9:1 + | +LL | impl PointerLike for NotReprTransparent {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the struct `NotReprTransparent` is not `repr(transparent)` + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:23:1 + | +LL | impl PointerLike for FieldIsNotPl {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the field `0` of struct `FieldIsNotPl` does not implement `PointerLike` + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:29:1 + | +LL | impl<T> PointerLike for GenericFieldIsNotPl<T> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the field `0` of struct `GenericFieldIsNotPl<T>` does not implement `PointerLike` + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:39:1 + | +LL | impl PointerLike for IsZeroSized {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the struct `IsZeroSized` is `repr(transparent)`, but does not have a non-trivial field (it is zero-sized) + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:44:1 + | +LL | impl PointerLike for dyn 
SomeTrait {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: types of dynamic or unknown size may not implement `PointerLike` + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:51:1 + | +LL | impl PointerLike for ExternType {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: types of dynamic or unknown size may not implement `PointerLike` + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:64:1 + | +LL | impl PointerLike for &LocalUnsizedType {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: references to dynamically-sized types are too large to be `PointerLike` + +error: implementation must be applied to type that has the same ABI as a pointer, or is `repr(transparent)` and whose field is `PointerLike` + --> $DIR/pointer-like-impl-rules.rs:72:1 + | +LL | impl PointerLike for Box<LocalUnsizedType> {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: boxes of dynamically-sized types are too large to be `PointerLike` + +error: aborting due to 10 previous errors + +For more information about this error, try `rustc --explain E0119`. diff --git a/tests/ui/error-codes/E0010-teach.rs b/tests/ui/error-codes/E0010-teach.rs index 146e68df14a..0eef2478387 100644 --- a/tests/ui/error-codes/E0010-teach.rs +++ b/tests/ui/error-codes/E0010-teach.rs @@ -3,5 +3,5 @@ #![allow(warnings)] const CON: Vec<i32> = vec![1, 2, 3]; //~ ERROR E0010 -//~| ERROR cannot call non-const fn +//~| ERROR cannot call non-const method fn main() {} diff --git a/tests/ui/error-codes/E0010-teach.stderr b/tests/ui/error-codes/E0010-teach.stderr index 37a9892ccbf..82bbe01aef7 100644 --- a/tests/ui/error-codes/E0010-teach.stderr +++ b/tests/ui/error-codes/E0010-teach.stderr @@ -7,7 +7,7 @@ LL | const CON: Vec<i32> = vec![1, 2, 3]; = note: The runtime heap is not yet available at compile-time, so no runtime heap allocations can be created. 
= note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [i32]>::into_vec::<std::alloc::Global>` in constants +error[E0015]: cannot call non-const method `slice::<impl [i32]>::into_vec::<std::alloc::Global>` in constants --> $DIR/E0010-teach.rs:5:23 | LL | const CON: Vec<i32> = vec![1, 2, 3]; diff --git a/tests/ui/error-codes/E0010.rs b/tests/ui/error-codes/E0010.rs index 11721efffcb..edb96714dd3 100644 --- a/tests/ui/error-codes/E0010.rs +++ b/tests/ui/error-codes/E0010.rs @@ -1,5 +1,5 @@ #![allow(warnings)] const CON: Vec<i32> = vec![1, 2, 3]; //~ ERROR E0010 -//~| ERROR cannot call non-const fn +//~| ERROR cannot call non-const method fn main() {} diff --git a/tests/ui/error-codes/E0010.stderr b/tests/ui/error-codes/E0010.stderr index 08947222422..87b722b5f65 100644 --- a/tests/ui/error-codes/E0010.stderr +++ b/tests/ui/error-codes/E0010.stderr @@ -6,7 +6,7 @@ LL | const CON: Vec<i32> = vec![1, 2, 3]; | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [i32]>::into_vec::<std::alloc::Global>` in constants +error[E0015]: cannot call non-const method `slice::<impl [i32]>::into_vec::<std::alloc::Global>` in constants --> $DIR/E0010.rs:3:23 | LL | const CON: Vec<i32> = vec![1, 2, 3]; diff --git a/tests/ui/error-codes/E0015.rs b/tests/ui/error-codes/E0015.rs index b0211358d81..7a80308d7bb 100644 --- a/tests/ui/error-codes/E0015.rs +++ b/tests/ui/error-codes/E0015.rs @@ -3,6 +3,6 @@ fn create_some() -> Option<u8> { } const FOO: Option<u8> = create_some(); -//~^ ERROR cannot call non-const fn `create_some` in constants [E0015] +//~^ ERROR cannot call non-const function `create_some` in constants [E0015] fn main() {} diff --git a/tests/ui/error-codes/E0015.stderr b/tests/ui/error-codes/E0015.stderr index 9d892a3e098..0c983d28434 100644 --- a/tests/ui/error-codes/E0015.stderr +++ b/tests/ui/error-codes/E0015.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `create_some` in constants +error[E0015]: cannot call non-const function `create_some` in constants --> $DIR/E0015.rs:5:25 | LL | const FOO: Option<u8> = create_some(); diff --git a/tests/ui/error-codes/E0452.rs b/tests/ui/error-codes/E0452.rs index 5066cd99be9..4e5a6c93014 100644 --- a/tests/ui/error-codes/E0452.rs +++ b/tests/ui/error-codes/E0452.rs @@ -2,5 +2,7 @@ //~| ERROR E0452 //~| ERROR E0452 //~| ERROR E0452 + //~| ERROR E0452 + //~| ERROR E0452 fn main() { } diff --git a/tests/ui/error-codes/E0452.stderr b/tests/ui/error-codes/E0452.stderr index 986c135ed89..c20429e363d 100644 --- a/tests/ui/error-codes/E0452.stderr +++ b/tests/ui/error-codes/E0452.stderr @@ -28,6 +28,22 @@ LL | #![allow(foo = "")] | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 4 previous errors +error[E0452]: malformed lint attribute input + --> $DIR/E0452.rs:1:10 + | +LL | #![allow(foo = "")] + | ^^^^^^^^ bad attribute argument + | + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0452]: malformed lint attribute input + --> $DIR/E0452.rs:1:10 + | +LL | #![allow(foo = "")] + | ^^^^^^^^ bad attribute argument + | + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 6 previous errors For more information about this error, try `rustc --explain E0452`. 
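The E0015 rewording that runs through the hunks above and the explicit-tail-calls/constck hunks below distinguishes the kind of callee. A minimal sketch with illustrative names (not part of the test suite) showing the three wordings in a const context:

fn free() -> u32 { 0 }

struct Widget;

impl Widget {
    fn build() -> Widget { Widget }   // associated function
    fn size(&self) -> u32 { 1 }       // method
}

const A: u32 = free(); //~ ERROR cannot call non-const function `free` in constants
const B: Widget = Widget::build(); //~ ERROR cannot call non-const associated function
const C: u32 = Widget.size(); //~ ERROR cannot call non-const method

fn main() {}

Before this change all three cases were reported uniformly as "cannot call non-const fn", as the removed lines in the surrounding .stderr hunks show.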
diff --git a/tests/ui/explicit-tail-calls/constck.rs b/tests/ui/explicit-tail-calls/constck.rs index 36fc3ef6f99..4179fe8caf3 100644 --- a/tests/ui/explicit-tail-calls/constck.rs +++ b/tests/ui/explicit-tail-calls/constck.rs @@ -4,14 +4,14 @@ const fn f() { if false { become not_const(); - //~^ error: cannot call non-const fn `not_const` in constant functions + //~^ error: cannot call non-const function `not_const` in constant functions } } const fn g((): ()) { if false { become yes_const(not_const()); - //~^ error: cannot call non-const fn `not_const` in constant functions + //~^ error: cannot call non-const function `not_const` in constant functions } } diff --git a/tests/ui/explicit-tail-calls/constck.stderr b/tests/ui/explicit-tail-calls/constck.stderr index d9967c45fa0..c223d273b38 100644 --- a/tests/ui/explicit-tail-calls/constck.stderr +++ b/tests/ui/explicit-tail-calls/constck.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `not_const` in constant functions +error[E0015]: cannot call non-const function `not_const` in constant functions --> $DIR/constck.rs:6:16 | LL | become not_const(); @@ -6,7 +6,7 @@ LL | become not_const(); | = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `not_const` in constant functions +error[E0015]: cannot call non-const function `not_const` in constant functions --> $DIR/constck.rs:13:26 | LL | become yes_const(not_const()); diff --git a/tests/ui/feature-gates/feature-gate-auto-traits.rs b/tests/ui/feature-gates/feature-gate-auto-traits.rs index 80cfa9cee89..aab9e784fe9 100644 --- a/tests/ui/feature-gates/feature-gate-auto-traits.rs +++ b/tests/ui/feature-gates/feature-gate-auto-traits.rs @@ -7,6 +7,6 @@ auto trait AutoDummyTrait {} //~^ ERROR auto traits are experimental and possibly buggy impl !AutoDummyTrait for DummyStruct {} -//~^ ERROR negative trait bounds are not yet fully implemented; use marker types for now +//~^ ERROR negative trait bounds are not fully implemented; use marker types for now fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-auto-traits.stderr b/tests/ui/feature-gates/feature-gate-auto-traits.stderr index 139229ca809..8fa5168b2d0 100644 --- a/tests/ui/feature-gates/feature-gate-auto-traits.stderr +++ b/tests/ui/feature-gates/feature-gate-auto-traits.stderr @@ -8,7 +8,7 @@ LL | auto trait AutoDummyTrait {} = help: add `#![feature(auto_traits)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error[E0658]: negative trait bounds are not yet fully implemented; use marker types for now +error[E0658]: negative trait bounds are not fully implemented; use marker types for now --> $DIR/feature-gate-auto-traits.rs:9:6 | LL | impl !AutoDummyTrait for DummyStruct {} diff --git a/tests/ui/feature-gates/feature-gate-no-coverage.rs b/tests/ui/feature-gates/feature-gate-coverage-attribute.rs index 9c28b293854..0a463755f13 100644 --- a/tests/ui/feature-gates/feature-gate-no-coverage.rs +++ b/tests/ui/feature-gates/feature-gate-coverage-attribute.rs @@ -7,7 +7,7 @@ struct Foo { b: u32, } -#[coverage(off)] +#[coverage(off)] //~ ERROR the `#[coverage]` attribute is an experimental feature fn requires_feature_coverage() -> bool { let bar = Foo { a: 0, b: 0 }; bar == Foo { a: 0, b: 0 } diff --git a/tests/ui/feature-gates/feature-gate-coverage-attribute.stderr b/tests/ui/feature-gates/feature-gate-coverage-attribute.stderr new file mode 100644 index 
00000000000..00e0f0afbde --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-coverage-attribute.stderr @@ -0,0 +1,22 @@ +error[E0557]: feature has been removed + --> $DIR/feature-gate-coverage-attribute.rs:2:12 + | +LL | #![feature(no_coverage)] + | ^^^^^^^^^^^ feature has been removed + | + = note: renamed to `coverage_attribute` + +error[E0658]: the `#[coverage]` attribute is an experimental feature + --> $DIR/feature-gate-coverage-attribute.rs:10:1 + | +LL | #[coverage(off)] + | ^^^^^^^^^^^^^^^^ + | + = note: see issue #84605 <https://github.com/rust-lang/rust/issues/84605> for more information + = help: add `#![feature(coverage_attribute)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0557, E0658. +For more information about an error, try `rustc --explain E0557`. diff --git a/tests/ui/feature-gates/feature-gate-no-coverage.stderr b/tests/ui/feature-gates/feature-gate-no-coverage.stderr deleted file mode 100644 index fa378bbd9dc..00000000000 --- a/tests/ui/feature-gates/feature-gate-no-coverage.stderr +++ /dev/null @@ -1,11 +0,0 @@ -error[E0557]: feature has been removed - --> $DIR/feature-gate-no-coverage.rs:2:12 - | -LL | #![feature(no_coverage)] - | ^^^^^^^^^^^ feature has been removed - | - = note: renamed to `coverage_attribute` - -error: aborting due to 1 previous error - -For more information about this error, try `rustc --explain E0557`. diff --git a/tests/ui/issues/issue-7364.rs b/tests/ui/issues/issue-7364.rs index 0608f902fde..4ce9beb68cd 100644 --- a/tests/ui/issues/issue-7364.rs +++ b/tests/ui/issues/issue-7364.rs @@ -3,6 +3,6 @@ use std::cell::RefCell; // Regression test for issue 7364 static boxed: Box<RefCell<isize>> = Box::new(RefCell::new(0)); //~^ ERROR `RefCell<isize>` cannot be shared between threads safely [E0277] -//~| ERROR cannot call non-const fn +//~| ERROR cannot call non-const associated function fn main() { } diff --git a/tests/ui/issues/issue-7364.stderr b/tests/ui/issues/issue-7364.stderr index 65ec1d75053..a47a90c90ce 100644 --- a/tests/ui/issues/issue-7364.stderr +++ b/tests/ui/issues/issue-7364.stderr @@ -11,7 +11,7 @@ note: required because it appears within the type `Box<RefCell<isize>>` --> $SRC_DIR/alloc/src/boxed.rs:LL:COL = note: shared static variables must have a type that implements `Sync` -error[E0015]: cannot call non-const fn `Box::<RefCell<isize>>::new` in statics +error[E0015]: cannot call non-const associated function `Box::<RefCell<isize>>::new` in statics --> $DIR/issue-7364.rs:4:37 | LL | static boxed: Box<RefCell<isize>> = Box::new(RefCell::new(0)); diff --git a/tests/ui/lint/command-line-register-unknown-lint-tool.stderr b/tests/ui/lint/command-line-register-unknown-lint-tool.stderr index 65aa1962830..7bdee5ec9a4 100644 --- a/tests/ui/lint/command-line-register-unknown-lint-tool.stderr +++ b/tests/ui/lint/command-line-register-unknown-lint-tool.stderr @@ -7,6 +7,11 @@ error[E0602]: unknown lint tool: `unknown_tool` = note: requested on the command line with `-A unknown_tool::foo` = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 2 previous errors +error[E0602]: unknown lint tool: `unknown_tool` + | + = note: requested on the command line with `-A unknown_tool::foo` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 3 previous errors For more information about 
this error, try `rustc --explain E0602`. diff --git a/tests/ui/lint/force-warn/warnings-lint-group.stderr b/tests/ui/lint/force-warn/warnings-lint-group.stderr index 3e73269a233..a303bb573f9 100644 --- a/tests/ui/lint/force-warn/warnings-lint-group.stderr +++ b/tests/ui/lint/force-warn/warnings-lint-group.stderr @@ -4,6 +4,10 @@ error[E0602]: `warnings` lint group is not supported with ´--force-warn´ | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 2 previous errors +error[E0602]: `warnings` lint group is not supported with ´--force-warn´ + | + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 3 previous errors For more information about this error, try `rustc --explain E0602`. diff --git a/tests/ui/lint/lint-malformed.rs b/tests/ui/lint/lint-malformed.rs index 188e702f98b..cf5570753d8 100644 --- a/tests/ui/lint/lint-malformed.rs +++ b/tests/ui/lint/lint-malformed.rs @@ -3,4 +3,6 @@ //~| ERROR malformed lint attribute //~| ERROR malformed lint attribute //~| ERROR malformed lint attribute + //~| ERROR malformed lint attribute + //~| ERROR malformed lint attribute fn main() { } diff --git a/tests/ui/lint/lint-malformed.stderr b/tests/ui/lint/lint-malformed.stderr index 2c9f045de71..0bdcc293b65 100644 --- a/tests/ui/lint/lint-malformed.stderr +++ b/tests/ui/lint/lint-malformed.stderr @@ -34,6 +34,22 @@ LL | #![allow(bar = "baz")] | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 5 previous errors +error[E0452]: malformed lint attribute input + --> $DIR/lint-malformed.rs:2:10 + | +LL | #![allow(bar = "baz")] + | ^^^^^^^^^^^ bad attribute argument + | + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0452]: malformed lint attribute input + --> $DIR/lint-malformed.rs:2:10 + | +LL | #![allow(bar = "baz")] + | ^^^^^^^^^^^ bad attribute argument + | + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 7 previous errors For more information about this error, try `rustc --explain E0452`. diff --git a/tests/ui/lint/lint-overflowing-ops.rs b/tests/ui/lint/lint-overflowing-ops.rs index c5b9f892b0b..eec347010ad 100644 --- a/tests/ui/lint/lint-overflowing-ops.rs +++ b/tests/ui/lint/lint-overflowing-ops.rs @@ -1,5 +1,5 @@ // Tests that overflowing or bound-exceeding operations -// are correclty linted including when they are const promoted +// are correctly linted including when they are const promoted // We are using "-Z deduplicate-diagnostics=yes" because different // build configurations emit different number of duplicate diagnostics diff --git a/tests/ui/lint/unconditional_panic_promoted.rs b/tests/ui/lint/unconditional_panic_promoted.rs index 37bcf046513..e9ae834c8d8 100644 --- a/tests/ui/lint/unconditional_panic_promoted.rs +++ b/tests/ui/lint/unconditional_panic_promoted.rs @@ -2,7 +2,7 @@ fn main() { // MIR encodes this as a reborrow from a promoted constant. - // But the array lenth can still be gotten from the type. + // But the array length can still be gotten from the type. 
let slice = &[0, 1]; let _ = slice[2]; //~ ERROR: this operation will panic at runtime [unconditional_panic] } diff --git a/tests/ui/macros/macros-nonfatal-errors.rs b/tests/ui/macros/macros-nonfatal-errors.rs index 46e865031ec..658455b1b5b 100644 --- a/tests/ui/macros/macros-nonfatal-errors.rs +++ b/tests/ui/macros/macros-nonfatal-errors.rs @@ -39,12 +39,18 @@ enum AttrOnInnerExpression { Baz, } -#[derive(Default)] //~ ERROR no default declared +#[derive(Default)] //~ ERROR `#[derive(Default)]` on enum with no `#[default]` enum NoDeclaredDefault { Foo, Bar, } +#[derive(Default)] //~ ERROR `#[derive(Default)]` on enum with no `#[default]` +enum NoDeclaredDefaultWithoutUnitVariant { + Foo(i32), + Bar(i32), +} + #[derive(Default)] //~ ERROR multiple declared defaults enum MultipleDefaults { #[default] diff --git a/tests/ui/macros/macros-nonfatal-errors.stderr b/tests/ui/macros/macros-nonfatal-errors.stderr index abf43e2a009..fd5e41986a8 100644 --- a/tests/ui/macros/macros-nonfatal-errors.stderr +++ b/tests/ui/macros/macros-nonfatal-errors.stderr @@ -46,18 +46,43 @@ LL | Bar([u8; #[default] 1]), | = help: consider a manual implementation of `Default` -error: no default declared +error[E0665]: `#[derive(Default)]` on enum with no `#[default]` --> $DIR/macros-nonfatal-errors.rs:42:10 | -LL | #[derive(Default)] - | ^^^^^^^ +LL | #[derive(Default)] + | ^^^^^^^ +LL | / enum NoDeclaredDefault { +LL | | Foo, +LL | | Bar, +LL | | } + | |_- this enum needs a unit variant marked with `#[default]` | - = help: make a unit variant default by placing `#[default]` above it = note: this error originates in the derive macro `Default` (in Nightly builds, run with -Z macro-backtrace for more info) +help: make this unit variant default by placing `#[default]` on it + | +LL | #[default] Foo, + | ++++++++++ +help: make this unit variant default by placing `#[default]` on it + | +LL | #[default] Bar, + | ++++++++++ -error: multiple declared defaults +error[E0665]: `#[derive(Default)]` on enum with no `#[default]` --> $DIR/macros-nonfatal-errors.rs:48:10 | +LL | #[derive(Default)] + | ^^^^^^^ +LL | / enum NoDeclaredDefaultWithoutUnitVariant { +LL | | Foo(i32), +LL | | Bar(i32), +LL | | } + | |_- this enum needs a unit variant marked with `#[default]` + | + = note: this error originates in the derive macro `Default` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: multiple declared defaults + --> $DIR/macros-nonfatal-errors.rs:54:10 + | LL | #[derive(Default)] | ^^^^^^^ ... 
@@ -74,7 +99,7 @@ LL | Baz, = note: this error originates in the derive macro `Default` (in Nightly builds, run with -Z macro-backtrace for more info) error: `#[default]` attribute does not accept a value - --> $DIR/macros-nonfatal-errors.rs:60:5 + --> $DIR/macros-nonfatal-errors.rs:66:5 | LL | #[default = 1] | ^^^^^^^^^^^^^^ @@ -82,7 +107,7 @@ LL | #[default = 1] = help: try using `#[default]` error: multiple `#[default]` attributes - --> $DIR/macros-nonfatal-errors.rs:68:5 + --> $DIR/macros-nonfatal-errors.rs:74:5 | LL | #[default] | ---------- `#[default]` used here @@ -93,13 +118,13 @@ LL | Foo, | = note: only one `#[default]` attribute is needed help: try removing this - --> $DIR/macros-nonfatal-errors.rs:67:5 + --> $DIR/macros-nonfatal-errors.rs:73:5 | LL | #[default] | ^^^^^^^^^^ error: multiple `#[default]` attributes - --> $DIR/macros-nonfatal-errors.rs:78:5 + --> $DIR/macros-nonfatal-errors.rs:84:5 | LL | #[default] | ---------- `#[default]` used here @@ -111,7 +136,7 @@ LL | Foo, | = note: only one `#[default]` attribute is needed help: try removing these - --> $DIR/macros-nonfatal-errors.rs:75:5 + --> $DIR/macros-nonfatal-errors.rs:81:5 | LL | #[default] | ^^^^^^^^^^ @@ -121,7 +146,7 @@ LL | #[default] | ^^^^^^^^^^ error: the `#[default]` attribute may only be used on unit enum variants - --> $DIR/macros-nonfatal-errors.rs:85:5 + --> $DIR/macros-nonfatal-errors.rs:91:5 | LL | Foo {}, | ^^^ @@ -129,7 +154,7 @@ LL | Foo {}, = help: consider a manual implementation of `Default` error: default variant must be exhaustive - --> $DIR/macros-nonfatal-errors.rs:93:5 + --> $DIR/macros-nonfatal-errors.rs:99:5 | LL | #[non_exhaustive] | ----------------- declared `#[non_exhaustive]` here @@ -139,37 +164,37 @@ LL | Foo, = help: consider a manual implementation of `Default` error: asm template must be a string literal - --> $DIR/macros-nonfatal-errors.rs:98:10 + --> $DIR/macros-nonfatal-errors.rs:104:10 | LL | asm!(invalid); | ^^^^^^^ error: `concat_idents!()` requires ident args - --> $DIR/macros-nonfatal-errors.rs:101:5 + --> $DIR/macros-nonfatal-errors.rs:107:5 | LL | concat_idents!("not", "idents"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: argument must be a string literal - --> $DIR/macros-nonfatal-errors.rs:103:17 + --> $DIR/macros-nonfatal-errors.rs:109:17 | LL | option_env!(invalid); | ^^^^^^^ error: expected string literal - --> $DIR/macros-nonfatal-errors.rs:104:10 + --> $DIR/macros-nonfatal-errors.rs:110:10 | LL | env!(invalid); | ^^^^^^^ error: `env!()` takes 1 or 2 arguments - --> $DIR/macros-nonfatal-errors.rs:105:5 + --> $DIR/macros-nonfatal-errors.rs:111:5 | LL | env!(foo, abr, baz); | ^^^^^^^^^^^^^^^^^^^ error: environment variable `RUST_HOPEFULLY_THIS_DOESNT_EXIST` not defined at compile time - --> $DIR/macros-nonfatal-errors.rs:106:5 + --> $DIR/macros-nonfatal-errors.rs:112:5 | LL | env!("RUST_HOPEFULLY_THIS_DOESNT_EXIST"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -178,7 +203,7 @@ LL | env!("RUST_HOPEFULLY_THIS_DOESNT_EXIST"); = note: this error originates in the macro `env` (in Nightly builds, run with -Z macro-backtrace for more info) error: format argument must be a string literal - --> $DIR/macros-nonfatal-errors.rs:108:13 + --> $DIR/macros-nonfatal-errors.rs:114:13 | LL | format!(invalid); | ^^^^^^^ @@ -189,19 +214,19 @@ LL | format!("{}", invalid); | +++++ error: argument must be a string literal - --> $DIR/macros-nonfatal-errors.rs:110:14 + --> $DIR/macros-nonfatal-errors.rs:116:14 | LL | include!(invalid); | ^^^^^^^ error: argument must be a string literal - 
--> $DIR/macros-nonfatal-errors.rs:112:18 + --> $DIR/macros-nonfatal-errors.rs:118:18 | LL | include_str!(invalid); | ^^^^^^^ error: couldn't read `$DIR/i'd be quite surprised if a file with this name existed`: $FILE_NOT_FOUND_MSG - --> $DIR/macros-nonfatal-errors.rs:113:5 + --> $DIR/macros-nonfatal-errors.rs:119:5 | LL | include_str!("i'd be quite surprised if a file with this name existed"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -209,13 +234,13 @@ LL | include_str!("i'd be quite surprised if a file with this name existed") = note: this error originates in the macro `include_str` (in Nightly builds, run with -Z macro-backtrace for more info) error: argument must be a string literal - --> $DIR/macros-nonfatal-errors.rs:114:20 + --> $DIR/macros-nonfatal-errors.rs:120:20 | LL | include_bytes!(invalid); | ^^^^^^^ error: couldn't read `$DIR/i'd be quite surprised if a file with this name existed`: $FILE_NOT_FOUND_MSG - --> $DIR/macros-nonfatal-errors.rs:115:5 + --> $DIR/macros-nonfatal-errors.rs:121:5 | LL | include_bytes!("i'd be quite surprised if a file with this name existed"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -223,13 +248,13 @@ LL | include_bytes!("i'd be quite surprised if a file with this name existed = note: this error originates in the macro `include_bytes` (in Nightly builds, run with -Z macro-backtrace for more info) error: trace_macros! accepts only `true` or `false` - --> $DIR/macros-nonfatal-errors.rs:117:5 + --> $DIR/macros-nonfatal-errors.rs:123:5 | LL | trace_macros!(invalid); | ^^^^^^^^^^^^^^^^^^^^^^ error: default variant must be exhaustive - --> $DIR/macros-nonfatal-errors.rs:127:9 + --> $DIR/macros-nonfatal-errors.rs:133:9 | LL | #[non_exhaustive] | ----------------- declared `#[non_exhaustive]` here @@ -239,10 +264,11 @@ LL | Foo, = help: consider a manual implementation of `Default` error: cannot find macro `llvm_asm` in this scope - --> $DIR/macros-nonfatal-errors.rs:99:5 + --> $DIR/macros-nonfatal-errors.rs:105:5 | LL | llvm_asm!(invalid); | ^^^^^^^^ -error: aborting due to 28 previous errors +error: aborting due to 29 previous errors +For more information about this error, try `rustc --explain E0665`. diff --git a/tests/ui/moves/auxiliary/suggest-borrow-for-generic-arg-aux.rs b/tests/ui/moves/auxiliary/suggest-borrow-for-generic-arg-aux.rs index c71238ba072..a5f58d88fb1 100644 --- a/tests/ui/moves/auxiliary/suggest-borrow-for-generic-arg-aux.rs +++ b/tests/ui/moves/auxiliary/suggest-borrow-for-generic-arg-aux.rs @@ -1,4 +1,4 @@ -//! auxiliary definitons for suggest-borrow-for-generic-arg.rs, to ensure the suggestion works on +//! auxiliary definitions for suggest-borrow-for-generic-arg.rs, to ensure the suggestion works on //! functions defined in other crates. use std::io::{self, Read, Write}; diff --git a/tests/ui/moves/suggest-borrow-for-generic-arg.fixed b/tests/ui/moves/suggest-borrow-for-generic-arg.fixed index b5e0b468aa6..e47a2d08b61 100644 --- a/tests/ui/moves/suggest-borrow-for-generic-arg.fixed +++ b/tests/ui/moves/suggest-borrow-for-generic-arg.fixed @@ -1,5 +1,5 @@ -//! Test suggetions to borrow generic arguments instead of moving. Tests for other instances of this -//! can be found in `moved-value-on-as-ref-arg.rs` and `borrow-closures-instead-of-move.rs` +//! Test suggestions to borrow generic arguments instead of moving. Tests for other instances of +//! 
this can be found in `moved-value-on-as-ref-arg.rs` and `borrow-closures-instead-of-move.rs` //@ run-rustfix //@ aux-crate:aux=suggest-borrow-for-generic-arg-aux.rs //@ edition: 2021 diff --git a/tests/ui/moves/suggest-borrow-for-generic-arg.rs b/tests/ui/moves/suggest-borrow-for-generic-arg.rs index e08978db63a..5895999c51d 100644 --- a/tests/ui/moves/suggest-borrow-for-generic-arg.rs +++ b/tests/ui/moves/suggest-borrow-for-generic-arg.rs @@ -1,5 +1,5 @@ -//! Test suggetions to borrow generic arguments instead of moving. Tests for other instances of this -//! can be found in `moved-value-on-as-ref-arg.rs` and `borrow-closures-instead-of-move.rs` +//! Test suggestions to borrow generic arguments instead of moving. Tests for other instances of +//! this can be found in `moved-value-on-as-ref-arg.rs` and `borrow-closures-instead-of-move.rs` //@ run-rustfix //@ aux-crate:aux=suggest-borrow-for-generic-arg-aux.rs //@ edition: 2021 diff --git a/tests/ui/panics/default-backtrace-ice.rs b/tests/ui/panics/default-backtrace-ice.rs index 718d1da5bb7..7953283f028 100644 --- a/tests/ui/panics/default-backtrace-ice.rs +++ b/tests/ui/panics/default-backtrace-ice.rs @@ -1,6 +1,8 @@ //@ unset-rustc-env:RUST_BACKTRACE //@ compile-flags:-Z treat-err-as-bug=1 //@ error-pattern:stack backtrace: +// Verify this is a full backtrace, not a short backtrace. +//@ error-pattern:__rust_begin_short_backtrace //@ failure-status:101 //@ ignore-msvc //@ normalize-stderr-test: "note: .*" -> "" diff --git a/tests/ui/panics/default-backtrace-ice.stderr b/tests/ui/panics/default-backtrace-ice.stderr index 23b863568bc..046b2cca7f9 100644 --- a/tests/ui/panics/default-backtrace-ice.stderr +++ b/tests/ui/panics/default-backtrace-ice.stderr @@ -1,5 +1,5 @@ error: internal compiler error[E0425]: cannot find value `missing_ident` in this scope - --> $DIR/default-backtrace-ice.rs:21:13 + --> $DIR/default-backtrace-ice.rs:23:13 | LL | fn main() { missing_ident; } | ^^^^^^^^^^^^^ not found in this scope diff --git a/tests/ui/parser/recover/missing-dot-on-statement-expression.fixed b/tests/ui/parser/recover/missing-dot-on-statement-expression.fixed new file mode 100644 index 00000000000..1be4485b474 --- /dev/null +++ b/tests/ui/parser/recover/missing-dot-on-statement-expression.fixed @@ -0,0 +1,28 @@ +//@ run-rustfix +#![allow(unused_must_use, dead_code)] +struct S { + field: (), +} +fn main() { + let _ = [1, 2, 3].iter().map(|x| x); //~ ERROR expected one of `.`, `;`, `?`, `else`, or an operator, found `map` + //~^ HELP you might have meant to write a method call +} +fn foo() { + let baz = S { + field: () + }; + let _ = baz.field; //~ ERROR expected one of `!`, `.`, `::`, `;`, `?`, `else`, `{`, or an operator, found `field` + //~^ HELP you might have meant to write a field +} + +fn bar() { + [1, 2, 3].iter().map(|x| x); //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `map` + //~^ HELP you might have meant to write a method call +} +fn baz() { + let baz = S { + field: () + }; + baz.field; //~ ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `field` + //~^ HELP you might have meant to write a field +} diff --git a/tests/ui/parser/recover/missing-dot-on-statement-expression.rs b/tests/ui/parser/recover/missing-dot-on-statement-expression.rs new file mode 100644 index 00000000000..5e2b545f414 --- /dev/null +++ b/tests/ui/parser/recover/missing-dot-on-statement-expression.rs @@ -0,0 +1,28 @@ +//@ run-rustfix +#![allow(unused_must_use, dead_code)] +struct S { + field: (), +} +fn main() { 
+ let _ = [1, 2, 3].iter()map(|x| x); //~ ERROR expected one of `.`, `;`, `?`, `else`, or an operator, found `map` + //~^ HELP you might have meant to write a method call +} +fn foo() { + let baz = S { + field: () + }; + let _ = baz field; //~ ERROR expected one of `!`, `.`, `::`, `;`, `?`, `else`, `{`, or an operator, found `field` + //~^ HELP you might have meant to write a field +} + +fn bar() { + [1, 2, 3].iter()map(|x| x); //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `map` + //~^ HELP you might have meant to write a method call +} +fn baz() { + let baz = S { + field: () + }; + baz field; //~ ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `field` + //~^ HELP you might have meant to write a field +} diff --git a/tests/ui/parser/recover/missing-dot-on-statement-expression.stderr b/tests/ui/parser/recover/missing-dot-on-statement-expression.stderr new file mode 100644 index 00000000000..a04d8bd34e2 --- /dev/null +++ b/tests/ui/parser/recover/missing-dot-on-statement-expression.stderr @@ -0,0 +1,46 @@ +error: expected one of `.`, `;`, `?`, `else`, or an operator, found `map` + --> $DIR/missing-dot-on-statement-expression.rs:7:29 + | +LL | let _ = [1, 2, 3].iter()map(|x| x); + | ^^^ expected one of `.`, `;`, `?`, `else`, or an operator + | +help: you might have meant to write a method call + | +LL | let _ = [1, 2, 3].iter().map(|x| x); + | + + +error: expected one of `!`, `.`, `::`, `;`, `?`, `else`, `{`, or an operator, found `field` + --> $DIR/missing-dot-on-statement-expression.rs:14:17 + | +LL | let _ = baz field; + | ^^^^^ expected one of 8 possible tokens + | +help: you might have meant to write a field access + | +LL | let _ = baz.field; + | + + +error: expected one of `.`, `;`, `?`, `}`, or an operator, found `map` + --> $DIR/missing-dot-on-statement-expression.rs:19:21 + | +LL | [1, 2, 3].iter()map(|x| x); + | ^^^ expected one of `.`, `;`, `?`, `}`, or an operator + | +help: you might have meant to write a method call + | +LL | [1, 2, 3].iter().map(|x| x); + | + + +error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `field` + --> $DIR/missing-dot-on-statement-expression.rs:26:9 + | +LL | baz field; + | ^^^^^ expected one of 8 possible tokens + | +help: you might have meant to write a field access + | +LL | baz.field; + | + + +error: aborting due to 4 previous errors + diff --git a/tests/ui/reachable/expr_match.rs b/tests/ui/reachable/expr_match.rs index 2fd26b54e15..1bae061c984 100644 --- a/tests/ui/reachable/expr_match.rs +++ b/tests/ui/reachable/expr_match.rs @@ -21,9 +21,13 @@ fn d() { } fn e() { - // Here the compiler fails to figure out that the `println` is dead. 
- match () { () if return => (), () => return } + match () { + () if return => (), + //~^ ERROR unreachable expression + () => return, + } println!("I am dead"); + //~^ ERROR unreachable statement } fn f() { diff --git a/tests/ui/reachable/expr_match.stderr b/tests/ui/reachable/expr_match.stderr index d15208609cf..ae202a6e0c3 100644 --- a/tests/ui/reachable/expr_match.stderr +++ b/tests/ui/reachable/expr_match.stderr @@ -23,5 +23,27 @@ LL | println!("I am dead"); | = note: this error originates in the macro `println` (in Nightly builds, run with -Z macro-backtrace for more info) -error: aborting due to 2 previous errors +error: unreachable expression + --> $DIR/expr_match.rs:25:25 + | +LL | () if return => (), + | ------ ^^ unreachable expression + | | + | any code following this expression is unreachable + +error: unreachable statement + --> $DIR/expr_match.rs:29:5 + | +LL | / match () { +LL | | () if return => (), +LL | | +LL | | () => return, +LL | | } + | |_____- any code following this `match` expression is unreachable, as all arms diverge +LL | println!("I am dead"); + | ^^^^^^^^^^^^^^^^^^^^^ unreachable statement + | + = note: this error originates in the macro `println` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 4 previous errors diff --git a/tests/ui/regions/regions-assoc-type-in-supertrait-outlives-container.rs b/tests/ui/regions/regions-assoc-type-in-supertrait-outlives-container.rs index eb6e66818fc..3219f63f5df 100644 --- a/tests/ui/regions/regions-assoc-type-in-supertrait-outlives-container.rs +++ b/tests/ui/regions/regions-assoc-type-in-supertrait-outlives-container.rs @@ -1,7 +1,7 @@ // Test that we are imposing the requirement that every associated // type of a bound that appears in the where clause on a struct must // outlive the location in which the type appears, even when the -// associted type is in a supertype. Issue #22246. +// associated type is in a supertype. Issue #22246. 
#![allow(dead_code)] diff --git a/tests/ui/resolve/issue-39559-2.stderr b/tests/ui/resolve/issue-39559-2.stderr index ea27e7bd250..f6e6917d01e 100644 --- a/tests/ui/resolve/issue-39559-2.stderr +++ b/tests/ui/resolve/issue-39559-2.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `<Dim3 as Dim>::dim` in constants +error[E0015]: cannot call non-const associated function `<Dim3 as Dim>::dim` in constants --> $DIR/issue-39559-2.rs:14:24 | LL | let array: [usize; Dim3::dim()] @@ -6,7 +6,7 @@ LL | let array: [usize; Dim3::dim()] | = note: calls in constants are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `<Dim3 as Dim>::dim` in constants +error[E0015]: cannot call non-const associated function `<Dim3 as Dim>::dim` in constants --> $DIR/issue-39559-2.rs:16:15 | LL | = [0; Dim3::dim()]; diff --git a/tests/ui/static/static-mut-not-constant.rs b/tests/ui/static/static-mut-not-constant.rs index d501e5c2956..3830b468287 100644 --- a/tests/ui/static/static-mut-not-constant.rs +++ b/tests/ui/static/static-mut-not-constant.rs @@ -1,4 +1,4 @@ static mut a: Box<isize> = Box::new(3); -//~^ ERROR cannot call non-const fn +//~^ ERROR cannot call non-const associated function fn main() {} diff --git a/tests/ui/static/static-mut-not-constant.stderr b/tests/ui/static/static-mut-not-constant.stderr index 46dc175cb29..f28ea0b1689 100644 --- a/tests/ui/static/static-mut-not-constant.stderr +++ b/tests/ui/static/static-mut-not-constant.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `Box::<isize>::new` in statics +error[E0015]: cannot call non-const associated function `Box::<isize>::new` in statics --> $DIR/static-mut-not-constant.rs:1:28 | LL | static mut a: Box<isize> = Box::new(3); diff --git a/tests/ui/static/static-vec-repeat-not-constant.stderr b/tests/ui/static/static-vec-repeat-not-constant.stderr index a3b930323d5..e6ff199ae01 100644 --- a/tests/ui/static/static-vec-repeat-not-constant.stderr +++ b/tests/ui/static/static-vec-repeat-not-constant.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `foo` in statics +error[E0015]: cannot call non-const function `foo` in statics --> $DIR/static-vec-repeat-not-constant.rs:3:25 | LL | static a: [isize; 2] = [foo(); 2]; diff --git a/tests/ui/statics/check-values-constraints.rs b/tests/ui/statics/check-values-constraints.rs index 005a7798895..9df76b5c149 100644 --- a/tests/ui/statics/check-values-constraints.rs +++ b/tests/ui/statics/check-values-constraints.rs @@ -89,7 +89,7 @@ static mut STATIC13: SafeStruct = static mut STATIC14: SafeStruct = SafeStruct { field1: SafeEnum::Variant1, - field2: SafeEnum::Variant4("str".to_string()), //~ ERROR cannot call non-const fn + field2: SafeEnum::Variant4("str".to_string()), //~ ERROR cannot call non-const method }; static STATIC15: &'static [Vec<MyOwned>] = &[ diff --git a/tests/ui/statics/check-values-constraints.stderr b/tests/ui/statics/check-values-constraints.stderr index b4ee34530d3..eb2d37d297e 100644 --- a/tests/ui/statics/check-values-constraints.stderr +++ b/tests/ui/statics/check-values-constraints.stderr @@ -19,7 +19,7 @@ LL | static STATIC11: Vec<MyOwned> = vec![MyOwned]; | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics --> 
$DIR/check-values-constraints.rs:81:33 | LL | static STATIC11: Vec<MyOwned> = vec![MyOwned]; @@ -29,7 +29,7 @@ LL | static STATIC11: Vec<MyOwned> = vec![MyOwned]; = note: consider wrapping this expression in `std::sync::LazyLock::new(|| ...)` = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `<str as ToString>::to_string` in statics +error[E0015]: cannot call non-const method `<str as ToString>::to_string` in statics --> $DIR/check-values-constraints.rs:92:38 | LL | field2: SafeEnum::Variant4("str".to_string()), @@ -46,7 +46,7 @@ LL | vec![MyOwned], | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics --> $DIR/check-values-constraints.rs:96:5 | LL | vec![MyOwned], @@ -64,7 +64,7 @@ LL | vec![MyOwned], | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics --> $DIR/check-values-constraints.rs:98:5 | LL | vec![MyOwned], @@ -82,7 +82,7 @@ LL | &vec![MyOwned], | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics --> $DIR/check-values-constraints.rs:103:6 | LL | &vec![MyOwned], @@ -100,7 +100,7 @@ LL | &vec![MyOwned], | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [MyOwned]>::into_vec::<std::alloc::Global>` in statics --> $DIR/check-values-constraints.rs:105:6 | LL | &vec![MyOwned], @@ -118,7 +118,7 @@ LL | static STATIC19: Vec<isize> = vec![3]; | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [isize]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [isize]>::into_vec::<std::alloc::Global>` in statics --> $DIR/check-values-constraints.rs:111:31 | LL | static STATIC19: Vec<isize> = vec![3]; @@ -136,7 +136,7 @@ LL | static x: Vec<isize> = vec![3]; | = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `slice::<impl [isize]>::into_vec::<std::alloc::Global>` in statics +error[E0015]: cannot call non-const method `slice::<impl [isize]>::into_vec::<std::alloc::Global>` in statics --> $DIR/check-values-constraints.rs:117:32 | LL | static x: Vec<isize> = vec![3]; diff --git a/tests/ui/suggestions/type-ascription-and-other-error.stderr b/tests/ui/suggestions/type-ascription-and-other-error.stderr index 4efddca4b47..7f8b8b7470e 100644 --- 
a/tests/ui/suggestions/type-ascription-and-other-error.stderr +++ b/tests/ui/suggestions/type-ascription-and-other-error.stderr @@ -3,6 +3,11 @@ error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found | LL | not rust; | ^^^^ expected one of 8 possible tokens + | +help: you might have meant to write a field access + | +LL | not.rust; + | + error: aborting due to 1 previous error diff --git a/tests/ui/suggestions/wrap-dyn-in-suggestion-issue-120223.stderr b/tests/ui/suggestions/wrap-dyn-in-suggestion-issue-120223.stderr index f7fc17ea24f..1fb3e7d211e 100644 --- a/tests/ui/suggestions/wrap-dyn-in-suggestion-issue-120223.stderr +++ b/tests/ui/suggestions/wrap-dyn-in-suggestion-issue-120223.stderr @@ -58,14 +58,14 @@ help: consider adding an explicit lifetime bound LL | executor: impl FnOnce(T) -> (dyn Future<Output = ()>) + 'static, | + +++++++++++ -error[E0310]: the parameter type `impl FnOnce(T) -> Future<Output = ()>` may not live long enough +error[E0310]: the parameter type `impl FnOnce(T) -> dyn* Future<Output = ()>` may not live long enough --> $DIR/wrap-dyn-in-suggestion-issue-120223.rs:14:5 | LL | Box::new(executor) | ^^^^^^^^^^^^^^^^^^ | | - | the parameter type `impl FnOnce(T) -> Future<Output = ()>` must be valid for the static lifetime... - | ...so that the type `impl FnOnce(T) -> Future<Output = ()>` will meet its required lifetime bounds + | the parameter type `impl FnOnce(T) -> dyn* Future<Output = ()>` must be valid for the static lifetime... + | ...so that the type `impl FnOnce(T) -> dyn* Future<Output = ()>` will meet its required lifetime bounds | help: consider adding an explicit lifetime bound | diff --git a/tests/ui/symbol-names/basic.legacy.stderr b/tests/ui/symbol-names/basic.legacy.stderr index 6ce0ae09195..2f26c0cf0d3 100644 --- a/tests/ui/symbol-names/basic.legacy.stderr +++ b/tests/ui/symbol-names/basic.legacy.stderr @@ -1,10 +1,10 @@ -error: symbol-name(_ZN5basic4main17had874e876c8b1028E) +error: symbol-name(_ZN5basic4main17h144191e1523a280eE) --> $DIR/basic.rs:8:1 | LL | #[rustc_symbol_name] | ^^^^^^^^^^^^^^^^^^^^ -error: demangling(basic::main::had874e876c8b1028) +error: demangling(basic::main::h144191e1523a280e) --> $DIR/basic.rs:8:1 | LL | #[rustc_symbol_name] diff --git a/tests/ui/symbol-names/issue-60925.legacy.stderr b/tests/ui/symbol-names/issue-60925.legacy.stderr index cc4eec470fb..cc79cc8b516 100644 --- a/tests/ui/symbol-names/issue-60925.legacy.stderr +++ b/tests/ui/symbol-names/issue-60925.legacy.stderr @@ -1,10 +1,10 @@ -error: symbol-name(_ZN11issue_609253foo37Foo$LT$issue_60925..llv$u6d$..Foo$GT$3foo17haf0d0ad2255e29c6E) +error: symbol-name(_ZN11issue_609253foo37Foo$LT$issue_60925..llv$u6d$..Foo$GT$3foo17h71f988fda3b6b180E) --> $DIR/issue-60925.rs:21:9 | LL | #[rustc_symbol_name] | ^^^^^^^^^^^^^^^^^^^^ -error: demangling(issue_60925::foo::Foo<issue_60925::llvm::Foo>::foo::haf0d0ad2255e29c6) +error: demangling(issue_60925::foo::Foo<issue_60925::llvm::Foo>::foo::h71f988fda3b6b180) --> $DIR/issue-60925.rs:21:9 | LL | #[rustc_symbol_name] diff --git a/tests/ui/tool-attributes/unknown-lint-tool-name.rs b/tests/ui/tool-attributes/unknown-lint-tool-name.rs index cd5d2f028af..59fc56d820e 100644 --- a/tests/ui/tool-attributes/unknown-lint-tool-name.rs +++ b/tests/ui/tool-attributes/unknown-lint-tool-name.rs @@ -1,5 +1,6 @@ #![deny(foo::bar)] //~ ERROR unknown tool name `foo` found in scoped lint: `foo::bar` //~| ERROR unknown tool name `foo` found in scoped lint: `foo::bar` + //~| ERROR unknown tool name `foo` found in scoped lint: 
`foo::bar` #[allow(foo::bar)] //~ ERROR unknown tool name `foo` found in scoped lint: `foo::bar` //~| ERROR unknown tool name `foo` found in scoped lint: `foo::bar` diff --git a/tests/ui/tool-attributes/unknown-lint-tool-name.stderr b/tests/ui/tool-attributes/unknown-lint-tool-name.stderr index 72731ab1e3d..5d99777a14a 100644 --- a/tests/ui/tool-attributes/unknown-lint-tool-name.stderr +++ b/tests/ui/tool-attributes/unknown-lint-tool-name.stderr @@ -7,7 +7,7 @@ LL | #![deny(foo::bar)] = help: add `#![register_tool(foo)]` to the crate root error[E0710]: unknown tool name `foo` found in scoped lint: `foo::bar` - --> $DIR/unknown-lint-tool-name.rs:4:9 + --> $DIR/unknown-lint-tool-name.rs:5:9 | LL | #[allow(foo::bar)] | ^^^ @@ -24,7 +24,7 @@ LL | #![deny(foo::bar)] = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error[E0710]: unknown tool name `foo` found in scoped lint: `foo::bar` - --> $DIR/unknown-lint-tool-name.rs:4:9 + --> $DIR/unknown-lint-tool-name.rs:5:9 | LL | #[allow(foo::bar)] | ^^^ @@ -32,6 +32,15 @@ LL | #[allow(foo::bar)] = help: add `#![register_tool(foo)]` to the crate root = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 4 previous errors +error[E0710]: unknown tool name `foo` found in scoped lint: `foo::bar` + --> $DIR/unknown-lint-tool-name.rs:1:9 + | +LL | #![deny(foo::bar)] + | ^^^ + | + = help: add `#![register_tool(foo)]` to the crate root + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 5 previous errors For more information about this error, try `rustc --explain E0710`. diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-0.rs b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-0.rs index bac7ee023f4..9141d327aee 100644 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-0.rs +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-0.rs @@ -1,4 +1,5 @@ -//@ compile-flags: -Znext-solver +//@ revisions: current next +//@[next] compile-flags: -Znext-solver //@ check-pass #![feature(const_trait_impl)] diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.rs b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.rs index a0375cda079..19e86b50d33 100644 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.rs +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.rs @@ -1,5 +1,4 @@ -//@ compile-flags: -Znext-solver -//@ known-bug: unknown +//@ check-pass #![feature(const_trait_impl, generic_const_exprs)] #![allow(incomplete_features)] diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.stderr b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.stderr deleted file mode 100644 index 8d1c85c0c8a..00000000000 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-1.stderr +++ /dev/null @@ -1,35 +0,0 @@ -error: `-Znext-solver=globally` and `generic_const_exprs` are incompatible, using them at the same time is not allowed - --> $DIR/assoc-type-const-bound-usage-1.rs:4:30 - | -LL | #![feature(const_trait_impl, generic_const_exprs)] - | ^^^^^^^^^^^^^^^^^^^ - | - = help: remove one of these features - -error[E0284]: type annotations needed: cannot normalize `unqualified<T>::{constant#0}` - --> $DIR/assoc-type-const-bound-usage-1.rs:15:37 - | -LL | fn unqualified<T: const Trait>() -> Type<{ T::Assoc::func() }> { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot normalize `unqualified<T>::{constant#0}` - -error[E0284]: type 
annotations needed: cannot normalize `qualified<T>::{constant#0}` - --> $DIR/assoc-type-const-bound-usage-1.rs:19:35 - | -LL | fn qualified<T: const Trait>() -> Type<{ <T as Trait>::Assoc::func() }> { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot normalize `qualified<T>::{constant#0}` - -error[E0284]: type annotations needed: cannot normalize `unqualified<T>::{constant#0}` - --> $DIR/assoc-type-const-bound-usage-1.rs:16:5 - | -LL | Type - | ^^^^ cannot normalize `unqualified<T>::{constant#0}` - -error[E0284]: type annotations needed: cannot normalize `qualified<T>::{constant#0}` - --> $DIR/assoc-type-const-bound-usage-1.rs:20:5 - | -LL | Type - | ^^^^ cannot normalize `qualified<T>::{constant#0}` - -error: aborting due to 5 previous errors - -For more information about this error, try `rustc --explain E0284`. diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.current.stderr b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.current.stderr new file mode 100644 index 00000000000..03da9159bea --- /dev/null +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.current.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `U: ~const Other` is not satisfied + --> $DIR/assoc-type-const-bound-usage-fail-2.rs:24:5 + | +LL | T::Assoc::<U>::func(); + | ^^^^^^^^^^^^^ + +error[E0277]: the trait bound `U: ~const Other` is not satisfied + --> $DIR/assoc-type-const-bound-usage-fail-2.rs:27:5 + | +LL | <T as Trait>::Assoc::<U>::func(); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.stderr b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.next.stderr index c7af0a220ca..ce58b486a16 100644 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.stderr +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.next.stderr @@ -1,11 +1,11 @@ error[E0277]: the trait bound `<T as Trait>::Assoc<U>: ~const Trait` is not satisfied - --> $DIR/assoc-type-const-bound-usage-fail-2.rs:23:5 + --> $DIR/assoc-type-const-bound-usage-fail-2.rs:24:5 | LL | T::Assoc::<U>::func(); | ^^^^^^^^^^^^^ error[E0277]: the trait bound `<T as Trait>::Assoc<U>: ~const Trait` is not satisfied - --> $DIR/assoc-type-const-bound-usage-fail-2.rs:25:5 + --> $DIR/assoc-type-const-bound-usage-fail-2.rs:27:5 | LL | <T as Trait>::Assoc::<U>::func(); | ^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.rs b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.rs index b3a636b0f71..bdd98eaf541 100644 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.rs +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail-2.rs @@ -1,4 +1,5 @@ -//@ compile-flags: -Znext-solver +//@ revisions: current next +//@[next] compile-flags: -Znext-solver // Check that `~const` item bounds only hold if the where clauses on the // associated type are also const. 
@@ -21,9 +22,11 @@ trait Other {} const fn fails<T: ~const Trait, U: Other>() { T::Assoc::<U>::func(); - //~^ ERROR the trait bound `<T as Trait>::Assoc<U>: ~const Trait` is not satisfied + //[current]~^ ERROR the trait bound `U: ~const Other` is not satisfied + //[next]~^^ ERROR the trait bound `<T as Trait>::Assoc<U>: ~const Trait` is not satisfied <T as Trait>::Assoc::<U>::func(); - //~^ ERROR the trait bound `<T as Trait>::Assoc<U>: ~const Trait` is not satisfied + //[current]~^ ERROR the trait bound `U: ~const Other` is not satisfied + //[next]~^^ ERROR the trait bound `<T as Trait>::Assoc<U>: ~const Trait` is not satisfied } const fn works<T: ~const Trait, U: ~const Other>() { diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.stderr b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.current.stderr index 99fc924ad06..9c29a894749 100644 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.stderr +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.current.stderr @@ -1,11 +1,11 @@ error[E0277]: the trait bound `T: ~const Trait` is not satisfied - --> $DIR/assoc-type-const-bound-usage-fail.rs:16:5 + --> $DIR/assoc-type-const-bound-usage-fail.rs:17:5 | LL | T::Assoc::func(); | ^^^^^^^^ error[E0277]: the trait bound `T: ~const Trait` is not satisfied - --> $DIR/assoc-type-const-bound-usage-fail.rs:18:5 + --> $DIR/assoc-type-const-bound-usage-fail.rs:19:5 | LL | <T as Trait>::Assoc::func(); | ^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.next.stderr b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.next.stderr new file mode 100644 index 00000000000..9c29a894749 --- /dev/null +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.next.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `T: ~const Trait` is not satisfied + --> $DIR/assoc-type-const-bound-usage-fail.rs:17:5 + | +LL | T::Assoc::func(); + | ^^^^^^^^ + +error[E0277]: the trait bound `T: ~const Trait` is not satisfied + --> $DIR/assoc-type-const-bound-usage-fail.rs:19:5 + | +LL | <T as Trait>::Assoc::func(); + | ^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.rs b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.rs index ce01086f0dc..3761fea1968 100644 --- a/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.rs +++ b/tests/ui/traits/const-traits/assoc-type-const-bound-usage-fail.rs @@ -1,4 +1,5 @@ -//@ compile-flags: -Znext-solver +//@ revisions: current next +//@[next] compile-flags: -Znext-solver // Check that `~const` item bounds only hold if the parent trait is `~const`. // i.e. 
check that we validate the const conditions for the associated type diff --git a/tests/ui/traits/const-traits/assoc-type.stderr b/tests/ui/traits/const-traits/assoc-type.current.stderr index b318675b612..4bf9acfbd65 100644 --- a/tests/ui/traits/const-traits/assoc-type.stderr +++ b/tests/ui/traits/const-traits/assoc-type.current.stderr @@ -1,11 +1,11 @@ error[E0277]: the trait bound `NonConstAdd: ~const Add` is not satisfied - --> $DIR/assoc-type.rs:36:16 + --> $DIR/assoc-type.rs:37:16 | LL | type Bar = NonConstAdd; | ^^^^^^^^^^^ | note: required by a bound in `Foo::Bar` - --> $DIR/assoc-type.rs:32:15 + --> $DIR/assoc-type.rs:33:15 | LL | type Bar: ~const Add; | ^^^^^^ required by this bound in `Foo::Bar` diff --git a/tests/ui/traits/const-traits/assoc-type.next.stderr b/tests/ui/traits/const-traits/assoc-type.next.stderr new file mode 100644 index 00000000000..4bf9acfbd65 --- /dev/null +++ b/tests/ui/traits/const-traits/assoc-type.next.stderr @@ -0,0 +1,15 @@ +error[E0277]: the trait bound `NonConstAdd: ~const Add` is not satisfied + --> $DIR/assoc-type.rs:37:16 + | +LL | type Bar = NonConstAdd; + | ^^^^^^^^^^^ + | +note: required by a bound in `Foo::Bar` + --> $DIR/assoc-type.rs:33:15 + | +LL | type Bar: ~const Add; + | ^^^^^^ required by this bound in `Foo::Bar` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/traits/const-traits/assoc-type.rs b/tests/ui/traits/const-traits/assoc-type.rs index 32c91fa51f1..a169b61994c 100644 --- a/tests/ui/traits/const-traits/assoc-type.rs +++ b/tests/ui/traits/const-traits/assoc-type.rs @@ -1,4 +1,5 @@ -//@ compile-flags: -Znext-solver +//@ revisions: current next +//@[next] compile-flags: -Znext-solver #![feature(const_trait_impl)] diff --git a/tests/ui/traits/const-traits/effects/auxiliary/minicore.rs b/tests/ui/traits/const-traits/auxiliary/minicore.rs index e606d896e93..e606d896e93 100644 --- a/tests/ui/traits/const-traits/effects/auxiliary/minicore.rs +++ b/tests/ui/traits/const-traits/auxiliary/minicore.rs diff --git a/tests/ui/traits/const-traits/call-const-trait-method-pass.stderr b/tests/ui/traits/const-traits/call-const-trait-method-pass.stderr index ef494bde98c..7746f103ac3 100644 --- a/tests/ui/traits/const-traits/call-const-trait-method-pass.stderr +++ b/tests/ui/traits/const-traits/call-const-trait-method-pass.stderr @@ -7,7 +7,7 @@ LL | impl const PartialEq for Int { = note: marking a trait with `#[const_trait]` ensures all default method bodies are `const` = note: adding a non-const method body in the future would be a breaking change -error[E0015]: cannot call non-const fn `<Int as PartialEq>::eq` in constant functions +error[E0015]: cannot call non-const method `<Int as PartialEq>::eq` in constant functions --> $DIR/call-const-trait-method-pass.rs:20:15 | LL | !self.eq(other) diff --git a/tests/ui/traits/const-traits/call-generic-in-impl.stderr b/tests/ui/traits/const-traits/call-generic-in-impl.stderr index 58d0997f5a3..a45dfd95b4a 100644 --- a/tests/ui/traits/const-traits/call-generic-in-impl.stderr +++ b/tests/ui/traits/const-traits/call-generic-in-impl.stderr @@ -17,7 +17,7 @@ note: `PartialEq` can't be used with `~const` because it isn't annotated with `# --> $SRC_DIR/core/src/cmp.rs:LL:COL = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error[E0015]: cannot call non-const fn `<T as PartialEq>::eq` in constant functions +error[E0015]: cannot call non-const method `<T as PartialEq>::eq` in constant functions --> 
$DIR/call-generic-in-impl.rs:12:9 | LL | PartialEq::eq(self, other) diff --git a/tests/ui/traits/const-traits/call-generic-method-chain.stderr b/tests/ui/traits/const-traits/call-generic-method-chain.stderr index d7a2a186494..40b4f14733f 100644 --- a/tests/ui/traits/const-traits/call-generic-method-chain.stderr +++ b/tests/ui/traits/const-traits/call-generic-method-chain.stderr @@ -53,7 +53,7 @@ LL | *t == *t | = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `<S as PartialEq>::eq` in constant functions +error[E0015]: cannot call non-const method `<S as PartialEq>::eq` in constant functions --> $DIR/call-generic-method-chain.rs:16:15 | LL | !self.eq(other) diff --git a/tests/ui/traits/const-traits/call-generic-method-dup-bound.stderr b/tests/ui/traits/const-traits/call-generic-method-dup-bound.stderr index 90465d0a5b2..c74f5cf786c 100644 --- a/tests/ui/traits/const-traits/call-generic-method-dup-bound.stderr +++ b/tests/ui/traits/const-traits/call-generic-method-dup-bound.stderr @@ -53,7 +53,7 @@ LL | *t == *t | = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `<S as PartialEq>::eq` in constant functions +error[E0015]: cannot call non-const method `<S as PartialEq>::eq` in constant functions --> $DIR/call-generic-method-dup-bound.rs:14:15 | LL | !self.eq(other) diff --git a/tests/ui/traits/const-traits/call-generic-method-pass.stderr b/tests/ui/traits/const-traits/call-generic-method-pass.stderr index a7626a4e99d..1a33ff5ab45 100644 --- a/tests/ui/traits/const-traits/call-generic-method-pass.stderr +++ b/tests/ui/traits/const-traits/call-generic-method-pass.stderr @@ -34,7 +34,7 @@ LL | *t == *t | = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `<S as PartialEq>::eq` in constant functions +error[E0015]: cannot call non-const method `<S as PartialEq>::eq` in constant functions --> $DIR/call-generic-method-pass.rs:16:15 | LL | !self.eq(other) diff --git a/tests/ui/traits/const-traits/const-check-fns-in-const-impl.rs b/tests/ui/traits/const-traits/const-check-fns-in-const-impl.rs index 0330ed5ca8b..f7686ea6139 100644 --- a/tests/ui/traits/const-traits/const-check-fns-in-const-impl.rs +++ b/tests/ui/traits/const-traits/const-check-fns-in-const-impl.rs @@ -12,7 +12,7 @@ fn non_const() {} impl const T for S { fn foo() { non_const() } - //~^ ERROR cannot call non-const fn + //~^ ERROR cannot call non-const function } fn main() {} diff --git a/tests/ui/traits/const-traits/const-check-fns-in-const-impl.stderr b/tests/ui/traits/const-traits/const-check-fns-in-const-impl.stderr index d111a9d5639..599a5503b0f 100644 --- a/tests/ui/traits/const-traits/const-check-fns-in-const-impl.stderr +++ b/tests/ui/traits/const-traits/const-check-fns-in-const-impl.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `non_const` in constant functions +error[E0015]: cannot call non-const function `non_const` in constant functions --> $DIR/const-check-fns-in-const-impl.rs:14:16 | LL | fn foo() { non_const() } diff --git a/tests/ui/traits/const-traits/effects/const_closure-const_trait_impl-ice-113381.rs b/tests/ui/traits/const-traits/const_closure-const_trait_impl-ice-113381.rs index 877249135cd..92427039b43 100644 --- a/tests/ui/traits/const-traits/effects/const_closure-const_trait_impl-ice-113381.rs +++ 
b/tests/ui/traits/const-traits/const_closure-const_trait_impl-ice-113381.rs @@ -12,6 +12,6 @@ impl Foo for () { fn main() { (const || { (()).foo() })(); - //~^ ERROR: cannot call non-const fn `<() as Foo>::foo` in constant functions + //~^ ERROR: cannot call non-const method `<() as Foo>::foo` in constant functions // FIXME(const_trait_impl) this should probably say constant closures } diff --git a/tests/ui/traits/const-traits/effects/const_closure-const_trait_impl-ice-113381.stderr b/tests/ui/traits/const-traits/const_closure-const_trait_impl-ice-113381.stderr index 243e94087bb..c08642ba5a3 100644 --- a/tests/ui/traits/const-traits/effects/const_closure-const_trait_impl-ice-113381.stderr +++ b/tests/ui/traits/const-traits/const_closure-const_trait_impl-ice-113381.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `<() as Foo>::foo` in constant functions +error[E0015]: cannot call non-const method `<() as Foo>::foo` in constant functions --> $DIR/const_closure-const_trait_impl-ice-113381.rs:14:22 | LL | (const || { (()).foo() })(); diff --git a/tests/ui/traits/const-traits/const_derives/derive-const-non-const-type.stderr b/tests/ui/traits/const-traits/const_derives/derive-const-non-const-type.stderr index 8a6401afcf1..64564de2a0c 100644 --- a/tests/ui/traits/const-traits/const_derives/derive-const-non-const-type.stderr +++ b/tests/ui/traits/const-traits/const_derives/derive-const-non-const-type.stderr @@ -8,7 +8,7 @@ LL | #[derive_const(Default)] = note: adding a non-const method body in the future would be a breaking change = note: this error originates in the derive macro `Default` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `<A as Default>::default` in constant functions +error[E0015]: cannot call non-const associated function `<A as Default>::default` in constant functions --> $DIR/derive-const-non-const-type.rs:11:14 | LL | #[derive_const(Default)] diff --git a/tests/ui/traits/const-traits/const_derives/derive-const-use.stderr b/tests/ui/traits/const-traits/const_derives/derive-const-use.stderr index 3b06f4d801a..9ad3b0c1617 100644 --- a/tests/ui/traits/const-traits/const_derives/derive-const-use.stderr +++ b/tests/ui/traits/const-traits/const_derives/derive-const-use.stderr @@ -48,7 +48,7 @@ LL | #[derive_const(Default, PartialEq)] = note: adding a non-const method body in the future would be a breaking change = note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `<S as Default>::default` in constants +error[E0015]: cannot call non-const associated function `<S as Default>::default` in constants --> $DIR/derive-const-use.rs:18:35 | LL | const _: () = assert!(S((), A) == S::default()); @@ -64,7 +64,7 @@ LL | const _: () = assert!(S((), A) == S::default()); | = note: calls in constants are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `<() as Default>::default` in constant functions +error[E0015]: cannot call non-const associated function `<() as Default>::default` in constant functions --> $DIR/derive-const-use.rs:16:14 | LL | #[derive_const(Default, PartialEq)] @@ -75,7 +75,7 @@ LL | pub struct S((), A); = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants = note: this error originates in the derive macro `Default` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot 
call non-const fn `<A as Default>::default` in constant functions +error[E0015]: cannot call non-const associated function `<A as Default>::default` in constant functions --> $DIR/derive-const-use.rs:16:18 | LL | #[derive_const(Default, PartialEq)] diff --git a/tests/ui/traits/const-traits/effects/dont-prefer-param-env-for-infer-self-ty.rs b/tests/ui/traits/const-traits/dont-prefer-param-env-for-infer-self-ty.rs index 08dcd7d80b3..08dcd7d80b3 100644 --- a/tests/ui/traits/const-traits/effects/dont-prefer-param-env-for-infer-self-ty.rs +++ b/tests/ui/traits/const-traits/dont-prefer-param-env-for-infer-self-ty.rs diff --git a/tests/ui/traits/const-traits/effects/effect-param-infer.rs b/tests/ui/traits/const-traits/effect-param-infer.rs index fcacf458a9f..fcacf458a9f 100644 --- a/tests/ui/traits/const-traits/effects/effect-param-infer.rs +++ b/tests/ui/traits/const-traits/effect-param-infer.rs diff --git a/tests/ui/traits/const-traits/effects/auxiliary/cross-crate.rs b/tests/ui/traits/const-traits/effects/auxiliary/cross-crate.rs deleted file mode 100644 index e02bf6a4d2c..00000000000 --- a/tests/ui/traits/const-traits/effects/auxiliary/cross-crate.rs +++ /dev/null @@ -1,13 +0,0 @@ -//@ compile-flags: -Znext-solver -#![feature(const_trait_impl)] - -pub const fn foo() {} - -#[const_trait] -pub trait Bar { - fn bar(); -} - -impl Bar for () { - fn bar() {} -} diff --git a/tests/ui/traits/const-traits/effects/no-explicit-const-params-cross-crate.rs b/tests/ui/traits/const-traits/effects/no-explicit-const-params-cross-crate.rs deleted file mode 100644 index 97052a1d09a..00000000000 --- a/tests/ui/traits/const-traits/effects/no-explicit-const-params-cross-crate.rs +++ /dev/null @@ -1,18 +0,0 @@ -//@ aux-build: cross-crate.rs -extern crate cross_crate; - -use cross_crate::{Bar, foo}; - -fn main() { - foo::<true>(); - //~^ ERROR: function takes 0 generic arguments but 1 generic argument was supplied - <() as Bar<true>>::bar(); - //~^ ERROR: trait takes 0 generic arguments but 1 generic argument was supplied -} - -const FOO: () = { - foo::<false>(); - //~^ ERROR: function takes 0 generic arguments but 1 generic argument was supplied - <() as Bar<false>>::bar(); - //~^ ERROR: trait takes 0 generic arguments but 1 generic argument was supplied -}; diff --git a/tests/ui/traits/const-traits/effects/no-explicit-const-params-cross-crate.stderr b/tests/ui/traits/const-traits/effects/no-explicit-const-params-cross-crate.stderr deleted file mode 100644 index d1180dbd80e..00000000000 --- a/tests/ui/traits/const-traits/effects/no-explicit-const-params-cross-crate.stderr +++ /dev/null @@ -1,59 +0,0 @@ -error[E0107]: function takes 0 generic arguments but 1 generic argument was supplied - --> $DIR/no-explicit-const-params-cross-crate.rs:14:5 - | -LL | foo::<false>(); - | ^^^--------- help: remove the unnecessary generics - | | - | expected 0 generic arguments - | -note: function defined here, with 0 generic parameters - --> $DIR/auxiliary/cross-crate.rs:4:14 - | -LL | pub const fn foo() {} - | ^^^ - -error[E0107]: trait takes 0 generic arguments but 1 generic argument was supplied - --> $DIR/no-explicit-const-params-cross-crate.rs:16:12 - | -LL | <() as Bar<false>>::bar(); - | ^^^------- help: remove the unnecessary generics - | | - | expected 0 generic arguments - | -note: trait defined here, with 0 generic parameters - --> $DIR/auxiliary/cross-crate.rs:7:11 - | -LL | pub trait Bar { - | ^^^ - -error[E0107]: function takes 0 generic arguments but 1 generic argument was supplied - --> 
$DIR/no-explicit-const-params-cross-crate.rs:7:5 - | -LL | foo::<true>(); - | ^^^-------- help: remove the unnecessary generics - | | - | expected 0 generic arguments - | -note: function defined here, with 0 generic parameters - --> $DIR/auxiliary/cross-crate.rs:4:14 - | -LL | pub const fn foo() {} - | ^^^ - -error[E0107]: trait takes 0 generic arguments but 1 generic argument was supplied - --> $DIR/no-explicit-const-params-cross-crate.rs:9:12 - | -LL | <() as Bar<true>>::bar(); - | ^^^------ help: remove the unnecessary generics - | | - | expected 0 generic arguments - | -note: trait defined here, with 0 generic parameters - --> $DIR/auxiliary/cross-crate.rs:7:11 - | -LL | pub trait Bar { - | ^^^ - -error: aborting due to 4 previous errors - -For more information about this error, try `rustc --explain E0107`. diff --git a/tests/ui/traits/const-traits/effects/fallback.rs b/tests/ui/traits/const-traits/fallback.rs index 253d16f3251..253d16f3251 100644 --- a/tests/ui/traits/const-traits/effects/fallback.rs +++ b/tests/ui/traits/const-traits/fallback.rs diff --git a/tests/ui/traits/const-traits/effects/group-traits.rs b/tests/ui/traits/const-traits/group-traits.rs index 2c5b6cc40e6..2c5b6cc40e6 100644 --- a/tests/ui/traits/const-traits/effects/group-traits.rs +++ b/tests/ui/traits/const-traits/group-traits.rs diff --git a/tests/ui/traits/const-traits/effects/ice-112822-expected-type-for-param.rs b/tests/ui/traits/const-traits/ice-112822-expected-type-for-param.rs index 8ff15dd09cc..4cb013b9323 100644 --- a/tests/ui/traits/const-traits/effects/ice-112822-expected-type-for-param.rs +++ b/tests/ui/traits/const-traits/ice-112822-expected-type-for-param.rs @@ -10,7 +10,7 @@ const fn test() -> impl ~const Fn() { match sl { [first, remainder @ ..] => { assert_eq!(first, &b'f'); - //~^ ERROR cannot call non-const fn + //~^ ERROR cannot call non-const function //~| ERROR cannot call non-const operator } [] => panic!(), diff --git a/tests/ui/traits/const-traits/effects/ice-112822-expected-type-for-param.stderr b/tests/ui/traits/const-traits/ice-112822-expected-type-for-param.stderr index 280f8807f5f..8d9371bf9f6 100644 --- a/tests/ui/traits/const-traits/effects/ice-112822-expected-type-for-param.stderr +++ b/tests/ui/traits/const-traits/ice-112822-expected-type-for-param.stderr @@ -46,7 +46,7 @@ LL | assert_eq!(first, &b'f'); = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants = note: this error originates in the macro `assert_eq` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0015]: cannot call non-const fn `core::panicking::assert_failed::<&u8, &u8>` in constant functions +error[E0015]: cannot call non-const function `core::panicking::assert_failed::<&u8, &u8>` in constant functions --> $DIR/ice-112822-expected-type-for-param.rs:12:17 | LL | assert_eq!(first, &b'f'); diff --git a/tests/ui/traits/const-traits/effects/ice-113375-index-out-of-bounds-generics.rs b/tests/ui/traits/const-traits/ice-113375-index-out-of-bounds-generics.rs index f3cbaf847a9..f3cbaf847a9 100644 --- a/tests/ui/traits/const-traits/effects/ice-113375-index-out-of-bounds-generics.rs +++ b/tests/ui/traits/const-traits/ice-113375-index-out-of-bounds-generics.rs diff --git a/tests/ui/traits/const-traits/effects/infer-fallback.rs b/tests/ui/traits/const-traits/infer-fallback.rs index a7342d72a9c..a7342d72a9c 100644 --- a/tests/ui/traits/const-traits/effects/infer-fallback.rs +++ b/tests/ui/traits/const-traits/infer-fallback.rs diff --git 
a/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.rs b/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.rs index 86cb38e0bcf..da533d5e321 100644 --- a/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.rs +++ b/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.rs @@ -24,7 +24,7 @@ impl Trait for () { const fn foo() { ().foo(); - //~^ ERROR cannot call non-const fn `<() as Trait>::foo` in constant functions + //~^ ERROR cannot call non-const method `<() as Trait>::foo` in constant functions } const UWU: () = foo(); diff --git a/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.stderr b/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.stderr index c4547f4c43d..ad0829ff05f 100644 --- a/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.stderr +++ b/tests/ui/traits/const-traits/inline-incorrect-early-bound-in-ctfe.stderr @@ -7,7 +7,7 @@ LL | fn foo(self); LL | fn foo<T>(self) { | ^ found 1 type parameter -error[E0015]: cannot call non-const fn `<() as Trait>::foo` in constant functions +error[E0015]: cannot call non-const method `<() as Trait>::foo` in constant functions --> $DIR/inline-incorrect-early-bound-in-ctfe.rs:26:8 | LL | ().foo(); diff --git a/tests/ui/traits/const-traits/issue-79450.rs b/tests/ui/traits/const-traits/issue-79450.rs index 521576d27ef..5ba5036ce27 100644 --- a/tests/ui/traits/const-traits/issue-79450.rs +++ b/tests/ui/traits/const-traits/issue-79450.rs @@ -6,7 +6,7 @@ trait Tr { fn req(&self); fn prov(&self) { - println!("lul"); //~ ERROR: cannot call non-const fn `_print` in constant functions + println!("lul"); //~ ERROR: cannot call non-const function `_print` in constant functions self.req(); } } diff --git a/tests/ui/traits/const-traits/issue-79450.stderr b/tests/ui/traits/const-traits/issue-79450.stderr index 85996c21211..5bdebbbfb03 100644 --- a/tests/ui/traits/const-traits/issue-79450.stderr +++ b/tests/ui/traits/const-traits/issue-79450.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `_print` in constant functions +error[E0015]: cannot call non-const function `_print` in constant functions --> $DIR/issue-79450.rs:9:9 | LL | println!("lul"); diff --git a/tests/ui/traits/const-traits/issue-88155.rs b/tests/ui/traits/const-traits/issue-88155.rs index 08739de8313..a26128a6ecc 100644 --- a/tests/ui/traits/const-traits/issue-88155.rs +++ b/tests/ui/traits/const-traits/issue-88155.rs @@ -9,7 +9,7 @@ pub trait A { pub const fn foo<T: A>() -> bool { T::assoc() //FIXME ~^ ERROR the trait bound - //FIXME ~| ERROR cannot call non-const fn + //FIXME ~| ERROR cannot call non-const function } fn main() {} diff --git a/tests/ui/traits/const-traits/issue-88155.stderr b/tests/ui/traits/const-traits/issue-88155.stderr index 157b54214fa..2e140ac9ff6 100644 --- a/tests/ui/traits/const-traits/issue-88155.stderr +++ b/tests/ui/traits/const-traits/issue-88155.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `<T as A>::assoc` in constant functions +error[E0015]: cannot call non-const associated function `<T as A>::assoc` in constant functions --> $DIR/issue-88155.rs:10:5 | LL | T::assoc() diff --git a/tests/ui/traits/const-traits/effects/minicore-const-fn-early-bound.rs b/tests/ui/traits/const-traits/minicore-const-fn-early-bound.rs index ee47f92a0bc..ee47f92a0bc 100644 --- a/tests/ui/traits/const-traits/effects/minicore-const-fn-early-bound.rs +++ b/tests/ui/traits/const-traits/minicore-const-fn-early-bound.rs diff --git 
a/tests/ui/traits/const-traits/effects/minicore-deref-fail.rs b/tests/ui/traits/const-traits/minicore-deref-fail.rs index f4a7678a009..f4a7678a009 100644 --- a/tests/ui/traits/const-traits/effects/minicore-deref-fail.rs +++ b/tests/ui/traits/const-traits/minicore-deref-fail.rs diff --git a/tests/ui/traits/const-traits/effects/minicore-deref-fail.stderr b/tests/ui/traits/const-traits/minicore-deref-fail.stderr index a1f840114fc..a1f840114fc 100644 --- a/tests/ui/traits/const-traits/effects/minicore-deref-fail.stderr +++ b/tests/ui/traits/const-traits/minicore-deref-fail.stderr diff --git a/tests/ui/traits/const-traits/effects/minicore-drop-fail.rs b/tests/ui/traits/const-traits/minicore-drop-fail.rs index 274e5db21c4..274e5db21c4 100644 --- a/tests/ui/traits/const-traits/effects/minicore-drop-fail.rs +++ b/tests/ui/traits/const-traits/minicore-drop-fail.rs diff --git a/tests/ui/traits/const-traits/effects/minicore-drop-fail.stderr b/tests/ui/traits/const-traits/minicore-drop-fail.stderr index 12d1877a18a..12d1877a18a 100644 --- a/tests/ui/traits/const-traits/effects/minicore-drop-fail.stderr +++ b/tests/ui/traits/const-traits/minicore-drop-fail.stderr diff --git a/tests/ui/traits/const-traits/effects/minicore-drop-without-feature-gate.no.stderr b/tests/ui/traits/const-traits/minicore-drop-without-feature-gate.no.stderr index 218f3661e39..218f3661e39 100644 --- a/tests/ui/traits/const-traits/effects/minicore-drop-without-feature-gate.no.stderr +++ b/tests/ui/traits/const-traits/minicore-drop-without-feature-gate.no.stderr diff --git a/tests/ui/traits/const-traits/effects/minicore-drop-without-feature-gate.rs b/tests/ui/traits/const-traits/minicore-drop-without-feature-gate.rs index e75bf3db007..e75bf3db007 100644 --- a/tests/ui/traits/const-traits/effects/minicore-drop-without-feature-gate.rs +++ b/tests/ui/traits/const-traits/minicore-drop-without-feature-gate.rs diff --git a/tests/ui/traits/const-traits/effects/minicore-fn-fail.rs b/tests/ui/traits/const-traits/minicore-fn-fail.rs index ae1cbc6ca58..ae1cbc6ca58 100644 --- a/tests/ui/traits/const-traits/effects/minicore-fn-fail.rs +++ b/tests/ui/traits/const-traits/minicore-fn-fail.rs diff --git a/tests/ui/traits/const-traits/effects/minicore-fn-fail.stderr b/tests/ui/traits/const-traits/minicore-fn-fail.stderr index fa8be631a26..fa8be631a26 100644 --- a/tests/ui/traits/const-traits/effects/minicore-fn-fail.stderr +++ b/tests/ui/traits/const-traits/minicore-fn-fail.stderr diff --git a/tests/ui/traits/const-traits/effects/minicore-works.rs b/tests/ui/traits/const-traits/minicore-works.rs index c79b4fc07df..c79b4fc07df 100644 --- a/tests/ui/traits/const-traits/effects/minicore-works.rs +++ b/tests/ui/traits/const-traits/minicore-works.rs diff --git a/tests/ui/traits/const-traits/effects/mismatched_generic_args.rs b/tests/ui/traits/const-traits/mismatched_generic_args.rs index 21e91c731b3..21e91c731b3 100644 --- a/tests/ui/traits/const-traits/effects/mismatched_generic_args.rs +++ b/tests/ui/traits/const-traits/mismatched_generic_args.rs diff --git a/tests/ui/traits/const-traits/effects/mismatched_generic_args.stderr b/tests/ui/traits/const-traits/mismatched_generic_args.stderr index 8e12b40381f..8e12b40381f 100644 --- a/tests/ui/traits/const-traits/effects/mismatched_generic_args.stderr +++ b/tests/ui/traits/const-traits/mismatched_generic_args.stderr diff --git a/tests/ui/traits/const-traits/effects/no-explicit-const-params.rs b/tests/ui/traits/const-traits/no-explicit-const-params.rs index 76663292223..76663292223 100644 --- 
a/tests/ui/traits/const-traits/effects/no-explicit-const-params.rs +++ b/tests/ui/traits/const-traits/no-explicit-const-params.rs diff --git a/tests/ui/traits/const-traits/effects/no-explicit-const-params.stderr b/tests/ui/traits/const-traits/no-explicit-const-params.stderr index 9bd2c2cb8da..9bd2c2cb8da 100644 --- a/tests/ui/traits/const-traits/effects/no-explicit-const-params.stderr +++ b/tests/ui/traits/const-traits/no-explicit-const-params.stderr diff --git a/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.rs b/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.rs index cd8bb5963ad..fa0f7869644 100644 --- a/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.rs +++ b/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.rs @@ -11,5 +11,5 @@ impl Foo for () { fn main() { (const || { (()).foo() })(); - //~^ ERROR: cannot call non-const fn + //~^ ERROR: cannot call non-const method } diff --git a/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.stderr b/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.stderr index 97ad83130d4..2d33406c222 100644 --- a/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.stderr +++ b/tests/ui/traits/const-traits/non-const-op-const-closure-non-const-outer.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `<() as Foo>::foo` in constant functions +error[E0015]: cannot call non-const method `<() as Foo>::foo` in constant functions --> $DIR/non-const-op-const-closure-non-const-outer.rs:13:22 | LL | (const || { (()).foo() })(); diff --git a/tests/ui/traits/const-traits/non-const-op-in-closure-in-const.stderr b/tests/ui/traits/const-traits/non-const-op-in-closure-in-const.stderr index 4ddb1e8c5a9..190af5e7c2d 100644 --- a/tests/ui/traits/const-traits/non-const-op-in-closure-in-const.stderr +++ b/tests/ui/traits/const-traits/non-const-op-in-closure-in-const.stderr @@ -17,7 +17,7 @@ note: `From` can't be used with `~const` because it isn't annotated with `#[cons --> $SRC_DIR/core/src/convert/mod.rs:LL:COL = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error[E0015]: cannot call non-const fn `<B as From<A>>::from` in constant functions +error[E0015]: cannot call non-const associated function `<B as From<A>>::from` in constant functions --> $DIR/non-const-op-in-closure-in-const.rs:12:9 | LL | B::from(self) diff --git a/tests/ui/traits/const-traits/effects/project.rs b/tests/ui/traits/const-traits/project.rs index 139299753e5..139299753e5 100644 --- a/tests/ui/traits/const-traits/effects/project.rs +++ b/tests/ui/traits/const-traits/project.rs diff --git a/tests/ui/traits/const-traits/effects/span-bug-issue-121418.rs b/tests/ui/traits/const-traits/span-bug-issue-121418.rs index 50a7e12f2a7..50a7e12f2a7 100644 --- a/tests/ui/traits/const-traits/effects/span-bug-issue-121418.rs +++ b/tests/ui/traits/const-traits/span-bug-issue-121418.rs diff --git a/tests/ui/traits/const-traits/effects/span-bug-issue-121418.stderr b/tests/ui/traits/const-traits/span-bug-issue-121418.stderr index fe1e5e558b2..fe1e5e558b2 100644 --- a/tests/ui/traits/const-traits/effects/span-bug-issue-121418.stderr +++ b/tests/ui/traits/const-traits/span-bug-issue-121418.stderr diff --git a/tests/ui/traits/const-traits/effects/spec-effectvar-ice.rs b/tests/ui/traits/const-traits/spec-effectvar-ice.rs index c85b1746967..c85b1746967 100644 --- a/tests/ui/traits/const-traits/effects/spec-effectvar-ice.rs +++ 
b/tests/ui/traits/const-traits/spec-effectvar-ice.rs diff --git a/tests/ui/traits/const-traits/effects/spec-effectvar-ice.stderr b/tests/ui/traits/const-traits/spec-effectvar-ice.stderr index 474d96698d5..474d96698d5 100644 --- a/tests/ui/traits/const-traits/effects/spec-effectvar-ice.stderr +++ b/tests/ui/traits/const-traits/spec-effectvar-ice.stderr diff --git a/tests/ui/traits/const-traits/std-impl-gate.gated.stderr b/tests/ui/traits/const-traits/std-impl-gate.gated.stderr index f3b17130761..a78cf8ce61e 100644 --- a/tests/ui/traits/const-traits/std-impl-gate.gated.stderr +++ b/tests/ui/traits/const-traits/std-impl-gate.gated.stderr @@ -4,7 +4,7 @@ error[E0635]: unknown feature `const_default_impls` LL | #![cfg_attr(gated, feature(const_trait_impl, const_default_impls))] | ^^^^^^^^^^^^^^^^^^^ -error[E0015]: cannot call non-const fn `<Vec<usize> as Default>::default` in constant functions +error[E0015]: cannot call non-const associated function `<Vec<usize> as Default>::default` in constant functions --> $DIR/std-impl-gate.rs:13:5 | LL | Default::default() diff --git a/tests/ui/traits/const-traits/std-impl-gate.rs b/tests/ui/traits/const-traits/std-impl-gate.rs index a9e2ff06290..84091931997 100644 --- a/tests/ui/traits/const-traits/std-impl-gate.rs +++ b/tests/ui/traits/const-traits/std-impl-gate.rs @@ -11,7 +11,7 @@ fn non_const_context() -> Vec<usize> { const fn const_context() -> Vec<usize> { Default::default() - //[stock]~^ ERROR cannot call non-const fn + //[stock]~^ ERROR cannot call non-const associated function } fn main() { diff --git a/tests/ui/traits/const-traits/std-impl-gate.stock.stderr b/tests/ui/traits/const-traits/std-impl-gate.stock.stderr index 7240b5f4a94..8728f652ef9 100644 --- a/tests/ui/traits/const-traits/std-impl-gate.stock.stderr +++ b/tests/ui/traits/const-traits/std-impl-gate.stock.stderr @@ -1,4 +1,4 @@ -error[E0015]: cannot call non-const fn `<Vec<usize> as Default>::default` in constant functions +error[E0015]: cannot call non-const associated function `<Vec<usize> as Default>::default` in constant functions --> $DIR/std-impl-gate.rs:13:5 | LL | Default::default() diff --git a/tests/ui/traits/const-traits/super-traits-fail-2.nn.stderr b/tests/ui/traits/const-traits/super-traits-fail-2.nn.stderr index 51b88cf8702..8f88e3aa8bc 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-2.nn.stderr +++ b/tests/ui/traits/const-traits/super-traits-fail-2.nn.stderr @@ -45,7 +45,7 @@ help: mark `Foo` as `#[const_trait]` to allow it to have `const` implementations LL | #[const_trait] trait Foo { | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> $DIR/super-traits-fail-2.rs:20:7 | LL | x.a(); diff --git a/tests/ui/traits/const-traits/super-traits-fail-2.ny.stderr b/tests/ui/traits/const-traits/super-traits-fail-2.ny.stderr index 38fb6f05412..087e80de788 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-2.ny.stderr +++ b/tests/ui/traits/const-traits/super-traits-fail-2.ny.stderr @@ -57,7 +57,7 @@ help: mark `Foo` as `#[const_trait]` to allow it to have `const` implementations LL | #[const_trait] trait Foo { | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> $DIR/super-traits-fail-2.rs:20:7 | LL | x.a(); diff --git a/tests/ui/traits/const-traits/super-traits-fail-2.rs 
b/tests/ui/traits/const-traits/super-traits-fail-2.rs index 53a84bcdd1b..6cc9d739476 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-2.rs +++ b/tests/ui/traits/const-traits/super-traits-fail-2.rs @@ -19,7 +19,7 @@ trait Bar: ~const Foo {} const fn foo<T: Bar>(x: &T) { x.a(); //[yy,yn]~^ ERROR the trait bound `T: ~const Foo` - //[nn,ny]~^^ ERROR cannot call non-const fn `<T as Foo>::a` in constant functions + //[nn,ny]~^^ ERROR cannot call non-const method `<T as Foo>::a` in constant functions } fn main() {} diff --git a/tests/ui/traits/const-traits/super-traits-fail-3.nnn.stderr b/tests/ui/traits/const-traits/super-traits-fail-3.nnn.stderr index fd802fde5bd..a5ef716a62a 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-3.nnn.stderr +++ b/tests/ui/traits/const-traits/super-traits-fail-3.nnn.stderr @@ -88,7 +88,7 @@ help: enable `#![feature(const_trait_impl)]` in your crate and mark `Bar` as `#[ LL | #[const_trait] trait Bar: ~const Foo {} | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> $DIR/super-traits-fail-3.rs:36:7 | LL | x.a(); diff --git a/tests/ui/traits/const-traits/super-traits-fail-3.nny.stderr b/tests/ui/traits/const-traits/super-traits-fail-3.nny.stderr index fd802fde5bd..a5ef716a62a 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-3.nny.stderr +++ b/tests/ui/traits/const-traits/super-traits-fail-3.nny.stderr @@ -88,7 +88,7 @@ help: enable `#![feature(const_trait_impl)]` in your crate and mark `Bar` as `#[ LL | #[const_trait] trait Bar: ~const Foo {} | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> $DIR/super-traits-fail-3.rs:36:7 | LL | x.a(); diff --git a/tests/ui/traits/const-traits/super-traits-fail-3.rs b/tests/ui/traits/const-traits/super-traits-fail-3.rs index aa27554e7f8..d7e0cdc26ed 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-3.rs +++ b/tests/ui/traits/const-traits/super-traits-fail-3.rs @@ -35,7 +35,7 @@ const fn foo<T: ~const Bar>(x: &T) { //[nyy,nyn,nny,nnn]~^^^ ERROR: const trait impls are experimental x.a(); //[yyn]~^ ERROR: the trait bound `T: ~const Foo` is not satisfied - //[ynn,yny,nny,nnn]~^^ ERROR: cannot call non-const fn `<T as Foo>::a` in constant functions + //[ynn,yny,nny,nnn]~^^ ERROR: cannot call non-const method `<T as Foo>::a` in constant functions //[nyy,nyn]~^^^ ERROR: cannot call conditionally-const method `<T as Foo>::a` in constant functions } diff --git a/tests/ui/traits/const-traits/super-traits-fail-3.ynn.stderr b/tests/ui/traits/const-traits/super-traits-fail-3.ynn.stderr index 16424696eeb..f22bdd472e5 100644 --- a/tests/ui/traits/const-traits/super-traits-fail-3.ynn.stderr +++ b/tests/ui/traits/const-traits/super-traits-fail-3.ynn.stderr @@ -68,7 +68,7 @@ help: mark `Bar` as `#[const_trait]` to allow it to have `const` implementations LL | #[const_trait] trait Bar: ~const Foo {} | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> $DIR/super-traits-fail-3.rs:36:7 | LL | x.a(); diff --git a/tests/ui/traits/const-traits/super-traits-fail-3.yny.stderr b/tests/ui/traits/const-traits/super-traits-fail-3.yny.stderr index c81544c4bf5..14b50815b8e 100644 --- 
a/tests/ui/traits/const-traits/super-traits-fail-3.yny.stderr +++ b/tests/ui/traits/const-traits/super-traits-fail-3.yny.stderr @@ -57,7 +57,7 @@ help: mark `Foo` as `#[const_trait]` to allow it to have `const` implementations LL | #[const_trait] trait Foo { | ++++++++++++++ -error[E0015]: cannot call non-const fn `<T as Foo>::a` in constant functions +error[E0015]: cannot call non-const method `<T as Foo>::a` in constant functions --> $DIR/super-traits-fail-3.rs:36:7 | LL | x.a(); diff --git a/tests/ui/traits/const-traits/effects/trait-fn-const.rs b/tests/ui/traits/const-traits/trait-fn-const.rs index 07eac032a82..07eac032a82 100644 --- a/tests/ui/traits/const-traits/effects/trait-fn-const.rs +++ b/tests/ui/traits/const-traits/trait-fn-const.rs diff --git a/tests/ui/traits/const-traits/effects/trait-fn-const.stderr b/tests/ui/traits/const-traits/trait-fn-const.stderr index 4d0b03046d2..4d0b03046d2 100644 --- a/tests/ui/traits/const-traits/effects/trait-fn-const.stderr +++ b/tests/ui/traits/const-traits/trait-fn-const.stderr diff --git a/tests/ui/traits/issue-77982.rs b/tests/ui/traits/issue-77982.rs index 57d7899f6dd..dce25e62e46 100644 --- a/tests/ui/traits/issue-77982.rs +++ b/tests/ui/traits/issue-77982.rs @@ -1,4 +1,5 @@ //@ ignore-windows different list of satisfying impls +//@ ignore-arm different list of satisfying impls use std::collections::HashMap; fn what() { diff --git a/tests/ui/traits/issue-77982.stderr b/tests/ui/traits/issue-77982.stderr index 0f4b3c3c877..2b26a1b7ab1 100644 --- a/tests/ui/traits/issue-77982.stderr +++ b/tests/ui/traits/issue-77982.stderr @@ -1,5 +1,5 @@ error[E0283]: type annotations needed - --> $DIR/issue-77982.rs:9:10 + --> $DIR/issue-77982.rs:10:10 | LL | opts.get(opt.as_ref()); | ^^^ ------------ type must be known at this point @@ -18,7 +18,7 @@ LL | opts.get::<Q>(opt.as_ref()); | +++++ error[E0283]: type annotations needed - --> $DIR/issue-77982.rs:9:10 + --> $DIR/issue-77982.rs:10:10 | LL | opts.get(opt.as_ref()); | ^^^ ------ type must be known at this point @@ -36,7 +36,7 @@ LL | opts.get::<Q>(opt.as_ref()); | +++++ error[E0283]: type annotations needed - --> $DIR/issue-77982.rs:14:59 + --> $DIR/issue-77982.rs:15:59 | LL | let ips: Vec<_> = (0..100_000).map(|_| u32::from(0u32.into())).collect(); | --- ^^^^ @@ -56,13 +56,13 @@ LL | let ips: Vec<_> = (0..100_000).map(|_| u32::from(<u32 as Into<T>>::into | +++++++++++++++++++++++ ~ error[E0283]: type annotations needed for `Box<_>` - --> $DIR/issue-77982.rs:37:9 + --> $DIR/issue-77982.rs:38:9 | LL | let _ = ().foo(); | ^ --- type must be known at this point | note: multiple `impl`s satisfying `(): Foo<'_, _>` found - --> $DIR/issue-77982.rs:30:1 + --> $DIR/issue-77982.rs:31:1 | LL | impl Foo<'static, u32> for () {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -74,13 +74,13 @@ LL | let _: Box<T> = ().foo(); | ++++++++ error[E0283]: type annotations needed for `Box<_>` - --> $DIR/issue-77982.rs:41:9 + --> $DIR/issue-77982.rs:42:9 | LL | let _ = (&()).bar(); | ^ --- type must be known at this point | note: multiple `impl`s satisfying `&(): Bar<'_, _>` found - --> $DIR/issue-77982.rs:33:1 + --> $DIR/issue-77982.rs:34:1 | LL | impl<'a> Bar<'static, u32> for &'a () {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/traits/negative-impls/ambiguity-cause.negative_coherence.stderr b/tests/ui/traits/negative-impls/ambiguity-cause.negative_coherence.stderr new file mode 100644 index 00000000000..4ec3414a57b --- /dev/null +++ b/tests/ui/traits/negative-impls/ambiguity-cause.negative_coherence.stderr @@ 
-0,0 +1,14 @@ +error[E0119]: conflicting implementations of trait `MyTrait` for type `String` + --> $DIR/ambiguity-cause.rs:10:1 + | +LL | impl<T: Copy> MyTrait for T { } + | --------------------------- first implementation here +LL | +LL | impl MyTrait for String { } + | ^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `String` + | + = note: upstream crates may add a new impl of trait `std::marker::Copy` for type `std::string::String` in future versions + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0119`. diff --git a/tests/ui/traits/negative-impls/ambiguity-cause.rs b/tests/ui/traits/negative-impls/ambiguity-cause.rs new file mode 100644 index 00000000000..30a528c535d --- /dev/null +++ b/tests/ui/traits/negative-impls/ambiguity-cause.rs @@ -0,0 +1,13 @@ +//@ revisions: simple negative_coherence + +#![feature(negative_impls)] +#![cfg_attr(negative_coherence, feature(with_negative_coherence))] + +trait MyTrait {} + +impl<T: Copy> MyTrait for T { } + +impl MyTrait for String { } +//~^ ERROR conflicting implementations of trait `MyTrait` for type `String` + +fn main() {} diff --git a/tests/ui/traits/negative-impls/ambiguity-cause.simple.stderr b/tests/ui/traits/negative-impls/ambiguity-cause.simple.stderr new file mode 100644 index 00000000000..4ec3414a57b --- /dev/null +++ b/tests/ui/traits/negative-impls/ambiguity-cause.simple.stderr @@ -0,0 +1,14 @@ +error[E0119]: conflicting implementations of trait `MyTrait` for type `String` + --> $DIR/ambiguity-cause.rs:10:1 + | +LL | impl<T: Copy> MyTrait for T { } + | --------------------------- first implementation here +LL | +LL | impl MyTrait for String { } + | ^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `String` + | + = note: upstream crates may add a new impl of trait `std::marker::Copy` for type `std::string::String` in future versions + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0119`. diff --git a/tests/ui/traits/negative-impls/feature-gate-negative_impls.rs b/tests/ui/traits/negative-impls/feature-gate-negative_impls.rs index 683fd6db6f2..8d3f6ff6d78 100644 --- a/tests/ui/traits/negative-impls/feature-gate-negative_impls.rs +++ b/tests/ui/traits/negative-impls/feature-gate-negative_impls.rs @@ -1,3 +1,3 @@ trait MyTrait {} -impl !MyTrait for u32 {} //~ ERROR negative trait bounds are not yet fully implemented +impl !MyTrait for u32 {} //~ ERROR negative trait bounds are not fully implemented fn main() {} diff --git a/tests/ui/traits/negative-impls/feature-gate-negative_impls.stderr b/tests/ui/traits/negative-impls/feature-gate-negative_impls.stderr index f3dee114116..1777dfcc993 100644 --- a/tests/ui/traits/negative-impls/feature-gate-negative_impls.stderr +++ b/tests/ui/traits/negative-impls/feature-gate-negative_impls.stderr @@ -1,4 +1,4 @@ -error[E0658]: negative trait bounds are not yet fully implemented; use marker types for now +error[E0658]: negative trait bounds are not fully implemented; use marker types for now --> $DIR/feature-gate-negative_impls.rs:2:6 | LL | impl !MyTrait for u32 {} diff --git a/tests/ui/traits/non_lifetime_binders/basic.rs b/tests/ui/traits/non_lifetime_binders/basic.rs index 7e45b76434a..533891bf830 100644 --- a/tests/ui/traits/non_lifetime_binders/basic.rs +++ b/tests/ui/traits/non_lifetime_binders/basic.rs @@ -1,5 +1,5 @@ //@ check-pass -// Basic test that show's we can succesfully typeck a `for<T>` where clause. 
+// Basic test that show's we can successfully typeck a `for<T>` where clause. #![feature(non_lifetime_binders)] //~^ WARN the feature `non_lifetime_binders` is incomplete diff --git a/tests/ui/typeck/typeck_type_placeholder_item.stderr b/tests/ui/typeck/typeck_type_placeholder_item.stderr index 5e32d5c429e..e62ebae5fd2 100644 --- a/tests/ui/typeck/typeck_type_placeholder_item.stderr +++ b/tests/ui/typeck/typeck_type_placeholder_item.stderr @@ -668,7 +668,7 @@ error[E0121]: the placeholder `_` is not allowed within types on item signatures LL | type F: std::ops::Fn(_); | ^ not allowed in type signatures -error[E0015]: cannot call non-const fn `<std::ops::Range<i32> as Iterator>::filter::<{closure@$DIR/typeck_type_placeholder_item.rs:230:29: 230:32}>` in constants +error[E0015]: cannot call non-const method `<std::ops::Range<i32> as Iterator>::filter::<{closure@$DIR/typeck_type_placeholder_item.rs:230:29: 230:32}>` in constants --> $DIR/typeck_type_placeholder_item.rs:230:22 | LL | const _: _ = (1..10).filter(|x| x % 2 == 0).map(|x| x * x); @@ -676,7 +676,7 @@ LL | const _: _ = (1..10).filter(|x| x % 2 == 0).map(|x| x * x); | = note: calls in constants are limited to constant functions, tuple structs and tuple variants -error[E0015]: cannot call non-const fn `<Filter<std::ops::Range<i32>, {closure@$DIR/typeck_type_placeholder_item.rs:230:29: 230:32}> as Iterator>::map::<i32, {closure@$DIR/typeck_type_placeholder_item.rs:230:49: 230:52}>` in constants +error[E0015]: cannot call non-const method `<Filter<std::ops::Range<i32>, {closure@$DIR/typeck_type_placeholder_item.rs:230:29: 230:32}> as Iterator>::map::<i32, {closure@$DIR/typeck_type_placeholder_item.rs:230:49: 230:52}>` in constants --> $DIR/typeck_type_placeholder_item.rs:230:45 | LL | const _: _ = (1..10).filter(|x| x % 2 == 0).map(|x| x * x); diff --git a/tests/ui/unpretty/expanded-exhaustive.rs b/tests/ui/unpretty/expanded-exhaustive.rs index 98fe05cf7c8..e052627e71c 100644 --- a/tests/ui/unpretty/expanded-exhaustive.rs +++ b/tests/ui/unpretty/expanded-exhaustive.rs @@ -9,6 +9,7 @@ #![feature(const_trait_impl)] #![feature(decl_macro)] #![feature(deref_patterns)] +#![feature(dyn_star)] #![feature(explicit_tail_calls)] #![feature(gen_blocks)] #![feature(let_chains)] @@ -800,6 +801,7 @@ mod types { let _: dyn Send + 'static; let _: dyn 'static + Send; let _: dyn for<'a> Send; + let _: dyn* Send; } /// TyKind::ImplTrait diff --git a/tests/ui/unpretty/expanded-exhaustive.stdout b/tests/ui/unpretty/expanded-exhaustive.stdout index 452c06dd7e4..132d00cd8ed 100644 --- a/tests/ui/unpretty/expanded-exhaustive.stdout +++ b/tests/ui/unpretty/expanded-exhaustive.stdout @@ -10,6 +10,7 @@ #![feature(const_trait_impl)] #![feature(decl_macro)] #![feature(deref_patterns)] +#![feature(dyn_star)] #![feature(explicit_tail_calls)] #![feature(gen_blocks)] #![feature(let_chains)] @@ -647,6 +648,7 @@ mod types { let _: dyn Send + 'static; let _: dyn 'static + Send; let _: dyn for<'a> Send; + let _: dyn* Send; } /// TyKind::ImplTrait const fn ty_impl_trait() { diff --git a/tests/ui/unsafe-binders/expr.rs b/tests/ui/unsafe-binders/expr.rs index d8c4c2df2cd..0fe68751f0a 100644 --- a/tests/ui/unsafe-binders/expr.rs +++ b/tests/ui/unsafe-binders/expr.rs @@ -4,10 +4,11 @@ use std::unsafe_binder::{wrap_binder, unwrap_binder}; fn main() { + unsafe { let x = 1; - let binder: unsafe<'a> &'a i32 = wrap_binder!(x); - //~^ ERROR unsafe binders are not yet implemented - //~| ERROR unsafe binders are not yet implemented - let rx = *unwrap_binder!(binder); - //~^ ERROR 
unsafe binders are not yet implemented + let binder: unsafe<'a> &'a i32 = wrap_binder!(&x); + //~^ ERROR unsafe binder casts are not fully implemented + let rx = *unwrap_binder!(binder); + //~^ ERROR unsafe binder casts are not fully implemented + } } diff --git a/tests/ui/unsafe-binders/expr.stderr b/tests/ui/unsafe-binders/expr.stderr index 26fae1958b0..78a288e10a3 100644 --- a/tests/ui/unsafe-binders/expr.stderr +++ b/tests/ui/unsafe-binders/expr.stderr @@ -7,23 +7,17 @@ LL | #![feature(unsafe_binders)] = note: see issue #130516 <https://github.com/rust-lang/rust/issues/130516> for more information = note: `#[warn(incomplete_features)]` on by default -error: unsafe binders are not yet implemented - --> $DIR/expr.rs:8:17 +error: unsafe binder casts are not fully implemented + --> $DIR/expr.rs:9:55 | -LL | let binder: unsafe<'a> &'a i32 = wrap_binder!(x); - | ^^^^^^^^^^^^^^^^^^ +LL | let binder: unsafe<'a> &'a i32 = wrap_binder!(&x); + | ^^ -error: unsafe binders are not yet implemented - --> $DIR/expr.rs:8:51 +error: unsafe binder casts are not fully implemented + --> $DIR/expr.rs:11:34 | -LL | let binder: unsafe<'a> &'a i32 = wrap_binder!(x); - | ^ +LL | let rx = *unwrap_binder!(binder); + | ^^^^^^ -error: unsafe binders are not yet implemented - --> $DIR/expr.rs:11:30 - | -LL | let rx = *unwrap_binder!(binder); - | ^^^^^^ - -error: aborting due to 3 previous errors; 1 warning emitted +error: aborting due to 2 previous errors; 1 warning emitted diff --git a/tests/ui/unsafe-binders/lifetime-resolution.rs b/tests/ui/unsafe-binders/lifetime-resolution.rs index aebed9599d4..b352acfadf2 100644 --- a/tests/ui/unsafe-binders/lifetime-resolution.rs +++ b/tests/ui/unsafe-binders/lifetime-resolution.rs @@ -3,16 +3,13 @@ fn foo<'a>() { let good: unsafe<'b> &'a &'b (); - //~^ ERROR unsafe binders are not yet implemented let missing: unsafe<> &'missing (); - //~^ ERROR unsafe binders are not yet implemented - //~| ERROR use of undeclared lifetime name `'missing` + //~^ ERROR use of undeclared lifetime name `'missing` fn inner<'b>() { let outer: unsafe<> &'a &'b (); - //~^ ERROR unsafe binders are not yet implemented - //~| can't use generic parameters from outer item + //~^ can't use generic parameters from outer item } } diff --git a/tests/ui/unsafe-binders/lifetime-resolution.stderr b/tests/ui/unsafe-binders/lifetime-resolution.stderr index 7a8ce929df1..69660c271bf 100644 --- a/tests/ui/unsafe-binders/lifetime-resolution.stderr +++ b/tests/ui/unsafe-binders/lifetime-resolution.stderr @@ -1,5 +1,5 @@ error[E0261]: use of undeclared lifetime name `'missing` - --> $DIR/lifetime-resolution.rs:8:28 + --> $DIR/lifetime-resolution.rs:7:28 | LL | let missing: unsafe<> &'missing (); | ^^^^^^^^ undeclared lifetime @@ -15,7 +15,7 @@ LL | fn foo<'missing, 'a>() { | +++++++++ error[E0401]: can't use generic parameters from outer item - --> $DIR/lifetime-resolution.rs:13:30 + --> $DIR/lifetime-resolution.rs:11:30 | LL | fn foo<'a>() { | -- lifetime parameter from outer item @@ -41,25 +41,7 @@ LL | #![feature(unsafe_binders)] = note: see issue #130516 <https://github.com/rust-lang/rust/issues/130516> for more information = note: `#[warn(incomplete_features)]` on by default -error: unsafe binders are not yet implemented - --> $DIR/lifetime-resolution.rs:5:15 - | -LL | let good: unsafe<'b> &'a &'b (); - | ^^^^^^^^^^^^^^^^^^^^^ - -error: unsafe binders are not yet implemented - --> $DIR/lifetime-resolution.rs:8:18 - | -LL | let missing: unsafe<> &'missing (); - | ^^^^^^^^^^^^^^^^^^^^^ - -error: unsafe binders are 
not yet implemented - --> $DIR/lifetime-resolution.rs:13:20 - | -LL | let outer: unsafe<> &'a &'b (); - | ^^^^^^^^^^^^^^^^^^^ - -error: aborting due to 5 previous errors; 1 warning emitted +error: aborting due to 2 previous errors; 1 warning emitted Some errors have detailed explanations: E0261, E0401. For more information about an error, try `rustc --explain E0261`. diff --git a/tests/ui/unsafe-binders/mismatch.rs b/tests/ui/unsafe-binders/mismatch.rs new file mode 100644 index 00000000000..731fe2d1ce9 --- /dev/null +++ b/tests/ui/unsafe-binders/mismatch.rs @@ -0,0 +1,43 @@ +#![feature(unsafe_binders)] +//~^ WARN the feature `unsafe_binders` is incomplete + +use std::unsafe_binder::{wrap_binder, unwrap_binder}; + +fn a() { + let _: unsafe<'a> &'a i32 = wrap_binder!(&()); + //~^ ERROR unsafe binder casts are not fully implemented + //~| ERROR mismatched types +} + +fn b() { + let _: i32 = wrap_binder!(&()); + //~^ ERROR unsafe binder casts are not fully implemented + //~| ERROR `wrap_binder!()` can only wrap into unsafe binder +} + +fn c() { + let y = 1; + unwrap_binder!(y); + //~^ ERROR unsafe binder casts are not fully implemented + //~| ERROR expected unsafe binder, found integer as input +} + +fn d() { + let unknown = Default::default(); + unwrap_binder!(unknown); + //~^ ERROR unsafe binder casts are not fully implemented + // FIXME(unsafe_binders): This should report ambiguity once we've removed + // the error above which taints the infcx. +} + +fn e() { + let x = wrap_binder!(&42); + //~^ ERROR unsafe binder casts are not fully implemented + // Currently, type inference doesn't flow backwards for unsafe binders. + // It could, perhaps, but that may cause even more surprising corners. + // FIXME(unsafe_binders): This should report ambiguity once we've removed + // the error above which taints the infcx. 
+ let _: unsafe<'a> &'a i32 = x; +} + +fn main() {} diff --git a/tests/ui/unsafe-binders/mismatch.stderr b/tests/ui/unsafe-binders/mismatch.stderr new file mode 100644 index 00000000000..a720e5dbdc1 --- /dev/null +++ b/tests/ui/unsafe-binders/mismatch.stderr @@ -0,0 +1,68 @@ +warning: the feature `unsafe_binders` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/mismatch.rs:1:12 + | +LL | #![feature(unsafe_binders)] + | ^^^^^^^^^^^^^^ + | + = note: see issue #130516 <https://github.com/rust-lang/rust/issues/130516> for more information + = note: `#[warn(incomplete_features)]` on by default + +error: unsafe binder casts are not fully implemented + --> $DIR/mismatch.rs:7:46 + | +LL | let _: unsafe<'a> &'a i32 = wrap_binder!(&()); + | ^^^ + +error[E0308]: mismatched types + --> $DIR/mismatch.rs:7:46 + | +LL | let _: unsafe<'a> &'a i32 = wrap_binder!(&()); + | ^^^ expected `&i32`, found `&()` + | + = note: expected reference `&i32` + found reference `&()` + +error: unsafe binder casts are not fully implemented + --> $DIR/mismatch.rs:13:31 + | +LL | let _: i32 = wrap_binder!(&()); + | ^^^ + +error: `wrap_binder!()` can only wrap into unsafe binder, not `i32` + --> $DIR/mismatch.rs:13:18 + | +LL | let _: i32 = wrap_binder!(&()); + | ^^^^^^^^^^^^^^^^^ + | + = note: unsafe binders are the only valid output of wrap + = note: this error originates in the macro `wrap_binder` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: unsafe binder casts are not fully implemented + --> $DIR/mismatch.rs:20:20 + | +LL | unwrap_binder!(y); + | ^ + +error: expected unsafe binder, found integer as input of `unwrap_binder!()` + --> $DIR/mismatch.rs:20:20 + | +LL | unwrap_binder!(y); + | ^ + | + = note: only an unsafe binder type can be unwrapped + +error: unsafe binder casts are not fully implemented + --> $DIR/mismatch.rs:27:20 + | +LL | unwrap_binder!(unknown); + | ^^^^^^^ + +error: unsafe binder casts are not fully implemented + --> $DIR/mismatch.rs:34:26 + | +LL | let x = wrap_binder!(&42); + | ^^^ + +error: aborting due to 8 previous errors; 1 warning emitted + +For more information about this error, try `rustc --explain E0308`. 
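Not part of the patch: a minimal standalone sketch of what the unsafe-binder tests above exercise, assembled only from the new expr.rs/mismatch.rs sources in this diff. Under `#![feature(unsafe_binders)]` on nightly it still triggers the reworded "unsafe binder casts are not fully implemented" diagnostics shown in expr.stderr, so it is illustrative rather than working code.

#![feature(unsafe_binders)] // incomplete feature, tracked in issue #130516

use std::unsafe_binder::{wrap_binder, unwrap_binder};

fn main() {
    unsafe {
        let x = 1;
        // Wrap a reference into the unsafe binder type `unsafe<'a> &'a i32` ...
        let binder: unsafe<'a> &'a i32 = wrap_binder!(&x);
        // ... and unwrap it again to read the `&i32` back out.
        let rx = *unwrap_binder!(binder);
        // Per this patch, both casts currently report
        // "unsafe binder casts are not fully implemented".
        let _ = rx;
    }
}
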
diff --git a/tests/ui/unsafe-binders/simple.rs b/tests/ui/unsafe-binders/simple.rs index cebff2cbfb8..6172a9e1e7b 100644 --- a/tests/ui/unsafe-binders/simple.rs +++ b/tests/ui/unsafe-binders/simple.rs @@ -1,7 +1,8 @@ +//@ check-pass + #![feature(unsafe_binders)] //~^ WARN the feature `unsafe_binders` is incomplete fn main() { let x: unsafe<'a> &'a (); - //~^ ERROR unsafe binders are not yet implemented } diff --git a/tests/ui/unsafe-binders/simple.stderr b/tests/ui/unsafe-binders/simple.stderr index a21dbd00b4c..e4b82c12b06 100644 --- a/tests/ui/unsafe-binders/simple.stderr +++ b/tests/ui/unsafe-binders/simple.stderr @@ -1,5 +1,5 @@ warning: the feature `unsafe_binders` is incomplete and may not be safe to use and/or cause compiler crashes - --> $DIR/simple.rs:1:12 + --> $DIR/simple.rs:3:12 | LL | #![feature(unsafe_binders)] | ^^^^^^^^^^^^^^ @@ -7,11 +7,5 @@ LL | #![feature(unsafe_binders)] = note: see issue #130516 <https://github.com/rust-lang/rust/issues/130516> for more information = note: `#[warn(incomplete_features)]` on by default -error: unsafe binders are not yet implemented - --> $DIR/simple.rs:5:12 - | -LL | let x: unsafe<'a> &'a (); - | ^^^^^^^^^^^^^^^^^ - -error: aborting due to 1 previous error; 1 warning emitted +warning: 1 warning emitted diff --git a/triagebot.toml b/triagebot.toml index eefb87aa298..da652ef7042 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -997,6 +997,7 @@ users_on_vacation = [ "jyn514", "celinval", "nnethercote", + "workingjubilee", ] [[assign.warn_non_default_branch.exceptions]] |
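Also not part of the patch: the new ambiguity-cause.rs test added earlier in this diff, restated as a standalone file for readability (nothing beyond the patch content is assumed). In both the simple and negative_coherence revisions, coherence rejects the `String` impl with E0119 because a future upstream `impl Copy for String` could make it overlap with the blanket impl.

//@ revisions: simple negative_coherence

#![feature(negative_impls)]
#![cfg_attr(negative_coherence, feature(with_negative_coherence))]

trait MyTrait {}

// Blanket impl: every `Copy` type gets `MyTrait`.
impl<T: Copy> MyTrait for T {}

// Rejected in both revisions: upstream crates may add `impl Copy for String`
// in a future version, so this impl could come to overlap with the blanket one.
impl MyTrait for String {}
//~^ ERROR conflicting implementations of trait `MyTrait` for type `String`

fn main() {}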
