Diffstat (limited to 'compiler')
124 files changed, 1803 insertions, 923 deletions
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 50fe37dcdb6..5ccc7d51066 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -107,7 +107,7 @@ impl Lit { /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation. pub fn from_token(token: &Token) -> Option<Lit> { match token.uninterpolate().kind { - Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), + Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), Literal(token_lit) => Some(token_lit), Interpolated(ref nt) if let NtExpr(expr) | NtLiteral(expr) = &nt.0 @@ -183,7 +183,7 @@ impl LitKind { } } -pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { +pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() @@ -214,7 +214,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } -fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { +fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() @@ -223,6 +223,24 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } +#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)] +pub enum IdentIsRaw { + No, + Yes, +} + +impl From<bool> for IdentIsRaw { + fn from(b: bool) -> Self { + if b { Self::Yes } else { Self::No } + } +} + +impl From<IdentIsRaw> for bool { + fn from(is_raw: IdentIsRaw) -> bool { + matches!(is_raw, IdentIsRaw::Yes) + } +} + // SAFETY: due to the `Clone` impl below, all fields of all variants other than // `Interpolated` must impl `Copy`. #[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] @@ -298,7 +316,7 @@ pub enum TokenKind { /// Do not forget about `NtIdent` when you want to match on identifiers. /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to /// treat regular and interpolated identifiers in the same way. - Ident(Symbol, /* is_raw */ bool), + Ident(Symbol, IdentIsRaw), /// Lifetime identifier token. /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers. /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to @@ -411,7 +429,7 @@ impl Token { /// Recovers a `Token` from an `Ident`. This creates a raw identifier if necessary. pub fn from_ast_ident(ident: Ident) -> Self { - Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span) + Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span) } /// For interpolated tokens, returns a span of the fragment to which the interpolated @@ -567,7 +585,7 @@ impl Token { pub fn can_begin_literal_maybe_minus(&self) -> bool { match self.uninterpolate().kind { Literal(..) | BinOp(Minus) => true, - Ident(name, false) if name.is_bool_lit() => true, + Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true, Interpolated(ref nt) => match &nt.0 { NtLiteral(_) => true, NtExpr(e) => match &e.kind { @@ -602,7 +620,7 @@ impl Token { /// Returns an identifier if this token is an identifier. #[inline] - pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> { + pub fn ident(&self) -> Option<(Ident, IdentIsRaw)> { // We avoid using `Token::uninterpolate` here because it's slow. 
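The change above replaces the bare `is_raw: bool` on identifier tokens with a dedicated two-variant enum plus `From` conversions in both directions. A minimal standalone sketch of that bool-to-enum pattern, without the compiler-specific derives (`Encodable`, `HashStable_Generic`, and so on), for reference only:

```rust
// Named variants in place of a bare `bool`, mirroring the `IdentIsRaw`
// type added in rustc_ast::token above (sketch only, not the compiler type).
#[derive(Clone, Copy, PartialEq, Debug)]
enum IdentIsRaw {
    No,
    Yes,
}

impl From<bool> for IdentIsRaw {
    fn from(b: bool) -> Self {
        if b { Self::Yes } else { Self::No }
    }
}

impl From<IdentIsRaw> for bool {
    fn from(is_raw: IdentIsRaw) -> bool {
        matches!(is_raw, IdentIsRaw::Yes)
    }
}

fn main() {
    // Call sites now match on a named variant instead of `true`/`false`,
    // and the `From` impls keep boundaries with bool-based APIs cheap.
    let is_raw = IdentIsRaw::from(false);
    assert!(matches!(is_raw, IdentIsRaw::No));
    assert!(bool::from(IdentIsRaw::Yes));
}
```

The same pattern recurs later in this diff as `IsTuple`, `Sign`, `Recovered`, and `IsFirstInputType`.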
match &self.kind { &Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)), @@ -755,7 +773,7 @@ impl Token { /// Returns `true` if the token is a non-raw identifier for which `pred` holds. pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool { match self.ident() { - Some((id, false)) => pred(id), + Some((id, IdentIsRaw::No)) => pred(id), _ => false, } } @@ -806,7 +824,7 @@ impl Token { _ => return None, }, SingleQuote => match joint.kind { - Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))), + Ident(name, IdentIsRaw::No) => Lifetime(Symbol::intern(&format!("'{name}"))), _ => return None, }, @@ -836,7 +854,7 @@ pub enum Nonterminal { NtPat(P<ast::Pat>), NtExpr(P<ast::Expr>), NtTy(P<ast::Ty>), - NtIdent(Ident, /* is_raw */ bool), + NtIdent(Ident, IdentIsRaw), NtLifetime(Ident), NtLiteral(P<ast::Expr>), /// Stuff inside brackets for attributes diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 298c01a4567..adc3056cc29 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -656,7 +656,7 @@ impl TokenStream { DelimSpacing::new(Spacing::JointHidden, Spacing::Alone), Delimiter::Bracket, [ - TokenTree::token_alone(token::Ident(sym::doc, false), span), + TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span), TokenTree::token_alone(token::Eq, span), TokenTree::token_alone( TokenKind::lit(token::StrRaw(num_of_hashes), data, None), diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index d1ba93f0675..77dd03d15f5 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -138,7 +138,7 @@ impl<'hir> LoweringContext<'_, 'hir> { } else { self.tcx.fn_arg_names(sig_id).len() }; - let inputs = self.arena.alloc_from_iter((0..args_count).into_iter().map(|arg| hir::Ty { + let inputs = self.arena.alloc_from_iter((0..args_count).map(|arg| hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)), span: self.lower_span(param_span), diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index ef843da7307..a5be91bb872 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -1636,7 +1636,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { if let Some(old_def_id) = self.orig_opt_local_def_id(param) { old_def_id } else { - self.dcx().span_bug(lifetime.ident.span, "no def-id for fresh lifetime"); + self.dcx() + .span_delayed_bug(lifetime.ident.span, "no def-id for fresh lifetime"); + continue; } } diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 7ea0078ea3b..6e1974f48b2 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -185,7 +185,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { // IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if` (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. 
}, _)) - if !Ident::new(*sym, *span).is_reserved() || *is_raw => + if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) => { false } @@ -197,7 +197,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { || *sym == kw::Fn || *sym == kw::SelfUpper || *sym == kw::Pub - || *is_raw => + || matches!(is_raw, IdentIsRaw::Yes) => { false } @@ -731,7 +731,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere token::NtBlock(e) => self.block_to_string(e), token::NtStmt(e) => self.stmt_to_string(e), token::NtPat(e) => self.pat_to_string(e), - token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(), + &token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw.into()).to_string(), token::NtLifetime(e) => e.to_string(), token::NtLiteral(e) => self.expr_to_string(e), token::NtVis(e) => self.vis_to_string(e), @@ -795,7 +795,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere /* Name components */ token::Ident(s, is_raw) => { - IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into() + IdentPrinter::new(s, is_raw.into(), convert_dollar_crate).to_string().into() } token::Lifetime(s) => s.to_string().into(), diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index e1509da913a..3c6bd1d36fd 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -1559,7 +1559,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // A bare path doesn't need a `let` assignment, it's already a simple // binding access. // As a new binding wasn't added, we don't need to modify the advancing call. 
- sugg.push((loop_span.with_hi(pat_span.lo()), format!("while let Some("))); + sugg.push((loop_span.with_hi(pat_span.lo()), "while let Some(".to_string())); sugg.push(( pat_span.shrink_to_hi().with_hi(head.span.lo()), ") = ".to_string(), diff --git a/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs b/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs index b2c7a98142e..6beae61ca7f 100644 --- a/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs +++ b/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs @@ -134,14 +134,13 @@ impl OutlivesSuggestionBuilder { for (r, bound) in unified.into_iter() { if !unified_already.contains(fr) { - suggested.push(SuggestedConstraint::Equal(fr_name.clone(), bound)); + suggested.push(SuggestedConstraint::Equal(fr_name, bound)); unified_already.insert(r); } } if !other.is_empty() { - let other = - other.iter().map(|(_, rname)| rname.clone()).collect::<SmallVec<_>>(); + let other = other.iter().map(|(_, rname)| *rname).collect::<SmallVec<_>>(); suggested.push(SuggestedConstraint::Outlives(fr_name, other)) } } diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 50d22881c3e..e586c58781c 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -280,7 +280,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { .iter() .rfind(|param| param.def_id.to_def_id() == defid) .is_some() { - suggestions.push((bounded_span.shrink_to_hi(), format!(" + 'static"))); + suggestions.push((bounded_span.shrink_to_hi(), " + 'static".to_string())); } }); }); diff --git a/compiler/rustc_borrowck/src/type_check/relate_tys.rs b/compiler/rustc_borrowck/src/type_check/relate_tys.rs index dd355c3525c..61b803ea38d 100644 --- a/compiler/rustc_borrowck/src/type_check/relate_tys.rs +++ b/compiler/rustc_borrowck/src/type_check/relate_tys.rs @@ -123,7 +123,11 @@ impl<'me, 'bccx, 'tcx> NllTypeRelating<'me, 'bccx, 'tcx> { // `handle_opaque_type` cannot handle subtyping, so to support subtyping // we instead eagerly generalize here. This is a bit of a mess but will go // away once we're using the new solver. - let mut enable_subtyping = |ty, ty_is_expected| { + // + // Given `opaque rel B`, we create a new infer var `ty_vid` constrain it + // by using `ty_vid rel B` and then finally and end by equating `ty_vid` to + // the opaque. 
+ let mut enable_subtyping = |ty, opaque_is_expected| { let ty_vid = infcx.next_ty_var_id_in_universe( TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, @@ -132,7 +136,7 @@ impl<'me, 'bccx, 'tcx> NllTypeRelating<'me, 'bccx, 'tcx> { ty::UniverseIndex::ROOT, ); - let variance = if ty_is_expected { + let variance = if opaque_is_expected { self.ambient_variance } else { self.ambient_variance.xform(ty::Contravariant) @@ -140,7 +144,7 @@ impl<'me, 'bccx, 'tcx> NllTypeRelating<'me, 'bccx, 'tcx> { self.type_checker.infcx.instantiate_ty_var( self, - ty_is_expected, + opaque_is_expected, ty_vid, variance, ty, @@ -149,8 +153,8 @@ impl<'me, 'bccx, 'tcx> NllTypeRelating<'me, 'bccx, 'tcx> { }; let (a, b) = match (a.kind(), b.kind()) { - (&ty::Alias(ty::Opaque, ..), _) => (a, enable_subtyping(b, false)?), - (_, &ty::Alias(ty::Opaque, ..)) => (enable_subtyping(a, true)?, b), + (&ty::Alias(ty::Opaque, ..), _) => (a, enable_subtyping(b, true)?), + (_, &ty::Alias(ty::Opaque, ..)) => (enable_subtyping(a, false)?, b), _ => unreachable!( "expected at least one opaque type in `relate_opaques`, got {a} and {b}." ), diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index c5a73c31995..263081ea19e 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -1,3 +1,4 @@ +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter}; @@ -416,7 +417,7 @@ fn parse_reg<'a>( ) -> PResult<'a, ast::InlineAsmRegOrRegClass> { p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let result = match p.token.uninterpolate().kind { - token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name), + token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name), token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => { *explicit_reg = true; ast::InlineAsmRegOrRegClass::Reg(symbol) diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs index d244897f8a5..01821ee833f 100644 --- a/compiler/rustc_builtin_macros/src/assert/context.rs +++ b/compiler/rustc_builtin_macros/src/assert/context.rs @@ -1,7 +1,6 @@ use rustc_ast::{ ptr::P, - token, - token::Delimiter, + token::{self, Delimiter, IdentIsRaw}, tokenstream::{DelimSpan, TokenStream, TokenTree}, BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability, Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID, @@ -170,7 +169,10 @@ impl<'cx, 'a> Context<'cx, 'a> { ]; let captures = self.capture_decls.iter().flat_map(|cap| { [ - TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span), + TokenTree::token_joint_hidden( + token::Ident(cap.ident.name, IdentIsRaw::No), + cap.ident.span, + ), TokenTree::token_alone(token::Comma, self.span), ] }); diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs index 97d6b82de98..bf4693cd541 100644 --- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs @@ -198,7 +198,7 @@ where match fields { Unnamed(fields, is_tuple) => { let path_expr = cx.expr_path(outer_pat_path); - if !*is_tuple { + if matches!(is_tuple, IsTuple::No) { path_expr } else { let fields = fields diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs 
b/compiler/rustc_builtin_macros/src/deriving/default.rs index d5a42566e19..0bd2d423a29 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -62,8 +62,8 @@ fn default_struct_substructure( let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ThinVec::new()); let expr = match summary { - Unnamed(_, false) => cx.expr_ident(trait_span, substr.type_ident), - Unnamed(fields, true) => { + Unnamed(_, IsTuple::No) => cx.expr_ident(trait_span, substr.type_ident), + Unnamed(fields, IsTuple::Yes) => { let exprs = fields.iter().map(|sp| default_call(*sp)).collect(); cx.expr_call_ident(trait_span, substr.type_ident, exprs) } diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 6eeb028728c..3ee4fded749 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -286,10 +286,16 @@ pub struct FieldInfo { pub other_selflike_exprs: Vec<P<Expr>>, } +#[derive(Copy, Clone)] +pub enum IsTuple { + No, + Yes, +} + /// Fields for a static method pub enum StaticFields { /// Tuple and unit structs/enum variants like this. - Unnamed(Vec<Span>, bool /*is tuple*/), + Unnamed(Vec<Span>, IsTuple), /// Normal structs/struct variants. Named(Vec<(Ident, Span)>), } @@ -1439,7 +1445,10 @@ impl<'a> TraitDef<'a> { } } - let is_tuple = matches!(struct_def, ast::VariantData::Tuple(..)); + let is_tuple = match struct_def { + ast::VariantData::Tuple(..) => IsTuple::Yes, + _ => IsTuple::No, + }; match (just_spans.is_empty(), named_idents.is_empty()) { (false, false) => cx .dcx() diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index b66f7111ff0..3366378d38d 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -10,6 +10,7 @@ use rustc_ast::{ use rustc_data_structures::fx::FxHashSet; use rustc_errors::{Applicability, DiagnosticBuilder, MultiSpan, PResult, SingleLabelManySpans}; use rustc_expand::base::{self, *}; +use rustc_parse::parser::Recovered; use rustc_parse_format as parse; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{BytePos, InnerSpan, Span}; @@ -111,9 +112,8 @@ fn parse_args<'a>(ecx: &mut ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult< _ => return Err(err), } } - Ok(recovered) => { - assert!(recovered); - } + Ok(Recovered::Yes) => (), + Ok(Recovered::No) => unreachable!(), } } first = false; diff --git a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch new file mode 100644 index 00000000000..6af11e54d88 --- /dev/null +++ b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch @@ -0,0 +1,47 @@ +From 9f65e742ba3e41474e6126c6c4469c48eaa6ca7e Mon Sep 17 00:00:00 2001 +From: Chris Denton <chris@chrisdenton.dev> +Date: Tue, 20 Feb 2024 16:01:40 -0300 +Subject: [PATCH] Don't use raw-dylib in std + +--- + library/std/src/sys/pal/windows/c.rs | 2 +- + library/std/src/sys/pal/windows/rand.rs | 3 +-- + 2 files changed, 2 insertions(+), 3 deletions(-) + +diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs +index ad8e01bfa9b..9ca8e4c16ce 100644 +--- a/library/std/src/sys/pal/windows/c.rs ++++ b/library/std/src/sys/pal/windows/c.rs +@@ -323,7 +323,7 @@ pub unsafe fn NtWriteFile( + + // 
Use raw-dylib to import ProcessPrng as we can't rely on there being an import library. + cfg_if::cfg_if! { +-if #[cfg(not(target_vendor = "win7"))] { ++if #[cfg(any())] { + #[cfg(target_arch = "x86")] + #[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")] + extern "system" { +diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs +index e427546222a..f2fe42a4d51 100644 +--- a/library/std/src/sys/pal/windows/rand.rs ++++ b/library/std/src/sys/pal/windows/rand.rs +@@ -2,7 +2,7 @@ + use core::mem; + use core::ptr; + +-#[cfg(not(target_vendor = "win7"))] ++#[cfg(any())] + #[inline] + pub fn hashmap_random_keys() -> (u64, u64) { + let mut v = (0, 0); +@@ -13,7 +13,6 @@ pub fn hashmap_random_keys() -> (u64, u64) { + v + } + +-#[cfg(target_vendor = "win7")] + pub fn hashmap_random_keys() -> (u64, u64) { + use crate::ffi::c_void; + use crate::io; +-- +2.42.0.windows.2 + diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs index 67bc86e4c90..b6bbc81732e 100644 --- a/compiler/rustc_codegen_llvm/src/back/archive.rs +++ b/compiler/rustc_codegen_llvm/src/back/archive.rs @@ -313,7 +313,7 @@ fn get_llvm_object_symbols( llvm::LLVMRustGetSymbols( buf.as_ptr(), buf.len(), - &mut *state as *mut &mut _ as *mut c_void, + std::ptr::addr_of_mut!(*state) as *mut c_void, callback, error_callback, ) diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 23e6f054a7c..58c0765d814 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -1079,7 +1079,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>( .map(|(arg_idx, val)| { let idx = val.unwrap_leaf().try_to_i32().unwrap(); if idx >= i32::try_from(total_len).unwrap() { - bx.sess().dcx().emit_err(InvalidMonomorphization::ShuffleIndexOutOfBounds { + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { span, name, arg_idx: arg_idx as u64, @@ -1138,24 +1138,15 @@ fn generic_simd_intrinsic<'ll, 'tcx>( let val = bx.const_get_elt(vector, i as u64); match bx.const_to_opt_u128(val, true) { None => { - bx.sess().dcx().emit_err( - InvalidMonomorphization::ShuffleIndexNotConstant { - span, - name, - arg_idx, - }, - ); - None + bug!("typeck should have already ensured that these are const") } Some(idx) if idx >= total_len => { - bx.sess().dcx().emit_err( - InvalidMonomorphization::ShuffleIndexOutOfBounds { - span, - name, - arg_idx, - total_len, - }, - ); + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx, + total_len, + }); None } Some(idx) => Some(bx.const_i32(idx as i32)), @@ -1184,10 +1175,22 @@ fn generic_simd_intrinsic<'ll, 'tcx>( out_ty: arg_tys[2] } ); + let idx = bx + .const_to_opt_u128(args[1].immediate(), false) + .expect("typeck should have ensure that this is a const"); + if idx >= in_len.into() { + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx: 1, + total_len: in_len.into(), + }); + return Ok(bx.const_null(llret_ty)); + } return Ok(bx.insert_element( args[0].immediate(), args[2].immediate(), - args[1].immediate(), + bx.const_i32(idx as i32), )); } if name == sym::simd_extract { @@ -1195,7 +1198,19 @@ fn generic_simd_intrinsic<'ll, 'tcx>( ret_ty == in_elem, InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty } ); - return Ok(bx.extract_element(args[0].immediate(), args[1].immediate())); + let 
idx = bx + .const_to_opt_u128(args[1].immediate(), false) + .expect("typeck should have ensure that this is a const"); + if idx >= in_len.into() { + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx: 1, + total_len: in_len.into(), + }); + return Ok(bx.const_null(llret_ty)); + } + return Ok(bx.extract_element(args[0].immediate(), bx.const_i32(idx as i32))); } if name == sym::simd_select { @@ -2091,9 +2106,16 @@ fn generic_simd_intrinsic<'ll, 'tcx>( return Ok(args[0].immediate()); } + #[derive(Copy, Clone)] + enum Sign { + Unsigned, + Signed, + } + use Sign::*; + enum Style { Float, - Int(/* is signed? */ bool), + Int(Sign), Unsupported, } @@ -2101,11 +2123,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>( // vectors of pointer-sized integers should've been // disallowed before here, so this unwrap is safe. ty::Int(i) => ( - Style::Int(true), + Style::Int(Signed), i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Uint(u) => ( - Style::Int(false), + Style::Int(Unsigned), u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Float(f) => (Style::Float, f.bit_width()), @@ -2113,11 +2135,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }; let (out_style, out_width) = match out_elem.kind() { ty::Int(i) => ( - Style::Int(true), + Style::Int(Signed), i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Uint(u) => ( - Style::Int(false), + Style::Int(Unsigned), u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Float(f) => (Style::Float, f.bit_width()), @@ -2125,31 +2147,31 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }; match (in_style, out_style) { - (Style::Int(in_is_signed), Style::Int(_)) => { + (Style::Int(sign), Style::Int(_)) => { return Ok(match in_width.cmp(&out_width) { Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty), Ordering::Equal => args[0].immediate(), - Ordering::Less => { - if in_is_signed { - bx.sext(args[0].immediate(), llret_ty) - } else { - bx.zext(args[0].immediate(), llret_ty) - } - } + Ordering::Less => match sign { + Sign::Signed => bx.sext(args[0].immediate(), llret_ty), + Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty), + }, }); } - (Style::Int(in_is_signed), Style::Float) => { - return Ok(if in_is_signed { - bx.sitofp(args[0].immediate(), llret_ty) - } else { - bx.uitofp(args[0].immediate(), llret_ty) - }); + (Style::Int(Sign::Signed), Style::Float) => { + return Ok(bx.sitofp(args[0].immediate(), llret_ty)); } - (Style::Float, Style::Int(out_is_signed)) => { - return Ok(match (out_is_signed, name == sym::simd_as) { - (false, false) => bx.fptoui(args[0].immediate(), llret_ty), - (true, false) => bx.fptosi(args[0].immediate(), llret_ty), - (_, true) => bx.cast_float_to_int(out_is_signed, args[0].immediate(), llret_ty), + (Style::Int(Sign::Unsigned), Style::Float) => { + return Ok(bx.uitofp(args[0].immediate(), llret_ty)); + } + (Style::Float, Style::Int(sign)) => { + return Ok(match (sign, name == sym::simd_as) { + (Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty), + (Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty), + (_, true) => bx.cast_float_to_int( + matches!(sign, Sign::Signed), + args[0].immediate(), + llret_ty, + ), }); } (Style::Float, Style::Float) => { diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index 35210b0b2e8..c84461e53eb 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ 
-169,7 +169,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { fn print_pass_timings(&self) { unsafe { let mut size = 0; - let cstr = llvm::LLVMRustPrintPassTimings(&mut size as *mut usize); + let cstr = llvm::LLVMRustPrintPassTimings(std::ptr::addr_of_mut!(size)); if cstr.is_null() { println!("failed to get pass timings"); } else { @@ -182,7 +182,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { fn print_statistics(&self) { unsafe { let mut size = 0; - let cstr = llvm::LLVMRustPrintStatistics(&mut size as *mut usize); + let cstr = llvm::LLVMRustPrintStatistics(std::ptr::addr_of_mut!(size)); if cstr.is_null() { println!("failed to get pass stats"); } else { diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 54e8ed85e32..1b2beac56a2 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -435,7 +435,7 @@ pub(crate) fn print(req: &PrintRequest, mut out: &mut dyn PrintBackendInfo, sess &tm, cpu_cstring.as_ptr(), callback, - &mut out as *mut &mut dyn PrintBackendInfo as *mut c_void, + std::ptr::addr_of_mut!(out) as *mut c_void, ); } } diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index fa7719d8971..5ba66d1be43 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -106,14 +106,12 @@ codegen_ssa_invalid_monomorphization_return_type = invalid monomorphization of ` codegen_ssa_invalid_monomorphization_second_argument_length = invalid monomorphization of `{$name}` intrinsic: expected second argument with length {$in_len} (same as input type `{$in_ty}`), found `{$arg_ty}` with length {$out_len} -codegen_ssa_invalid_monomorphization_shuffle_index_not_constant = invalid monomorphization of `{$name}` intrinsic: shuffle index #{$arg_idx} is not a constant - -codegen_ssa_invalid_monomorphization_shuffle_index_out_of_bounds = invalid monomorphization of `{$name}` intrinsic: shuffle index #{$arg_idx} is out of bounds (limit {$total_len}) - codegen_ssa_invalid_monomorphization_simd_argument = invalid monomorphization of `{$name}` intrinsic: expected SIMD argument type, found non-SIMD `{$ty}` codegen_ssa_invalid_monomorphization_simd_first = invalid monomorphization of `{$name}` intrinsic: expected SIMD first type, found non-SIMD `{$ty}` +codegen_ssa_invalid_monomorphization_simd_index_out_of_bounds = invalid monomorphization of `{$name}` intrinsic: SIMD index #{$arg_idx} is out of bounds (limit {$total_len}) + codegen_ssa_invalid_monomorphization_simd_input = invalid monomorphization of `{$name}` intrinsic: expected SIMD input type, found non-SIMD `{$ty}` codegen_ssa_invalid_monomorphization_simd_return = invalid monomorphization of `{$name}` intrinsic: expected SIMD return type, found non-SIMD `{$ty}` diff --git a/compiler/rustc_codegen_ssa/src/back/metadata.rs b/compiler/rustc_codegen_ssa/src/back/metadata.rs index 8e76e47cfef..c6b04431fab 100644 --- a/compiler/rustc_codegen_ssa/src/back/metadata.rs +++ b/compiler/rustc_codegen_ssa/src/back/metadata.rs @@ -623,7 +623,7 @@ pub fn create_compressed_metadata_file_for_xcoff( /// that contains a custom section of the name `section_name` with contents /// `data`. /// -/// NB: the `object` crate does not yet have support for writing the the wasm +/// NB: the `object` crate does not yet have support for writing the wasm /// object file format. The format is simple enough that for now an extra crate /// from crates.io (such as `wasm-encoder`). 
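Several hunks above (in `archive.rs`, `lib.rs`, and `llvm_util.rs`) swap `&mut x as *mut _` casts for `std::ptr::addr_of_mut!`, which produces a raw pointer without going through an intermediate reference. A small self-contained illustration of the difference, not tied to the LLVM FFI calls in the diff:

```rust
use std::ptr;

fn main() {
    let mut size: usize = 0;
    // Old style: `&mut size as *mut usize` first creates a `&mut` reference.
    // New style: take the raw pointer directly.
    let out: *mut usize = ptr::addr_of_mut!(size);
    // Simulate an FFI out-parameter being written through the pointer.
    unsafe { *out = 42 };
    assert_eq!(size, 42);
}
```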
The file format is: /// diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index e42a8bd9ed9..a7ac502b248 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -797,16 +797,8 @@ pub enum InvalidMonomorphization<'tcx> { out_ty: Ty<'tcx>, }, - #[diag(codegen_ssa_invalid_monomorphization_shuffle_index_not_constant, code = E0511)] - ShuffleIndexNotConstant { - #[primary_span] - span: Span, - name: Symbol, - arg_idx: u64, - }, - - #[diag(codegen_ssa_invalid_monomorphization_shuffle_index_out_of_bounds, code = E0511)] - ShuffleIndexOutOfBounds { + #[diag(codegen_ssa_invalid_monomorphization_simd_index_out_of_bounds, code = E0511)] + SimdIndexOutOfBounds { #[primary_span] span: Span, name: Symbol, diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 00007110938..9c7aadb81f8 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -319,7 +319,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { targets: &SwitchTargets, ) { let discr = self.codegen_operand(bx, discr); + let discr_value = discr.immediate(); let switch_ty = discr.layout.ty; + // If our discriminant is a constant we can branch directly + if let Some(const_discr) = bx.const_to_opt_u128(discr_value, false) { + let target = targets.target_for_value(const_discr); + bx.br(helper.llbb_with_cleanup(self, target)); + return; + }; + let mut target_iter = targets.iter(); if target_iter.len() == 1 { // If there are two targets (one conditional, one fallback), emit `br` instead of @@ -330,14 +338,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if switch_ty == bx.tcx().types.bool { // Don't generate trivial icmps when switching on bool. 
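The new early return above lets the `SwitchInt` lowering branch straight to the matching block when the discriminant is already a constant, instead of emitting a conditional branch or `switch`. A rough standalone sketch of that idea; the types and the `fold_switch` helper here are illustrative, not the codegen API:

```rust
/// If the scrutinee is a known constant, pick the jump target up front
/// (falling back to the `otherwise` block), mirroring
/// `targets.target_for_value(const_discr)` in the hunk above.
fn fold_switch<'a>(
    const_discr: Option<u128>,
    targets: &'a [(u128, &'a str)],
    otherwise: &'a str,
) -> Option<&'a str> {
    let value = const_discr?;
    Some(
        targets
            .iter()
            .find(|(v, _)| *v == value)
            .map(|(_, block)| *block)
            .unwrap_or(otherwise),
    )
}

fn main() {
    let targets = [(0u128, "bb1"), (1, "bb2")];
    // Constant discriminant: branch directly, no switch needed.
    assert_eq!(fold_switch(Some(1), &targets, "bb_otherwise"), Some("bb2"));
    // Unknown discriminant: `None` here stands in for "emit the switch as before".
    assert_eq!(fold_switch(None, &targets, "bb_otherwise"), None);
}
```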
match test_value { - 0 => bx.cond_br(discr.immediate(), llfalse, lltrue), - 1 => bx.cond_br(discr.immediate(), lltrue, llfalse), + 0 => bx.cond_br(discr_value, llfalse, lltrue), + 1 => bx.cond_br(discr_value, lltrue, llfalse), _ => bug!(), } } else { let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty)); let llval = bx.const_uint_big(switch_llty, test_value); - let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval); + let cmp = bx.icmp(IntPredicate::IntEQ, discr_value, llval); bx.cond_br(cmp, lltrue, llfalse); } } else if self.cx.sess().opts.optimize == OptLevel::No @@ -362,11 +370,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let ll2 = helper.llbb_with_cleanup(self, target2); let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty)); let llval = bx.const_uint_big(switch_llty, test_value1); - let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval); + let cmp = bx.icmp(IntPredicate::IntEQ, discr_value, llval); bx.cond_br(cmp, ll1, ll2); } else { bx.switch( - discr.immediate(), + discr_value, helper.llbb_with_cleanup(self, targets.otherwise()), target_iter.map(|(value, target)| (value, helper.llbb_with_cleanup(self, target))), ); diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs index 959ec2ca865..82ce9ecd21d 100644 --- a/compiler/rustc_const_eval/src/interpret/intern.rs +++ b/compiler/rustc_const_eval/src/interpret/intern.rs @@ -140,7 +140,7 @@ pub fn intern_const_alloc_recursive< alloc.1.mutability = base_mutability; alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect() } else { - intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect() + intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect() }; // We need to distinguish "has just been interned" from "was already in `tcx`", // so we track this in a separate set. @@ -277,7 +277,7 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>> // We are not doing recursive interning, so we don't currently support provenance. // (If this assertion ever triggers, we should just implement a // proper recursive interning loop -- or just call `intern_const_alloc_recursive`. - if !self.tcx.try_get_global_alloc(prov.alloc_id()).is_some() { + if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() { panic!("`intern_with_temp_alloc` with nested allocations"); } } diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index f020616f6d8..1cb991b38f7 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -379,10 +379,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let (input, input_len) = self.operand_to_simd(&args[0])?; let (dest, dest_len) = self.place_to_simd(dest)?; assert_eq!(input_len, dest_len, "Return vector length must match input length"); - assert!( - index < dest_len, - "Index `{index}` must be in bounds of vector with length {dest_len}" - ); + // Bounds are not checked by typeck so we have to do it ourselves. 
+ if index >= input_len { + throw_ub_format!( + "`simd_insert` index {index} is out-of-bounds of vector with length {input_len}" + ); + } for i in 0..dest_len { let place = self.project_index(&dest, i)?; @@ -397,10 +399,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { sym::simd_extract => { let index = u64::from(self.read_scalar(&args[1])?.to_u32()?); let (input, input_len) = self.operand_to_simd(&args[0])?; - assert!( - index < input_len, - "index `{index}` must be in bounds of vector with length {input_len}" - ); + // Bounds are not checked by typeck so we have to do it ourselves. + if index >= input_len { + throw_ub_format!( + "`simd_extract` index {index} is out-of-bounds of vector with length {input_len}" + ); + } self.copy_op(&self.project_index(&input, index)?, dest)?; } sym::likely | sym::unlikely | sym::black_box => { diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs index adcb6ceaebf..32202ac3ede 100644 --- a/compiler/rustc_data_structures/src/sync.rs +++ b/compiler/rustc_data_structures/src/sync.rs @@ -429,7 +429,7 @@ impl<T> RwLock<T> { #[inline(always)] pub fn leak(&self) -> &T { let guard = self.read(); - let ret = unsafe { &*(&*guard as *const T) }; + let ret = unsafe { &*std::ptr::addr_of!(*guard) }; std::mem::forget(guard); ret } diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index ada8fa2e965..3f667e264e8 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -844,7 +844,7 @@ impl DiagCtxt { .emitted_diagnostic_codes .iter() .filter_map(|&code| { - if registry.try_find_description(code).is_ok().clone() { + if registry.try_find_description(code).is_ok() { Some(code.to_string()) } else { None diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs index e66cfbe6fb6..ffc8f782fd3 100644 --- a/compiler/rustc_expand/src/mbe/macro_check.rs +++ b/compiler/rustc_expand/src/mbe/macro_check.rs @@ -107,7 +107,7 @@ use crate::errors; use crate::mbe::{KleeneToken, TokenTree}; -use rustc_ast::token::{Delimiter, Token, TokenKind}; +use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::{NodeId, DUMMY_NODE_ID}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{DiagnosticMessage, MultiSpan}; @@ -409,7 +409,7 @@ fn check_nested_occurrences( match (state, tt) { ( NestedMacroState::Empty, - &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }), + &TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. 
}), ) => { if name == kw::MacroRules { state = NestedMacroState::MacroRules; diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index c82609503c1..bf99e9e6d5c 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -8,6 +8,7 @@ use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser} use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc}; use crate::mbe::transcribe::transcribe; +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; @@ -1302,7 +1303,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, - Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, + Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => { + IsInFollow::Yes + } _ => IsInFollow::No(TOKENS), }, _ => IsInFollow::No(TOKENS), @@ -1313,7 +1316,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq => IsInFollow::Yes, - Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, + Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => { + IsInFollow::Yes + } _ => IsInFollow::No(TOKENS), }, _ => IsInFollow::No(TOKENS), @@ -1336,7 +1341,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { | BinOp(token::Shr) | Semi | BinOp(token::Or) => IsInFollow::Yes, - Ident(name, false) if name == kw::As || name == kw::Where => { + Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => { IsInFollow::Yes } _ => IsInFollow::No(TOKENS), @@ -1364,7 +1369,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, - Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes, + Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes, + Ident(name, _) if name != kw::Priv => IsInFollow::Yes, _ => { if token.can_begin_type() { IsInFollow::Yes diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs index 3ca0787ce8e..84f7dc4771a 100644 --- a/compiler/rustc_expand/src/mbe/metavar_expr.rs +++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs @@ -1,4 +1,4 @@ -use rustc_ast::token::{self, Delimiter}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw}; use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree}; use rustc_ast::{LitIntType, LitKind}; use rustc_ast_pretty::pprust; @@ -142,7 +142,7 @@ fn parse_ident<'sess>( if let Some(tt) = iter.next() && let TokenTree::Token(token, _) = tt { - if let Some((elem, false)) = token.ident() { + if let Some((elem, IdentIsRaw::No)) = token.ident() { return Ok(elem); } let token_str = pprust::token_to_string(token); diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 0fdfa563138..ec1dd807d1a 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -2,7 +2,7 @@ use crate::errors; use crate::mbe::macro_parser::count_metavar_decls; use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, 
TokenTree}; -use rustc_ast::token::{self, Delimiter, Token}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw, Token}; use rustc_ast::{tokenstream, NodeId}; use rustc_ast_pretty::pprust; use rustc_feature::Features; @@ -222,7 +222,7 @@ fn parse_tree<'a>( Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); let span = ident.span.with_lo(span.lo()); - if ident.name == kw::Crate && !is_raw { + if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) { TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span) } else { TokenTree::MetaVar(span, ident) diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 519e4a634d8..4a18055d4ca 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -555,23 +555,14 @@ fn count_repetitions<'a>( ) -> PResult<'a, usize> { // Recursively count the number of matches in `matched` at given depth // (or at the top-level of `matched` if no depth is given). - fn count<'a>( - cx: &ExtCtxt<'a>, - depth_curr: usize, - depth_max: usize, - matched: &NamedMatch, - sp: &DelimSpan, - ) -> PResult<'a, usize> { + fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> { match matched { MatchedTokenTree(_) | MatchedNonterminal(_) => Ok(1), MatchedSeq(named_matches) => { if depth_curr == depth_max { Ok(named_matches.len()) } else { - named_matches - .iter() - .map(|elem| count(cx, depth_curr + 1, depth_max, elem, sp)) - .sum() + named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum() } } } @@ -612,7 +603,7 @@ fn count_repetitions<'a>( return Err(cx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() })); } - count(cx, depth_user, depth_max, matched, sp) + count(depth_user, depth_max, matched) } /// Returns a `NamedMatch` item declared on the LHS given an arbitrary [Ident] diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs index 7a888250ca1..0e07b41b43c 100644 --- a/compiler/rustc_expand/src/parse/tests.rs +++ b/compiler/rustc_expand/src/parse/tests.rs @@ -2,6 +2,7 @@ use crate::tests::{ matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error, }; +use ast::token::IdentIsRaw; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token}; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; @@ -74,9 +75,12 @@ fn string_to_tts_macro() { match tts { [ - TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. }, + _, + ), TokenTree::Token(Token { kind: token::Not, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _), + TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _), TokenTree::Delimited(.., macro_delim, macro_tts), ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => { let tts = ¯o_tts.trees().collect::<Vec<_>>(); @@ -90,7 +94,10 @@ fn string_to_tts_macro() { match &tts[..] { [ TokenTree::Token(Token { kind: token::Dollar, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name, IdentIsRaw::No), .. 
}, + _, + ), ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => { } _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), @@ -99,7 +106,10 @@ fn string_to_tts_macro() { match &tts[..] { [ TokenTree::Token(Token { kind: token::Dollar, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name, IdentIsRaw::No), .. }, + _, + ), ] if second_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {} _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), @@ -119,8 +129,11 @@ fn string_to_tts_1() { let tts = string_to_stream("fn a(b: i32) { b; }".to_string()); let expected = TokenStream::new(vec![ - TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)), - TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)), + TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)), + TokenTree::token_joint_hidden( + token::Ident(Symbol::intern("a"), IdentIsRaw::No), + sp(3, 4), + ), TokenTree::Delimited( DelimSpan::from_pair(sp(4, 5), sp(11, 12)), // `JointHidden` because the `(` is followed immediately by @@ -128,10 +141,16 @@ fn string_to_tts_1() { DelimSpacing::new(Spacing::JointHidden, Spacing::Alone), Delimiter::Parenthesis, TokenStream::new(vec![ - TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)), + TokenTree::token_joint( + token::Ident(Symbol::intern("b"), IdentIsRaw::No), + sp(5, 6), + ), TokenTree::token_alone(token::Colon, sp(6, 7)), // `JointHidden` because the `i32` is immediately followed by the `)`. - TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)), + TokenTree::token_joint_hidden( + token::Ident(sym::i32, IdentIsRaw::No), + sp(8, 11), + ), ]) .into(), ), @@ -143,7 +162,10 @@ fn string_to_tts_1() { DelimSpacing::new(Spacing::Alone, Spacing::Alone), Delimiter::Brace, TokenStream::new(vec![ - TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)), + TokenTree::token_joint( + token::Ident(Symbol::intern("b"), IdentIsRaw::No), + sp(15, 16), + ), // `Alone` because the `;` is followed by whitespace. 
TokenTree::token_alone(token::Semi, sp(16, 17)), ]) diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 62d0deb2d3a..87ea8690ffe 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -1,4 +1,5 @@ use crate::base::ExtCtxt; +use ast::token::IdentIsRaw; use pm::bridge::{ server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree, }; @@ -216,7 +217,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre Question => op("?"), SingleQuote => op("'"), - Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })), + Ident(sym, is_raw) => { + trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span })) + } Lifetime(name) => { let ident = symbol::Ident::new(name, span).without_first_quote(); trees.extend([ @@ -238,7 +241,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre escaped.extend(ch.escape_debug()); } let stream = [ - Ident(sym::doc, false), + Ident(sym::doc, IdentIsRaw::No), Eq, TokenKind::lit(token::Str, Symbol::intern(&escaped), None), ] @@ -259,7 +262,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre Interpolated(ref nt) if let NtIdent(ident, is_raw) = &nt.0 => { trees.push(TokenTree::Ident(Ident { sym: ident.name, - is_raw: *is_raw, + is_raw: matches!(is_raw, IdentIsRaw::Yes), span: ident.span, })) } @@ -352,7 +355,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>> } TokenTree::Ident(self::Ident { sym, is_raw, span }) => { rustc.sess().symbol_gallery.insert(sym, span); - smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw), span)] + smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)] } TokenTree::Literal(self::Literal { kind: self::LitKind::Integer, @@ -570,7 +573,7 @@ impl server::TokenStream for Rustc<'_, '_> { match &expr.kind { ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => { Ok(tokenstream::TokenStream::token_alone( - token::Ident(token_lit.symbol, false), + token::Ident(token_lit.symbol, IdentIsRaw::No), expr.span, )) } diff --git a/compiler/rustc_expand/src/tokenstream/tests.rs b/compiler/rustc_expand/src/tokenstream/tests.rs index 91c4dd732e3..78795e86fd5 100644 --- a/compiler/rustc_expand/src/tokenstream/tests.rs +++ b/compiler/rustc_expand/src/tokenstream/tests.rs @@ -1,6 +1,6 @@ use crate::tests::string_to_stream; -use rustc_ast::token; +use rustc_ast::token::{self, IdentIsRaw}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_span::create_default_session_globals_then; use rustc_span::{BytePos, Span, Symbol}; @@ -86,7 +86,8 @@ fn test_diseq_1() { fn test_is_empty() { create_default_session_globals_then(|| { let test0 = TokenStream::default(); - let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1)); + let test1 = + TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1)); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 18f6ae35054..1b2993dabdb 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -84,6 +84,8 @@ declare_features! ( (accepted, cfg_doctest, "1.40.0", Some(62210)), /// Enables `#[cfg(panic = "...")]` config key. 
(accepted, cfg_panic, "1.60.0", Some(77443)), + /// Allows `cfg(target_abi = "...")`. + (accepted, cfg_target_abi, "CURRENT_RUSTC_VERSION", Some(80970)), /// Allows `cfg(target_feature = "...")`. (accepted, cfg_target_feature, "1.27.0", Some(29717)), /// Allows `cfg(target_vendor = "...")`. diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 99875ec5405..6cb41be7c94 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -25,7 +25,6 @@ pub type GatedCfg = (Symbol, Symbol, GateFn); const GATED_CFGS: &[GatedCfg] = &[ // (name in cfg, feature, function to check if the feature is enabled) (sym::overflow_checks, sym::cfg_overflow_checks, cfg_fn!(cfg_overflow_checks)), - (sym::target_abi, sym::cfg_target_abi, cfg_fn!(cfg_target_abi)), (sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)), ( sym::target_has_atomic_equal_alignment, @@ -792,6 +791,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_intrinsic, Normal, template!(Word), ErrorFollowing, "the `#[rustc_intrinsic]` attribute is used to declare intrinsics with function bodies", ), + rustc_attr!( + rustc_no_mir_inline, Normal, template!(Word), WarnFollowing, + "#[rustc_no_mir_inline] prevents the MIR inliner from inlining a function while not affecting codegen" + ), // ========================================================================== // Internal attributes, Testing: diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 9012b731a13..8eea4b7d8a8 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -373,8 +373,6 @@ declare_features! ( (unstable, cfg_sanitize, "1.41.0", Some(39699)), /// Allows `cfg(sanitizer_cfi_generalize_pointers)` and `cfg(sanitizer_cfi_normalize_integers)`. (unstable, cfg_sanitizer_cfi, "1.77.0", Some(89653)), - /// Allows `cfg(target_abi = "...")`. - (unstable, cfg_target_abi, "1.55.0", Some(80970)), /// Allows `cfg(target(abi = "..."))`. (unstable, cfg_target_compact, "1.63.0", Some(96901)), /// Allows `cfg(target_has_atomic_load_store = "...")`. @@ -518,7 +516,7 @@ declare_features! ( (unstable, marker_trait_attr, "1.30.0", Some(29864)), /// Allows exhaustive pattern matching on types that contain uninhabited types in cases that are /// unambiguously sound. - (incomplete, min_exhaustive_patterns, "1.77.0", Some(119612)), + (unstable, min_exhaustive_patterns, "1.77.0", Some(119612)), /// A minimal, sound subset of specialization intended to be used by the /// standard library until the soundness issues with specialization /// are fixed. diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index fcb15925f6a..78e7c636a3e 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -3004,6 +3004,11 @@ impl<'hir> Item<'hir> { matches!(self.kind, ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..)) } + /// Check if this is an [`ItemKind::Struct`] or [`ItemKind::Union`]. + pub fn is_struct_or_union(&self) -> bool { + matches!(self.kind, ItemKind::Struct(..) | ItemKind::Union(..)) + } + expect_methods_self_kind! 
{ expect_extern_crate, Option<Symbol>, ItemKind::ExternCrate(s), *s; diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs index 7bdbab4325c..b5e69b8e376 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs @@ -154,10 +154,8 @@ pub(super) fn check_refining_return_position_impl_trait_in_trait<'tcx>( trait_m_sig.inputs_and_output, )); if !ocx.select_all_or_error().is_empty() { - // This code path is not reached in any tests, but may be reachable. If - // this is triggered, it should be converted to `delayed_bug` and the - // triggering case turned into a test. - tcx.dcx().bug("encountered errors when checking RPITIT refinement (selection)"); + tcx.dcx().delayed_bug("encountered errors when checking RPITIT refinement (selection)"); + return; } let outlives_env = OutlivesEnvironment::with_bounds( param_env, @@ -165,10 +163,8 @@ pub(super) fn check_refining_return_position_impl_trait_in_trait<'tcx>( ); let errors = infcx.resolve_regions(&outlives_env); if !errors.is_empty() { - // This code path is not reached in any tests, but may be reachable. If - // this is triggered, it should be converted to `delayed_bug` and the - // triggering case turned into a test. - tcx.dcx().bug("encountered errors when checking RPITIT refinement (regions)"); + tcx.dcx().delayed_bug("encountered errors when checking RPITIT refinement (regions)"); + return; } // Resolve any lifetime variables that may have been introduced during normalization. let Ok((trait_bounds, impl_bounds)) = infcx.fully_resolve((trait_bounds, impl_bounds)) else { diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs index 9e1e884d976..3aef29f4ae4 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs @@ -24,6 +24,11 @@ struct InherentOverlapChecker<'tcx> { tcx: TyCtxt<'tcx>, } +rustc_index::newtype_index! { + #[orderable] + pub struct RegionId {} +} + impl<'tcx> InherentOverlapChecker<'tcx> { /// Checks whether any associated items in impls 1 and 2 share the same identifier and /// namespace. @@ -205,11 +210,6 @@ impl<'tcx> InherentOverlapChecker<'tcx> { // This is advantageous to running the algorithm over the // entire graph when there are many connected regions. - rustc_index::newtype_index! { - #[orderable] - pub struct RegionId {} - } - struct ConnectedRegion { idents: SmallVec<[Symbol; 8]>, impl_blocks: FxHashSet<usize>, diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs index 07bbaa1926e..b46a67d08eb 100644 --- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs +++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs @@ -1,14 +1,13 @@ //! Orphan checker: every impl either implements a trait defined in this //! crate or pertains to a type defined in this crate. 
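The `refine.rs` hunk above (and several others in this diff) downgrades hard ICEs to `delayed_bug` plus an early return, so checking can continue and the bug only fires if nothing else reports an error first. A hypothetical stand-in for that recover-and-continue pattern, using a toy diagnostics context rather than rustc's `DiagCtxt`:

```rust
// Toy diagnostics context; `delayed_bug` here only records the message.
#[derive(Default)]
struct Diagnostics {
    delayed: Vec<String>,
}

impl Diagnostics {
    fn delayed_bug(&mut self, msg: &str) {
        self.delayed.push(msg.to_string());
    }
}

fn check_refinement(diags: &mut Diagnostics, has_errors: bool) {
    if has_errors {
        // Previously an immediate panic (`bug`); now the problem is recorded
        // and only this particular check is abandoned.
        diags.delayed_bug("encountered errors when checking RPITIT refinement");
        return;
    }
    // ... rest of the check would run here ...
}

fn main() {
    let mut diags = Diagnostics::default();
    check_refinement(&mut diags, true);
    assert_eq!(diags.delayed.len(), 1);
}
```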
+use crate::errors; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; use rustc_middle::ty::{self, AliasKind, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::LocalDefId; use rustc_span::Span; -use rustc_trait_selection::traits; - -use crate::errors; +use rustc_trait_selection::traits::{self, IsFirstInputType}; #[instrument(skip(tcx), level = "debug")] pub(crate) fn orphan_check_impl( @@ -288,7 +287,7 @@ fn emit_orphan_check_error<'tcx>( (Vec::new(), Vec::new(), Vec::new(), Vec::new(), Vec::new()); let mut sugg = None; for &(mut ty, is_target_ty) in &tys { - let span = if is_target_ty { + let span = if matches!(is_target_ty, IsFirstInputType::Yes) { // Point at `D<A>` in `impl<A, B> for C<B> in D<A>` self_ty_span } else { @@ -321,7 +320,8 @@ fn emit_orphan_check_error<'tcx>( } } - let is_foreign = !trait_ref.def_id.is_local() && !is_target_ty; + let is_foreign = + !trait_ref.def_id.is_local() && matches!(is_target_ty, IsFirstInputType::No); match &ty.kind() { ty::Slice(_) => { diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 642009dfa48..e9c9ec6ba53 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -1025,7 +1025,15 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::AdtDef<'_> { let is_anonymous = item.ident.name == kw::Empty; let repr = if is_anonymous { - tcx.adt_def(tcx.local_parent(def_id)).repr() + let parent = tcx.local_parent(def_id); + if let Node::Item(item) = tcx.hir_node_by_def_id(parent) + && item.is_struct_or_union() + { + tcx.adt_def(parent).repr() + } else { + tcx.dcx().span_delayed_bug(item.span, "anonymous field inside non struct/union"); + ty::ReprOptions::default() + } } else { tcx.repr_options_of_def(def_id) }; diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs index 410a069f956..9cc6c16c126 100644 --- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs @@ -315,7 +315,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { if is_host_effect { if let Some(idx) = host_effect_index { - tcx.dcx().span_bug( + tcx.dcx().span_delayed_bug( param.span, format!("parent also has host effect param? index: {idx}, def: {def_id:?}"), ); diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 351ac2eb770..f70bb8c4289 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -640,16 +640,30 @@ pub(super) fn implied_predicates_with_filter( // Now require that immediate supertraits are converted, which will, in // turn, reach indirect supertraits, so we detect cycles now instead of - // overflowing during elaboration. - if matches!(filter, PredicateFilter::SelfOnly) { - for &(pred, span) in implied_bounds { - debug!("superbound: {:?}", pred); - if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() - && bound.polarity == ty::ImplPolarity::Positive - { - tcx.at(span).super_predicates_of(bound.def_id()); + // overflowing during elaboration. Same for implied predicates, which + // make sure we walk into associated type bounds. 
+ match filter { + PredicateFilter::SelfOnly => { + for &(pred, span) in implied_bounds { + debug!("superbound: {:?}", pred); + if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() + && bound.polarity == ty::ImplPolarity::Positive + { + tcx.at(span).super_predicates_of(bound.def_id()); + } + } + } + PredicateFilter::SelfAndAssociatedTypeBounds => { + for &(pred, span) in implied_bounds { + debug!("superbound: {:?}", pred); + if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() + && bound.polarity == ty::ImplPolarity::Positive + { + tcx.at(span).implied_predicates_of(bound.def_id()); + } } } + _ => {} } ty::GenericPredicates { parent: None, predicates: implied_bounds } diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index cb131f1d166..e852ee0f049 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -401,12 +401,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { // check that the `if` expr without `else` is the fn body's expr if expr.span == sp { - return self.get_fn_decl(hir_id).and_then(|(_, fn_decl, _)| { + return self.get_fn_decl(hir_id).map(|(_, fn_decl, _)| { let (ty, span) = match fn_decl.output { hir::FnRetTy::DefaultReturn(span) => ("()".to_string(), span), hir::FnRetTy::Return(ty) => (ty_to_string(ty), ty.span), }; - Some((span, format!("expected `{ty}` because of this return type"))) + (span, format!("expected `{ty}` because of this return type")) }); } } diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index 89cc46dc5ab..81440b0562e 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -77,8 +77,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // coercions from ! to `expected`. 
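The `_match.rs` hunk above turns `get_fn_decl(..).and_then(|..| { ...; Some(v) })` into `.map(|..| v)`: when the closure always wraps its result in `Some`, `and_then` buys nothing over `map`. A tiny illustration with invented names:

```rust
// Invented stand-in for "return type of the enclosing function, if any".
fn return_type_note(ret_ty: Option<&str>) -> Option<String> {
    // `and_then` is for closures that can themselves produce `None`; when the
    // closure unconditionally returns `Some(..)`, `map` says the same thing
    // without the extra wrapping.
    ret_ty.map(|ty| format!("expected `{ty}` because of this return type"))
}

fn main() {
    assert_eq!(
        return_type_note(Some("()")),
        Some("expected `()` because of this return type".to_string())
    );
    assert_eq!(return_type_note(None), None);
}
```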
if ty.is_never() { if let Some(_) = self.typeck_results.borrow().adjustments().get(expr.hir_id) { - self.dcx() - .span_bug(expr.span, "expression with never type wound up being adjusted"); + let reported = self.dcx().span_delayed_bug( + expr.span, + "expression with never type wound up being adjusted", + ); + return Ty::new_error(self.tcx(), reported); } let adj_ty = self.next_ty_var(TypeVariableOrigin { diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 9303e437a96..7d448820ceb 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -846,7 +846,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let item_name = item_segment.ident; let result = self .resolve_fully_qualified_call(span, item_name, ty.normalized, qself.span, hir_id) - .and_then(|r| { + .map(|r| { // lint bare trait if the method is found in the trait if span.edition().at_least_rust_2021() && let Some(diag) = @@ -854,7 +854,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { diag.emit(); } - Ok(r) + r }) .or_else(|error| { let guar = self diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 2d9ec9f6bab..75e4dd5a61c 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -1,7 +1,11 @@ use crate::coercion::CoerceMany; use crate::errors::SuggestPtrNullMut; use crate::fn_ctxt::arg_matrix::{ArgMatrix, Compatibility, Error, ExpectedIdx, ProvidedIdx}; +use crate::fn_ctxt::infer::FnCall; use crate::gather_locals::Declaration; +use crate::method::probe::IsSuggestion; +use crate::method::probe::Mode::MethodCall; +use crate::method::probe::ProbeScope::TraitsInScope; use crate::method::MethodCallee; use crate::TupleArgumentsFlag::*; use crate::{errors, Expectation::*}; @@ -451,7 +455,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { call_expr: &'tcx hir::Expr<'tcx>, ) -> ErrorGuaranteed { // Next, let's construct the error - let (error_span, full_call_span, call_name, is_method) = match &call_expr.kind { + let (error_span, call_ident, full_call_span, call_name, is_method) = match &call_expr.kind { hir::ExprKind::Call( hir::Expr { hir_id, span, kind: hir::ExprKind::Path(qpath), .. }, _, @@ -463,12 +467,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { CtorOf::Struct => "struct", CtorOf::Variant => "enum variant", }; - (call_span, *span, name, false) + (call_span, None, *span, name, false) } else { - (call_span, *span, "function", false) + (call_span, None, *span, "function", false) } } - hir::ExprKind::Call(hir::Expr { span, .. }, _) => (call_span, *span, "function", false), + hir::ExprKind::Call(hir::Expr { span, .. }, _) => { + (call_span, None, *span, "function", false) + } hir::ExprKind::MethodCall(path_segment, _, _, span) => { let ident_span = path_segment.ident.span; let ident_span = if let Some(args) = path_segment.args { @@ -476,7 +482,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else { ident_span }; - (*span, ident_span, "method", true) + (*span, Some(path_segment.ident), ident_span, "method", true) } k => span_bug!(call_span, "checking argument types on a non-call: `{:?}`", k), }; @@ -530,6 +536,103 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let callee_ty = callee_expr .and_then(|callee_expr| self.typeck_results.borrow().expr_ty_adjusted_opt(callee_expr)); + // Obtain another method on `Self` that have similar name. 
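The `expr.rs` hunk above ends on the same recovery pattern used throughout these changes: report or delay the diagnostic, keep the returned proof that an error exists, and hand back an error type so type checking can keep going instead of crashing. A rough model of that flow; `ErrorGuaranteed`, `Ty`, and `Checker` here are simplified stand-ins, not rustc's real types:

```rust
/// Zero-sized proof that at least one diagnostic has been reported (or delayed).
#[derive(Debug, Clone, Copy)]
struct ErrorGuaranteed;

#[derive(Debug)]
enum Ty {
    Never,
    /// Poison type: downstream checks see it and stay quiet instead of
    /// producing follow-up errors or crashing.
    Error(ErrorGuaranteed),
}

struct Checker {
    delayed: Vec<&'static str>,
}

impl Checker {
    fn span_delayed_bug(&mut self, msg: &'static str) -> ErrorGuaranteed {
        self.delayed.push(msg);
        ErrorGuaranteed
    }

    fn check_never_expr(&mut self, already_adjusted: bool) -> Ty {
        if already_adjusted {
            // Instead of an immediate `span_bug!` ICE: record the problem and
            // hand back an error type built from the proof of reporting.
            let reported = self.span_delayed_bug("never-typed expression was adjusted");
            return Ty::Error(reported);
        }
        Ty::Never
    }
}

fn main() {
    let mut checker = Checker { delayed: Vec::new() };
    println!("{:?}", checker.check_never_expr(true));
    println!("{:?}", checker.check_never_expr(false));
    assert_eq!(checker.delayed.len(), 1);
}
```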
+ let similar_assoc = |call_name: Ident| -> Option<(ty::AssocItem, ty::FnSig<'_>)> { + if let Some(callee_ty) = callee_ty + && let Ok(Some(assoc)) = self.probe_op( + call_name.span, + MethodCall, + Some(call_name), + None, + IsSuggestion(true), + callee_ty.peel_refs(), + callee_expr.unwrap().hir_id, + TraitsInScope, + |mut ctxt| ctxt.probe_for_similar_candidate(), + ) + && let ty::AssocKind::Fn = assoc.kind + && assoc.fn_has_self_parameter + { + let args = self.infcx.fresh_args_for_item(call_name.span, assoc.def_id); + let fn_sig = tcx.fn_sig(assoc.def_id).instantiate(tcx, args); + + self.instantiate_binder_with_fresh_vars(call_name.span, FnCall, fn_sig); + } + None + }; + + let suggest_confusable = |err: &mut DiagnosticBuilder<'_>| { + let Some(call_name) = call_ident else { + return; + }; + let Some(callee_ty) = callee_ty else { + return; + }; + let input_types: Vec<Ty<'_>> = provided_arg_tys.iter().map(|(ty, _)| *ty).collect(); + // Check for other methods in the following order + // - methods marked as `rustc_confusables` with the provided arguments + // - methods with the same argument type/count and short levenshtein distance + // - methods marked as `rustc_confusables` (done) + // - methods with short levenshtein distance + + // Look for commonly confusable method names considering arguments. + if let Some(_name) = self.confusable_method_name( + err, + callee_ty.peel_refs(), + call_name, + Some(input_types.clone()), + ) { + return; + } + // Look for method names with short levenshtein distance, considering arguments. + if let Some((assoc, fn_sig)) = similar_assoc(call_name) + && fn_sig.inputs()[1..] + .iter() + .zip(input_types.iter()) + .all(|(expected, found)| self.can_coerce(*expected, *found)) + && fn_sig.inputs()[1..].len() == input_types.len() + { + err.span_suggestion_verbose( + call_name.span, + format!("you might have meant to use `{}`", assoc.name), + assoc.name, + Applicability::MaybeIncorrect, + ); + return; + } + // Look for commonly confusable method names disregarding arguments. + if let Some(_name) = + self.confusable_method_name(err, callee_ty.peel_refs(), call_name, None) + { + return; + } + // Look for similarly named methods with levenshtein distance with the right + // number of arguments. + if let Some((assoc, fn_sig)) = similar_assoc(call_name) + && fn_sig.inputs()[1..].len() == input_types.len() + { + err.span_note( + tcx.def_span(assoc.def_id), + format!( + "there's is a method with similar name `{}`, but the arguments don't match", + assoc.name, + ), + ); + return; + } + // Fallthrough: look for similarly named methods with levenshtein distance. 
+ if let Some((assoc, _)) = similar_assoc(call_name) { + err.span_note( + tcx.def_span(assoc.def_id), + format!( + "there's is a method with similar name `{}`, but their argument count \ + doesn't match", + assoc.name, + ), + ); + return; + } + }; // A "softer" version of the `demand_compatible`, which checks types without persisting them, // and treats error types differently // This will allow us to "probe" for other argument orders that would likely have been correct @@ -694,6 +797,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(mismatch_idx), is_method, ); + suggest_confusable(&mut err); return err.emit(); } } @@ -718,7 +822,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if cfg!(debug_assertions) { span_bug!(error_span, "expected errors from argument matrix"); } else { - return tcx.dcx().emit_err(errors::ArgMismatchIndeterminate { span: error_span }); + let mut err = + tcx.dcx().create_err(errors::ArgMismatchIndeterminate { span: error_span }); + suggest_confusable(&mut err); + return err.emit(); } } @@ -733,7 +840,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let trace = mk_trace(provided_span, formal_and_expected_inputs[*expected_idx], provided_ty); if !matches!(trace.cause.as_failure_code(*e), FailureCode::Error0308) { - reported = Some(self.err_ctxt().report_and_explain_type_error(trace, *e).emit()); + let mut err = self.err_ctxt().report_and_explain_type_error(trace, *e); + suggest_confusable(&mut err); + reported = Some(err.emit()); return false; } true @@ -802,6 +911,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(expected_idx.as_usize()), is_method, ); + suggest_confusable(&mut err); return err.emit(); } @@ -829,6 +939,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .with_code(err_code.to_owned()) }; + suggest_confusable(&mut err); // As we encounter issues, keep track of what we want to provide for the suggestion let mut labels = vec![]; // If there is a single error, we give a specific suggestion; otherwise, we change to diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index e57717c25d9..f09af999957 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -1061,20 +1061,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return; } - let scope = self - .tcx - .hir() - .parent_iter(id) - .filter(|(_, node)| { - matches!( - node, - Node::Expr(Expr { kind: ExprKind::Closure(..), .. }) - | Node::Item(_) - | Node::TraitItem(_) - | Node::ImplItem(_) - ) - }) - .next(); + let scope = self.tcx.hir().parent_iter(id).find(|(_, node)| { + matches!( + node, + Node::Expr(Expr { kind: ExprKind::Closure(..), .. }) + | Node::Item(_) + | Node::TraitItem(_) + | Node::ImplItem(_) + ) + }); let in_closure = matches!(scope, Some((_, Node::Expr(Expr { kind: ExprKind::Closure(..), .. 
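The `suggest_confusable` closure above states its search order explicitly: `#[rustc_confusables]` aliases that also fit the provided arguments, then close-by-edit-distance names that fit the arguments, then aliases alone, then edit distance alone. The sketch below reproduces that priority ordering over plain data; the `Method` table and the naive `edit_distance` function stand in for rustc's probe machinery and are not its real API:

```rust
struct Method {
    name: &'static str,
    /// Number of non-`self` arguments.
    arity: usize,
    /// Stand-in for names listed in `#[rustc_confusables("...")]`.
    confusables: &'static [&'static str],
}

fn edit_distance(a: &str, b: &str) -> usize {
    // Naive Levenshtein distance; fine for short identifiers.
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = usize::from(ca != cb);
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

/// Suggest a replacement for a method name that failed to resolve, preferring
/// candidates that also match the number of provided arguments.
fn suggest(methods: &[Method], wanted: &str, args: usize) -> Option<&'static str> {
    let confusable = |m: &&Method| m.confusables.iter().any(|&c| c == wanted);
    let close = |m: &&Method| edit_distance(m.name, wanted) <= 2;
    let fits = |m: &&Method| m.arity == args;

    methods
        .iter()
        .find(|m| confusable(m) && fits(m))
        .or_else(|| methods.iter().find(|m| close(m) && fits(m)))
        .or_else(|| methods.iter().find(|m| confusable(m)))
        .or_else(|| methods.iter().find(|m| close(m)))
        .map(|m| m.name)
}

fn main() {
    let methods = [
        Method { name: "write_fmt", arity: 1, confusables: &["write"] },
        Method { name: "write_all", arity: 1, confusables: &[] },
    ];
    assert_eq!(suggest(&methods, "write", 1), Some("write_fmt"));
    assert_eq!(suggest(&methods, "writeall", 1), Some("write_all"));
}
```

Putting the argument-aware checks first is the design point: a rename suggestion that would also type-check is far more likely to be directly applicable than one that merely looks similar.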
})))); diff --git a/compiler/rustc_hir_typeck/src/mem_categorization.rs b/compiler/rustc_hir_typeck/src/mem_categorization.rs index c300ec7444b..1a860aa4067 100644 --- a/compiler/rustc_hir_typeck/src/mem_categorization.rs +++ b/compiler/rustc_hir_typeck/src/mem_categorization.rs @@ -582,7 +582,8 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { match ty.kind() { ty::Tuple(args) => Ok(args.len()), _ => { - self.tcx().dcx().span_bug(span, "tuple pattern not applied to a tuple"); + self.tcx().dcx().span_delayed_bug(span, "tuple pattern not applied to a tuple"); + Err(()) } } } diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index a58e194e20a..7117a59c409 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -54,7 +54,7 @@ pub use self::PickKind::*; #[derive(Clone, Copy, Debug)] pub struct IsSuggestion(pub bool); -struct ProbeContext<'a, 'tcx> { +pub(crate) struct ProbeContext<'a, 'tcx> { fcx: &'a FnCtxt<'a, 'tcx>, span: Span, mode: Mode, @@ -355,7 +355,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .unwrap() } - fn probe_op<OP, R>( + pub(crate) fn probe_op<OP, R>( &'a self, span: Span, mode: Mode, @@ -1750,7 +1750,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { /// Similarly to `probe_for_return_type`, this method attempts to find the best matching /// candidate method where the method name may have been misspelled. Similarly to other /// edit distance based suggestions, we provide at most one such suggestion. - fn probe_for_similar_candidate(&mut self) -> Result<Option<ty::AssocItem>, MethodError<'tcx>> { + pub(crate) fn probe_for_similar_candidate( + &mut self, + ) -> Result<Option<ty::AssocItem>, MethodError<'tcx>> { debug!("probing for method names similar to {:?}", self.method_name); self.probe(|_| { @@ -1766,6 +1768,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { ); pcx.allow_similar_names = true; pcx.assemble_inherent_candidates(); + pcx.assemble_extension_candidates_for_all_traits(); let method_names = pcx.candidate_method_names(|_| true); pcx.allow_similar_names = false; @@ -1775,6 +1778,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { pcx.reset(); pcx.method_name = Some(method_name); pcx.assemble_inherent_candidates(); + pcx.assemble_extension_candidates_for_all_traits(); pcx.pick_core().and_then(|pick| pick.ok()).map(|pick| pick.item) }) .collect(); @@ -1942,7 +1946,21 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { let hir_id = self.fcx.tcx.local_def_id_to_hir_id(local_def_id); let attrs = self.fcx.tcx.hir().attrs(hir_id); for attr in attrs { - let sym::doc = attr.name_or_empty() else { + if sym::doc == attr.name_or_empty() { + } else if sym::rustc_confusables == attr.name_or_empty() { + let Some(confusables) = attr.meta_item_list() else { + continue; + }; + // #[rustc_confusables("foo", "bar"))] + for n in confusables { + if let Some(lit) = n.lit() + && name.as_str() == lit.symbol.as_str() + { + return true; + } + } + continue; + } else { continue; }; let Some(values) = attr.meta_item_list() else { diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index cc111af5d8f..f0586328835 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -23,6 +23,7 @@ use rustc_hir::PatKind::Binding; use rustc_hir::PathSegment; use rustc_hir::{ExprKind, Node, QPath}; use rustc_infer::infer::{ + self, type_variable::{TypeVariableOrigin, TypeVariableOriginKind}, 
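The `suggestions.rs` hunk above also collapses `.filter(..).next()` into `.find(..)`; both stop at the first match, but `find` states the intent directly. A minimal illustration:

```rust
fn main() {
    let nodes = ["expr", "closure", "item", "impl-item"];

    // `.filter(..).next()` builds a lazy adapter only to immediately ask for
    // its first element; `.find(..)` says the same thing directly and stops
    // at the first match just as early.
    let first_scope = nodes.iter().find(|n| matches!(**n, "closure" | "item"));

    assert_eq!(first_scope, Some(&"closure"));
}
```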
RegionVariableOrigin, }; @@ -274,7 +275,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .span_if_local(def_id) .unwrap_or_else(|| self.tcx.def_span(def_id)); err.span_label(sp, format!("private {kind} defined here")); - self.suggest_valid_traits(&mut err, out_of_scope_traits, true); + self.suggest_valid_traits(&mut err, item_name, out_of_scope_traits, true); err.emit(); } @@ -369,9 +370,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; if let Some(file) = file { err.note(format!("the full type name has been written to '{}'", file.display())); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note("consider using `--verbose` to print the full type name to the console"); } err @@ -496,9 +495,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Some(file) = ty_file { err.note(format!("the full type name has been written to '{}'", file.display(),)); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note("consider using `--verbose` to print the full type name to the console"); } if rcvr_ty.references_error() { err.downgrade_to_delayed_bug(); @@ -1209,32 +1206,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.note(format!( "the {item_kind} was found for\n{type_candidates}{additional_types}" )); - } else { - 'outer: for inherent_impl_did in - self.tcx.inherent_impls(adt.did()).into_iter().flatten() - { - for inherent_method in - self.tcx.associated_items(inherent_impl_did).in_definition_order() - { - if let Some(attr) = self - .tcx - .get_attr(inherent_method.def_id, sym::rustc_confusables) - && let Some(candidates) = parse_confusables(attr) - && candidates.contains(&item_name.name) - { - err.span_suggestion_verbose( - item_name.span, - format!( - "you might have meant to use `{}`", - inherent_method.name - ), - inherent_method.name, - Applicability::MaybeIncorrect, - ); - break 'outer; - } - } - } } } } else { @@ -1260,6 +1231,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { label_span_not_found(&mut err); } + let confusable_suggested = self.confusable_method_name( + &mut err, + rcvr_ty, + item_name, + args.map(|args| { + args.iter() + .map(|expr| { + self.node_ty_opt(expr.hir_id).unwrap_or_else(|| { + self.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::MiscVariable, + span: expr.span, + }) + }) + }) + .collect() + }), + ); + // Don't suggest (for example) `expr.field.clone()` if `expr.clone()` // can't be called due to `typeof(expr): Clone` not holding. if unsatisfied_predicates.is_empty() { @@ -1361,31 +1350,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else if let Some(similar_candidate) = similar_candidate { // Don't emit a suggestion if we found an actual method // that had unsatisfied trait bounds - if unsatisfied_predicates.is_empty() { - let def_kind = similar_candidate.kind.as_def_kind(); - // Methods are defined within the context of a struct and their first parameter is always self, - // which represents the instance of the struct the method is being called on - // Associated functions don’t take self as a parameter and - // they are not methods because they don’t have an instance of the struct to work with. 
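Two hunks above drop a `format!` around a message that has no interpolations: a plain string literal already satisfies an API that accepts anything convertible into a message, and it skips an allocation. A sketch with an invented `note` function:

```rust
/// Stand-in for a diagnostic API that accepts anything convertible to a message.
fn note(msg: impl Into<String>) {
    println!("note: {}", msg.into());
}

fn main() {
    let path = "/tmp/long-type-123.txt";

    // Interpolation genuinely needs `format!`...
    note(format!("the full type name has been written to '{path}'"));

    // ...but a fixed message does not; passing the literal avoids an
    // intermediate `String` and a `clippy::useless_format`-style wart.
    note("consider using `--verbose` to print the full type name to the console");
}
```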
- if def_kind == DefKind::AssocFn && similar_candidate.fn_has_self_parameter { - err.span_suggestion( - span, - "there is a method with a similar name", - similar_candidate.name, - Applicability::MaybeIncorrect, - ); - } else { - err.span_suggestion( - span, - format!( - "there is {} {} with a similar name", - self.tcx.def_kind_descr_article(def_kind, similar_candidate.def_id), - self.tcx.def_kind_descr(def_kind, similar_candidate.def_id) - ), - similar_candidate.name, - Applicability::MaybeIncorrect, - ); - } + if unsatisfied_predicates.is_empty() + // ...or if we already suggested that name because of `rustc_confusable` annotation. + && Some(similar_candidate.name) != confusable_suggested + { + self.find_likely_intended_associated_item( + &mut err, + similar_candidate, + span, + args, + mode, + ); } } // If an appropriate error source is not found, check method chain for possible candiates @@ -1437,6 +1412,146 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(err) } + fn find_likely_intended_associated_item( + &self, + err: &mut DiagnosticBuilder<'_>, + similar_candidate: ty::AssocItem, + span: Span, + args: Option<&'tcx [hir::Expr<'tcx>]>, + mode: Mode, + ) { + let tcx = self.tcx; + let def_kind = similar_candidate.kind.as_def_kind(); + let an = self.tcx.def_kind_descr_article(def_kind, similar_candidate.def_id); + let msg = format!( + "there is {an} {} `{}` with a similar name", + self.tcx.def_kind_descr(def_kind, similar_candidate.def_id), + similar_candidate.name, + ); + // Methods are defined within the context of a struct and their first parameter + // is always `self`, which represents the instance of the struct the method is + // being called on Associated functions don’t take self as a parameter and they are + // not methods because they don’t have an instance of the struct to work with. + if def_kind == DefKind::AssocFn { + let ty_args = self.infcx.fresh_args_for_item(span, similar_candidate.def_id); + let fn_sig = tcx.fn_sig(similar_candidate.def_id).instantiate(tcx, ty_args); + let fn_sig = self.instantiate_binder_with_fresh_vars(span, infer::FnCall, fn_sig); + if similar_candidate.fn_has_self_parameter { + if let Some(args) = args + && fn_sig.inputs()[1..].len() == args.len() + { + // We found a method with the same number of arguments as the method + // call expression the user wrote. + err.span_suggestion_verbose( + span, + msg, + similar_candidate.name, + Applicability::MaybeIncorrect, + ); + } else { + // We found a method but either the expression is not a method call or + // the argument count didn't match. + err.span_help( + tcx.def_span(similar_candidate.def_id), + format!( + "{msg}{}", + if let None = args { "" } else { ", but with different arguments" }, + ), + ); + } + } else if let Some(args) = args + && fn_sig.inputs().len() == args.len() + { + // We have fn call expression and the argument count match the associated + // function we found. + err.span_suggestion_verbose( + span, + msg, + similar_candidate.name, + Applicability::MaybeIncorrect, + ); + } else { + err.span_help(tcx.def_span(similar_candidate.def_id), msg); + } + } else if let Mode::Path = mode + && args.unwrap_or(&[]).is_empty() + { + // We have an associated item syntax and we found something that isn't an fn. + err.span_suggestion_verbose( + span, + msg, + similar_candidate.name, + Applicability::MaybeIncorrect, + ); + } else { + // The expression is a function or method call, but the item we found is an + // associated const or type. 
+ err.span_help(tcx.def_span(similar_candidate.def_id), msg); + } + } + + pub(crate) fn confusable_method_name( + &self, + err: &mut DiagnosticBuilder<'_>, + rcvr_ty: Ty<'tcx>, + item_name: Ident, + call_args: Option<Vec<Ty<'tcx>>>, + ) -> Option<Symbol> { + if let ty::Adt(adt, adt_args) = rcvr_ty.kind() { + for inherent_impl_did in self.tcx.inherent_impls(adt.did()).into_iter().flatten() { + for inherent_method in + self.tcx.associated_items(inherent_impl_did).in_definition_order() + { + if let Some(attr) = + self.tcx.get_attr(inherent_method.def_id, sym::rustc_confusables) + && let Some(candidates) = parse_confusables(attr) + && candidates.contains(&item_name.name) + && let ty::AssocKind::Fn = inherent_method.kind + { + let args = + ty::GenericArgs::identity_for_item(self.tcx, inherent_method.def_id) + .rebase_onto( + self.tcx, + inherent_method.container_id(self.tcx), + adt_args, + ); + let fn_sig = + self.tcx.fn_sig(inherent_method.def_id).instantiate(self.tcx, args); + let fn_sig = self.instantiate_binder_with_fresh_vars( + item_name.span, + infer::FnCall, + fn_sig, + ); + if let Some(ref args) = call_args + && fn_sig.inputs()[1..] + .iter() + .zip(args.into_iter()) + .all(|(expected, found)| self.can_coerce(*expected, *found)) + && fn_sig.inputs()[1..].len() == args.len() + { + err.span_suggestion_verbose( + item_name.span, + format!("you might have meant to use `{}`", inherent_method.name), + inherent_method.name, + Applicability::MaybeIncorrect, + ); + return Some(inherent_method.name); + } else if let None = call_args { + err.span_note( + self.tcx.def_span(inherent_method.def_id), + format!( + "you might have meant to use method `{}`", + inherent_method.name, + ), + ); + return Some(inherent_method.name); + } + } + } + } + } + None + } fn note_candidates_on_method_error( &self, rcvr_ty: Ty<'tcx>, @@ -2768,6 +2883,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn suggest_valid_traits( &self, err: &mut DiagnosticBuilder<'_>, + item_name: Ident, valid_out_of_scope_traits: Vec<DefId>, explain: bool, ) -> bool { @@ -2786,9 +2902,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.help("items from traits can only be used if the trait is in scope"); } let msg = format!( - "the following {traits_are} implemented but not in scope; \ - perhaps add a `use` for {one_of_them}:", - traits_are = if candidates.len() == 1 { "trait is" } else { "traits are" }, + "{this_trait_is} implemented but not in scope; perhaps you want to import \ + {one_of_them}", + this_trait_is = if candidates.len() == 1 { + format!( + "trait `{}` which provides `{item_name}` is", + self.tcx.item_name(candidates[0]), + ) + } else { + format!("the following traits which provide `{item_name}` are") + }, one_of_them = if candidates.len() == 1 { "it" } else { "one of them" }, ); @@ -2996,7 +3119,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } } - if self.suggest_valid_traits(err, valid_out_of_scope_traits, true) { + if self.suggest_valid_traits(err, item_name, valid_out_of_scope_traits, true) { return; } @@ -3282,7 +3405,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { [] => {} [trait_info] if trait_info.def_id.is_local() => { if impls_trait(trait_info.def_id) { - self.suggest_valid_traits(err, vec![trait_info.def_id], false); + self.suggest_valid_traits(err, item_name, vec![trait_info.def_id], false); } else { err.subdiagnostic( self.dcx(), @@ -3309,7 +3432,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { )); for (i, trait_info) in trait_infos.iter().enumerate() { if impls_trait(trait_info.def_id) { - self.suggest_valid_traits(err, vec![trait_info.def_id], false); 
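The reworked `suggest_valid_traits` message above names the trait when there is exactly one candidate and falls back to a plural form otherwise. A small stand-alone version of that message-building logic (the function and the example trait names are invented):

```rust
fn out_of_scope_msg(item_name: &str, candidates: &[&str]) -> String {
    let this_trait_is = if let [only] = candidates {
        format!("trait `{only}` which provides `{item_name}` is")
    } else {
        format!("the following traits which provide `{item_name}` are")
    };
    let one_of_them = if candidates.len() == 1 { "it" } else { "one of them" };
    format!("{this_trait_is} implemented but not in scope; perhaps you want to import {one_of_them}")
}

fn main() {
    assert_eq!(
        out_of_scope_msg("write_fmt", &["std::fmt::Write"]),
        "trait `std::fmt::Write` which provides `write_fmt` is implemented but not in scope; \
         perhaps you want to import it"
    );
    println!("{}", out_of_scope_msg("sum", &["Iterator", "Sum"]));
}
```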
+ self.suggest_valid_traits( + err, + item_name, + vec![trait_info.def_id], + false, + ); } msg.push_str(&format!( "\ncandidate #{}: `{}`", diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs index b15c9ef9018..c28c1c77603 100644 --- a/compiler/rustc_hir_typeck/src/pat.rs +++ b/compiler/rustc_hir_typeck/src/pat.rs @@ -2035,7 +2035,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { slice: Option<&'tcx Pat<'tcx>>, span: Span, ) -> Option<Ty<'tcx>> { - if !slice.is_none() { + if slice.is_some() { return None; } diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 99882a42abc..9f70fee993d 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -175,8 +175,12 @@ impl CanonicalizeMode for CanonicalizeQueryResponse { ), ty::ReVar(vid) => { - let universe = - infcx.inner.borrow_mut().unwrap_region_constraints().var_universe(vid); + let universe = infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .probe_value(vid) + .unwrap_err(); canonicalizer.canonical_var_for_region( CanonicalVarInfo { kind: CanonicalVarKind::Region(universe) }, r, diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index d40f3f501f5..7715f2ef43a 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -1938,7 +1938,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", path.display(), )); - diag.note(format!("consider using `--verbose` to print the full type name to the console")); + diag.note("consider using `--verbose` to print the full type name to the console"); } } } @@ -2437,6 +2437,14 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { let suggestion = if has_lifetimes { format!(" + {lt_name}") } else { format!(": {lt_name}") }; suggs.push((sp, suggestion)) + } else if let GenericKind::Alias(ref p) = bound_kind + && let ty::Projection = p.kind(self.tcx) + && let DefKind::AssocTy = self.tcx.def_kind(p.def_id) + && let Some(ty::ImplTraitInTraitData::Trait { .. }) = + self.tcx.opt_rpitit_info(p.def_id) + { + // The lifetime found in the `impl` is longer than the one on the RPITIT. + // Do not suggest `<Type as Trait>::{opaque}: 'static`. } else if let Some(generics) = self.tcx.hir().get_generics(suggestion_scope) { let pred = format!("{bound_kind}: {lt_name}"); let suggestion = format!("{} {}", generics.add_where_or_trailing_comma(), pred); diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 8b5710ee9ed..c30f42c1656 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -374,7 +374,10 @@ impl<'tcx> ty::InferCtxtLike for InferCtxt<'tcx> { } fn universe_of_lt(&self, lt: ty::RegionVid) -> Option<ty::UniverseIndex> { - Some(self.universe_of_region_vid(lt)) + match self.inner.borrow_mut().unwrap_region_constraints().probe_value(lt) { + Err(universe) => Some(universe), + Ok(_) => None, + } } fn root_ty_var(&self, vid: TyVid) -> TyVid { @@ -1155,11 +1158,6 @@ impl<'tcx> InferCtxt<'tcx> { self.inner.borrow_mut().unwrap_region_constraints().universe(r) } - /// Return the universe that the region variable `r` was created in. 
- pub fn universe_of_region_vid(&self, vid: ty::RegionVid) -> ty::UniverseIndex { - self.inner.borrow_mut().unwrap_region_constraints().var_universe(vid) - } - /// Number of region variables created so far. pub fn num_region_vars(&self) -> usize { self.inner.borrow_mut().unwrap_region_constraints().num_region_vars() diff --git a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs index b2bcbbf2e53..9e2f3a10b6a 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs @@ -90,11 +90,11 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { } } -struct LeakCheck<'me, 'tcx> { +struct LeakCheck<'a, 'b, 'tcx> { tcx: TyCtxt<'tcx>, outer_universe: ty::UniverseIndex, - mini_graph: &'me MiniGraph<'tcx>, - rcc: &'me RegionConstraintCollector<'me, 'tcx>, + mini_graph: &'a MiniGraph<'tcx>, + rcc: &'a mut RegionConstraintCollector<'b, 'tcx>, // Initially, for each SCC S, stores a placeholder `P` such that `S = P` // must hold. @@ -117,13 +117,13 @@ struct LeakCheck<'me, 'tcx> { scc_universes: IndexVec<LeakCheckScc, SccUniverse<'tcx>>, } -impl<'me, 'tcx> LeakCheck<'me, 'tcx> { +impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> { fn new( tcx: TyCtxt<'tcx>, outer_universe: ty::UniverseIndex, max_universe: ty::UniverseIndex, - mini_graph: &'me MiniGraph<'tcx>, - rcc: &'me RegionConstraintCollector<'me, 'tcx>, + mini_graph: &'a MiniGraph<'tcx>, + rcc: &'a mut RegionConstraintCollector<'b, 'tcx>, ) -> Self { let dummy_scc_universe = SccUniverse { universe: max_universe, region: None }; Self { diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index 71adf526097..ee97dd36807 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -8,12 +8,11 @@ use super::{ }; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::intern::Interned; use rustc_data_structures::sync::Lrc; use rustc_data_structures::undo_log::UndoLogs; use rustc_data_structures::unify as ut; use rustc_index::IndexVec; -use rustc_middle::infer::unify_key::{RegionVidKey, UnifiedRegion}; +use rustc_middle::infer::unify_key::{RegionVariableValue, RegionVidKey}; use rustc_middle::ty::ReStatic; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::ty::{ReBound, ReVar}; @@ -292,6 +291,18 @@ type CombineMap<'tcx> = FxHashMap<TwoRegions<'tcx>, RegionVid>; #[derive(Debug, Clone, Copy)] pub struct RegionVariableInfo { pub origin: RegionVariableOrigin, + // FIXME: This is only necessary for `fn take_and_reset_data` and + // `lexical_region_resolve`. We should rework `lexical_region_resolve` + // in the near/medium future anyways and could move the unverse info + // for `fn take_and_reset_data` into a separate table which is + // only populated when needed. + // + // For both of these cases it is fine that this can diverge from the + // actual universe of the variable, which is directly stored in the + // unification table for unknown region variables. At some point we could + // stop emitting bidirectional outlives constraints if equate succeeds. + // This would be currently unsound as it would cause us to drop the universe + // changes in `lexical_region_resolve`. pub universe: ty::UniverseIndex, } @@ -395,7 +406,11 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { // `RegionConstraintData` contains the relationship here. 
if *any_unifications { *any_unifications = false; - self.unification_table_mut().reset_unifications(|_| UnifiedRegion::new(None)); + // Manually inlined `self.unification_table_mut()` as `self` is used in the closure. + ut::UnificationTable::with_log(&mut self.storage.unification_table, &mut self.undo_log) + .reset_unifications(|key| RegionVariableValue::Unknown { + universe: self.storage.var_infos[key.vid].universe, + }); } data @@ -422,18 +437,13 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { ) -> RegionVid { let vid = self.var_infos.push(RegionVariableInfo { origin, universe }); - let u_vid = self.unification_table_mut().new_key(UnifiedRegion::new(None)); + let u_vid = self.unification_table_mut().new_key(RegionVariableValue::Unknown { universe }); assert_eq!(vid, u_vid.vid); self.undo_log.push(AddVar(vid)); debug!("created new region variable {:?} in {:?} with origin {:?}", vid, universe, origin); vid } - /// Returns the universe for the given variable. - pub(super) fn var_universe(&self, vid: RegionVid) -> ty::UniverseIndex { - self.var_infos[vid].universe - } - /// Returns the origin for the given variable. pub(super) fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin { self.var_infos[vid].origin @@ -467,26 +477,41 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { pub(super) fn make_eqregion( &mut self, origin: SubregionOrigin<'tcx>, - sub: Region<'tcx>, - sup: Region<'tcx>, + a: Region<'tcx>, + b: Region<'tcx>, ) { - if sub != sup { + if a != b { // Eventually, it would be nice to add direct support for // equating regions. - self.make_subregion(origin.clone(), sub, sup); - self.make_subregion(origin, sup, sub); - - match (sub, sup) { - (Region(Interned(ReVar(sub), _)), Region(Interned(ReVar(sup), _))) => { - debug!("make_eqregion: unifying {:?} with {:?}", sub, sup); - self.unification_table_mut().union(*sub, *sup); - self.any_unifications = true; + self.make_subregion(origin.clone(), a, b); + self.make_subregion(origin, b, a); + + match (a.kind(), b.kind()) { + (ty::ReVar(a), ty::ReVar(b)) => { + debug!("make_eqregion: unifying {:?} with {:?}", a, b); + if self.unification_table_mut().unify_var_var(a, b).is_ok() { + self.any_unifications = true; + } + } + (ty::ReVar(vid), _) => { + debug!("make_eqregion: unifying {:?} with {:?}", vid, b); + if self + .unification_table_mut() + .unify_var_value(vid, RegionVariableValue::Known { value: b }) + .is_ok() + { + self.any_unifications = true; + }; } - (Region(Interned(ReVar(vid), _)), value) - | (value, Region(Interned(ReVar(vid), _))) => { - debug!("make_eqregion: unifying {:?} with {:?}", vid, value); - self.unification_table_mut().union_value(*vid, UnifiedRegion::new(Some(value))); - self.any_unifications = true; + (_, ty::ReVar(vid)) => { + debug!("make_eqregion: unifying {:?} with {:?}", a, vid); + if self + .unification_table_mut() + .unify_var_value(vid, RegionVariableValue::Known { value: a }) + .is_ok() + { + self.any_unifications = true; + }; } (_, _) => {} } @@ -603,18 +628,21 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { tcx: TyCtxt<'tcx>, vid: ty::RegionVid, ) -> ty::Region<'tcx> { - let mut ut = self.unification_table_mut(); // FIXME(rust-lang/ena#42): unnecessary mut + let mut ut = self.unification_table_mut(); let root_vid = ut.find(vid).vid; - let resolved = ut - .probe_value(root_vid) - .get_value_ignoring_universes() - .unwrap_or_else(|| ty::Region::new_var(tcx, root_vid)); - - // Don't resolve a variable to a region that it cannot name. 
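The `reset_unifications` hunk above carries the comment about manually inlining `self.unification_table_mut()`: calling a `&mut self` helper would borrow all of `self` while the closure passed to `reset_unifications` still needs `self.storage.var_infos`. Accessing the fields directly keeps the two borrows disjoint. A stand-alone reduction of that borrow-checker situation, with invented types:

```rust
struct VarInfo {
    universe: u32,
}

struct Table {
    values: Vec<u32>,
}

impl Table {
    fn reset_with(&mut self, mut f: impl FnMut(usize) -> u32) {
        for (key, slot) in self.values.iter_mut().enumerate() {
            *slot = f(key);
        }
    }
}

struct Collector {
    var_infos: Vec<VarInfo>,
    table: Table,
}

impl Collector {
    /// Helper kept only to show what the comment in the diff refers to.
    #[allow(dead_code)]
    fn table_mut(&mut self) -> &mut Table {
        &mut self.table
    }

    fn reset(&mut self) {
        // This would not compile: `self.table_mut()` mutably borrows *all* of
        // `self`, and the closure still needs `self.var_infos`:
        //
        //     self.table_mut().reset_with(|key| self.var_infos[key].universe);
        //
        // Borrowing the fields separately keeps the two borrows disjoint,
        // which is exactly what "manually inlining" the helper achieves.
        let var_infos = &self.var_infos;
        self.table.reset_with(|key| var_infos[key].universe);
    }
}

fn main() {
    let mut c = Collector {
        var_infos: vec![VarInfo { universe: 0 }, VarInfo { universe: 3 }],
        table: Table { values: vec![7, 7] },
    };
    c.reset();
    assert_eq!(c.table.values, [0, 3]);
}
```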
- if self.var_universe(vid).can_name(self.universe(resolved)) { - resolved - } else { - ty::Region::new_var(tcx, vid) + match ut.probe_value(root_vid) { + RegionVariableValue::Known { value } => value, + RegionVariableValue::Unknown { .. } => ty::Region::new_var(tcx, root_vid), + } + } + + pub fn probe_value( + &mut self, + vid: ty::RegionVid, + ) -> Result<ty::Region<'tcx>, ty::UniverseIndex> { + match self.unification_table_mut().probe_value(vid) { + RegionVariableValue::Known { value } => Ok(value), + RegionVariableValue::Unknown { universe } => Err(universe), } } @@ -654,7 +682,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { new_r } - pub fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex { + pub fn universe(&mut self, region: Region<'tcx>) -> ty::UniverseIndex { match *region { ty::ReStatic | ty::ReErased @@ -662,7 +690,10 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { | ty::ReEarlyParam(..) | ty::ReError(_) => ty::UniverseIndex::ROOT, ty::RePlaceholder(placeholder) => placeholder.universe, - ty::ReVar(vid) => self.var_universe(vid), + ty::ReVar(vid) => match self.probe_value(vid) { + Ok(value) => self.universe(value), + Err(universe) => universe, + }, ty::ReBound(..) => bug!("universe(): encountered bound region {:?}", region), } } diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs index c4c9ddb1ad8..2e1ea19078c 100644 --- a/compiler/rustc_infer/src/infer/relate/generalize.rs +++ b/compiler/rustc_infer/src/infer/relate/generalize.rs @@ -26,13 +26,13 @@ impl<'tcx> InferCtxt<'tcx> { /// This is *not* expected to be used anywhere except for an implementation of /// `TypeRelation`. Do not use this, and instead please use `At::eq`, for all /// other usecases (i.e. setting the value of a type var). - #[instrument(level = "debug", skip(self, relation, target_is_expected))] + #[instrument(level = "debug", skip(self, relation))] pub fn instantiate_ty_var<R: ObligationEmittingRelation<'tcx>>( &self, relation: &mut R, target_is_expected: bool, target_vid: ty::TyVid, - ambient_variance: ty::Variance, + instantiation_variance: ty::Variance, source_ty: Ty<'tcx>, ) -> RelateResult<'tcx, ()> { debug_assert!(self.inner.borrow_mut().type_variables().probe(target_vid).is_unknown()); @@ -46,7 +46,7 @@ impl<'tcx> InferCtxt<'tcx> { // // We then relate `generalized_ty <: source_ty`,adding constraints like `'x: '?2` and `?1 <: ?3`. let Generalization { value_may_be_infer: generalized_ty, has_unconstrained_ty_var } = - self.generalize(relation.span(), target_vid, ambient_variance, source_ty)?; + self.generalize(relation.span(), target_vid, instantiation_variance, source_ty)?; // Constrain `b_vid` to the generalized type `generalized_ty`. if let &ty::Infer(ty::TyVar(generalized_vid)) = generalized_ty.kind() { @@ -73,7 +73,7 @@ impl<'tcx> InferCtxt<'tcx> { // the alias can be normalized to something which does not // mention `?0`. if self.next_trait_solver() { - let (lhs, rhs, direction) = match ambient_variance { + let (lhs, rhs, direction) = match instantiation_variance { ty::Variance::Invariant => { (generalized_ty.into(), source_ty.into(), AliasRelationDirection::Equate) } @@ -106,22 +106,28 @@ impl<'tcx> InferCtxt<'tcx> { } } } else { - // HACK: make sure that we `a_is_expected` continues to be - // correct when relating the generalized type with the source. + // NOTE: The `instantiation_variance` is not the same variance as + // used by the relation. 
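The new `probe_value` above folds "what do we know about this region variable?" into one `Result`: `Ok(region)` once the variable has been unified with a known region, `Err(universe)` while it is still unknown. That is why the canonicalizer hunk can call `.unwrap_err()` and `universe_of_lt` can match on `Err(universe)`. A reduced model of that API shape; the value and index types here are placeholders:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct UniverseIndex(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum RegionVariableValue {
    Known { value: &'static str },
    Unknown { universe: UniverseIndex },
}

struct Table {
    values: Vec<RegionVariableValue>,
}

impl Table {
    /// `Ok` if the variable already has a value, `Err` with its universe if not.
    fn probe_value(&self, vid: usize) -> Result<&'static str, UniverseIndex> {
        match self.values[vid] {
            RegionVariableValue::Known { value } => Ok(value),
            RegionVariableValue::Unknown { universe } => Err(universe),
        }
    }
}

fn main() {
    let table = Table {
        values: vec![
            RegionVariableValue::Known { value: "'static" },
            RegionVariableValue::Unknown { universe: UniverseIndex(2) },
        ],
    };

    // A caller that only cares about the universe of a still-unknown variable
    // can lean on `unwrap_err`, as the canonicalizer hunk above does.
    assert_eq!(table.probe_value(1).unwrap_err(), UniverseIndex(2));

    // Other callers match both sides.
    match table.probe_value(0) {
        Ok(value) => println!("resolved to {value}"),
        Err(universe) => println!("still unknown in universe {universe:?}"),
    }
}
```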
When instantiating `b`, `target_is_expected` + // is flipped and the `instantion_variance` is also flipped. To + // constrain the `generalized_ty` while using the original relation, + // we therefore only have to flip the arguments. + // + // ```ignore (not code) + // ?a rel B + // instantiate_ty_var(?a, B) # expected and variance not flipped + // B' rel B + // ``` + // or + // ```ignore (not code) + // A rel ?b + // instantiate_ty_var(?b, A) # expected and variance flipped + // A rel A' + // ``` if target_is_expected == relation.a_is_expected() { - relation.relate_with_variance( - ambient_variance, - ty::VarianceDiagInfo::default(), - generalized_ty, - source_ty, - )?; + relation.relate(generalized_ty, source_ty)?; } else { - relation.relate_with_variance( - ambient_variance.xform(ty::Contravariant), - ty::VarianceDiagInfo::default(), - source_ty, - generalized_ty, - )?; + debug!("flip relation"); + relation.relate(source_ty, generalized_ty)?; } } diff --git a/compiler/rustc_lint/Cargo.toml b/compiler/rustc_lint/Cargo.toml index fa1133e7780..2271321b8bf 100644 --- a/compiler/rustc_lint/Cargo.toml +++ b/compiler/rustc_lint/Cargo.toml @@ -23,6 +23,7 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_type_ir = { path = "../rustc_type_ir" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" unicode-security = "0.1.0" # tidy-alphabetical-end diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 1548646c04a..f89cb8ed8b6 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -414,6 +414,29 @@ lint_non_fmt_panic_unused = } .add_fmt_suggestion = or add a "{"{"}{"}"}" format string to use the message literally +lint_non_local_definitions_cargo_update = the {$macro_kind} `{$macro_name}` may come from an old version of the `{$crate_name}` crate, try updating your dependency with `cargo update -p {$crate_name}` + +lint_non_local_definitions_deprecation = this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue <https://github.com/rust-lang/rust/issues/120363> + +lint_non_local_definitions_impl = non-local `impl` definition, they should be avoided as they go against expectation + .help = + move this `impl` block outside the of the current {$body_kind_descr} {$depth -> + [one] `{$body_name}` + *[other] `{$body_name}` and up {$depth} bodies + } + .non_local = an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... {"}"}`) at top-level module and anon-const at the same nesting as the trait or type + .const_anon = use a const-anon item to suppress this lint + +lint_non_local_definitions_macro_rules = non-local `macro_rules!` definition, they should be avoided as they go against expectation + .help = + remove the `#[macro_export]` or move this `macro_rules!` outside the of the current {$body_kind_descr} {$depth -> + [one] `{$body_name}` + *[other] `{$body_name}` and up {$depth} bodies + } + .non_local = a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... 
{"}"}`) at top-level module + lint_non_snake_case = {$sort} `{$name}` should have a snake case name .rename_or_convert_suggestion = rename the identifier or convert it to a snake case raw identifier .cannot_convert_note = `{$sc}` cannot be used as a raw identifier @@ -429,6 +452,7 @@ lint_non_upper_case_global = {$sort} `{$name}` should have an upper case name lint_noop_method_call = call to `.{$method}()` on a reference in this situation does nothing .suggestion = remove this redundant call .note = the type `{$orig_ty}` does not implement `{$trait_}`, so calling `{$method}` on `&{$orig_ty}` copies the reference, which does not do anything and can be removed + .derive_suggestion = if you meant to clone `{$orig_ty}`, implement `Clone` for it lint_only_cast_u8_to_char = only `u8` can be cast into `char` .suggestion = use a `char` literal instead diff --git a/compiler/rustc_lint/src/array_into_iter.rs b/compiler/rustc_lint/src/array_into_iter.rs index 3a5c585366a..993b1d739a1 100644 --- a/compiler/rustc_lint/src/array_into_iter.rs +++ b/compiler/rustc_lint/src/array_into_iter.rs @@ -70,11 +70,15 @@ impl<'tcx> LateLintPass<'tcx> for ArrayIntoIter { // Check if the method call actually calls the libcore // `IntoIterator::into_iter`. - let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap(); - match cx.tcx.trait_of_item(def_id) { - Some(trait_id) if cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id) => {} - _ => return, - }; + let trait_id = cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .and_then(|did| cx.tcx.trait_of_item(did)); + if trait_id.is_none() + || !cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id.unwrap()) + { + return; + } // As this is a method call expression, we have at least one argument. let receiver_ty = cx.typeck_results().expr_ty(receiver_arg); diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 92f7497362e..9f73d2e6812 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -1825,7 +1825,7 @@ impl KeywordIdents { match tt { // Only report non-raw idents. 
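The `array_into_iter` hunk above chains the two fallible lookups with `and_then` instead of unwrapping. As an aside (this is an alternative formulation for illustration, not what the patch itself does), the trailing `is_none() || !...unwrap()` test can also be folded into the chain with `is_some_and`:

```rust
fn type_dependent_def_id(expr_id: u32) -> Option<u32> {
    // Pretend lookup: only expression 1 has a resolved method.
    (expr_id == 1).then_some(10)
}

fn trait_of_item(def_id: u32) -> Option<u32> {
    // Pretend the method with DefId 10 belongs to trait 42 (`IntoIterator`).
    (def_id == 10).then_some(42)
}

fn is_into_iterator(trait_id: u32) -> bool {
    trait_id == 42
}

fn lint_applies(expr_id: u32) -> bool {
    // Chain the two fallible lookups, then test the final Option in one go.
    type_dependent_def_id(expr_id)
        .and_then(trait_of_item)
        .is_some_and(is_into_iterator)
}

fn main() {
    assert!(lint_applies(1));
    assert!(!lint_applies(2));
}
```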
TokenTree::Token(token, _) => { - if let Some((ident, false)) = token.ident() { + if let Some((ident, token::IdentIsRaw::No)) = token.ident() { self.check_ident_token(cx, UnderMacro(true), ident); } } diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index d8e12c04f75..0e446e09d41 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -70,6 +70,7 @@ mod methods; mod multiple_supertrait_upcastable; mod non_ascii_idents; mod non_fmt_panic; +mod non_local_def; mod nonstandard_style; mod noop_method_call; mod opaque_hidden_inferred_bound; @@ -105,6 +106,7 @@ use methods::*; use multiple_supertrait_upcastable::*; use non_ascii_idents::*; use non_fmt_panic::NonPanicFmt; +use non_local_def::*; use nonstandard_style::*; use noop_method_call::*; use opaque_hidden_inferred_bound::*; @@ -229,6 +231,7 @@ late_lint_methods!( MissingDebugImplementations: MissingDebugImplementations, MissingDoc: MissingDoc, AsyncFnInTrait: AsyncFnInTrait, + NonLocalDefinitions: NonLocalDefinitions::default(), ] ] ); @@ -527,6 +530,11 @@ fn register_builtins(store: &mut LintStore) { "no longer needed, see #93367 \ <https://github.com/rust-lang/rust/issues/93367> for more information", ); + store.register_removed( + "const_patterns_without_partial_eq", + "converted into hard error, see RFC #3535 \ + <https://rust-lang.github.io/rfcs/3535-constants-in-patterns.html> for more information", + ); } fn register_internals(store: &mut LintStore) { diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index f067c365170..d8a0b75f8bd 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1314,6 +1314,12 @@ pub struct NoopMethodCallDiag<'a> { pub trait_: Symbol, #[suggestion(code = "", applicability = "machine-applicable")] pub label: Span, + #[suggestion( + lint_derive_suggestion, + code = "#[derive(Clone)]\n", + applicability = "maybe-incorrect" + )] + pub suggest_derive: Option<Span>, } #[derive(LintDiagnostic)] @@ -1328,6 +1334,45 @@ pub struct SuspiciousDoubleRefCloneDiag<'a> { pub ty: Ty<'a>, } +// non_local_defs.rs +#[derive(LintDiagnostic)] +pub enum NonLocalDefinitionsDiag { + #[diag(lint_non_local_definitions_impl)] + #[help] + #[note(lint_non_local)] + #[note(lint_exception)] + #[note(lint_non_local_definitions_deprecation)] + Impl { + depth: u32, + body_kind_descr: &'static str, + body_name: String, + #[subdiagnostic] + cargo_update: Option<NonLocalDefinitionsCargoUpdateNote>, + #[suggestion(lint_const_anon, code = "_", applicability = "machine-applicable")] + const_anon: Option<Span>, + }, + #[diag(lint_non_local_definitions_macro_rules)] + #[help] + #[note(lint_non_local)] + #[note(lint_exception)] + #[note(lint_non_local_definitions_deprecation)] + MacroRules { + depth: u32, + body_kind_descr: &'static str, + body_name: String, + #[subdiagnostic] + cargo_update: Option<NonLocalDefinitionsCargoUpdateNote>, + }, +} + +#[derive(Subdiagnostic)] +#[note(lint_non_local_definitions_cargo_update)] +pub struct NonLocalDefinitionsCargoUpdateNote { + pub macro_kind: &'static str, + pub macro_name: Symbol, + pub crate_name: Symbol, +} + // pass_by_value.rs #[derive(LintDiagnostic)] #[diag(lint_pass_by_value)] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs new file mode 100644 index 00000000000..6cb6fd1cbd5 --- /dev/null +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -0,0 +1,222 @@ +use rustc_hir::{def::DefKind, Body, Item, ItemKind, Node, Path, QPath, TyKind}; 
+use rustc_span::def_id::{DefId, LOCAL_CRATE}; +use rustc_span::{sym, symbol::kw, ExpnKind, MacroKind}; + +use smallvec::{smallvec, SmallVec}; + +use crate::lints::{NonLocalDefinitionsCargoUpdateNote, NonLocalDefinitionsDiag}; +use crate::{LateContext, LateLintPass, LintContext}; + +declare_lint! { + /// The `non_local_definitions` lint checks for `impl` blocks and `#[macro_export]` + /// macro inside bodies (functions, enum discriminant, ...). + /// + /// ### Example + /// + /// ```rust + /// trait MyTrait {} + /// struct MyStruct; + /// + /// fn foo() { + /// impl MyTrait for MyStruct {} + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Creating non-local definitions go against expectation and can create discrepancies + /// in tooling. It should be avoided. It may become deny-by-default in edition 2024 + /// and higher, see see the tracking issue <https://github.com/rust-lang/rust/issues/120363>. + /// + /// An `impl` definition is non-local if it is nested inside an item and neither + /// the type nor the trait are at the same nesting level as the `impl` block. + /// + /// All nested bodies (functions, enum discriminant, array length, consts) (expect for + /// `const _: Ty = { ... }` in top-level module, which is still undecided) are checked. + pub NON_LOCAL_DEFINITIONS, + Warn, + "checks for non-local definitions", + report_in_external_macro +} + +#[derive(Default)] +pub struct NonLocalDefinitions { + body_depth: u32, +} + +impl_lint_pass!(NonLocalDefinitions => [NON_LOCAL_DEFINITIONS]); + +// FIXME(Urgau): Figure out how to handle modules nested in bodies. +// It's currently not handled by the current logic because modules are not bodies. +// They don't even follow the correct order (check_body -> check_mod -> check_body_post) +// instead check_mod is called after every body has been handled. + +impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { + fn check_body(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) { + self.body_depth += 1; + } + + fn check_body_post(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) { + self.body_depth -= 1; + } + + fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { + if self.body_depth == 0 { + return; + } + + let parent = cx.tcx.parent(item.owner_id.def_id.into()); + let parent_def_kind = cx.tcx.def_kind(parent); + let parent_opt_item_name = cx.tcx.opt_item_name(parent); + + // Per RFC we (currently) ignore anon-const (`const _: Ty = ...`) in top-level module. + if self.body_depth == 1 + && parent_def_kind == DefKind::Const + && parent_opt_item_name == Some(kw::Underscore) + { + return; + } + + let cargo_update = || { + let oexpn = item.span.ctxt().outer_expn_data(); + if let Some(def_id) = oexpn.macro_def_id + && let ExpnKind::Macro(macro_kind, macro_name) = oexpn.kind + && def_id.krate != LOCAL_CRATE + && std::env::var_os("CARGO").is_some() + { + Some(NonLocalDefinitionsCargoUpdateNote { + macro_kind: macro_kind.descr(), + macro_name, + crate_name: cx.tcx.crate_name(def_id.krate), + }) + } else { + None + } + }; + + match item.kind { + ItemKind::Impl(impl_) => { + // The RFC states: + // + // > An item nested inside an expression-containing item (through any + // > level of nesting) may not define an impl Trait for Type unless + // > either the **Trait** or the **Type** is also nested inside the + // > same expression-containing item. 
+ // + // To achieve this we get try to get the paths of the _Trait_ and + // _Type_, and we look inside thoses paths to try a find in one + // of them a type whose parent is the same as the impl definition. + // + // If that's the case this means that this impl block declaration + // is using local items and so we don't lint on it. + + // We also ignore anon-const in item by including the anon-const + // parent as well; and since it's quite uncommon, we use smallvec + // to avoid unnecessary heap allocations. + let local_parents: SmallVec<[DefId; 1]> = if parent_def_kind == DefKind::Const + && parent_opt_item_name == Some(kw::Underscore) + { + smallvec![parent, cx.tcx.parent(parent)] + } else { + smallvec![parent] + }; + + let self_ty_has_local_parent = match impl_.self_ty.kind { + TyKind::Path(QPath::Resolved(_, ty_path)) => { + path_has_local_parent(ty_path, cx, &*local_parents) + } + TyKind::TraitObject([principle_poly_trait_ref, ..], _, _) => { + path_has_local_parent( + principle_poly_trait_ref.trait_ref.path, + cx, + &*local_parents, + ) + } + TyKind::TraitObject([], _, _) + | TyKind::InferDelegation(_, _) + | TyKind::Slice(_) + | TyKind::Array(_, _) + | TyKind::Ptr(_) + | TyKind::Ref(_, _) + | TyKind::BareFn(_) + | TyKind::Never + | TyKind::Tup(_) + | TyKind::Path(_) + | TyKind::AnonAdt(_) + | TyKind::OpaqueDef(_, _, _) + | TyKind::Typeof(_) + | TyKind::Infer + | TyKind::Err(_) => false, + }; + + let of_trait_has_local_parent = impl_ + .of_trait + .map(|of_trait| path_has_local_parent(of_trait.path, cx, &*local_parents)) + .unwrap_or(false); + + // If none of them have a local parent (LOGICAL NOR) this means that + // this impl definition is a non-local definition and so we lint on it. + if !(self_ty_has_local_parent || of_trait_has_local_parent) { + let const_anon = if self.body_depth == 1 + && parent_def_kind == DefKind::Const + && parent_opt_item_name != Some(kw::Underscore) + && let Some(parent) = parent.as_local() + && let Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) + && let ItemKind::Const(ty, _, _) = item.kind + && let TyKind::Tup(&[]) = ty.kind + { + Some(item.ident.span) + } else { + None + }; + + cx.emit_span_lint( + NON_LOCAL_DEFINITIONS, + item.span, + NonLocalDefinitionsDiag::Impl { + depth: self.body_depth, + body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent), + body_name: parent_opt_item_name + .map(|s| s.to_ident_string()) + .unwrap_or_else(|| "<unnameable>".to_string()), + cargo_update: cargo_update(), + const_anon, + }, + ) + } + } + ItemKind::Macro(_macro, MacroKind::Bang) + if cx.tcx.has_attr(item.owner_id.def_id, sym::macro_export) => + { + cx.emit_span_lint( + NON_LOCAL_DEFINITIONS, + item.span, + NonLocalDefinitionsDiag::MacroRules { + depth: self.body_depth, + body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent), + body_name: parent_opt_item_name + .map(|s| s.to_ident_string()) + .unwrap_or_else(|| "<unnameable>".to_string()), + cargo_update: cargo_update(), + }, + ) + } + _ => {} + } + } +} + +/// Given a path and a parent impl def id, this checks if the if parent resolution +/// def id correspond to the def id of the parent impl definition. 
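The rules the new `non_local_definitions` lint implements above boil down to: an `impl` nested inside a body warns unless the trait or the self type is declared at the same nesting level, and an anonymous `const _` at the top level of a module is exempt. A few source-level cases assembled here for illustration (the exact diagnostic wording may differ):

```rust
trait MyTrait {}
struct MyStruct;

// Exempt per the RFC text quoted above: an anonymous `const _` at the top
// level of a module still counts as module scope.
const _: () = {
    impl MyTrait for MyStruct {}
};

fn body() {
    // `Wrapper` is declared at the same nesting level as the impl, so the
    // impl is local and the lint stays quiet.
    struct Wrapper;
    impl MyTrait for Wrapper {}
    let _ = Wrapper;

    // By contrast, an impl whose trait *and* self type both live outside this
    // body would warn:
    //
    //     impl MyTrait for MyStruct {} // warning: non-local `impl` definition
}

fn assert_impl<T: MyTrait>() {}

fn main() {
    body();
    let _ = MyStruct;
    assert_impl::<MyStruct>(); // the impl from the `const _` block is visible here
}
```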
+/// +/// Given this path, we will look at the path (and ignore any generic args): +/// +/// ```text +/// std::convert::PartialEq<Foo<Bar>> +/// ^^^^^^^^^^^^^^^^^^^^^^^ +/// ``` +fn path_has_local_parent(path: &Path<'_>, cx: &LateContext<'_>, local_parents: &[DefId]) -> bool { + path.res.opt_def_id().is_some_and(|did| local_parents.contains(&cx.tcx.parent(did))) +} diff --git a/compiler/rustc_lint/src/noop_method_call.rs b/compiler/rustc_lint/src/noop_method_call.rs index 26c5e4fb483..970d411fb06 100644 --- a/compiler/rustc_lint/src/noop_method_call.rs +++ b/compiler/rustc_lint/src/noop_method_call.rs @@ -121,10 +121,20 @@ impl<'tcx> LateLintPass<'tcx> for NoopMethodCall { let orig_ty = expr_ty.peel_refs(); if receiver_ty == expr_ty { + let suggest_derive = match orig_ty.kind() { + ty::Adt(def, _) => Some(cx.tcx.def_span(def.did()).shrink_to_lo()), + _ => None, + }; cx.emit_span_lint( NOOP_METHOD_CALL, span, - NoopMethodCallDiag { method: call.ident.name, orig_ty, trait_, label: span }, + NoopMethodCallDiag { + method: call.ident.name, + orig_ty, + trait_, + label: span, + suggest_derive, + }, ); } else { match name { diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 84a050a242a..1cddb45428c 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -32,7 +32,6 @@ declare_lint_pass! { CONFLICTING_REPR_HINTS, CONST_EVALUATABLE_UNCHECKED, CONST_ITEM_MUTATION, - CONST_PATTERNS_WITHOUT_PARTIAL_EQ, DEAD_CODE, DEPRECATED, DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME, @@ -2343,57 +2342,6 @@ declare_lint! { } declare_lint! { - /// The `const_patterns_without_partial_eq` lint detects constants that are used in patterns, - /// whose type does not implement `PartialEq`. - /// - /// ### Example - /// - /// ```rust,compile_fail - /// #![deny(const_patterns_without_partial_eq)] - /// - /// trait EnumSetType { - /// type Repr; - /// } - /// - /// enum Enum8 { } - /// impl EnumSetType for Enum8 { - /// type Repr = u8; - /// } - /// - /// #[derive(PartialEq, Eq)] - /// struct EnumSet<T: EnumSetType> { - /// __enumset_underlying: T::Repr, - /// } - /// - /// const CONST_SET: EnumSet<Enum8> = EnumSet { __enumset_underlying: 3 }; - /// - /// fn main() { - /// match CONST_SET { - /// CONST_SET => { /* ok */ } - /// _ => panic!("match fell through?"), - /// } - /// } - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// Previous versions of Rust accepted constants in patterns, even if those constants' types - /// did not have `PartialEq` implemented. The compiler falls back to comparing the value - /// field-by-field. In the future we'd like to ensure that pattern matching always - /// follows `PartialEq` semantics, so that trait bound will become a requirement for - /// matching on constants. - pub CONST_PATTERNS_WITHOUT_PARTIAL_EQ, - Warn, - "constant in pattern does not implement `PartialEq`", - @future_incompatible = FutureIncompatibleInfo { - reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps, - reference: "issue #116122 <https://github.com/rust-lang/rust/issues/116122>", - }; -} - -declare_lint! { /// The `ambiguous_associated_items` lint detects ambiguity between /// [associated items] and [enum variants]. 
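The `noop_method_call` change above adds a `#[derive(Clone)]` suggestion anchored at the start of the type definition. The situation it targets: calling `.clone()` on a `&T` whose `T` is not `Clone` resolves to the blanket `Clone` impl for references and only copies the reference. A small reproduction of the before/after the suggestion describes:

```rust
// `Foo` does not implement `Clone`, so `r.clone()` below resolves to the
// blanket `impl Clone for &T` and merely copies the reference; this is the
// situation `noop_method_call` flags.
struct Foo;

// The new suggestion inserts `#[derive(Clone)]` right before the definition:
//
//     #[derive(Clone)]
//     struct Foo;
//
// after which `r.clone()` would produce an owned `Foo` instead of another `&Foo`.

fn main() {
    let foo = Foo;
    let r = &foo;
    let copied: &Foo = r.clone(); // clones the reference, not the `Foo`
    let _ = copied;
}
```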
/// diff --git a/compiler/rustc_macros/src/diagnostics/mod.rs b/compiler/rustc_macros/src/diagnostics/mod.rs index 33dffe6998a..044bbadf41c 100644 --- a/compiler/rustc_macros/src/diagnostics/mod.rs +++ b/compiler/rustc_macros/src/diagnostics/mod.rs @@ -55,7 +55,8 @@ use synstructure::Structure; /// /// See rustc dev guide for more examples on using the `#[derive(Diagnostic)]`: /// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html> -pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn session_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); DiagnosticDerive::new(s).into_tokens() } @@ -101,7 +102,8 @@ pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream { /// /// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`: /// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html#reference> -pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); LintDiagnosticDerive::new(s).into_tokens() } @@ -151,6 +153,7 @@ pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { /// /// diag.subdiagnostic(RawIdentifierSuggestion { span, applicability, ident }); /// ``` -pub fn session_subdiagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn session_subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); SubdiagnosticDeriveBuilder::new().into_tokens(s) } diff --git a/compiler/rustc_macros/src/hash_stable.rs b/compiler/rustc_macros/src/hash_stable.rs index a6396ba687d..6b3210cad7b 100644 --- a/compiler/rustc_macros/src/hash_stable.rs +++ b/compiler/rustc_macros/src/hash_stable.rs @@ -74,6 +74,8 @@ fn hash_stable_derive_with_mode( HashStableMode::Generic | HashStableMode::NoContext => parse_quote!(__CTX), }; + s.underscore_const(true); + // no_context impl is able to derive by-field, which is closer to a perfect derive. s.add_bounds(match mode { HashStableMode::Normal | HashStableMode::Generic => synstructure::AddBounds::Generics, diff --git a/compiler/rustc_macros/src/lift.rs b/compiler/rustc_macros/src/lift.rs index 3dedd88fb19..f7a84ba1510 100644 --- a/compiler/rustc_macros/src/lift.rs +++ b/compiler/rustc_macros/src/lift.rs @@ -4,6 +4,7 @@ use syn::parse_quote; pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { s.add_bounds(synstructure::AddBounds::Generics); s.bind_with(|_| synstructure::BindStyle::Move); + s.underscore_const(true); let tcx: syn::Lifetime = parse_quote!('tcx); let newtcx: syn::GenericParam = parse_quote!('__lifted); diff --git a/compiler/rustc_macros/src/serialize.rs b/compiler/rustc_macros/src/serialize.rs index 98b53945b91..5fa11d22f0e 100644 --- a/compiler/rustc_macros/src/serialize.rs +++ b/compiler/rustc_macros/src/serialize.rs @@ -15,6 +15,7 @@ pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_type_ir::codec::TyDecoder #bound }); s.add_bounds(synstructure::AddBounds::Fields); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -26,6 +27,7 @@ pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! { '__a }); let decoder_ty = quote! 
{ DecodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -34,6 +36,7 @@ pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_span::SpanDecoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -42,12 +45,13 @@ pub fn decodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_serialize::Decoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } fn decodable_body( - s: synstructure::Structure<'_>, + mut s: synstructure::Structure<'_>, decoder_ty: TokenStream, ) -> proc_macro2::TokenStream { if let syn::Data::Union(_) = s.ast().data { @@ -93,6 +97,7 @@ fn decodable_body( } } }; + s.underscore_const(true); s.bound_impl( quote!(::rustc_serialize::Decodable<#decoder_ty>), @@ -130,6 +135,7 @@ pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! {#encoder_ty: ::rustc_type_ir::codec::TyEncoder #bound }); s.add_bounds(synstructure::AddBounds::Fields); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -141,6 +147,7 @@ pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! { '__a }); let encoder_ty = quote! { EncodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, true) } @@ -149,6 +156,7 @@ pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_span::SpanEncoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -157,6 +165,7 @@ pub fn encodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -170,6 +179,7 @@ fn encodable_body( panic!("cannot derive on union") } + s.underscore_const(true); s.bind_with(|binding| { // Handle the lack of a blanket reference impl. if let syn::Type::Reference(_) = binding.ast().ty { diff --git a/compiler/rustc_macros/src/type_foldable.rs b/compiler/rustc_macros/src/type_foldable.rs index 5ee4d879313..5617c53b119 100644 --- a/compiler/rustc_macros/src/type_foldable.rs +++ b/compiler/rustc_macros/src/type_foldable.rs @@ -6,6 +6,8 @@ pub fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:: panic!("cannot derive on union") } + s.underscore_const(true); + if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { s.add_impl_generic(parse_quote! 
{ 'tcx }); } diff --git a/compiler/rustc_macros/src/type_visitable.rs b/compiler/rustc_macros/src/type_visitable.rs index dcd505a105e..c8430380345 100644 --- a/compiler/rustc_macros/src/type_visitable.rs +++ b/compiler/rustc_macros/src/type_visitable.rs @@ -6,6 +6,8 @@ pub fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: panic!("cannot derive on union") } + s.underscore_const(true); + // ignore fields with #[type_visitable(ignore)] s.filter(|bi| { let mut ignored = false; diff --git a/compiler/rustc_middle/src/infer/unify_key.rs b/compiler/rustc_middle/src/infer/unify_key.rs index 63c0ebd5f6b..84b428297db 100644 --- a/compiler/rustc_middle/src/infer/unify_key.rs +++ b/compiler/rustc_middle/src/infer/unify_key.rs @@ -1,4 +1,4 @@ -use crate::ty::{self, Region, Ty, TyCtxt}; +use crate::ty::{self, Ty, TyCtxt}; use rustc_data_structures::unify::{NoError, UnifyKey, UnifyValue}; use rustc_span::def_id::DefId; use rustc_span::symbol::Symbol; @@ -10,26 +10,16 @@ pub trait ToType { fn to_type<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>; } -#[derive(PartialEq, Copy, Clone, Debug)] -pub struct UnifiedRegion<'tcx> { - value: Option<ty::Region<'tcx>>, -} - -impl<'tcx> UnifiedRegion<'tcx> { - pub fn new(value: Option<Region<'tcx>>) -> Self { - Self { value } - } - - /// The caller is responsible for checking universe compatibility before using this value. - pub fn get_value_ignoring_universes(self) -> Option<Region<'tcx>> { - self.value - } +#[derive(Copy, Clone, Debug)] +pub enum RegionVariableValue<'tcx> { + Known { value: ty::Region<'tcx> }, + Unknown { universe: ty::UniverseIndex }, } #[derive(PartialEq, Copy, Clone, Debug)] pub struct RegionVidKey<'tcx> { pub vid: ty::RegionVid, - pub phantom: PhantomData<UnifiedRegion<'tcx>>, + pub phantom: PhantomData<RegionVariableValue<'tcx>>, } impl<'tcx> From<ty::RegionVid> for RegionVidKey<'tcx> { @@ -39,7 +29,7 @@ impl<'tcx> From<ty::RegionVid> for RegionVidKey<'tcx> { } impl<'tcx> UnifyKey for RegionVidKey<'tcx> { - type Value = UnifiedRegion<'tcx>; + type Value = RegionVariableValue<'tcx>; #[inline] fn index(&self) -> u32 { self.vid.as_u32() @@ -53,36 +43,47 @@ impl<'tcx> UnifyKey for RegionVidKey<'tcx> { } } -impl<'tcx> UnifyValue for UnifiedRegion<'tcx> { - type Error = NoError; +pub struct RegionUnificationError; +impl<'tcx> UnifyValue for RegionVariableValue<'tcx> { + type Error = RegionUnificationError; - fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> { - // We pick the value of the least universe because it is compatible with more variables. - // This is *not* necessary for completeness. - #[cold] - fn min_universe<'tcx>(r1: Region<'tcx>, r2: Region<'tcx>) -> Region<'tcx> { - cmp::min_by_key(r1, r2, |r| match r.kind() { - ty::ReStatic - | ty::ReErased - | ty::ReLateParam(..) - | ty::ReEarlyParam(..) - | ty::ReError(_) => ty::UniverseIndex::ROOT, - ty::RePlaceholder(placeholder) => placeholder.universe, - ty::ReVar(..) | ty::ReBound(..) => bug!("not a universal region"), - }) - } - - Ok(match (value1.value, value2.value) { - // Here we can just pick one value, because the full constraints graph - // will be handled later. Ideally, we might want a `MultipleValues` - // variant or something. For now though, this is fine. - (Some(val1), Some(val2)) => Self { value: Some(min_universe(val1, val2)) }, + fn unify_values(value1: &Self, value2: &Self) -> Result<Self, Self::Error> { + match (*value1, *value2) { + (RegionVariableValue::Known { .. }, RegionVariableValue::Known { .. 
}) => { + Err(RegionUnificationError) + } - (Some(_), _) => *value1, - (_, Some(_)) => *value2, + (RegionVariableValue::Known { value }, RegionVariableValue::Unknown { universe }) + | (RegionVariableValue::Unknown { universe }, RegionVariableValue::Known { value }) => { + let universe_of_value = match value.kind() { + ty::ReStatic + | ty::ReErased + | ty::ReLateParam(..) + | ty::ReEarlyParam(..) + | ty::ReError(_) => ty::UniverseIndex::ROOT, + ty::RePlaceholder(placeholder) => placeholder.universe, + ty::ReVar(..) | ty::ReBound(..) => bug!("not a universal region"), + }; + + if universe.can_name(universe_of_value) { + Ok(RegionVariableValue::Known { value }) + } else { + Err(RegionUnificationError) + } + } - (None, None) => *value1, - }) + ( + RegionVariableValue::Unknown { universe: a }, + RegionVariableValue::Unknown { universe: b }, + ) => { + // If we unify two unconstrained regions then whatever + // value they wind up taking (which must be the same value) must + // be nameable by both universes. Therefore, the resulting + // universe is the minimum of the two universes, because that is + // the one which contains the fewest names in scope. + Ok(RegionVariableValue::Unknown { universe: a.min(b) }) + } + } } } diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index 9c0846e9fb1..c3e4a03ad16 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -24,6 +24,8 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![cfg_attr(bootstrap, feature(exhaustive_patterns))] +#![cfg_attr(not(bootstrap), feature(min_exhaustive_patterns))] #![feature(rustdoc_internals)] #![feature(allocator_api)] #![feature(array_windows)] @@ -32,7 +34,6 @@ #![feature(core_intrinsics)] #![feature(const_type_name)] #![feature(discriminant_kind)] -#![feature(exhaustive_patterns)] #![feature(coroutines)] #![feature(generic_nonzero)] #![feature(if_let_guard)] diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index f4dfbe059eb..9c7c46f2ad2 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -233,7 +233,7 @@ impl<'sess> OnDiskCache<'sess> { for (index, file) in files.iter().enumerate() { let index = SourceFileIndex(index as u32); - let file_ptr: *const SourceFile = &**file as *const _; + let file_ptr: *const SourceFile = std::ptr::addr_of!(**file); file_to_file_index.insert(file_ptr, index); let source_file_id = EncodedSourceFileId::new(tcx, file); file_index_to_stable_id.insert(index, source_file_id); @@ -835,7 +835,7 @@ pub struct CacheEncoder<'a, 'tcx> { impl<'a, 'tcx> CacheEncoder<'a, 'tcx> { #[inline] fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex { - self.file_to_file_index[&(&*source_file as *const SourceFile)] + self.file_to_file_index[&std::ptr::addr_of!(*source_file)] } /// Encode something with additional information that allows to do some diff --git a/compiler/rustc_middle/src/traits/select.rs b/compiler/rustc_middle/src/traits/select.rs index e3050007c7b..8e9751f4529 100644 --- a/compiler/rustc_middle/src/traits/select.rs +++ b/compiler/rustc_middle/src/traits/select.rs @@ -139,7 +139,7 @@ pub enum SelectionCandidate<'tcx> { /// generated for an `async ||` expression. 
AsyncClosureCandidate, - /// Implementation of the the `AsyncFnKindHelper` helper trait, which + /// Implementation of the `AsyncFnKindHelper` helper trait, which /// is used internally to delay computation for async closures until after /// upvar analysis is performed in HIR typeck. AsyncFnKindHelperCandidate, diff --git a/compiler/rustc_middle/src/ty/list.rs b/compiler/rustc_middle/src/ty/list.rs index 4f9c9d85763..336c2dce114 100644 --- a/compiler/rustc_middle/src/ty/list.rs +++ b/compiler/rustc_middle/src/ty/list.rs @@ -61,7 +61,7 @@ impl<T> List<T> { // length) that is 64-byte aligned, thus featuring the necessary // trailing padding for elements with up to 64-byte alignment. static EMPTY_SLICE: InOrder<usize, MaxAlign> = InOrder(0, MaxAlign); - unsafe { &*(&EMPTY_SLICE as *const _ as *const List<T>) } + unsafe { &*(std::ptr::addr_of!(EMPTY_SLICE) as *const List<T>) } } pub fn len(&self) -> usize { diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 19f8ba124f1..520fc1dd7aa 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -139,7 +139,7 @@ pub trait Printer<'tcx>: Sized { _, hir::CoroutineSource::Closure, )) = self.tcx().coroutine_kind(def_id) - && args.len() >= parent_args.len() + 1 + && args.len() > parent_args.len() { return self.path_generic_args( |cx| cx.print_def_path(def_id, parent_args), diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 92ec1a83bee..1910841f268 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2929,7 +2929,7 @@ define_print_and_forward_display! { ty::ExistentialTraitRef<'tcx> { // Use a type that can't appear in defaults of type parameters. - let dummy_self = Ty::new_fresh(cx.tcx(),0); + let dummy_self = Ty::new_fresh(cx.tcx(), 0); let trait_ref = self.with_self_ty(cx.tcx(), dummy_self); p!(print(trait_ref.print_only_trait_path())) } diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index f9a8795f5d6..641a278c1d3 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -1150,39 +1150,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// the value, we will set and generate a branch to the appropriate /// pre-binding block. /// - /// If we find that *NONE* of the candidates apply, we branch to the - /// `otherwise_block`, setting it to `Some` if required. In principle, this - /// means that the input list was not exhaustive, though at present we - /// sometimes are not smart enough to recognize all exhaustive inputs. + /// If we find that *NONE* of the candidates apply, we branch to `otherwise_block`. /// /// It might be surprising that the input can be non-exhaustive. /// Indeed, initially, it is not, because all matches are /// exhaustive in Rust. But during processing we sometimes divide /// up the list of candidates and recurse with a non-exhaustive - /// list. This is important to keep the size of the generated code - /// under control. See [`Builder::test_candidates`] for more details. + /// list. This is how our lowering approach (called "backtracking + /// automaton" in the literature) works. + /// See [`Builder::test_candidates`] for more details. /// /// If `fake_borrows` is `Some`, then places which need fake borrows /// will be added to it. 
/// - /// For an example of a case where we set `otherwise_block`, even for an - /// exhaustive match, consider: - /// + /// For an example of how we use `otherwise_block`, consider: /// ``` - /// # fn foo(x: (bool, bool)) { - /// match x { - /// (true, true) => (), - /// (_, false) => (), - /// (false, true) => (), + /// # fn foo((x, y): (bool, bool)) -> u32 { + /// match (x, y) { + /// (true, true) => 1, + /// (_, false) => 2, + /// (false, true) => 3, /// } /// # } /// ``` + /// For this match, we generate something like: + /// ``` + /// # fn foo((x, y): (bool, bool)) -> u32 { + /// if x { + /// if y { + /// return 1 + /// } else { + /// // continue + /// } + /// } else { + /// // continue + /// } + /// if y { + /// if x { + /// // This is actually unreachable because the `(true, true)` case was handled above. + /// // continue + /// } else { + /// return 3 + /// } + /// } else { + /// return 2 + /// } + /// // this is the final `otherwise_block`, which is unreachable because the match was exhaustive. + /// unreachable!() + /// # } + /// ``` + /// + /// Every `continue` is an instance of branching to some `otherwise_block` somewhere deep within + /// the algorithm. For more details on why we lower like this, see [`Builder::test_candidates`]. /// - /// For this match, we check if `x.0` matches `true` (for the first - /// arm). If it doesn't match, we check `x.1`. If `x.1` is `true` we check - /// if `x.0` matches `false` (for the third arm). In the (impossible at - /// runtime) case when `x.0` is now `true`, we branch to - /// `otherwise_block`. + /// Note how we test `x` twice. This is the tradeoff of backtracking automata: we prefer smaller + /// code size at the expense of non-optimal code paths. #[instrument(skip(self, fake_borrows), level = "debug")] fn match_candidates<'pat>( &mut self, @@ -1557,18 +1579,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - /// This is the most subtle part of the matching algorithm. At - /// this point, the input candidates have been fully simplified, - /// and so we know that all remaining match-pairs require some - /// sort of test. To decide what test to perform, we take the highest - /// priority candidate (the first one in the list, as of January 2021) - /// and extract the first match-pair from the list. From this we decide - /// what kind of test is needed using [`Builder::test`], defined in the - /// [`test` module](mod@test). + /// Pick a test to run. Which test doesn't matter as long as it is guaranteed to fully match at + /// least one match pair. We currently simply pick the test corresponding to the first match + /// pair of the first candidate in the list. /// - /// *Note:* taking the first match pair is somewhat arbitrary, and - /// we might do better here by choosing more carefully what to - /// test. + /// *Note:* taking the first match pair is somewhat arbitrary, and we might do better here by + /// choosing more carefully what to test. /// /// For example, consider the following possible match-pairs: /// @@ -1580,121 +1596,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// [`Switch`]: TestKind::Switch /// [`SwitchInt`]: TestKind::SwitchInt /// [`Range`]: TestKind::Range - /// - /// Once we know what sort of test we are going to perform, this - /// test may also help us winnow down our candidates. So we walk over - /// the candidates (from high to low priority) and check. This - /// gives us, for each outcome of the test, a transformed list of - /// candidates. 
For example, if we are testing `x.0`'s variant, - /// and we have a candidate `(x.0 @ Some(v), x.1 @ 22)`, - /// then we would have a resulting candidate of `((x.0 as Some).0 @ v, x.1 @ 22)`. - /// Note that the first match-pair is now simpler (and, in fact, irrefutable). - /// - /// But there may also be candidates that the test just doesn't - /// apply to. The classical example involves wildcards: - /// - /// ``` - /// # let (x, y, z) = (true, true, true); - /// match (x, y, z) { - /// (true , _ , true ) => true, // (0) - /// (_ , true , _ ) => true, // (1) - /// (false, false, _ ) => false, // (2) - /// (true , _ , false) => false, // (3) - /// } - /// # ; - /// ``` - /// - /// In that case, after we test on `x`, there are 2 overlapping candidate - /// sets: - /// - /// - If the outcome is that `x` is true, candidates 0, 1, and 3 - /// - If the outcome is that `x` is false, candidates 1 and 2 - /// - /// Here, the traditional "decision tree" method would generate 2 - /// separate code-paths for the 2 separate cases. - /// - /// In some cases, this duplication can create an exponential amount of - /// code. This is most easily seen by noticing that this method terminates - /// with precisely the reachable arms being reachable - but that problem - /// is trivially NP-complete: - /// - /// ```ignore (illustrative) - /// match (var0, var1, var2, var3, ...) { - /// (true , _ , _ , false, true, ...) => false, - /// (_ , true, true , false, _ , ...) => false, - /// (false, _ , false, false, _ , ...) => false, - /// ... - /// _ => true - /// } - /// ``` - /// - /// Here the last arm is reachable only if there is an assignment to - /// the variables that does not match any of the literals. Therefore, - /// compilation would take an exponential amount of time in some cases. - /// - /// That kind of exponential worst-case might not occur in practice, but - /// our simplistic treatment of constants and guards would make it occur - /// in very common situations - for example [#29740]: - /// - /// ```ignore (illustrative) - /// match x { - /// "foo" if foo_guard => ..., - /// "bar" if bar_guard => ..., - /// "baz" if baz_guard => ..., - /// ... - /// } - /// ``` - /// - /// [#29740]: https://github.com/rust-lang/rust/issues/29740 - /// - /// Here we first test the match-pair `x @ "foo"`, which is an [`Eq` test]. - /// - /// [`Eq` test]: TestKind::Eq - /// - /// It might seem that we would end up with 2 disjoint candidate - /// sets, consisting of the first candidate or the other two, but our - /// algorithm doesn't reason about `"foo"` being distinct from the other - /// constants; it considers the latter arms to potentially match after - /// both outcomes, which obviously leads to an exponential number - /// of tests. - /// - /// To avoid these kinds of problems, our algorithm tries to ensure - /// the amount of generated tests is linear. When we do a k-way test, - /// we return an additional "unmatched" set alongside the obvious `k` - /// sets. When we encounter a candidate that would be present in more - /// than one of the sets, we put it and all candidates below it into the - /// "unmatched" set. This ensures these `k+1` sets are disjoint. - /// - /// After we perform our test, we branch into the appropriate candidate - /// set and recurse with `match_candidates`. These sub-matches are - /// obviously non-exhaustive - as we discarded our otherwise set - so - /// we set their continuation to do `match_candidates` on the - /// "unmatched" set (which is again non-exhaustive). 
- /// - /// If you apply this to the above test, you basically wind up - /// with an if-else-if chain, testing each candidate in turn, - /// which is precisely what we want. - /// - /// In addition to avoiding exponential-time blowups, this algorithm - /// also has the nice property that each guard and arm is only generated - /// once. - fn test_candidates<'pat, 'b, 'c>( + fn pick_test( &mut self, - span: Span, - scrutinee_span: Span, - mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>], - start_block: BasicBlock, - otherwise_block: BasicBlock, + candidates: &mut [&mut Candidate<'_, 'tcx>], fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>, - ) { - // extract the match-pair from the highest priority candidate + ) -> (PlaceBuilder<'tcx>, Test<'tcx>) { + // Extract the match-pair from the highest priority candidate let match_pair = &candidates.first().unwrap().match_pairs[0]; let mut test = self.test(match_pair); let match_place = match_pair.place.clone(); - // most of the time, the test to perform is simply a function - // of the main candidate; but for a test like SwitchInt, we - // may want to add cases based on the candidates that are + debug!(?test, ?match_pair); + // Most of the time, the test to perform is simply a function of the main candidate; but for + // a test like SwitchInt, we may want to add cases based on the candidates that are // available match test.kind { TestKind::SwitchInt { switch_ty: _, ref mut options } => { @@ -1721,20 +1635,58 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fb.insert(resolved_place); } - // perform the test, branching to one of N blocks. For each of - // those N possible outcomes, create a (initially empty) - // vector of candidates. Those are the candidates that still - // apply if the test has that particular outcome. - debug!("test_candidates: test={:?} match_pair={:?}", test, match_pair); + (match_place, test) + } + + /// Given a test, we sort the input candidates into several buckets. If a candidate only matches + /// in one of the branches of `test`, we move it there. If it could match in more than one of + /// the branches of `test`, we stop sorting candidates. + /// + /// This returns a pair of + /// - the candidates that weren't sorted; + /// - for each possible outcome of the test, the candidates that match in that outcome. + /// + /// Moreover, we transform the branched candidates to reflect the fact that we know which + /// outcome of `test` occurred. + /// + /// For example: + /// ``` + /// # let (x, y, z) = (true, true, true); + /// match (x, y, z) { + /// (true , _ , true ) => true, // (0) + /// (false, false, _ ) => false, // (1) + /// (_ , true , _ ) => true, // (2) + /// (true , _ , false) => false, // (3) + /// } + /// # ; + /// ``` + /// + /// Assume we are testing on `x`. There are 2 overlapping candidate sets: + /// - If the outcome is that `x` is true, candidates 0, 2, and 3 + /// - If the outcome is that `x` is false, candidates 1 and 2 + /// + /// Following our algorithm, candidate 0 is sorted into outcome `x == true`, candidate 1 goes + /// into outcome `x == false`, and candidate 2 and 3 remain unsorted. + /// + /// The sorted candidates are transformed: + /// - candidate 0 becomes `[z @ true]` since we know that `x` was `true`; + /// - candidate 1 becomes `[y @ false]` since we know that `x` was `false`. 
+ fn sort_candidates<'b, 'c, 'pat>( + &mut self, + match_place: &PlaceBuilder<'tcx>, + test: &Test<'tcx>, + mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>], + ) -> (&'b mut [&'c mut Candidate<'pat, 'tcx>], Vec<Vec<&'b mut Candidate<'pat, 'tcx>>>) { + // For each of the N possible outcomes, create a (initially empty) vector of candidates. + // Those are the candidates that apply if the test has that particular outcome. let mut target_candidates: Vec<Vec<&mut Candidate<'pat, 'tcx>>> = vec![]; target_candidates.resize_with(test.targets(), Default::default); let total_candidate_count = candidates.len(); - // Sort the candidates into the appropriate vector in - // `target_candidates`. Note that at some point we may - // encounter a candidate where the test is not relevant; at - // that point, we stop sorting. + // Sort the candidates into the appropriate vector in `target_candidates`. Note that at some + // point we may encounter a candidate where the test is not relevant; at that point, we stop + // sorting. while let Some(candidate) = candidates.first_mut() { let Some(idx) = self.sort_candidate(&match_place, &test, candidate) else { break; @@ -1743,7 +1695,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target_candidates[idx].push(candidate); candidates = rest; } - // at least the first candidate ought to be tested + + // At least the first candidate ought to be tested assert!( total_candidate_count > candidates.len(), "{total_candidate_count}, {candidates:#?}" @@ -1751,16 +1704,130 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("tested_candidates: {}", total_candidate_count - candidates.len()); debug!("untested_candidates: {}", candidates.len()); + (candidates, target_candidates) + } + + /// This is the most subtle part of the match lowering algorithm. At this point, the input + /// candidates have been fully simplified, so all remaining match-pairs require some sort of + /// test. + /// + /// Once we pick what sort of test we are going to perform, this test will help us winnow down + /// our candidates. So we walk over the candidates (from high to low priority) and check. We + /// compute, for each outcome of the test, a transformed list of candidates. If a candidate + /// matches in a single branch of our test, we add it to the corresponding outcome. We also + /// transform it to record the fact that we know which outcome occurred. + /// + /// For example, if we are testing `x.0`'s variant, and we have a candidate `(x.0 @ Some(v), x.1 + /// @ 22)`, then we would have a resulting candidate of `((x.0 as Some).0 @ v, x.1 @ 22)` in the + /// branch corresponding to `Some`. To ensure we make progress, we always pick a test that + /// results in simplifying the first candidate. + /// + /// But there may also be candidates that the test doesn't + /// apply to. The classical example is wildcards: + /// + /// ``` + /// # let (x, y, z) = (true, true, true); + /// match (x, y, z) { + /// (true , _ , true ) => true, // (0) + /// (false, false, _ ) => false, // (1) + /// (_ , true , _ ) => true, // (2) + /// (true , _ , false) => false, // (3) + /// } + /// # ; + /// ``` + /// + /// Here, the traditional "decision tree" method would generate 2 separate code-paths for the 2 + /// possible values of `x`. This would however duplicate some candidates, which would need to be + /// lowered several times. + /// + /// In some cases, this duplication can create an exponential amount of + /// code. 
This is most easily seen by noticing that this method terminates + /// with precisely the reachable arms being reachable - but that problem + /// is trivially NP-complete: + /// + /// ```ignore (illustrative) + /// match (var0, var1, var2, var3, ...) { + /// (true , _ , _ , false, true, ...) => false, + /// (_ , true, true , false, _ , ...) => false, + /// (false, _ , false, false, _ , ...) => false, + /// ... + /// _ => true + /// } + /// ``` + /// + /// Here the last arm is reachable only if there is an assignment to + /// the variables that does not match any of the literals. Therefore, + /// compilation would take an exponential amount of time in some cases. + /// + /// In rustc, we opt instead for the "backtracking automaton" approach. This guarantees we never + /// duplicate a candidate (except in the presence of or-patterns). In fact, this guarantee is + /// ensured by the fact that we carry around `&mut Candidate`s which can't be duplicated. + /// + /// To make this work, whenever we decide to perform a test, if we encounter a candidate that + /// could match in more than one branch of the test, we stop. We generate code for the test and + /// for the candidates in its branches; the remaining candidates will be tested if the + /// candidates in the branches fail to match. + /// + /// For example, if we test on `x` in the following: + /// ``` + /// # fn foo((x, y, z): (bool, bool, bool)) -> u32 { + /// match (x, y, z) { + /// (true , _ , true ) => 0, + /// (false, false, _ ) => 1, + /// (_ , true , _ ) => 2, + /// (true , _ , false) => 3, + /// } + /// # } + /// ``` + /// this function generates code that looks more or less like: + /// ``` + /// # fn foo((x, y, z): (bool, bool, bool)) -> u32 { + /// if x { + /// match (y, z) { + /// (_, true) => return 0, + /// _ => {} // continue matching + /// } + /// } else { + /// match (y, z) { + /// (false, _) => return 1, + /// _ => {} // continue matching + /// } + /// } + /// // the block here is `remainder_start` + /// match (x, y, z) { + /// (_ , true , _ ) => 2, + /// (true , _ , false) => 3, + /// _ => unreachable!(), + /// } + /// # } + /// ``` + fn test_candidates<'pat, 'b, 'c>( + &mut self, + span: Span, + scrutinee_span: Span, + candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>], + start_block: BasicBlock, + otherwise_block: BasicBlock, + fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>, + ) { + // Extract the match-pair from the highest priority candidate and build a test from it. + let (match_place, test) = self.pick_test(candidates, fake_borrows); + + // For each of the N possible test outcomes, build the vector of candidates that applies if + // the test has that particular outcome. + let (remaining_candidates, target_candidates) = + self.sort_candidates(&match_place, &test, candidates); + + // The block that we should branch to if none of the // `target_candidates` match.
- let remainder_start = if !candidates.is_empty() { + let remainder_start = if !remaining_candidates.is_empty() { let remainder_start = self.cfg.start_new_block(); self.match_candidates( span, scrutinee_span, remainder_start, otherwise_block, - candidates, + remaining_candidates, fake_borrows, ); remainder_start diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 48b93ce0ac5..101f1cb9f2f 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -768,6 +768,14 @@ pub struct TypeNotStructural<'tcx> { } #[derive(Diagnostic)] +#[diag(mir_build_non_partial_eq_match)] +pub struct TypeNotPartialEq<'tcx> { + #[primary_span] + pub span: Span, + pub non_peq_ty: Ty<'tcx>, +} + +#[derive(Diagnostic)] #[diag(mir_build_invalid_pattern)] pub struct InvalidPattern<'tcx> { #[primary_span] @@ -822,12 +830,6 @@ pub struct NontrivialStructuralMatch<'tcx> { pub non_sm_ty: Ty<'tcx>, } -#[derive(LintDiagnostic)] -#[diag(mir_build_non_partial_eq_match)] -pub struct NonPartialEqMatch<'tcx> { - pub non_peq_ty: Ty<'tcx>, -} - #[derive(Diagnostic)] #[diag(mir_build_pattern_not_covered, code = E0005)] pub(crate) struct PatternNotCovered<'s, 'tcx> { diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index c77c80d9f4b..09727f9b71b 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -16,7 +16,7 @@ use std::cell::Cell; use super::PatCtxt; use crate::errors::{ - IndirectStructuralMatch, InvalidPattern, NaNPattern, NonPartialEqMatch, PointerPattern, + IndirectStructuralMatch, InvalidPattern, NaNPattern, PointerPattern, TypeNotPartialEq, TypeNotStructural, UnionPattern, UnsizedPattern, }; @@ -208,15 +208,12 @@ impl<'tcx> ConstToPat<'tcx> { ); } - // Always check for `PartialEq`, even if we emitted other lints. (But not if there were - // any errors.) This ensures it shows up in cargo's future-compat reports as well. + // Always check for `PartialEq` if we had no other errors yet. if !self.type_has_partial_eq_impl(cv.ty()) { - self.tcx().emit_node_span_lint( - lint::builtin::CONST_PATTERNS_WITHOUT_PARTIAL_EQ, - self.id, - self.span, - NonPartialEqMatch { non_peq_ty: cv.ty() }, - ); + let err = TypeNotPartialEq { span: self.span, non_peq_ty: cv.ty() }; + let e = self.tcx().dcx().emit_err(err); + let kind = PatKind::Error(e); + return Box::new(Pat { span: self.span, ty: cv.ty(), kind }); } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index 0329e1d3096..0b03cb52373 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -223,19 +223,14 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { // If we are handling a range with associated constants (e.g. // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated // constants somewhere. Have them on the range pattern. 
- for ascr in [lo_ascr, hi_ascr] { - if let Some(ascription) = ascr { - kind = PatKind::AscribeUserType { - ascription, - subpattern: Box::new(Pat { span, ty, kind }), - }; - } + for ascription in [lo_ascr, hi_ascr].into_iter().flatten() { + kind = PatKind::AscribeUserType { + ascription, + subpattern: Box::new(Pat { span, ty, kind }), + }; } - for inline_const in [lo_inline, hi_inline] { - if let Some(def) = inline_const { - kind = - PatKind::InlineConstant { def, subpattern: Box::new(Pat { span, ty, kind }) }; - } + for def in [lo_inline, hi_inline].into_iter().flatten() { + kind = PatKind::InlineConstant { def, subpattern: Box::new(Pat { span, ty, kind }) }; } Ok(kind) } diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs index c6badbe78a4..c97192435ce 100644 --- a/compiler/rustc_mir_transform/src/coverage/graph.rs +++ b/compiler/rustc_mir_transform/src/coverage/graph.rs @@ -52,19 +52,18 @@ impl CoverageGraph { } } - let mut basic_coverage_blocks = - Self { bcbs, bb_to_bcb, successors, predecessors, dominators: None }; - let dominators = dominators::dominators(&basic_coverage_blocks); - basic_coverage_blocks.dominators = Some(dominators); + let mut this = Self { bcbs, bb_to_bcb, successors, predecessors, dominators: None }; + + this.dominators = Some(dominators::dominators(&this)); // The coverage graph's entry-point node (bcb0) always starts with bb0, // which never has predecessors. Any other blocks merged into bcb0 can't // have multiple (coverage-relevant) predecessors, so bcb0 always has // zero in-edges. - assert!(basic_coverage_blocks[START_BCB].leader_bb() == mir::START_BLOCK); - assert!(basic_coverage_blocks.predecessors[START_BCB].is_empty()); + assert!(this[START_BCB].leader_bb() == mir::START_BLOCK); + assert!(this.predecessors[START_BCB].is_empty()); - basic_coverage_blocks + this } fn compute_basic_coverage_blocks( diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index 98fb1d8e1c9..4260a6f0c6f 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -90,23 +90,23 @@ pub(super) fn generate_coverage_spans( struct CurrCovspan { span: Span, bcb: BasicCoverageBlock, - is_closure: bool, + is_hole: bool, } impl CurrCovspan { - fn new(span: Span, bcb: BasicCoverageBlock, is_closure: bool) -> Self { - Self { span, bcb, is_closure } + fn new(span: Span, bcb: BasicCoverageBlock, is_hole: bool) -> Self { + Self { span, bcb, is_hole } } fn into_prev(self) -> PrevCovspan { - let Self { span, bcb, is_closure } = self; - PrevCovspan { span, bcb, merged_spans: vec![span], is_closure } + let Self { span, bcb, is_hole } = self; + PrevCovspan { span, bcb, merged_spans: vec![span], is_hole } } fn into_refined(self) -> RefinedCovspan { - // This is only called in cases where `curr` is a closure span that has + // This is only called in cases where `curr` is a hole span that has // been carved out of `prev`. - debug_assert!(self.is_closure); + debug_assert!(self.is_hole); self.into_prev().into_refined() } } @@ -118,12 +118,12 @@ struct PrevCovspan { /// List of all the original spans from MIR that have been merged into this /// span. Mainly used to precisely skip over gaps when truncating a span. 
merged_spans: Vec<Span>, - is_closure: bool, + is_hole: bool, } impl PrevCovspan { fn is_mergeable(&self, other: &CurrCovspan) -> bool { - self.bcb == other.bcb && !self.is_closure && !other.is_closure + self.bcb == other.bcb && !self.is_hole && !other.is_hole } fn merge_from(&mut self, other: &CurrCovspan) { @@ -142,8 +142,8 @@ impl PrevCovspan { } fn refined_copy(&self) -> RefinedCovspan { - let &Self { span, bcb, merged_spans: _, is_closure } = self; - RefinedCovspan { span, bcb, is_closure } + let &Self { span, bcb, merged_spans: _, is_hole } = self; + RefinedCovspan { span, bcb, is_hole } } fn into_refined(self) -> RefinedCovspan { @@ -156,12 +156,12 @@ impl PrevCovspan { struct RefinedCovspan { span: Span, bcb: BasicCoverageBlock, - is_closure: bool, + is_hole: bool, } impl RefinedCovspan { fn is_mergeable(&self, other: &Self) -> bool { - self.bcb == other.bcb && !self.is_closure && !other.is_closure + self.bcb == other.bcb && !self.is_hole && !other.is_hole } fn merge_from(&mut self, other: &Self) { @@ -176,16 +176,16 @@ impl RefinedCovspan { /// * Remove duplicate source code coverage regions /// * Merge spans that represent continuous (both in source code and control flow), non-branching /// execution -/// * Carve out (leave uncovered) any span that will be counted by another MIR (notably, closures) +/// * Carve out (leave uncovered) any "hole" spans that need to be left blank +/// (e.g. closures that will be counted by their own MIR body) struct SpansRefiner { /// The initial set of coverage spans, sorted by `Span` (`lo` and `hi`) and by relative /// dominance between the `BasicCoverageBlock`s of equal `Span`s. sorted_spans_iter: std::vec::IntoIter<SpanFromMir>, - /// The current coverage span to compare to its `prev`, to possibly merge, discard, force the - /// discard of the `prev` (and or `pending_dups`), or keep both (with `prev` moved to - /// `pending_dups`). If `curr` is not discarded or merged, it becomes `prev` for the next - /// iteration. + /// The current coverage span to compare to its `prev`, to possibly merge, discard, + /// or cause `prev` to be modified or discarded. + /// If `curr` is not discarded or merged, it becomes `prev` for the next iteration. some_curr: Option<CurrCovspan>, /// The coverage span from a prior iteration; typically assigned from that iteration's `curr`. @@ -229,7 +229,7 @@ impl SpansRefiner { let curr = self.curr(); if prev.is_mergeable(curr) { - debug!(" same bcb (and neither is a closure), merge with prev={prev:?}"); + debug!(?prev, "curr will be merged into prev"); let curr = self.take_curr(); self.prev_mut().merge_from(&curr); } else if prev.span.hi() <= curr.span.lo() { @@ -238,15 +238,13 @@ impl SpansRefiner { ); let prev = self.take_prev().into_refined(); self.refined_spans.push(prev); - } else if prev.is_closure { + } else if prev.is_hole { // drop any equal or overlapping span (`curr`) and keep `prev` to test again in the // next iter - debug!( - " curr overlaps a closure (prev). Drop curr and keep prev for next iter. prev={prev:?}", - ); + debug!(?prev, "prev (a hole) overlaps curr, so discarding curr"); self.take_curr(); // Discards curr. 
- } else if curr.is_closure { - self.carve_out_span_for_closure(); + } else if curr.is_hole { + self.carve_out_span_for_hole(); } else { self.cutoff_prev_at_overlapping_curr(); } @@ -270,10 +268,9 @@ impl SpansRefiner { } }); - // Remove spans derived from closures, originally added to ensure the coverage - // regions for the current function leave room for the closure's own coverage regions - // (injected separately, from the closure's own MIR). - self.refined_spans.retain(|covspan| !covspan.is_closure); + // Discard hole spans, since their purpose was to carve out chunks from + // other spans, but we don't want the holes themselves in the final mappings. + self.refined_spans.retain(|covspan| !covspan.is_hole); self.refined_spans } @@ -316,48 +313,43 @@ impl SpansRefiner { { // Skip curr because prev has already advanced beyond the end of curr. // This can only happen if a prior iteration updated `prev` to skip past - // a region of code, such as skipping past a closure. - debug!( - " prev.span starts after curr.span, so curr will be dropped (skipping past \ - closure?); prev={prev:?}", - ); + // a region of code, such as skipping past a hole. + debug!(?prev, "prev.span starts after curr.span, so curr will be dropped"); } else { - self.some_curr = Some(CurrCovspan::new(curr.span, curr.bcb, curr.is_closure)); + self.some_curr = Some(CurrCovspan::new(curr.span, curr.bcb, curr.is_hole)); return true; } } false } - /// If `prev`s span extends left of the closure (`curr`), carve out the closure's span from - /// `prev`'s span. (The closure's coverage counters will be injected when processing the - /// closure's own MIR.) Add the portion of the span to the left of the closure; and if the span - /// extends to the right of the closure, update `prev` to that portion of the span. For any - /// `pending_dups`, repeat the same process. - fn carve_out_span_for_closure(&mut self) { + /// If `prev`s span extends left of the hole (`curr`), carve out the hole's span from + /// `prev`'s span. Add the portion of the span to the left of the hole; and if the span + /// extends to the right of the hole, update `prev` to that portion of the span. + fn carve_out_span_for_hole(&mut self) { let prev = self.prev(); let curr = self.curr(); let left_cutoff = curr.span.lo(); let right_cutoff = curr.span.hi(); - let has_pre_closure_span = prev.span.lo() < right_cutoff; - let has_post_closure_span = prev.span.hi() > right_cutoff; - - if has_pre_closure_span { - let mut pre_closure = self.prev().refined_copy(); - pre_closure.span = pre_closure.span.with_hi(left_cutoff); - debug!(" prev overlaps a closure. Adding span for pre_closure={:?}", pre_closure); - self.refined_spans.push(pre_closure); + let has_pre_hole_span = prev.span.lo() < right_cutoff; + let has_post_hole_span = prev.span.hi() > right_cutoff; + + if has_pre_hole_span { + let mut pre_hole = prev.refined_copy(); + pre_hole.span = pre_hole.span.with_hi(left_cutoff); + debug!(?pre_hole, "prev overlaps a hole; adding pre-hole span"); + self.refined_spans.push(pre_hole); } - if has_post_closure_span { - // Mutate `prev.span` to start after the closure (and discard curr). + if has_post_hole_span { + // Mutate `prev.span` to start after the hole (and discard curr). self.prev_mut().span = self.prev().span.with_lo(right_cutoff); - debug!(" Mutated prev.span to start after the closure. prev={:?}", self.prev()); + debug!(prev=?self.prev(), "mutated prev to start after the hole"); // Prevent this curr from becoming prev. 
- let closure_covspan = self.take_curr().into_refined(); - self.refined_spans.push(closure_covspan); // since self.prev() was already updated + let hole_covspan = self.take_curr().into_refined(); + self.refined_spans.push(hole_covspan); // since self.prev() was already updated } } diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index b91ab811918..099a354f45d 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -52,14 +52,14 @@ pub(super) fn mir_to_initial_sorted_coverage_spans( // - Span A extends further left, or // - Both have the same start and span A extends further right .then_with(|| Ord::cmp(&a.span.hi(), &b.span.hi()).reverse()) - // If two spans have the same lo & hi, put closure spans first, - // as they take precedence over non-closure spans. - .then_with(|| Ord::cmp(&a.is_closure, &b.is_closure).reverse()) + // If two spans have the same lo & hi, put hole spans first, + // as they take precedence over non-hole spans. + .then_with(|| Ord::cmp(&a.is_hole, &b.is_hole).reverse()) // After deduplication, we want to keep only the most-dominated BCB. .then_with(|| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb).reverse()) }); - // Among covspans with the same span, keep only one. Closure spans take + // Among covspans with the same span, keep only one. Hole spans take // precedence, otherwise keep the one with the most-dominated BCB. // (Ideally we should try to preserve _all_ non-dominating BCBs, but that // requires a lot more complexity in the span refiner, for little benefit.) @@ -78,8 +78,8 @@ pub(super) fn mir_to_initial_sorted_coverage_spans( fn remove_unwanted_macro_spans(initial_spans: &mut Vec<SpanFromMir>) { let mut seen_macro_spans = FxHashSet::default(); initial_spans.retain(|covspan| { - // Ignore (retain) closure spans and non-macro-expansion spans. - if covspan.is_closure || covspan.visible_macro.is_none() { + // Ignore (retain) hole spans and non-macro-expansion spans. + if covspan.is_hole || covspan.visible_macro.is_none() { return true; } @@ -96,7 +96,7 @@ fn split_visible_macro_spans(initial_spans: &mut Vec<SpanFromMir>) { let mut extra_spans = vec![]; initial_spans.retain(|covspan| { - if covspan.is_closure { + if covspan.is_hole { return true; } @@ -112,7 +112,7 @@ fn split_visible_macro_spans(initial_spans: &mut Vec<SpanFromMir>) { return true; } - assert!(!covspan.is_closure); + assert!(!covspan.is_hole); extra_spans.push(SpanFromMir::new(before, covspan.visible_macro, covspan.bcb, false)); extra_spans.push(SpanFromMir::new(after, covspan.visible_macro, covspan.bcb, false)); false // Discard the original covspan that we just split. @@ -148,6 +148,8 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>( let expn_span = filtered_statement_span(statement)?; let (span, visible_macro) = unexpand(expn_span)?; + // A statement that looks like the assignment of a closure expression + // is treated as a "hole" span, to be carved out of other spans. Some(SpanFromMir::new(span, visible_macro, bcb, is_closure_like(statement))) }); @@ -336,7 +338,10 @@ pub(super) struct SpanFromMir { pub(super) span: Span, visible_macro: Option<Symbol>, pub(super) bcb: BasicCoverageBlock, - pub(super) is_closure: bool, + /// If true, this covspan represents a "hole" that should be carved out + /// from other spans, e.g. 
because it represents a closure expression that + /// will be instrumented separately as its own function. + pub(super) is_hole: bool, } impl SpanFromMir { @@ -348,8 +353,8 @@ impl SpanFromMir { span: Span, visible_macro: Option<Symbol>, bcb: BasicCoverageBlock, - is_closure: bool, + is_hole: bool, ) -> Self { - Self { span, visible_macro, bcb, is_closure } + Self { span, visible_macro, bcb, is_hole } } } diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 2009539d4d0..36546a03cdf 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -421,6 +421,10 @@ impl<'tcx> Inliner<'tcx> { callee_attrs: &CodegenFnAttrs, cross_crate_inlinable: bool, ) -> Result<(), &'static str> { + if self.tcx.has_attr(callsite.callee.def_id(), sym::rustc_no_mir_inline) { + return Err("#[rustc_no_mir_inline]"); + } + if let InlineAttr::Never = callee_attrs.inline { return Err("never inline hint"); } diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index 577b8f2080f..2e11da4d585 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -482,17 +482,40 @@ impl<'tcx> Validator<'_, 'tcx> { match op { BinOp::Div | BinOp::Rem => { if lhs_ty.is_integral() { + let sz = lhs_ty.primitive_size(self.tcx); // Integer division: the RHS must be a non-zero const. - let const_val = match rhs { + let rhs_val = match rhs { Operand::Constant(c) => { - c.const_.try_eval_bits(self.tcx, self.param_env) + c.const_.try_eval_scalar_int(self.tcx, self.param_env) } _ => None, }; - match const_val { + match rhs_val.map(|x| x.try_to_uint(sz).unwrap()) { + // for the zero test, int vs uint does not matter Some(x) if x != 0 => {} // okay _ => return Err(Unpromotable), // value not known or 0 -- not okay } + // Furthermore, for signed division, we also have to exclude `int::MIN / -1`. + if lhs_ty.is_signed() { + match rhs_val.map(|x| x.try_to_int(sz).unwrap()) { + Some(-1) | None => { + // The RHS is -1 or unknown, so we have to be careful. + // But is the LHS int::MIN? + let lhs_val = match lhs { + Operand::Constant(c) => c + .const_ + .try_eval_scalar_int(self.tcx, self.param_env), + _ => None, + }; + let lhs_min = sz.signed_int_min(); + match lhs_val.map(|x| x.try_to_int(sz).unwrap()) { + Some(x) if x != lhs_min => {} // okay + _ => return Err(Unpromotable), // value not known or int::MIN -- not okay + } + } + _ => {} + } + } } } // The remaining operations can never fail.
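The `promote_consts.rs` hunk above only promotes an integer division when the divisor is a constant known to be non-zero and, for signed types, when the `int::MIN / -1` combination is ruled out: the mathematical result of that division does not fit in the type, so evaluating it at compile time would fail just like a division by zero. The following is a minimal standalone sketch in ordinary user-level Rust (not compiler code, and not part of the patch) showing the two failure cases the validator has to exclude:

```rust
fn main() {
    let lhs: i32 = i32::MIN;

    // Case 1: division by zero has no result.
    // `checked_div` reports the failure as `None` instead of panicking.
    assert_eq!(lhs.checked_div(0), None);

    // Case 2: `i32::MIN / -1` overflows, because the true quotient
    // (2_147_483_648) is one past `i32::MAX`, so it has no representable
    // result either.
    assert_eq!(lhs.checked_div(-1), None);

    // Any other known, non-zero divisor is fine to evaluate ahead of time.
    assert_eq!(lhs.checked_div(2), Some(i32::MIN / 2));
}
```

Both failure cases would be hard errors if they occurred while evaluating a promoted constant, which is why the validator returns `Unpromotable` unless it can prove neither case can happen.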
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index c768ea93b5f..dc9f5bad765 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -4,7 +4,7 @@ use crate::errors; use crate::lexer::unicode_chars::UNICODE_ARRAY; use crate::make_unclosed_delims_error; use rustc_ast::ast::{self, AttrStyle}; -use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::util::unicode::contains_text_flow_control_chars; use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey}; @@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym }); } self.sess.raw_identifier_spans.push(span); - token::Ident(sym, true) + token::Ident(sym, IdentIsRaw::Yes) } rustc_lexer::TokenKind::UnknownPrefix => { self.report_unknown_prefix(start); @@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { let span = self.mk_sp(start, self.pos); self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default() .push(span); - token::Ident(sym, false) + token::Ident(sym, IdentIsRaw::No) } // split up (raw) c string literals to an ident and a string literal when edition < 2021. rustc_lexer::TokenKind::Literal { @@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { let sym = nfc_normalize(self.str_from(start)); let span = self.mk_sp(start, self.pos); self.sess.symbol_gallery.insert(sym, span); - token::Ident(sym, false) + token::Ident(sym, IdentIsRaw::No) } /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index a136abaa28b..3b4e05332fa 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[ // fancier error recovery to it, as there will be less overall work to do this way. const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[ (" ", "Space", None), - ("_", "Underscore", Some(token::Ident(kw::Underscore, false))), + ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))), ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))), (",", "Comma", Some(token::Comma)), (";", "Semicolon", Some(token::Semi)), diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 0cc2170714c..517e3d82787 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -21,6 +21,8 @@ use crate::errors::{ use crate::fluent_generated as fluent; use crate::parser; use crate::parser::attr::InnerAttrPolicy; +use ast::token::IdentIsRaw; +use parser::Recovered; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; @@ -264,7 +266,7 @@ impl<'a> Parser<'a> { pub(super) fn expected_ident_found( &mut self, recover: bool, - ) -> PResult<'a, (Ident, /* is_raw */ bool)> { + ) -> PResult<'a, (Ident, IdentIsRaw)> { if let TokenKind::DocComment(..) 
= self.prev_token.kind { return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything { span: self.prev_token.span, @@ -290,11 +292,11 @@ impl<'a> Parser<'a> { let bad_token = self.token.clone(); // suggest prepending a keyword in identifier position with `r#` - let suggest_raw = if let Some((ident, false)) = self.token.ident() + let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.is_raw_guess() && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) { - recovered_ident = Some((ident, true)); + recovered_ident = Some((ident, IdentIsRaw::Yes)); // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`, // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#` @@ -320,7 +322,7 @@ impl<'a> Parser<'a> { let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| { let (invalid, valid) = self.token.span.split_at(len as u32); - recovered_ident = Some((Ident::new(valid_portion, valid), false)); + recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No)); HelpIdentifierStartsWithNumber { num_span: invalid } }); @@ -429,7 +431,7 @@ impl<'a> Parser<'a> { &mut self, edible: &[TokenKind], inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { + ) -> PResult<'a, Recovered> { debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible); fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); @@ -532,7 +534,7 @@ impl<'a> Parser<'a> { sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); self.bump(); - return Ok(true); + return Ok(Recovered::Yes); } else if self.look_ahead(0, |t| { t == &token::CloseDelim(Delimiter::Brace) || ((t.can_begin_expr() || t.can_begin_item()) @@ -556,7 +558,7 @@ impl<'a> Parser<'a> { unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); - return Ok(true); + return Ok(Recovered::Yes); } } @@ -653,9 +655,9 @@ impl<'a> Parser<'a> { // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying // that in the parser requires unbounded lookahead, so we only add a hint to the existing // error rather than replacing it entirely. - if ((self.prev_token.kind == TokenKind::Ident(sym::c, false) + if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No) && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }))) - || (self.prev_token.kind == TokenKind::Ident(sym::cr, false) + || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No) && matches!( &self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound @@ -711,7 +713,7 @@ impl<'a> Parser<'a> { if self.check_too_many_raw_str_terminators(&mut err) { if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) { err.emit(); - return Ok(true); + return Ok(Recovered::Yes); } else { return Err(err); } @@ -1223,7 +1225,7 @@ impl<'a> Parser<'a> { |p| p.parse_generic_arg(None), ); match x { - Ok((_, _, false)) => { + Ok((_, _, Recovered::No)) => { if self.eat(&token::Gt) { // We made sense of it. Improve the error message. 
e.span_suggestion_verbose( @@ -1247,7 +1249,7 @@ impl<'a> Parser<'a> { } } } - Ok((_, _, true)) => {} + Ok((_, _, Recovered::Yes)) => {} Err(err) => { err.cancel(); } @@ -1286,7 +1288,7 @@ impl<'a> Parser<'a> { err: &mut ComparisonOperatorsCannotBeChained, inner_op: &Expr, outer_op: &Spanned<AssocOp>, - ) -> bool /* advanced the cursor */ { + ) -> Recovered { if let ExprKind::Binary(op, l1, r1) = &inner_op.kind { if let ExprKind::Field(_, ident) = l1.kind && ident.as_str().parse::<i32>().is_err() @@ -1294,7 +1296,7 @@ impl<'a> Parser<'a> { { // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish // suggestion being the only one to apply is high. - return false; + return Recovered::No; } return match (op.node, &outer_op.node) { // `x == y == z` @@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> { span: inner_op.span.shrink_to_hi(), middle_term: expr_to_str(r1), }); - false // Keep the current parse behavior, where the AST is `(x < y) < z`. + Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`. } // `x == y < z` (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => { @@ -1327,12 +1329,12 @@ impl<'a> Parser<'a> { left: r1.span.shrink_to_lo(), right: r2.span.shrink_to_hi(), }); - true + Recovered::Yes } Err(expr_err) => { expr_err.cancel(); self.restore_snapshot(snapshot); - false + Recovered::Yes } } } @@ -1347,19 +1349,19 @@ impl<'a> Parser<'a> { left: l1.span.shrink_to_lo(), right: r1.span.shrink_to_hi(), }); - true + Recovered::Yes } Err(expr_err) => { expr_err.cancel(); self.restore_snapshot(snapshot); - false + Recovered::No } } } - _ => false, + _ => Recovered::No, }; } - false + Recovered::No } /// Produces an error if comparison operators are chained (RFC #558). @@ -1487,8 +1489,9 @@ impl<'a> Parser<'a> { // If it looks like a genuine attempt to chain operators (as opposed to a // misformatted turbofish, for instance), suggest a correct form. - if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op) - { + let recovered = self + .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); + if matches!(recovered, Recovered::Yes) { self.dcx().emit_err(err); mk_err_expr(self, inner_op.span.to(self.prev_token.span)) } else { @@ -1500,7 +1503,7 @@ impl<'a> Parser<'a> { let recover = self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); self.dcx().emit_err(err); - if recover { + if matches!(recover, Recovered::Yes) { return mk_err_expr(self, inner_op.span.to(self.prev_token.span)); } } @@ -1840,10 +1843,7 @@ impl<'a> Parser<'a> { /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a /// closing delimiter. 
- pub(super) fn unexpected_try_recover( - &mut self, - t: &TokenKind, - ) -> PResult<'a, bool /* recovered */> { + pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { let token_str = pprust::token_kind_to_string(t); let this_token_str = super::token_descr(&self.token); let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 8826c06bebd..1ad637451b1 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -3,13 +3,14 @@ use super::diagnostics::SnapshotParser; use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, - SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken, + AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions, + SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken, }; use crate::errors; use crate::maybe_recover_from_interpolated_ty_qpath; use ast::mut_visit::{noop_visit_expr, MutVisitor}; +use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment}; use core::mem; use rustc_ast::ptr::P; @@ -126,7 +127,7 @@ impl<'a> Parser<'a> { match self.parse_expr_res(restrictions, None) { Ok(expr) => Ok(expr), Err(err) => match self.token.ident() { - Some((Ident { name: kw::Underscore, .. }, false)) + Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) => { // Special-case handling of `foo(_, _, _)` @@ -457,7 +458,9 @@ impl<'a> Parser<'a> { return None; } (Some(op), _) => (op, self.token.span), - (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => { + (None, Some((Ident { name: sym::and, span }, IdentIsRaw::No))) + if self.may_recover() => + { self.dcx().emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "and".into(), @@ -465,7 +468,7 @@ impl<'a> Parser<'a> { }); (AssocOp::LAnd, span) } - (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => { + (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => { self.dcx().emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "or".into(), @@ -742,7 +745,7 @@ impl<'a> Parser<'a> { ( // `foo: ` ExprKind::Path(None, ast::Path { segments, .. 
}), - token::Ident(kw::For | kw::Loop | kw::While, false), + token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No), ) if segments.len() == 1 => { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { @@ -955,19 +958,20 @@ impl<'a> Parser<'a> { fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { loop { - let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { - // we are using noexpect here because we don't expect a `?` directly after a `return` - // which could be suggested otherwise - self.eat_noexpect(&token::Question) - } else { - self.eat(&token::Question) - }; + let has_question = + if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { + // we are using noexpect here because we don't expect a `?` directly after a `return` + // which could be suggested otherwise + self.eat_noexpect(&token::Question) + } else { + self.eat(&token::Question) + }; if has_question { // `expr?` e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e)); continue; } - let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { + let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { // we are using noexpect here because we don't expect a `.` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Dot) @@ -1126,19 +1130,19 @@ impl<'a> Parser<'a> { // 1. DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => { assert!(suffix.is_none()); - self.token = Token::new(token::Ident(sym, false), ident_span); + self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span); let next_token = (Token::new(token::Dot, dot_span), self.token_spacing); self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token)) } // 1.2 | 1.2e3 DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => { - self.token = Token::new(token::Ident(symbol1, false), ident1_span); + self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span); // This needs to be `Spacing::Alone` to prevent regressions. // See issue #76399 and PR #76285 for more details let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone); let base1 = self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1)); - let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); + let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span); self.bump_with((next_token2, self.token_spacing)); // `.` self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None) } @@ -1555,7 +1559,7 @@ impl<'a> Parser<'a> { return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err)); } }; - let kind = if es.len() == 1 && !trailing_comma { + let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) { // `(e)` is parenthesized `e`. 
ExprKind::Paren(es.into_iter().next().unwrap()) } else { @@ -1946,7 +1950,7 @@ impl<'a> Parser<'a> { self.bump(); // `builtin` self.bump(); // `#` - let Some((ident, false)) = self.token.ident() else { + let Some((ident, IdentIsRaw::No)) = self.token.ident() else { let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span }); return Err(err); }; @@ -3087,10 +3091,10 @@ impl<'a> Parser<'a> { if !require_comma { arm_body = Some(expr); this.eat(&token::Comma); - Ok(false) + Ok(Recovered::No) } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) { arm_body = Some(body); - Ok(true) + Ok(Recovered::Yes) } else { let expr_span = expr.span; arm_body = Some(expr); @@ -3171,7 +3175,7 @@ impl<'a> Parser<'a> { this.dcx().emit_err(errors::MissingCommaAfterMatchArm { span: arm_span.shrink_to_hi(), }); - return Ok(true); + return Ok(Recovered::Yes); } Err(err) }); @@ -3574,7 +3578,7 @@ impl<'a> Parser<'a> { fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> { match self.token.ident() { Some((ident, is_raw)) - if (is_raw || !ident.is_reserved()) + if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved()) && self.look_ahead(1, |t| *t == token::Colon) => { Some(ast::ExprField { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 77381ef4626..2e049ca908f 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1,8 +1,12 @@ use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; -use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; +use super::{ + AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing, + TrailingToken, +}; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as fluent; +use ast::token::IdentIsRaw; use rustc_ast::ast::*; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, TokenKind}; @@ -1076,7 +1080,7 @@ impl<'a> Parser<'a> { fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> { match self.token.ident() { - Some((ident @ Ident { name: kw::Underscore, .. }, false)) => { + Some((ident @ Ident { name: kw::Underscore, .. 
}, IdentIsRaw::No)) => { self.bump(); Ok(ident) } @@ -1453,7 +1457,7 @@ impl<'a> Parser<'a> { let (variants, _) = if self.token == TokenKind::Semi { self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span }); self.bump(); - (thin_vec![], false) + (thin_vec![], Trailing::No) } else { self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span)) .map_err(|mut err| { @@ -1530,10 +1534,10 @@ impl<'a> Parser<'a> { err.span_label(span, "while parsing this enum"); err.help(help); err.emit(); - (thin_vec![], true) + (thin_vec![], Recovered::Yes) } }; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { let body = match this.parse_tuple_struct_body() { Ok(body) => body, @@ -1618,7 +1622,7 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } // No `where` so: `struct Foo<T>;` } else if self.eat(&token::Semi) { @@ -1630,7 +1634,7 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } // Tuple-style struct definition with optional where-clause. } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); @@ -1659,14 +1663,14 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body( "union", class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else { let token_str = super::token_descr(&self.token); let msg = format!("expected `where` or `{{` after union name, found {token_str}"); @@ -1683,14 +1687,14 @@ impl<'a> Parser<'a> { adt_ty: &str, ident_span: Span, parsed_where: bool, - ) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> { + ) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> { let mut fields = ThinVec::new(); - let mut recovered = false; + let mut recovered = Recovered::No; if self.eat(&token::OpenDelim(Delimiter::Brace)) { while self.token != token::CloseDelim(Delimiter::Brace) { let field = self.parse_field_def(adt_ty).map_err(|e| { self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No); - recovered = true; + recovered = Recovered::Yes; e }); match field { @@ -1962,7 +1966,7 @@ impl<'a> Parser<'a> { let (ident, is_raw) = self.ident_or_err(true)?; if ident.name == kw::Underscore { self.sess.gated_spans.gate(sym::unnamed_fields, lo); - } else if !is_raw && ident.is_reserved() { + } else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() { let snapshot = self.create_snapshot_for_diagnostic(); let err = if self.check_fn_front_matter(false, Case::Sensitive) { let inherited_vis = Visibility { @@ -2461,8 +2465,8 @@ impl<'a> Parser<'a> { // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't // account for this. 
match self.expect_one_of(&[], &[]) { - Ok(true) => {} - Ok(false) => unreachable!(), + Ok(Recovered::Yes) => {} + Ok(Recovered::No) => unreachable!(), Err(mut err) => { // Qualifier keywords ordering check enum WrongKw { @@ -2740,7 +2744,7 @@ impl<'a> Parser<'a> { fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> { // Extract an identifier *after* having confirmed that the token is one. let expect_self_ident = |this: &mut Self| match this.token.ident() { - Some((ident, false)) => { + Some((ident, IdentIsRaw::No)) => { this.bump(); ident } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index dea2b9e6ca7..29dd2eeb56a 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -11,6 +11,7 @@ mod stmt; mod ty; use crate::lexer::UnmatchedDelim; +use ast::token::IdentIsRaw; pub use attr_wrapper::AttrWrapper; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; @@ -357,6 +358,25 @@ pub enum FollowedByType { No, } +/// Whether a function performed recovery +#[derive(Copy, Clone, Debug)] +pub enum Recovered { + No, + Yes, +} + +impl From<Recovered> for bool { + fn from(r: Recovered) -> bool { + matches!(r, Recovered::Yes) + } +} + +#[derive(Copy, Clone, Debug)] +pub enum Trailing { + No, + Yes, +} + #[derive(Clone, Copy, PartialEq, Eq)] pub enum TokenDescription { ReservedIdentifier, @@ -455,11 +475,11 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); - Ok(false) + Ok(Recovered::No) } else { self.unexpected_try_recover(t) } @@ -475,13 +495,13 @@ impl<'a> Parser<'a> { &mut self, edible: &[TokenKind], inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { + ) -> PResult<'a, Recovered> { if edible.contains(&self.token.kind) { self.bump(); - Ok(false) + Ok(Recovered::No) } else if inedible.contains(&self.token.kind) { // leave it in the input - Ok(false) + Ok(Recovered::No) } else if self.token.kind != token::Eof && self.last_unexpected_token_span == Some(self.token.span) { @@ -499,7 +519,7 @@ impl<'a> Parser<'a> { fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> { let (ident, is_raw) = self.ident_or_err(recover)?; - if !is_raw && ident.is_reserved() { + if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() { let err = self.expected_ident_found_err(); if recover { err.emit(); @@ -511,7 +531,7 @@ impl<'a> Parser<'a> { Ok(ident) } - fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> { + fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> { match self.token.ident() { Some(ident) => Ok(ident), None => self.expected_ident_found(recover), @@ -568,7 +588,7 @@ impl<'a> Parser<'a> { } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() + && let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { true @@ -598,7 +618,7 @@ impl<'a> Parser<'a> { } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() + && let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { self.dcx().emit_err(errors::KwBadCase { 
span: ident.span, kw: kw.as_str() }); @@ -783,10 +803,10 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> { + ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> { let mut first = true; - let mut recovered = false; - let mut trailing = false; + let mut recovered = Recovered::No; + let mut trailing = Trailing::No; let mut v = ThinVec::new(); while !self.expect_any_with_type(kets, expect) { @@ -800,12 +820,12 @@ impl<'a> Parser<'a> { } else { // check for separator match self.expect(t) { - Ok(false) /* not recovered */ => { + Ok(Recovered::No) => { self.current_closure.take(); } - Ok(true) /* recovered */ => { + Ok(Recovered::Yes) => { self.current_closure.take(); - recovered = true; + recovered = Recovered::Yes; break; } Err(mut expect_err) => { @@ -900,7 +920,7 @@ impl<'a> Parser<'a> { } } if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { - trailing = true; + trailing = Trailing::Yes; break; } @@ -978,7 +998,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> { + ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } @@ -990,9 +1010,9 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec<T>, Trailing)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; - if !recovered { + if matches!(recovered, Recovered::No) { self.eat(ket); } Ok((val, trailing)) @@ -1007,7 +1027,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.expect(bra)?; self.parse_seq_to_end(ket, sep, f) } @@ -1019,7 +1039,7 @@ impl<'a> Parser<'a> { &mut self, delim: Delimiter, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.parse_unspanned_seq( &token::OpenDelim(delim), &token::CloseDelim(delim), @@ -1034,7 +1054,7 @@ impl<'a> Parser<'a> { fn parse_paren_comma_seq<T>( &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec<T>, Trailing)> { self.parse_delim_comma_seq(Delimiter::Parenthesis, f) } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 071d6b72f3b..f1572a18a8b 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -201,6 +201,6 @@ impl<'a> Parser<'a> { /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. 
-fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { +fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> { token.ident().filter(|(ident, _)| ident.name != kw::Underscore) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 75fc013d3e6..2ede19b11e0 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -1,4 +1,4 @@ -use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken}; +use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken}; use crate::errors::{ self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed, DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt, @@ -311,7 +311,7 @@ impl<'a> Parser<'a> { matches!( &token.uninterpolate().kind, token::FatArrow // e.g. `a | => 0,`. - | token::Ident(kw::If, false) // e.g. `a | if expr`. + | token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`. | token::Eq // e.g. `let a | = 0`. | token::Semi // e.g. `let a |;`. | token::Colon // e.g. `let a | :`. @@ -696,7 +696,9 @@ impl<'a> Parser<'a> { // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. - Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { + let paren_pattern = + fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest()); + if paren_pattern { let pat = fields.into_iter().next().unwrap(); let close_paren = self.prev_token.span; @@ -714,7 +716,7 @@ impl<'a> Parser<'a> { }, }); - self.parse_pat_range_begin_with(begin.clone(), form)? + self.parse_pat_range_begin_with(begin.clone(), form) } // recover ranges with parentheses around the `(start)..` PatKind::Err(_) @@ -729,15 +731,15 @@ impl<'a> Parser<'a> { }, }); - self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)? + self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form) } // (pat) with optional parentheses - _ => PatKind::Paren(pat), + _ => Ok(PatKind::Paren(pat)), } } else { - PatKind::Tuple(fields) - }) + Ok(PatKind::Tuple(fields)) + } } /// Parse a mutable binding with the `mut` token already eaten. 
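(Aside: the parser hunks above all converge on one call-site pattern: code that previously threaded (ThinVec<T>, bool /* trailing */, bool /* recovered */) tuples or compared against bare true/false now matches on the dedicated Recovered and Trailing enums added in parser/mod.rs. The sketch below is a minimal, self-contained illustration of that pattern only; parse_seq and its signature are stand-ins invented for this note, not rustc's actual parser API.)

// Minimal sketch of the bool-to-enum conversion the parser hunks above apply.
// `Recovered` and `Trailing` mirror the enums added in parser/mod.rs; the
// `parse_seq` function is an illustrative stand-in, not rustc's API.

#[derive(Copy, Clone, Debug)]
enum Recovered { No, Yes }

#[derive(Copy, Clone, Debug)]
enum Trailing { No, Yes }

impl From<Recovered> for bool {
    fn from(r: Recovered) -> bool {
        matches!(r, Recovered::Yes)
    }
}

// Stand-in for something like `parse_seq_to_before_end`: the parsed items are
// returned together with two named flags instead of two easily-confused bools.
fn parse_seq(src: &str) -> (Vec<String>, Trailing, Recovered) {
    let trailing = if src.trim_end().ends_with(',') { Trailing::Yes } else { Trailing::No };
    let items = src
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(str::to_string)
        .collect();
    (items, trailing, Recovered::No)
}

fn main() {
    let (items, trailing, recovered) = parse_seq("x");

    // Call sites now pattern-match instead of testing a bare bool, e.g.
    // `if es.len() == 1 && matches!(trailing_comma, Trailing::No)` above.
    if items.len() == 1 && matches!(trailing, Trailing::No) {
        println!("one element, no trailing comma: treat as parenthesized");
    }

    // Where a bool is still wanted, the `From` impl keeps the conversion explicit.
    let recovered_flag: bool = recovered.into();
    println!("items={items:?} trailing={trailing:?} recovered={recovered_flag}");
}

A usage note: the same shape recurs throughout the diff for UsePrelude and IsFirstInputType, where From<bool> impls let existing boolean expressions feed the enum via .into() while forcing readers at each use site to name which flag they are testing.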
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 681039999a6..6e7bbe7e06d 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{Parser, Restrictions, TokenType}; use crate::errors::PathSingleColon; use crate::{errors, maybe_whole}; +use ast::token::IdentIsRaw; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::{ @@ -390,7 +391,7 @@ impl<'a> Parser<'a> { pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> { match self.token.ident() { - Some((ident, false)) if ident.is_path_segment_keyword() => { + Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => { self.bump(); Ok(ident) } diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 1bae5b32240..ee02b69c614 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -11,6 +11,7 @@ use crate::errors; use crate::maybe_whole; use crate::errors::MalformedLoopLabel; +use crate::parser::Recovered; use ast::Label; use rustc_ast as ast; use rustc_ast::ptr::P; @@ -661,7 +662,6 @@ impl<'a> Parser<'a> { if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => { // Just check for errors and recover; do not eat semicolon yet. - // `expect_one_of` returns PResult<'a, bool /* recovered */> let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]); @@ -669,7 +669,7 @@ impl<'a> Parser<'a> { let replace_with_err = 'break_recover: { match expect_result { // Recover from parser, skip type error to avoid extra errors. - Ok(true) => true, + Ok(Recovered::Yes) => true, Err(e) => { if self.recover_colon_as_semi() { // recover_colon_as_semi has already emitted a nicer error. @@ -691,7 +691,7 @@ impl<'a> Parser<'a> { token.kind, token::Ident( kw::For | kw::Loop | kw::While, - false + token::IdentIsRaw::No ) | token::OpenDelim(Delimiter::Brace) ) }) @@ -735,7 +735,7 @@ impl<'a> Parser<'a> { true } - Ok(false) => false, + Ok(Recovered::No) => false, } }; diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 23a92e6dd3d..e3fe535bd5f 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -1,4 +1,4 @@ -use super::{Parser, PathStyle, TokenType}; +use super::{Parser, PathStyle, TokenType, Trailing}; use crate::errors::{ self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType, @@ -415,7 +415,7 @@ impl<'a> Parser<'a> { Ok(ty) })?; - if ts.len() == 1 && !trailing { + if ts.len() == 1 && matches!(trailing, Trailing::No) { let ty = ts.into_iter().next().unwrap().into_inner(); let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus(); match ty.kind { diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs index b0982029657..5d46581f646 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_parse/src/validate_attr.rs @@ -88,7 +88,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta // results in `ast::ExprKind::Err`. In that case we delay // the error because an earlier error will have already // been reported. 
- let msg = format!("attribute value must be a literal"); + let msg = "attribute value must be a literal"; let mut err = sess.dcx.struct_span_err(expr.span, msg); if let ast::ExprKind::Err = expr.kind { err.downgrade_to_delayed_bug(); diff --git a/compiler/rustc_pattern_analysis/src/constructor.rs b/compiler/rustc_pattern_analysis/src/constructor.rs index 24824682b74..483986969d1 100644 --- a/compiler/rustc_pattern_analysis/src/constructor.rs +++ b/compiler/rustc_pattern_analysis/src/constructor.rs @@ -694,18 +694,14 @@ impl<Cx: TypeCx> Clone for Constructor<Cx> { fn clone(&self) -> Self { match self { Constructor::Struct => Constructor::Struct, - Constructor::Variant(idx) => Constructor::Variant(idx.clone()), + Constructor::Variant(idx) => Constructor::Variant(*idx), Constructor::Ref => Constructor::Ref, - Constructor::Slice(slice) => Constructor::Slice(slice.clone()), + Constructor::Slice(slice) => Constructor::Slice(*slice), Constructor::UnionField => Constructor::UnionField, - Constructor::Bool(b) => Constructor::Bool(b.clone()), - Constructor::IntRange(range) => Constructor::IntRange(range.clone()), - Constructor::F32Range(lo, hi, end) => { - Constructor::F32Range(lo.clone(), hi.clone(), end.clone()) - } - Constructor::F64Range(lo, hi, end) => { - Constructor::F64Range(lo.clone(), hi.clone(), end.clone()) - } + Constructor::Bool(b) => Constructor::Bool(*b), + Constructor::IntRange(range) => Constructor::IntRange(*range), + Constructor::F32Range(lo, hi, end) => Constructor::F32Range(lo.clone(), *hi, *end), + Constructor::F64Range(lo, hi, end) => Constructor::F64Range(lo.clone(), *hi, *end), Constructor::Str(value) => Constructor::Str(value.clone()), Constructor::Opaque(inner) => Constructor::Opaque(inner.clone()), Constructor::Or => Constructor::Or, diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs index a24da448766..f672051be5a 100644 --- a/compiler/rustc_pattern_analysis/src/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -1189,6 +1189,25 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> { } Ok(matrix) } + + /// Recover row usefulness and intersection information from a processed specialized matrix. + /// `specialized` must come from `self.specialize_constructor`. + fn unspecialize(&mut self, specialized: Self) { + for child_row in specialized.rows() { + let parent_row_id = child_row.parent_row; + let parent_row = &mut self.rows[parent_row_id]; + // A parent row is useful if any of its children is. + parent_row.useful |= child_row.useful; + for child_intersection in child_row.intersects.iter() { + // Convert the intersecting ids into ids for the parent matrix. + let parent_intersection = specialized.rows[child_intersection].parent_row; + // Note: self-intersection can happen with or-patterns. + if parent_intersection != parent_row_id { + parent_row.intersects.insert(parent_intersection); + } + } + } + } } /// Pretty-printer for matrices of patterns, example: @@ -1558,21 +1577,6 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( // Accumulate the found witnesses. ret.extend(witnesses); - for child_row in spec_matrix.rows() { - let parent_row_id = child_row.parent_row; - let parent_row = &mut matrix.rows[parent_row_id]; - // A parent row is useful if any of its children is. - parent_row.useful |= child_row.useful; - for child_intersection in child_row.intersects.iter() { - // Convert the intersecting ids into ids for the parent matrix. 
- let parent_intersection = spec_matrix.rows[child_intersection].parent_row; - // Note: self-intersection can happen with or-patterns. - if parent_intersection != parent_row_id { - parent_row.intersects.insert(parent_intersection); - } - } - } - // Detect ranges that overlap on their endpoints. if let Constructor::IntRange(overlap_range) = ctor { if overlap_range.is_singleton() @@ -1582,6 +1586,8 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( collect_overlapping_range_endpoints(mcx, overlap_range, matrix, &spec_matrix); } } + + matrix.unspecialize(spec_matrix); } // Record usefulness in the patterns. diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs index 9d8a9f5fce3..1c6bd887128 100644 --- a/compiler/rustc_privacy/src/lib.rs +++ b/compiler/rustc_privacy/src/lib.rs @@ -988,7 +988,10 @@ impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> { fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) { if let hir::ExprKind::Struct(qpath, fields, ref base) = expr.kind { let res = self.typeck_results().qpath_res(qpath, expr.hir_id); - let adt = self.typeck_results().expr_ty(expr).ty_adt_def().unwrap(); + let Some(adt) = self.typeck_results().expr_ty(expr).ty_adt_def() else { + self.tcx.dcx().span_delayed_bug(expr.span, "no adt_def for expression"); + return; + }; let variant = adt.variant_of_res(res); if let Some(base) = *base { // If the expression uses FRU we need to make sure all the unmentioned fields diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index d64a3b43aad..222dd69dbc4 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -1111,7 +1111,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { suggestions.extend( tmp_suggestions .into_iter() - .filter(|s| use_prelude || this.is_builtin_macro(s.res)), + .filter(|s| use_prelude.into() || this.is_builtin_macro(s.res)), ); } } diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index 4583f991cab..7e7424be303 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -23,6 +23,18 @@ use Namespace::*; type Visibility = ty::Visibility<LocalDefId>; +#[derive(Copy, Clone)] +pub enum UsePrelude { + No, + Yes, +} + +impl From<UsePrelude> for bool { + fn from(up: UsePrelude) -> bool { + matches!(up, UsePrelude::Yes) + } +} + impl<'a, 'tcx> Resolver<'a, 'tcx> { /// A generic scope visitor. /// Visits scopes in order to resolve some identifier in them or perform other actions. @@ -32,12 +44,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { scope_set: ScopeSet<'a>, parent_scope: &ParentScope<'a>, ctxt: SyntaxContext, - mut visitor: impl FnMut( - &mut Self, - Scope<'a>, - /*use_prelude*/ bool, - SyntaxContext, - ) -> Option<T>, + mut visitor: impl FnMut(&mut Self, Scope<'a>, UsePrelude, SyntaxContext) -> Option<T>, ) -> Option<T> { // General principles: // 1. Not controlled (user-defined) names should have higher priority than controlled names @@ -133,6 +140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; if visit { + let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No }; if let break_result @ Some(..) 
= visitor(self, scope, use_prelude, ctxt) { return break_result; } @@ -579,7 +587,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None, ignore_binding, ) { - if use_prelude || this.is_builtin_macro(binding.res()) { + if matches!(use_prelude, UsePrelude::Yes) + || this.is_builtin_macro(binding.res()) + { result = Ok((binding, Flags::MISC_FROM_PRELUDE)); } } diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index 335bf0949d6..51723fc81a0 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -1582,7 +1582,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> { None => ("/* fields */".to_string(), Applicability::HasPlaceholders), }; let pad = match field_ids { - Some(field_ids) if field_ids.is_empty() => "", + Some([]) => "", _ => " ", }; err.span_suggestion( diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 02c7a0c6371..b6c19486898 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -1264,7 +1264,7 @@ fn validate_commandline_args_with_session_available(sess: &Session) { // LLVM CFI using rustc LTO requires a single codegen unit. if sess.is_sanitizer_cfi_enabled() && sess.lto() == config::Lto::Fat - && !(sess.codegen_units().as_usize() == 1) + && (sess.codegen_units().as_usize() != 1) { sess.dcx().emit_err(errors::SanitizerCfiRequiresSingleCodegenUnit); } diff --git a/compiler/rustc_smir/src/rustc_smir/context.rs b/compiler/rustc_smir/src/rustc_smir/context.rs index b95186b0a1c..540bc483548 100644 --- a/compiler/rustc_smir/src/rustc_smir/context.rs +++ b/compiler/rustc_smir/src/rustc_smir/context.rs @@ -208,11 +208,10 @@ impl<'tcx> Context for TablesWrapper<'tcx> { let crates: Vec<stable_mir::Crate> = [LOCAL_CRATE] .iter() .chain(tables.tcx.crates(()).iter()) - .map(|crate_num| { + .filter_map(|crate_num| { let crate_name = tables.tcx.crate_name(*crate_num).to_string(); (name == crate_name).then(|| smir_crate(tables.tcx, *crate_num)) }) - .flatten() .collect(); crates } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 46472a131ff..609ab054da2 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1441,6 +1441,7 @@ symbols! 
{ rustc_mir, rustc_must_implement_one_of, rustc_never_returns_null_ptr, + rustc_no_mir_inline, rustc_nonnull_optimization_guaranteed, rustc_nounwind, rustc_object_lifetime_default, diff --git a/compiler/rustc_target/src/lib.rs b/compiler/rustc_target/src/lib.rs index 04c5e60aa6b..8019d2b80cd 100644 --- a/compiler/rustc_target/src/lib.rs +++ b/compiler/rustc_target/src/lib.rs @@ -9,9 +9,10 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![cfg_attr(bootstrap, feature(exhaustive_patterns))] +#![cfg_attr(not(bootstrap), feature(min_exhaustive_patterns))] #![feature(rustdoc_internals)] #![feature(assert_matches)] -#![feature(exhaustive_patterns)] #![feature(iter_intersperse)] #![feature(let_chains)] #![cfg_attr(bootstrap, feature(min_specialization))] diff --git a/compiler/rustc_target/src/spec/base/apple/tests.rs b/compiler/rustc_target/src/spec/base/apple/tests.rs index f13058ebc82..097039d6c73 100644 --- a/compiler/rustc_target/src/spec/base/apple/tests.rs +++ b/compiler/rustc_target/src/spec/base/apple/tests.rs @@ -14,7 +14,7 @@ fn simulator_targets_set_abi() { aarch64_apple_watchos_sim::target(), ]; - for target in all_sim_targets { + for target in &all_sim_targets { assert_eq!(target.abi, "sim") } } diff --git a/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs index 22f6ee81055..70e40f60f22 100644 --- a/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs @@ -8,8 +8,7 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), + env: "gnu".into(), features: "+v6,+vfp2,-d32".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs b/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs index 84d9ceac04d..ca0db5e5640 100644 --- a/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs @@ -8,8 +8,6 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), features: "+v6,+vfp2,-d32".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs index 9f4a432c6fa..61b6d7a63e3 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs @@ -8,8 +8,7 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), + env: "gnu".into(), features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs index e5518c6daec..7afdb87b62e 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs +++ 
b/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs @@ -8,8 +8,6 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index 3619d02438d..e82171de378 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -320,22 +320,25 @@ fn impl_intersection_has_impossible_obligation<'a, 'cx, 'tcx>( let mut errors = fulfill_cx.select_where_possible(infcx); errors.pop().map(|err| err.obligation) } else { - obligations.iter().cloned().find(|obligation| { - // We use `evaluate_root_obligation` to correctly track intercrate - // ambiguity clauses. We cannot use this in the new solver. - let evaluation_result = selcx.evaluate_root_obligation(obligation); - - match evaluation_result { - Ok(result) => !result.may_apply(), - // If overflow occurs, we need to conservatively treat the goal as possibly holding, - // since there can be instantiations of this goal that don't overflow and result in - // success. This isn't much of a problem in the old solver, since we treat overflow - // fatally (this still can be encountered: <https://github.com/rust-lang/rust/issues/105231>), - // but in the new solver, this is very important for correctness, since overflow - // *must* be treated as ambiguity for completeness. - Err(_overflow) => false, - } - }) + obligations + .iter() + .find(|obligation| { + // We use `evaluate_root_obligation` to correctly track intercrate + // ambiguity clauses. We cannot use this in the new solver. + let evaluation_result = selcx.evaluate_root_obligation(obligation); + + match evaluation_result { + Ok(result) => !result.may_apply(), + // If overflow occurs, we need to conservatively treat the goal as possibly holding, + // since there can be instantiations of this goal that don't overflow and result in + // success. This isn't much of a problem in the old solver, since we treat overflow + // fatally (this still can be encountered: <https://github.com/rust-lang/rust/issues/105231>), + // but in the new solver, this is very important for correctness, since overflow + // *must* be treated as ambiguity for completeness. + Err(_overflow) => false, + } + }) + .cloned() } } @@ -598,9 +601,24 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>( trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental) } +#[derive(Debug, Copy, Clone)] +pub enum IsFirstInputType { + No, + Yes, +} + +impl From<bool> for IsFirstInputType { + fn from(b: bool) -> IsFirstInputType { + match b { + false => IsFirstInputType::No, + true => IsFirstInputType::Yes, + } + } +} + #[derive(Debug)] pub enum OrphanCheckErr<'tcx> { - NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>), + NonLocalInputType(Vec<(Ty<'tcx>, IsFirstInputType)>), UncoveredTy(Ty<'tcx>, Option<Ty<'tcx>>), } @@ -751,7 +769,7 @@ struct OrphanChecker<'tcx, F> { /// Ignore orphan check failures and exclusively search for the first /// local type. 
search_first_local_ty: bool, - non_local_tys: Vec<(Ty<'tcx>, bool)>, + non_local_tys: Vec<(Ty<'tcx>, IsFirstInputType)>, } impl<'tcx, F, E> OrphanChecker<'tcx, F> @@ -769,7 +787,7 @@ where } fn found_non_local_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<OrphanCheckEarlyExit<'tcx, E>> { - self.non_local_tys.push((t, self.in_self_ty)); + self.non_local_tys.push((t, self.in_self_ty.into())); ControlFlow::Continue(()) } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 8ae31392b40..85f6da0d6cc 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -1283,9 +1283,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", file.display() )); - err.note(format!( - "consider using `--verbose` to print full type name to the console" - )); + err.note( + "consider using `--verbose` to print full type name to the console", + ); } if imm_ref_self_ty_satisfies_pred && mut_ref_self_ty_satisfies_pred { @@ -2869,9 +2869,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full name for the type has been written to '{}'", file.display(), )); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note( + "consider using `--verbose` to print the full type name to the console", + ); } } ObligationCauseCode::RepeatElementCopy { @@ -3339,9 +3339,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", file.display(), )); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note( + "consider using `--verbose` to print the full type name to the console", + ); } let mut parent_predicate = parent_trait_pred; let mut data = &data.derived; @@ -3395,9 +3395,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", file.display(), )); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note( + "consider using `--verbose` to print the full type name to the console", + ); } } // #74711: avoid a stack overflow diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index cac53796747..32447aca390 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -42,7 +42,7 @@ use std::fmt::Debug; use std::ops::ControlFlow; pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls}; -pub use self::coherence::{OrphanCheckErr, OverlapResult}; +pub use self::coherence::{IsFirstInputType, OrphanCheckErr, OverlapResult}; pub use self::engine::{ObligationCtxt, TraitEngineExt}; pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation}; pub use self::normalize::NormalizeExt; @@ -172,9 +172,7 @@ fn do_normalize_predicates<'tcx>( // the normalized predicates. let errors = infcx.resolve_regions(&outlives_env); if !errors.is_empty() { - // @lcnr: Let's still ICE here for now. I want a test case - // for that. 
- tcx.dcx().span_bug( + tcx.dcx().span_delayed_bug( span, format!("failed region resolution while normalizing {elaborated_env:?}: {errors:?}"), ); diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index f8de19043e1..68c03e3c73e 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1031,12 +1031,9 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( { candidate_set.mark_ambiguous(); true - } else if obligation.predicate.args.type_at(0).to_opt_closure_kind().is_some() - && obligation.predicate.args.type_at(1).to_opt_closure_kind().is_some() - { - true } else { - false + obligation.predicate.args.type_at(0).to_opt_closure_kind().is_some() + && obligation.predicate.args.type_at(1).to_opt_closure_kind().is_some() } } else if lang_items.discriminant_kind_trait() == Some(trait_ref.def_id) { match self_ty.kind() { diff --git a/compiler/stable_mir/src/compiler_interface.rs b/compiler/stable_mir/src/compiler_interface.rs index 6272f793f40..0f7d8d7e083 100644 --- a/compiler/stable_mir/src/compiler_interface.rs +++ b/compiler/stable_mir/src/compiler_interface.rs @@ -208,7 +208,7 @@ where if TLV.is_set() { Err(Error::from("StableMIR already running")) } else { - let ptr: *const () = &context as *const &_ as _; + let ptr: *const () = std::ptr::addr_of!(context) as _; TLV.set(&Cell::new(ptr), || Ok(f())) } } |
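(Aside: the rustc_pattern_analysis change earlier in this diff is a pure extraction: the inline loop in compute_exhaustiveness_and_usefulness that folded per-row usefulness from a specialized matrix back into its parent now lives behind Matrix::unspecialize. Below is a heavily reduced sketch of that shape; the struct and field types are toy stand-ins for rustc's real Matrix and row machinery, and the real method also folds row-intersection information, which is omitted here.)

// Reduced sketch of the `unspecialize` extraction: each child row remembers
// which parent row it came from, and usefulness found on the children is
// folded back into the parent in one named step. Types are illustrative only.

#[derive(Debug)]
struct Row {
    useful: bool,
    parent_row: usize,
}

#[derive(Debug)]
struct Matrix {
    rows: Vec<Row>,
}

impl Matrix {
    /// Fold usefulness discovered on a specialized matrix back into `self`.
    /// `specialized` must have been produced from `self`, so its `parent_row`
    /// indices are valid for `self.rows`.
    fn unspecialize(&mut self, specialized: Matrix) {
        for child_row in &specialized.rows {
            // A parent row is useful if any of its children is.
            self.rows[child_row.parent_row].useful |= child_row.useful;
        }
    }
}

fn main() {
    let mut matrix = Matrix {
        rows: vec![
            Row { useful: false, parent_row: 0 },
            Row { useful: false, parent_row: 0 },
        ],
    };

    // Pretend specialization produced two child rows, one of which turned out
    // to be useful; both point back at parent row 1.
    let spec_matrix = Matrix {
        rows: vec![
            Row { useful: false, parent_row: 1 },
            Row { useful: true, parent_row: 1 },
        ],
    };

    // The caller now makes one call instead of running the loop inline.
    matrix.unspecialize(spec_matrix);
    assert!(matrix.rows[1].useful);
    println!("{matrix:?}");
}

The design gain is the same as in the parser changes: the caller reads as "use the specialized matrix, then unspecialize it", and the bookkeeping invariants are documented on the method rather than implied by an anonymous loop.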
