author    bors <bors@rust-lang.org>  2025-07-06 10:20:27 +0000
committer bors <bors@rust-lang.org>  2025-07-06 10:20:27 +0000
commit    c83e217d268d25960a0c79c6941bcb3917a6a0af (patch)
tree      5fd07b0e7aacf90ba97a22a7254478e0a3b115b4 /compiler
parent    e804cd4a5f1a5b658ddca245c80bef96a576c018 (diff)
parent    097efc07cc13d8f6a3e04fe2a045b8a3ad6fd576 (diff)
Auto merge of #143521 - matthiaskrgr:rollup-kpv1og3, r=matthiaskrgr
Rollup of 6 pull requests

Successful merges:

 - rust-lang/rust#143416 (mbe: Defer checks for `compile_error!` until reporting an unused macro rule)
 - rust-lang/rust#143470 (std: sys: net: uefi: tcp4: Implement read)
 - rust-lang/rust#143477 (use `is_multiple_of` and `div_ceil`)
 - rust-lang/rust#143484 (distinguish the duplicate item of rpitit)
 - rust-lang/rust#143493 (tidy: use --bless for tidy spellcheck instead of spellcheck:fix)
 - rust-lang/rust#143504 (compiletest: print slightly more information on fs::write failure)

r? `@ghost`
`@rustbot` modify labels: rollup
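
PR #143477 replaces hand-rolled modulo and round-up arithmetic with
`is_multiple_of` and `div_ceil` across most of the files below. A minimal
sketch of the two equivalences, assuming a toolchain on which both methods
are stable (illustrative only, not part of the commit):

    // `x % n == 0`        <=>  x.is_multiple_of(n)
    // `(x + n - 1) / n`     =  x.div_ceil(n)  (when `x + n - 1` cannot overflow)
    fn main() {
        let x: u64 = 37;
        assert_eq!(x % 8 == 0, x.is_multiple_of(8));
        assert_eq!((x + 7) / 8, x.div_ceil(8));
    }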
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_abi/src/lib.rs                            |   3
-rw-r--r--  compiler/rustc_borrowck/src/polonius/legacy/location.rs  |   2
-rw-r--r--  compiler/rustc_codegen_llvm/src/abi.rs                   |   2
-rw-r--r--  compiler/rustc_codegen_llvm/src/va_arg.rs                |   4
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs        |   4
-rw-r--r--  compiler/rustc_expand/src/base.rs                        |   4
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs             |  10
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs             | 140
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs                  |  66
-rw-r--r--  compiler/rustc_index/src/bit_set.rs                      |   4
-rw-r--r--  compiler/rustc_passes/src/liveness/rwu_table.rs          |   2
-rw-r--r--  compiler/rustc_query_system/src/query/plumbing.rs        |   2
-rw-r--r--  compiler/rustc_resolve/src/build_reduced_graph.rs        |   8
-rw-r--r--  compiler/rustc_resolve/src/lib.rs                        |   6
-rw-r--r--  compiler/rustc_resolve/src/macros.rs                     |  30
-rw-r--r--  compiler/rustc_serialize/src/leb128.rs                   |   2
-rw-r--r--  compiler/rustc_span/src/edit_distance.rs                 |   2
-rw-r--r--  compiler/rustc_target/src/callconv/sparc64.rs            |   6
-rw-r--r--  compiler/rustc_target/src/callconv/x86.rs                |   2
-rw-r--r--  compiler/rustc_target/src/callconv/x86_64.rs             |   2
-rw-r--r--  compiler/rustc_target/src/callconv/xtensa.rs             |   2
-rw-r--r--  compiler/rustc_ty_utils/src/assoc.rs                     |  31
-rw-r--r--  compiler/rustc_ty_utils/src/layout/invariant.rs          |   2
23 files changed, 183 insertions, 153 deletions
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 0df8921c9b7..a438545c76f 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -527,8 +527,7 @@ impl Size {
     /// not a multiple of 8.
     pub fn from_bits(bits: impl TryInto<u64>) -> Size {
         let bits = bits.try_into().ok().unwrap();
-        // Avoid potential overflow from `bits + 7`.
-        Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
+        Size { raw: bits.div_ceil(8) }
     }
 
     #[inline]
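
The deleted comment above names the reason for the old formula: `bits + 7`
can wrap past `u64::MAX`, so the original code rounded up without that
addition, and `div_ceil` does the same thing directly. A sketch of the edge
case (illustrative, not from the commit):

    fn main() {
        let bits = u64::MAX - 6;
        // `(bits + 7) / 8` would overflow here (a panic in debug builds);
        // `div_ceil` computes the rounded-up quotient without the addition.
        assert_eq!(bits.div_ceil(8), u64::MAX / 8 + 1);
    }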
diff --git a/compiler/rustc_borrowck/src/polonius/legacy/location.rs b/compiler/rustc_borrowck/src/polonius/legacy/location.rs
index 5f816bb9bbd..618119a6a3d 100644
--- a/compiler/rustc_borrowck/src/polonius/legacy/location.rs
+++ b/compiler/rustc_borrowck/src/polonius/legacy/location.rs
@@ -109,6 +109,6 @@ impl PoloniusLocationTable {
 impl LocationIndex {
     fn is_start(self) -> bool {
         // even indices are start points; odd indices are mid points
-        (self.index() % 2) == 0
+        self.index().is_multiple_of(2)
     }
 }
diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs
index 4b07c8aef91..009e7e2487b 100644
--- a/compiler/rustc_codegen_llvm/src/abi.rs
+++ b/compiler/rustc_codegen_llvm/src/abi.rs
@@ -146,7 +146,7 @@ impl LlvmType for CastTarget {
                 "total size {:?} cannot be divided into units of zero size",
                 self.rest.total
             );
-            if self.rest.total.bytes() % self.rest.unit.size.bytes() != 0 {
+            if !self.rest.total.bytes().is_multiple_of(self.rest.unit.size.bytes()) {
                 assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
             }
             self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())
diff --git a/compiler/rustc_codegen_llvm/src/va_arg.rs b/compiler/rustc_codegen_llvm/src/va_arg.rs
index 4fe4c9bcbf2..486dc894a4e 100644
--- a/compiler/rustc_codegen_llvm/src/va_arg.rs
+++ b/compiler/rustc_codegen_llvm/src/va_arg.rs
@@ -172,10 +172,10 @@ fn emit_aapcs_va_arg<'ll, 'tcx>(
 
     let gr_type = target_ty.is_any_ptr() || target_ty.is_integral();
     let (reg_off, reg_top, slot_size) = if gr_type {
-        let nreg = (layout.size.bytes() + 7) / 8;
+        let nreg = layout.size.bytes().div_ceil(8);
         (gr_offs, gr_top, nreg * 8)
     } else {
-        let nreg = (layout.size.bytes() + 15) / 16;
+        let nreg = layout.size.bytes().div_ceil(16);
         (vr_offs, vr_top, nreg * 16)
     };
 
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index ff822b52a8d..c97d53a45de 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -537,7 +537,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
 
         #[inline]
         fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
-            if offset % align.bytes() == 0 {
+            if offset.is_multiple_of(align.bytes()) {
                 None
             } else {
                 // The biggest power of two through which `offset` is divisible.
@@ -1554,7 +1554,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
                         // If the allocation is N-aligned, and the offset is not divisible by N,
                         // then `base + offset` has a non-zero remainder after division by `N`,
                         // which means `base + offset` cannot be null.
-                        if offset.bytes() % info.align.bytes() != 0 {
+                        if !offset.bytes().is_multiple_of(info.align.bytes()) {
                             return interp_ok(false);
                         }
                         // We don't know enough, this might be null.
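
The comment in the second hunk compresses a small proof: if `base` is a
multiple of `align`, then `(base + offset) % align == offset % align`, so a
non-multiple offset rules out `base + offset == 0`. A worked instance with
illustrative numbers (not from the commit):

    fn main() {
        let align: u64 = 8;  // allocation is 8-aligned, so base % 8 == 0
        let offset: u64 = 3; // (base + 3) % 8 == 3 != 0, hence base + 3 != 0
        assert!(!offset.is_multiple_of(align));
    }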
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 80f6e9d9fc4..d6d89808839 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -348,6 +348,10 @@ pub trait TTMacroExpander {
         span: Span,
         input: TokenStream,
     ) -> MacroExpanderResult<'cx>;
+
+    fn get_unused_rule(&self, _rule_i: usize) -> Option<(&Ident, Span)> {
+        None
+    }
 }
 
 pub type MacroExpanderResult<'cx> = ExpandResult<Box<dyn MacResult + 'cx>, ()>;
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index c607a3a3652..7a280d671f4 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -10,7 +10,7 @@ use rustc_span::source_map::SourceMap;
 use rustc_span::{ErrorGuaranteed, Ident, Span};
 use tracing::debug;
 
-use super::macro_rules::{NoopTracker, parser_from_cx};
+use super::macro_rules::{MacroRule, NoopTracker, parser_from_cx};
 use crate::expand::{AstFragmentKind, parse_ast_fragment};
 use crate::mbe::macro_parser::ParseResult::*;
 use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser};
@@ -22,14 +22,14 @@ pub(super) fn failed_to_match_macro(
     def_span: Span,
     name: Ident,
     arg: TokenStream,
-    lhses: &[Vec<MatcherLoc>],
+    rules: &[MacroRule],
 ) -> (Span, ErrorGuaranteed) {
     debug!("failed to match macro");
     // An error occurred, try the expansion again, tracking the expansion closely for better
     // diagnostics.
     let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp);
 
-    let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);
+    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut tracker);
 
     if try_success_result.is_ok() {
         // Nonterminal parser recovery might turn failed matches into successful ones,
@@ -80,12 +80,12 @@ pub(super) fn failed_to_match_macro(
 
     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
     if let Some((arg, comma_span)) = arg.add_comma() {
-        for lhs in lhses {
+        for rule in rules {
             let parser = parser_from_cx(psess, arg.clone(), Recovery::Allowed);
             let mut tt_parser = TtParser::new(name);
 
             if let Success(_) =
-                tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, &mut NoopTracker)
+                tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, &mut NoopTracker)
             {
                 if comma_span.is_dummy() {
                     err.note("you might be missing a comma");
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 2ffd4e3cf28..52cdcc5c747 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -36,6 +36,7 @@ use crate::base::{
 };
 use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
 use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
+use crate::mbe::quoted::{RulePart, parse_one_tt};
 use crate::mbe::transcribe::transcribe;
 use crate::mbe::{self, KleeneOp, macro_check};
 
@@ -97,13 +98,18 @@ impl<'a> ParserAnyMacro<'a> {
     }
 }
 
+pub(super) struct MacroRule {
+    pub(super) lhs: Vec<MatcherLoc>,
+    lhs_span: Span,
+    rhs: mbe::TokenTree,
+}
+
 struct MacroRulesMacroExpander {
     node_id: NodeId,
     name: Ident,
     span: Span,
     transparency: Transparency,
-    lhses: Vec<Vec<MatcherLoc>>,
-    rhses: Vec<mbe::TokenTree>,
+    rules: Vec<MacroRule>,
 }
 
 impl TTMacroExpander for MacroRulesMacroExpander {
@@ -121,10 +127,15 @@ impl TTMacroExpander for MacroRulesMacroExpander {
             self.name,
             self.transparency,
             input,
-            &self.lhses,
-            &self.rhses,
+            &self.rules,
         ))
     }
+
+    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
+        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
+        let rule = &self.rules[rule_i];
+        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
+    }
 }
 
 struct DummyExpander(ErrorGuaranteed);
@@ -183,9 +194,8 @@ impl<'matcher> Tracker<'matcher> for NoopTracker {
     }
 }
 
-/// Expands the rules based macro defined by `lhses` and `rhses` for a given
-/// input `arg`.
-#[instrument(skip(cx, transparency, arg, lhses, rhses))]
+/// Expands the rules based macro defined by `rules` for a given input `arg`.
+#[instrument(skip(cx, transparency, arg, rules))]
 fn expand_macro<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
@@ -194,8 +204,7 @@ fn expand_macro<'cx>(
     name: Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &[Vec<MatcherLoc>],
-    rhses: &[mbe::TokenTree],
+    rules: &[MacroRule],
 ) -> Box<dyn MacResult + 'cx> {
     let psess = &cx.sess.psess;
     // Macros defined in the current crate have a real node id,
@@ -208,15 +217,14 @@ fn expand_macro<'cx>(
     }
 
     // Track nothing for the best performance.
-    let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut NoopTracker);
+    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);
 
     match try_success_result {
-        Ok((i, named_matches)) => {
-            let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
-                mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span),
-                _ => cx.dcx().span_bug(sp, "malformed macro rhs"),
+        Ok((i, rule, named_matches)) => {
+            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
+                cx.dcx().span_bug(sp, "malformed macro rhs");
             };
-            let arm_span = rhses[i].span();
+            let arm_span = rule.rhs.span();
 
             // rhs has holes ( `$id` and `$(...)` that need filled)
             let id = cx.current_expansion.id;
@@ -262,7 +270,7 @@ fn expand_macro<'cx>(
         Err(CanRetry::Yes) => {
             // Retry and emit a better error.
             let (span, guar) =
-                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, lhses);
+                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
             cx.trace_macros_diag();
             DummyResult::any(span, guar)
         }
@@ -278,14 +286,14 @@ pub(super) enum CanRetry {
 /// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful,
 /// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors
 /// correctly.
-#[instrument(level = "debug", skip(psess, arg, lhses, track), fields(tracking = %T::description()))]
+#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
 pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
     psess: &ParseSess,
     name: Ident,
     arg: &TokenStream,
-    lhses: &'matcher [Vec<MatcherLoc>],
+    rules: &'matcher [MacroRule],
     track: &mut T,
-) -> Result<(usize, NamedMatches), CanRetry> {
+) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
     // We create a base parser that can be used for the "black box" parts.
     // Every iteration needs a fresh copy of that parser. However, the parser
     // is not mutated on many of the iterations, particularly when dealing with
@@ -308,7 +316,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
     let parser = parser_from_cx(psess, arg.clone(), T::recovery());
     // Try each arm's matchers.
     let mut tt_parser = TtParser::new(name);
-    for (i, lhs) in lhses.iter().enumerate() {
+    for (i, rule) in rules.iter().enumerate() {
         let _tracing_span = trace_span!("Matching arm", %i);
 
         // Take a snapshot of the state of pre-expansion gating at this point.
@@ -317,7 +325,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
         // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
         let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());
 
-        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);
+        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);
 
         track.after_arm(&result);
 
@@ -328,7 +336,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
                 // Merge the gated spans from parsing the matcher with the preexisting ones.
                 psess.gated_spans.merge(gated_spans_snapshot);
 
-                return Ok((i, named_matches));
+                return Ok((i, rule, named_matches));
             }
             Failure(_) => {
                 trace!("Failed to match arm, trying the next one");
@@ -364,7 +372,7 @@ pub fn compile_declarative_macro(
     span: Span,
     node_id: NodeId,
     edition: Edition,
-) -> (SyntaxExtension, Vec<(usize, Span)>) {
+) -> (SyntaxExtension, usize) {
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
@@ -377,7 +385,7 @@ pub fn compile_declarative_macro(
             node_id != DUMMY_NODE_ID,
         )
     };
-    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), Vec::new());
+    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);
 
     let macro_rules = macro_def.macro_rules;
     let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };
@@ -389,21 +397,11 @@ pub fn compile_declarative_macro(
     let mut guar = None;
     let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());
 
-    let mut lhses = Vec::new();
-    let mut rhses = Vec::new();
+    let mut rules = Vec::new();
 
     while p.token != token::Eof {
         let lhs_tt = p.parse_token_tree();
-        let lhs_tt = mbe::quoted::parse(
-            &TokenStream::new(vec![lhs_tt]),
-            true, // LHS
-            sess,
-            node_id,
-            features,
-            edition,
-        )
-        .pop()
-        .unwrap();
+        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
         // We don't handle errors here, the driver will abort after parsing/expansion. We can
         // report every error in every macro this way.
         check_emission(check_lhs_nt_follows(sess, node_id, &lhs_tt));
@@ -421,20 +419,18 @@ pub fn compile_declarative_macro(
             return dummy_syn_ext(guar);
         }
         let rhs_tt = p.parse_token_tree();
-        let rhs_tt = mbe::quoted::parse(
-            &TokenStream::new(vec![rhs_tt]),
-            false, // RHS
-            sess,
-            node_id,
-            features,
-            edition,
-        )
-        .pop()
-        .unwrap();
+        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
         check_emission(check_rhs(sess, &rhs_tt));
         check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
-        lhses.push(lhs_tt);
-        rhses.push(rhs_tt);
+        let lhs_span = lhs_tt.span();
+        // Convert the lhs into `MatcherLoc` form, which is better for doing the
+        // actual matching.
+        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
+            mbe::macro_parser::compute_locs(&delimited.tts)
+        } else {
+            return dummy_syn_ext(guar.unwrap());
+        };
+        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
         if p.token == token::Eof {
             break;
         }
@@ -443,7 +439,7 @@ pub fn compile_declarative_macro(
         }
     }
 
-    if lhses.is_empty() {
+    if rules.is_empty() {
         let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
         return dummy_syn_ext(guar);
     }
@@ -457,48 +453,12 @@ pub fn compile_declarative_macro(
         return dummy_syn_ext(guar);
     }
 
-    // Compute the spans of the macro rules for unused rule linting.
-    // Also, we are only interested in non-foreign macros.
-    let rule_spans = if node_id != DUMMY_NODE_ID {
-        lhses
-            .iter()
-            .zip(rhses.iter())
-            .enumerate()
-            // If the rhs contains an invocation like compile_error!,
-            // don't consider the rule for the unused rule lint.
-            .filter(|(_idx, (_lhs, rhs))| !has_compile_error_macro(rhs))
-            // We only take the span of the lhs here,
-            // so that the spans of created warnings are smaller.
-            .map(|(idx, (lhs, _rhs))| (idx, lhs.span()))
-            .collect::<Vec<_>>()
-    } else {
-        Vec::new()
-    };
+    // Return the number of rules for unused rule linting, if this is a local macro.
+    let nrules = if node_id != DUMMY_NODE_ID { rules.len() } else { 0 };
 
-    // Convert the lhses into `MatcherLoc` form, which is better for doing the
-    // actual matching.
-    let lhses = lhses
-        .iter()
-        .map(|lhs| {
-            // Ignore the delimiters around the matcher.
-            match lhs {
-                mbe::TokenTree::Delimited(.., delimited) => {
-                    mbe::macro_parser::compute_locs(&delimited.tts)
-                }
-                _ => sess.dcx().span_bug(span, "malformed macro lhs"),
-            }
-        })
-        .collect();
-
-    let expander = Arc::new(MacroRulesMacroExpander {
-        name: ident,
-        span,
-        node_id,
-        transparency,
-        lhses,
-        rhses,
-    });
-    (mk_syn_ext(expander), rule_spans)
+    let expander =
+        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
+    (mk_syn_ext(expander), nrules)
 }
 
 fn check_lhs_nt_follows(
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 2daa4e71558..eb874a27cec 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -16,6 +16,27 @@ pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are
     `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, \
     `meta`, `tt`, `item` and `vis`, along with `expr_2021` and `pat_param` for edition compatibility";
 
+/// Which part of a macro rule we're parsing
+#[derive(Copy, Clone)]
+pub(crate) enum RulePart {
+    /// The left-hand side, with patterns and metavar definitions with types
+    Pattern,
+    /// The right-hand side body, with metavar references and metavar expressions
+    Body,
+}
+
+impl RulePart {
+    #[inline(always)]
+    fn is_pattern(&self) -> bool {
+        matches!(self, Self::Pattern)
+    }
+
+    #[inline(always)]
+    fn is_body(&self) -> bool {
+        matches!(self, Self::Body)
+    }
+}
+
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
 /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
 /// collection of `TokenTree` for use in parsing a macro.
@@ -23,8 +44,8 @@ pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are
 /// # Parameters
 ///
 /// - `input`: a token stream to read from, the contents of which we are parsing.
-/// - `parsing_patterns`: `parse` can be used to parse either the "patterns" or the "body" of a
-///   macro. Both take roughly the same form _except_ that:
+/// - `part`: whether we're parsing the patterns or the body of a macro. Both take roughly the same
+///   form _except_ that:
 ///   - In a pattern, metavars are declared with their "matcher" type. For example `$var:expr` or
 ///     `$id:ident`. In this example, `expr` and `ident` are "matchers". They are not present in the
 ///     body of a macro rule -- just in the pattern.
@@ -36,9 +57,9 @@ pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are
 /// # Returns
 ///
 /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
-pub(super) fn parse(
+fn parse(
     input: &tokenstream::TokenStream,
-    parsing_patterns: bool,
+    part: RulePart,
     sess: &Session,
     node_id: NodeId,
     features: &Features,
@@ -53,9 +74,9 @@ pub(super) fn parse(
     while let Some(tree) = iter.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition);
+        let tree = parse_tree(tree, &mut iter, part, sess, node_id, features, edition);
 
-        if !parsing_patterns {
+        if part.is_body() {
             // No matchers allowed, nothing to process here
             result.push(tree);
             continue;
@@ -131,6 +152,22 @@ pub(super) fn parse(
     result
 }
 
+/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Like `parse`, but for a
+/// single token tree. Emits errors to `sess` if needed.
+#[inline]
+pub(super) fn parse_one_tt(
+    input: tokenstream::TokenTree,
+    part: RulePart,
+    sess: &Session,
+    node_id: NodeId,
+    features: &Features,
+    edition: Edition,
+) -> TokenTree {
+    parse(&tokenstream::TokenStream::new(vec![input]), part, sess, node_id, features, edition)
+        .pop()
+        .unwrap()
+}
+
 /// Asks for the `macro_metavar_expr` feature if it is not enabled
 fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &Session, span: Span) {
     if !features.macro_metavar_expr() {
@@ -157,13 +194,13 @@ fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Sess
 /// - `tree`: the tree we wish to convert.
 /// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
-/// - `parsing_patterns`: same as [parse].
+/// - `part`: same as [parse].
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`: language features so we can do feature gating.
 fn parse_tree<'a>(
     tree: &'a tokenstream::TokenTree,
     outer_iter: &mut TokenStreamIter<'a>,
-    parsing_patterns: bool,
+    part: RulePart,
     sess: &Session,
     node_id: NodeId,
     features: &Features,
@@ -189,7 +226,7 @@ fn parse_tree<'a>(
             match next {
                 // `tree` is followed by a delimited set of token trees.
                 Some(&tokenstream::TokenTree::Delimited(delim_span, _, delim, ref tts)) => {
-                    if parsing_patterns {
+                    if part.is_pattern() {
                         if delim != Delimiter::Parenthesis {
                             span_dollar_dollar_or_metavar_in_the_lhs_err(
                                 sess,
@@ -244,13 +281,13 @@ fn parse_tree<'a>(
                     // If we didn't find a metavar expression above, then we must have a
                     // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
                     // contents of the sequence itself
-                    let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
+                    let sequence = parse(tts, part, sess, node_id, features, edition);
                     // Get the Kleene operator and optional separator
                     let (separator, kleene) =
                         parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess);
                     // Count the number of captured "names" (i.e., named metavars)
                     let num_captures =
-                        if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
+                        if part.is_pattern() { count_metavar_decls(&sequence) } else { 0 };
                     TokenTree::Sequence(
                         delim_span,
                         SequenceRepetition { tts: sequence, separator, kleene, num_captures },
@@ -274,7 +311,7 @@ fn parse_tree<'a>(
                     Token { kind: token::Dollar, span: dollar_span2 },
                     _,
                 )) => {
-                    if parsing_patterns {
+                    if part.is_pattern() {
                         span_dollar_dollar_or_metavar_in_the_lhs_err(
                             sess,
                             &Token { kind: token::Dollar, span: dollar_span2 },
@@ -306,10 +343,7 @@ fn parse_tree<'a>(
         &tokenstream::TokenTree::Delimited(span, spacing, delim, ref tts) => TokenTree::Delimited(
             span,
             spacing,
-            Delimited {
-                delim,
-                tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
-            },
+            Delimited { delim, tts: parse(tts, part, sess, node_id, features, edition) },
         ),
     }
 }
diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs
index a4885aabe1f..645d95b1dba 100644
--- a/compiler/rustc_index/src/bit_set.rs
+++ b/compiler/rustc_index/src/bit_set.rs
@@ -1744,13 +1744,13 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
 
 #[inline]
 fn num_words<T: Idx>(domain_size: T) -> usize {
-    (domain_size.index() + WORD_BITS - 1) / WORD_BITS
+    domain_size.index().div_ceil(WORD_BITS)
 }
 
 #[inline]
 fn num_chunks<T: Idx>(domain_size: T) -> usize {
     assert!(domain_size.index() > 0);
-    (domain_size.index() + CHUNK_BITS - 1) / CHUNK_BITS
+    domain_size.index().div_ceil(CHUNK_BITS)
 }
 
 #[inline]
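
For intuition on `num_words`: with 64-bit words, a 100-bit domain needs
ceil(100 / 64) = 2 words. A standalone check, assuming `WORD_BITS` is 64 as
on typical 64-bit hosts (illustrative only):

    fn main() {
        const WORD_BITS: usize = 64; // assumed value of the real constant
        assert_eq!(100usize.div_ceil(WORD_BITS), 2);
        assert_eq!(64usize.div_ceil(WORD_BITS), 1);
        assert_eq!(65usize.div_ceil(WORD_BITS), 2);
    }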
diff --git a/compiler/rustc_passes/src/liveness/rwu_table.rs b/compiler/rustc_passes/src/liveness/rwu_table.rs
index 4c1f6ea141e..a1177946f86 100644
--- a/compiler/rustc_passes/src/liveness/rwu_table.rs
+++ b/compiler/rustc_passes/src/liveness/rwu_table.rs
@@ -44,7 +44,7 @@ impl RWUTable {
     const WORD_RWU_COUNT: usize = Self::WORD_BITS / Self::RWU_BITS;
 
     pub(super) fn new(live_nodes: usize, vars: usize) -> RWUTable {
-        let live_node_words = (vars + Self::WORD_RWU_COUNT - 1) / Self::WORD_RWU_COUNT;
+        let live_node_words = vars.div_ceil(Self::WORD_RWU_COUNT);
         Self { live_nodes, vars, live_node_words, words: vec![0u8; live_node_words * live_nodes] }
     }
 
diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs
index 3c1fc731784..06e59eb4ccc 100644
--- a/compiler/rustc_query_system/src/query/plumbing.rs
+++ b/compiler/rustc_query_system/src/query/plumbing.rs
@@ -597,7 +597,7 @@ where
         // from disk. Re-hashing results is fairly expensive, so we can't
         // currently afford to verify every hash. This subset should still
         // give us some coverage of potential bugs though.
-        let try_verify = prev_fingerprint.split().1.as_u64() % 32 == 0;
+        let try_verify = prev_fingerprint.split().1.as_u64().is_multiple_of(32);
         if std::intrinsics::unlikely(
             try_verify || qcx.dep_context().sess().opts.unstable_opts.incremental_verify_ich,
         ) {
diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs
index 650a827ba56..eeb8cb893d7 100644
--- a/compiler/rustc_resolve/src/build_reduced_graph.rs
+++ b/compiler/rustc_resolve/src/build_reduced_graph.rs
@@ -1202,12 +1202,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
     fn insert_unused_macro(&mut self, ident: Ident, def_id: LocalDefId, node_id: NodeId) {
         if !ident.as_str().starts_with('_') {
             self.r.unused_macros.insert(def_id, (node_id, ident));
-            for (rule_i, rule_span) in &self.r.macro_map[&def_id.to_def_id()].rule_spans {
-                self.r
-                    .unused_macro_rules
-                    .entry(node_id)
-                    .or_default()
-                    .insert(*rule_i, (ident, *rule_span));
+            for rule_i in 0..self.r.macro_map[&def_id.to_def_id()].nrules {
+                self.r.unused_macro_rules.entry(node_id).or_default().insert(rule_i);
             }
         }
     }
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 7162f3a77d3..3f865d7c2da 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -1014,13 +1014,13 @@ struct DeriveData {
 
 struct MacroData {
     ext: Arc<SyntaxExtension>,
-    rule_spans: Vec<(usize, Span)>,
+    nrules: usize,
     macro_rules: bool,
 }
 
 impl MacroData {
     fn new(ext: Arc<SyntaxExtension>) -> MacroData {
-        MacroData { ext, rule_spans: Vec::new(), macro_rules: false }
+        MacroData { ext, nrules: 0, macro_rules: false }
     }
 }
 
@@ -1135,7 +1135,7 @@ pub struct Resolver<'ra, 'tcx> {
     ast_transform_scopes: FxHashMap<LocalExpnId, Module<'ra>>,
     unused_macros: FxIndexMap<LocalDefId, (NodeId, Ident)>,
     /// A map from the macro to all its potentially unused arms.
-    unused_macro_rules: FxIndexMap<NodeId, UnordMap<usize, (Ident, Span)>>,
+    unused_macro_rules: FxIndexMap<NodeId, UnordSet<usize>>,
     proc_macro_stubs: FxHashSet<LocalDefId>,
     /// Traces collected during macro resolution and validated when it's complete.
     single_segment_macro_resolutions:
diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs
index 3d33a02a9c6..9bc96403559 100644
--- a/compiler/rustc_resolve/src/macros.rs
+++ b/compiler/rustc_resolve/src/macros.rs
@@ -351,13 +351,23 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
         }
 
         for (&node_id, unused_arms) in self.unused_macro_rules.iter() {
-            for (&arm_i, &(ident, rule_span)) in unused_arms.to_sorted_stable_ord() {
-                self.lint_buffer.buffer_lint(
-                    UNUSED_MACRO_RULES,
-                    node_id,
-                    rule_span,
-                    BuiltinLintDiag::MacroRuleNeverUsed(arm_i, ident.name),
-                );
+            if unused_arms.is_empty() {
+                continue;
+            }
+            let def_id = self.local_def_id(node_id).to_def_id();
+            let m = &self.macro_map[&def_id];
+            let SyntaxExtensionKind::LegacyBang(ref ext) = m.ext.kind else {
+                continue;
+            };
+            for &arm_i in unused_arms.to_sorted_stable_ord() {
+                if let Some((ident, rule_span)) = ext.get_unused_rule(arm_i) {
+                    self.lint_buffer.buffer_lint(
+                        UNUSED_MACRO_RULES,
+                        node_id,
+                        rule_span,
+                        BuiltinLintDiag::MacroRuleNeverUsed(arm_i, ident.name),
+                    );
+                }
             }
         }
     }
@@ -1146,7 +1156,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         node_id: NodeId,
         edition: Edition,
     ) -> MacroData {
-        let (mut ext, mut rule_spans) = compile_declarative_macro(
+        let (mut ext, mut nrules) = compile_declarative_macro(
             self.tcx.sess,
             self.tcx.features(),
             macro_def,
@@ -1163,13 +1173,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                 // The macro is a built-in, replace its expander function
                 // while still taking everything else from the source code.
                 ext.kind = builtin_ext_kind.clone();
-                rule_spans = Vec::new();
+                nrules = 0;
             } else {
                 self.dcx().emit_err(errors::CannotFindBuiltinMacroWithName { span, ident });
             }
         }
 
-        MacroData { ext: Arc::new(ext), rule_spans, macro_rules: macro_def.macro_rules }
+        MacroData { ext: Arc::new(ext), nrules, macro_rules: macro_def.macro_rules }
     }
 
     fn path_accessible(
diff --git a/compiler/rustc_serialize/src/leb128.rs b/compiler/rustc_serialize/src/leb128.rs
index 954c1f728f2..da328dcea03 100644
--- a/compiler/rustc_serialize/src/leb128.rs
+++ b/compiler/rustc_serialize/src/leb128.rs
@@ -7,7 +7,7 @@ use crate::serialize::Decoder;
 /// Returns the length of the longest LEB128 encoding for `T`, assuming `T` is an integer type
 pub const fn max_leb128_len<T>() -> usize {
     // The longest LEB128 encoding for an integer uses 7 bits per byte.
-    (size_of::<T>() * 8 + 6) / 7
+    (size_of::<T>() * 8).div_ceil(7)
 }
 
 /// Returns the length of the longest LEB128 encoding of all supported integer types.
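
A worked instance of the formula: a `u64` carries 64 bits at 7 payload bits
per LEB128 byte, so its longest encoding is ceil(64 / 7) = 10 bytes. A quick
check (illustrative, not part of the commit):

    use std::mem::size_of;

    fn main() {
        assert_eq!((size_of::<u64>() * 8).div_ceil(7), 10);
        assert_eq!((size_of::<u32>() * 8).div_ceil(7), 5);
        assert_eq!((size_of::<u16>() * 8).div_ceil(7), 3);
    }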
diff --git a/compiler/rustc_span/src/edit_distance.rs b/compiler/rustc_span/src/edit_distance.rs
index 4f3202b694c..416e9daa8fb 100644
--- a/compiler/rustc_span/src/edit_distance.rs
+++ b/compiler/rustc_span/src/edit_distance.rs
@@ -130,7 +130,7 @@ pub fn edit_distance_with_substrings(a: &str, b: &str, limit: usize) -> Option<u
         1 // Exact substring match, but not a total word match so return non-zero
     } else if !big_len_diff {
         // Not a big difference in length, discount cost of length difference
-        score + (len_diff + 1) / 2
+        score + len_diff.div_ceil(2)
     } else {
         // A big difference in length, add back the difference in length to the score
         score + len_diff
diff --git a/compiler/rustc_target/src/callconv/sparc64.rs b/compiler/rustc_target/src/callconv/sparc64.rs
index 186826c08fc..ecc9067ced3 100644
--- a/compiler/rustc_target/src/callconv/sparc64.rs
+++ b/compiler/rustc_target/src/callconv/sparc64.rs
@@ -90,7 +90,7 @@ where
         _ => {}
     }
 
-    if (offset.bytes() % 4) != 0
+    if !offset.bytes().is_multiple_of(4)
         && matches!(scalar2.primitive(), Primitive::Float(Float::F32 | Float::F64))
     {
         offset += Size::from_bytes(4 - (offset.bytes() % 4));
@@ -181,7 +181,7 @@ where
                 // Structure { float, int, int } doesn't like to be handled like
                 // { float, long int }. Other way around it doesn't mind.
                 if data.last_offset < arg.layout.size
-                    && (data.last_offset.bytes() % 8) != 0
+                    && !data.last_offset.bytes().is_multiple_of(8)
                     && data.prefix_index < data.prefix.len()
                 {
                     data.prefix[data.prefix_index] = Some(Reg::i32());
@@ -190,7 +190,7 @@ where
                 }
 
                 let mut rest_size = arg.layout.size - data.last_offset;
-                if (rest_size.bytes() % 8) != 0 && data.prefix_index < data.prefix.len() {
+                if !rest_size.bytes().is_multiple_of(8) && data.prefix_index < data.prefix.len() {
                     data.prefix[data.prefix_index] = Some(Reg::i32());
                     rest_size = rest_size - Reg::i32().size;
                 }
diff --git a/compiler/rustc_target/src/callconv/x86.rs b/compiler/rustc_target/src/callconv/x86.rs
index 8328f818f9b..bdf116ff303 100644
--- a/compiler/rustc_target/src/callconv/x86.rs
+++ b/compiler/rustc_target/src/callconv/x86.rs
@@ -171,7 +171,7 @@ pub(crate) fn fill_inregs<'a, Ty, C>(
             continue;
         }
 
-        let size_in_regs = (arg.layout.size.bits() + 31) / 32;
+        let size_in_regs = arg.layout.size.bits().div_ceil(32);
 
         if size_in_regs == 0 {
             continue;
diff --git a/compiler/rustc_target/src/callconv/x86_64.rs b/compiler/rustc_target/src/callconv/x86_64.rs
index 700ee73c8fd..d8db7ed6e4c 100644
--- a/compiler/rustc_target/src/callconv/x86_64.rs
+++ b/compiler/rustc_target/src/callconv/x86_64.rs
@@ -95,7 +95,7 @@ where
         Ok(())
     }
 
-    let n = ((arg.layout.size.bytes() + 7) / 8) as usize;
+    let n = arg.layout.size.bytes().div_ceil(8) as usize;
     if n > MAX_EIGHTBYTES {
         return Err(Memory);
     }
diff --git a/compiler/rustc_target/src/callconv/xtensa.rs b/compiler/rustc_target/src/callconv/xtensa.rs
index b687f0e20c6..a73a70a1a0c 100644
--- a/compiler/rustc_target/src/callconv/xtensa.rs
+++ b/compiler/rustc_target/src/callconv/xtensa.rs
@@ -54,7 +54,7 @@ where
     // Determine the number of GPRs needed to pass the current argument
     // according to the ABI. 2*XLen-aligned varargs are passed in "aligned"
     // register pairs, so may consume 3 registers.
-    let mut needed_arg_gprs = (size + 32 - 1) / 32;
+    let mut needed_arg_gprs = size.div_ceil(32);
     if needed_align == 64 {
         needed_arg_gprs += *arg_gprs_left % 2;
     }
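
The "may consume 3 registers" note follows directly from this arithmetic: a
64-bit (2*XLen) vararg needs ceil(64 / 32) = 2 GPRs, plus one padding GPR
when an odd number of argument registers remain. A worked check with
illustrative values:

    fn main() {
        let size: u64 = 64;         // a 2*XLen argument
        let arg_gprs_left: u64 = 5; // odd: the next register pair is misaligned
        let mut needed_arg_gprs = size.div_ceil(32);
        needed_arg_gprs += arg_gprs_left % 2; // one register of padding
        assert_eq!(needed_arg_gprs, 3);
    }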
diff --git a/compiler/rustc_ty_utils/src/assoc.rs b/compiler/rustc_ty_utils/src/assoc.rs
index a65f9b347dc..2fb3c5ff945 100644
--- a/compiler/rustc_ty_utils/src/assoc.rs
+++ b/compiler/rustc_ty_utils/src/assoc.rs
@@ -195,12 +195,39 @@ fn associated_types_for_impl_traits_in_associated_fn(
     match tcx.def_kind(parent_def_id) {
         DefKind::Trait => {
             if let Some(output) = tcx.hir_get_fn_output(fn_def_id) {
-                let data = DefPathData::AnonAssocTy(tcx.item_name(fn_def_id.to_def_id()));
+                let def_path_id = |def_id: LocalDefId| tcx.item_name(def_id.to_def_id());
+                let def_path_data = def_path_id(fn_def_id);
+
+                let (.., trait_item_refs) = tcx.hir_expect_item(parent_def_id).expect_trait();
+                // The purpose of `disambiguator_idx` is to ensure there are
+                // no duplicate `def_id` in certain cases, such as:
+                // ```
+                // trait Foo {
+                //     fn bar() -> impl Trait;
+                //     fn bar() -> impl Trait;
+                //              // ~~~~~~~~~~ It will generate the same ID if we don’t disambiguate it.
+                // }
+                // ```
+                let disambiguator_idx = trait_item_refs
+                    .iter()
+                    .take_while(|item| item.id.owner_id.def_id != fn_def_id)
+                    .fold(0, |acc, item| {
+                        if !matches!(item.kind, hir::AssocItemKind::Fn { .. }) {
+                            acc
+                        } else if def_path_id(item.id.owner_id.def_id) == def_path_data {
+                            tcx.def_key(item.id.owner_id.def_id).disambiguated_data.disambiguator
+                                + 1
+                        } else {
+                            acc
+                        }
+                    });
+
+                let data = DefPathData::AnonAssocTy(def_path_data);
                 let mut visitor = RPITVisitor {
                     tcx,
                     synthetics: vec![],
                     data,
-                    disambiguator: DisambiguatorState::with(parent_def_id, data, 0),
+                    disambiguator: DisambiguatorState::with(parent_def_id, data, disambiguator_idx),
                 };
                 visitor.visit_fn_ret_ty(output);
                 tcx.arena.alloc_from_iter(
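
The fold above walks the trait items that precede `fn_def_id` and picks a
disambiguator one past the last same-named `fn`, so the two `bar`s in the doc
comment get distinct IDs. A simplified standalone model of that scan (names
and shapes hypothetical, not rustc API):

    /// `items`: (item name, its existing disambiguator), in declaration order.
    fn disambiguator_for(items: &[(&str, u32)], target: usize, name: &str) -> u32 {
        items[..target]
            .iter()
            .fold(0, |acc, &(n, d)| if n == name { d + 1 } else { acc })
    }

    fn main() {
        let items = [("bar", 0), ("baz", 0), ("bar", 1)];
        // The second `bar` (index 2) gets disambiguator 1, one past the first.
        assert_eq!(disambiguator_for(&items, 2, "bar"), 1);
    }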
diff --git a/compiler/rustc_ty_utils/src/layout/invariant.rs b/compiler/rustc_ty_utils/src/layout/invariant.rs
index 4b65c05d0e9..1311ee31182 100644
--- a/compiler/rustc_ty_utils/src/layout/invariant.rs
+++ b/compiler/rustc_ty_utils/src/layout/invariant.rs
@@ -8,7 +8,7 @@ use rustc_middle::ty::layout::{HasTyCtxt, LayoutCx, TyAndLayout};
 pub(super) fn layout_sanity_check<'tcx>(cx: &LayoutCx<'tcx>, layout: &TyAndLayout<'tcx>) {
     let tcx = cx.tcx();
 
-    if layout.size.bytes() % layout.align.abi.bytes() != 0 {
+    if !layout.size.bytes().is_multiple_of(layout.align.abi.bytes()) {
         bug!("size is not a multiple of align, in the following layout:\n{layout:#?}");
     }
     if layout.size.bytes() >= tcx.data_layout.obj_size_bound() {