author     lcnr <rust@lcnr.de>    2024-07-23 23:03:34 +0200
committer  lcnr <rust@lcnr.de>    2024-08-12 10:33:04 +0200
commit     e83eacdfaa9002559d3a301a0d1a0f54fa253f1d (patch)
tree       8a34c6d8d43e64e4ac2d7b53cd9f975bc35431b4
parent     9308401df57318cf3b3ad72bd3674516fe9d1c6c (diff)
move behavior out of shared fn
-rw-r--r--  compiler/rustc_type_ir/src/search_graph/mod.rs  25
1 file changed, 9 insertions, 16 deletions
diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs
index 6c67bf13ac7..18a5a85dfa8 100644
--- a/compiler/rustc_type_ir/src/search_graph/mod.rs
+++ b/compiler/rustc_type_ir/src/search_graph/mod.rs
@@ -300,17 +300,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     // We update both the head of this cycle to rerun its evaluation until
     // we reach a fixpoint and all other cycle participants to make sure that
     // their result does not get moved to the global cache.
-    fn tag_cycle_participants(
-        stack: &mut IndexVec<StackDepth, StackEntry<X>>,
-        usage_kind: Option<UsageKind>,
-        head: StackDepth,
-    ) {
-        if let Some(usage_kind) = usage_kind {
-            stack[head].has_been_used =
-                Some(stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind)));
-        }
-        debug_assert!(stack[head].has_been_used.is_some());
-
+    fn tag_cycle_participants(stack: &mut IndexVec<StackDepth, StackEntry<X>>, head: StackDepth) {
         // The current root of these cycles. Note that this may not be the final
         // root in case a later goal depends on a goal higher up the stack.
         let mut current_root = head;
@@ -403,7 +393,8 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             // We have a nested goal which is already in the provisional cache, use
             // its result. We do not provide any usage kind as that should have been
             // already set correctly while computing the cache entry.
-            Self::tag_cycle_participants(&mut self.stack, None, entry.head);
+            debug_assert!(self.stack[entry.head].has_been_used.is_some());
+            Self::tag_cycle_participants(&mut self.stack, entry.head);
             return entry.result;
         } else if let Some(stack_depth) = cache_entry.stack_depth {
             debug!("encountered cycle with depth {stack_depth:?}");
@@ -416,11 +407,13 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             let is_coinductive_cycle = Self::stack_coinductive_from(cx, &self.stack, stack_depth);
             let cycle_kind =
                 if is_coinductive_cycle { CycleKind::Coinductive } else { CycleKind::Inductive };
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                Some(UsageKind::Single(cycle_kind)),
-                stack_depth,
+            let usage_kind = UsageKind::Single(cycle_kind);
+            self.stack[stack_depth].has_been_used = Some(
+                self.stack[stack_depth]
+                    .has_been_used
+                    .map_or(usage_kind, |prev| prev.merge(usage_kind)),
             );
+            Self::tag_cycle_participants(&mut self.stack, stack_depth);
 
             // Return the provisional result or, if we're in the first iteration,
             // start with no constraints.
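For illustration, here is a minimal standalone sketch of the behavior this commit moves out of the shared function: instead of passing an Option<UsageKind> into tag_cycle_participants, the cycle-on-stack caller now folds the new UsageKind into the head's has_been_used itself before tagging participants. The type definitions and the merge rule below are simplified stand-ins for the example, not rustc_type_ir's actual items.

#[derive(Clone, Copy, PartialEq, Debug)]
enum CycleKind {
    Coinductive,
    Inductive,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum UsageKind {
    Single(CycleKind),
    Mixed,
}

impl UsageKind {
    // Assumed merge rule for this sketch: identical cycle kinds stay
    // `Single`, anything else collapses to `Mixed`.
    fn merge(self, other: Self) -> Self {
        match (self, other) {
            (UsageKind::Single(lhs), UsageKind::Single(rhs)) if lhs == rhs => {
                UsageKind::Single(lhs)
            }
            _ => UsageKind::Mixed,
        }
    }
}

struct StackEntry {
    has_been_used: Option<UsageKind>,
}

// The update that previously lived inside `tag_cycle_participants` and is
// now inlined at the call site that detects a cycle on the stack.
fn mark_head_usage(entry: &mut StackEntry, usage_kind: UsageKind) {
    entry.has_been_used =
        Some(entry.has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind)));
}

fn main() {
    let mut head = StackEntry { has_been_used: None };

    // First cycle through this head: usage is recorded as-is.
    mark_head_usage(&mut head, UsageKind::Single(CycleKind::Coinductive));
    assert_eq!(head.has_been_used, Some(UsageKind::Single(CycleKind::Coinductive)));

    // A later inductive cycle through the same head merges to `Mixed`.
    mark_head_usage(&mut head, UsageKind::Single(CycleKind::Inductive));
    assert_eq!(head.has_been_used, Some(UsageKind::Mixed));
}

With the merge inlined at the cycle-on-stack call site, the provisional-cache path no longer needs to thread a `None` through the shared function; it can simply assert that `has_been_used` was already set while the cache entry was computed, which is what the added `debug_assert!` in the second hunk does.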