about summary refs log tree commit diff
diff options
context:
space:
mode:
authorYuki Okushi <huyuumi.dev@gmail.com>2020-10-25 18:43:47 +0900
committerGitHub <noreply@github.com>2020-10-25 18:43:47 +0900
commit90856565124965295202aae11a33e66870f39e71 (patch)
tree29bc3b1e2a586fe28632aa4623559ea747b722ce
parent63e48cd4100619550af142471543f1969bfeb8c0 (diff)
parent3b6c4fe465bcb8c47a6b6530d7687a19e78b2f41 (diff)
downloadrust-90856565124965295202aae11a33e66870f39e71.tar.gz
rust-90856565124965295202aae11a33e66870f39e71.zip
Rollup merge of #78322 - ssomers:btree_no_min_len_at_node_level, r=Mark-Simulacrum
BTreeMap: stop mistaking node::MIN_LEN for a node level constraint

Correcting #77612, which fell into the trap of assuming that node::MIN_LEN is an imposed minimum everywhere, and trying to make it much clearer that it is an offered minimum at the node level.

r? @Mark-Simulacrum
-rw-r--r--library/alloc/src/collections/btree/map.rs8
-rw-r--r--library/alloc/src/collections/btree/map/tests.rs21
-rw-r--r--library/alloc/src/collections/btree/node.rs2
-rw-r--r--library/alloc/src/collections/btree/node/tests.rs56
-rw-r--r--library/alloc/src/collections/btree/remove.rs5
-rw-r--r--library/alloc/src/collections/btree/split.rs18
6 files changed, 55 insertions, 55 deletions
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 20c6ebd2292..4f9aa44b6b5 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -17,6 +17,10 @@ mod entry;
 pub use entry::{Entry, OccupiedEntry, VacantEntry};
 use Entry::*;
 
+/// Minimum number of elements in nodes that are not a root.
+/// We might temporarily have fewer elements during methods.
+pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
+
 /// A map based on a B-Tree.
 ///
 /// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
@@ -1094,13 +1098,13 @@ impl<K: Ord, V> BTreeMap<K, V> {
             // Check if right-most child is underfull.
             let mut last_edge = internal.last_edge();
             let right_child_len = last_edge.reborrow().descend().len();
-            if right_child_len < node::MIN_LEN {
+            if right_child_len < MIN_LEN {
                 // We need to steal.
                 let mut last_kv = match last_edge.left_kv() {
                     Ok(left) => left,
                     Err(_) => unreachable!(),
                 };
-                last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
+                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
                 last_edge = last_kv.right_edge();
             }
 
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index b51b95a635c..adb94972f5b 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -50,10 +50,15 @@ impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
     {
         if let Some(root) = &self.root {
             let root_node = root.node_as_ref();
+
             assert!(root_node.ascend().is_err());
             root_node.assert_back_pointers();
-            root_node.assert_ascending();
-            assert_eq!(self.length, root_node.assert_and_add_lengths());
+
+            let counted = root_node.assert_ascending();
+            assert_eq!(self.length, counted);
+            assert_eq!(self.length, root_node.calc_length());
+
+            root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
         } else {
             assert_eq!(self.length, 0);
         }
@@ -76,6 +81,18 @@ impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
     }
 }
 
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+    pub fn assert_min_len(self, min_len: usize) {
+        assert!(self.len() >= min_len, "{} < {}", self.len(), min_len);
+        if let node::ForceResult::Internal(node) = self.force() {
+            for idx in 0..=node.len() {
+                let edge = unsafe { Handle::new_edge(node, idx) };
+                edge.descend().assert_min_len(MIN_LEN);
+            }
+        }
+    }
+}
+
 // Test our value of MIN_INSERTS_HEIGHT_2. It may change according to the
 // implementation of insertion, but it's best to be aware of when it does.
 #[test]
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index 886d8abd030..f5aff9bf494 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -38,8 +38,8 @@ use crate::alloc::{AllocRef, Global, Layout};
 use crate::boxed::Box;
 
 const B: usize = 6;
-pub const MIN_LEN: usize = B - 1;
 pub const CAPACITY: usize = 2 * B - 1;
+pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
 const KV_IDX_CENTER: usize = B - 1;
 const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
 const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
diff --git a/library/alloc/src/collections/btree/node/tests.rs b/library/alloc/src/collections/btree/node/tests.rs
index e56fc2aa51e..d6527057c5d 100644
--- a/library/alloc/src/collections/btree/node/tests.rs
+++ b/library/alloc/src/collections/btree/node/tests.rs
@@ -5,25 +5,26 @@ use crate::string::String;
 use core::cmp::Ordering::*;
 
 impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+    /// Asserts that the back pointer in each reachable node points to its parent.
     pub fn assert_back_pointers(self) {
-        match self.force() {
-            ForceResult::Leaf(_) => {}
-            ForceResult::Internal(node) => {
-                for idx in 0..=node.len() {
-                    let edge = unsafe { Handle::new_edge(node, idx) };
-                    let child = edge.descend();
-                    assert!(child.ascend().ok() == Some(edge));
-                    child.assert_back_pointers();
-                }
+        if let ForceResult::Internal(node) = self.force() {
+            for idx in 0..=node.len() {
+                let edge = unsafe { Handle::new_edge(node, idx) };
+                let child = edge.descend();
+                assert!(child.ascend().ok() == Some(edge));
+                child.assert_back_pointers();
             }
         }
     }
 
-    pub fn assert_ascending(self)
+    /// Asserts that the keys are in strictly ascending order.
+    /// Returns how many keys it encountered.
+    pub fn assert_ascending(self) -> usize
     where
         K: Copy + Debug + Ord,
     {
         struct SeriesChecker<T> {
+            num_seen: usize,
             previous: Option<T>,
         }
         impl<T: Copy + Debug + Ord> SeriesChecker<T> {
@@ -32,10 +33,11 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
                     assert!(previous < next, "{:?} >= {:?}", previous, next);
                 }
                 self.previous = Some(next);
+                self.num_seen += 1;
             }
         }
 
-        let mut checker = SeriesChecker { previous: None };
+        let mut checker = SeriesChecker { num_seen: 0, previous: None };
         self.visit_nodes_in_order(|pos| match pos {
             navigate::Position::Leaf(node) => {
                 for idx in 0..node.len() {
@@ -49,33 +51,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
             }
             navigate::Position::Internal(_) => {}
         });
-    }
-
-    pub fn assert_and_add_lengths(self) -> usize {
-        let mut internal_length = 0;
-        let mut internal_kv_count = 0;
-        let mut leaf_length = 0;
-        self.visit_nodes_in_order(|pos| match pos {
-            navigate::Position::Leaf(node) => {
-                let is_root = self.height() == 0;
-                let min_len = if is_root { 0 } else { MIN_LEN };
-                assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
-                leaf_length += node.len();
-            }
-            navigate::Position::Internal(node) => {
-                let is_root = self.height() == node.height();
-                let min_len = if is_root { 1 } else { MIN_LEN };
-                assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
-                internal_length += node.len();
-            }
-            navigate::Position::InternalKV(_) => {
-                internal_kv_count += 1;
-            }
-        });
-        assert_eq!(internal_length, internal_kv_count);
-        let total = internal_length + leaf_length;
-        assert_eq!(self.calc_length(), total);
-        total
+        checker.num_seen
     }
 
     pub fn dump_keys(self) -> String
@@ -124,8 +100,8 @@ fn test_splitpoint() {
                 right_len += 1;
             }
         }
-        assert!(left_len >= MIN_LEN);
-        assert!(right_len >= MIN_LEN);
+        assert!(left_len >= MIN_LEN_AFTER_SPLIT);
+        assert!(right_len >= MIN_LEN_AFTER_SPLIT);
         assert!(left_len + right_len == CAPACITY);
     }
 }
diff --git a/library/alloc/src/collections/btree/remove.rs b/library/alloc/src/collections/btree/remove.rs
index 9733b7d6084..99655d3e2bf 100644
--- a/library/alloc/src/collections/btree/remove.rs
+++ b/library/alloc/src/collections/btree/remove.rs
@@ -1,4 +1,5 @@
-use super::node::{self, marker, ForceResult, Handle, NodeRef};
+use super::map::MIN_LEN;
+use super::node::{marker, ForceResult, Handle, NodeRef};
 use super::unwrap_unchecked;
 use core::mem;
 use core::ptr;
@@ -40,7 +41,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInter
         // Handle underflow
         let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
         let mut at_leaf = true;
-        while cur_node.len() < node::MIN_LEN {
+        while cur_node.len() < MIN_LEN {
             match handle_underfull_node(cur_node) {
                 UnderflowResult::AtRoot => break,
                 UnderflowResult::Merged(edge, merged_with_left, offset) => {
diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs
index 0e6e213f6e8..5f00a5a25ab 100644
--- a/library/alloc/src/collections/btree/split.rs
+++ b/library/alloc/src/collections/btree/split.rs
@@ -1,5 +1,6 @@
-use super::node::{self, ForceResult::*, Root};
-use super::search::{self, SearchResult::*};
+use super::map::MIN_LEN;
+use super::node::{ForceResult::*, Root};
+use super::search::{search_node, SearchResult::*};
 use core::borrow::Borrow;
 
 impl<K, V> Root<K, V> {
@@ -20,7 +21,7 @@ impl<K, V> Root<K, V> {
             let mut right_node = right_root.node_as_mut();
 
             loop {
-                let mut split_edge = match search::search_node(left_node, key) {
+                let mut split_edge = match search_node(left_node, key) {
                     // key is going to the right tree
                     Found(handle) => handle.left_edge(),
                     GoDown(handle) => handle,
@@ -65,9 +66,9 @@ impl<K, V> Root<K, V> {
                     cur_node = last_kv.merge().descend();
                 } else {
                     let right_len = last_kv.reborrow().right_edge().descend().len();
-                    // `MINLEN + 1` to avoid readjust if merge happens on the next level.
-                    if right_len < node::MIN_LEN + 1 {
-                        last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+                    // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+                    if right_len < MIN_LEN + 1 {
+                        last_kv.bulk_steal_left(MIN_LEN + 1 - right_len);
                     }
                     cur_node = last_kv.right_edge().descend();
                 }
@@ -91,8 +92,9 @@ impl<K, V> Root<K, V> {
                     cur_node = first_kv.merge().descend();
                 } else {
                     let left_len = first_kv.reborrow().left_edge().descend().len();
-                    if left_len < node::MIN_LEN + 1 {
-                        first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+                    // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+                    if left_len < MIN_LEN + 1 {
+                        first_kv.bulk_steal_right(MIN_LEN + 1 - left_len);
                     }
                     cur_node = first_kv.left_edge().descend();
                 }