author     bors <bors@rust-lang.org>    2020-08-14 12:00:59 +0000
committer  bors <bors@rust-lang.org>    2020-08-14 12:00:59 +0000
commit     7996182bc1b48495014fa13799f503be01d3e5bc (patch)
tree       6861c7f3ce0cb33da6ad83a3be7db9faa1d8303b /library/alloc/src
parent     6b2ca8457a0f6450896c669caf880090a18d1541 (diff)
parent     052204023348d6befed705dbab7817afa4f7d7cf (diff)
Auto merge of #74777 - ssomers:btree_cleanup_7, r=Mark-Simulacrum
Stop BTreeMap casts from reborrowing

Down in btree/node.rs, the interface and use of `cast_unchecked` look a bit shady. It's really just there to invert `forget_type`, which does not borrow. Because `cast_unchecked` reborrows, we can't write the same kind of cast at the Handle level.

No change in undefined behaviour or performance.
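
To make the lifetime point concrete, here is a minimal sketch with made-up types (not the real btree/node.rs definitions): a cast that takes `&mut self` yields a result tied to that reborrow, whereas a cast that consumes `self` keeps the original lifetime, which is what a Handle-level cast needs.

// Minimal sketch, hypothetical types only: why a by-value cast can keep the
// full lifetime while a reborrowing cast cannot.
use std::marker::PhantomData;

struct Leaf;
struct Internal;

struct NodeRef<'a, Type> {
    // Stand-in for a pointer into a tree that is mutably borrowed for 'a.
    _marker: PhantomData<&'a mut Type>,
}

impl<'a, Type> NodeRef<'a, Type> {
    // Reborrowing version: the result is tied to the `&mut self` borrow,
    // so it cannot outlive this call site.
    fn cast_reborrowing<NewType>(&mut self) -> NodeRef<'_, NewType> {
        NodeRef { _marker: PhantomData }
    }

    // By-value version: consumes `self`, so the result keeps the original 'a.
    fn cast_by_value<NewType>(self) -> NodeRef<'a, NewType> {
        NodeRef { _marker: PhantomData }
    }
}

// Compiles only with the by-value cast; `node.cast_reborrowing::<Internal>()`
// would be rejected because its result borrows the local `node`.
fn relabel<'a>(node: NodeRef<'a, Leaf>) -> NodeRef<'a, Internal> {
    node.cast_by_value::<Internal>()
}

fn main() {
    let leaf: NodeRef<'static, Leaf> = NodeRef { _marker: PhantomData };
    let _internal = relabel(leaf);
}
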
Diffstat (limited to 'library/alloc/src')
-rw-r--r--  library/alloc/src/collections/btree/node.rs | 36
1 file changed, 16 insertions(+), 20 deletions(-)
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index aa959c667ac..c0b75fd5eac 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -413,7 +413,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
 impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
     /// Unsafely asserts to the compiler some static information about whether this
     /// node is a `Leaf` or an `Internal`.
-    unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
+    unsafe fn cast_unchecked<NewType>(self) -> NodeRef<marker::Mut<'a>, K, V, NewType> {
         NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
     }
 
@@ -721,7 +721,7 @@ impl<Node: Copy, Type> Clone for Handle<Node, Type> {
 }
 
 impl<Node, Type> Handle<Node, Type> {
-    /// Retrieves the node that contains the edge of key/value pair this handle points to.
+    /// Retrieves the node that contains the edge or key/value pair this handle points to.
     pub fn into_node(self) -> Node {
         self.node
     }
@@ -1131,7 +1131,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>
     }
 
     /// Removes the key/value pair pointed to by this handle and returns it, along with the edge
-    /// between the now adjacent key/value pairs (if any) to the left and right of this handle.
+    /// that the key/value pair collapsed into.
     pub fn remove(
         mut self,
     ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
@@ -1189,7 +1189,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     /// to by this handle, and the node immediately to the right of this handle into one new
     /// child of the underlying node, returning an edge referencing that new child.
     ///
-    /// Assumes that this edge `.can_merge()`.
+    /// Panics unless this edge `.can_merge()`.
     pub fn merge(
         mut self,
     ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
@@ -1197,10 +1197,9 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
         let self2 = unsafe { ptr::read(&self) };
         let mut left_node = self1.left_edge().descend();
         let left_len = left_node.len();
-        let mut right_node = self2.right_edge().descend();
+        let right_node = self2.right_edge().descend();
         let right_len = right_node.len();
 
-        // necessary for correctness, but in a private module
         assert!(left_len + right_len < CAPACITY);
 
         unsafe {
@@ -1231,28 +1230,25 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
 
             (*left_node.as_leaf_mut()).len += right_len as u16 + 1;
 
-            let layout = if self.node.height > 1 {
+            if self.node.height > 1 {
+                // SAFETY: the height of the nodes being merged is one below the height
+                // of the node of this edge, thus above zero, so they are internal.
+                let mut left_node = left_node.cast_unchecked();
+                let right_node = right_node.cast_unchecked();
                 ptr::copy_nonoverlapping(
-                    right_node.cast_unchecked().as_internal().edges.as_ptr(),
-                    left_node
-                        .cast_unchecked()
-                        .as_internal_mut()
-                        .edges
-                        .as_mut_ptr()
-                        .add(left_len + 1),
+                    right_node.reborrow().as_internal().edges.as_ptr(),
+                    left_node.reborrow_mut().as_internal_mut().edges.as_mut_ptr().add(left_len + 1),
                     right_len + 1,
                 );
 
                 for i in left_len + 1..left_len + right_len + 2 {
-                    Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i)
-                        .correct_parent_link();
+                    Handle::new_edge(left_node.reborrow_mut(), i).correct_parent_link();
                 }
 
-                Layout::new::<InternalNode<K, V>>()
+                Global.dealloc(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
             } else {
-                Layout::new::<LeafNode<K, V>>()
-            };
-            Global.dealloc(right_node.node.cast(), layout);
+                Global.dealloc(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
+            }
 
             Handle::new_edge(self.node, self.idx)
         }
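
The tail of the hunk also restructures how the merged-away right node is freed: instead of computing a `Layout` first and deallocating once afterwards, each branch now deallocates with the layout matching the node kind it has just established. A rough, hypothetical sketch of that pattern using the stable `std::alloc` functions (the real code goes through the `Global` allocator handle of the unstable allocator API):

// Hypothetical stand-in node types; only their sizes matter for the Layout.
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

struct LeafNode {
    _len: u16,
    _keys: [u64; 11],
}

struct InternalNode {
    _data: LeafNode,
    _edges: [*mut u8; 12],
}

// Frees a node allocation, picking the layout from the node kind implied by
// the height of the edge above it (height > 1 means the child is internal).
unsafe fn dealloc_child(node: *mut u8, parent_height: usize) {
    if parent_height > 1 {
        dealloc(node, Layout::new::<InternalNode>());
    } else {
        dealloc(node, Layout::new::<LeafNode>());
    }
}

fn main() {
    unsafe {
        let leaf_layout = Layout::new::<LeafNode>();
        let leaf = alloc(leaf_layout);
        if leaf.is_null() {
            handle_alloc_error(leaf_layout);
        }
        dealloc_child(leaf, 1);

        let internal_layout = Layout::new::<InternalNode>();
        let internal = alloc(internal_layout);
        if internal.is_null() {
            handle_alloc_error(internal_layout);
        }
        dealloc_child(internal, 2);
    }
}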