Auto merge of #81486 - ssomers:btree_separate_drop, r=Mark-Simulacrum
BTreeMap: disentangle Drop implementation from IntoIter

No longer require every `BTreeMap` to dig up its last leaf edge before dying. This speeds up the `clone_` benchmarks by 25% for normal keys and values (far less for huge values).
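
For context, a quick sanity check of the behavior this change must preserve, written against the public API only (not part of the patch): dropping a `BTreeMap`, or an `IntoIter` that was only partially consumed, still runs the destructor of every remaining key and value exactly once.

// Sanity-check sketch, not part of the patch: observable drop behavior is unchanged.
use std::collections::BTreeMap;
use std::sync::atomic::{AtomicUsize, Ordering};

static DROPS: AtomicUsize = AtomicUsize::new(0);

struct Counted(u32);
impl Drop for Counted {
    fn drop(&mut self) {
        DROPS.fetch_add(1, Ordering::Relaxed);
    }
}

fn main() {
    // Dropping the whole map exercises `Drop for BTreeMap`.
    let map: BTreeMap<u32, Counted> = (0..100).map(|i| (i, Counted(i))).collect();
    drop(map);
    assert_eq!(DROPS.load(Ordering::Relaxed), 100);

    // Dropping a partially consumed iterator exercises `Drop for IntoIter`,
    // which must clean up the 90 remaining entries.
    let map: BTreeMap<u32, Counted> = (0..100).map(|i| (i, Counted(i))).collect();
    let mut iter = map.into_iter();
    iter.by_ref().take(10).for_each(drop);
    drop(iter);
    assert_eq!(DROPS.load(Ordering::Relaxed), 200);
}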

r? `@Mark-Simulacrum`
bors committed Feb 12, 2021
2 parents e9920ef + 3045b75 commit a118ee2
Showing 2 changed files with 106 additions and 65 deletions.
60 changes: 34 additions & 26 deletions library/alloc/src/collections/btree/map.rs
@@ -145,8 +145,8 @@ pub struct BTreeMap<K, V> {
 #[stable(feature = "btree_drop", since = "1.7.0")]
 unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
     fn drop(&mut self) {
-        unsafe {
-            drop(ptr::read(self).into_iter());
+        if let Some(root) = self.root.take() {
+            Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length };
         }
     }
 }
@@ -332,6 +332,14 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
     }
 }
 
+/// A simplified version of `IntoIter` that is not double-ended and has only one
+/// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to
+/// drop an entire tree without the need to first look up a `back` leaf edge.
+struct Dropper<K, V> {
+    front: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
+    remaining_length: usize,
+}
+
 /// An iterator over the keys of a `BTreeMap`.
 ///
 /// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
@@ -1410,42 +1418,42 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
     }
 }
 
-#[stable(feature = "btree_drop", since = "1.7.0")]
-impl<K, V> Drop for IntoIter<K, V> {
+impl<K, V> Drop for Dropper<K, V> {
     fn drop(&mut self) {
-        struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
+        // Similar to advancing a non-fusing iterator.
+        fn next_or_end<K, V>(this: &mut Dropper<K, V>) -> Option<(K, V)> {
+            if this.remaining_length == 0 {
+                unsafe { ptr::read(&this.front).deallocating_end() }
+                None
+            } else {
+                this.remaining_length -= 1;
+                Some(unsafe { this.front.deallocating_next_unchecked() })
+            }
+        }
+
+        struct DropGuard<'a, K, V>(&'a mut Dropper<K, V>);
 
         impl<'a, K, V> Drop for DropGuard<'a, K, V> {
             fn drop(&mut self) {
                 // Continue the same loop we perform below. This only runs when unwinding, so we
                 // don't have to care about panics this time (they'll abort).
-                while let Some(_) = self.0.next() {}
-
-                unsafe {
-                    let mut node =
-                        ptr::read(&self.0.front).unwrap_unchecked().into_node().forget_type();
-                    while let Some(parent) = node.deallocate_and_ascend() {
-                        node = parent.into_node().forget_type();
-                    }
-                }
+                while let Some(_pair) = next_or_end(&mut self.0) {}
             }
         }
 
-        while let Some(pair) = self.next() {
+        while let Some(pair) = next_or_end(self) {
             let guard = DropGuard(self);
             drop(pair);
             mem::forget(guard);
         }
+    }
+}
 
-        unsafe {
-            if let Some(front) = ptr::read(&self.front) {
-                let mut node = front.into_node().forget_type();
-                // Most of the nodes have been deallocated while traversing
-                // but one pile from a leaf up to the root is left standing.
-                while let Some(parent) = node.deallocate_and_ascend() {
-                    node = parent.into_node().forget_type();
-                }
-            }
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
+    fn drop(&mut self) {
+        if let Some(front) = self.front.take() {
+            Dropper { front, remaining_length: self.length };
         }
     }
 }
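
The `DropGuard`/`mem::forget` dance in the hunk above is the usual panic-safety pattern for drop loops. Here is a standalone sketch of that pattern — the `Drain` type and its `Vec` storage are invented for illustration and are not the library's code:

// Sketch of the drop-guard pattern: if dropping one element panics, the
// guard's own `Drop` keeps draining the rest, so nothing is leaked; on the
// normal path the guard is forgotten so cleanup runs only once.
use std::mem;

struct Drain<T> {
    items: Vec<T>,
}

impl<T> Drop for Drain<T> {
    fn drop(&mut self) {
        struct DropGuard<'a, T>(&'a mut Drain<T>);

        impl<'a, T> Drop for DropGuard<'a, T> {
            fn drop(&mut self) {
                // Only reached while unwinding from a panicking element drop:
                // keep going; a second panic would abort anyway.
                while let Some(item) = self.0.items.pop() {
                    drop(item);
                }
            }
        }

        while let Some(item) = self.items.pop() {
            let guard = DropGuard(self);
            drop(item); // may panic
            mem::forget(guard); // success: disarm the guard
        }
    }
}

fn main() {
    let d = Drain { items: vec![String::from("a"), String::from("b")] };
    drop(d);
}

On the happy path the guard is always forgotten before it can run, so the cleanup loop executes exactly once; only a panic in the element's destructor lets the guard's `Drop` take over.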
@@ -1459,7 +1467,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.front.as_mut().unwrap().next_unchecked() })
+            Some(unsafe { self.front.as_mut().unwrap().deallocating_next_unchecked() })
         }
     }
 
@@ -1475,7 +1483,7 @@ impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() })
+            Some(unsafe { self.back.as_mut().unwrap().deallocating_next_back_unchecked() })
         }
     }
 }
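
The `self.length` checks in `next` and `next_back` above are what keep the front and back cursors from handing out the same pair: each call consumes one unit of the shared budget. A simplified model of that counting argument, with made-up names and plain `Vec` storage rather than the B-tree code:

// Illustrative model only: a two-ended drain whose remaining-count check
// guarantees the front and back never cross or return the same element.
struct TwoEndedDrain<T> {
    items: Vec<Option<T>>,
    front: usize,
    back: usize, // one past the last element not yet handed out
}

impl<T> TwoEndedDrain<T> {
    fn remaining(&self) -> usize {
        self.back - self.front
    }

    fn next(&mut self) -> Option<T> {
        if self.remaining() == 0 {
            None // the real iterator would also release the remaining tree nodes here
        } else {
            let item = self.items[self.front].take();
            self.front += 1;
            item
        }
    }

    fn next_back(&mut self) -> Option<T> {
        if self.remaining() == 0 {
            None
        } else {
            self.back -= 1;
            self.items[self.back].take()
        }
    }
}

fn main() {
    let mut drain = TwoEndedDrain { items: (0..5).map(Some).collect(), front: 0, back: 5 };
    assert_eq!(drain.next(), Some(0));
    assert_eq!(drain.next_back(), Some(4));
    assert_eq!(drain.remaining(), 3);
}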
111 changes: 72 additions & 39 deletions library/alloc/src/collections/btree/navigate.rs
@@ -289,37 +289,76 @@ impl<BorrowType: marker::BorrowType, K, V>
     }
 }
 
-macro_rules! def_next_kv_uncheched_dealloc {
-    { unsafe fn $name:ident : $adjacent_kv:ident } => {
-        /// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
-        /// while deallocating any node left behind yet leaving the corresponding edge
-        /// in its parent node dangling.
-        ///
-        /// # Safety
-        /// - The leaf edge must not be the last one in the direction travelled.
-        /// - The node carrying the next KV returned must not have been deallocated by a
-        ///   previous call on any handle obtained for this tree.
-        unsafe fn $name <K, V>(
-            leaf_edge: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
-        ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
-            let mut edge = leaf_edge.forget_node_type();
-            loop {
-                edge = match edge.$adjacent_kv() {
-                    Ok(internal_kv) => return internal_kv,
-                    Err(last_edge) => {
-                        unsafe {
-                            let parent_edge = last_edge.into_node().deallocate_and_ascend();
-                            parent_edge.unwrap_unchecked().forget_node_type()
-                        }
-                    }
+impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
+    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+    /// on the right side, and the key-value pair in between, which is either
+    /// in the same leaf node, in an ancestor node, or non-existent.
+    ///
+    /// This method also deallocates any node(s) it reaches the end of. This
+    /// implies that if no more key-value pair exists, the entire remainder of
+    /// the tree will have been deallocated and there is nothing left to return.
+    ///
+    /// # Safety
+    /// The given edge must not have been previously returned by counterpart
+    /// `deallocating_next_back`.
+    unsafe fn deallocating_next(self) -> Option<(Self, (K, V))> {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.right_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    return Some((kv.next_leaf_edge(), (k, v)));
                 }
+                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
+                    Some(parent_edge) => parent_edge.forget_node_type(),
+                    None => return None,
+                },
             }
         }
-    };
-}
+    }
 
-def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
-def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
+    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+    /// on the left side, and the key-value pair in between, which is either
+    /// in the same leaf node, in an ancestor node, or non-existent.
+    ///
+    /// This method also deallocates any node(s) it reaches the end of. This
+    /// implies that if no more key-value pair exists, the entire remainder of
+    /// the tree will have been deallocated and there is nothing left to return.
+    ///
+    /// # Safety
+    /// The given edge must not have been previously returned by counterpart
+    /// `deallocating_next`.
+    unsafe fn deallocating_next_back(self) -> Option<(Self, (K, V))> {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.left_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    return Some((kv.next_back_leaf_edge(), (k, v)));
+                }
+                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
+                    Some(parent_edge) => parent_edge.forget_node_type(),
+                    None => return None,
+                },
+            }
+        }
+    }
+
+    /// Deallocates a pile of nodes from the leaf up to the root.
+    /// This is the only way to deallocate the remainder of a tree after
+    /// `deallocating_next` and `deallocating_next_back` have been nibbling at
+    /// both sides of the tree, and have hit the same edge. As it is intended
+    /// only to be called when all keys and values have been returned,
+    /// no cleanup is done on any of the keys or values.
+    pub fn deallocating_end(self) {
+        let mut edge = self.forget_node_type();
+        while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
+            edge = parent_edge.forget_node_type();
+        }
+    }
+}
 
 impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// Moves the leaf edge handle to the next leaf edge and returns references to the
@@ -394,12 +433,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// The only safe way to proceed with the updated handle is to compare it, drop it,
     /// call this method again subject to its safety conditions, or call counterpart
     /// `next_back_unchecked` subject to its safety conditions.
-    pub unsafe fn next_unchecked(&mut self) -> (K, V) {
-        super::mem::replace(self, |leaf_edge| {
-            let kv = unsafe { next_kv_unchecked_dealloc(leaf_edge) };
-            let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
-            let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
-            (kv.next_leaf_edge(), (k, v))
+    pub unsafe fn deallocating_next_unchecked(&mut self) -> (K, V) {
+        super::mem::replace(self, |leaf_edge| unsafe {
+            leaf_edge.deallocating_next().unwrap_unchecked()
         })
     }
 
@@ -415,12 +451,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// The only safe way to proceed with the updated handle is to compare it, drop it,
     /// call this method again subject to its safety conditions, or call counterpart
     /// `next_unchecked` subject to its safety conditions.
-    pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
-        super::mem::replace(self, |leaf_edge| {
-            let kv = unsafe { next_back_kv_unchecked_dealloc(leaf_edge) };
-            let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
-            let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
-            (kv.next_back_leaf_edge(), (k, v))
+    pub unsafe fn deallocating_next_back_unchecked(&mut self) -> (K, V) {
+        super::mem::replace(self, |leaf_edge| unsafe {
+            leaf_edge.deallocating_next_back().unwrap_unchecked()
         })
     }
 }
