Skip to content

Commit e2223c9

Browse files
committed Feb 28, 2020
Auto merge of #68827 - ssomers:btree_navigation_revisited, r=Mark-Simulacrum
BTreeMap navigation done safer & faster It turns out that there was a faster way to do the tree navigation code bundled in #67073, by moving from edge to KV and from KV to next edge separately. It extracts most of the code as safe functions, and contains the duplication of handles within the short wrapper functions. This somehow hits a sweet spot in the compiler because it reports boosts all over the board: ``` >cargo benchcmp pre3.txt posz4.txt --threshold 5 name pre3.txt ns/iter posz4.txt ns/iter diff ns/iter diff % speedup btree::map::first_and_last_0 40 37 -3 -7.50% x 1.08 btree::map::first_and_last_100 58 44 -14 -24.14% x 1.32 btree::map::iter_1000 8,920 3,419 -5,501 -61.67% x 2.61 btree::map::iter_100000 1,069,290 411,615 -657,675 -61.51% x 2.60 btree::map::iter_20 169 58 -111 -65.68% x 2.91 btree::map::iter_mut_1000 8,701 3,303 -5,398 -62.04% x 2.63 btree::map::iter_mut_100000 1,034,560 405,975 -628,585 -60.76% x 2.55 btree::map::iter_mut_20 165 58 -107 -64.85% x 2.84 btree::set::clone_100 1,831 1,562 -269 -14.69% x 1.17 btree::set::clone_100_and_clear 1,831 1,565 -266 -14.53% x 1.17 btree::set::clone_100_and_into_iter 1,917 1,541 -376 -19.61% x 1.24 btree::set::clone_100_and_pop_all 2,609 2,441 -168 -6.44% x 1.07 btree::set::clone_100_and_remove_all 4,598 3,927 -671 -14.59% x 1.17 btree::set::clone_100_and_remove_half 2,765 2,551 -214 -7.74% x 1.08 btree::set::clone_10k 191,610 164,616 -26,994 -14.09% x 1.16 btree::set::clone_10k_and_clear 192,003 164,616 -27,387 -14.26% x 1.17 btree::set::clone_10k_and_into_iter 200,037 163,010 -37,027 -18.51% x 1.23 btree::set::clone_10k_and_pop_all 267,023 250,913 -16,110 -6.03% x 1.06 btree::set::clone_10k_and_remove_all 536,230 464,100 -72,130 -13.45% x 1.16 btree::set::clone_10k_and_remove_half 453,350 430,545 -22,805 -5.03% x 1.05 btree::set::difference_random_100_vs_100 1,787 801 -986 -55.18% x 2.23 btree::set::difference_random_100_vs_10k 2,978 2,696 -282 -9.47% x 1.10 btree::set::difference_random_10k_vs_100 
111,075 54,734 -56,341 -50.72% x 2.03 btree::set::difference_random_10k_vs_10k 246,380 175,980 -70,400 -28.57% x 1.40 btree::set::difference_staggered_100_vs_100 1,789 951 -838 -46.84% x 1.88 btree::set::difference_staggered_100_vs_10k 2,798 2,606 -192 -6.86% x 1.07 btree::set::difference_staggered_10k_vs_10k 176,452 97,401 -79,051 -44.80% x 1.81 btree::set::intersection_100_neg_vs_10k_pos 34 32 -2 -5.88% x 1.06 btree::set::intersection_100_pos_vs_100_neg 30 27 -3 -10.00% x 1.11 btree::set::intersection_random_100_vs_100 1,537 613 -924 -60.12% x 2.51 btree::set::intersection_random_100_vs_10k 2,793 2,649 -144 -5.16% x 1.05 btree::set::intersection_random_10k_vs_10k 222,127 147,166 -74,961 -33.75% x 1.51 btree::set::intersection_staggered_100_vs_100 1,447 622 -825 -57.01% x 2.33 btree::set::intersection_staggered_100_vs_10k 2,606 2,382 -224 -8.60% x 1.09 btree::set::intersection_staggered_10k_vs_10k 143,620 58,790 -84,830 -59.07% x 2.44 btree::set::is_subset_100_vs_100 1,349 488 -861 -63.83% x 2.76 btree::set::is_subset_100_vs_10k 1,720 1,428 -292 -16.98% x 1.20 btree::set::is_subset_10k_vs_10k 135,984 48,527 -87,457 -64.31% x 2.80 ``` The `first_and_last` ones are noise (they don't do iteration), the others seem genuine. As always, approved by Miri. Also, a separate commit with some more benchmarks of mutable behaviour (which also benefit). r? @cuviper
·
1.88.0 · 1.43.0
2 parents bfc32dd + 9f7b58f commit e2223c9

File tree

4 files changed

+233
-166
lines changed

4 files changed

+233
-166
lines changed
 

‎src/liballoc/benches/btree/set.rs

Lines changed: 85 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -50,43 +50,112 @@ macro_rules! set_bench {
5050
};
5151
}
5252

53-
const BUILD_SET_SIZE: usize = 100;
53+
#[bench]
54+
pub fn clone_100(b: &mut Bencher) {
55+
let src = pos(100);
56+
b.iter(|| src.clone())
57+
}
5458

5559
#[bench]
56-
pub fn build_and_clear(b: &mut Bencher) {
57-
b.iter(|| pos(BUILD_SET_SIZE).clear())
60+
pub fn clone_100_and_clear(b: &mut Bencher) {
61+
let src = pos(100);
62+
b.iter(|| src.clone().clear())
5863
}
5964

6065
#[bench]
61-
pub fn build_and_drop(b: &mut Bencher) {
62-
b.iter(|| pos(BUILD_SET_SIZE))
66+
pub fn clone_100_and_into_iter(b: &mut Bencher) {
67+
let src = pos(100);
68+
b.iter(|| src.clone().into_iter().count())
6369
}
6470

6571
#[bench]
66-
pub fn build_and_into_iter(b: &mut Bencher) {
67-
b.iter(|| pos(BUILD_SET_SIZE).into_iter().count())
72+
pub fn clone_100_and_pop_all(b: &mut Bencher) {
73+
let src = pos(100);
74+
b.iter(|| {
75+
let mut set = src.clone();
76+
while set.pop_first().is_some() {}
77+
set
78+
});
6879
}
6980

7081
#[bench]
71-
pub fn build_and_pop_all(b: &mut Bencher) {
82+
pub fn clone_100_and_remove_all(b: &mut Bencher) {
83+
let src = pos(100);
7284
b.iter(|| {
73-
let mut s = pos(BUILD_SET_SIZE);
74-
while s.pop_first().is_some() {}
75-
s
85+
let mut set = src.clone();
86+
while let Some(elt) = set.iter().copied().next() {
87+
set.remove(&elt);
88+
}
89+
set
7690
});
7791
}
7892

7993
#[bench]
80-
pub fn build_and_remove_all(b: &mut Bencher) {
94+
pub fn clone_100_and_remove_half(b: &mut Bencher) {
95+
let src = pos(100);
8196
b.iter(|| {
82-
let mut s = pos(BUILD_SET_SIZE);
83-
while let Some(elt) = s.iter().copied().next() {
84-
s.remove(&elt);
97+
let mut set = src.clone();
98+
for i in (2..=100 as i32).step_by(2) {
99+
set.remove(&i);
85100
}
86-
s
101+
assert_eq!(set.len(), 100 / 2);
102+
set
103+
})
104+
}
105+
106+
#[bench]
107+
pub fn clone_10k(b: &mut Bencher) {
108+
let src = pos(10_000);
109+
b.iter(|| src.clone())
110+
}
111+
112+
#[bench]
113+
pub fn clone_10k_and_clear(b: &mut Bencher) {
114+
let src = pos(10_000);
115+
b.iter(|| src.clone().clear())
116+
}
117+
118+
#[bench]
119+
pub fn clone_10k_and_into_iter(b: &mut Bencher) {
120+
let src = pos(10_000);
121+
b.iter(|| src.clone().into_iter().count())
122+
}
123+
124+
#[bench]
125+
pub fn clone_10k_and_pop_all(b: &mut Bencher) {
126+
let src = pos(10_000);
127+
b.iter(|| {
128+
let mut set = src.clone();
129+
while set.pop_first().is_some() {}
130+
set
131+
});
132+
}
133+
134+
#[bench]
135+
pub fn clone_10k_and_remove_all(b: &mut Bencher) {
136+
let src = pos(10_000);
137+
b.iter(|| {
138+
let mut set = src.clone();
139+
while let Some(elt) = set.iter().copied().next() {
140+
set.remove(&elt);
141+
}
142+
set
87143
});
88144
}
89145

146+
#[bench]
147+
pub fn clone_10k_and_remove_half(b: &mut Bencher) {
148+
let src = pos(10_000);
149+
b.iter(|| {
150+
let mut set = src.clone();
151+
for i in (2..=10_000 as i32).step_by(2) {
152+
set.remove(&i);
153+
}
154+
assert_eq!(set.len(), 10_000 / 2);
155+
set
156+
})
157+
}
158+
90159
set_bench! {intersection_100_neg_vs_100_pos, intersection, count, [neg(100), pos(100)]}
91160
set_bench! {intersection_100_neg_vs_10k_pos, intersection, count, [neg(100), pos(10_000)]}
92161
set_bench! {intersection_100_pos_vs_100_neg, intersection, count, [pos(100), neg(100)]}

‎src/liballoc/collections/btree/map.rs

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1487,16 +1487,13 @@ impl<K, V> Drop for IntoIter<K, V> {
14871487
}
14881488

14891489
unsafe {
1490-
let leaf_node = ptr::read(&self.front).into_node();
1491-
if leaf_node.is_shared_root() {
1490+
let mut node = ptr::read(&self.front).into_node().forget_type();
1491+
if node.is_shared_root() {
14921492
return;
14931493
}
14941494

1495-
if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
1496-
let mut cur_internal_node = first_parent.into_node();
1497-
while let Some(parent) = cur_internal_node.deallocate_and_ascend() {
1498-
cur_internal_node = parent.into_node()
1499-
}
1495+
while let Some(parent) = node.deallocate_and_ascend() {
1496+
node = parent.into_node().forget_type();
15001497
}
15011498
}
15021499
}

‎src/liballoc/collections/btree/navigate.rs

Lines changed: 122 additions & 123 deletions
Original file line numberDiff line numberDiff line change
@@ -3,151 +3,112 @@ use core::ptr;
33
use super::node::{marker, ForceResult::*, Handle, NodeRef};
44
use super::unwrap_unchecked;
55

6-
macro_rules! def_next {
7-
{ unsafe fn $name:ident : $next_kv:ident $next_edge:ident $initial_leaf_edge:ident } => {
8-
/// Given a leaf edge handle into an immutable tree, returns a handle to the next
9-
/// leaf edge and references to the key and value between these edges.
10-
/// Unsafe because the caller must ensure that the given leaf edge has a successor.
11-
unsafe fn $name <'a, K: 'a, V: 'a>(
12-
leaf_edge: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
13-
) -> (Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>, &'a K, &'a V) {
14-
let mut cur_handle = match leaf_edge.$next_kv() {
15-
Ok(leaf_kv) => {
16-
let (k, v) = leaf_kv.into_kv();
17-
let next_leaf_edge = leaf_kv.$next_edge();
18-
return (next_leaf_edge, k, v);
19-
}
20-
Err(last_edge) => {
21-
let next_level = last_edge.into_node().ascend().ok();
22-
unwrap_unchecked(next_level)
23-
}
24-
};
25-
26-
loop {
27-
cur_handle = match cur_handle.$next_kv() {
28-
Ok(internal_kv) => {
29-
let (k, v) = internal_kv.into_kv();
30-
let next_internal_edge = internal_kv.$next_edge();
31-
let next_leaf_edge = next_internal_edge.descend().$initial_leaf_edge();
32-
return (next_leaf_edge, k, v);
33-
}
34-
Err(last_edge) => {
35-
let next_level = last_edge.into_node().ascend().ok();
36-
unwrap_unchecked(next_level)
37-
}
38-
}
6+
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
7+
/// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
8+
/// on the right side, which is either in the same leaf node or in an ancestor node.
9+
/// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
10+
pub fn next_kv(
11+
self,
12+
) -> Result<
13+
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
14+
NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
15+
> {
16+
let mut edge = self.forget_node_type();
17+
loop {
18+
edge = match edge.right_kv() {
19+
Ok(internal_kv) => return Ok(internal_kv),
20+
Err(last_edge) => match last_edge.into_node().ascend() {
21+
Ok(parent_edge) => parent_edge.forget_node_type(),
22+
Err(root) => return Err(root.forget_type()),
23+
},
3924
}
4025
}
41-
};
42-
}
43-
44-
macro_rules! def_next_mut {
45-
{ unsafe fn $name:ident : $next_kv:ident $next_edge:ident $initial_leaf_edge:ident } => {
46-
/// Given a leaf edge handle into a mutable tree, returns handles to the next
47-
/// leaf edge and to the KV between these edges.
48-
/// Unsafe for two reasons:
49-
/// - the caller must ensure that the given leaf edge has a successor;
50-
/// - both returned handles represent mutable references into the same tree
51-
/// that can easily invalidate each other, even on immutable use.
52-
unsafe fn $name <'a, K: 'a, V: 'a>(
53-
leaf_edge: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
54-
) -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
55-
Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>) {
56-
let mut cur_handle = match leaf_edge.$next_kv() {
57-
Ok(leaf_kv) => {
58-
let next_leaf_edge = ptr::read(&leaf_kv).$next_edge();
59-
return (next_leaf_edge, leaf_kv.forget_node_type());
60-
}
61-
Err(last_edge) => {
62-
let next_level = last_edge.into_node().ascend().ok();
63-
unwrap_unchecked(next_level)
64-
}
65-
};
26+
}
6627

67-
loop {
68-
cur_handle = match cur_handle.$next_kv() {
69-
Ok(internal_kv) => {
70-
let next_internal_edge = ptr::read(&internal_kv).$next_edge();
71-
let next_leaf_edge = next_internal_edge.descend().$initial_leaf_edge();
72-
return (next_leaf_edge, internal_kv.forget_node_type());
73-
}
74-
Err(last_edge) => {
75-
let next_level = last_edge.into_node().ascend().ok();
76-
unwrap_unchecked(next_level)
77-
}
78-
}
28+
/// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
29+
/// on the left side, which is either in the same leaf node or in an ancestor node.
30+
/// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
31+
pub fn next_back_kv(
32+
self,
33+
) -> Result<
34+
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
35+
NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
36+
> {
37+
let mut edge = self.forget_node_type();
38+
loop {
39+
edge = match edge.left_kv() {
40+
Ok(internal_kv) => return Ok(internal_kv),
41+
Err(last_edge) => match last_edge.into_node().ascend() {
42+
Ok(parent_edge) => parent_edge.forget_node_type(),
43+
Err(root) => return Err(root.forget_type()),
44+
},
7945
}
8046
}
81-
};
47+
}
8248
}
8349

84-
macro_rules! def_next_dealloc {
85-
{ unsafe fn $name:ident : $next_kv:ident $next_edge:ident $initial_leaf_edge:ident } => {
86-
/// Given a leaf edge handle into an owned tree, returns a handle to the next
87-
/// leaf edge and the key and value between these edges, while deallocating
88-
/// any node left behind.
50+
macro_rules! def_next_kv_uncheched_dealloc {
51+
{ unsafe fn $name:ident : $adjacent_kv:ident } => {
52+
/// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
53+
/// while deallocating any node left behind.
8954
/// Unsafe for two reasons:
90-
/// - the caller must ensure that the given leaf edge has a successor;
91-
/// - the node pointed at by the given handle, and its ancestors, may be deallocated,
55+
/// - The caller must ensure that the leaf edge is not the last one in the tree.
56+
/// - The node pointed at by the given handle, and its ancestors, may be deallocated,
9257
/// while the reference to those nodes in the surviving ancestors is left dangling;
93-
/// thus using the returned handle is dangerous.
58+
/// thus using the returned handle to navigate further is dangerous.
9459
unsafe fn $name <K, V>(
9560
leaf_edge: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
96-
) -> (Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>, K, V) {
97-
let mut cur_handle = match leaf_edge.$next_kv() {
98-
Ok(leaf_kv) => {
99-
let k = ptr::read(leaf_kv.reborrow().into_kv().0);
100-
let v = ptr::read(leaf_kv.reborrow().into_kv().1);
101-
let next_leaf_edge = leaf_kv.$next_edge();
102-
return (next_leaf_edge, k, v);
103-
}
104-
Err(last_edge) => {
105-
unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
106-
}
107-
};
108-
61+
) -> Handle<NodeRef<marker::Owned, K, V, marker::LeafOrInternal>, marker::KV> {
62+
let mut edge = leaf_edge.forget_node_type();
10963
loop {
110-
cur_handle = match cur_handle.$next_kv() {
111-
Ok(internal_kv) => {
112-
let k = ptr::read(internal_kv.reborrow().into_kv().0);
113-
let v = ptr::read(internal_kv.reborrow().into_kv().1);
114-
let next_internal_edge = internal_kv.$next_edge();
115-
let next_leaf_edge = next_internal_edge.descend().$initial_leaf_edge();
116-
return (next_leaf_edge, k, v);
117-
}
64+
edge = match edge.$adjacent_kv() {
65+
Ok(internal_kv) => return internal_kv,
11866
Err(last_edge) => {
119-
unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
67+
let parent_edge = last_edge.into_node().deallocate_and_ascend();
68+
unwrap_unchecked(parent_edge).forget_node_type()
12069
}
12170
}
12271
}
12372
}
12473
};
12574
}
12675

127-
def_next! {unsafe fn next_unchecked: right_kv right_edge first_leaf_edge}
128-
def_next! {unsafe fn next_back_unchecked: left_kv left_edge last_leaf_edge}
129-
def_next_mut! {unsafe fn next_unchecked_mut: right_kv right_edge first_leaf_edge}
130-
def_next_mut! {unsafe fn next_back_unchecked_mut: left_kv left_edge last_leaf_edge}
131-
def_next_dealloc! {unsafe fn next_unchecked_deallocating: right_kv right_edge first_leaf_edge}
132-
def_next_dealloc! {unsafe fn next_back_unchecked_deallocating: left_kv left_edge last_leaf_edge}
76+
def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
77+
def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
78+
79+
/// This replaces the value behind the `v` unique reference by calling the
80+
/// relevant function.
81+
///
82+
/// Safety: The change closure must not panic.
83+
#[inline]
84+
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
85+
let value = ptr::read(v);
86+
let (new_value, ret) = change(value);
87+
ptr::write(v, new_value);
88+
ret
89+
}
13390

13491
impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
13592
/// Moves the leaf edge handle to the next leaf edge and returns references to the
13693
/// key and value in between.
13794
/// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
13895
pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
139-
let (next_edge, k, v) = next_unchecked(*self);
140-
*self = next_edge;
141-
(k, v)
96+
replace(self, |leaf_edge| {
97+
let kv = leaf_edge.next_kv();
98+
let kv = unwrap_unchecked(kv.ok());
99+
(kv.next_leaf_edge(), kv.into_kv())
100+
})
142101
}
143102

144103
/// Moves the leaf edge handle to the previous leaf edge and returns references to the
145104
/// key and value in between.
146105
/// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
147106
pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
148-
let (next_edge, k, v) = next_back_unchecked(*self);
149-
*self = next_edge;
150-
(k, v)
107+
replace(self, |leaf_edge| {
108+
let kv = leaf_edge.next_back_kv();
109+
let kv = unwrap_unchecked(kv.ok());
110+
(kv.next_back_leaf_edge(), kv.into_kv())
111+
})
151112
}
152113
}
153114

@@ -158,8 +119,11 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
158119
/// - The caller must ensure that the leaf edge is not the last one in the tree.
159120
/// - Using the updated handle may well invalidate the returned references.
160121
pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
161-
let (next_edge, kv) = next_unchecked_mut(ptr::read(self));
162-
*self = next_edge;
122+
let kv = replace(self, |leaf_edge| {
123+
let kv = leaf_edge.next_kv();
124+
let kv = unwrap_unchecked(kv.ok());
125+
(ptr::read(&kv).next_leaf_edge(), kv)
126+
});
163127
// Doing the descend (and perhaps another move) invalidates the references
164128
// returned by `into_kv_mut`, so we have to do this last.
165129
kv.into_kv_mut()
@@ -171,8 +135,11 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
171135
/// - The caller must ensure that the leaf edge is not the first one in the tree.
172136
/// - Using the updated handle may well invalidate the returned references.
173137
pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
174-
let (next_edge, kv) = next_back_unchecked_mut(ptr::read(self));
175-
*self = next_edge;
138+
let kv = replace(self, |leaf_edge| {
139+
let kv = leaf_edge.next_back_kv();
140+
let kv = unwrap_unchecked(kv.ok());
141+
(ptr::read(&kv).next_back_leaf_edge(), kv)
142+
});
176143
// Doing the descend (and perhaps another move) invalidates the references
177144
// returned by `into_kv_mut`, so we have to do this last.
178145
kv.into_kv_mut()
@@ -192,9 +159,12 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
192159
/// if the two preconditions above hold.
193160
/// - Using the updated handle may well invalidate the returned references.
194161
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
195-
let (next_edge, k, v) = next_unchecked_deallocating(ptr::read(self));
196-
*self = next_edge;
197-
(k, v)
162+
replace(self, |leaf_edge| {
163+
let kv = next_kv_unchecked_dealloc(leaf_edge);
164+
let k = ptr::read(kv.reborrow().into_kv().0);
165+
let v = ptr::read(kv.reborrow().into_kv().1);
166+
(kv.next_leaf_edge(), (k, v))
167+
})
198168
}
199169

200170
/// Moves the leaf edge handle to the previous leaf edge and returns the key
@@ -209,9 +179,12 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
209179
/// if the two preconditions above hold.
210180
/// - Using the updated handle may well invalidate the returned references.
211181
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
212-
let (next_edge, k, v) = next_back_unchecked_deallocating(ptr::read(self));
213-
*self = next_edge;
214-
(k, v)
182+
replace(self, |leaf_edge| {
183+
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
184+
let k = ptr::read(kv.reborrow().into_kv().0);
185+
let v = ptr::read(kv.reborrow().into_kv().1);
186+
(kv.next_back_leaf_edge(), (k, v))
187+
})
215188
}
216189
}
217190

@@ -242,3 +215,29 @@ impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
242215
}
243216
}
244217
}
218+
219+
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
220+
/// Returns the leaf edge closest to a KV for forward navigation.
221+
pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
222+
match self.force() {
223+
Leaf(leaf_kv) => leaf_kv.right_edge(),
224+
Internal(internal_kv) => {
225+
let next_internal_edge = internal_kv.right_edge();
226+
next_internal_edge.descend().first_leaf_edge()
227+
}
228+
}
229+
}
230+
231+
/// Returns the leaf edge closest to a KV for backward navigation.
232+
pub fn next_back_leaf_edge(
233+
self,
234+
) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
235+
match self.force() {
236+
Leaf(leaf_kv) => leaf_kv.left_edge(),
237+
Internal(internal_kv) => {
238+
let next_internal_edge = internal_kv.left_edge();
239+
next_internal_edge.descend().last_leaf_edge()
240+
}
241+
}
242+
}
243+
}

‎src/liballoc/collections/btree/node.rs

Lines changed: 22 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -452,31 +452,25 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
452452
}
453453
}
454454

455-
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
455+
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
456456
/// Similar to `ascend`, gets a reference to a node's parent node, but also
457457
/// deallocate the current node in the process. This is unsafe because the
458458
/// current node will still be accessible despite being deallocated.
459459
pub unsafe fn deallocate_and_ascend(
460460
self,
461461
) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
462462
assert!(!self.is_shared_root());
463+
let height = self.height;
463464
let node = self.node;
464465
let ret = self.ascend().ok();
465-
Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
466-
ret
467-
}
468-
}
469-
470-
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
471-
/// Similar to `ascend`, gets a reference to a node's parent node, but also
472-
/// deallocate the current node in the process. This is unsafe because the
473-
/// current node will still be accessible despite being deallocated.
474-
pub unsafe fn deallocate_and_ascend(
475-
self,
476-
) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
477-
let node = self.node;
478-
let ret = self.ascend().ok();
479-
Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
466+
Global.dealloc(
467+
node.cast(),
468+
if height > 0 {
469+
Layout::new::<InternalNode<K, V>>()
470+
} else {
471+
Layout::new::<LeafNode<K, V>>()
472+
},
473+
);
480474
ret
481475
}
482476
}
@@ -1427,15 +1421,23 @@ unsafe fn move_edges<K, V>(
14271421
dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
14281422
}
14291423

1430-
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1424+
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
14311425
pub fn forget_node_type(
14321426
self,
1433-
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1434-
unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1427+
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1428+
unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1429+
}
1430+
}
1431+
1432+
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1433+
pub fn forget_node_type(
1434+
self,
1435+
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1436+
unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
14351437
}
14361438
}
14371439

1438-
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
1440+
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
14391441
pub fn forget_node_type(
14401442
self,
14411443
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {

0 commit comments

Comments
 (0)
Please sign in to comment.