// alloc/collections/btree/node.rs

1// This is an attempt at an implementation following the ideal
2//
3// ```
4// struct BTreeMap<K, V> {
5//     height: usize,
6//     root: Option<Box<Node<K, V, height>>>
7// }
8//
9// struct Node<K, V, height: usize> {
10//     keys: [K; 2 * B - 1],
11//     vals: [V; 2 * B - 1],
12//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
13//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
14//     len: u16,
15// }
16// ```
17//
18// Since Rust doesn't actually have dependent types and polymorphic recursion,
19// we make do with lots of unsafety.
20
21// A major goal of this module is to avoid complexity by treating the tree as a generic (if
22// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
23// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
24// even what underfull means. However, we do rely on a few invariants:
25//
26// - Trees must have uniform depth/height. This means that every path down to a leaf from a
27//   given node has exactly the same length.
28// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
29//   This implies that even an empty node has at least one edge.
30//   For a leaf node, "having an edge" only means we can identify a position in the node,
31//   since leaf edges are empty and need no data representation. In an internal node,
32//   an edge both identifies a position and contains a pointer to a child node.
33
34use core::marker::PhantomData;
35use core::mem::{self, MaybeUninit};
36use core::ptr::{self, NonNull};
37use core::slice::SliceIndex;
38
39use crate::alloc::{Allocator, Layout};
40use crate::boxed::Box;
41
/// The branching-factor parameter of the B-tree. Every node stores at most
/// `2 * B - 1` key-value pairs and, if internal, `2 * B` edges.
const B: usize = 6;
/// Maximum number of key-value pairs a node can hold.
pub(super) const CAPACITY: usize = 2 * B - 1;
/// Minimum number of key-value pairs left in each half after a node is split.
pub(super) const MIN_LEN_AFTER_SPLIT: usize = B - 1;
// Index of the middle key-value pair of a full node; presumably used by
// splitting code outside this chunk -- TODO confirm against callers.
const KV_IDX_CENTER: usize = B - 1;
// Edge indices bracketing the center pair of a full node.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
48
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    /// Pointer to this node's parent internal node, or `None` for the root.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
67
impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `this` points to a (possibly uninitialized) `LeafNode`
    /// that is valid for writes.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            // Field-projected raw-pointer writes: no reference to the possibly
            // uninitialized struct is ever created.
            (&raw mut (*this).parent).write(None);
            (&raw mut (*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
        let mut leaf = Box::new_uninit_in(alloc);
        unsafe {
            // SAFETY: `leaf` points to a `LeafNode`
            LeafNode::init(leaf.as_mut_ptr());
            // SAFETY: `leaf` was just initialized
            leaf.assume_init()
        }
    }
}
95
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    // Must stay the first field: the `InternalNode* -> LeafNode*` cast relies
    // on `repr(C)` placing `data` at offset zero.
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
111
impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge.
    unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
        unsafe {
            let mut node = Box::<Self, _>::new_uninit_in(alloc);
            // We only need to initialize the data; the edges are MaybeUninit.
            // SAFETY: the projection points at the `LeafNode` prefix of the
            // freshly allocated (uninitialized) node.
            LeafNode::init(&raw mut (*node.as_mut_ptr()).data);
            node.assume_init()
        }
    }
}
128
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
/// (The `LeafNode` pointee type is valid for both cases thanks to `repr(C)` on
/// `InternalNode`.)
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
136
137// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
138// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
139// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
140// However, whenever a public type wraps `NodeRef`, make sure that it has the
141// correct variance.
142///
143/// A reference to a node.
144///
145/// This type has a number of parameters that controls how it acts:
146/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
147///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
148///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
149///      with respect to keys and tree structure, but also allows many
150///      mutable references to values throughout the tree to coexist.
151///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
152///      although insert methods allow a mutable pointer to a value to coexist.
153///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
154///      but does not have a destructor, and must be cleaned up manually.
155///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
156///      but has methods to destroy the tree bit by bit, and ordinary methods,
157///      while not marked as unsafe to call, can invoke UB if called incorrectly.
158///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
159///   effectively applies to the entire tree, not just to the node itself.
160/// - `K` and `V`: These are the types of keys and values stored in the nodes.
161/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
162///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
163///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
164///   `NodeRef` could be pointing to either type of node.
165///   `Type` is named `NodeType` when used outside `NodeRef`.
166///
167/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
168/// exploit static type safety. There are limitations in the way we can apply
169/// such restrictions:
170/// - For each type parameter, we can only define a method either generically
171///   or for one particular type. For example, we cannot define a method like
172///   `into_kv` generically for all `BorrowType`, or once for all types that
173///   carry a lifetime, because we want it to return `&'a` references.
174///   Therefore, we define it only for the least powerful type `Immut<'a>`.
175/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
176///   Therefore, we have to explicitly call `reborrow` on a more powerful
177///   `NodeRef` in order to reach a method like `into_kv`.
178///
179/// All methods on `NodeRef` that return some kind of reference, either:
180/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
181///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
182/// - Take `self` by reference, and (implicitly) return that reference's
183///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
184///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
185///   as the returned reference is used.
186///   The methods supporting insert bend this rule by returning a raw pointer,
187///   i.e., a reference without any lifetime.
pub(super) struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    /// Carries `BorrowType` and `Type` without storing any data.
    _marker: PhantomData<(BorrowType, Type)>,
}
200
/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub(super) type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
205
// Written by hand rather than derived so that `K` and `V` themselves need not
// be `Copy`/`Clone`: an immutable `NodeRef` is just a pointer plus a height.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
212
// SAFETY: sharing a `&NodeRef` across threads can only hand out shared access
// to the keys and values, so `K: Sync, V: Sync` suffices for every borrow type.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

// SAFETY: an `Immut` reference moved to another thread only permits reads.
unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Immut<'_>, K, V, Type> {}
// SAFETY: the remaining borrow types carry (some form of) exclusive access, so
// sending one to another thread moves that exclusive access with it.
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Mut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::ValMut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
220
221impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
222    pub(super) fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
223        Self::from_new_leaf(LeafNode::new(alloc))
224    }
225
226    fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
227        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
228    }
229}
230
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Allocates a new internal node whose only edge points at `child`,
    /// making the result one level taller than `child`.
    fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
        // SAFETY of `InternalNode::new`: the required first edge is written
        // immediately below, before the node is ever traversed.
        let mut new_node = unsafe { InternalNode::new(alloc) };
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal<A: Allocator + Clone>(
        internal: Box<InternalNode<K, V>, A>,
        height: usize,
    ) -> Self {
        debug_assert!(height > 0);
        let node = NonNull::from(Box::leak(internal)).cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        // Make the already-written children point back at their new parent.
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}
251
252impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
253    /// Unpack a node reference that was packed as `NodeRef::parent`.
254    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
255        debug_assert!(height > 0);
256        NodeRef { height, node: node.cast(), _marker: PhantomData }
257    }
258}
259
260impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
261    /// Exposes the data of an internal node.
262    ///
263    /// Returns a raw ptr to avoid invalidating other references to this node.
264    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
265        // SAFETY: the static node type is `Internal`.
266        this.node.as_ptr() as *mut InternalNode<K, V>
267    }
268}
269
270impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
271    /// Borrows exclusive access to the data of an internal node.
272    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
273        let ptr = Self::as_internal_ptr(self);
274        unsafe { &mut *ptr }
275    }
276}
277
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub(super) fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub(super) fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub(super) fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
313
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(super) fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        // Compile-time check that this borrow type is allowed to traverse the tree.
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is `Some`.
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns a handle to the leftmost edge of the node.
    pub(super) fn first_edge(self) -> Handle<Self, marker::Edge> {
        // SAFETY: edge index 0 exists in every node (a node of len n has n + 1 edges).
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns a handle to the rightmost edge of the node.
    pub(super) fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        // SAFETY: edge index `len` is the last of the node's `len + 1` edges.
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub(super) fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        // SAFETY: just asserted that KV index 0 is in bounds.
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub(super) fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        // SAFETY: just asserted that KV index `len - 1` is in bounds.
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
367
368impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
369    /// Could be a public implementation of PartialEq, but only used in this module.
370    fn eq(&self, other: &Self) -> bool {
371        let Self { node, height, _marker } = self;
372        if node.eq(&other.node) {
373            debug_assert_eq!(*height, other.height);
374            true
375        } else {
376            false
377        }
378    }
379}
380
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    pub(super) fn keys(&self) -> &[K] {
        let leaf = self.into_leaf();
        // SAFETY: by the struct invariant, the first `len` keys are initialized
        // and `len <= CAPACITY`, so the slice index is in bounds.
        unsafe { leaf.keys.get_unchecked(..usize::from(leaf.len)).assume_init_ref() }
    }
}
395
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub(super) unsafe fn deallocate_and_ascend<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        let height = self.height;
        let node = self.node;
        // Read the parent link *before* freeing the node's memory.
        let ret = self.ascend().ok();
        unsafe {
            // The layout must match the one the node was allocated with,
            // which is determined by whether it is internal (height > 0).
            alloc.deallocate(
                node.cast(),
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}
420
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    ///
    /// # Safety
    ///
    /// The caller must not use the returned reference in any of the ways
    /// described above that would invalidate `self`.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Returns a dormant copy of this node with its lifetime erased which can
    /// be reawakened later.
    pub(super) fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
456
457impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
458    /// Revert to the unique borrow initially captured.
459    ///
460    /// # Safety
461    ///
462    /// The reborrow must have ended, i.e., the reference returned by `new` and
463    /// all pointers and references derived from it, must not be used anymore.
464    pub(super) unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
465        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
466    }
467}
468
469impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
470    /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
471    fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
472        let ptr = Self::as_leaf_ptr(self);
473        // SAFETY: we have exclusive access to the entire node.
474        unsafe { &mut *ptr }
475    }
476}
477
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element or slice of the node's key storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}
507
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    /// (the `edges` array holds `2 * B == CAPACITY + 1` entries).
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
523
impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and an exclusive reference to the
    /// value at position `idx`, with the lifetime of the `ValMut` borrow.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { &raw const (*leaf).keys };
        let vals = unsafe { &raw mut (*leaf).vals };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}
542
543impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
544    /// Borrows exclusive access to the length of the node.
545    pub(super) fn len_mut(&mut self) -> &mut u16 {
546        &mut self.as_leaf_mut().len
547    }
548}
549
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Rewrites the parent pointer and parent index of each child edge in `range`
    /// to point back at this node.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    /// Rewrites the parent links of all `len + 1` children of this node.
    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // SAFETY: 0..=len covers exactly the node's valid edge indices.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}
565
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        unsafe { (*leaf).parent = Some(parent) };
        // The cast cannot truncate: a valid edge index is at most 2 * B - 1,
        // which easily fits in u16.
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}
575
576impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
577    /// Clears the root's link to its parent edge.
578    fn clear_parent_link(&mut self) {
579        let mut root_node = self.borrow_mut();
580        let leaf = root_node.as_leaf_mut();
581        leaf.parent = None;
582    }
583}
584
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub(super) fn new<A: Allocator + Clone>(alloc: A) -> Self {
        NodeRef::new_leaf(alloc).forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub(super) fn push_internal_level<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // Replace the root in place; `take_mut` moves the old root out of `self`
        // and stores the new internal node back into it.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Does not invalidate any handles or references pointing into the subtree
    /// rooted at the first child of `self`.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub(super) fn pop_internal_level<A: Allocator + Clone>(&mut self, alloc: A) {
        assert!(self.height > 0);

        // Remember the old root so it can be freed after `self` is redirected.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        self.clear_parent_link();

        unsafe {
            alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
632
633impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
634    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
635    /// because the return value cannot be used to destroy the root, and there
636    /// cannot be other references to the tree.
637    pub(super) fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
638        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
639    }
640
641    /// Slightly mutably borrows the owned root node.
642    pub(super) fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
643        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
644    }
645
646    /// Irreversibly transitions to a reference that permits traversal and offers
647    /// destructive methods and little else.
648    pub(super) fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
649        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
650    }
651}
652
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node, and returns
    /// a handle to the inserted value.
    ///
    /// Panics if the node is already full.
    ///
    /// # Safety
    ///
    /// The returned handle has an unbound lifetime.
    pub(super) unsafe fn push_with_handle<'b>(
        &mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'b>, K, V, marker::Leaf>, marker::KV> {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY` was just asserted, so both writes are in bounds.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            Handle::new_kv(
                NodeRef { height: self.height, node: self.node, _marker: PhantomData },
                idx,
            )
        }
    }

    /// Adds a key-value pair to the end of the node, and returns
    /// the mutable reference of the inserted value.
    pub(super) fn push(&mut self, key: K, val: V) -> *mut V {
        // SAFETY: The unbound handle is no longer accessible.
        unsafe { self.push_with_handle(key, val).into_val_mut() }
    }
}
686
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    ///
    /// Panics if the node is already full or if `edge` does not sit exactly
    /// one level below this node (uniform-depth invariant).
    pub(super) fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY`, so `idx + 1 < CAPACITY + 1` is a valid edge index.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            self.edge_area_mut(idx + 1).write(edge.node);
            // Make the newly adopted child point back at this node.
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
705
706impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
707    /// Removes any static information asserting that this node is a `Leaf` node.
708    pub(super) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
709        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
710    }
711}
712
713impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
714    /// Removes any static information asserting that this node is an `Internal` node.
715    pub(super) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
716        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
717    }
718}
719
720impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
721    /// Checks whether a node is an `Internal` node or a `Leaf` node.
722    pub(super) fn force(
723        self,
724    ) -> ForceResult<
725        NodeRef<BorrowType, K, V, marker::Leaf>,
726        NodeRef<BorrowType, K, V, marker::Internal>,
727    > {
728        if self.height == 0 {
729            ForceResult::Leaf(NodeRef {
730                height: self.height,
731                node: self.node,
732                _marker: PhantomData,
733            })
734        } else {
735            ForceResult::Internal(NodeRef {
736                height: self.height,
737                node: self.node,
738                _marker: PhantomData,
739            })
740        }
741    }
742}
743
744impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
745    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
746    pub(super) unsafe fn cast_to_leaf_unchecked(
747        self,
748    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
749        debug_assert!(self.height == 0);
750        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
751    }
752
753    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
754    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
755        debug_assert!(self.height > 0);
756        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
757    }
758}
759
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub(super) struct Handle<Node, Type> {
    // The node this handle points into.
    node: Node,
    // KV index (for `Type = KV`) or edge index (for `Type = Edge`) within `node`.
    idx: usize,
    // Zero-sized tag recording whether this handle refers to a KV or an edge.
    _marker: PhantomData<Type>,
}
773
// A handle is freely copyable whenever the node reference inside it is.
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
782
impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to,
    /// discarding the position information.
    pub(super) fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node.
    pub(super) fn idx(&self) -> usize {
        self.idx
    }
}
794
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub(super) unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    /// Returns a handle to the edge directly to the left of this KV.
    pub(super) fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: a KV at `idx` implies `idx < len`, so edge index `idx <= len`.
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    /// Returns a handle to the edge directly to the right of this KV.
    pub(super) fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: a KV at `idx` implies `idx < len`, so edge index `idx + 1 <= len`.
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}
812
// Two handles are equal when they point at the same position of the same node.
impl<BorrowType, K, V, NodeType, HandleType> PartialEq
    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    fn eq(&self, other: &Self) -> bool {
        // Exhaustive destructuring so that adding a field to `Handle` forces
        // this comparison to be revisited.
        let Self { node, idx, _marker } = self;
        node.eq(&other.node) && *idx == other.idx
    }
}
821
impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another immutable handle on the same location,
    /// borrowing for the (shorter) lifetime of `self`.
    pub(super) fn reborrow(
        &self,
    ) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}
833
impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// For details, see `NodeRef::reborrow_mut`.
    pub(super) unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }

    /// Returns a dormant copy of this handle which can be reawakened later.
    ///
    /// See `DormantMutRef` for more details.
    pub(super) fn dormant(
        &self,
    ) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
        Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData }
    }
}
856
impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub(super) unsafe fn awaken<'a>(
        self,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
        // Position is preserved; only the borrow state of the node reference changes.
        Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData }
    }
}
870
871impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
872    /// Creates a new handle to an edge in `node`.
873    /// Unsafe because the caller must ensure that `idx <= node.len()`.
874    pub(super) unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
875        debug_assert!(idx <= node.len());
876
877        Handle { node, idx, _marker: PhantomData }
878    }
879
880    pub(super) fn left_kv(
881        self,
882    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
883        if self.idx > 0 {
884            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
885        } else {
886            Err(self)
887        }
888    }
889
890    pub(super) fn right_kv(
891        self,
892    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
893        if self.idx < self.node.len() {
894            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
895        } else {
896            Err(self)
897        }
898    }
899}
900
/// Tags a payload as belonging to the left or the right side of a split
/// (e.g. which child of a `SplitResult` an insertion index refers to).
pub(super) enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
905
906/// Given an edge index where we want to insert into a node filled to capacity,
907/// computes a sensible KV index of a split point and where to perform the insertion.
908/// The goal of the split point is for its key and value to end up in a parent node;
909/// the keys, values and edges to the left of the split point become the left child;
910/// the keys, values and edges to the right of the split point become the right child.
911fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
912    debug_assert!(edge_idx <= CAPACITY);
913    // Rust issue #74834 tries to explain these symmetric rules.
914    match edge_idx {
915        0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
916        EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
917        EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
918        _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
919    }
920}
921
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    unsafe fn insert_fit(
        mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // Shift keys and values at `idx..` one slot to the right, then write
            // the new pair into the freed slot at `idx`.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            // The inserted pair now lives at this edge's index.
            Handle::new_kv(self.node, self.idx)
        }
    }
}
943
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// Returns a dormant handle to the inserted node which can be reawakened
    /// once splitting is complete.
    fn insert<A: Allocator + Clone>(
        self,
        key: K,
        val: V,
        alloc: A,
    ) -> (
        Option<SplitResult<'a, K, V, marker::Leaf>>,
        Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
    ) {
        if self.node.len() < CAPACITY {
            // SAFETY: There is enough space in the node for insertion.
            let handle = unsafe { self.insert_fit(key, val) };
            (None, handle.dormant())
        } else {
            // Node is full: pick a split point and whether the new pair should
            // end up in the left (original) or right (freshly allocated) half.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            let insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            // SAFETY: We just split the node, so there is enough space for
            // insertion.
            let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
            (Some(result), handle)
        }
    }
}
982
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed,
    /// e.g. after edges were shifted around by an insertion or removal.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
994
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // Uniform-depth invariant: the new child hangs one level below this node.
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes to the right of the new KV, hence `idx + 1`.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // Every edge from the inserted one onward has shifted; fix the
            // parent pointers stored in those children.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert<A: Allocator + Clone>(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
        alloc: A,
    ) -> Option<SplitResult<'a, K, V, marker::Internal>> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            None
        } else {
            // Node is full: split it and insert into whichever half the
            // split point directs us to.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            // The split guarantees room for one more pair in either half.
            insertion_edge.insert_fit(key, val, edge);
            Some(result)
        }
    }
}
1046
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is some `SplitResult`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value, which in the case of `SplitResult`
    /// is in the `left` or `right` tree.
    pub(super) fn insert_recursing<A: Allocator + Clone>(
        self,
        key: K,
        value: V,
        alloc: A,
        split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>),
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        // First insert into the leaf; if it had room, we are done.
        let (mut split, handle) = match self.insert(key, value, alloc.clone()) {
            // SAFETY: we have finished splitting and can now re-awaken the
            // handle to the inserted element.
            (None, handle) => return unsafe { handle.awaken() },
            (Some(split), handle) => (split.forget_node_type(), handle),
        };

        // The leaf split: bubble the split-off KV and right subtree up the tree,
        // splitting each full ancestor in turn, until an ancestor has room or
        // the root itself splits (handled by `split_root`).
        loop {
            split = match split.left.ascend() {
                Ok(parent) => {
                    match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
                        // SAFETY: we have finished splitting and can now re-awaken the
                        // handle to the inserted element.
                        None => return unsafe { handle.awaken() },
                        Some(split) => split.forget_node_type(),
                    }
                }
                Err(root) => {
                    split_root(SplitResult { left: root, ..split });
                    // SAFETY: we have finished splitting and can now re-awaken the
                    // handle to the inserted element.
                    return unsafe { handle.awaken() };
                }
            };
        }
    }
}
1089
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(super) fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        // Compile-time check: borrow types that forbid traversal (see
        // `BorrowType::TRAVERSAL_PERMIT`) cannot call this method.
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        // The child sits one level below its parent (uniform-depth invariant).
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}
1116
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns shared references to the key and value at this handle's position,
    /// borrowing for the full lifetime `'a` of the underlying tree borrow.
    pub(super) fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY (per the module invariant): the first `len` keys and values
        // of a node are initialized, and `idx < len`.
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1126
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns a mutable reference to the key, borrowing from `self`.
    pub(super) fn key_mut(&mut self) -> &mut K {
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Consumes the handle and returns a mutable reference to the value
    /// for the full lifetime `'a`.
    pub(super) fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }

    /// Consumes the handle and returns mutable references to both key and value
    /// for the full lifetime `'a`.
    pub(super) fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        let k = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() };
        let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
        (k, v)
    }
}
1146
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns a shared reference to the key and a mutable reference to the value,
    /// as permitted by the `ValMut` borrow type.
    pub(super) fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
1152
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns mutable references to the key and value, borrowing from `self`.
    pub(super) fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replaces the key and value that the KV handle refers to.
    pub(super) fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        // `mem::replace` swaps in the new values and hands back the old ones.
        (mem::replace(key, k), mem::replace(val, v))
    }
}
1172
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    pub(super) unsafe fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // `assume_init_read` moves the values out bitwise; the slots are
            // left logically uninitialized, which is fine for a dying tree.
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    #[inline]
    pub(super) unsafe fn drop_key_val(mut self) {
        // Run the destructor of the value even if the destructor of the key panics.
        struct Dropper<'a, T>(&'a mut MaybeUninit<T>);
        impl<T> Drop for Dropper<'_, T> {
            #[inline]
            fn drop(&mut self) {
                unsafe {
                    self.0.assume_init_drop();
                }
            }
        }

        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            let key = leaf.keys.get_unchecked_mut(self.idx);
            let val = leaf.vals.get_unchecked_mut(self.idx);
            // The guard ensures `val` is dropped even if `key`'s drop unwinds.
            let _guard = Dropper(val);
            key.assume_init_drop();
            // dropping the guard will drop the value
        }
    }
}
1214
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Reads out the KV at this handle, moves everything to its right into
    /// `new_node`, truncates `self.node` to the part left of the handle, and
    /// returns the extracted KV.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Everything strictly right of the split KV goes to the new node.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Move the split KV out; its slot becomes logically uninitialized
            // but is excluded from both halves by the length updates below.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            // Truncate the original node to the part left of the split KV.
            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1241
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub(super) fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new(alloc);

        // Leaves carry no edges, so moving the KV data is all there is to do.
        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub(super) fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            // Shift everything right of `idx` one slot to the left, extracting
            // the KV at `idx`.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            ((k, v), self.left_edge())
        }
    }
}
1276
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub(super) fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new(alloc);
            // Move the KV data exactly as for a leaf split...
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            // ...then also move the edges right of the split KV. With `n` KVs
            // there are `n + 1` edges, hence the `+ 1` on both ranges.
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            // The new sibling sits at the same height as the node being split.
            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
1306
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub(super) struct BalancingContext<'a, K, V> {
    // Handle on the KV in the parent node that separates the two children.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // Child hanging off the edge to the left of the parent KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // Child hanging off the edge to the right of the parent KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1314
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Builds a balancing context around this parent KV and its two children.
    pub(super) fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // Bitwise-duplicate the handle so we can descend through both of its
        // edges while also keeping the parent handle itself. The three parts
        // alias the same mutable borrow, which the context keeps contained.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
1326
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub(super) fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `ptr::read` duplicates `self`/the parent handle bitwise so that one
        // copy can ascend/descend while the other is stored in the context.
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // A parent is internal and internal nodes hold at least one KV,
                    // so one of the two lookups above must succeed.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
1363
impl<'a, K, V> BalancingContext<'a, K, V> {
    /// Returns the number of KVs in the left child.
    pub(super) fn left_child_len(&self) -> usize {
        self.left_child.len()
    }

    /// Returns the number of KVs in the right child.
    pub(super) fn right_child_len(&self) -> usize {
        self.right_child.len()
    }

    /// Consumes the context, keeping only the left child.
    pub(super) fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.left_child
    }

    /// Consumes the context, keeping only the right child.
    pub(super) fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.right_child
    }

    /// Returns whether merging is possible, i.e., whether there is enough room
    /// in a node to combine the central KV with both adjacent child nodes.
    pub(super) fn can_merge(&self) -> bool {
        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
    }
}
1387
impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Moves the parent's key-value pair at this context's edge, plus the
    /// entire contents of the right child, into the left child; deallocates
    /// the right child; and shrinks the parent by one element. The `result`
    /// closure then picks a return value from the shrunk parent node and the
    /// merged (left) child node.
    ///
    /// Panics if the merged contents would not fit in a single node.
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
        A: Allocator,
    >(
        self,
        result: F,
        alloc: A,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        // The left child absorbs the parent's KV plus everything in the right child.
        let new_left_len = old_left_len + 1 + right_len;

        assert!(new_left_len <= CAPACITY);

        // SAFETY: the assertion above guarantees the left child has room for
        // everything moved into it; all other indices stay within each node's
        // current length as reported by `len()`.
        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // Pull the separating key down from the parent, then append the
            // right child's keys after it.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            // Same for the values.
            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Discard the parent's edge to the right child (the node itself is
            // deallocated below) and close the gap in the parent.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                // Internal children also transfer their edges.
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_parent<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_child_edge<A: Allocator + Clone>(
        self,
        track_edge_idx: LeftOrRight<usize>,
        alloc: A,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        // The tracked edge must exist in whichever child it refers to.
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child(alloc);
        // Edges of the left child keep their index; edges of the right child
        // are shifted past the left child's elements and the parent's KV.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        // SAFETY: `new_idx` is at most the merged child's new length, as
        // checked by the assertion above.
        unsafe { Handle::new_edge(child, new_idx) }
    }

    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub(super) fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        // SAFETY: the right child grew by one element, so every one of its
        // edges shifted one position to the right and remains in bounds.
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }

    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub(super) fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        // SAFETY: the left child only grew, so the tracked edge index is still valid.
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }

    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    pub(super) fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        // SAFETY: the assertions below guarantee every move stays within each
        // node's capacity and current contents.
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                // Only `count - 1` pairs move directly; the leftmost stolen
                // pair passes through the parent below.
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the leftmost stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            // Internal nodes must also move their edges; leaves have none.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Both children sit at the same height, so they are either
                // both internal or both leaves.
                _ => unreachable!(),
            }
        }
    }

    /// The symmetric clone of `bulk_steal_left`.
    pub(super) fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        // SAFETY: the assertions below guarantee every move stays within each
        // node's capacity and current contents.
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the rightmost stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one
                // (the rightmost stolen pair went through the parent above).
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            // Internal nodes must also move their edges; leaves have none.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Both children sit at the same height, so they are either
                // both internal or both leaves.
                _ => unreachable!(),
            }
        }
    }
}
1650
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
    /// Erases the handle's static node-type information, keeping the same
    /// node and edge index.
    pub(super) fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
        // SAFETY: the index was valid for the typed handle, and forgetting
        // the node type changes neither the node nor its length.
        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
    }
}
1658
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
    /// Erases the handle's static node-type information, keeping the same
    /// node and edge index.
    pub(super) fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
        // SAFETY: the index was valid for the typed handle, and forgetting
        // the node type changes neither the node nor its length.
        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
    }
}
1666
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
    /// Erases the handle's static node-type information, keeping the same
    /// node and key-value index.
    pub(super) fn forget_node_type(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
        // SAFETY: the index was valid for the typed handle, and forgetting
        // the node type changes neither the node nor its length.
        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
    }
}
1674
impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
    pub(super) fn force(
        self,
    ) -> ForceResult<
        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
    > {
        // Delegate to the node's own `force` and rewrap the typed node in a
        // handle carrying the same index.
        match self.node.force() {
            ForceResult::Leaf(node) => {
                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
            }
            ForceResult::Internal(node) => {
                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
            }
        }
    }
}
1693
impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
    ///
    /// # Safety
    /// The node this handle points to must actually be a leaf node.
    pub(super) unsafe fn cast_to_leaf_unchecked(
        self,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
        // SAFETY: the caller guarantees the node is a leaf; the index is unchanged.
        let node = unsafe { self.node.cast_to_leaf_unchecked() };
        Handle { node, idx: self.idx, _marker: PhantomData }
    }
}
1703
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub(super) fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at index `self.idx` and beyond moves to `right`.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            // The destination must be empty and at the same height so the
            // moved elements and edges form a well-shaped node.
            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            if new_right_len > 0 {
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                // Move the suffix of keys and values.
                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // Move the edges to the right of `self`, leaving the
                        // first edge of `right` untouched as documented.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Equal heights imply equal node kinds.
                    _ => unreachable!(),
                }
            }
        }
    }
}
1749
/// The result of a `force` call: resolves at runtime whether a node (or a
/// handle into one) is a leaf or an internal node, carrying the
/// correspondingly typed value.
pub(super) enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1754
/// Result of insertion, when a node needed to expand beyond its capacity.
pub(super) struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value that existed before and were split off, to be inserted elsewhere.
    pub kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
1764
impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
    /// Erases the static node-type information of both halves of the split.
    pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
    }
}
1770
impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
    /// Erases the static node-type information of both halves of the split.
    pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
    }
}
1776
/// Zero-sized marker types used as type parameters of `NodeRef` and `Handle`
/// to encode, at compile time, what kind of node is referenced, how it is
/// borrowed, and what a handle points at.
pub(super) mod marker {
    use core::marker::PhantomData;

    /// Node-type tag: statically known to be a leaf.
    pub(crate) enum Leaf {}
    /// Node-type tag: statically known to be internal.
    pub(crate) enum Internal {}
    /// Node-type tag: kind not statically known; resolve with `force`.
    pub(crate) enum LeafOrInternal {}

    /// Borrow-type tag for an owned tree; always refers to a root node
    /// (see the `BorrowType` impl below).
    pub(crate) enum Owned {}
    /// Borrow-type tag — NOTE(review): the name suggests it is used while the
    /// tree is being torn down; confirm against callers outside this chunk.
    pub(crate) enum Dying {}
    /// Borrow-type tag — NOTE(review): presumably a mutable borrow put on
    /// hold; confirm against callers outside this chunk.
    pub(crate) enum DormantMut {}
    /// Borrow-type tag for a shared borrow, carrying its lifetime.
    pub(crate) struct Immut<'a>(PhantomData<&'a ()>);
    /// Borrow-type tag for an exclusive borrow, carrying its lifetime.
    pub(crate) struct Mut<'a>(PhantomData<&'a mut ()>);
    /// Borrow-type tag for an exclusive borrow of values only.
    pub(crate) struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub(crate) trait BorrowType {
        /// If node references of this borrow type allow traversing to other
        /// nodes in the tree, this constant is set to `true`. It can be used
        /// for a compile-time assertion.
        const TRAVERSAL_PERMIT: bool = true;
    }
    impl BorrowType for Owned {
        /// Reject traversal, because it isn't needed. Instead traversal
        /// happens using the result of `borrow_mut`.
        /// By disabling traversal, and only creating new references to roots,
        /// we know that every reference of the `Owned` type is to a root node.
        const TRAVERSAL_PERMIT: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}
    impl BorrowType for DormantMut {}

    /// Handle-position tag: the handle refers to a key-value pair in a node.
    pub(crate) enum KV {}
    /// Handle-position tag: the handle refers to an edge of a node.
    pub(crate) enum Edge {}
}
1813
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    unsafe {
        let len = slice.len();
        debug_assert!(len > idx);
        let hole = slice.as_mut_ptr().add(idx);
        // Open a hole at `idx` by shifting the tail one slot to the right,
        // overwriting the trailing uninitialized element.
        let tail = len - idx - 1;
        if tail > 0 {
            ptr::copy(hole, hole.add(1), tail);
        }
        (*hole).write(val);
    }
}
1829
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    unsafe {
        let len = slice.len();
        debug_assert!(idx < len);
        let hole = slice.as_mut_ptr().add(idx);
        // Read the element out, then close the hole by shifting the tail one
        // slot to the left; the last slot is left uninitialized.
        let ret = (*hole).assume_init_read();
        ptr::copy(hole.add(1), hole, len - idx - 1);
        ret
    }
}
1845
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let base = slice.as_mut_ptr();
        // Overlapping move of the tail down to the front of the slice.
        let moved = slice.len() - distance;
        ptr::copy(base.add(distance), base, moved);
    }
}
1856
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let base = slice.as_mut_ptr();
        // Overlapping move of the head up towards the end of the slice.
        let moved = slice.len() - distance;
        ptr::copy(base, base.add(distance), moved);
    }
}
1867
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    assert!(src.len() == dst.len());
    // Move element by element: `MaybeUninit` never drops its contents, so
    // overwriting `dst` and leaving `src` logically uninitialized is sound.
    for (s, d) in src.iter_mut().zip(dst.iter_mut()) {
        *d = mem::replace(s, MaybeUninit::uninit());
    }
}
1877
1878#[cfg(test)]
1879mod tests;