alloc/collections/btree/
node.rs

1// This is an attempt at an implementation following the ideal
2//
3// ```
4// struct BTreeMap<K, V> {
5//     height: usize,
6//     root: Option<Box<Node<K, V, height>>>
7// }
8//
9// struct Node<K, V, height: usize> {
10//     keys: [K; 2 * B - 1],
11//     vals: [V; 2 * B - 1],
12//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
13//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
14//     len: u16,
15// }
16// ```
17//
18// Since Rust doesn't actually have dependent types and polymorphic recursion,
19// we make do with lots of unsafety.
20
21// A major goal of this module is to avoid complexity by treating the tree as a generic (if
22// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
23// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
24// even what underfull means. However, we do rely on a few invariants:
25//
26// - Trees must have uniform depth/height. This means that every path down to a leaf from a
27//   given node has exactly the same length.
28// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
29//   This implies that even an empty node has at least one edge.
30//   For a leaf node, "having an edge" only means we can identify a position in the node,
31//   since leaf edges are empty and need no data representation. In an internal node,
32//   an edge both identifies a position and contains a pointer to a child node.
33
34use core::marker::PhantomData;
35use core::mem::{self, MaybeUninit};
36use core::num::NonZero;
37use core::ptr::{self, NonNull};
38use core::slice::SliceIndex;
39
40use crate::alloc::{Allocator, Layout};
41use crate::boxed::Box;
42
// The branching factor of the B-tree: a node stores up to `2 * B - 1`
// key-value pairs and, if internal, up to `2 * B` edges.
const B: usize = 6;
/// Maximum number of key-value pairs a single node can store.
pub(super) const CAPACITY: usize = 2 * B - 1;
/// Minimum number of key-value pairs a node may be left with after a split.
pub(super) const MIN_LEN_AFTER_SPLIT: usize = B - 1;
/// Index of the central key-value pair of a full node.
const KV_IDX_CENTER: usize = B - 1;
// Edge indices immediately left/right of the central KV of a full node.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
49
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`, hence `NonNull` (covariant)
    /// rather than a raw `*mut` or reference.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid; the rest are `MaybeUninit` garbage.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
68
impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place: no parent, zero length.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `this` points to a (possibly uninitialized) `LeafNode`
    /// that is valid for writes.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit and are deliberately
            // left untouched; only `parent` and `len` need defined values here.
            (&raw mut (*this).parent).write(None);
            (&raw mut (*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`, initially empty (no parent, `len == 0`).
    fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
        let mut leaf = Box::new_uninit_in(alloc);
        unsafe {
            // SAFETY: `leaf` points to a (still uninitialized) `LeafNode`.
            LeafNode::init(leaf.as_mut_ptr());
            // SAFETY: `leaf` was just initialized by `init`.
            leaf.assume_init()
        }
    }
}
96
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`,
/// which guarantees that `data` is laid out first, at offset zero.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    /// The leaf-shaped prefix shared with `LeafNode` (parent link, len, keys, vals).
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
112
impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode` with an initialized `data` portion
    /// and entirely uninitialized `edges`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge; the caller must do so before the node is used.
    unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
        let mut node = Box::<Self, _>::new_uninit_in(alloc);
        unsafe {
            // SAFETY: argument points to the `node.data` `LeafNode`.
            LeafNode::init(&raw mut (*node.as_mut_ptr()).data);
            // SAFETY: `node.data` was just initialized and `node.edges` is MaybeUninit.
            node.assume_init()
        }
    }
}
130
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>` (whose
/// `repr(C)` layout makes the cast sound either way).
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
138
139// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
140// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
141// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
142// However, whenever a public type wraps `NodeRef`, make sure that it has the
143// correct variance.
144///
145/// A reference to a node.
146///
147/// This type has a number of parameters that control how it acts:
148/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
149///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
150///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
151///      with respect to keys and tree structure, but also allows many
152///      mutable references to values throughout the tree to coexist.
153///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
154///      although insert methods allow a mutable pointer to a value to coexist.
155///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
156///      but does not have a destructor, and must be cleaned up manually.
157///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
158///      but has methods to destroy the tree bit by bit, and ordinary methods,
159///      while not marked as unsafe to call, can invoke UB if called incorrectly.
160///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
161///   effectively applies to the entire tree, not just to the node itself.
162/// - `K` and `V`: These are the types of keys and values stored in the nodes.
163/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
164///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
165///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
166///   `NodeRef` could be pointing to either type of node.
167///   `Type` is named `NodeType` when used outside `NodeRef`.
168///
169/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
170/// exploit static type safety. There are limitations in the way we can apply
171/// such restrictions:
172/// - For each type parameter, we can only define a method either generically
173///   or for one particular type. For example, we cannot define a method like
174///   `into_kv` generically for all `BorrowType`, or once for all types that
175///   carry a lifetime, because we want it to return `&'a` references.
176///   Therefore, we define it only for the least powerful type `Immut<'a>`.
177/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
178///   Therefore, we have to explicitly call `reborrow` on a more powerful
179///   `NodeRef` in order to reach a method like `into_kv`.
180///
181/// All methods on `NodeRef` that return some kind of reference, either:
182/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
183///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
184/// - Take `self` by reference, and (implicitly) return that reference's
185///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
186///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
187///   as the returned reference is used.
188///   The methods supporting insert bend this rule by returning a raw pointer,
189///   i.e., a reference without any lifetime.
pub(super) struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way (the `LeafNode` data sits at
    /// offset zero of an `InternalNode`).
    node: NonNull<LeafNode<K, V>>,
    /// Zero-sized: `BorrowType` and `Type` only exist at the type level.
    _marker: PhantomData<(BorrowType, Type)>,
}
202
/// The root node of an owned tree, of statically unknown depth.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub(super) type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
207
// An `Immut` NodeRef behaves like a shared reference, so it may be freely
// copied; the mutable/owning borrow types must not be, to keep them unique.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
214
// SAFETY: sharing a `NodeRef` between threads only exposes what the borrow
// type allows, so like `&Node` it requires the stored data to be `Sync`.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

// SAFETY: per the type-level docs, `Immut` acts like `&Node`, so sending it
// requires `K: Sync, V: Sync`; the remaining borrow types act like
// `&mut Node`/`Box<Node>` and therefore require `K: Send, V: Send`.
unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Immut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Mut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::ValMut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
222
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    /// Allocates and owns a fresh, empty leaf node.
    pub(super) fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
        Self::from_new_leaf(LeafNode::new(alloc))
    }

    /// Takes ownership of a boxed leaf, dissolving the box into a raw `NodeRef`
    /// of height 0.
    fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
        // The allocator must be dropped, not leaked.  See also `BTreeMap::alloc`.
        let (node, _alloc) = Box::into_non_null_with_allocator(leaf);
        NodeRef { height: 0, node, _marker: PhantomData }
    }
}
234
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Creates a new internal (height > 0) `NodeRef` whose single child is `child`.
    fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
        let mut new_node = unsafe { InternalNode::new(alloc) };
        // Establish the invariant that an internal node has at least one valid edge.
        new_node.edges[0].write(child.node);
        // `child.height + 1` is at least 1, so the `unwrap` cannot fail.
        NodeRef::from_new_internal(new_node, NonZero::new(child.height + 1).unwrap())
    }

    /// Creates a new internal (height > 0) `NodeRef` from an existing internal node.
    fn from_new_internal<A: Allocator + Clone>(
        internal: Box<InternalNode<K, V>, A>,
        height: NonZero<usize>,
    ) -> Self {
        // The allocator must be dropped, not leaked.  See also `BTreeMap::alloc`.
        let (node, _alloc) = Box::into_non_null_with_allocator(internal);
        let mut this = NodeRef { height: height.into(), node: node.cast(), _marker: PhantomData };
        // Point every already-installed child's parent link back at this node.
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}
255
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    /// `height` is the parent's height, which by the uniform-depth invariant
    /// must be non-zero (only internal nodes have children).
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}
263
264impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
265    /// Exposes the data of an internal node.
266    ///
267    /// Returns a raw ptr to avoid invalidating other references to this node.
268    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
269        // SAFETY: the static node type is `Internal`.
270        this.node.as_ptr() as *mut InternalNode<K, V>
271    }
272}
273
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        // SAFETY: the `Mut` borrow type grants exclusive access to the node.
        unsafe { &mut *ptr }
    }
}
281
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub(super) fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub(super) fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub(super) fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    /// (An `InternalNode` starts with a `LeafNode` thanks to `repr(C)`.)
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
317
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(super) fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        // Compile-time check that this borrow type is allowed to navigate the
        // tree (`TRAVERSAL_PERMIT` is declared on `marker::BorrowType`, outside
        // this chunk).
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is set.
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns the handle for the edge before the first KV (edge index 0).
    pub(super) fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns the handle for the edge after the last KV (edge index `len`).
    pub(super) fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub(super) fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub(super) fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
371
372impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
373    /// Could be a public implementation of PartialEq, but only used in this module.
374    fn eq(&self, other: &Self) -> bool {
375        let Self { node, height, _marker } = self;
376        if node.eq(&other.node) {
377            debug_assert_eq!(*height, other.height);
378            true
379        } else {
380            false
381        }
382    }
383}
384
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    pub(super) fn keys(&self) -> &[K] {
        let leaf = self.into_leaf();
        // SAFETY: by the node invariant, the first `len` keys are initialized,
        // and `len <= CAPACITY`, so the unchecked slice is in bounds.
        unsafe { leaf.keys.get_unchecked(..usize::from(leaf.len)).assume_init_ref() }
    }
}
399
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub(super) unsafe fn deallocate_and_ascend<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        // Capture what we need, then ascend *before* freeing the node so the
        // parent link is read while the allocation is still live.
        let height = self.height;
        let node = self.node;
        let ret = self.ascend().ok();
        unsafe {
            // Free with the layout matching the node's true type: internal
            // nodes (height > 0) are larger than leaves.
            alloc.deallocate(
                node.cast(),
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}
424
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node,
    /// tied to the borrow of `self`.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node,
    /// consuming `self` so the reference lives for the full `'a`.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Returns a dormant copy of this node with its lifetime erased which can
    /// be reawakened later (see `NodeRef::awaken`).
    pub(super) fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
460
impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub(super) unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
        // Only the marker type changes; the caller-chosen lifetime `'a` is unbound.
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
472
impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
    /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
    fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}
481
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element or slice of the node's key
    /// storage area. Elements beyond index `len` are uninitialized.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value
    /// storage area. Elements beyond index `len` are uninitialized.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}
511
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    /// Only defined for internal nodes, the sole owners of an `edges` array.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
527
impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and an exclusive reference to the
    /// value at `idx`, both living for `'a`.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { &raw const (*leaf).keys };
        let vals = unsafe { &raw mut (*leaf).vals };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        // SAFETY: the caller guarantees element `idx` is initialized.
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}
546
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    pub(super) fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}
553
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Points the parent link of each child in `range` back at this node.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            // Edge indices run 0..=len, hence `<=` rather than `<`.
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    /// Fixes the parent links of all `len + 1` children.
    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // SAFETY: 0..=len is exactly the set of valid edge indices.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}
569
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        // `parent_idx` is only meaningful while `parent` is set, so write both.
        unsafe { (*leaf).parent = Some(parent) };
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}
579
580impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
581    /// Clears the root's link to its parent edge.
582    fn clear_parent_link(&mut self) {
583        let mut root_node = self.borrow_mut();
584        let leaf = root_node.as_leaf_mut();
585        leaf.parent = None;
586    }
587}
588
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub(super) fn new<A: Allocator + Clone>(alloc: A) -> Self {
        NodeRef::new_leaf(alloc).forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub(super) fn push_internal_level<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // Replace `self` in place: the old root becomes the sole child of a
        // freshly allocated internal node.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Does not invalidate any handles or references pointing into the subtree
    /// rooted at the first child of `self`.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub(super) fn pop_internal_level<A: Allocator + Clone>(&mut self, alloc: A) {
        assert!(self.height > 0);

        // Remember the old root so we can free it after re-rooting the tree.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let mut internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        let internal_node = internal_self.as_internal_mut();
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        // The new root has no parent anymore.
        self.clear_parent_link();

        unsafe {
            // SAFETY: `top` was allocated as an `InternalNode` (height > 0).
            alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
635
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub(super) fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node: values become mutable,
    /// keys and structure stay shared (see the `ValMut` docs on `NodeRef`).
    pub(super) fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub(super) fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
655
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node, and returns
    /// a handle to the inserted value. Panics if the node is full.
    ///
    /// # Safety
    ///
    /// The returned handle has an unbound lifetime.
    pub(super) unsafe fn push_with_handle<'b>(
        &mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'b>, K, V, marker::Leaf>, marker::KV> {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        // Bump the length first; slot `idx` is written just below.
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY` was asserted above.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            Handle::new_kv(
                NodeRef { height: self.height, node: self.node, _marker: PhantomData },
                idx,
            )
        }
    }

    /// Adds a key-value pair to the end of the node, and returns
    /// the mutable reference of the inserted value.
    pub(super) fn push(&mut self, key: K, val: V) -> *mut V {
        // SAFETY: The unbound handle is no longer accessible.
        unsafe { self.push_with_handle(key, val).into_val_mut() }
    }
}
689
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node. Panics if the node is full.
    pub(super) fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        // Uniform-depth invariant: a child sits exactly one level below its parent.
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY`, so `idx + 1 < CAPACITY + 1` is a valid edge slot.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            self.edge_area_mut(idx + 1).write(edge.node);
            // Let the freshly adopted child know who its parent is.
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
708
709impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
710    /// Removes any static information asserting that this node is a `Leaf` node.
711    pub(super) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
712        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
713    }
714}
715
716impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
717    /// Removes any static information asserting that this node is an `Internal` node.
718    pub(super) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
719        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
720    }
721}
722
723impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
724    /// Checks whether a node is an `Internal` node or a `Leaf` node.
725    pub(super) fn force(
726        self,
727    ) -> ForceResult<
728        NodeRef<BorrowType, K, V, marker::Leaf>,
729        NodeRef<BorrowType, K, V, marker::Internal>,
730    > {
731        if self.height == 0 {
732            ForceResult::Leaf(NodeRef {
733                height: self.height,
734                node: self.node,
735                _marker: PhantomData,
736            })
737        } else {
738            ForceResult::Internal(NodeRef {
739                height: self.height,
740                node: self.node,
741                _marker: PhantomData,
742            })
743        }
744    }
745}
746
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    ///
    /// # Safety
    ///
    /// The node must actually be a leaf, i.e. `self.height == 0`
    /// (verified here only by a `debug_assert`).
    pub(super) unsafe fn cast_to_leaf_unchecked(
        self,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    ///
    /// # Safety
    ///
    /// The node must actually be internal, i.e. `self.height > 0`
    /// (verified here only by a `debug_assert`).
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
762
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub(super) struct Handle<Node, Type> {
    // The node this handle points into.
    node: Node,
    // Position within the node: a KV index for `KV` handles,
    // an edge index for `Edge` handles.
    idx: usize,
    // Zero-sized tag distinguishing KV handles from edge handles.
    _marker: PhantomData<Type>,
}
776
// A handle is `Copy`/`Clone` exactly when its node reference is.
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
785
impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to,
    /// consuming the handle.
    pub(super) fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node.
    pub(super) fn idx(&self) -> usize {
        self.idx
    }
}
797
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub(super) unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    /// Returns a handle to the edge immediately to the left of this KV.
    pub(super) fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `self.idx < len`, so it is also a valid edge index (`<= len`).
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    /// Returns a handle to the edge immediately to the right of this KV.
    pub(super) fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `self.idx < len` implies `self.idx + 1 <= len`, a valid edge index.
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}
815
impl<BorrowType, K, V, NodeType, HandleType> PartialEq
    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    fn eq(&self, other: &Self) -> bool {
        // Exhaustive destructuring: adding a field to `Handle` would make
        // this pattern fail to compile, forcing the comparison to be revisited.
        let Self { node, idx, _marker } = self;
        node.eq(&other.node) && *idx == other.idx
    }
}
824
impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another immutable handle on the same location,
    /// borrowed from `self`.
    pub(super) fn reborrow(
        &self,
    ) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}
836
impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// For details, see `NodeRef::reborrow_mut`.
    pub(super) unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }

    /// Returns a dormant copy of this handle which can be reawakened later.
    ///
    /// See `DormantMutRef` for more details.
    pub(super) fn dormant(
        &self,
    ) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
        // The dormant copy carries no lifetime; `awaken` states the contract
        // for turning it back into a usable mutable handle.
        Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData }
    }
}
859
impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub(super) unsafe fn awaken<'a>(
        self,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
        // SAFETY: delegated to `NodeRef::awaken`; the caller upholds the
        // contract stated above.
        Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData }
    }
}
873
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
    /// Creates a new handle to an edge in `node`.
    /// Unsafe because the caller must ensure that `idx <= node.len()`.
    pub(super) unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx <= node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    /// Returns a handle to the KV immediately to the left of this edge,
    /// or `Err(self)` if this is the leftmost edge of the node.
    pub(super) fn left_kv(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
        if self.idx > 0 {
            // SAFETY: `0 < idx <= len`, so `idx - 1` is a valid KV index.
            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
        } else {
            Err(self)
        }
    }

    /// Returns a handle to the KV immediately to the right of this edge,
    /// or `Err(self)` if this is the rightmost edge of the node.
    pub(super) fn right_kv(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
        if self.idx < self.node.len() {
            // SAFETY: `idx < len` is exactly the KV-index requirement.
            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
        } else {
            Err(self)
        }
    }
}
903
/// Says on which side of a reference point (such as a split KV) a payload lies.
pub(super) enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
908
909/// Given an edge index where we want to insert into a node filled to capacity,
910/// computes a sensible KV index of a split point and where to perform the insertion.
911/// The goal of the split point is for its key and value to end up in a parent node;
912/// the keys, values and edges to the left of the split point become the left child;
913/// the keys, values and edges to the right of the split point become the right child.
914fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
915    debug_assert!(edge_idx <= CAPACITY);
916    // Rust issue #74834 tries to explain these symmetric rules.
917    match edge_idx {
918        0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
919        EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
920        EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
921        _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
922    }
923}
924
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    ///
    /// # Safety
    ///
    /// The node must not be full, i.e. `self.node.len() < CAPACITY`
    /// (verified here only by a `debug_assert`).
    unsafe fn insert_fit(
        mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // Shift everything at and after `idx` one slot right, writing the
            // new pair into the gap.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            // The inserted pair now lives at the edge's index.
            Handle::new_kv(self.node, self.idx)
        }
    }
}
946
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// Returns a dormant handle to the inserted node which can be reawakened
    /// once splitting is complete.
    fn insert<A: Allocator + Clone>(
        self,
        key: K,
        val: V,
        alloc: A,
    ) -> (
        Option<SplitResult<'a, K, V, marker::Leaf>>,
        Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
    ) {
        if self.node.len() < CAPACITY {
            // Fast path: the node has room, no split is needed.
            // SAFETY: There is enough space in the node for insertion.
            let handle = unsafe { self.insert_fit(key, val) };
            (None, handle.dormant())
        } else {
            // Full node: split around a computed middle KV, then insert into
            // whichever half the original edge index fell in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            let insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            // SAFETY: We just split the node, so there is enough space for
            // insertion.
            let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
            (Some(result), handle)
        }
    }
}
985
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed,
    /// leaving the child's stored back-link stale.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
997
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // Uniform-depth invariant: the new child sits one level below.
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes to the right of the new KV (`idx + 1`); a node
            // with `new_len` KVs owns `new_len + 1` edges.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // Every edge from the inserted one rightwards shifted one slot,
            // so those children's parent back-links must be refreshed.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert<A: Allocator + Clone>(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
        alloc: A,
    ) -> Option<SplitResult<'a, K, V, marker::Internal>> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            // Fast path: the node has room, no split is needed.
            self.insert_fit(key, val, edge);
            None
        } else {
            // Full node: split around a computed middle KV, then insert into
            // whichever half the original edge index fell in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            Some(result)
        }
    }
}
1049
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is some `SplitResult`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value, which in the case of `SplitResult`
    /// is in the `left` or `right` tree.
    pub(super) fn insert_recursing<A: Allocator + Clone>(
        self,
        key: K,
        value: V,
        alloc: A,
        split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>),
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        // Insert into the leaf first; keep a dormant handle to the new KV so
        // it can be reawakened once all splitting has finished.
        let (mut split, handle) = match self.insert(key, value, alloc.clone()) {
            // SAFETY: we have finished splitting and can now re-awaken the
            // handle to the inserted element.
            (None, handle) => return unsafe { handle.awaken() },
            (Some(split), handle) => (split.forget_node_type(), handle),
        };

        // Bubble the split KV and its right node up the tree until an
        // ancestor absorbs them, or the root itself has to split.
        loop {
            split = match split.left.ascend() {
                Ok(parent) => {
                    match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
                        // SAFETY: we have finished splitting and can now re-awaken the
                        // handle to the inserted element.
                        None => return unsafe { handle.awaken() },
                        Some(split) => split.forget_node_type(),
                    }
                }
                Err(root) => {
                    // The root itself split: hand the pieces to the caller,
                    // who is responsible for growing the tree by one level.
                    split_root(SplitResult { left: root, ..split });
                    // SAFETY: we have finished splitting and can now re-awaken the
                    // handle to the inserted element.
                    return unsafe { handle.awaken() };
                }
            };
        }
    }
}
1092
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(super) fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        // Compile-time check that this borrow type is allowed to traverse
        // between nodes at all.
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        // The child is one level closer to the leaves (uniform-depth invariant).
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}
1119
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Converts the handle into shared references to its key and value,
    /// borrowed for the full lifetime `'a`.
    pub(super) fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: `idx < len`, and the first `len` key/value slots of a node
        // are initialized (see the module invariants above).
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1129
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns a mutable reference to the key, borrowed from `self`.
    pub(super) fn key_mut(&mut self) -> &mut K {
        // SAFETY: a KV handle's index always points at an initialized pair
        // (`new_kv` requires `idx < len`).
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Converts the handle into a mutable reference to its value,
    /// borrowed for the full lifetime `'a`.
    pub(super) fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }

    /// Converts the handle into mutable references to both its key and value,
    /// borrowed for the full lifetime `'a`.
    pub(super) fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        let k = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() };
        let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
        (k, v)
    }
}
1149
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Converts the handle into a shared key reference and a mutable value
    /// reference, both borrowed for the full lifetime `'a`.
    pub(super) fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        // SAFETY: a KV handle's index is a valid, initialized KV position.
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
1155
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns mutable references to both key and value, borrowed from `self`.
    pub(super) fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replaces the key and value that the KV handle refers to,
    /// returning the previous pair.
    pub(super) fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        (mem::replace(key, k), mem::replace(val, v))
    }
}
1175
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    pub(super) unsafe fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // Moves the pair out by bitwise copy; the slots are left logically
            // uninitialized and must not be read (or dropped) again.
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    #[inline]
    pub(super) unsafe fn drop_key_val(mut self) {
        // Run the destructor of the value even if the destructor of the key panics.
        struct Dropper<'a, T>(&'a mut MaybeUninit<T>);
        impl<T> Drop for Dropper<'_, T> {
            #[inline]
            fn drop(&mut self) {
                unsafe {
                    self.0.assume_init_drop();
                }
            }
        }

        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            let key = leaf.keys.get_unchecked_mut(self.idx);
            let val = leaf.vals.get_unchecked_mut(self.idx);
            // The guard drops the value on scope exit, even if the key's
            // destructor panics first (including during unwinding).
            let _guard = Dropper(val);
            key.assume_init_drop();
            // dropping the guard will drop the value
        }
    }
}
1217
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Moves the KV at this handle out (and returns it), moves every KV to
    /// its right into `new_node`, and truncates `self.node` to the KVs on
    /// its left.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Number of KVs that end up in the new (right) node.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Move out the split KV itself; it is destined for the parent.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            // Move everything to the right of the split KV into the new node.
            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            // Truncate: only entries `0..idx` remain in the original node.
            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1244
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub(super) fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new(alloc);

        // Moves out the split KV and everything right of it; also sets both
        // nodes' lengths.
        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub(super) fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            // Shift everything right of `idx` one slot left, extracting the pair.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            // The edges on either side of the removed KV collapse into the
            // edge at the KV's former index.
            ((k, v), self.left_edge())
        }
    }
}
1279
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub(super) fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new(alloc);
            // Move the KVs exactly as for a leaf; this also sets both nodes'
            // lengths.
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            // Move the edges right of the split KV: `new_len` KVs need
            // `new_len + 1` edges.
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            // SAFETY: self is `marker::Internal`, so `self.node.height` is positive
            let height = NonZero::new_unchecked(self.node.height);
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
1310
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub(super) struct BalancingContext<'a, K, V> {
    // The KV in the parent node around which balancing happens.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // The child immediately to the left of the parent KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // The child immediately to the right of the parent KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1318
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Turns this parent KV handle into a balancing context covering the KV
    /// and both of its adjacent children.
    pub(super) fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // The handle is duplicated bitwise so it can be consumed three times:
        // once kept as `parent`, and once each to descend into the two
        // distinct (left and right) children.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
1330
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub(super) fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `self` is duplicated bitwise (`ptr::read`) so it can serve both as
        // the node to ascend from and as a child in the returned context.
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                // There is a KV to the left: `self` becomes the right child.
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                // `self` hangs off the leftmost edge: use the KV to the right.
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // A parent with no KV on either side would be empty.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            // No parent: the node is the root.
            Err(root) => Err(root),
        }
    }
}
1367
impl<'a, K, V> BalancingContext<'a, K, V> {
    /// Returns the number of KVs in the left child.
    pub(super) fn left_child_len(&self) -> usize {
        self.left_child.len()
    }

    /// Returns the number of KVs in the right child.
    pub(super) fn right_child_len(&self) -> usize {
        self.right_child.len()
    }

    /// Consumes the context, keeping only the left child.
    pub(super) fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.left_child
    }

    /// Consumes the context, keeping only the right child.
    pub(super) fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.right_child
    }

    /// Returns whether merging is possible, i.e., whether there is enough room
    /// in a node to combine the central KV with both adjacent child nodes.
    pub(super) fn can_merge(&self) -> bool {
        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
    }
}
1391
impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Moves the parent's key-value pair at `self.parent` plus the entire
    /// contents of the right child into the left child, deallocates the right
    /// child's node, and hands the shrunk parent node and the grown left
    /// child to `result` to pick the return value.
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
        A: Allocator,
    >(
        self,
        result: F,
        alloc: A,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        let new_left_len = old_left_len + 1 + right_len;

        // Panics unless the caller checked `can_merge` first.
        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // Move the separating key from the parent into the left child,
            // followed by all keys of the right child.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            // Same for the values.
            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Drop the parent's edge to the right child and close the gap,
            // then fix up the parent links of the children that shifted.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_parent<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up,
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_child_edge<A: Allocator + Clone>(
        self,
        track_edge_idx: LeftOrRight<usize>,
        alloc: A,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        // The tracked edge must actually exist in the child it names.
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child(alloc);
        // Edges of the former right child end up after the old left contents
        // plus the key-value pair merged in from the parent.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        unsafe { Handle::new_edge(child, new_idx) }
    }

    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub(super) fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }

    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub(super) fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }

    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    pub(super) fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the leftmost stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            // If the children are internal, the stolen pairs bring their
            // right-hand edges (child pointers) along with them.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                _ => unreachable!(),
            }
        }
    }

    /// The symmetric clone of `bulk_steal_left`.
    pub(super) fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the rightmost stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one.
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            // If the children are internal, the stolen pairs bring their
            // left-hand edges (child pointers) along with them.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                _ => unreachable!(),
            }
        }
    }
}
1654
1655impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1656    pub(super) fn forget_node_type(
1657        self,
1658    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1659        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1660    }
1661}
1662
1663impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1664    pub(super) fn forget_node_type(
1665        self,
1666    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1667        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1668    }
1669}
1670
1671impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1672    pub(super) fn forget_node_type(
1673        self,
1674    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1675        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1676    }
1677}
1678
1679impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
1680    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
1681    pub(super) fn force(
1682        self,
1683    ) -> ForceResult<
1684        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
1685        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
1686    > {
1687        match self.node.force() {
1688            ForceResult::Leaf(node) => {
1689                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
1690            }
1691            ForceResult::Internal(node) => {
1692                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
1693            }
1694        }
1695    }
1696}
1697
1698impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
1699    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
1700    pub(super) unsafe fn cast_to_leaf_unchecked(
1701        self,
1702    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
1703        let node = unsafe { self.node.cast_to_leaf_unchecked() };
1704        Handle { node, idx: self.idx, _marker: PhantomData }
1705    }
1706}
1707
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub(super) fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at index `self.idx` and beyond moves into `right`.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            if new_right_len > 0 {
                // Adjust lengths before moving the uninitialized-area contents.
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // Edges land starting at index 1: the first edge of
                        // `right` stays put, per this method's contract.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    _ => unreachable!(),
                }
            }
        }
    }
}
1753
/// The result of resolving a node of statically unknown type into either
/// a leaf or an internal node.
pub(super) enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1758
/// Result of insertion, when a node needed to expand beyond its capacity.
/// The caller is responsible for re-attaching `kv` and `right` to the tree.
pub(super) struct SplitResult<'a, K, V, NodeType> {
    // Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    // Some key and value that existed before and were split off, to be inserted elsewhere.
    pub kv: (K, V),
    // Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
1768
1769impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1770    pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1771        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1772    }
1773}
1774
1775impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1776    pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1777        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1778    }
1779}
1780
pub(super) mod marker {
    use core::marker::PhantomData;

    // Zero-sized types used as the node-type parameter of `NodeRef`,
    // encoding what is statically known about the kind of node referenced.
    pub(crate) enum Leaf {}
    pub(crate) enum Internal {}
    pub(crate) enum LeafOrInternal {}

    // Zero-sized types used as the `BorrowType` parameter of `NodeRef`,
    // encoding how the referenced node may be accessed. `Immut`, `Mut` and
    // `ValMut` carry a lifetime via `PhantomData`, mirroring `&`/`&mut`.
    pub(crate) enum Owned {}
    pub(crate) enum Dying {}
    pub(crate) enum DormantMut {}
    pub(crate) struct Immut<'a>(PhantomData<&'a ()>);
    pub(crate) struct Mut<'a>(PhantomData<&'a mut ()>);
    pub(crate) struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub(crate) trait BorrowType {
        /// If node references of this borrow type allow traversing to other
        /// nodes in the tree, this constant is set to `true`. It can be used
        /// for a compile-time assertion.
        const TRAVERSAL_PERMIT: bool = true;
    }
    impl BorrowType for Owned {
        /// Reject traversal, because it isn't needed. Instead traversal
        /// happens using the result of `borrow_mut`.
        /// By disabling traversal, and only creating new references to roots,
        /// we know that every reference of the `Owned` type is to a root node.
        const TRAVERSAL_PERMIT: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}
    impl BorrowType for DormantMut {}

    // Zero-sized types used as the handle-type parameter of `Handle`:
    // a handle points either at a key-value pair or at an edge.
    pub(crate) enum KV {}
    pub(crate) enum Edge {}
}
1817
/// Inserts a value into a slice of initialized elements followed by one
/// uninitialized element, shifting the elements after `idx` one slot right.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    let len = slice.len();
    debug_assert!(len > idx);
    let base = slice.as_mut_ptr();
    // Number of initialized elements sitting to the right of `idx`.
    let tail = len - idx - 1;
    unsafe {
        if tail > 0 {
            // Open a gap at `idx` by sliding the tail one slot to the right.
            ptr::copy(base.add(idx), base.add(idx + 1), tail);
        }
        (*base.add(idx)).write(val);
    }
}
1833
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    let len = slice.len();
    debug_assert!(idx < len);
    let base = slice.as_mut_ptr();
    unsafe {
        // Read out the element, then slide everything after it one slot left
        // to close the gap; the final slot is left logically uninitialized.
        let removed = (*base.add(idx)).assume_init_read();
        ptr::copy(base.add(idx + 1), base.add(idx), len - idx - 1);
        removed
    }
}
1849
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    let count = slice.len() - distance;
    let base = slice.as_mut_ptr();
    // SAFETY: per the caller's contract, both the source range starting at
    // `distance` and the destination range starting at 0 lie within `slice`.
    unsafe { ptr::copy(base.add(distance), base, count) }
}
1860
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    let count = slice.len() - distance;
    let base = slice.as_mut_ptr();
    // SAFETY: per the caller's contract, both the source range starting at 0
    // and the destination range starting at `distance` lie within `slice`.
    unsafe { ptr::copy(base, base.add(distance), count) }
}
1871
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    let count = src.len();
    assert!(count == dst.len());
    // SAFETY: `src` and `dst` come from two distinct `&mut` borrows, so they
    // cannot overlap, and both are valid for `count` elements.
    unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), count) }
}
1881
1882#[cfg(test)]
1883mod tests;