alloc/collections/btree/
node.rs

1// This is an attempt at an implementation following the ideal
2//
3// ```
4// struct BTreeMap<K, V> {
5//     height: usize,
6//     root: Option<Box<Node<K, V, height>>>
7// }
8//
9// struct Node<K, V, height: usize> {
10//     keys: [K; 2 * B - 1],
11//     vals: [V; 2 * B - 1],
12//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
13//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
14//     len: u16,
15// }
16// ```
17//
18// Since Rust doesn't actually have dependent types and polymorphic recursion,
19// we make do with lots of unsafety.
20
21// A major goal of this module is to avoid complexity by treating the tree as a generic (if
22// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
23// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
24// even what underfull means. However, we do rely on a few invariants:
25//
26// - Trees must have uniform depth/height. This means that every path down to a leaf from a
27//   given node has exactly the same length.
28// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
29//   This implies that even an empty node has at least one edge.
30//   For a leaf node, "having an edge" only means we can identify a position in the node,
31//   since leaf edges are empty and need no data representation. In an internal node,
32//   an edge both identifies a position and contains a pointer to a child node.
33
34use core::marker::PhantomData;
35use core::mem::{self, MaybeUninit};
36use core::ptr::{self, NonNull};
37use core::slice::SliceIndex;
38
39use crate::alloc::{Allocator, Layout};
40use crate::boxed::Box;
41
/// The order parameter of the B-tree: every node stores at most `2 * B - 1`
/// key-value pairs, and an internal node has at most `2 * B` children.
const B: usize = 6;
/// Maximum number of key-value pairs a node can hold.
pub(super) const CAPACITY: usize = 2 * B - 1;
/// Minimum number of key-value pairs left in each half after splitting a full node.
pub(super) const MIN_LEN_AFTER_SPLIT: usize = B - 1;
/// Index of the central key-value pair of a full node.
// NOTE(review): presumably the pair promoted to the parent on a split — confirm at the split site.
const KV_IDX_CENTER: usize = B - 1;
/// Edge index immediately left of the central key-value pair of a full node.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
/// Edge index immediately right of the central key-value pair of a full node.
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
48
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    /// `None` here means the node is the root of its tree.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
67
impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place: zero length and no parent link.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `this` points to a (possibly uninitialized) `LeafNode`
    /// that is valid for writes.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit, so they stay uninitialized.
            // Raw-pointer writes: `this` may not be initialized, so we must not create
            // a reference to it or drop the old field contents.
            (&raw mut (*this).parent).write(None);
            (&raw mut (*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode` (empty, no parent).
    fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
        let mut leaf = Box::new_uninit_in(alloc);
        unsafe {
            // SAFETY: `leaf` points to a `LeafNode` valid for writes
            LeafNode::init(leaf.as_mut_ptr());
            // SAFETY: `leaf` was just initialized (the remaining fields are MaybeUninit)
            leaf.assume_init()
        }
    }
}
95
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    /// The leaf portion. Must remain the first field: the pointer-cast property
    /// documented above relies on `repr(C)` placing it at offset 0.
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
111
impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge; the caller must write `edges[0]` before the node
    /// is used as a regular internal node.
    unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
        let mut node = Box::<Self, _>::new_uninit_in(alloc);
        unsafe {
            // SAFETY: argument points to the `node.data` `LeafNode`, valid for writes.
            LeafNode::init(&raw mut (*node.as_mut_ptr()).data);
            // SAFETY: `node.data` was just initialized and `node.edges` is MaybeUninit.
            node.assume_init()
        }
    }
}
129
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
/// (The latter is sound because of the `repr(C)` cast property of `InternalNode`.)
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
137
// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that control how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub(super) struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    /// Carries `BorrowType` and `Type` without storing any data; `NonNull` above
    /// already makes the struct covariant in `K` and `V` (see the N.B. at the top).
    _marker: PhantomData<(BorrowType, Type)>,
}
201
/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub(super) type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
206
// Only the shared-borrow flavor may be freely duplicated: duplicating any other
// `BorrowType` would let exclusive or owning references coexist.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
213
// SAFETY: sharing a `NodeRef` of any borrow type only ever exposes the tree's
// keys and values by reference, so it is as safe as sharing `&Node`, which
// requires `K: Sync, V: Sync`.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

// SAFETY: an `Immut` NodeRef behaves like `&Node`, which is `Send` when the
// referents are `Sync`.
unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Immut<'_>, K, V, Type> {}
// SAFETY: the remaining borrow types emulate exclusive or owning references
// (`&mut Node` / `Box<Node>`), which are `Send` when `K: Send, V: Send`.
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Mut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::ValMut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
221
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    /// Allocates and owns a new, empty leaf node.
    pub(super) fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
        Self::from_new_leaf(LeafNode::new(alloc))
    }

    /// Takes ownership of a freshly boxed leaf node, discarding the box
    /// (and its allocator) but keeping the allocation.
    fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
        // The allocator must be dropped, not leaked.  See also `BTreeMap::alloc`.
        let (leaf, _alloc) = Box::into_raw_with_allocator(leaf);
        // SAFETY: the node was just allocated.
        let node = unsafe { NonNull::new_unchecked(leaf) };
        // A leaf is by definition at height 0.
        NodeRef { height: 0, node, _marker: PhantomData }
    }
}
235
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Allocates a new internal node whose single edge points at `child`,
    /// and takes ownership of it. The new node sits one level above `child`.
    fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
        // `InternalNode::new` is unsafe because it leaves all edges uninitialized;
        // writing `edges[0]` immediately below restores the invariant.
        let mut new_node = unsafe { InternalNode::new(alloc) };
        new_node.edges[0].write(child.node);
        unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
    }

    /// Takes ownership of a freshly boxed internal node and fixes up the
    /// parent links of all its children.
    ///
    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal<A: Allocator + Clone>(
        internal: Box<InternalNode<K, V>, A>,
        height: usize,
    ) -> Self {
        debug_assert!(height > 0);
        // The allocator must be dropped, not leaked.  See also `BTreeMap::alloc`.
        let (internal, _alloc) = Box::into_raw_with_allocator(internal);
        // SAFETY: the node was just allocated.
        let internal = unsafe { NonNull::new_unchecked(internal) };
        // Cast to the `LeafNode` portion; sound because `InternalNode` is `repr(C)`.
        let node = internal.cast();
        let mut this = NodeRef { height, node, _marker: PhantomData };
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}
260
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    /// `height` must be the child's height plus one (see `ascend`).
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        // Internal nodes are always above height 0.
        debug_assert!(height > 0);
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}
268
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`, so the `LeafNode` pointer
        // really points at the `data` field of an `InternalNode` (`repr(C)`).
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}
278
279impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
280    /// Borrows exclusive access to the data of an internal node.
281    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
282        let ptr = Self::as_internal_ptr(self);
283        unsafe { &mut *ptr }
284    }
285}
286
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub(super) fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub(super) fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub(super) fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
322
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(super) fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        // Rejects borrow types that do not permit traversal at compile time.
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                // The parent is one level closer to the root.
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is `Some`
                // (see the `LeafNode::parent_idx` field docs).
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns a handle to the edge before the first key-value pair.
    pub(super) fn first_edge(self) -> Handle<Self, marker::Edge> {
        // SAFETY: edge index 0 always exists (`len + 1` edges, `len >= 0`).
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns a handle to the edge after the last key-value pair.
    pub(super) fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        // SAFETY: edge index `len` always exists (`len + 1` edges).
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub(super) fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        // SAFETY: just asserted that KV index 0 is in bounds.
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub(super) fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        // SAFETY: just asserted that KV index `len - 1` is in bounds.
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
376
377impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
378    /// Could be a public implementation of PartialEq, but only used in this module.
379    fn eq(&self, other: &Self) -> bool {
380        let Self { node, height, _marker } = self;
381        if node.eq(&other.node) {
382            debug_assert_eq!(*height, other.height);
383            true
384        } else {
385            false
386        }
387    }
388}
389
390impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
391    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
392    fn into_leaf(self) -> &'a LeafNode<K, V> {
393        let ptr = Self::as_leaf_ptr(&self);
394        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
395        unsafe { &*ptr }
396    }
397
398    /// Borrows a view into the keys stored in the node.
399    pub(super) fn keys(&self) -> &[K] {
400        let leaf = self.into_leaf();
401        unsafe { leaf.keys.get_unchecked(..usize::from(leaf.len)).assume_init_ref() }
402    }
403}
404
impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    pub(super) unsafe fn deallocate_and_ascend<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        // Copy out everything we need *before* ascending consumes `self` and
        // before the allocation is returned to the allocator.
        let height = self.height;
        let node = self.node;
        let ret = self.ascend().ok();
        unsafe {
            // The layout must match the node's true type, which `Type =
            // LeafOrInternal` does not record; the height distinguishes them.
            alloc.deallocate(
                node.cast(),
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}
429
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node,
    /// consuming `self` so the borrow lives for the full `'a`.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Returns a dormant copy of this node with its lifetime erased which can
    /// be reawakened later (see `DormantMut::awaken` for the safety contract).
    pub(super) fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
465
impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub(super) unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
        // Only the marker type changes; the pointer and height are reused as-is.
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
477
478impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
479    /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
480    fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
481        let ptr = Self::as_leaf_ptr(self);
482        // SAFETY: we have exclusive access to the entire node.
483        unsafe { &mut *ptr }
484    }
485}
486
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    /// Returned slots may be uninitialized; only the first `len` are valid.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    /// Returned slots may be uninitialized; only the first `len` are valid.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}
516
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    /// Returned slots may be uninitialized; only the first `len + 1` are valid.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
532
impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and an exclusive reference to the
    /// value at `idx`, both living for the full `'a`.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&mut self);
        let keys = unsafe { &raw const (*leaf).keys };
        let vals = unsafe { &raw mut (*leaf).vals };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        // SAFETY: caller guarantees element `idx` is initialized and in bounds.
        let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
        let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
        (key, val)
    }
}
551
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    /// Callers that change it must keep the key/value/edge invariants in sync.
    pub(super) fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}
558
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Rewrites the `parent`/`parent_idx` fields of the children at the given
    /// edge indices so they point back at this node.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            // Edge indices run 0..=len.
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    /// Rewrites the parent links of all `len + 1` children of this node.
    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // SAFETY: 0..=len is exactly the set of valid edge indices.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}
574
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_ptr(self);
        // Raw-pointer writes keep outstanding references to the node valid.
        unsafe { (*leaf).parent = Some(parent) };
        unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
    }
}
584
585impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
586    /// Clears the root's link to its parent edge.
587    fn clear_parent_link(&mut self) {
588        let mut root_node = self.borrow_mut();
589        let leaf = root_node.as_leaf_mut();
590        leaf.parent = None;
591    }
592}
593
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub(super) fn new<A: Allocator + Clone>(alloc: A) -> Self {
        NodeRef::new_leaf(alloc).forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub(super) fn push_internal_level<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // `take_mut` moves the old root out of `self` so it can be consumed by
        // `new_internal`, then stores the new root back.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Does not invalidate any handles or references pointing into the subtree
    /// rooted at the first child of `self`.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub(super) fn pop_internal_level<A: Allocator + Clone>(&mut self, alloc: A) {
        assert!(self.height > 0);

        // Remember the old root's allocation so we can free it after rewiring.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        // The first child is the root now and must not point at its old parent.
        self.clear_parent_link();

        unsafe {
            // SAFETY: `top` was allocated as an `InternalNode` (height was > 0)
            // and is no longer reachable from the tree.
            alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
641
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub(super) fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node: values become mutable,
    /// keys and structure stay shared (see the `ValMut` docs on `NodeRef`).
    pub(super) fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub(super) fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
661
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node, and returns
    /// a handle to the inserted value.
    ///
    /// Panics if the node is already full.
    ///
    /// # Safety
    ///
    /// The returned handle has an unbound lifetime.
    pub(super) unsafe fn push_with_handle<'b>(
        &mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'b>, K, V, marker::Leaf>, marker::KV> {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // SAFETY: `idx < CAPACITY` was just asserted, and slot `idx` was
            // unoccupied before the length bump.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            Handle::new_kv(
                NodeRef { height: self.height, node: self.node, _marker: PhantomData },
                idx,
            )
        }
    }

    /// Adds a key-value pair to the end of the node, and returns
    /// the mutable reference of the inserted value.
    pub(super) fn push(&mut self, key: K, val: V) -> *mut V {
        // SAFETY: The unbound handle is no longer accessible.
        unsafe { self.push_with_handle(key, val).into_val_mut() }
    }
}
695
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    pub(super) fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        // The pushed root becomes a child, so it must sit exactly one level
        // below this node to keep the tree's uniform depth invariant.
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        // Callers must never push into a node that is already full.
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            // A node with `n` KVs has `n + 1` edges; the new edge goes to the
            // right of the new KV, i.e. at `idx + 1`.
            self.edge_area_mut(idx + 1).write(edge.node);
            // The adopted child must point back at its new parent and position.
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
714
715impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
716    /// Removes any static information asserting that this node is a `Leaf` node.
717    pub(super) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
718        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
719    }
720}
721
722impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
723    /// Removes any static information asserting that this node is an `Internal` node.
724    pub(super) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
725        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
726    }
727}
728
729impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
730    /// Checks whether a node is an `Internal` node or a `Leaf` node.
731    pub(super) fn force(
732        self,
733    ) -> ForceResult<
734        NodeRef<BorrowType, K, V, marker::Leaf>,
735        NodeRef<BorrowType, K, V, marker::Internal>,
736    > {
737        if self.height == 0 {
738            ForceResult::Leaf(NodeRef {
739                height: self.height,
740                node: self.node,
741                _marker: PhantomData,
742            })
743        } else {
744            ForceResult::Internal(NodeRef {
745                height: self.height,
746                node: self.node,
747                _marker: PhantomData,
748            })
749        }
750    }
751}
752
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    ///
    /// # Safety
    ///
    /// The node must actually be a leaf, i.e. `self.height == 0`;
    /// this is only verified with a `debug_assert!`.
    pub(super) unsafe fn cast_to_leaf_unchecked(
        self,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    ///
    /// # Safety
    ///
    /// The node must actually be internal, i.e. `self.height > 0`;
    /// this is only verified with a `debug_assert!`.
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
768
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub(super) struct Handle<Node, Type> {
    node: Node,
    // Position within `node`: a KV index or an edge index, depending on `Type`.
    idx: usize,
    // Zero-sized tag recording whether this handle is on a `KV` or an `Edge`.
    _marker: PhantomData<Type>,
}
782
// `Handle` is `Copy` whenever the node reference it wraps is `Copy`.
impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}
791
792impl<Node, Type> Handle<Node, Type> {
793    /// Retrieves the node that contains the edge or key-value pair this handle points to.
794    pub(super) fn into_node(self) -> Node {
795        self.node
796    }
797
798    /// Returns the position of this handle in the node.
799    pub(super) fn idx(&self) -> usize {
800        self.idx
801    }
802}
803
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
    /// Creates a new handle to a key-value pair in `node`.
    /// Unsafe because the caller must ensure that `idx < node.len()`.
    pub(super) unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
        debug_assert!(idx < node.len());

        Handle { node, idx, _marker: PhantomData }
    }

    /// Returns a handle to the edge immediately to the left of this KV.
    pub(super) fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `idx < len`, and a node with `len` KVs has `len + 1` edges,
        // so edge index `idx` is in bounds.
        unsafe { Handle::new_edge(self.node, self.idx) }
    }

    /// Returns a handle to the edge immediately to the right of this KV.
    pub(super) fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
        // SAFETY: `idx < len`, so edge index `idx + 1 <= len` is in bounds.
        unsafe { Handle::new_edge(self.node, self.idx + 1) }
    }
}
821
822impl<BorrowType, K, V, NodeType, HandleType> PartialEq
823    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
824{
825    fn eq(&self, other: &Self) -> bool {
826        let Self { node, idx, _marker } = self;
827        node.eq(&other.node) && *idx == other.idx
828    }
829}
830
impl<BorrowType, K, V, NodeType, HandleType>
    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
{
    /// Temporarily takes out another immutable handle on the same location.
    /// The result borrows from `self` and cannot outlive it.
    pub(super) fn reborrow(
        &self,
    ) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
    }
}
842
impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
    /// Temporarily takes out another mutable handle on the same location. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// For details, see `NodeRef::reborrow_mut`.
    pub(super) unsafe fn reborrow_mut(
        &mut self,
    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        // SAFETY: the caller upholds `NodeRef::reborrow_mut`'s contract.
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }

    /// Returns a dormant copy of this handle which can be reawakened later.
    ///
    /// See `DormantMutRef` for more details.
    pub(super) fn dormant(
        &self,
    ) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
        Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData }
    }
}
865
impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub(super) unsafe fn awaken<'a>(
        self,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
        // SAFETY: forwarded to `NodeRef::awaken`; upheld by this method's own contract.
        Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData }
    }
}
879
880impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
881    /// Creates a new handle to an edge in `node`.
882    /// Unsafe because the caller must ensure that `idx <= node.len()`.
883    pub(super) unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
884        debug_assert!(idx <= node.len());
885
886        Handle { node, idx, _marker: PhantomData }
887    }
888
889    pub(super) fn left_kv(
890        self,
891    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
892        if self.idx > 0 {
893            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
894        } else {
895            Err(self)
896        }
897    }
898
899    pub(super) fn right_kv(
900        self,
901    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
902        if self.idx < self.node.len() {
903            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
904        } else {
905            Err(self)
906        }
907    }
908}
909
/// Distinguishes a left-hand case from a right-hand case; used by `splitpoint`
/// and the balancing code to say which side of a split or parent KV is meant.
pub(super) enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
914
915/// Given an edge index where we want to insert into a node filled to capacity,
916/// computes a sensible KV index of a split point and where to perform the insertion.
917/// The goal of the split point is for its key and value to end up in a parent node;
918/// the keys, values and edges to the left of the split point become the left child;
919/// the keys, values and edges to the right of the split point become the right child.
920fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
921    debug_assert!(edge_idx <= CAPACITY);
922    // Rust issue #74834 tries to explain these symmetric rules.
923    match edge_idx {
924        0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
925        EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
926        EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
927        _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
928    }
929}
930
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    ///
    /// # Safety
    ///
    /// The node must have room for one more pair, i.e. `node.len() < CAPACITY`;
    /// this is only verified with a `debug_assert!`.
    unsafe fn insert_fit(
        mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // Insert the new key and value at `self.idx` within the first
            // `new_len` slots (see `slice_insert`), then commit the new length.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            Handle::new_kv(self.node, self.idx)
        }
    }
}
952
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// Returns a dormant handle to the inserted node which can be reawakened
    /// once splitting is complete.
    fn insert<A: Allocator + Clone>(
        self,
        key: K,
        val: V,
        alloc: A,
    ) -> (
        Option<SplitResult<'a, K, V, marker::Leaf>>,
        Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
    ) {
        if self.node.len() < CAPACITY {
            // SAFETY: There is enough space in the node for insertion.
            let handle = unsafe { self.insert_fit(key, val) };
            (None, handle.dormant())
        } else {
            // Node is full: split around a suitable middle KV, then insert
            // into whichever half the original insertion index ends up in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            let insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            // SAFETY: We just split the node, so there is enough space for
            // insertion.
            let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
            (Some(result), handle)
        }
    }
}
991
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
1003
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // The new child must sit one level below this node (uniform depth).
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // A node with `new_len` KVs owns `new_len + 1` edges; the new edge
            // goes to the right of the new KV.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // The inserted edge and every edge shifted to its right now sit at
            // new positions; update their children's parent backpointers.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    ///
    /// Returns `None` if the pair fit in place, or `Some(SplitResult)` if the
    /// node had to be split.
    fn insert<A: Allocator + Clone>(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
        alloc: A,
    ) -> Option<SplitResult<'a, K, V, marker::Internal>> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            None
        } else {
            // Node is full: split around a suitable middle KV, then insert
            // into whichever half the original insertion index ends up in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            Some(result)
        }
    }
}
1055
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is some `SplitResult`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value, which in the case of `SplitResult`
    /// is in the `left` or `right` tree.
    pub(super) fn insert_recursing<A: Allocator + Clone>(
        self,
        key: K,
        value: V,
        alloc: A,
        split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>),
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        let (mut split, handle) = match self.insert(key, value, alloc.clone()) {
            // SAFETY: we have finished splitting and can now re-awaken the
            // handle to the inserted element.
            (None, handle) => return unsafe { handle.awaken() },
            (Some(split), handle) => (split.forget_node_type(), handle),
        };

        loop {
            // Propagate the split upward: push the middle KV and the new right
            // node into the parent, which may split in turn.
            split = match split.left.ascend() {
                Ok(parent) => {
                    match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
                        // SAFETY: we have finished splitting and can now re-awaken the
                        // handle to the inserted element.
                        None => return unsafe { handle.awaken() },
                        Some(split) => split.forget_node_type(),
                    }
                }
                Err(root) => {
                    // The split reached the root; let the caller install a new one.
                    split_root(SplitResult { left: root, ..split });
                    // SAFETY: we have finished splitting and can now re-awaken the
                    // handle to the inserted element.
                    return unsafe { handle.awaken() };
                }
            };
        }
    }
}
1098
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(super) fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        // Compile-time check that this borrow type is allowed to traverse nodes.
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
        // A child of a node at height `h` lives at height `h - 1`.
        NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
    }
}
1125
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns shared references to the key and value at this handle's position.
    pub(super) fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: the first `len` keys and values of a node are initialized,
        // and `new_kv`'s contract guarantees `idx < len`.
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1135
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns a mutable reference to the key at this handle's position.
    pub(super) fn key_mut(&mut self) -> &mut K {
        // SAFETY: a KV handle refers to an initialized pair (`idx < len` by
        // `new_kv`'s contract).
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Converts the handle into a mutable reference to the value, keeping the
    /// full lifetime of the underlying mutable borrow.
    pub(super) fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        // SAFETY: the first `len` values are initialized and `idx < len`.
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }

    /// Converts the handle into mutable references to both key and value.
    pub(super) fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        let k = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() };
        let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
        (k, v)
    }
}
1155
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Returns a shared reference to the key and a mutable reference to the
    /// value at this handle's position.
    pub(super) fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        // SAFETY: a KV handle refers to an existing pair (`idx < len` by
        // `new_kv`'s contract).
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
1161
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key and value at this handle's position for as long as the
    /// handle itself is borrowed.
    pub(super) fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replaces the key and value that the KV handle refers to.
    pub(super) fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        // The old pair is moved out and returned; nothing is dropped here.
        (mem::replace(key, k), mem::replace(val, v))
    }
}
1181
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    pub(super) unsafe fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // Move the pair out by value; the node is dying, so the now
            // logically-uninitialized slots will never be read again.
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    #[inline]
    pub(super) unsafe fn drop_key_val(mut self) {
        // Run the destructor of the value even if the destructor of the key panics.
        struct Dropper<'a, T>(&'a mut MaybeUninit<T>);
        impl<T> Drop for Dropper<'_, T> {
            #[inline]
            fn drop(&mut self) {
                unsafe {
                    self.0.assume_init_drop();
                }
            }
        }

        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            let key = leaf.keys.get_unchecked_mut(self.idx);
            let val = leaf.vals.get_unchecked_mut(self.idx);
            // The guard is armed before the key drops, so a panicking key
            // destructor still unwinds through the value's destructor.
            let _guard = Dropper(val);
            key.assume_init_drop();
            // dropping the guard will drop the value
        }
    }
}
1223
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Moves every KV to the right of this handle into `new_node`, truncates
    /// this node to `self.idx` pairs, and returns the middle KV by value.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Everything strictly to the right of `idx` goes to the new node.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Read out the middle pair; after the length updates, its slot is
            // no longer counted by either node.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1250
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub(super) fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new(alloc);

        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub(super) fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            // Remove the pair at `idx` from the initialized areas (see
            // `slice_remove`) and account for it in the stored length.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            ((k, v), self.left_edge())
        }
    }
}
1285
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub(super) fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new(alloc);
            // KVs move exactly as in a leaf split; `data.len` is set there.
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            // A node with `new_len` KVs owns `new_len + 1` edges, so move one
            // more edge than KVs to the right node.
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
1315
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub(super) struct BalancingContext<'a, K, V> {
    // The KV in the parent node around which balancing happens.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // The child node to the left of the parent KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // The child node to the right of the parent KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1323
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Starts a balancing session over this KV and the two children flanking it.
    pub(super) fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // A mutable handle is not `Copy`, so duplicate it bitwise to reach
        // both child edges while still storing the original as the parent.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
1335
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub(super) fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `ptr::read` bitwise-duplicates the non-`Copy` node reference so that
        // `self` can still be stored as one of the context's children below.
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // Both `left_kv` and `right_kv` failed, meaning the parent
                    // has no KVs at all — the documented panic case.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
1372
1373impl<'a, K, V> BalancingContext<'a, K, V> {
1374    pub(super) fn left_child_len(&self) -> usize {
1375        self.left_child.len()
1376    }
1377
1378    pub(super) fn right_child_len(&self) -> usize {
1379        self.right_child.len()
1380    }
1381
1382    pub(super) fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1383        self.left_child
1384    }
1385
1386    pub(super) fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1387        self.right_child
1388    }
1389
1390    /// Returns whether merging is possible, i.e., whether there is enough room
1391    /// in a node to combine the central KV with both adjacent child nodes.
1392    pub(super) fn can_merge(&self) -> bool {
1393        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
1394    }
1395}
1396
impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Absorbs the parent's separating key-value pair and the entire right
    /// child into the left child, detaches and deallocates the right child,
    /// and hands the shrunk parent plus the grown left child to `result`.
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
        A: Allocator,
    >(
        self,
        result: F,
        alloc: A,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        // After merging, the left node holds its own KVs, the parent's
        // separating KV, and all of the right node's KVs.
        let new_left_len = old_left_len + 1 + right_len;

        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // Move the separating key down from the parent, then append all
            // of the right child's keys.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            // Same for the values.
            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Detach the edge to the right child from the parent, compact the
            // remaining edges and fix up their children's parent links.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_parent<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up.
    ///
    /// Panics unless we `.can_merge()`.
    pub(super) fn merge_tracking_child_edge<A: Allocator + Clone>(
        self,
        track_edge_idx: LeftOrRight<usize>,
        alloc: A,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        // The tracked edge must exist in the child it claims to be in.
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child(alloc);
        // Left-child edges keep their index; right-child edges are shifted
        // past the left child's KVs and the separating KV.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        unsafe { Handle::new_edge(child, new_idx) }
    }

    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub(super) fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        // One KV (and edge) was prepended, so the tracked edge moved right by one.
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }

    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub(super) fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }

    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    pub(super) fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        // SAFETY: the length checks below guarantee that every move stays
        // within each node's capacity and only reads initialized elements.
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                // The leftmost stolen element is handled separately below,
                // hence `count - 1` here.
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the leftmost stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            // Internal nodes also carry edges, which move along with the KVs.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Siblings have equal height, so they are both leaves or both internal.
                _ => unreachable!(),
            }
        }
    }

    /// The symmetric clone of `bulk_steal_left`.
    pub(super) fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        // SAFETY: the length checks below guarantee that every move stays
        // within each node's capacity and only reads initialized elements.
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the rightmost stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one.
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            // Internal nodes also carry edges, which move along with the KVs.
            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Siblings have equal height, so they are both leaves or both internal.
                _ => unreachable!(),
            }
        }
    }
}
1659
1660impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1661    pub(super) fn forget_node_type(
1662        self,
1663    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1664        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1665    }
1666}
1667
1668impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1669    pub(super) fn forget_node_type(
1670        self,
1671    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1672        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1673    }
1674}
1675
1676impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1677    pub(super) fn forget_node_type(
1678        self,
1679    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1680        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1681    }
1682}
1683
1684impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
1685    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
1686    pub(super) fn force(
1687        self,
1688    ) -> ForceResult<
1689        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
1690        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
1691    > {
1692        match self.node.force() {
1693            ForceResult::Leaf(node) => {
1694                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
1695            }
1696            ForceResult::Internal(node) => {
1697                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
1698            }
1699        }
1700    }
1701}
1702
1703impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
1704    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
1705    pub(super) unsafe fn cast_to_leaf_unchecked(
1706        self,
1707    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
1708        let node = unsafe { self.node.cast_to_leaf_unchecked() };
1709        Handle { node, idx: self.idx, _marker: PhantomData }
1710    }
1711}
1712
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub(super) fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at or past the edge's index moves to `right`.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            // If the suffix is empty there is nothing to move.
            if new_right_len > 0 {
                // Adjust both lengths first, then move keys, values and (for
                // internal nodes) edges.
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // The first edge of `right` stays in place, so the
                        // moved edges land starting at index 1.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Both nodes have the same height, so they are both
                    // leaves or both internal.
                    _ => unreachable!(),
                }
            }
        }
    }
}
1758
/// The result of resolving a `LeafOrInternal` node or handle into its
/// statically typed form: either the leaf variant or the internal variant.
pub(super) enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1763
/// Result of insertion, when a node needed to expand beyond its capacity.
pub(super) struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value that existed before and were split off, to be inserted elsewhere.
    pub kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
1773
1774impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1775    pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1776        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1777    }
1778}
1779
1780impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1781    pub(super) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1782        SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1783    }
1784}
1785
/// Zero-sized marker types used as type parameters of `NodeRef` and `Handle`
/// to encode, at compile time, what kind of node is referenced and how it is
/// borrowed.
pub(super) mod marker {
    use core::marker::PhantomData;

    // Node-type markers: what kind of node a `NodeRef` statically points to.
    pub(crate) enum Leaf {}
    pub(crate) enum Internal {}
    pub(crate) enum LeafOrInternal {}

    // Borrow-type markers: how a `NodeRef` accesses the tree.
    pub(crate) enum Owned {}
    pub(crate) enum Dying {}
    pub(crate) enum DormantMut {}
    pub(crate) struct Immut<'a>(PhantomData<&'a ()>);
    pub(crate) struct Mut<'a>(PhantomData<&'a mut ()>);
    pub(crate) struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub(crate) trait BorrowType {
        /// If node references of this borrow type allow traversing to other
        /// nodes in the tree, this constant is set to `true`. It can be used
        /// for a compile-time assertion.
        const TRAVERSAL_PERMIT: bool = true;
    }
    impl BorrowType for Owned {
        /// Reject traversal, because it isn't needed. Instead traversal
        /// happens using the result of `borrow_mut`.
        /// By disabling traversal, and only creating new references to roots,
        /// we know that every reference of the `Owned` type is to a root node.
        const TRAVERSAL_PERMIT: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}
    impl BorrowType for DormantMut {}

    // Handle-type markers: which position within a node a `Handle` names.
    pub(crate) enum KV {}
    pub(crate) enum Edge {}
}
1822
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    let len = slice.len();
    debug_assert!(len > idx);
    // SAFETY: per the caller's contract `idx < len`, so both the shift and
    // the write stay within the slice.
    unsafe {
        let base = slice.as_mut_ptr();
        if len > idx + 1 {
            // Open a gap at `idx` by sliding the tail one slot to the right.
            ptr::copy(base.add(idx), base.add(idx + 1), len - idx - 1);
        }
        (*base.add(idx)).write(val);
    }
}
1838
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    let len = slice.len();
    debug_assert!(idx < len);
    // SAFETY: per the caller's contract `idx < len`, so the element read is
    // initialized and the shift stays within the slice.
    unsafe {
        let base = slice.as_mut_ptr();
        let removed = (*base.add(idx)).assume_init_read();
        // Close the gap by sliding the tail one slot to the left; the last
        // slot is left logically uninitialized.
        ptr::copy(base.add(idx + 1), base.add(idx), len - idx - 1);
        removed
    }
}
1854
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    // Check the documented precondition in debug builds, for consistency
    // with `slice_insert` and `slice_remove`.
    debug_assert!(distance <= slice.len());
    // SAFETY: per the caller's contract, `slice.len() - distance` cannot
    // underflow and both copy ranges lie within the slice.
    unsafe {
        let slice_ptr = slice.as_mut_ptr();
        ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance);
    }
}
1865
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    // Check the documented precondition in debug builds, for consistency
    // with `slice_insert` and `slice_remove`.
    debug_assert!(distance <= slice.len());
    // SAFETY: per the caller's contract, `slice.len() - distance` cannot
    // underflow and both copy ranges lie within the slice.
    unsafe {
        let slice_ptr = slice.as_mut_ptr();
        ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance);
    }
}
1876
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    assert!(src.len() == dst.len());
    // SAFETY: the lengths match, and `src` and `dst` are distinct `&mut`
    // borrows, so the two regions cannot overlap.
    unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), dst.len()) }
}
1886
1887#[cfg(test)]
1888mod tests;