rune_alloc/btree/
node.rs

// This is an attempt at an implementation following the ideal
//
// ```
// struct BTreeMap<K, V> {
//     height: usize,
//     root: Option<Box<Node<K, V, height>>>
// }
//
// struct Node<K, V, height: usize> {
//     keys: [K; 2 * B - 1],
//     vals: [V; 2 * B - 1],
//     edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
//     parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
//     len: u16,
// }
// ```
//
// Since Rust doesn't actually have dependent types and polymorphic recursion,
// we make do with lots of unsafety.

// A major goal of this module is to avoid complexity by treating the tree as a generic (if
// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
// even what underfull means. However, we do rely on a few invariants:
//
// - Trees must have uniform depth/height. This means that every path down to a leaf from a
//   given node has exactly the same length.
// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
//   This implies that even an empty node has at least one edge.
//   For a leaf node, "having an edge" only means we can identify a position in the node,
//   since leaf edges are empty and need no data representation. In an internal node,
//   an edge both identifies a position and contains a pointer to a child node.
use core::alloc::Layout;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::slice::SliceIndex;

use crate::alloc::{AllocError, Allocator};
use crate::ptr::{self, NonNull};

/// The branching factor of the tree: a node stores at most `2 * B - 1`
/// key-value pairs and, if internal, at most `2 * B` edges.
const B: usize = 6;
/// Maximum number of key-value pairs a single node can hold.
pub(crate) const CAPACITY: usize = 2 * B - 1;
/// The smaller of the two lengths produced when a full node is split.
pub(crate) const MIN_LEN_AFTER_SPLIT: usize = B - 1;
/// Index of the middle key-value pair in a full node.
const KV_IDX_CENTER: usize = B - 1;
/// Edge index immediately to the left of the center KV pair.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
/// One edge position further left of `EDGE_IDX_LEFT_OF_CENTER`.
const EDGE_IDX_LEFT_OF_CENTER_N1: usize = EDGE_IDX_LEFT_OF_CENTER - 1;
/// Edge index immediately to the right of the center KV pair.
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;

/// The underlying representation of leaf nodes and part of the representation of internal nodes.
///
/// Fields typed `MaybeUninit` are left uninitialized by `LeafNode::init`; see
/// the per-field comments for when they are considered valid.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`.
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}

impl<K, V> LeafNode<K, V> {
    /// Initializes a new `LeafNode` in-place.
    ///
    /// # Safety
    /// `this` must point to memory that is valid for writes of a full
    /// `LeafNode<K, V>`.
    unsafe fn init(this: *mut Self) {
        // As a general policy, we leave fields uninitialized if they can be, as this should
        // be both slightly faster and easier to track in Valgrind.
        unsafe {
            // parent_idx, keys, and vals are all MaybeUninit
            // `addr_of_mut!` lets us write the two fields that need values
            // without ever creating a reference to the (partly uninit) node.
            ptr::addr_of_mut!((*this).parent).write(None);
            ptr::addr_of_mut!((*this).len).write(0);
        }
    }

    /// Creates a new boxed `LeafNode`.
    ///
    /// Returns `Err(AllocError)` if the allocator fails. On success the node
    /// is an empty leaf: `len == 0` and no parent link.
    fn new<A: Allocator>(alloc: &A) -> Result<NonNull<Self>, AllocError> {
        unsafe {
            let layout = Layout::new::<Self>();
            // SAFETY: the returned block is valid for `layout`, i.e. exactly
            // the size/alignment of `Self`, so `init` may write into it.
            let ptr = alloc.allocate(layout)?.cast::<Self>();
            LeafNode::init(ptr.as_ptr());
            Ok(ptr)
        }
    }
}

/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    /// The leaf-shaped prefix; must stay the first field so the cast described
    /// above remains valid.
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}

impl<K, V> InternalNode<K, V> {
    /// Creates a new boxed `InternalNode`.
    ///
    /// # Safety
    /// An invariant of internal nodes is that they have at least one
    /// initialized and valid edge. This function does not set up
    /// such an edge.
    unsafe fn new<A: Allocator>(alloc: &A) -> Result<NonNull<Self>, AllocError> {
        unsafe {
            let layout = Layout::new::<Self>();
            let ptr = alloc.allocate(layout)?.cast::<Self>();
            // We only need to initialize the data; the edges are MaybeUninit.
            LeafNode::init(ptr::addr_of_mut!((*ptr.as_ptr()).data));
            Ok(ptr)
        }
    }
}

/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor.
///
/// Storing an `InternalNode` behind this alias relies on the `repr(C)` cast
/// guarantee documented on `InternalNode`.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;

// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
///
/// A reference to a node.
///
/// This type has a number of parameters that controls how it acts:
/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
///   Since any `NodeRef` allows navigating through the tree, `BorrowType`
///   effectively applies to the entire tree, not just to the node itself.
///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
///      with respect to keys and tree structure, but also allows many
///      mutable references to values throughout the tree to coexist.
///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
///      although insert methods allow a mutable pointer to a value to coexist.
///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
///      but does not have a destructor, and must be cleaned up manually.
///    - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
///      but has methods to destroy the tree bit by bit, and ordinary methods,
///      while not marked as unsafe to call, can invoke UB if called incorrectly.
/// - `K` and `V`: These are the types of keys and values stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
///   `NodeRef` could be pointing to either type of node.
///   `Type` is named `NodeType` when used outside `NodeRef`.
///
/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
/// exploit static type safety. There are limitations in the way we can apply
/// such restrictions:
/// - For each type parameter, we can only define a method either generically
///   or for one particular type. For example, we cannot define a method like
///   `into_kv` generically for all `BorrowType`, or once for all types that
///   carry a lifetime, because we want it to return `&'a` references.
///   Therefore, we define it only for the least powerful type `Immut<'a>`.
/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
///   Therefore, we have to explicitly call `reborrow` on a more powerful
///   `NodeRef` in order to reach a method like `into_kv`.
///
/// All methods on `NodeRef` that return some kind of reference, either:
/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
///   Sometimes, to invoke such a method, we need to call `reborrow_mut`.
/// - Take `self` by reference, and (implicitly) return that reference's
///   lifetime, instead of the lifetime carried by `BorrowType`. That way,
///   the borrow checker guarantees that the `NodeRef` remains borrowed as long
///   as the returned reference is used.
///   The methods supporting insert bend this rule by returning a raw pointer,
///   i.e., a reference without any lifetime.
pub(crate) struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way.
    node: NonNull<LeafNode<K, V>>,
    /// Carries `BorrowType` and `Type` without storing any data.
    _marker: PhantomData<(BorrowType, Type)>,
}

/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub(crate) type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;

// Manual impls rather than `derive` so that copying a `NodeRef` does not
// require `K: Copy`/`V: Copy` — only the pointer and height are copied.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

// Like the `Immut` impls above: a raw borrow is freely copyable because only
// the pointer and height are duplicated, never the keys or values.
impl<K, V, Type> Copy for NodeRef<marker::Raw, K, V, Type> {}
impl<K, V, Type> Clone for NodeRef<marker::Raw, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

// SAFETY: sharing a `NodeRef` across threads only exposes shared access to the
// keys and values, so `K: Sync + V: Sync` suffices regardless of `BorrowType`.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

// SAFETY: `Immut`/`Raw` borrows only ever hand out shared views, so sending
// one to another thread needs no more than `K: Sync + V: Sync`.
unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Immut<'_>, K, V, Type> {}
unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Raw, K, V, Type> {}
// SAFETY: these borrow types permit moving or dropping keys and values, so
// they behave like unique/owning references and require `K: Send + V: Send`.
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Mut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::ValMut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
    /// Allocates and returns an owned, empty leaf node.
    ///
    /// Returns `Err(AllocError)` if the allocator fails.
    pub(crate) fn new_leaf<A: Allocator>(alloc: &A) -> Result<Self, AllocError> {
        Ok(Self::from_new_leaf(LeafNode::new(alloc)?))
    }

    /// Wraps a freshly initialized leaf allocation as an owned `NodeRef`.
    /// A leaf is by definition at height 0.
    fn from_new_leaf(leaf: NonNull<LeafNode<K, V>>) -> Self {
        NodeRef {
            height: 0,
            node: leaf,
            _marker: PhantomData,
        }
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
    /// Allocates a new internal node whose single edge points at `child`,
    /// satisfying the internal-node invariant of at least one valid edge.
    /// The new node sits one level above `child`.
    fn new_internal<A: Allocator>(child: Root<K, V>, alloc: &A) -> Result<Self, AllocError> {
        let mut new_node = unsafe { InternalNode::new(alloc)? };

        // SAFETY: new_node has been initialized to the point where we can
        // construct a reference to it.
        unsafe {
            let new_node = new_node.as_mut();
            new_node.edges[0].write(child.node);
        }

        Ok(unsafe { NodeRef::from_new_internal(new_node, child.height + 1) })
    }

    /// # Safety
    /// `height` must not be zero.
    unsafe fn from_new_internal(internal: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        let node = internal.cast();
        let mut this = NodeRef {
            height,
            node,
            _marker: PhantomData,
        };
        // Make the child (or children) point back to this node as their parent.
        this.borrow_mut().correct_all_childrens_parent_links();
        this
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    ///
    /// `height` is supplied by the caller (typically `child.height + 1`)
    /// because nodes do not store their own height.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        NodeRef {
            height,
            node: node.cast(),
            _marker: PhantomData,
        }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`.
        // The cast from `*LeafNode` is valid because `InternalNode` is
        // `repr(C)` with its `LeafNode` data as the first field.
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}

impl<K, V> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
    /// Borrows exclusive access to the data of an internal node.
    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
        let ptr = Self::as_internal_ptr(self);
        // SAFETY: the `Mut` borrow type grants exclusive access to the node.
        unsafe { &mut *ptr }
    }
}

impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub(crate) fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub(crate) fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub(crate) fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }

    /// Temporarily takes out another, raw pointer to the same node.
    /// Like `reborrow`, but the result carries no lifetime.
    pub(crate) fn raw(&self) -> NodeRef<marker::Raw, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}

impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(crate) fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        // NOTE(review): `TRAVERSAL_PERMIT` is declared in the marker module
        // (not visible in this chunk); presumably it forbids navigation for
        // borrow types where moving through the tree would be unsound.
        assert!(BorrowType::TRAVERSAL_PERMIT);

        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is non-null.
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns the handle for the leftmost edge (index 0) of this node.
    pub(crate) fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns the handle for the rightmost edge (index `len`) of this node.
    pub(crate) fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub(crate) fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub(crate) fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}

impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Could be a public implementation of PartialEq, but only used in this module.
    ///
    /// Two `NodeRef`s are equal when they point at the same node; equal nodes
    /// must also sit at the same height (checked in debug builds).
    fn eq(&self, other: &Self) -> bool {
        // Exhaustive destructuring: adding a field to `NodeRef` will force
        // this comparison to be revisited.
        let Self {
            node,
            height,
            _marker,
        } = self;
        if node.eq(&other.node) {
            debug_assert_eq!(*height, other.height);
            true
        } else {
            false
        }
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    fn into_leaf(self) -> &'a LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
        unsafe { &*ptr }
    }

    /// Borrows a view into the keys stored in the node.
    ///
    /// Only the first `len` keys are exposed; the rest of the array is
    /// uninitialized storage.
    pub(crate) fn keys(&self) -> &[K] {
        // Local stand-in for `MaybeUninit::slice_assume_init_ref` (unstable).
        const unsafe fn slice_assume_init_ref<T>(slice: &[MaybeUninit<T>]) -> &[T] {
            // SAFETY: casting `slice` to a `*const [T]` is safe since the
            // caller guarantees that `slice` is initialized, and `MaybeUninit`
            // is guaranteed to have the same layout as `T`. The pointer
            // obtained is valid since it refers to memory owned by `slice`
            // which is a reference and thus guaranteed to be valid for reads.
            unsafe { &*(slice as *const [MaybeUninit<T>] as *const [T]) }
        }

        let leaf = self.into_leaf();

        // SAFETY: `len <= CAPACITY` and the first `len` keys are initialized.
        unsafe { slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len))) }
    }
}

impl<K, V, Type> NodeRef<marker::Raw, K, V, Type> {
    /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
    ///
    /// Unlike the `Immut` variant, this stays a raw pointer and never creates
    /// a reference.
    fn into_leaf(self) -> *const LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: there can be no mutable references into this tree borrowed as `Raw`.
        ptr as *const _
    }
}

impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Similar to `ascend`, gets a reference to a node's parent node, but also
    /// deallocates the current node in the process. This is unsafe because the
    /// current node will still be accessible despite being deallocated.
    ///
    /// Returns `None` if the node had no parent (i.e. it was the root).
    pub(crate) unsafe fn deallocate_and_ascend<A: Allocator>(
        self,
        alloc: &A,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
        // Capture what we need before `self` is consumed by `ascend`.
        let height = self.height;
        let node = self.node;
        let ret = self.ascend().ok();
        unsafe {
            // The layout must match the one used at allocation time, which
            // depends on whether this node is internal (height > 0) or a leaf.
            alloc.deallocate(
                node.cast(),
                if height > 0 {
                    Layout::new::<InternalNode<K, V>>()
                } else {
                    Layout::new::<LeafNode<K, V>>()
                },
            );
        }
        ret
    }
}

impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node.
    /// Consumes `self`, so the returned reference carries the full `'a` lifetime.
    fn into_leaf_mut(self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Returns a dormant copy of this node with its lifetime erased which can
    /// be reawakened later.
    pub(crate) fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }
}

impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub(crate) unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }
}

impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
    /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
    fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    ///
    /// Note the result may cover uninitialized slots (indices >= `len`);
    /// hence it is exposed as `MaybeUninit`.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe {
            self.as_leaf_mut()
                .keys
                .as_mut_slice()
                .get_unchecked_mut(index)
        }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe {
            self.as_leaf_mut()
                .vals
                .as_mut_slice()
                .get_unchecked_mut(index)
        }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe {
            self.as_internal_mut()
                .edges
                .as_mut_slice()
                .get_unchecked_mut(index)
        }
    }
}

impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
    /// Returns a shared reference to the key and an exclusive reference to the
    /// value at position `idx`.
    ///
    /// # Safety
    /// - The node has more than `idx` initialized elements.
    unsafe fn into_key_val_mut_at(self, idx: usize) -> (&'a K, &'a mut V) {
        // We only create a reference to the one element we are interested in,
        // to avoid aliasing with outstanding references to other elements,
        // in particular, those returned to the caller in earlier iterations.
        let leaf = Self::as_leaf_ptr(&self);
        let keys = unsafe { ptr::addr_of!((*leaf).keys) };
        let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
        // We must coerce to unsized array pointers because of Rust issue #74679.
        let keys: *const [_] = keys;
        let vals: *mut [_] = vals;
        // The offsets stay inside the `keys`/`vals` arrays because the caller
        // guarantees `idx` is within the initialized prefix (idx < len <= CAPACITY).
        let key = unsafe { (*(keys as *const MaybeUninit<K>).wrapping_add(idx)).assume_init_ref() };
        let val = unsafe { (*(vals as *mut MaybeUninit<V>).wrapping_add(idx)).assume_init_mut() };
        (key, val)
    }
}

impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}

impl<K, V> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
    /// Rewrites the parent back-links of the children at the given edge
    /// indices so they point at this node.
    ///
    /// # Safety
    /// Every item returned by `range` is a valid edge index for the node.
    unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
        for i in range {
            debug_assert!(i <= self.len());
            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
        }
    }

    /// Fixes the parent links of all `len + 1` children of this node.
    fn correct_all_childrens_parent_links(&mut self) {
        let len = self.len();
        // SAFETY: 0..=len covers exactly the valid edge indices.
        unsafe { self.correct_childrens_parent_links(0..=len) };
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Sets the node's link to its parent edge,
    /// without invalidating other references to the node.
    fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
        let leaf = Self::as_leaf_mut(self);
        leaf.parent = Some(parent);
        // `parent_idx` is only meaningful while `parent` is `Some`.
        leaf.parent_idx.write(parent_idx as u16);
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Clears the root's link to its parent edge.
    /// (`parent_idx` is left untouched; it is only valid while `parent` is set.)
    fn clear_parent_link(&mut self) {
        let mut root_node = self.borrow_mut();
        let leaf = root_node.as_leaf_mut();
        leaf.parent = None;
    }
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub(crate) fn new<A: Allocator>(alloc: &A) -> Result<Self, AllocError> {
        Ok(NodeRef::new_leaf(alloc)?.forget_type())
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    ///
    /// Returns `Err(AllocError)` if the new root cannot be allocated.
    pub(crate) fn push_internal_level<A: Allocator>(
        &mut self,
        alloc: &A,
    ) -> Result<NodeRef<marker::Mut<'_>, K, V, marker::Internal>, AllocError> {
        // Swap the old root out of `self` and wrap it in a fresh internal node.
        super::mem::take_mut(self, |old_root| {
            Ok(NodeRef::new_internal(old_root, alloc)?.forget_type())
        })?;

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        Ok(NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        })
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Requires exclusive access to the `NodeRef` object but not to the root node;
    /// it will not invalidate other handles or references to the root node.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub(crate) fn pop_internal_level<A: Allocator>(&mut self, alloc: &A) {
        assert!(self.height > 0);

        // Keep the old root pointer so it can be deallocated at the end.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        self.clear_parent_link();

        unsafe {
            // The old root was internal (height > 0), so use the internal layout.
            alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}

impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub(crate) fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }

    /// Slightly mutably borrows the owned root node.
    /// Values are mutable, but keys and structure are not (`ValMut`).
    pub(crate) fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub(crate) fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef {
            height: self.height,
            node: self.node,
            _marker: PhantomData,
        }
    }
}

753impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
754    /// Adds a key-value pair to the end of the node, and returns
755    /// the mutable reference of the inserted value.
756    pub(crate) fn push(&mut self, key: K, val: V) -> &mut V {
757        let len = self.len_mut();
758        let idx = usize::from(*len);
759        assert!(idx < CAPACITY);
760        *len += 1;
761        unsafe {
762            self.key_area_mut(idx).write(key);
763            self.val_area_mut(idx).write(val)
764        }
765    }
766}
767
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Adds a key-value pair, and an edge to go to the right of that pair,
    /// to the end of the node.
    pub(crate) fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
        // The pushed subtree must hang exactly one level below this node.
        assert!(edge.height == self.height - 1);

        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            // The new edge sits to the right of the new KV, hence `idx + 1`.
            self.edge_area_mut(idx + 1).write(edge.node);
            // The moved-in child still points at its old parent; rewire its
            // parent link to this node and index.
            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
        }
    }
}
786
787impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
788    /// Removes any static information asserting that this node is a `Leaf` node.
789    pub(crate) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
790        NodeRef {
791            height: self.height,
792            node: self.node,
793            _marker: PhantomData,
794        }
795    }
796}
797
798impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
799    /// Removes any static information asserting that this node is an `Internal` node.
800    pub(crate) fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
801        NodeRef {
802            height: self.height,
803            node: self.node,
804            _marker: PhantomData,
805        }
806    }
807}
808
809impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
810    /// Checks whether a node is an `Internal` node or a `Leaf` node.
811    pub(crate) fn force(
812        self,
813    ) -> ForceResult<
814        NodeRef<BorrowType, K, V, marker::Leaf>,
815        NodeRef<BorrowType, K, V, marker::Internal>,
816    > {
817        if self.height == 0 {
818            ForceResult::Leaf(NodeRef {
819                height: self.height,
820                node: self.node,
821                _marker: PhantomData,
822            })
823        } else {
824            ForceResult::Internal(NodeRef {
825                height: self.height,
826                node: self.node,
827                _marker: PhantomData,
828            })
829        }
830    }
831}
832
833impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
834    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
835    unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
836        debug_assert!(self.height == 0);
837        NodeRef {
838            height: self.height,
839            node: self.node,
840            _marker: PhantomData,
841        }
842    }
843
844    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
845    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
846        debug_assert!(self.height > 0);
847        NodeRef {
848            height: self.height,
849            node: self.node,
850            _marker: PhantomData,
851        }
852    }
853}
854
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub(crate) struct Handle<Node, Type> {
    // The node this handle points into.
    node: Node,
    // Position inside `node`: `idx < node.len()` for `KV` handles,
    // `idx <= node.len()` for `Edge` handles (see `new_kv`/`new_edge`).
    idx: usize,
    _marker: PhantomData<Type>,
}
868
869impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
870// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
871// `Clone`able is when it is an immutable reference and therefore `Copy`.
872impl<Node: Copy, Type> Clone for Handle<Node, Type> {
873    fn clone(&self) -> Self {
874        *self
875    }
876}
877
878impl<Node, Type> Handle<Node, Type> {
879    /// Retrieves the node that contains the edge or key-value pair this handle points to.
880    pub(crate) fn into_node(self) -> Node {
881        self.node
882    }
883
884    /// Returns the position of this handle in the node.
885    pub(crate) fn idx(&self) -> usize {
886        self.idx
887    }
888}
889
890impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
891    /// Creates a new handle to a key-value pair in `node`.
892    /// Unsafe because the caller must ensure that `idx < node.len()`.
893    pub(crate) unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
894        debug_assert!(idx < node.len());
895
896        Handle {
897            node,
898            idx,
899            _marker: PhantomData,
900        }
901    }
902
903    pub(crate) fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
904        unsafe { Handle::new_edge(self.node, self.idx) }
905    }
906
907    pub(crate) fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
908        unsafe { Handle::new_edge(self.node, self.idx + 1) }
909    }
910}
911
912impl<BorrowType, K, V, NodeType, HandleType> PartialEq
913    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
914{
915    fn eq(&self, other: &Self) -> bool {
916        let Self { node, idx, _marker } = self;
917        node.eq(&other.node) && *idx == other.idx
918    }
919}
920
921impl<BorrowType, K, V, NodeType, HandleType>
922    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
923{
924    /// Temporarily takes out another immutable handle on the same location.
925    pub(crate) fn reborrow(
926        &self,
927    ) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
928        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
929        Handle {
930            node: self.node.reborrow(),
931            idx: self.idx,
932            _marker: PhantomData,
933        }
934    }
935}
936
937impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
938    /// Temporarily takes out another mutable handle on the same location. Beware, as
939    /// this method is very dangerous, doubly so since it might not immediately appear
940    /// dangerous.
941    ///
942    /// For details, see `NodeRef::reborrow_mut`.
943    pub(crate) unsafe fn reborrow_mut(
944        &mut self,
945    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
946        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
947        Handle {
948            node: unsafe { self.node.reborrow_mut() },
949            idx: self.idx,
950            _marker: PhantomData,
951        }
952    }
953
954    /// Returns a dormant copy of this handle which can be reawakened later.
955    ///
956    /// See `DormantMutRef` for more details.
957    pub(crate) fn dormant(
958        &self,
959    ) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
960        Handle {
961            node: self.node.dormant(),
962            idx: self.idx,
963            _marker: PhantomData,
964        }
965    }
966}
967
968impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
969    /// Revert to the unique borrow initially captured.
970    ///
971    /// # Safety
972    ///
973    /// The reborrow must have ended, i.e., the reference returned by `new` and
974    /// all pointers and references derived from it, must not be used anymore.
975    pub(crate) unsafe fn awaken<'a>(
976        self,
977    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
978        Handle {
979            node: unsafe { self.node.awaken() },
980            idx: self.idx,
981            _marker: PhantomData,
982        }
983    }
984}
985
986impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
987    /// Creates a new handle to an edge in `node`.
988    /// Unsafe because the caller must ensure that `idx <= node.len()`.
989    pub(crate) unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
990        debug_assert!(idx <= node.len());
991
992        Handle {
993            node,
994            idx,
995            _marker: PhantomData,
996        }
997    }
998
999    pub(crate) fn left_kv(
1000        self,
1001    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
1002        if self.idx > 0 {
1003            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
1004        } else {
1005            Err(self)
1006        }
1007    }
1008
1009    pub(crate) fn right_kv(
1010        self,
1011    ) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
1012        if self.idx < self.node.len() {
1013            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
1014        } else {
1015            Err(self)
1016        }
1017    }
1018}
1019
/// Tells in which of the two halves resulting from a node split a given
/// position ends up.
pub(crate) enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
1024
1025/// Given an edge index where we want to insert into a node filled to capacity,
1026/// computes a sensible KV index of a split point and where to perform the insertion.
1027/// The goal of the split point is for its key and value to end up in a parent node;
1028/// the keys, values and edges to the left of the split point become the left child;
1029/// the keys, values and edges to the right of the split point become the right child.
1030fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
1031    debug_assert!(edge_idx <= CAPACITY);
1032    // Rust issue #74834 tries to explain these symmetric rules.
1033    match edge_idx {
1034        0..=EDGE_IDX_LEFT_OF_CENTER_N1 => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
1035        EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
1036        EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
1037        _ => (
1038            KV_IDX_CENTER + 1,
1039            LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1)),
1040        ),
1041    }
1042}
1043
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
    unsafe fn insert_fit(
        mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

        unsafe {
            // Shift the keys and values at `idx..` one slot to the right and
            // write the new pair into the freed position.
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            *self.node.len_mut() = new_len as u16;

            // The inserted pair now lives at `idx`, so the KV handle at the
            // same index points at it.
            Handle::new_kv(self.node, self.idx)
        }
    }
}
1065
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// Returns a dormant handle to the inserted node which can be reawakened
    /// once splitting is complete.
    fn insert<A: Allocator>(
        self,
        key: K,
        val: V,
        alloc: &A,
    ) -> Result<
        (
            Option<SplitResult<'a, K, V, marker::Leaf>>,
            Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
        ),
        AllocError,
    > {
        if self.node.len() < CAPACITY {
            // SAFETY: There is enough space in the node for insertion.
            let handle = unsafe { self.insert_fit(key, val) };
            Ok((None, handle.dormant()))
        } else {
            // The node is full: split it around a central KV and insert into
            // whichever half the original edge index landed in.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc)?;
            let insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            // SAFETY: We just split the node, so there is enough space for
            // insertion.
            let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
            Ok((Some(result), handle))
        }
    }
}
1107
impl<K, V> Handle<NodeRef<marker::Mut<'_>, K, V, marker::Internal>, marker::Edge> {
    /// Fixes the parent pointer and index in the child node that this edge
    /// links to. This is useful when the ordering of edges has been changed,
    /// such as after an insertion shifted edges to new positions.
    fn correct_parent_link(self) {
        // Create backpointer without invalidating other references to the node.
        let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
        let idx = self.idx;
        let mut child = self.descend();
        child.set_parent_link(ptr, idx);
    }
}
1119
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // The inserted subtree must hang exactly one level below this node.
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes to the right of the new KV, hence `idx + 1`;
            // a node with `new_len` KVs owns `new_len + 1` edges.
            slice_insert(
                self.node.edge_area_mut(..new_len + 1),
                self.idx + 1,
                edge.node,
            );
            *self.node.len_mut() = new_len as u16;

            // Edges from the inserted one onward were shifted one slot to the
            // right, so the parent links of their children must be refreshed.
            self.node
                .correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert<A: Allocator>(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
        alloc: &A,
    ) -> Result<Option<SplitResult<'a, K, V, marker::Internal>>, AllocError> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            Ok(None)
        } else {
            // Full node: split it around a central KV, insert into whichever
            // half the edge index landed in, and hand the split back to the
            // caller so it can be inserted into the parent.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc)?;
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            Ok(Some(result))
        }
    }
}
1176
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is some `SplitResult`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value, which in the case of `SplitResult`
    /// is in the `left` or `right` tree.
    pub(crate) fn insert_recursing<A: Allocator>(
        self,
        key: K,
        value: V,
        alloc: &A,
        split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>) -> Result<(), AllocError>,
    ) -> Result<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>, AllocError> {
        let (mut split, handle) = match self.insert(key, value, alloc)? {
            // SAFETY: we have finished splitting and can now re-awaken the
            // handle to the inserted element.
            (None, handle) => return Ok(unsafe { handle.awaken() }),
            (Some(split), handle) => (split.forget_node_type(), handle),
        };

        // Bubble the split KV and right subtree up the ancestor chain until an
        // ancestor has room, or the root itself has to split.
        loop {
            split = match split.left.ascend() {
                Ok(parent) => {
                    match parent.insert(split.kv.0, split.kv.1, split.right, alloc)? {
                        // SAFETY: we have finished splitting and can now re-awaken the
                        // handle to the inserted element.
                        None => return Ok(unsafe { handle.awaken() }),
                        Some(split) => split.forget_node_type(),
                    }
                }
                Err(root) => {
                    // The split reached the root; let the caller install a new
                    // root above the old one and the split-off sibling.
                    split_root(SplitResult {
                        left: root,
                        ..split
                    })?;
                    // SAFETY: we have finished splitting and can now re-awaken the
                    // handle to the inserted element.
                    return Ok(unsafe { handle.awaken() });
                }
            };
        }
    }
}
1222
impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Finds the node pointed to by this edge.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub(crate) fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        // Some borrow types do not permit walking the tree; the associated
        // constant on the `BorrowType` marker trait encodes that permission.
        assert!(BorrowType::TRAVERSAL_PERMIT);

        // We need to use raw pointers to nodes because, if BorrowType is
        // marker::ValMut, there might be outstanding mutable references to
        // values that we must not invalidate. There's no worry accessing the
        // height field because that value is copied. Beware that, once the
        // node pointer is dereferenced, we access the edges array with a
        // reference (Rust issue #73987) and invalidate any other references
        // to or inside the array, should any be around.
        let parent_ptr = NodeRef::as_internal_ptr(&self.node);
        let node = unsafe {
            (*parent_ptr)
                .edges
                .get_unchecked(self.idx)
                .assume_init_read()
        };
        NodeRef {
            node,
            // A child always lives one level below its parent.
            height: self.node.height - 1,
            _marker: PhantomData,
        }
    }
}
1256
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key and value at this handle's position for the full
    /// lifetime of the underlying immutable borrow.
    pub(crate) fn into_kv(self) -> (&'a K, &'a V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: `idx < len` is the KV-handle invariant, and the first `len`
        // keys and values of a node are initialized (see module docs).
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1266
impl<K, V, NodeType> Handle<NodeRef<marker::Raw, K, V, NodeType>, marker::KV> {
    /// Returns raw pointers to the key and value at this handle's position.
    pub(crate) fn into_kv_raw(self) -> (*const K, *const V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf();
        // SAFETY: `idx < len` is the KV-handle invariant, and the first `len`
        // keys and values of a node are initialized.
        let k = unsafe { (*leaf).keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { (*leaf).vals.get_unchecked(self.idx).assume_init_ref() };
        (k, v)
    }
}
1276
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows the key at this handle's position mutably.
    pub(crate) fn key_mut(&mut self) -> &mut K {
        // SAFETY: a KV handle's index always points at an initialized key.
        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
    }

    /// Converts the handle into a mutable borrow of its value, tied to the
    /// full lifetime of the underlying mutable borrow.
    pub(crate) fn into_val_mut(self) -> &'a mut V {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }

    // Converts the handle into a shared key borrow and a mutable value borrow.
    #[cfg(test)]
    pub(crate) fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
        (k, v)
    }
}
1297
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
    /// Converts the handle into a shared key borrow and a mutable value borrow.
    pub(crate) fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        unsafe { self.node.into_key_val_mut_at(self.idx) }
    }
}
1303
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Borrows both the key and the value at this handle's position mutably.
    pub(crate) fn kv_mut(&mut self) -> (&mut K, &mut V) {
        debug_assert!(self.idx < self.node.len());
        // We cannot call separate key and value methods, because calling the second one
        // invalidates the reference returned by the first.
        unsafe {
            let leaf = self.node.as_leaf_mut();
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
            (key, val)
        }
    }

    /// Replaces the key and value that the KV handle refers to.
    pub(crate) fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
        let (key, val) = self.kv_mut();
        (mem::replace(key, k), mem::replace(val, v))
    }
}
1323
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    pub(crate) unsafe fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // Move the pair out of the dying node by value.
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    #[inline]
    pub(crate) unsafe fn drop_key_val(mut self) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // Run the destructors in place without moving the values out.
            leaf.keys.get_unchecked_mut(self.idx).assume_init_drop();
            leaf.vals.get_unchecked_mut(self.idx).assume_init_drop();
        }
    }
}
1351
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    ///
    /// Reads out the KV at this handle (returned), moves every KV to its right
    /// into `new_node`, and truncates this node to `idx` entries.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Number of KVs that end up in `new_node`: everything right of `idx`.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Read the split-point KV out by value; its slot is logically
            // vacated by the length truncation below.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1378
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub(crate) fn split<A: Allocator>(
        mut self,
        alloc: &A,
    ) -> Result<SplitResult<'a, K, V, marker::Leaf>, AllocError> {
        let mut new_node = LeafNode::new(alloc)?;

        // Move the split-point KV out and everything right of it into the
        // freshly allocated leaf.
        let kv = self.split_leaf_data(unsafe { new_node.as_mut() });

        let right = NodeRef::from_new_leaf(new_node);

        Ok(SplitResult {
            left: self.node,
            kv,
            right,
        })
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub(crate) fn remove(
        mut self,
    ) -> (
        (K, V),
        Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
    ) {
        let old_len = self.node.len();
        unsafe {
            // Shift everything right of `idx` one slot to the left, extracting
            // the removed key and value in the process.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            ((k, v), self.left_edge())
        }
    }
}
1421
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub(crate) fn split<A: Allocator>(
        mut self,
        alloc: &A,
    ) -> Result<SplitResult<'a, K, V, marker::Internal>, AllocError> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new(alloc)?;

            // SAFETY: new_node has been initialized to the point where we can
            // construct a reference to it.
            let kv = {
                let new_node = new_node.as_mut();
                // Move the KVs right of the handle, then the matching edges
                // (a node with `new_len` KVs owns `new_len + 1` edges).
                let kv = self.split_leaf_data(&mut new_node.data);
                let new_len = usize::from(new_node.data.len);
                move_to_slice(
                    self.node.edge_area_mut(self.idx + 1..old_len + 1),
                    &mut new_node.edges[..new_len + 1],
                );
                kv
            };

            // The new sibling sits at the same height as the node it split from.
            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            Ok(SplitResult {
                left: self.node,
                kv,
                right,
            })
        }
    }
}
1462
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub(crate) struct BalancingContext<'a, K, V> {
    // Handle on the KV in the parent node around which balancing happens.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // Child hanging off the edge to the left of the parent KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // Child hanging off the edge to the right of the parent KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1470
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Starts a balancing session around this KV, pairing it with both of its
    /// child nodes.
    pub(crate) fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
        // Bitwise-duplicate the handle so the same KV can be kept as `parent`
        // while also descending through its left and right edges.
        let self1 = unsafe { ptr::read(&self) };
        let self2 = unsafe { ptr::read(&self) };
        BalancingContext {
            parent: self,
            left_child: self1.left_edge().descend(),
            right_child: self2.right_edge().descend(),
        }
    }
}
1482
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub(crate) fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `self` is bitwise-duplicated below so it can both ascend to the
        // parent and remain a child in the resulting context (or be returned
        // unchanged in the `Err` case).
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                // There is a KV to the left: `self` becomes the right child.
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                // Leftmost edge: fall back to the KV on the right.
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
1519
impl<'a, K, V> BalancingContext<'a, K, V> {
    /// Returns the number of elements in the left child.
    pub(crate) fn left_child_len(&self) -> usize {
        self.left_child.len()
    }

    /// Returns the number of elements in the right child.
    pub(crate) fn right_child_len(&self) -> usize {
        self.right_child.len()
    }

    /// Consumes the context, yielding the left child node.
    pub(crate) fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.left_child
    }

    /// Consumes the context, yielding the right child node.
    pub(crate) fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.right_child
    }

    /// Returns whether merging is possible, i.e., whether there is enough room
    /// in a node to combine the central KV with both adjacent child nodes.
    pub(crate) fn can_merge(&self) -> bool {
        self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
    }
}
1543
impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    ///
    /// Removes the parent KV at `self.parent`, appends it together with the
    /// entire right child's contents to the left child, deallocates the right
    /// child's node, and passes the shrunk parent node and the grown left
    /// child to `result`.
    ///
    /// Panics unless the combined contents fit (`new_left_len <= CAPACITY`).
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
        A: Allocator,
    >(
        self,
        result: F,
        alloc: &A,
    ) -> R {
        let Handle {
            node: mut parent_node,
            idx: parent_idx,
            _marker,
        } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        // The left node absorbs the parent KV plus everything in the right node.
        let new_left_len = old_left_len + 1 + right_len;

        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // Pull the parent key down; it lands between the elements coming
            // from the two children, then the right child's keys follow.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            // Same dance for the values.
            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Detach the right child's edge from the parent, compact the
            // remaining edges, and fix up their children's parent links.
            slice_remove(
                parent_node.edge_area_mut(..old_parent_len + 1),
                parent_idx + 1,
            );
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }

        result(parent_node, left_node)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the shrunk parent node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(crate) fn merge_tracking_parent<A: Allocator>(
        self,
        alloc: &A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        self.do_merge(|parent, _child| parent, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns that child node.
    ///
    /// Panics unless we `.can_merge()`.
    pub(crate) fn merge_tracking_child<A: Allocator>(
        self,
        alloc: &A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        self.do_merge(|_parent, child| child, alloc)
    }

    /// Merges the parent's key-value pair and both adjacent child nodes into
    /// the left child node and returns the edge handle in that child node
    /// where the tracked child edge ended up.
    ///
    /// Panics unless we `.can_merge()`.
    pub(crate) fn merge_tracking_child_edge<A: Allocator>(
        self,
        track_edge_idx: LeftOrRight<usize>,
        alloc: &A,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        let old_left_len = self.left_child.len();
        let right_len = self.right_child.len();
        // The tracked edge must exist in the child it claims to be in.
        assert!(match track_edge_idx {
            LeftOrRight::Left(idx) => idx <= old_left_len,
            LeftOrRight::Right(idx) => idx <= right_len,
        });
        let child = self.merge_tracking_child(alloc);
        // Left-child edges keep their index; right-child edges are shifted
        // past the old left contents plus the pulled-down parent KV.
        let new_idx = match track_edge_idx {
            LeftOrRight::Left(idx) => idx,
            LeftOrRight::Right(idx) => old_left_len + 1 + idx,
        };
        unsafe { Handle::new_edge(child, new_idx) }
    }

    /// Removes a key-value pair from the left child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair into the right child.
    /// Returns a handle to the edge in the right child corresponding to where the original
    /// edge specified by `track_right_edge_idx` ended up.
    pub(crate) fn steal_left(
        mut self,
        track_right_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_left(1);
        unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
    }

    /// Removes a key-value pair from the right child and places it in the key-value storage
    /// of the parent, while pushing the old parent key-value pair onto the left child.
    /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
    /// which didn't move.
    pub(crate) fn steal_right(
        mut self,
        track_left_edge_idx: usize,
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
        self.bulk_steal_right(1);
        unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
    }

    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    pub(crate) fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                // Only `count - 1` move directly; the left-most stolen pair
                // goes through the parent below.
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the left-most stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            match (
                left_node.reborrow_mut().force(),
                right_node.reborrow_mut().force(),
            ) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Tree invariant: siblings have the same height.
                _ => unreachable!(),
            }
        }
    }

    /// The symmetric clone of `bulk_steal_left`.
    pub(crate) fn bulk_steal_right(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the right-most stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one.
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            match (
                left_node.reborrow_mut().force(),
                right_node.reborrow_mut().force(),
            ) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Tree invariant: siblings have the same height.
                _ => unreachable!(),
            }
        }
    }
}
1820
1821impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1822    pub(crate) fn forget_node_type(
1823        self,
1824    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1825        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1826    }
1827}
1828
1829impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1830    pub(crate) fn forget_node_type(
1831        self,
1832    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1833        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1834    }
1835}
1836
1837impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1838    pub(crate) fn forget_node_type(
1839        self,
1840    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1841        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1842    }
1843}
1844
1845impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
1846    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
1847    pub(crate) fn force(
1848        self,
1849    ) -> ForceResult<
1850        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
1851        Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
1852    > {
1853        match self.node.force() {
1854            ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
1855                node,
1856                idx: self.idx,
1857                _marker: PhantomData,
1858            }),
1859            ForceResult::Internal(node) => ForceResult::Internal(Handle {
1860                node,
1861                idx: self.idx,
1862                _marker: PhantomData,
1863            }),
1864        }
1865    }
1866}
1867
1868impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
1869    /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
1870    pub(crate) unsafe fn cast_to_leaf_unchecked(
1871        self,
1872    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
1873        let node = unsafe { self.node.cast_to_leaf_unchecked() };
1874        Handle {
1875            node,
1876            idx: self.idx,
1877            _marker: PhantomData,
1878        }
1879    }
1880}
1881
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub(crate) fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at index `self.idx` and beyond moves over.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            // Preconditions: destination is empty and both nodes sit at the
            // same height (so their edge areas are compatible).
            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            if new_right_len > 0 {
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // Edges move starting at index 1 so that `right`'s
                        // first edge stays untouched, as documented above.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        // The children that moved now live under `right`.
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Unreachable: equal heights were asserted above.
                    _ => unreachable!(),
                }
            }
        }
    }
}
1927
/// Outcome of resolving a `LeafOrInternal` node (or handle) back into its
/// statically-known kind: either a leaf or an internal node.
pub(crate) enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1932
/// Result of insertion, when a node needed to expand beyond its capacity.
pub(crate) struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub(crate) left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value that existed before and were split off, to be inserted elsewhere.
    pub(crate) kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub(crate) right: NodeRef<marker::Owned, K, V, NodeType>,
}
1942
1943impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1944    pub(crate) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1945        SplitResult {
1946            left: self.left.forget_type(),
1947            kv: self.kv,
1948            right: self.right.forget_type(),
1949        }
1950    }
1951}
1952
1953impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1954    pub(crate) fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1955        SplitResult {
1956            left: self.left.forget_type(),
1957            kv: self.kv,
1958            right: self.right.forget_type(),
1959        }
1960    }
1961}
1962
pub(crate) mod marker {
    use core::marker::PhantomData;

    /// Node-type marker: the node is statically known to be a leaf.
    pub(crate) enum Leaf {}
    /// Node-type marker: the node is statically known to be internal.
    pub(crate) enum Internal {}
    /// Node-type marker: the node's kind is not statically known.
    pub(crate) enum LeafOrInternal {}

    /// Borrow-type marker: the tree/node is owned (always a root; see below).
    pub(crate) enum Owned {}
    /// Borrow-type marker — NOTE(review): presumably used while the tree is
    /// being deallocated; confirm at use sites outside this chunk.
    pub(crate) enum Dying {}
    /// Borrow-type marker — NOTE(review): presumably a temporarily inactive
    /// mutable borrow; confirm at use sites outside this chunk.
    pub(crate) enum DormantMut {}
    /// Borrow-type marker behaving like a shared reference `&'a`.
    pub(crate) struct Immut<'a>(PhantomData<&'a ()>);
    /// Borrow-type marker behaving like an exclusive reference `&'a mut`.
    pub(crate) struct Mut<'a>(PhantomData<&'a mut ()>);
    /// Borrow-type marker — NOTE(review): presumably exclusive access to
    /// values only; confirm at use sites outside this chunk.
    pub(crate) struct ValMut<'a>(PhantomData<&'a mut ()>);
    /// Borrow-type marker for raw, lifetime-less access.
    pub(crate) struct Raw;

    pub(crate) trait BorrowType {
        /// If node references of this borrow type allow traversing to other
        /// nodes in the tree, this constant is set to `true`. It can be used
        /// for a compile-time assertion.
        const TRAVERSAL_PERMIT: bool = true;
    }
    impl BorrowType for Owned {
        /// Reject traversal, because it isn't needed. Instead traversal
        /// happens using the result of `borrow_mut`.
        /// By disabling traversal, and only creating new references to roots,
        /// we know that every reference of the `Owned` type is to a root node.
        const TRAVERSAL_PERMIT: bool = false;
    }
    impl BorrowType for Dying {}
    impl BorrowType for Immut<'_> {}
    impl BorrowType for Raw {}
    impl BorrowType for Mut<'_> {}
    impl BorrowType for ValMut<'_> {}
    impl BorrowType for DormantMut {}

    /// Handle-type marker: the handle addresses a key-value pair.
    pub(crate) enum KV {}
    /// Handle-type marker: the handle addresses an edge (child position).
    pub(crate) enum Edge {}
}
2001
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    unsafe {
        let len = slice.len();
        debug_assert!(len > idx);
        let base = slice.as_mut_ptr();
        // Shift the tail one step right to open a gap at `idx`; no-op when
        // inserting into the final (uninitialized) slot.
        if idx + 1 < len {
            ptr::copy(base.add(idx), base.add(idx + 1), len - idx - 1);
        }
        // Fill the gap; the previous occupant was either moved or never
        // initialized, so nothing is dropped here.
        base.add(idx).write(MaybeUninit::new(val));
    }
}
2017
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// # Safety
/// The slice has more than `idx` elements.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    unsafe {
        let len = slice.len();
        debug_assert!(idx < len);
        let base = slice.as_mut_ptr();
        // Read the element out before the shift below overwrites it.
        let removed = base.add(idx).read().assume_init();
        // Close the gap by shifting the tail one step left; the last slot is
        // left logically uninitialized.
        ptr::copy(base.add(idx + 1), base.add(idx), len - idx - 1);
        removed
    }
}
2033
/// Shifts the elements in a slice `distance` positions to the left.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let len = slice.len();
        // Check the documented precondition in debug builds, consistent with
        // `slice_insert`/`slice_remove`; otherwise `len - distance` would
        // silently wrap on a contract violation.
        debug_assert!(distance <= len);
        let slice_ptr = slice.as_mut_ptr();
        ptr::copy(slice_ptr.add(distance), slice_ptr, len - distance);
    }
}
2044
/// Shifts the elements in a slice `distance` positions to the right.
///
/// # Safety
/// The slice has at least `distance` elements.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let len = slice.len();
        // Check the documented precondition in debug builds, consistent with
        // `slice_insert`/`slice_remove`; otherwise `len - distance` would
        // silently wrap on a contract violation.
        debug_assert!(distance <= len);
        let slice_ptr = slice.as_mut_ptr();
        ptr::copy(slice_ptr, slice_ptr.add(distance), len - distance);
    }
}
2055
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
fn move_to_slice<T>(src: &[MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    assert!(src.len() == dst.len());
    let (from, to) = (src.as_ptr(), dst.as_mut_ptr());
    // SAFETY: lengths are equal (asserted above) and the two slices cannot
    // overlap, since `dst` is an exclusive borrow.
    unsafe { ptr::copy_nonoverlapping(from, to, src.len()) };
}
2065
2066#[cfg(test)]
2067mod tests;