#[cfg(test)]
mod tests;

use core::cell::UnsafeCell;
use core::marker::PhantomData;
use core::mem::{align_of, forget, replace, size_of, MaybeUninit};
use core::num::NonZeroU16;
use core::ops::{Deref, DerefMut};
use core::ptr;

use super::{Alloc, AllocError, Allocator, SliceBuffer};

const MAX_BYTES: usize = i32::MAX as usize;

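/// An allocator backed by a fixed, caller-provided byte buffer.
///
/// Region data is carved out from the start of the buffer, while region
/// headers are stacked downwards from its (header-aligned) end; allocations
/// fail with `AllocError` once the two meet.
///
/// A minimal usage sketch. The buffer type below is an assumption for
/// illustration; any type implementing `SliceBuffer` will do:
///
/// ```ignore
/// use core::mem::MaybeUninit;
///
/// // Hypothetical buffer; substitute whatever `SliceBuffer` impl the crate exposes.
/// let mut buffer = [MaybeUninit::<u8>::uninit(); 1024];
/// let slice = Slice::new(&mut buffer);
///
/// // `Allocator` is implemented for `&Slice`, so allocations borrow it.
/// let value = (&slice).alloc(42u32).expect("buffer has room");
/// assert_eq!(unsafe { *value.as_ptr() }, 42);
/// ```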
pub struct Slice<'a> {
    internal: UnsafeCell<Internal>,
    _marker: PhantomData<&'a mut [MaybeUninit<u8>]>,
}

impl<'a> Slice<'a> {
    pub fn new<B>(buffer: &'a mut B) -> Self
    where
        B: ?Sized + SliceBuffer,
    {
        let buffer = buffer.as_uninit_bytes();
        let size = buffer.len();

        assert!(
            size <= MAX_BYTES,
            "Buffer of {size} bytes is larger than the maximum {MAX_BYTES}"
        );

        let mut data = Range::new(buffer.as_mut_ptr_range());

        // Align the end of the buffer downwards so that headers written at
        // the end are always well-aligned.
        let align = data.end.align_offset(align_of::<Header>());

        if align != 0 {
            let sub = align_of::<Header>() - align;

            if sub <= size {
                // SAFETY: The subtraction is in bounds of the buffer.
                unsafe {
                    data.end = data.end.sub(sub);
                }
            } else {
                // The buffer is too small to align; treat it as empty.
                data.end = data.start;
            }
        }

        Self {
            internal: UnsafeCell::new(Internal {
                free_head: None,
                tail: None,
                occupied: None,
                full: data,
                free: data,
            }),
            _marker: PhantomData,
        }
    }
}

unsafe impl<'a> Allocator for &'a Slice<'_> {
    const IS_SYSTEM: bool = false;

    type Alloc<T> = SliceAlloc<'a, T>;

    #[inline]
    fn alloc<T>(self, value: T) -> Result<Self::Alloc<T>, AllocError> {
        if size_of::<T>() == 0 {
            return Ok(SliceAlloc::ZST);
        }

        let region = unsafe {
            let i = &mut *self.internal.get();
            let region = i.alloc(size_of::<T>(), align_of::<T>());

            let Some(region) = region else {
                return Err(AllocError);
            };

            region.range.start.cast::<T>().write(value);
            Some(region.id)
        };

        Ok(SliceAlloc {
            region,
            internal: Some(&self.internal),
            cap: 1,
            _marker: PhantomData,
        })
    }

    #[inline]
    fn alloc_empty<T>(self) -> Self::Alloc<T> {
        if size_of::<T>() == 0 {
            return SliceAlloc::ZST;
        }

        let region = unsafe {
            (*self.internal.get())
                .alloc(0, align_of::<T>())
                .map(|r| r.id)
        };

        SliceAlloc {
            region,
            internal: Some(&self.internal),
            cap: 0,
            _marker: PhantomData,
        }
    }
}

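/// A single allocation handed out by a `Slice` allocator.
///
/// It records which region backs the allocation and borrows the allocator's
/// internal state so the region can grow, merge, and be freed on drop.
///
/// A rough sketch of how a handle is used through the `Alloc` trait; the
/// surrounding setup is illustrative only and mirrors the example on `Slice`:
///
/// ```ignore
/// let mut alloc = (&slice).alloc_empty::<u32>();
///
/// // Make room for four values, then write through the raw pointer.
/// alloc.resize(0, 4).expect("buffer has room");
/// unsafe { alloc.as_mut_ptr().write(42) };
/// ```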
pub struct SliceAlloc<'a, T> {
    /// The region backing this allocation, if any.
    region: Option<HeaderId>,
    /// The internal state of the allocator this allocation was made from.
    internal: Option<&'a UnsafeCell<Internal>>,
    /// The capacity of the allocation, in number of `T`s.
    cap: usize,
    _marker: PhantomData<T>,
}

impl<T> SliceAlloc<'_, T> {
    /// Sentinel allocation used for zero-sized types, which need no backing
    /// region.
    const ZST: Self = Self {
        region: None,
        internal: None,
        cap: usize::MAX,
        _marker: PhantomData,
    };
}

impl<T> SliceAlloc<'_, T> {
    #[inline]
    fn free(&mut self) {
        let (Some(region), Some(internal)) = (self.region.take(), self.internal) else {
            return;
        };

        unsafe {
            (*internal.get()).free(region);
        }
    }
}

impl<T> Alloc<T> for SliceAlloc<'_, T> {
    #[inline]
    fn as_ptr(&self) -> *const T {
        let (Some(region), Some(internal)) = (self.region, self.internal) else {
            return ptr::NonNull::dangling().as_ptr();
        };

        unsafe {
            let i = &*internal.get();
            let this = i.header(region);
            this.range.start.cast_const().cast()
        }
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T {
        let (Some(region), Some(internal)) = (self.region, self.internal) else {
            return ptr::NonNull::dangling().as_ptr();
        };

        unsafe {
            let i = &*internal.get();
            let this = i.header(region);
            this.range.start.cast()
        }
    }

    #[inline]
    fn capacity(&self) -> usize {
        let Some(internal) = self.internal else {
            return usize::MAX;
        };

        let Some(region_id) = self.region else {
            return 0;
        };

        unsafe {
            let i = &mut *internal.get();
            i.region(region_id).capacity()
        }
    }

    #[inline]
    fn resize(&mut self, len: usize, additional: usize) -> Result<(), AllocError> {
        // Nothing to do if the requested capacity already fits (for ZSTs
        // `cap` is `usize::MAX`).
        if len + additional <= self.cap {
            return Ok(());
        }

        let Some(internal) = self.internal else {
            debug_assert_eq!(
                size_of::<T>(),
                0,
                "Only ZSTs should lack an internal pointer"
            );
            return Ok(());
        };

        let Some(region_id) = self.region else {
            return Err(AllocError);
        };

        let Some(len) = len.checked_mul(size_of::<T>()) else {
            return Err(AllocError);
        };

        let Some(additional) = additional.checked_mul(size_of::<T>()) else {
            return Err(AllocError);
        };

        let Some(requested) = len.checked_add(additional) else {
            return Err(AllocError);
        };

        if requested > MAX_BYTES {
            return Err(AllocError);
        }

        unsafe {
            let i = &mut *internal.get();

            let region = i.region(region_id);

            let actual = region.capacity();

            // The region already has enough bytes; just refresh the cached
            // capacity in units of `T`.
            if actual >= requested {
                self.cap = actual / size_of::<T>();
                return Ok(());
            };

            let Some(region) = i.realloc(region_id, len, requested, align_of::<T>()) else {
                return Err(AllocError);
            };

            self.region = Some(region.id);
            self.cap = region.capacity() / size_of::<T>();
            Ok(())
        }
    }

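    // A sketch of what a successful merge does (illustrative, not a doctest):
    // given two back-to-back allocations `a` and `b` from the same `Slice`,
    // `a.try_merge(a_len, b, b_len)` succeeds only if `b`'s storage starts
    // exactly where `a`'s region ends. `b`'s initialized bytes are then moved
    // down to directly follow `a`'s first `a_len` elements and `b`'s header is
    // recycled. On failure, `b` is handed back unchanged as the error value.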
    #[inline]
    fn try_merge<B>(&mut self, this_len: usize, buf: B, other_len: usize) -> Result<(), B>
    where
        B: Alloc<T>,
    {
        let Some(internal) = self.internal else {
            return Ok(());
        };

        let Some(region) = self.region else {
            return Err(buf);
        };

        let this_len = this_len * size_of::<T>();
        let other_len = other_len * size_of::<T>();

        let other_ptr = buf.as_ptr().cast();

        unsafe {
            let i = &mut *internal.get();
            let mut this = i.region(region);

            debug_assert!(this.capacity() >= this_len);

            // Only regions that are physically adjacent in the buffer can be
            // merged.
            if !ptr::eq(this.range.end.cast_const(), other_ptr) {
                return Err(buf);
            }

            let Some(next) = this.next else {
                return Err(buf);
            };

            // We take over `buf`'s storage, so it must not be freed again.
            forget(buf);

            let next = i.region(next);

            let to = this.range.start.wrapping_add(this_len);

            // Move the other allocation's initialized bytes down so they
            // directly follow this allocation's initialized prefix.
            if this.range.end != to {
                this.range.end.copy_to(to, other_len);
            }

            let old = i.free_region(next);
            this.range.end = old.range.end;
            Ok(())
        }
    }
}

impl<T> Drop for SliceAlloc<'_, T> {
    #[inline]
    fn drop(&mut self) {
        self.free()
    }
}

/// A handle to a region: its identifier paired with a raw pointer to its
/// header, dereferencing to the `Header` itself.
struct Region {
    id: HeaderId,
    ptr: *mut Header,
}

impl Deref for Region {
    type Target = Header;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { &*self.ptr }
    }
}

impl DerefMut for Region {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.ptr }
    }
}

/// The one-based identity of a header. Header `n` lives `n` headers below the
/// header-aligned end of the buffer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(test, derive(PartialOrd, Ord, Hash))]
#[repr(transparent)]
struct HeaderId(NonZeroU16);

impl HeaderId {
    #[cfg(test)]
    const unsafe fn new_unchecked(value: u16) -> Self {
        Self(NonZeroU16::new_unchecked(value))
    }

    #[inline]
    fn new(value: isize) -> Option<Self> {
        Some(Self(NonZeroU16::new(u16::try_from(value).ok()?)?))
    }

    #[inline]
    fn get(self) -> usize {
        self.0.get() as usize
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Range {
    start: *mut MaybeUninit<u8>,
    end: *mut MaybeUninit<u8>,
}

impl Range {
    fn new(range: core::ops::Range<*mut MaybeUninit<u8>>) -> Self {
        Self {
            start: range.start,
            end: range.end,
        }
    }

    /// An empty range positioned at the start of this one.
    fn head(self) -> Range {
        Self {
            start: self.start,
            end: self.start,
        }
    }
}

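/// Book-keeping state for the slice allocator.
///
/// The backing buffer is consumed from both ends: region bytes are carved out
/// from the start (advancing `free.start`), while `Header` records are stacked
/// downwards from the header-aligned end (retreating `free.end`). The gap in
/// between is what is still free:
///
/// ```text
/// full.start          free.start             free.end        full.end
/// |  region bytes →    |       free gap       |   ← headers   |
/// ```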
struct Internal {
    /// The head of the free list of recycled headers, linked through their
    /// `next` fields.
    free_head: Option<HeaderId>,
    /// The last region in the list of allocated regions.
    tail: Option<HeaderId>,
    /// A region that has been freed but has no previous neighbor to merge
    /// into; a later allocation may reuse it if size and alignment fit.
    occupied: Option<HeaderId>,
    /// The full (header-aligned) range of the backing buffer.
    full: Range,
    /// The part of the buffer used neither by region bytes nor by headers.
    free: Range,
}

impl Internal {
    /// The number of bytes currently used for region data.
    #[cfg(test)]
    #[inline]
    fn bytes(&self) -> usize {
        unsafe { self.free.start.byte_offset_from(self.full.start) as usize }
    }

    /// The number of headers currently written at the end of the buffer.
    #[cfg(test)]
    #[inline]
    fn headers(&self) -> usize {
        unsafe {
            self.full
                .end
                .cast::<Header>()
                .offset_from(self.free.end.cast()) as usize
        }
    }

    /// The number of free bytes remaining between region data and headers.
    #[inline]
    fn remaining(&self) -> usize {
        unsafe { self.free.end.byte_offset_from(self.free.start) as usize }
    }

    /// Access the header with the given id.
    #[inline]
    fn header(&self, at: HeaderId) -> &Header {
        unsafe { &*self.full.end.cast::<Header>().wrapping_sub(at.get()) }
    }

    /// Access a mutable pointer to the header with the given id.
    #[inline]
    fn header_mut(&mut self, at: HeaderId) -> *mut Header {
        self.full.end.cast::<Header>().wrapping_sub(at.get())
    }

    /// Build a `Region` handle for the header with the given id.
    #[inline]
    fn region(&mut self, id: HeaderId) -> Region {
        Region {
            id,
            ptr: self.header_mut(id),
        }
    }

    /// Unlink a header from the list of allocated regions.
    unsafe fn unlink(&mut self, header: &Header) {
        if let Some(next) = header.next {
            (*self.header_mut(next)).prev = header.prev;
        } else {
            self.tail = header.prev;
        }

        if let Some(prev) = header.prev {
            (*self.header_mut(prev)).next = header.next;
        }
    }

    /// Unlink a region from its current position and re-link it at the back
    /// of the list.
    unsafe fn replace_back(&mut self, region: &mut Region) {
        let prev = region.prev.take();
        let next = region.next.take();

        if let Some(prev) = prev {
            (*self.header_mut(prev)).next = next;
        }

        if let Some(next) = next {
            (*self.header_mut(next)).prev = prev;
        }

        self.push_back(region);
    }

    /// Link a region as the new tail of the list of allocated regions.
    unsafe fn push_back(&mut self, region: &mut Region) {
        if let Some(tail) = self.tail.replace(region.id) {
            region.prev = Some(tail);
            (*self.header_mut(tail)).next = Some(region.id);
        }
    }

    /// Free a region: unlink it and push its header onto the free list,
    /// returning the old header so the caller can reclaim its byte range.
    unsafe fn free_region(&mut self, region: Region) -> Header {
        self.unlink(&region);

        region.ptr.replace(Header {
            range: self.full.head(),
            next: self.free_head.replace(region.id),
            prev: None,
        })
    }

    /// Allocate a header for the region spanning `self.free.start..end`,
    /// reusing a recycled header if one is available.
    unsafe fn alloc_header(&mut self, end: *mut MaybeUninit<u8>) -> Option<Region> {
        if let Some(region) = self.free_head.take() {
            let mut region = self.region(region);

            region.range.start = self.free.start;
            region.range.end = end;

            return Some(region);
        }

        debug_assert_eq!(
            self.free.end.align_offset(align_of::<Header>()),
            0,
            "End pointer should be aligned to header"
        );

        // Carve a new header out of the end of the free range.
        let ptr = self.free.end.cast::<Header>().wrapping_sub(1);

        if ptr < self.free.start.cast() || ptr >= self.free.end.cast() {
            return None;
        }

        let id = HeaderId::new(self.full.end.cast::<Header>().offset_from(ptr))?;

        ptr.write(Header {
            range: Range::new(self.free.start..end),
            prev: None,
            next: None,
        });

        self.free.end = ptr.cast();
        Some(Region { id, ptr })
    }

    /// Allocate `requested` bytes aligned to `align`, either by reusing the
    /// occupied region or by carving a new region out of the free range.
    unsafe fn alloc(&mut self, requested: usize, align: usize) -> Option<Region> {
        if let Some(occupied) = self.occupied {
            let region = self.region(occupied);

            if region.capacity() >= requested && region.is_aligned(align) {
                self.occupied = None;
                return Some(region);
            }
        }

        self.align(align)?;

        if self.remaining() < requested {
            return None;
        }

        let end = self.free.start.wrapping_add(requested);

        let mut region = self.alloc_header(end)?;

        self.free.start = end;
        debug_assert!(self.free.start <= self.free.end);
        self.push_back(&mut region);
        Some(region)
    }

    /// Align the start of the free range to `align`, charging the padding to
    /// the current tail region (or to a fresh padding region if there is
    /// none).
    unsafe fn align(&mut self, align: usize) -> Option<()> {
        let align = self.free.start.align_offset(align);

        if align == 0 {
            return Some(());
        }

        if self.remaining() < align {
            return None;
        }

        let aligned_start = self.free.start.wrapping_add(align);

        if let Some(tail) = self.tail {
            self.region(tail).range.end = aligned_start;
        } else {
            let mut region = self.alloc_header(aligned_start)?;

            self.push_back(&mut region);
        }

        self.free.start = aligned_start;
        Some(())
    }

    /// Free the region with the given id, merging its bytes into a neighbor
    /// or handing them back to the free range where possible.
    unsafe fn free(&mut self, region: HeaderId) {
        let region = self.region(region);

        // The tail region can hand its bytes straight back to the free range.
        if region.next.is_none() {
            self.free_tail(region);
            return;
        }

        // A region without a previous neighbor cannot be merged; park it as
        // the occupied region so it can be reused.
        let Some(prev) = region.prev else {
            debug_assert!(
                self.occupied.is_none(),
                "There can only be one occupied region"
            );

            self.occupied = Some(region.id);
            return;
        };

        // Otherwise merge the freed bytes into the previous region.
        let mut prev = self.region(prev);

        let region = self.free_region(region);

        prev.range.end = region.range.end;

        if region.next.is_none() {
            self.free.start = region.range.start;
        }
    }

    /// Free the current tail region, along with the occupied region if it
    /// sits immediately before it.
    unsafe fn free_tail(&mut self, current: Region) {
        debug_assert_eq!(self.tail, Some(current.id));

        let current = self.free_region(current);
        debug_assert_eq!(current.next, None);

        self.free.start = match current.prev {
            Some(prev) if self.occupied == Some(prev) => {
                self.occupied = None;
                let prev = self.region(prev);
                self.free_region(prev).range.start
            }
            _ => current.range.start,
        };
    }

    /// Reserve `additional` free bytes at alignment `align`, returning the new
    /// start of the free range without committing it.
    unsafe fn reserve(&mut self, additional: usize, align: usize) -> Option<*mut MaybeUninit<u8>> {
        self.align(align)?;

        let free_start = self.free.start.wrapping_add(additional);

        if free_start > self.free.end || free_start < self.free.start {
            return None;
        }

        Some(free_start)
    }

    /// Reallocate the region `from` so it can hold `requested` bytes, moving
    /// its first `len` initialized bytes if the region has to be relocated.
    unsafe fn realloc(
        &mut self,
        from: HeaderId,
        len: usize,
        requested: usize,
        align: usize,
    ) -> Option<Region> {
        let mut from = self.region(from);

        // The tail region can grow in place by claiming more of the free
        // range.
        if from.next.is_none() {
            let additional = requested - from.capacity();
            self.free.start = self.reserve(additional, align)?;
            from.range.end = from.range.end.add(additional);
            return Some(from);
        }

        // An empty region can simply be relocated to the start of the free
        // range.
        if from.range.start == from.range.end {
            let free_start = self.reserve(requested, align)?;
            from.range.start = replace(&mut self.free.start, free_start);
            from.range.end = free_start;
            self.replace_back(&mut from);
            return Some(from);
        }

        // If the region immediately before `from` is the occupied region and
        // their combined capacity is sufficient, merge into it instead of
        // allocating anew.
        'bail: {
            let Some(prev) = from.prev else {
                break 'bail;
            };

            if self.occupied != Some(prev) {
                break 'bail;
            }

            let mut prev = self.region(prev);

            if prev.capacity() + from.capacity() < requested {
                break 'bail;
            }

            if !prev.is_aligned(align) {
                break 'bail;
            }

            let from = self.free_region(from);

            from.range.start.copy_to(prev.range.start, len);
            prev.range.end = from.range.end;
            self.occupied = None;
            return Some(prev);
        }

        // Fall back to allocating a new region and copying the data over.
        let to = self.alloc(requested, align)?;
        from.range.start.copy_to_nonoverlapping(to.range.start, len);
        self.free(from.id);
        Some(to)
    }
}

/// The header describing a single region of the buffer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Header {
    /// The range of bytes covered by this region.
    range: Range,
    /// The previous neighboring region.
    prev: Option<HeaderId>,
    /// The next neighboring region.
    next: Option<HeaderId>,
}

impl Header {
    /// The capacity of the region in bytes.
    #[inline]
    fn capacity(&self) -> usize {
        unsafe { self.range.end.byte_offset_from(self.range.start) as usize }
    }

    /// Test if the start of the region is aligned to `align`.
    #[inline]
    fn is_aligned(&self, align: usize) -> bool {
        self.range.start.align_offset(align) == 0
    }
}