1#[cfg(test)]
2mod tests;
3
4use core::cell::UnsafeCell;
5use core::marker::PhantomData;
6use core::mem::{align_of, forget, replace, size_of, MaybeUninit};
7use core::num::NonZeroU16;
8use core::ops::{Deref, DerefMut};
9use core::ptr;
10
11use super::{Allocator, RawVec};
12
/// The maximum total buffer size (in bytes) a [`Slice`] allocator accepts;
/// enforced by the assertion in [`Slice::new`] and the cap in `resize`.
const MAX_BYTES: usize = i32::MAX as usize;
16
/// An allocator that serves allocations out of a caller-provided slice of
/// (possibly uninitialized) bytes.
///
/// All bookkeeping lives in `Internal` behind an `UnsafeCell` so that
/// allocations can be made through a shared reference to the allocator.
pub struct Slice<'a> {
    internal: UnsafeCell<Internal>,
    // Ties the allocator's lifetime to the borrowed buffer without storing the
    // slice itself (the raw pointers live inside `Internal`).
    _marker: PhantomData<&'a mut [MaybeUninit<u8>]>,
}
120
impl<'a> Slice<'a> {
    /// Construct a new slice allocator backed by `buffer`.
    ///
    /// `Header` records are carved off the *end* of the buffer, so the end
    /// pointer is rounded down here to `Header`'s alignment.
    ///
    /// # Panics
    ///
    /// Panics if `buffer` is larger than [`MAX_BYTES`].
    pub fn new(buffer: &'a mut [MaybeUninit<u8>]) -> Self {
        let size = buffer.len();
        assert!(
            size <= MAX_BYTES,
            "Buffer of {size} bytes is larger than the maximum {MAX_BYTES}"
        );

        let mut data = Range::new(buffer.as_mut_ptr_range());

        // `align_offset` reports how far the pointer would have to move
        // *forward* to be aligned; to round the end pointer *down* we
        // subtract the complement instead.
        let align = data.end.align_offset(align_of::<Header>());

        if align != 0 {
            let sub = align_of::<Header>() - align;

            if sub <= size {
                // SAFETY: `sub <= size`, so the adjusted end pointer still
                // lies within the provided buffer.
                unsafe {
                    data.end = data.end.sub(sub);
                }
            } else {
                // The buffer is too small to hold even one aligned header;
                // collapse the usable range to empty.
                data.end = data.start;
            }
        }

        // Initially the whole (aligned) range is free; `full` and `free`
        // start out identical and `free` shrinks from both ends over time.
        Self {
            internal: UnsafeCell::new(Internal {
                free_head: None,
                tail: None,
                occupied: None,
                full: data,
                free: data,
            }),
            _marker: PhantomData,
        }
    }
}
170
impl Allocator for Slice<'_> {
    type RawVec<'this, T> = SliceBuf<'this, T> where Self: 'this, T: 'this;

    #[inline]
    fn new_raw_vec<'a, T>(&'a self) -> Self::RawVec<'a, T>
    where
        T: 'a,
    {
        // Zero-sized types never need backing storage, so no region is
        // reserved for them.
        let region = if size_of::<T>() == 0 {
            None
        } else {
            // SAFETY: `internal` is never handed out, and `UnsafeCell` makes
            // the allocator `!Sync`, so this is the only live reference for
            // the duration of the call.
            unsafe {
                (*self.internal.get())
                    .alloc(0, align_of::<T>())
                    .map(|r| r.id)
            }
        };

        // `region` may also be `None` when even an empty, aligned
        // reservation could not be made.
        SliceBuf {
            region,
            internal: &self.internal,
            _marker: PhantomData,
        }
    }
}
198
/// A raw vector handle issued by a [`Slice`] allocator.
pub struct SliceBuf<'a, T> {
    // Identifier of the backing region; `None` for zero-sized `T` or when
    // the initial reservation failed.
    region: Option<HeaderId>,
    // Shared bookkeeping of the owning allocator.
    internal: &'a UnsafeCell<Internal>,
    // Logical element type; no `T` values are stored in this handle itself.
    _marker: PhantomData<T>,
}
207
impl<'a, T> RawVec<T> for SliceBuf<'a, T> {
    /// Ensure capacity for `len + additional` elements, returning `false` on
    /// failure. `len` is the number of initialized elements that must be
    /// preserved if the region has to be relocated.
    #[inline]
    fn resize(&mut self, len: usize, additional: usize) -> bool {
        // Nothing to do for a no-op grow or for zero-sized element types.
        if additional == 0 || size_of::<T>() == 0 {
            return true;
        }

        let Some(region_id) = self.region else {
            return false;
        };

        // Convert element counts to byte counts, failing out on overflow.
        let Some(len) = len.checked_mul(size_of::<T>()) else {
            return false;
        };

        let Some(additional) = additional.checked_mul(size_of::<T>()) else {
            return false;
        };

        let Some(requested) = len.checked_add(additional) else {
            return false;
        };

        if requested > MAX_BYTES {
            return false;
        }

        // SAFETY: `internal` is only ever accessed through this cell and the
        // cell type makes the allocator `!Sync`, so this is the only live
        // reference here.
        unsafe {
            let i = &mut *self.internal.get();

            let region = i.region(region_id);

            // Existing capacity already covers the request.
            if region.capacity() >= requested {
                return true;
            };

            // `realloc` may relocate the region; the first `len` bytes of
            // contents are preserved.
            let Some(region) = i.realloc(region_id, len, requested, align_of::<T>()) else {
                return false;
            };

            self.region = Some(region.id);
            true
        }
    }

    #[inline]
    fn as_ptr(&self) -> *const T {
        // A dangling (non-null, aligned) pointer is the conventional
        // placeholder when no storage is allocated.
        let Some(region) = self.region else {
            return ptr::NonNull::dangling().as_ptr();
        };

        unsafe {
            let i = &*self.internal.get();
            let this = i.header(region);
            this.range.start.cast_const().cast()
        }
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T {
        let Some(region) = self.region else {
            return ptr::NonNull::dangling().as_ptr();
        };

        unsafe {
            let i = &*self.internal.get();
            let this = i.header(region);
            this.range.start.cast()
        }
    }

    /// Try to absorb `buf`'s storage into this vector without allocating.
    /// Succeeds only when `buf`'s data starts exactly at this region's end
    /// and that adjacent storage is tracked as our `next` region; otherwise
    /// `buf` is handed back unchanged in `Err`.
    #[inline]
    fn try_merge<B>(&mut self, this_len: usize, buf: B, other_len: usize) -> Result<(), B>
    where
        B: RawVec<T>,
    {
        let Some(region) = self.region else {
            return Err(buf);
        };

        // Element counts to byte counts.
        let this_len = this_len * size_of::<T>();
        let other_len = other_len * size_of::<T>();

        let other_ptr = buf.as_ptr().cast();

        unsafe {
            let i = &mut *self.internal.get();
            let mut this = i.region(region);

            debug_assert!(this.capacity() >= this_len);

            // `buf` must begin exactly where this region's capacity ends.
            if !ptr::eq(this.range.end.cast_const(), other_ptr) {
                return Err(buf);
            }

            // And that adjacent storage must be our linked neighbor.
            let Some(next) = this.next else {
                return Err(buf);
            };

            // We take over the neighbor's storage below; `buf` must not also
            // free it on drop.
            forget(buf);

            let next = i.region(next);

            let to = this.range.start.wrapping_add(this_len);

            // If this region has spare capacity beyond `this_len`, slide the
            // incoming bytes down so the elements stay contiguous. The source
            // and destination may overlap, hence `copy_to` (memmove).
            if this.range.end != to {
                this.range.end.copy_to(to, other_len);
            }

            // Recycle the neighbor's header and absorb its byte range.
            let old = i.free_region(next);
            this.range.end = old.range.end;
            Ok(())
        }
    }
}
336
337impl<T> Drop for SliceBuf<'_, T> {
338 fn drop(&mut self) {
339 if let Some(region) = self.region.take() {
340 unsafe {
342 (*self.internal.get()).free(region);
343 }
344 }
345 }
346}
347
/// A `HeaderId` paired with the raw pointer to its `Header`, so callers can
/// both link regions by id and read/write header fields (via `Deref`).
struct Region {
    id: HeaderId,
    ptr: *mut Header,
}
352
impl Deref for Region {
    type Target = Header;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: `ptr` is produced by `Internal::region`/`alloc_header` and
        // points at a live header slot inside the slab for as long as this
        // `Region` is in use.
        unsafe { &*self.ptr }
    }
}
363
impl DerefMut for Region {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same validity argument as `Deref`; exclusive access follows
        // from holding `&mut self`.
        unsafe { &mut *self.ptr }
    }
}
372
/// Identifier of a region header: a 1-based index counted backwards from the
/// end of the backing buffer (see `Internal::header`).
///
/// Backed by `NonZeroU16` so that `Option<HeaderId>` link fields occupy no
/// extra space.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(test, derive(PartialOrd, Ord, Hash))]
#[repr(transparent)]
struct HeaderId(NonZeroU16);

impl HeaderId {
    /// Construct an id without checking it is non-zero (tests only).
    #[cfg(test)]
    const unsafe fn new_unchecked(value: u16) -> Self {
        Self(NonZeroU16::new_unchecked(value))
    }

    /// Construct an id from a pointer offset, returning `None` when the
    /// offset is negative, zero, or does not fit in a `u16`.
    #[inline]
    fn new(value: isize) -> Option<Self> {
        let narrowed = u16::try_from(value).ok()?;
        NonZeroU16::new(narrowed).map(Self)
    }

    /// The id as a plain index.
    #[inline]
    fn get(self) -> usize {
        usize::from(self.0.get())
    }
}
401
/// A `start..end` pointer pair into the backing buffer.
///
/// Defined locally (rather than using `core::ops::Range<*mut _>`) because
/// this type needs to be `Copy`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Range {
    start: *mut MaybeUninit<u8>,
    end: *mut MaybeUninit<u8>,
}

impl Range {
    /// Convert a standard pointer range into the local `Copy`-able form.
    fn new(range: core::ops::Range<*mut MaybeUninit<u8>>) -> Self {
        let core::ops::Range { start, end } = range;
        Self { start, end }
    }

    /// The empty range positioned at this range's start.
    fn head(self) -> Range {
        let start = self.start;
        Range { start, end: start }
    }
}
423
/// Bookkeeping state for the slice allocator.
///
/// Payload bytes are allocated upwards from `free.start` while `Header`
/// records are allocated downwards from `free.end`; the buffer is exhausted
/// when the two meet.
struct Internal {
    // Head of the intrusive free list of recycled headers, chained through
    // `Header::next`.
    free_head: Option<HeaderId>,
    // The last region in address order, i.e. the one bordering `free.start`.
    tail: Option<HeaderId>,
    // A freed leading region kept for reuse (it has no predecessor to merge
    // into); at most one region is cached here.
    occupied: Option<HeaderId>,
    // The full, header-aligned range handed over at construction time.
    full: Range,
    // The still-unclaimed middle portion of `full`.
    free: Range,
}
436
437impl Internal {
438 #[cfg(test)]
440 #[inline]
441 fn bytes(&self) -> usize {
442 unsafe { self.free.start.byte_offset_from(self.full.start) as usize }
444 }
445
446 #[cfg(test)]
447 #[inline]
448 fn headers(&self) -> usize {
449 unsafe {
450 self.full
451 .end
452 .cast::<Header>()
453 .offset_from(self.free.end.cast()) as usize
454 }
455 }
456
457 #[inline]
459 fn remaining(&self) -> usize {
460 unsafe { self.free.end.byte_offset_from(self.free.start) as usize }
462 }
463
464 #[inline]
466 fn header(&self, at: HeaderId) -> &Header {
467 unsafe { &*self.full.end.cast::<Header>().wrapping_sub(at.get()) }
470 }
471
472 #[inline]
474 fn header_mut(&mut self, at: HeaderId) -> *mut Header {
475 self.full.end.cast::<Header>().wrapping_sub(at.get())
476 }
477
478 #[inline]
480 fn region(&mut self, id: HeaderId) -> Region {
481 Region {
482 id,
483 ptr: self.header_mut(id),
484 }
485 }
486
487 unsafe fn unlink(&mut self, header: &Header) {
488 if let Some(next) = header.next {
489 (*self.header_mut(next)).prev = header.prev;
490 } else {
491 self.tail = header.prev;
492 }
493
494 if let Some(prev) = header.prev {
495 (*self.header_mut(prev)).next = header.next;
496 }
497 }
498
499 unsafe fn replace_back(&mut self, region: &mut Region) {
500 let prev = region.prev.take();
501 let next = region.next.take();
502
503 if let Some(prev) = prev {
504 (*self.header_mut(prev)).next = next;
505 }
506
507 if let Some(next) = next {
508 (*self.header_mut(next)).prev = prev;
509 }
510
511 self.push_back(region);
512 }
513
514 unsafe fn push_back(&mut self, region: &mut Region) {
515 if let Some(tail) = self.tail.replace(region.id) {
516 region.prev = Some(tail);
517 (*self.header_mut(tail)).next = Some(region.id);
518 }
519 }
520
521 unsafe fn free_region(&mut self, region: Region) -> Header {
523 self.unlink(®ion);
524
525 region.ptr.replace(Header {
526 range: self.full.head(),
527 next: self.free_head.replace(region.id),
528 prev: None,
529 })
530 }
531
532 unsafe fn alloc_header(&mut self, end: *mut MaybeUninit<u8>) -> Option<Region> {
539 if let Some(region) = self.free_head.take() {
540 let mut region = self.region(region);
541
542 region.range.start = self.free.start;
543 region.range.end = end;
544
545 return Some(region);
546 }
547
548 debug_assert_eq!(
549 self.free.end.align_offset(align_of::<Header>()),
550 0,
551 "End pointer should be aligned to header"
552 );
553
554 let ptr = self.free.end.cast::<Header>().wrapping_sub(1);
555
556 if ptr < self.free.start.cast() || ptr >= self.free.end.cast() {
557 return None;
558 }
559
560 let id = HeaderId::new(self.full.end.cast::<Header>().offset_from(ptr))?;
561
562 ptr.write(Header {
563 range: Range::new(self.free.start..end),
564 prev: None,
565 next: None,
566 });
567
568 self.free.end = ptr.cast();
569 Some(Region { id, ptr })
570 }
571
572 unsafe fn alloc(&mut self, requested: usize, align: usize) -> Option<Region> {
578 if let Some(occupied) = self.occupied {
579 let region = self.region(occupied);
580
581 if region.capacity() >= requested && region.is_aligned(align) {
582 self.occupied = None;
583 return Some(region);
584 }
585 }
586
587 self.align(align)?;
588
589 if self.remaining() < requested {
590 return None;
591 }
592
593 let end = self.free.start.wrapping_add(requested);
594
595 let mut region = self.alloc_header(end)?;
596
597 self.free.start = end;
598 debug_assert!(self.free.start <= self.free.end);
599 self.push_back(&mut region);
600 Some(region)
601 }
602
603 unsafe fn align(&mut self, align: usize) -> Option<()> {
609 let align = self.free.start.align_offset(align);
610
611 if align == 0 {
612 return Some(());
613 }
614
615 if self.remaining() < align {
616 return None;
617 }
618
619 let aligned_start = self.free.start.wrapping_add(align);
620
621 if let Some(tail) = self.tail {
622 self.region(tail).range.end = aligned_start;
624 } else {
625 let mut region = self.alloc_header(aligned_start)?;
629
630 self.push_back(&mut region);
631 }
632
633 self.free.start = aligned_start;
634 Some(())
635 }
636
637 unsafe fn free(&mut self, region: HeaderId) {
638 let region = self.region(region);
639
640 if region.next.is_none() {
642 self.free_tail(region);
643 return;
644 }
645
646 let Some(prev) = region.prev else {
648 debug_assert!(
649 self.occupied.is_none(),
650 "There can only be one occupied region"
651 );
652
653 self.occupied = Some(region.id);
654 return;
655 };
656
657 let mut prev = self.region(prev);
658
659 let region = self.free_region(region);
661
662 prev.range.end = region.range.end;
663
664 if region.next.is_none() {
666 self.free.start = region.range.start;
667 }
668 }
669
670 unsafe fn free_tail(&mut self, current: Region) {
672 debug_assert_eq!(self.tail, Some(current.id));
673
674 let current = self.free_region(current);
675 debug_assert_eq!(current.next, None);
676
677 self.free.start = match current.prev {
678 Some(prev) if self.occupied == Some(prev) => {
680 self.occupied = None;
681 let prev = self.region(prev);
682 self.free_region(prev).range.start
683 }
684 _ => current.range.start,
685 };
686 }
687
688 unsafe fn reserve(&mut self, additional: usize, align: usize) -> Option<*mut MaybeUninit<u8>> {
689 self.align(align)?;
690
691 let free_start = self.free.start.wrapping_add(additional);
692
693 if free_start > self.free.end || free_start < self.free.start {
694 return None;
695 }
696
697 Some(free_start)
698 }
699
700 unsafe fn realloc(
701 &mut self,
702 from: HeaderId,
703 len: usize,
704 requested: usize,
705 align: usize,
706 ) -> Option<Region> {
707 let mut from = self.region(from);
708
709 if from.next.is_none() {
711 let additional = requested - from.capacity();
714 self.free.start = self.reserve(additional, align)?;
715 from.range.end = from.range.end.add(additional);
716 return Some(from);
717 }
718
719 if from.range.start == from.range.end {
722 let free_start = self.reserve(requested, align)?;
723 from.range.start = replace(&mut self.free.start, free_start);
724 from.range.end = free_start;
725 self.replace_back(&mut from);
726 return Some(from);
727 }
728
729 'bail: {
732 let Some(prev) = from.prev else {
734 break 'bail;
735 };
736
737 if self.occupied != Some(prev) {
738 break 'bail;
739 }
740
741 let mut prev = self.region(prev);
742
743 if prev.capacity() + from.capacity() < requested {
744 break 'bail;
745 }
746
747 if !prev.is_aligned(align) {
748 break 'bail;
749 }
750
751 let from = self.free_region(from);
752
753 from.range.start.copy_to(prev.range.start, len);
754 prev.range.end = from.range.end;
755 self.occupied = None;
756 return Some(prev);
757 }
758
759 let to = self.alloc(requested, align)?;
760 from.range.start.copy_to_nonoverlapping(to.range.start, len);
761 self.free(from.id);
762 Some(to)
763 }
764}
765
/// Metadata for one region of the slab.
///
/// While a header sits on the free list, `next` is repurposed to link to the
/// following free header (see `free_region`) rather than the address-order
/// neighbor.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Header {
    // The byte range covered by this region.
    range: Range,
    // Previous region in address order; `None` for the first region.
    prev: Option<HeaderId>,
    // Next region in address order; `None` for the tail region.
    next: Option<HeaderId>,
}
776
impl Header {
    /// Total number of payload bytes this region spans.
    #[inline]
    fn capacity(&self) -> usize {
        // SAFETY: `start` and `end` are derived from the same backing buffer
        // with `start <= end`, so the offset is non-negative and in bounds.
        unsafe { self.range.end.byte_offset_from(self.range.start) as usize }
    }

    /// Whether the region's start pointer satisfies `align`.
    #[inline]
    fn is_aligned(&self, align: usize) -> bool {
        self.range.start.align_offset(align) == 0
    }
}