// rune/hir/arena.rs
#[cfg(test)]
mod tests;

use core::alloc::Layout;
use core::cell::{Cell, RefCell};
use core::marker::PhantomData;
use core::mem;
use core::ptr;
use core::slice;
use core::str;

use crate::alloc::prelude::*;
use crate::alloc::{self, HashMap};

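/// Error raised when a slice reserved through [`Arena::alloc_iter`] is
/// written to beyond its reserved length.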
#[non_exhaustive]
pub struct ArenaWriteSliceOutOfBounds {
    pub index: usize,
}

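/// Error raised when the arena fails to allocate memory.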
#[derive(Debug)]
#[non_exhaustive]
pub struct ArenaAllocError {
    pub requested: usize,
}

impl From<alloc::Error> for ArenaAllocError {
    fn from(_: alloc::Error) -> Self {
        Self { requested: 0 }
    }
}

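// The arena grows in chunks: the first chunk is `PAGE` bytes, and each
// subsequent chunk doubles in size, capped at `HUGE_PAGE`.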
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;

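/// A single backing allocation in the arena.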
struct Chunk {
    storage: Box<[u8]>,
}

impl Chunk {
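    /// Construct a new zero-initialized chunk with the given length in bytes.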
    fn new(len: usize) -> Result<Self, ArenaAllocError> {
        Ok(Self {
            storage: try_vec![0u8; len].try_into_boxed_slice()?,
        })
    }
}

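/// An arena allocator backed by a list of chunks.
///
/// Allocations are bump-allocated downwards from the end of the current
/// chunk; when it is exhausted, [`Arena::grow`] allocates a new, larger
/// chunk. Byte slices allocated through [`Arena::alloc_bytes`] are interned
/// so that identical slices share a single allocation.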
pub struct Arena {
    /// The lowest address of the current chunk.
    start: Cell<*mut u8>,
    /// The bump pointer; allocations move it downwards towards `start`.
    end: Cell<*mut u8>,
    /// All chunks allocated so far.
    chunks: RefCell<Vec<Chunk>>,
    /// Interned byte slices, pointing into the chunks above.
    bytes: RefCell<HashMap<Box<[u8]>, ptr::NonNull<u8>>>,
}

impl Arena {
    /// Construct a new empty arena.
    pub fn new() -> Self {
        Self {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(Vec::new()),
            bytes: RefCell::new(HashMap::new()),
        }
    }

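    /// Allocate a copy of the given byte slice in the arena.
    ///
    /// Identical slices are deduplicated: allocating the same bytes twice
    /// returns a reference to the same arena storage.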
    pub(crate) fn alloc_bytes(&self, bytes: &[u8]) -> Result<&[u8], ArenaAllocError> {
        if let Some(ptr) = self.bytes.borrow().get(bytes).copied() {
            // SAFETY: The stored pointer refers to an earlier allocation in
            // one of the arena's chunks, which lives as long as the arena.
            unsafe {
                return Ok(slice::from_raw_parts(ptr.as_ptr() as *const _, bytes.len()));
            }
        }

        let layout = Layout::array::<u8>(bytes.len()).map_err(|_| ArenaAllocError {
            requested: bytes.len(),
        })?;
        let ptr = self.alloc_raw(layout)?;

        // SAFETY: `ptr` refers to a freshly allocated region of at least
        // `bytes.len()` bytes which does not overlap with `bytes`.
        let output = unsafe {
            ptr::copy_nonoverlapping(bytes.as_ptr(), ptr.as_ptr(), bytes.len());
            slice::from_raw_parts(ptr.as_ptr() as *const _, bytes.len())
        };

        self.bytes.borrow_mut().try_insert(bytes.try_into()?, ptr)?;
        Ok(output)
    }

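    /// Allocate a copy of the given string in the arena.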
    pub(crate) fn alloc_str(&self, string: &str) -> Result<&str, ArenaAllocError> {
        let bytes = self.alloc_bytes(string.as_bytes())?;

        // SAFETY: The bytes were copied verbatim from a valid `&str`, so they
        // are guaranteed to be valid UTF-8.
        unsafe { Ok(str::from_utf8_unchecked(bytes)) }
    }

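    /// Allocate a single value in the arena and return a mutable reference
    /// to it.
    ///
    /// The value must not need to be dropped, since the arena never runs
    /// destructors.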
    pub(crate) fn alloc<T>(&self, object: T) -> Result<&mut T, ArenaAllocError> {
        assert!(!mem::needs_drop::<T>());

        let mut ptr = self.alloc_raw(Layout::for_value::<T>(&object))?.cast();

        // SAFETY: The pointer is valid, properly aligned for `T`, and points
        // to memory owned by the arena for its entire lifetime.
        unsafe {
            ptr::write(ptr.as_ptr(), object);
            Ok(ptr.as_mut())
        }
    }

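    /// Reserve space for `len` values of type `T` and return an [`AllocIter`]
    /// which can be used to write them one at a time.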
    pub(crate) fn alloc_iter<T>(&self, len: usize) -> Result<AllocIter<'_, T>, ArenaAllocError> {
        assert!(!mem::needs_drop::<T>(), "cannot allocate drop element");

        let mem = if len == 0 {
            None
        } else {
            Some(self.alloc_raw(Layout::array::<T>(len).unwrap())?.cast())
        };

        Ok(AllocIter {
            mem,
            index: 0,
            len,
            _marker: PhantomData,
        })
    }

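    /// Try to carve `layout` out of the current chunk without allocating a
    /// new one, returning `None` if there is not enough space left.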
    #[inline]
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<ptr::NonNull<u8>> {
        let start = addr(self.start.get());
        let old_end = self.end.get();
        let end = addr(old_end);

        let align = layout.align();
        let bytes = layout.size();

        // Bump downwards: subtract the size and round down to the required
        // alignment. This relies on `align` being a power of two.
        let new_end = end.checked_sub(bytes)? & !(align - 1);

        if start > new_end {
            return None;
        }

        let new_end = with_addr(old_end, new_end);
        self.end.set(new_end);

        // SAFETY: `new_end` lies within the current chunk and is therefore
        // non-null.
        unsafe { Some(ptr::NonNull::new_unchecked(new_end)) }
    }

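    /// Allocate a region of memory described by `layout`, growing the arena
    /// with a new chunk if necessary.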
    #[inline]
    fn alloc_raw(&self, layout: Layout) -> Result<ptr::NonNull<u8>, ArenaAllocError> {
        assert!(layout.align() != 0);

        if layout.size() == 0 {
            // Zero-sized allocations don't need backing storage; a dangling,
            // suitably aligned pointer is sufficient.
            return unsafe { Ok(ptr::NonNull::new_unchecked(layout.align() as *mut u8)) };
        }

        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break Ok(a);
            }

            self.grow(layout.size())?;
        }
    }

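    /// Allocate a new chunk with room for at least `additional` bytes and
    /// make it the current chunk.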
    #[cold]
    fn grow(&self, additional: usize) -> Result<(), ArenaAllocError> {
        let mut chunks = self.chunks.borrow_mut();

        // Double the size of the previous chunk, capped at `HUGE_PAGE`, but
        // never allocate less than what was requested.
        let new_cap = additional.max(
            chunks
                .last()
                .map(|c| c.storage.len().min(HUGE_PAGE / 2) * 2)
                .unwrap_or(PAGE),
        );

        chunks.try_push(Chunk::new(new_cap)?)?;

        let Some(chunk) = chunks.last_mut() else {
            return Err(ArenaAllocError {
                requested: additional,
            });
        };

        let range = chunk.storage.as_mut_ptr_range();
        self.start.set(range.start);
        self.end.set(range.end);
        Ok(())
    }
}

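/// Get the address of a pointer as a `usize`.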
#[inline]
pub(crate) fn addr(this: *mut u8) -> usize {
    this as usize
}

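/// Construct a pointer with the given address, preserving the provenance of
/// `this` by offsetting it rather than casting from an integer.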
#[inline]
pub(crate) fn with_addr(this: *mut u8, a: usize) -> *mut u8 {
    let this_addr = addr(this) as isize;
    let dest_addr = a as isize;
    let offset = dest_addr.wrapping_sub(this_addr);
    this.wrapping_offset(offset)
}

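/// A writer for a slice reserved in the arena through [`Arena::alloc_iter`].
///
/// Elements are written one at a time with [`AllocIter::write`] and the
/// finished slice is obtained with [`AllocIter::finish`].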
pub(crate) struct AllocIter<'hir, T> {
    mem: Option<ptr::NonNull<T>>,
    index: usize,
    len: usize,
    _marker: PhantomData<&'hir ()>,
}

impl<'hir, T> AllocIter<'hir, T> {
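    /// Write the next element into the reserved slice, failing if the
    /// reserved length has already been filled.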
    pub(crate) fn write(&mut self, object: T) -> Result<(), ArenaWriteSliceOutOfBounds> {
        let mem = self
            .mem
            .ok_or(ArenaWriteSliceOutOfBounds { index: self.index })?;

        if self.index >= self.len {
            return Err(ArenaWriteSliceOutOfBounds { index: self.index });
        }

        // SAFETY: The index has been checked against the reserved length
        // above, so the write stays within the allocation.
        unsafe {
            ptr::write(mem.as_ptr().add(self.index), object);
            self.index += 1;
            Ok(())
        }
    }

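    /// Finish writing and return the slice of elements written so far.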
    pub(crate) fn finish(self) -> &'hir mut [T] {
        match self.mem {
            // SAFETY: Exactly `self.index` elements have been initialized
            // through `write`.
            Some(mem) => unsafe { slice::from_raw_parts_mut(mem.as_ptr(), self.index) },
            None => &mut [],
        }
    }
}