1use core::alloc::{Layout, LayoutError};
2use core::cell::Cell;
3use core::fmt;
4use core::mem::{align_of, needs_drop, replace, size_of, take};
5use core::ptr::{self, addr_of, addr_of_mut, NonNull};
6
7use rust_alloc::sync::Arc;
8
9use crate::alloc;
10use crate::alloc::alloc::{Allocator, Global};
11use crate::alloc::fmt::TryWrite;
12use crate::hash::Hash;
13use crate::runtime::{
14 Access, AccessError, BorrowMut, BorrowRef, Formatter, IntoOutput, ProtocolCaller, Rtti,
15 RttiKind, RuntimeError, Snapshot, TypeInfo, Value, VmResult,
16};
17
/// Error raised by [`Dynamic::take`]: either exclusive access to the value
/// could not be claimed, or allocating storage for the new value failed.
#[derive(Debug)]
pub(crate) enum DynamicTakeError {
    /// The underlying access guard refused to hand over the value.
    Access(AccessError),
    /// Allocating the destination storage failed.
    Alloc(alloc::Error),
}
23
24impl fmt::Display for DynamicTakeError {
25 #[inline]
26 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
27 match self {
28 DynamicTakeError::Access(error) => error.fmt(f),
29 DynamicTakeError::Alloc(error) => error.fmt(f),
30 }
31 }
32}
33
// NOTE(review): `source()` is deliberately left as the default `None` —
// `Display` already forwards the inner error's message (see the impl above in
// the file), so exposing it as a source too would duplicate it in error chains.
impl core::error::Error for DynamicTakeError {}
35
36impl From<AccessError> for DynamicTakeError {
37 fn from(error: AccessError) -> Self {
38 Self::Access(error)
39 }
40}
41
42impl From<alloc::Error> for DynamicTakeError {
43 fn from(error: alloc::Error) -> Self {
44 Self::Alloc(error)
45 }
46}
47
/// A reference-counted dynamic value: a header of type `H` (e.g. runtime type
/// information) followed inline by `len` values of type `T` in a single heap
/// allocation shared between all clones. Borrow tracking is done at runtime
/// through the allocation's [`Access`] flags.
pub struct Dynamic<H, T> {
    // Pointer to the shared allocation; always live while this handle exists,
    // and this handle owns exactly one unit of its reference count.
    shared: NonNull<Shared<H, T>>,
}
56
impl<H, T> Dynamic<H, T> {
    /// Construct a new dynamic value with header `rtti` holding the values
    /// yielded by `it`.
    ///
    /// Errors if allocating the backing storage fails.
    pub(crate) fn new(
        rtti: H,
        it: impl IntoIterator<Item = T, IntoIter: ExactSizeIterator>,
    ) -> alloc::Result<Self> {
        let it = it.into_iter();
        let this = Self::alloc(rtti, it.len())?;

        // SAFETY: `alloc` reserved storage for exactly `it.len()` trailing
        // values, which `ExactSizeIterator` promises the iterator yields.
        // NOTE(review): a panicking or short iterator would leave trailing
        // slots uninitialized while the recorded `len` claims otherwise —
        // confirm only well-behaved iterators reach this point.
        unsafe {
            let data = Shared::as_data_ptr(this.shared);

            for (i, value) in it.enumerate() {
                data.add(i).write(value);
            }
        }

        Ok(this)
    }

    /// Allocate a `Shared<H, T>` header with room for `len` trailing values,
    /// leaving the value storage uninitialized. The reference count starts
    /// at 1, owned by the returned handle.
    fn alloc(rtti: H, len: usize) -> alloc::Result<Self> {
        let layout = Shared::<H, T>::layout(len)?;

        let shared = Global.allocate(layout)?.cast::<Shared<H, T>>();

        // SAFETY: the fresh allocation is valid for a write of the header;
        // `data: []` is zero-sized, so the trailing storage stays untouched.
        unsafe {
            shared.write(Shared {
                rtti,
                count: Cell::new(1),
                access: Access::new(),
                len,
                data: [],
            });
        }

        Ok(Self { shared })
    }

    /// Whether shared (read) access is available, per [`Access::is_shared`].
    #[inline]
    pub(crate) fn is_readable(&self) -> bool {
        // SAFETY: `self.shared` is live for the lifetime of `self`.
        unsafe { self.shared.as_ref().access.is_shared() }
    }

    /// Whether exclusive (write) access is available, per
    /// [`Access::is_exclusive`].
    #[inline]
    pub(crate) fn is_writable(&self) -> bool {
        // SAFETY: `self.shared` is live for the lifetime of `self`.
        unsafe { self.shared.as_ref().access.is_exclusive() }
    }

    /// A snapshot of the current access state, e.g. for error reporting.
    #[inline]
    pub(crate) fn snapshot(&self) -> Snapshot {
        // SAFETY: `self.shared` is live for the lifetime of `self`.
        unsafe { self.shared.as_ref().access.snapshot() }
    }

    /// The number of values stored in this dynamic value.
    #[inline]
    pub(crate) fn len(&self) -> usize {
        // SAFETY: `self.shared` is live for the lifetime of `self`.
        unsafe { self.shared.as_ref().len }
    }

    /// Access the header (e.g. runtime type information) of the value.
    #[inline]
    pub(crate) fn rtti(&self) -> &H {
        // SAFETY: `self.shared` is live for the lifetime of `self`, and the
        // header is never moved out while handles exist.
        unsafe { &self.shared.as_ref().rtti }
    }

    /// Borrow the values as a shared `[T]`, failing if conflicting access is
    /// currently held.
    #[inline]
    pub(crate) fn borrow_ref(&self) -> Result<BorrowRef<[T]>, AccessError> {
        // SAFETY: the shared guard acquired first protects the data for the
        // lifetime of the returned borrow, which carries the raw guard.
        unsafe {
            let guard = self.shared.as_ref().access.shared()?;
            let data = Shared::as_data_ptr(self.shared);
            let data = NonNull::slice_from_raw_parts(data, self.shared.as_ref().len);
            Ok(BorrowRef::new(data, guard.into_raw()))
        }
    }

    /// Borrow the values as an exclusive `[T]`, failing if any other access
    /// is currently held.
    #[inline]
    pub(crate) fn borrow_mut(&self) -> Result<BorrowMut<[T]>, AccessError> {
        // SAFETY: the exclusive guard acquired first protects the data for
        // the lifetime of the returned borrow, which carries the raw guard.
        unsafe {
            let guard = self.shared.as_ref().access.exclusive()?;
            let data = Shared::as_data_ptr(self.shared);
            let data = NonNull::slice_from_raw_parts(data, self.shared.as_ref().len);
            Ok(BorrowMut::new(data, guard.into_raw()))
        }
    }

    /// Eagerly drop the stored values, marking the allocation as taken so
    /// remaining clones can no longer access the values and `Shared::dec`
    /// won't drop them a second time.
    #[inline]
    pub(crate) fn drop(self) -> Result<(), AccessError> {
        // SAFETY: `try_take` succeeding grants exclusive ownership of the
        // values; the taken flag prevents a double drop when the last handle
        // releases the allocation.
        unsafe {
            self.shared.as_ref().access.try_take()?;
            let len = self.shared.as_ref().len;
            Shared::drop_values(self.shared, len);
            Ok(())
        }
    }
}
170
impl<H, T> Dynamic<H, T>
where
    H: Clone,
{
    /// Move the contents into a freshly allocated `Dynamic`, cloning the
    /// header and marking this allocation as taken so other clones can no
    /// longer access (or drop) the original values.
    pub(crate) fn take(self) -> Result<Self, DynamicTakeError> {
        // SAFETY: `try_take` succeeding grants exclusive ownership of the
        // values, so they may be moved with a bitwise copy; the taken flag
        // stops `Shared::dec` from dropping the originals afterwards.
        // NOTE(review): if `Self::alloc` fails after `try_take`, the values
        // are never dropped — a leak, not unsoundness. Confirm acceptable.
        unsafe {
            self.shared.as_ref().access.try_take()?;
            let len = self.shared.as_ref().len;
            let new = Self::alloc(self.rtti().clone(), len)?;
            let from = Shared::as_data_ptr(self.shared);
            let to = Shared::as_data_ptr(new.shared);
            to.copy_from_nonoverlapping(from, len);
            Ok(new)
        }
    }
}
190
impl<H, T> Drop for Dynamic<H, T> {
    fn drop(&mut self) {
        // SAFETY: each handle owns exactly one unit of the reference count,
        // released exactly once here; `Shared::dec` drops the contents and
        // frees the allocation when the count reaches zero.
        unsafe {
            Shared::dec(self.shared);
        }
    }
}
199
200impl<H, T> Clone for Dynamic<H, T> {
201 #[inline]
202 fn clone(&self) -> Self {
203 unsafe {
205 Shared::inc(self.shared);
206 }
207
208 Self {
209 shared: self.shared,
210 }
211 }
212
213 #[inline]
214 fn clone_from(&mut self, source: &Self) {
215 if ptr::eq(self.shared.as_ptr(), source.shared.as_ptr()) {
216 return;
217 }
218
219 let old = replace(&mut self.shared, source.shared);
220
221 unsafe {
223 Shared::dec(old);
224 Shared::inc(self.shared);
225 }
226 }
227}
228
/// The heap header shared by all clones of a [`Dynamic`].
///
/// `#[repr(C)]` pins the field order so the zero-sized `data` marker sits at
/// the end of the header, immediately before the `len` values of `T` that are
/// allocated inline after it (see [`Shared::layout`]).
#[repr(C)]
struct Shared<H, T> {
    // Header value, e.g. runtime type information.
    rtti: H,
    // Number of live `Dynamic` handles referring to this allocation.
    count: Cell<usize>,
    // Tracks shared/exclusive borrows and whether the values were taken.
    access: Access,
    // Number of initialized `T` values stored after the header.
    len: usize,
    // Zero-sized marker giving the address and alignment of the inline
    // values; never read or written as an array itself.
    data: [T; 0],
}
242
impl<H, T> Shared<H, T> {
    /// Layout of the header followed by storage for `len` values of `T`.
    ///
    /// Because `Shared` embeds `[T; 0]`, `align_of::<Shared<H, T>>()` is at
    /// least `align_of::<T>()`, so the trailing values are properly aligned.
    /// NOTE(review): the size addition can overflow `usize` before
    /// `from_size_align` gets to validate it — confirm `len` is bounded by
    /// callers.
    #[inline]
    fn layout(len: usize) -> Result<Layout, LayoutError> {
        let array = Layout::array::<T>(len)?;
        Layout::from_size_align(
            size_of::<Shared<H, T>>() + array.size(),
            align_of::<Shared<H, T>>(),
        )
    }

    /// Pointer to the `rtti` header field of the allocation.
    ///
    /// # Safety
    /// `this` must point to a live `Shared<H, T>` allocation.
    #[inline]
    unsafe fn as_rtti_ptr(this: NonNull<Self>) -> NonNull<H> {
        NonNull::new_unchecked(addr_of_mut!((*this.as_ptr()).rtti))
    }

    /// Pointer to the first of the trailing `T` values.
    ///
    /// # Safety
    /// `this` must point to a live `Shared<H, T>` allocation.
    #[inline]
    unsafe fn as_data_ptr(this: NonNull<Self>) -> NonNull<T> {
        NonNull::new_unchecked(addr_of_mut!((*this.as_ptr()).data)).cast::<T>()
    }

    /// Increment the reference count, aborting the process on overflow.
    ///
    /// # Safety
    /// `this` must point to a live `Shared<H, T>` allocation.
    #[inline]
    unsafe fn inc(this: NonNull<Self>) {
        // Raw field projections avoid materializing a `&Shared` reference
        // over possibly-borrowed value storage.
        let count_ref = &*addr_of!((*this.as_ptr()).count);
        let count = count_ref.get();

        debug_assert_ne!(
            count, 0,
            "Reference count of zero should only happen if Shared is incorrectly implemented"
        );

        // Saturating at `usize::MAX` would later let the count drop below the
        // number of live handles, so abort instead of wrapping.
        if count == usize::MAX {
            crate::alloc::abort();
        }

        count_ref.set(count + 1);
    }

    /// Decrement the reference count; when it reaches zero, drop the values
    /// (unless they were already taken), drop the header, and free the
    /// allocation.
    ///
    /// # Safety
    /// `this` must point to a live `Shared<H, T>` allocation and the caller
    /// must own one unit of its count. `this` may dangle after this returns.
    #[inline]
    unsafe fn dec(this: NonNull<Self>) {
        let count_ref = &*addr_of!((*this.as_ptr()).count);
        let access = &*addr_of!((*this.as_ptr()).access);
        let count = count_ref.get();

        debug_assert_ne!(
            count, 0,
            "Reference count of zero should only happen if Shared is incorrectly implemented"
        );

        let count = count - 1;
        count_ref.set(count);

        if count != 0 {
            return;
        }

        let len = (*this.as_ptr()).len;

        // The same layout computation succeeded when this allocation was
        // created, so it cannot fail here.
        let Ok(layout) = Self::layout(len) else {
            unreachable!();
        };

        // If the values were taken (see `Dynamic::take` / `Dynamic::drop`),
        // ownership moved elsewhere and they must not be dropped again.
        if !access.is_taken() {
            Self::drop_values(this, len);
        }

        if needs_drop::<H>() {
            Self::as_rtti_ptr(this).drop_in_place();
        }

        Global.deallocate(this.cast(), layout);
    }

    /// Drop the `len` trailing values in place, if `T` needs dropping.
    ///
    /// # Safety
    /// `this` must point to a live allocation holding `len` initialized
    /// values that have not already been dropped or taken.
    #[inline]
    unsafe fn drop_values(this: NonNull<Self>, len: usize) {
        if needs_drop::<T>() {
            let data = Self::as_data_ptr(this);
            NonNull::slice_from_raw_parts(data, len).drop_in_place();
        }
    }
}
332
impl<T> Dynamic<Arc<Rtti>, T> {
    /// The type hash from the runtime type information header.
    #[inline]
    pub(crate) fn type_hash(&self) -> Hash {
        self.rtti().hash
    }

    /// Full type information derived from the runtime type information.
    #[inline]
    pub(crate) fn type_info(&self) -> TypeInfo {
        self.rtti().clone().type_info()
    }

    /// Borrow the field named `key`, or `Ok(None)` if no such field exists.
    #[inline]
    pub(crate) fn get_field_ref(&self, key: &str) -> Result<Option<BorrowRef<'_, T>>, AccessError> {
        let Some(index) = self.rtti().fields.get(key) else {
            return Ok(None);
        };

        self.get_ref(*index)
    }

    /// Exclusively borrow the field named `key`, or `Ok(None)` if no such
    /// field exists.
    #[inline]
    pub(crate) fn get_field_mut(&self, key: &str) -> Result<Option<BorrowMut<'_, T>>, AccessError> {
        let Some(index) = self.rtti().fields.get(key) else {
            return Ok(None);
        };

        self.get_mut(*index)
    }

    /// Borrow the value at position `index`, or `Ok(None)` if out of bounds.
    #[inline]
    pub(crate) fn get_ref(&self, index: usize) -> Result<Option<BorrowRef<'_, T>>, AccessError> {
        // SAFETY: the bounds check keeps `add(index)` inside the allocation,
        // and the shared guard protects the value for the borrow's lifetime.
        unsafe {
            let shared = self.shared.as_ref();

            if index >= shared.len {
                return Ok(None);
            }

            let guard = shared.access.shared()?;
            let data = Shared::as_data_ptr(self.shared).add(index);
            Ok(Some(BorrowRef::new(data, guard.into_raw())))
        }
    }

    /// Exclusively borrow the value at position `index`, or `Ok(None)` if
    /// out of bounds.
    #[inline]
    pub(crate) fn get_mut(&self, index: usize) -> Result<Option<BorrowMut<'_, T>>, AccessError> {
        // SAFETY: as in `get_ref`, but holding the exclusive guard.
        unsafe {
            let shared = self.shared.as_ref();

            if index >= shared.len {
                return Ok(None);
            }

            let guard = shared.access.exclusive()?;
            let data = Shared::as_data_ptr(self.shared).add(index);
            Ok(Some(BorrowMut::new(data, guard.into_raw())))
        }
    }
}
400
impl Dynamic<Arc<Rtti>, Value> {
    /// Debug-format the value, dispatching on its [`RttiKind`] and using
    /// `caller` to invoke the debug-formatting protocol on contained values.
    ///
    /// Fails if the values are currently exclusively borrowed.
    pub(crate) fn debug_fmt_with(
        &self,
        f: &mut Formatter,
        caller: &mut dyn ProtocolCaller,
    ) -> VmResult<()> {
        let rtti = self.rtti();
        let values = vm_try!(self.borrow_ref());

        match rtti.kind {
            RttiKind::Empty => debug_empty(rtti, f),
            RttiKind::Tuple => debug_tuple(rtti, &values, f, caller),
            RttiKind::Struct => debug_struct(rtti, &values, f, caller),
        }
    }
}
418
419fn debug_empty(rtti: &Rtti, f: &mut Formatter) -> VmResult<()> {
420 vm_try!(write!(f, "{}", rtti.item));
421 VmResult::Ok(())
422}
423
424fn debug_tuple(
425 rtti: &Rtti,
426 values: &[Value],
427 f: &mut Formatter,
428 caller: &mut dyn ProtocolCaller,
429) -> VmResult<()> {
430 vm_try!(write!(f, "{} (", rtti.item));
431
432 let mut first = true;
433
434 for value in values.iter() {
435 if !take(&mut first) {
436 vm_try!(write!(f, ", "));
437 }
438
439 vm_try!(value.debug_fmt_with(f, caller));
440 }
441
442 vm_try!(write!(f, ")"));
443 VmResult::Ok(())
444}
445
446fn debug_struct(
447 rtti: &Rtti,
448 values: &[Value],
449 f: &mut Formatter,
450 caller: &mut dyn ProtocolCaller,
451) -> VmResult<()> {
452 vm_try!(write!(f, "{} {{", rtti.item));
453
454 let mut first = true;
455
456 for (index, field) in values.iter().enumerate() {
457 let Some((name, _)) = rtti.fields.iter().find(|t| *t.1 == index) else {
458 continue;
459 };
460
461 if !take(&mut first) {
462 vm_try!(write!(f, ", "));
463 }
464
465 vm_try!(write!(f, "{name}: "));
466 vm_try!(field.debug_fmt_with(f, caller));
467 }
468
469 vm_try!(write!(f, "}}"));
470 VmResult::Ok(())
471}
472
473impl IntoOutput for Dynamic<Arc<Rtti>, Value> {
474 #[inline]
475 fn into_output(self) -> Result<Value, RuntimeError> {
476 Ok(Value::from(self))
477 }
478}