rune_alloc/sync.rs
use core::alloc::Layout;
use core::borrow::Borrow;
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::hint;
use core::marker::PhantomData;
use core::mem;
use core::ops::Deref;
use core::panic::RefUnwindSafe;
use core::panic::UnwindSafe;
use core::ptr;
use core::ptr::NonNull;
use core::sync::atomic::AtomicUsize;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};

use crate::alloc::{AllocError, Allocator, Global};
use crate::clone::TryClone;
use crate::{abort, Box, Result, Vec};

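// Dangling `Weak` pointers store `usize::MAX` as their address; see the notes
// on the `ptr` field of `Weak` below.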
fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
    (ptr.cast::<()>()).addr() == usize::MAX
}

#[must_use]
unsafe fn for_value_raw<T>(t: *const T) -> Layout
where
    T: ?Sized,
{
    // SAFETY: we pass along the prerequisites of these functions to the
    // caller.
    //
    // TODO: Use mem::{size_of_val_raw, align_of_val_raw} when they become
    // stable. For now we privately know that this can safely be turned into a
    // reference, since it's only used while dropping an owned value of type
    // `T`.
    let (size, align) = unsafe { (mem::size_of_val(&*t), mem::align_of_val(&*t)) };
    // SAFETY: see rationale in `new` for why this is using the unsafe variant.
    unsafe { Layout::from_size_align_unchecked(size, align) }
}

/// A soft limit on the amount of references that may be made to an `Arc`.
///
/// Going above this limit will abort your program (although not necessarily)
/// at _exactly_ `MAX_REFCOUNT + 1` references. Trying to go above it might
/// call a `panic` (if not actually going above it).
///
/// This is a global invariant, and also applies when using a compare-exchange
/// loop.
///
/// See comment in `Arc::clone`.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

/// The error in case either counter reaches above `MAX_REFCOUNT`, and we can
/// `panic` safely.
const INTERNAL_OVERFLOW_ERROR: &str = "Arc counter overflow";

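// Expands to an `Acquire` fence. The `$x` argument only documents which atomic
// the fence synchronizes with; see the ordering discussion in `Drop for Arc`
// for why a fence is used here rather than an acquire load.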
macro_rules! acquire {
    ($x:expr) => {
        core::sync::atomic::fence(Acquire)
    };
}

/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
/// Reference Counted'.
///
/// The type `Arc<T>` provides shared ownership of a value of type `T`,
/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces a new
/// `Arc` instance, which points to the same allocation on the heap as the
/// source `Arc`, while increasing a reference count. When the last `Arc`
/// pointer to a given allocation is destroyed, the value stored in that
/// allocation (often referred to as "inner value") is also dropped.
///
/// Shared references in Rust disallow mutation by default, and `Arc` is no
/// exception: you cannot generally obtain a mutable reference to something
/// inside an `Arc`. If you do need to mutate through an `Arc`, you have several
/// options:
///
/// 1. Use interior mutability with synchronization primitives like
///    [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
///    types.
///
/// 2. Use [`Arc::get_mut`] when you know your `Arc` is not shared (has a
///    reference count of 1), which provides direct mutable access to the inner
///    value without any cloning.
///
/// ```
/// use rune::sync::Arc;
/// use rune::alloc::try_vec;
///
/// let mut data = Arc::try_new(try_vec![1, 2, 3])?;
///
/// // Since there are no other references, `get_mut` gives us direct mutable
/// // access to the vector.
/// Arc::get_mut(&mut data).unwrap().try_push(4)?;
///
/// assert_eq!(*data, try_vec![1, 2, 3, 4]);
/// # Ok::<_, rune::alloc::Error>(())
/// ```
///
/// **Note**: This type is only available on platforms that support atomic loads
/// and stores of pointers, which includes all platforms that support the `std`
/// crate but not all those which only support [`alloc`](crate). This may be
/// detected at compile time using `#[cfg(target_has_atomic = "ptr")]`.
///
/// ## Thread Safety
///
/// Unlike `Rc<T>`, `Arc<T>` uses atomic operations for its reference counting.
/// This means that it is thread-safe. The disadvantage is that atomic
/// operations are more expensive than ordinary memory accesses. If you are not
/// sharing reference-counted allocations between threads, consider using
/// `Rc<T>` for lower overhead. `Rc<T>` is a safe default, because the compiler
/// will catch any attempt to send an `Rc<T>` between threads. However, a
/// library might choose `Arc<T>` in order to give library consumers more
/// flexibility.
///
/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
/// data, but it doesn't add thread safety to its data. Consider
/// <code>Arc<[RefCell\<T>]></code>. [`RefCell<T>`] isn't [`Sync`], and if
/// `Arc<T>` was always [`Send`], <code>Arc<[RefCell\<T>]></code> would be as
/// well. But then we'd have a problem: [`RefCell<T>`] is not thread safe; it
/// keeps track of the borrowing count using non-atomic operations.
///
/// In the end, this means that you may need to pair `Arc<T>` with some sort of
/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
///
/// ## Breaking cycles with `Weak`
///
/// The [`downgrade`][downgrade] method can be used to create a non-owning
/// [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d to an
/// `Arc`, but this will return [`None`] if the value stored in the allocation
/// has already been dropped. In other words, `Weak` pointers do not keep the
/// value inside the allocation alive; however, they *do* keep the allocation
/// (the backing store for the value) alive.
///
/// A cycle between `Arc` pointers will never be deallocated. For this reason,
/// [`Weak`] is used to break cycles. For example, a tree could have strong
/// `Arc` pointers from parent nodes to children, and [`Weak`] pointers from
/// children back to their parents.
///
/// # Cloning references
///
/// Creating a new reference from an existing reference-counted pointer is done
/// using the `Clone` trait implemented for [`Arc<T>`][Arc] and
/// [`Weak<T>`][Weak].
///
/// ```
/// use rune::sync::Arc;
/// use rune::alloc::try_vec;
///
/// let foo = Arc::try_new(try_vec![1.0, 2.0, 3.0])?;
/// // The two syntaxes below are equivalent.
/// let a = foo.clone();
/// let b = Arc::clone(&foo);
/// // a, b, and foo are all Arcs that point to the same memory location
/// # Ok::<_, rune::alloc::Error>(())
/// ```
///
/// ## `Deref` behavior
///
/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`] trait), so you
/// can call `T`'s methods on a value of type `Arc<T>`. To avoid name clashes
/// with `T`'s methods, the methods of `Arc<T>` itself are associated functions,
/// called using [fully qualified syntax]:
///
/// ```
/// use rune::sync::Arc;
///
/// let my_arc = Arc::try_new(())?;
/// let my_weak = Arc::downgrade(&my_arc);
/// # Ok::<_, rune::alloc::Error>(())
/// ```
///
/// `Arc<T>`'s implementations of traits like `Clone` may also be called using
/// fully qualified syntax. Some people prefer to use fully qualified syntax,
/// while others prefer using method-call syntax.
///
/// ```
/// use rune::sync::Arc;
///
/// let arc = Arc::try_new(())?;
/// // Method-call syntax
/// let arc2 = arc.clone();
/// // Fully qualified syntax
/// let arc3 = Arc::clone(&arc);
/// # Ok::<_, rune::alloc::Error>(())
/// ```
///
/// [`Weak<T>`][Weak] does not auto-dereference to `T`, because the inner value
/// may have already been dropped.
///
/// [clone]: Clone::clone
/// [mutex]: ../../std/sync/struct.Mutex.html
/// [rwlock]: ../../std/sync/struct.RwLock.html
/// [atomic]: core::sync::atomic
/// [downgrade]: Arc::downgrade
/// [upgrade]: Weak::upgrade
/// [RefCell\<T>]: core::cell::RefCell
/// [`RefCell<T>`]: core::cell::RefCell
/// [`std::sync`]: ../../std/sync/index.html
/// [`Arc::clone(&from)`]: Arc::clone
/// [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
///
/// # Examples
///
/// Sharing some immutable data between threads:
///
/// ```
/// use std::thread;
///
/// use rune::sync::Arc;
///
/// let five = Arc::try_new(5)?;
///
/// for _ in 0..10 {
///     let five = Arc::clone(&five);
///
///     thread::spawn(move || {
///         println!("{five:?}");
///     });
/// }
/// # Ok::<_, rune::alloc::Error>(())
/// ```
///
/// Sharing a mutable [`AtomicUsize`]:
///
/// [`AtomicUsize`]: core::sync::atomic::AtomicUsize "sync::atomic::AtomicUsize"
///
/// ```
/// use std::sync::atomic::{AtomicUsize, Ordering};
/// use std::thread;
///
/// use rune::sync::Arc;
///
/// let val = Arc::try_new(AtomicUsize::new(5))?;
///
/// for _ in 0..10 {
///     let val = Arc::clone(&val);
///
///     thread::spawn(move || {
///         let v = val.fetch_add(1, Ordering::Relaxed);
///         println!("{v:?}");
///     });
/// }
/// # Ok::<_, rune::alloc::Error>(())
/// ```
pub struct Arc<T, A = Global>
where
    T: ?Sized,
    A: Allocator,
{
    ptr: NonNull<ArcInner<T>>,
    phantom: PhantomData<ArcInner<T>>,
    alloc: A,
}

unsafe impl<T, A> Send for Arc<T, A>
where
    T: ?Sized + Sync + Send,
    A: Allocator + Send,
{
}

unsafe impl<T, A> Sync for Arc<T, A>
where
    T: ?Sized + Sync + Send,
    A: Allocator + Sync,
{
}

impl<T, A> UnwindSafe for Arc<T, A>
where
    T: RefUnwindSafe + ?Sized,
    A: Allocator + UnwindSafe,
{
}

impl<T> Arc<[T]> {
    /// Copy elements from slice into newly allocated `Arc<[T]>`.
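    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let shared: Arc<[i32]> = Arc::copy_from_slice(&[1, 2, 3])?;
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```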
    #[doc(hidden)]
    pub fn copy_from_slice(v: &[T]) -> Result<Self, AllocError>
    where
        T: Copy,
    {
        Self::copy_from_slice_in(v, Global)
    }
}

impl<T, A> Arc<[T], A>
where
    A: Allocator,
{
    /// Allocates an `ArcInner<[T]>` with the given length.
    unsafe fn try_allocate_for_slice_in(
        len: usize,
        alloc: &A,
    ) -> Result<*mut ArcInner<[T]>, AllocError> {
        unsafe {
            Self::try_allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| alloc.allocate(layout),
                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut ArcInner<[T]>,
            )
        }
    }

    /// Copy elements from slice into newly allocated `Arc<[T]>`.
    #[doc(hidden)]
    pub fn copy_from_slice_in(v: &[T], alloc: A) -> Result<Self, AllocError>
    where
        T: Copy,
    {
        unsafe {
            let ptr = Self::try_allocate_for_slice_in(v.len(), &alloc)?;
            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).data) as *mut T, v.len());
            Ok(Self::from_ptr_in(ptr, alloc))
        }
    }
}

/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
/// managed allocation.
///
/// The allocation is accessed by calling [`upgrade`] on the `Weak` pointer,
/// which returns an <code>[Option]<[Arc]\<T>></code>.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak`
/// itself makes no guarantees about the value still being present. Thus it may
/// return [`None`] when [`upgrade`]d. Note however that a `Weak` reference
/// *does* prevent the allocation itself (the backing store) from being
/// deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the
/// allocation managed by [`Arc`] without preventing its inner value from being
/// dropped. It is also used to prevent circular references between [`Arc`]
/// pointers, since mutual owning references would never allow either [`Arc`]
/// to be dropped. For example, a tree could have strong [`Arc`] pointers from
/// parent nodes to children, and `Weak` pointers from children back to their
/// parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
///
/// [`upgrade`]: Weak::upgrade
pub struct Weak<T, A = Global>
where
    T: ?Sized,
    A: Allocator,
{
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn't need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because `ArcInner` has alignment at least 2.
    // This is only possible when `T: Sized`; unsized `T` never dangle.
    ptr: NonNull<ArcInner<T>>,
    alloc: A,
}

/// Helper type to allow accessing the reference counts without making any
/// assertions about the data field.
struct WeakInner<'a> {
    weak: &'a AtomicUsize,
    #[allow(unused)]
    strong: &'a AtomicUsize,
}

impl<T, A> Weak<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying dropping
    /// of the inner value if successful.
    ///
    /// Returns [`None`] if the inner value has since been dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
    /// assert!(strong_five.is_some());
    ///
    /// // Destroy all strong pointers.
    /// drop(strong_five);
    /// drop(five);
    ///
    /// assert!(weak_five.upgrade().is_none());
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[must_use = "this returns a new `Arc`, \
                  without modifying the original weak pointer"]
    pub fn upgrade(&self) -> Option<Arc<T, A>>
    where
        A: Clone,
    {
        #[inline]
        fn checked_increment(n: usize) -> Option<usize> {
            // Any write of 0 we can observe leaves the field in a permanently
            // zero state.
            if n == 0 {
                return None;
            }

            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
            assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR);
            Some(n + 1)
        }

        // We use a CAS loop to increment the strong count instead of a
        // fetch_add as this function should never take the reference count
        // from zero to one.
        //
        // Relaxed is fine for the failure case because we don't have any
        // expectations about the new state. Acquire is necessary for the
        // success case to synchronise with `Arc::new_cyclic`, when the inner
        // value can be initialized after `Weak` references have already been
        // created. In that case, we expect to observe the fully initialized
        // value.
        if self
            .inner()?
            .strong
            .fetch_update(Acquire, Relaxed, checked_increment)
            .is_ok()
        {
            // SAFETY: pointer is not null, verified in checked_increment
            unsafe { Some(Arc::from_inner_in(self.ptr, self.alloc.clone())) }
        } else {
            None
        }
    }

    /// Gets the number of strong (`Arc`) pointers pointing to this allocation.
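    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// assert_eq!(weak_five.strong_count(), 1);
    /// drop(five);
    /// assert_eq!(weak_five.strong_count(), 0);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```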
    #[must_use]
    pub fn strong_count(&self) -> usize {
        if let Some(inner) = self.inner() {
            inner.strong.load(Relaxed)
        } else {
            0
        }
    }

    /// Gets an approximation of the number of `Weak` pointers pointing to this
    /// allocation.
    ///
    /// # Accuracy
    ///
    /// Due to implementation details, the returned value can be off by 1 in
    /// either direction when other threads are manipulating any `Arc`s or
    /// `Weak`s pointing to the same allocation.
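    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// // The implicit weak reference held by the strong count is not counted.
    /// assert_eq!(weak_five.weak_count(), 1);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```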
    #[must_use]
    pub fn weak_count(&self) -> usize {
        if let Some(inner) = self.inner() {
            let weak = inner.weak.load(Acquire);
            let strong = inner.strong.load(Relaxed);
            if strong == 0 {
                0
            } else {
                // Since we observed that there was at least one strong pointer
                // after reading the weak count, we know that the implicit weak
                // reference (present whenever any strong references are alive)
                // was still around when we observed the weak count, and can
                // therefore safely subtract it.
                weak - 1
            }
        } else {
            0
        }
    }

    /// Returns `None` when the pointer is dangling and there is no allocated
    /// `ArcInner`.
    #[inline]
    fn inner(&self) -> Option<WeakInner<'_>> {
        let ptr = self.ptr.as_ptr();

        if is_dangling(ptr) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data"
            // field, as the field may be mutated concurrently (for example, if
            // the last `Arc` is dropped, the data field will be dropped
            // in-place).
            Some(unsafe {
                WeakInner {
                    strong: &(*ptr).strong,
                    weak: &(*ptr).weak,
                }
            })
        }
    }
}

impl<T, A> Clone for Weak<T, A>
where
    T: ?Sized,
    A: Allocator + Clone,
{
    /// Makes a clone of the `Weak` pointer that points to the same allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::alloc::sync::{Arc, Weak};
    ///
    /// let weak_five = Arc::downgrade(&Arc::try_new(5)?);
    ///
    /// let _ = Weak::clone(&weak_five);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T, A> {
        if let Some(inner) = self.inner() {
            // See comments in Arc::clone() for why this is relaxed. This can
            // use a fetch_add (ignoring the lock) because the weak count is
            // only locked when there are *no other* weak pointers in
            // existence. (So we can't be running this code in that case).
            let old_size = inner.weak.fetch_add(1, Relaxed);

            // See comments in Arc::clone() for why we do this (for mem::forget).
            if old_size > MAX_REFCOUNT {
                abort();
            }
        }

        Weak {
            ptr: self.ptr,
            alloc: self.alloc.clone(),
        }
    }
}

impl<T, A> Drop for Weak<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::alloc::sync::{Arc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::try_new(Foo)?;
    /// let weak_foo = Arc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo); // Doesn't print anything
    /// drop(foo); // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then it's time to
        // deallocate the data entirely. See the discussion in Arc::drop()
        // about the memory orderings.
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run ON that remaining weak
        // ref, which can only happen after the lock is released.
        let Some(inner) = self.inner() else {
            return;
        };

        if inner.weak.fetch_sub(1, Release) == 1 {
            acquire!(inner.weak);

            // Make sure we aren't trying to "deallocate" the shared static for
            // empty slices used by Default::default.
            debug_assert!(
                !ptr::addr_eq(self.ptr.as_ptr(), &STATIC_INNER_SLICE.inner),
                "Arc/Weaks backed by a static should never be deallocated. \
                 Likely decrement_strong_count or from_raw were called too many times.",
            );

            unsafe {
                self.alloc
                    .deallocate(self.ptr.cast(), for_value_raw(self.ptr.as_ptr()))
            }
        }
    }
}

unsafe impl<T, A> Send for Weak<T, A>
where
    T: ?Sized + Sync + Send,
    A: Allocator + Send,
{
}

unsafe impl<T, A> Sync for Weak<T, A>
where
    T: ?Sized + Sync + Send,
    A: Allocator + Sync,
{
}

impl<T, A> fmt::Debug for Weak<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(Weak)")
    }
}

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable inner
// types.
#[repr(C)]
struct ArcInner<T>
where
    T: ?Sized,
{
    strong: AtomicUsize,

    // The value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: AtomicUsize,

    data: T,
}

/// Calculate layout for `ArcInner<T>` using the inner value's layout.
fn arcinner_layout_for_value_layout(layout: Layout) -> Layout {
    // Calculate layout using the given value layout.
    // Previously, layout was calculated on the expression
    // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
    // reference (see #54908).
    Layout::new::<ArcInner<()>>()
        .extend(layout)
        .unwrap()
        .0
        .pad_to_align()
}

unsafe impl<T> Send for ArcInner<T> where T: ?Sized + Sync + Send {}
unsafe impl<T> Sync for ArcInner<T> where T: ?Sized + Sync + Send {}

impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
    ///
    /// # Panics
    ///
    /// Panics if the allocation fails with an [`Error`][crate::error::Error].
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// ```
    #[inline]
    #[deprecated = "Use `Arc::try_new` instead, which uses checked allocations."]
    pub fn new(data: T) -> Arc<T> {
        match Self::try_new_in(data, Global) {
            Ok(arc) => arc,
            Err(err) => panic!("{err}"),
        }
    }

    /// Constructs a new `Arc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    pub fn try_new(data: T) -> Result<Arc<T>> {
        Self::try_new_in(data, Global)
    }
}

impl<T, A> Arc<T, A>
where
    A: Allocator,
{
    /// Constructs a new `Arc<T>` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    /// use rune::alloc::alloc::Global;
    ///
    /// let five = Arc::try_new_in(5, Global)?;
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    pub fn try_new_in(data: T, alloc: A) -> Result<Arc<T, A>> {
        // Start the weak pointer count as 1 which is the weak pointer that's
        // held by all the strong pointers (kinda), see std/rc.rs for more
        // info.
        let x = Box::try_new_in(
            ArcInner {
                strong: AtomicUsize::new(1),
                weak: AtomicUsize::new(1),
                data,
            },
            alloc,
        )?;

        let (ptr, alloc) = Box::into_unique_with_allocator(x);
        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
    }
}

impl<T, A> Arc<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Arc::allocator(&a)` instead of `a.allocator()`. This is so that
    /// there is no conflict with a method on the inner type.
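    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    /// use rune::alloc::alloc::Global;
    ///
    /// let five = Arc::try_new_in(5, Global)?;
    /// let _alloc: &Global = Arc::allocator(&five);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```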
    #[inline]
    pub fn allocator(this: &Self) -> &A {
        &this.alloc
    }

    /// Consumes the `Arc`, returning the wrapped pointer and allocator.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Arc`
    /// using [`Arc::from_raw_in`].
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::alloc::prelude::*;
    /// use rune::sync::Arc;
    /// use rune::alloc::alloc::Global;
    ///
    /// let x = Arc::try_new_in("hello".try_to_owned()?, Global)?;
    /// let (ptr, alloc) = Arc::into_raw_with_allocator(x);
    /// assert_eq!(unsafe { &*ptr }, "hello");
    /// let x = unsafe { Arc::from_raw_in(ptr, alloc) };
    /// assert_eq!(&*x, "hello");
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
        let this = mem::ManuallyDrop::new(this);
        let ptr = Self::as_ptr(&this);
        // SAFETY: `this` is ManuallyDrop so the allocator will not be
        // double-dropped.
        let alloc = unsafe { ptr::read(&this.alloc) };
        (ptr, alloc)
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Arc` is not consumed.
    /// The pointer is valid for as long as there are strong counts in the
    /// `Arc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::alloc::prelude::*;
    /// use rune::sync::Arc;
    ///
    /// let x = Arc::try_new("hello".try_to_owned()?)?;
    /// let y = Arc::clone(&x);
    /// let x_ptr = Arc::as_ptr(&x);
    /// assert_eq!(x_ptr, Arc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[must_use]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or `inner` because this
        // is required to retain raw/mut provenance such that e.g. `get_mut`
        // can write through the pointer after the `Arc` is recovered through
        // `from_raw`.
        unsafe { &raw mut (*ptr).data }
    }

    /// Constructs an `Arc<T, A>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// `Arc<U, A>::into_raw_with_allocator` with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`.
    ///   This is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Arc<U>` was constructed
    ///   through `Arc<T>` and then converted to `Arc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`] for more information on what
    /// restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by `alloc`.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory
    /// unsafety, even if the returned `Arc<T>` is never accessed.
    ///
    /// [unsized coercion]:
    /// https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Safety
    ///
    /// The pointer must point to an instance which has previously been
    /// returned by [`Arc<T>::into_raw_with_allocator`]. The allocator that was
    /// used must also be compatible.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::alloc::prelude::*;
    /// use rune::sync::Arc;
    /// use rune::alloc::alloc::Global;
    ///
    /// let x = Arc::try_new_in("hello".try_to_owned()?, Global)?;
    /// let (x_ptr, alloc) = Arc::into_raw_with_allocator(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Arc` to prevent leak.
    ///     let x = Arc::from_raw_in(x_ptr, alloc);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let original: &[u8] = &[1, 2, 3];
    /// let x: Arc<[u8]> = Arc::try_from(original)?;
    /// let (x_ptr, alloc) = Arc::into_raw_with_allocator(x);
    ///
    /// unsafe {
    ///     let x: Arc<[u8; 3], _> = Arc::from_raw_in(x_ptr.cast::<[u8; 3]>(), alloc);
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        unsafe {
            let offset = data_offset(ptr);
            // Reverse the offset to find the original ArcInner.
            let arc_ptr = ptr.byte_sub(offset) as *mut ArcInner<T>;
            Self::from_ptr_in(arc_ptr, alloc)
        }
    }

    /// Returns `true` if the two `Arc`s point to the same allocation in a vein
    /// similar to [`ptr::eq`]. This function ignores the metadata of `dyn
    /// Trait` pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    /// let same_five = Arc::clone(&five);
    /// let other_five = Arc::try_new(5)?;
    ///
    /// assert!(Arc::ptr_eq(&five, &same_five));
    /// assert!(!Arc::ptr_eq(&five, &other_five));
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    ///
    /// [`ptr::eq`]: core::ptr::eq "ptr::eq"
    #[inline]
    #[must_use]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
    }

    /// Returns a mutable reference into the given `Arc`, if there are no other
    /// `Arc` or [`Weak`] pointers to the same allocation.
    ///
    /// Returns [`None`] otherwise, because it is not safe to mutate a shared
    /// value.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let mut x = Arc::try_new(3)?;
    /// *Arc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Arc::clone(&x);
    /// assert!(Arc::get_mut(&mut x).is_none());
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if Self::is_unique(this) {
            // This unsafety is ok because we're guaranteed that the pointer
            // returned is the *only* pointer that will ever be returned to T.
            // Our reference count is guaranteed to be 1 at this point, and we
            // required the Arc itself to be `mut`, so we're returning the only
            // possible reference to the inner data.
            unsafe { Some(Arc::get_mut_unchecked(this)) }
        } else {
            None
        }
    }

    /// Returns a mutable reference into the given `Arc`, without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Arc::get_mut
    ///
    /// # Safety
    ///
    /// If any other `Arc` or [`Weak`] pointers to the same allocation exist,
    /// then they must not be dereferenced or have active borrows for the
    /// duration of the returned borrow, and their inner type must be exactly
    /// the same as the inner type of this `Arc` (including lifetimes). This is
    /// trivially the case if no such pointers exist, for example immediately
    /// after `Arc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    /// use rune::alloc::String;
    ///
    /// let mut x = Arc::try_new(String::new())?;
    ///
    /// unsafe {
    ///     Arc::get_mut_unchecked(&mut x).try_push_str("foo")?
    /// }
    ///
    /// assert_eq!(*x, "foo");
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    ///
    /// Other `Arc` pointers to the same allocation must be to the same type.
    ///
    /// ```no_run
    /// use rune::sync::Arc;
    ///
    /// let x: Arc<str> = Arc::try_from("Hello, world!")?;
    /// let mut y: Arc<[u8]> = x.clone().try_into()?;
    ///
    /// unsafe {
    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
    ///     Arc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
    /// }
    ///
    /// println!("{}", &*x); // Invalid UTF-8 in a str
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    ///
    /// Other `Arc` pointers to the same allocation must be to the exact same
    /// type, including lifetimes.
    ///
    /// ```no_run
    /// use rune::sync::Arc;
    ///
    /// let x: Arc<&str> = Arc::try_new("Hello, world!")?;
    ///
    /// {
    ///     let s = String::from("Oh, no!");
    ///     let mut y: Arc<&str> = x.clone();
    ///     unsafe {
    ///         // this is Undefined Behavior, because x's inner type
    ///         // is &'long str, not &'short str
    ///         *Arc::get_mut_unchecked(&mut y) = &s;
    ///     }
    /// }
    ///
    /// println!("{}", &*x); // Use-after-free
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count"
        // fields, as this would alias with concurrent access to the reference
        // counts (e.g. by `Weak`).
        unsafe { &mut (*this.ptr.as_ptr()).data }
    }

    /// Determine whether this is the unique reference to the underlying data.
    ///
    /// Returns `true` if there are no other `Arc` or [`Weak`] pointers to the
    /// same allocation; returns `false` otherwise.
    ///
    /// If this function returns `true`, then it is guaranteed to be safe to
    /// call [`get_mut_unchecked`] on this `Arc`, so long as no clones occur in
    /// between.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let x = Arc::try_new(3)?;
    /// assert!(Arc::is_unique(&x));
    ///
    /// let y = Arc::clone(&x);
    /// assert!(!Arc::is_unique(&x));
    /// drop(y);
    ///
    /// // Weak references also count, because they could be upgraded at any time.
    /// let z = Arc::downgrade(&x);
    /// assert!(!Arc::is_unique(&x));
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    ///
    /// # Pointer invalidation
    ///
    /// This function will always return the same value as
    /// `Arc::get_mut(arc).is_some()`. However, unlike that operation it does
    /// not produce any mutable references to the underlying data, meaning no
    /// pointers to the data inside the `Arc` are invalidated by the call.
    /// Thus, the following code is valid, even though it would be UB if it
    /// used `Arc::get_mut`:
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let arc = Arc::try_new(5)?;
    /// let pointer: *const i32 = &*arc;
    /// assert!(Arc::is_unique(&arc));
    /// assert_eq!(unsafe { *pointer }, 5);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    ///
    /// # Atomic orderings
    ///
    /// Concurrent drops to other `Arc` pointers to the same allocation will
    /// synchronize with this call - that is, this call performs an `Acquire`
    /// operation on the underlying strong and weak ref counts. This ensures
    /// that calling `get_mut_unchecked` is safe.
    ///
    /// Note that this operation requires locking the weak ref count, so
    /// concurrent calls to `downgrade` may spin-loop for a short period of
    /// time.
    ///
    /// [`get_mut_unchecked`]: Self::get_mut_unchecked
    #[inline]
    pub fn is_unique(this: &Self) -> bool {
        // Lock the weak pointer count if we appear to be the sole weak pointer
        // holder.
        //
        // The acquire label here ensures a happens-before relationship with
        // any writes to `strong` (in particular in `Weak::upgrade`) prior to
        // decrements of the `weak` count (via `Weak::drop`, which uses
        // release). If the upgraded weak ref was never dropped, the CAS here
        // will fail so we do not care to synchronize.
        if this
            .inner()
            .weak
            .compare_exchange(1, usize::MAX, Acquire, Relaxed)
            .is_ok()
        {
            // This needs to be an `Acquire` to synchronize with the decrement
            // of the `strong` counter in `drop` -- the only access that
            // happens when any but the last reference is being dropped.
            let unique = this.inner().strong.load(Acquire) == 1;

            // The release write here synchronizes with a read in `downgrade`,
            // effectively preventing the above read of `strong` from happening
            // after the write.
            this.inner().weak.store(1, Release); // release the lock
            unique
        } else {
            false
        }
    }

    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// let weak_five = Arc::downgrade(&five);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[must_use = "this returns a new `Weak` pointer, \
                  without modifying the original `Arc`"]
    pub fn downgrade(this: &Self) -> Weak<T, A>
    where
        A: Clone,
    {
        // This Relaxed is OK because we're checking the value in the CAS
        // below.
        let mut cur = this.inner().weak.load(Relaxed);

        loop {
            // Check if the weak counter is currently "locked"; if so, spin.
            if cur == usize::MAX {
                hint::spin_loop();
                cur = this.inner().weak.load(Relaxed);
                continue;
            }

            // We can't allow the refcount to increase much past `MAX_REFCOUNT`.
            assert!(cur <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR);

            // NOTE: this code currently ignores the possibility of overflow
            // into usize::MAX; in general both Rc and Arc need to be adjusted
            // to deal with overflow.

            // Unlike with Clone(), we need this to be an Acquire read to
            // synchronize with the write coming from `is_unique`, so that the
            // events prior to that write happen before this read.
            match this
                .inner()
                .weak
                .compare_exchange_weak(cur, cur + 1, Acquire, Relaxed)
            {
                Ok(_) => {
                    // Make sure we do not create a dangling Weak.
                    debug_assert!(!is_dangling(this.ptr.as_ptr()));
                    return Weak {
                        ptr: this.ptr,
                        alloc: this.alloc.clone(),
                    };
                }
                Err(old) => cur = old,
            }
        }
    }

    /// Allocates an `ArcInner<T>` with sufficient space for a possibly-unsized
    /// inner value, where the value has the layout provided.
    ///
    /// The function `mem_to_arcinner` is called with the data pointer and must
    /// return back a (potentially fat) pointer for the `ArcInner<T>`.
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> Result<*mut ArcInner<T>, AllocError> {
        let layout = arcinner_layout_for_value_layout(value_layout);
        let ptr = allocate(layout)?;
        Ok(unsafe { Self::initialize_arcinner(ptr, layout, mem_to_arcinner) })
    }

    unsafe fn initialize_arcinner(
        ptr: NonNull<[u8]>,
        layout: Layout,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> *mut ArcInner<T> {
        let inner = mem_to_arcinner(ptr.cast().as_ptr());
        // TODO: Switch to `Layout::for_value_raw` once stable.
        debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);

        unsafe {
            (&raw mut (*inner).strong).write(AtomicUsize::new(1));
            (&raw mut (*inner).weak).write(AtomicUsize::new(1));
        }

        inner
    }

    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
        Self {
            ptr,
            phantom: PhantomData,
            alloc,
        }
    }

    #[inline]
    unsafe fn from_ptr_in(ptr: *mut ArcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // This unsafety is ok because while this arc is alive we're guaranteed
        // that the inner pointer is valid. Furthermore, we know that the
        // `ArcInner` structure itself is `Sync` because the inner data is
        // `Sync` as well, so we're ok loaning out an immutable pointer to
        // these contents.
        unsafe { self.ptr.as_ref() }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Drop the weak ref collectively held by all strong references when
        // this variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        //
        // Take a reference to `self.alloc` instead of cloning because 1. it'll
        // last long enough, and 2. you should be able to drop `Arc`s with
        // unclonable allocators.
        let _weak = Weak {
            ptr: self.ptr,
            alloc: &self.alloc,
        };

        // Destroy the data at this time, even though we must not free the box
        // allocation itself (there might still be weak pointers lying around).
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is
        // borrowed.
        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
    }
}

impl<T, A> Clone for Arc<T, A>
where
    T: ?Sized,
    A: Allocator + Clone,
{
    /// Makes a clone of the `Arc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// let _ = Arc::clone(&five);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn clone(&self) -> Arc<T, A> {
        // Using a relaxed ordering is alright here, as knowledge of the
        // original reference prevents other threads from erroneously deleting
        // the object.
        //
        // As explained in the [Boost documentation][1], increasing the
        // reference counter can always be done with memory_order_relaxed: new
        // references to an object can only be formed from an existing
        // reference, and passing an existing reference from one thread to
        // another must already provide any required synchronization.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        let old_size = self.inner().strong.fetch_add(1, Relaxed);

        // However we need to guard against massive refcounts in case someone
        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
        // and users will use-after free. This branch will never be taken in
        // any realistic program. We abort because such a program is incredibly
        // degenerate, and we don't care to support it.
        //
        // This check is not 100% water-proof: we error when the refcount grows
        // beyond `isize::MAX`. But we do that check *after* having done the
        // increment, so there is a chance here that the worst already happened
        // and we actually do overflow the `usize` counter. However, that
        // requires the counter to grow from `isize::MAX` to `usize::MAX`
        // between the increment above and the `abort` below, which seems
        // exceedingly unlikely.
        //
        // This is a global invariant, and also applies when using a
        // compare-exchange loop to increment counters in other methods.
        // Otherwise, the counter could be brought to an almost-overflow using
        // a compare-exchange loop, and then overflow using a few
        // `fetch_add`s.
        if old_size > MAX_REFCOUNT {
            abort();
        }

        unsafe { Self::from_inner_in(self.ptr, self.alloc.clone()) }
    }
}

impl<T, A> TryClone for Arc<T, A>
where
    T: ?Sized,
    A: Allocator + Clone,
{
    #[inline]
    fn try_clone(&self) -> Result<Self> {
        Ok(self.clone())
    }
}

impl<T, A> Deref for Arc<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.inner().data
    }
}

impl<T, A> Borrow<T> for Arc<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    #[inline]
    fn borrow(&self) -> &T {
        self
    }
}

impl<T, A> AsRef<T> for Arc<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    #[inline]
    fn as_ref(&self) -> &T {
        self
    }
}

impl<T, A> Drop for Arc<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    /// Drops the `Arc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::try_new(Foo)?;
    /// let foo2 = Arc::clone(&foo);
    ///
    /// drop(foo); // Doesn't print anything
    /// drop(foo2); // Prints "dropped!"
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn drop(&mut self) {
        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 {
            return;
        }

        // This fence is needed to prevent reordering of use of the data and
        // deletion of the data. Because it is marked `Release`, the decreasing
        // of the reference count synchronizes with this `Acquire` fence. This
        // means that use of the data happens before decreasing the reference
        // count, which happens before this fence, which happens before the
        // deletion of the data.
        //
        // As explained in the [Boost documentation][1],
        //
        // > It is important to enforce any possible access to the object in one
        // > thread (through an existing reference) to *happen before* deleting
        // > the object in a different thread. This is achieved by a "release"
        // > operation after dropping a reference (any access to the object
        // > through this reference must obviously happened before), and an
        // > "acquire" operation before deleting the object.
        //
        // In particular, while the contents of an Arc are usually immutable,
        // it's possible to have interior writes to something like a Mutex<T>.
        // Since a Mutex is not acquired when it is deleted, we can't rely on
        // its synchronization logic to make writes in thread A visible to a
        // destructor running in thread B.
        //
        // Also note that the Acquire fence here could probably be replaced
        // with an Acquire load, which could improve performance in
        // highly-contended situations. See [2].
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        // [2]: (https://github.com/rust-lang/rust/pull/41714)
        acquire!(self.inner().strong);

        // Make sure we aren't trying to "drop" the shared static for empty
        // slices used by Default::default.
        debug_assert!(
            !ptr::addr_eq(self.ptr.as_ptr(), &STATIC_INNER_SLICE.inner),
            "Arcs backed by a static should never reach a strong count of 0. \
             Likely decrement_strong_count or from_raw were called too many times.",
        );

        unsafe {
            self.drop_slow();
        }
    }
}

impl<T, A> PartialEq for Arc<T, A>
where
    T: ?Sized + PartialEq,
    A: Allocator,
{
    /// Equality for two `Arc`s.
    ///
    /// Two `Arc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality), two
    /// `Arc`s that point to the same allocation are always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert!(five == Arc::try_new(5)?);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn eq(&self, other: &Arc<T, A>) -> bool {
        Arc::ptr_eq(self, other) || **self == **other
    }

    /// Inequality for two `Arc`s.
    ///
    /// Two `Arc`s are not equal if their inner values are not equal.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality), two
    /// `Arc`s that point to the same allocation are never unequal.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert!(five != Arc::try_new(6)?);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[allow(clippy::partialeq_ne_impl)]
    #[inline]
    fn ne(&self, other: &Arc<T, A>) -> bool {
        !Arc::ptr_eq(self, other) && **self != **other
    }
}

impl<T, A> PartialOrd for Arc<T, A>
where
    T: ?Sized + PartialOrd,
    A: Allocator,
{
    /// Partial comparison for two `Arc`s.
    ///
    /// The two are compared by calling `partial_cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::cmp::Ordering;
    ///
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::try_new(6)?));
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn partial_cmp(&self, other: &Arc<T, A>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    /// Less-than comparison for two `Arc`s.
    ///
    /// The two are compared by calling `<` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert!(five < Arc::try_new(6)?);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn lt(&self, other: &Arc<T, A>) -> bool {
        *(*self) < *(*other)
    }

    /// 'Less than or equal to' comparison for two `Arc`s.
    ///
    /// The two are compared by calling `<=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert!(five <= Arc::try_new(5)?);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn le(&self, other: &Arc<T, A>) -> bool {
        *(*self) <= *(*other)
    }

    /// Greater-than comparison for two `Arc`s.
    ///
    /// The two are compared by calling `>` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert!(five > Arc::try_new(4)?);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn gt(&self, other: &Arc<T, A>) -> bool {
        *(*self) > *(*other)
    }

    /// 'Greater than or equal to' comparison for two `Arc`s.
    ///
    /// The two are compared by calling `>=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert!(five >= Arc::try_new(5)?);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn ge(&self, other: &Arc<T, A>) -> bool {
        *(*self) >= *(*other)
    }
}

impl<T, A> Ord for Arc<T, A>
where
    T: ?Sized + Ord,
    A: Allocator,
{
    /// Comparison for two `Arc`s.
    ///
    /// The two are compared by calling `cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use rune::sync::Arc;
    /// use std::cmp::Ordering;
    ///
    /// let five = Arc::try_new(5)?;
    ///
    /// assert_eq!(Ordering::Less, five.cmp(&Arc::try_new(6)?));
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    fn cmp(&self, other: &Arc<T, A>) -> Ordering {
        (**self).cmp(&**other)
    }
}

impl<T, A> Eq for Arc<T, A>
where
    T: ?Sized + Eq,
    A: Allocator,
{
}

impl<T, A> fmt::Display for Arc<T, A>
where
    T: ?Sized + fmt::Display,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T, A> fmt::Debug for Arc<T, A>
where
    T: ?Sized + fmt::Debug,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T, A> fmt::Pointer for Arc<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&raw const **self), f)
    }
}

impl<T, A> Hash for Arc<T, A>
where
    T: ?Sized + Hash,
    A: Allocator,
{
    #[inline]
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        (**self).hash(state)
    }
}

impl<A> TryFrom<&[u8]> for Arc<[u8], A>
where
    A: Default + Allocator,
{
    type Error = AllocError;

    /// Allocates a reference-counted slice and fills it by copying `v`'s
    /// items.
    ///
    /// # Example
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let original: &[u8] = &[1, 2, 3];
    /// let shared: Arc<[u8]> = Arc::try_from(original)?;
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn try_from(v: &[u8]) -> Result<Self, Self::Error> {
        // SAFETY: `T` is Copy.
        Arc::copy_from_slice_in(v, A::default())
    }
}

impl<T, A> TryFrom<Vec<T, A>> for Arc<[T], A>
where
    A: Allocator,
{
    type Error = AllocError;

    /// Allocates a reference-counted slice and moves `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// use rune::sync::Arc;
    /// use rune::alloc::{try_vec, Vec};
    ///
    /// let unique: Vec<i32> = try_vec![1, 2, 3];
    /// let shared: Arc<[i32]> = Arc::try_from(unique)?;
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn try_from(v: Vec<T, A>) -> Result<Arc<[T], A>, Self::Error> {
        unsafe {
            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();

            let rc_ptr = Self::try_allocate_for_slice_in(len, &alloc)?;
            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).data) as *mut T, len);

            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
            // without dropping its contents or the allocator.
            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
            Ok(Self::from_ptr_in(rc_ptr, alloc))
        }
    }
}

impl<A> TryFrom<&str> for Arc<str, A>
where
    A: Default + Allocator,
{
    type Error = AllocError;

    /// Allocates a reference-counted `str` and copies `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let shared: Arc<str> = Arc::try_from("eggplant")?;
    /// assert_eq!("eggplant", &shared[..]);
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn try_from(v: &str) -> Result<Self, Self::Error> {
        let arc = Arc::try_from(v.as_bytes())?;
        let (ptr, alloc) = Arc::into_raw_with_allocator(arc);
        Ok(unsafe { Arc::from_raw_in(ptr as *const str, alloc) })
    }
}

impl<A> From<Arc<str, A>> for Arc<[u8], A>
where
    A: Allocator,
{
    /// Converts an atomically reference-counted string slice into a byte slice.
    ///
    /// # Example
    ///
    /// ```
    /// use rune::sync::Arc;
    ///
    /// let string: Arc<str> = Arc::try_from("eggplant")?;
    /// let bytes: Arc<[u8]> = Arc::from(string);
    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
    /// # Ok::<_, rune::alloc::Error>(())
    /// ```
    #[inline]
    fn from(arc: Arc<str, A>) -> Self {
        // SAFETY: `str` has the same layout as `[u8]`.
        let (ptr, alloc) = Arc::into_raw_with_allocator(arc);
        unsafe { Arc::from_raw_in(ptr as *const [u8], alloc) }
    }
}

/// Gets the offset within an `ArcInner` for the payload behind a pointer.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously valid
/// instance of `T`, but the `T` is allowed to be dropped.
unsafe fn data_offset<T>(ptr: *const T) -> usize
where
    T: ?Sized,
{
    // Align the unsized value to the end of the ArcInner. Because ArcInner is
    // repr(C), it will always be the last field in memory.
    //
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of align_of_val_raw; this is an implementation
    // detail of the language that must not be relied upon outside of std.
    // TODO: Switch to `align_of_val_raw` once stable.
    unsafe { data_offset_align(align_of_val(&*ptr)) }
}

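// The payload lives after the two reference counts, padded up to the payload's
// alignment. For example, with two `usize` counters on a 64-bit target the
// offset is 16 for any `align <= 16`.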
#[inline]
fn data_offset_align(align: usize) -> usize {
    let layout = Layout::new::<ArcInner<()>>();
    layout.size() + padding_needed_for(&layout, align)
}

const fn padding_needed_for(this: &Layout, align: usize) -> usize {
    // TODO: Switch to `Alignment` once stable.
    let align = if align.is_power_of_two() {
        align
    } else {
        return usize::MAX;
    };
    let len_rounded_up = size_rounded_up_to_custom_align(this, align);
    // SAFETY: Cannot overflow because the rounded-up value is never less than
    // the original size.
    unsafe { len_rounded_up.unchecked_sub(this.size()) }
}

/// Returns the smallest multiple of `align` greater than or equal to
/// `layout.size()`.
///
/// This can return at most `Alignment::MAX` (aka `isize::MAX + 1`) because the
/// original size is at most `isize::MAX`.
#[inline]
const fn size_rounded_up_to_custom_align(layout: &Layout, align: usize) -> usize {
    // SAFETY: Rounded up value is: size_rounded_up = (size + align - 1) &
    // !(align - 1);
    //
    // The arithmetic we do here can never overflow:
    //
    // 1. align is guaranteed to be > 0, so align - 1 is always valid.
    //
    // 2. size is at most `isize::MAX`, so adding `align - 1` (which is at most
    //    `isize::MAX`) can never overflow a `usize`.
    //
    // 3. masking by the alignment can remove at most `align - 1`, which is
    //    what we just added, thus the value we return is never less than the
    //    original `size`.
    //
    // (Size 0 Align MAX is already aligned, so stays the same, but things like
    // Size 1 Align MAX or Size isize::MAX Align 2 round up to `isize::MAX +
    // 1`.)
    unsafe {
        let align_m1 = align.unchecked_sub(1);
        layout.size().unchecked_add(align_m1) & !align_m1
    }
}

/// Struct to hold the static `ArcInner` used for empty `Arc<str/CStr/[T]>` as
/// returned by `Default::default`.
///
/// Layout notes:
/// * `repr(align(16))` so we can use it for `[T]` with `align_of::<T>() <= 16`.
/// * `repr(C)` so `inner` is at offset 0 (and thus guaranteed to actually be
///   aligned to 16).
/// * `[u8; 1]` (to be initialized with 0) so it can be used for `Arc<CStr>`.
#[repr(C, align(16))]
struct SliceArcInnerForStatic {
    inner: ArcInner<[u8; 1]>,
}

static STATIC_INNER_SLICE: SliceArcInnerForStatic = SliceArcInnerForStatic {
    inner: ArcInner {
        strong: AtomicUsize::new(1),
        weak: AtomicUsize::new(1),
        data: [0],
    },
};