1#![cfg_attr(not(all(test, feature = "float")), allow(dead_code, unused_macros))]
4
// Utility macros shared with other backends, included from a generated file
// (presumably produced by the project's codegen — TODO confirm).
#[macro_use]
#[path = "gen/utils.rs"]
mod generated;
8
9use core::sync::atomic::Ordering;
10
// Compile-time assertion. If `$cond` is false, `true as usize - 0` underflows
// to `1 - 0`? No — it evaluates `1 - 0 = 1`... concretely: when `$cond` is
// true the array length is `1 - 1 == 0` and the empty-slice pattern `[]`
// matches; when `$cond` is false the length is `1 - 0 == 1` and the `let []`
// pattern fails to match, producing a compile error in const evaluation.
macro_rules! static_assert {
    ($cond:expr $(,)?) => {{
        let [] = [(); true as usize - $crate::utils::_assert_is_bool($cond) as usize];
    }};
}
// Identity helper for `static_assert!`: forces the asserted expression to
// type-check as `bool` while remaining callable in const contexts.
pub(crate) const fn _assert_is_bool(v: bool) -> bool {
    v
}
19
// Compile-time layout check for an atomic type: its alignment must equal its
// size, and its size must equal the size of the value type it wraps.
macro_rules! static_assert_layout {
    ($atomic_type:ty, $value_type:ty) => {
        static_assert!(
            core::mem::align_of::<$atomic_type>() == core::mem::size_of::<$atomic_type>()
        );
        static_assert!(core::mem::size_of::<$atomic_type>() == core::mem::size_of::<$value_type>());
    };
}
28
// Attaches a computed documentation string (e.g. built with `concat!`) to the
// item that follows, via the `#[doc = ...]` attribute form.
macro_rules! doc_comment {
    ($doc:expr, $($tt:tt)*) => {
        #[doc = $doc]
        $($tt)*
    };
}
36
// ifunc-style runtime dispatch: `FUNC` initially points at the cold `detect`
// function; on first call, `detect` runs `$detect_body` to choose an
// implementation, caches the chosen function pointer in `FUNC`, and calls it.
// Subsequent calls load the cached pointer and call it directly.
//
// Relaxed ordering suffices for `FUNC`: every value ever stored in it is a
// valid function pointer of type `FnTy`, so whichever value a thread observes
// is safe to call.
//
// NOTE(review): the expansion uses `transmute` and calls through the pointer
// without its own `unsafe` block, so this macro must be expanded inside an
// `unsafe` context that upholds the selected implementation's contract.
#[allow(unused_macros)]
#[cfg(not(portable_atomic_no_outline_atomics))]
#[cfg(any(
    target_arch = "aarch64",
    target_arch = "arm",
    target_arch = "arm64ec",
    target_arch = "powerpc64",
    target_arch = "riscv32",
    target_arch = "riscv64",
    all(target_arch = "x86_64", not(any(target_env = "sgx", miri))),
))]
macro_rules! ifunc {
    (unsafe fn($($arg_pat:ident: $arg_ty:ty),*) $(-> $ret_ty:ty)? { $($detect_body:tt)* }) => {{
        type FnTy = unsafe fn($($arg_ty),*) $(-> $ret_ty)?;
        // Starts as `detect`; replaced with the detected impl on first call.
        static FUNC: core::sync::atomic::AtomicPtr<()>
            = core::sync::atomic::AtomicPtr::new(detect as *mut ());
        #[cold]
        unsafe fn detect($($arg_pat: $arg_ty),*) $(-> $ret_ty)? {
            let func: FnTy = { $($detect_body)* };
            FUNC.store(func as *mut (), core::sync::atomic::Ordering::Relaxed);
            // SAFETY: the caller of the `ifunc!` expansion upholds the
            // safety contract of the selected implementation.
            unsafe { func($($arg_pat),*) }
        }
        let func = {
            core::mem::transmute::<*mut (), FnTy>(FUNC.load(core::sync::atomic::Ordering::Relaxed))
        };
        func($($arg_pat),*)
    }};
}
80
// Defines one or more wrapper functions `$new` that forward to `$from`,
// passing the shared leading arguments followed by the fixed trailing
// arguments `$last_args`. The alias list is processed recursively, one
// alias per step.
#[allow(unused_macros)]
#[cfg(not(portable_atomic_no_outline_atomics))]
#[cfg(any(
    target_arch = "aarch64",
    target_arch = "arm",
    target_arch = "arm64ec",
    target_arch = "powerpc64",
    target_arch = "riscv32",
    target_arch = "riscv64",
    all(target_arch = "x86_64", not(any(target_env = "sgx", miri))),
))]
macro_rules! fn_alias {
    (
        $(#[$($fn_attr:tt)*])*
        $vis:vis unsafe fn($($arg_pat:ident: $arg_ty:ty),*) $(-> $ret_ty:ty)?;
        $(#[$($alias_attr:tt)*])*
        $new:ident = $from:ident($($last_args:tt)*);
        $($rest:tt)*
    ) => {
        $(#[$($fn_attr)*])*
        $(#[$($alias_attr)*])*
        $vis unsafe fn $new($($arg_pat: $arg_ty),*) $(-> $ret_ty)? {
            // SAFETY: the caller must uphold `$from`'s safety contract.
            unsafe { $from($($arg_pat,)* $($last_args)*) }
        }
        // Recurse to emit the remaining aliases with the same signature.
        fn_alias! {
            $(#[$($fn_attr)*])*
            $vis unsafe fn($($arg_pat: $arg_ty),*) $(-> $ret_ty)?;
            $($rest)*
        }
    };
    // Base case: no aliases left to define.
    (
        $(#[$($attr:tt)*])*
        $vis:vis unsafe fn($($arg_pat:ident: $arg_ty:ty),*) $(-> $ret_ty:ty)?;
    ) => {}
}
117
// Emits the given function as `const fn` when the `const_if` cfg holds and as
// a plain `fn` otherwise (used to gate const-ness on compiler-version cfgs).
macro_rules! const_fn {
    (
        const_if: #[cfg($($cfg:tt)+)];
        $(#[$($attr:tt)*])*
        $vis:vis const $($rest:tt)*
    ) => {
        #[cfg($($cfg)+)]
        $(#[$($attr)*])*
        $vis const $($rest)*
        #[cfg(not($($cfg)+))]
        $(#[$($attr)*])*
        $vis $($rest)*
    };
}
133
// Implements `Debug` and, when the "serde" feature is enabled, `Serialize`
// (serializes the value from a Relaxed load) and `Deserialize` (wraps the
// deserialized value with `Self::new`) for an atomic type.
// AtomicF16/AtomicF128 only get `Debug` — presumably because serde has no
// f16/f128 support (NOTE(review): confirm against serde's primitive impls).
macro_rules! impl_debug_and_serde {
    (AtomicF16) => {
        impl_debug!(AtomicF16);
    };
    (AtomicF128) => {
        impl_debug!(AtomicF128);
    };
    ($atomic_type:ident) => {
        impl_debug!($atomic_type);
        #[cfg(feature = "serde")]
        #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
        impl serde::ser::Serialize for $atomic_type {
            #[allow(clippy::missing_inline_in_public_items)]
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::ser::Serializer,
            {
                // Serialize the currently stored value.
                self.load(Ordering::Relaxed).serialize(serializer)
            }
        }
        #[cfg(feature = "serde")]
        #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
        impl<'de> serde::de::Deserialize<'de> for $atomic_type {
            #[allow(clippy::missing_inline_in_public_items)]
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::de::Deserializer<'de>,
            {
                serde::de::Deserialize::deserialize(deserializer).map(Self::new)
            }
        }
    };
}
// Implements `Debug` for an atomic type by formatting the value obtained
// from a Relaxed load.
macro_rules! impl_debug {
    ($atomic_type:ident) => {
        impl fmt::Debug for $atomic_type {
            #[inline]
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
            }
        }
    };
}
182
// Implements the result-discarding convenience ops (`add`, `sub`, `and`,
// `or`, `xor`) in terms of the corresponding `fetch_*` methods.
// The bool variant omits `add`/`sub` (not defined for boolean atomics).
macro_rules! impl_default_no_fetch_ops {
    ($atomic_type:ident, bool) => {
        impl $atomic_type {
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn and(&self, val: bool, order: Ordering) {
                self.fetch_and(val, order);
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn or(&self, val: bool, order: Ordering) {
                self.fetch_or(val, order);
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn xor(&self, val: bool, order: Ordering) {
                self.fetch_xor(val, order);
            }
        }
    };
    ($atomic_type:ident, $int_type:ty) => {
        impl $atomic_type {
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn add(&self, val: $int_type, order: Ordering) {
                self.fetch_add(val, order);
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn sub(&self, val: $int_type, order: Ordering) {
                self.fetch_sub(val, order);
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn and(&self, val: $int_type, order: Ordering) {
                self.fetch_and(val, order);
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn or(&self, val: $int_type, order: Ordering) {
                self.fetch_or(val, order);
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn xor(&self, val: $int_type, order: Ordering) {
                self.fetch_xor(val, order);
            }
        }
    };
}
// Implements `bit_set`/`bit_clear`/`bit_toggle` on top of `fetch_or`/
// `fetch_and`/`fetch_xor`; each returns whether the targeted bit was
// previously set. `wrapping_shl` masks the shift amount modulo the type's
// bit width, so out-of-range `bit` values wrap rather than panic.
macro_rules! impl_default_bit_opts {
    ($atomic_type:ident, $int_type:ty) => {
        impl $atomic_type {
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn bit_set(&self, bit: u32, order: Ordering) -> bool {
                let mask = <$int_type>::wrapping_shl(1, bit);
                self.fetch_or(mask, order) & mask != 0
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn bit_clear(&self, bit: u32, order: Ordering) -> bool {
                let mask = <$int_type>::wrapping_shl(1, bit);
                self.fetch_and(!mask, order) & mask != 0
            }
            #[inline]
            #[cfg_attr(miri, track_caller)]
            pub(crate) fn bit_toggle(&self, bit: u32, order: Ordering) -> bool {
                let mask = <$int_type>::wrapping_shl(1, bit);
                self.fetch_xor(mask, order) & mask != 0
            }
        }
    };
}
259
// Passes its input through unchanged; useful for applying a single attribute
// (e.g. one `#[cfg]`) to a group of items at once.
macro_rules! items {
    ($($tt:tt)*) => {
        $($tt)*
    };
}
266
/// Hints that `cond` is always true.
///
/// In debug builds a violated hint panics via `unreachable!`; in release
/// builds it invokes `unreachable_unchecked`, so violating it is UB there.
///
/// # Safety
///
/// The caller must guarantee that `cond` is true.
#[allow(dead_code)]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[inline(always)]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) unsafe fn assert_unchecked(cond: bool) {
    // Fast path: the invariant holds, nothing to do.
    if cond {
        return;
    }
    if cfg!(debug_assertions) {
        // Surface the broken invariant instead of invoking UB.
        unreachable!()
    } else {
        // SAFETY: the caller guarantees that `cond` is true, so this
        // branch is unreachable.
        unsafe { core::hint::unreachable_unchecked() }
    }
}
283
/// Validates that `order` may be used for an atomic load.
///
/// `Release` and `AcqRel` have no meaning for loads and cause a panic;
/// `Acquire`, `Relaxed`, and `SeqCst` are accepted.
#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn assert_load_ordering(order: Ordering) {
    match order {
        Ordering::Release => panic!("there is no such thing as a release load"),
        Ordering::AcqRel => panic!("there is no such thing as an acquire-release load"),
        Ordering::Acquire | Ordering::Relaxed | Ordering::SeqCst => {}
        // `Ordering` is #[non_exhaustive]; no other variant exists today.
        _ => unreachable!(),
    }
}
/// Validates that `order` may be used for an atomic store.
///
/// `Acquire` and `AcqRel` have no meaning for stores and cause a panic;
/// `Release`, `Relaxed`, and `SeqCst` are accepted.
#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn assert_store_ordering(order: Ordering) {
    match order {
        Ordering::Acquire => panic!("there is no such thing as an acquire store"),
        Ordering::AcqRel => panic!("there is no such thing as an acquire-release store"),
        Ordering::Release | Ordering::Relaxed | Ordering::SeqCst => {}
        // `Ordering` is #[non_exhaustive]; no other variant exists today.
        _ => unreachable!(),
    }
}
/// Validates a `compare_exchange` ordering pair.
///
/// Any ordering is accepted on the success side; the failure side must be a
/// valid load ordering, so `Release` and `AcqRel` cause a panic.
#[inline]
#[cfg_attr(all(debug_assertions, not(portable_atomic_no_track_caller)), track_caller)]
pub(crate) fn assert_compare_exchange_ordering(success: Ordering, failure: Ordering) {
    // All five orderings are valid on the success side.
    if !matches!(
        success,
        Ordering::AcqRel
            | Ordering::Acquire
            | Ordering::Relaxed
            | Ordering::Release
            | Ordering::SeqCst
    ) {
        // `Ordering` is #[non_exhaustive]; no other variant exists today.
        unreachable!()
    }
    match failure {
        Ordering::Release => panic!("there is no such thing as a release failure ordering"),
        Ordering::AcqRel => {
            panic!("there is no such thing as an acquire-release failure ordering")
        }
        Ordering::Acquire | Ordering::Relaxed | Ordering::SeqCst => {}
        _ => unreachable!(),
    }
}
325
/// Returns a success ordering strengthened, where necessary, so that it is
/// at least as strong as the given failure ordering.
///
/// Mapping: any failure of `SeqCst` forces `SeqCst`; a failure of `Acquire`
/// upgrades `Relaxed` → `Acquire` and `Release` → `AcqRel`; every other
/// combination leaves `success` unchanged.
#[allow(dead_code)]
#[inline]
pub(crate) fn upgrade_success_ordering(success: Ordering, failure: Ordering) -> Ordering {
    if failure == Ordering::SeqCst {
        return Ordering::SeqCst;
    }
    if failure == Ordering::Acquire {
        if success == Ordering::Relaxed {
            return Ordering::Acquire;
        }
        if success == Ordering::Release {
            return Ordering::AcqRel;
        }
    }
    success
}
338
// Zero-extends a 32-bit pointer to a 64-bit value, placing the pointer bits
// in the numerically low half on both endiannesses (the zero pad goes in the
// high half). Returned as `MaybeUninit<u64>` — presumably so it can be used
// directly as an inline-asm operand (note the asm-related cfg gate); the
// value itself is fully initialized.
#[cfg(not(portable_atomic_no_asm_maybe_uninit))]
#[cfg(target_pointer_width = "32")]
#[allow(dead_code)]
#[inline]
pub(crate) fn zero_extend64_ptr(v: *mut ()) -> core::mem::MaybeUninit<u64> {
    #[repr(C)]
    struct ZeroExtended {
        // Big-endian: the first field is the high word, so the pad leads.
        #[cfg(target_endian = "big")]
        pad: *mut (),
        v: *mut (),
        // Little-endian: the last field is the high word, so the pad trails.
        #[cfg(target_endian = "little")]
        pad: *mut (),
    }
    // SAFETY: on 32-bit targets (enforced by the cfg above) ZeroExtended is
    // two 32-bit pointers = 8 bytes, the same size as MaybeUninit<u64>, and
    // any bit pattern is valid for MaybeUninit.
    unsafe { core::mem::transmute(ZeroExtended { v, pad: core::ptr::null_mut() }) }
}
358
// Lets a 128-bit value be viewed either as one `u128` or as its two 64-bit
// halves (`Pair<u64>`), for backends that operate on register pairs.
#[allow(dead_code)]
#[cfg(any(
    target_arch = "aarch64",
    target_arch = "arm64ec",
    target_arch = "powerpc64",
    target_arch = "riscv64",
    target_arch = "s390x",
    target_arch = "x86_64",
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub(crate) union U128 {
    pub(crate) whole: u128,
    pub(crate) pair: Pair<u64>,
}
// 32-bit-target counterpart of U128: views a 64-bit value either as one
// `u64` or as its two 32-bit halves.
#[allow(dead_code)]
#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
#[derive(Clone, Copy)]
#[repr(C)]
pub(crate) union U64 {
    pub(crate) whole: u64,
    pub(crate) pair: Pair<u32>,
}
// The lo/hi halves of U64/U128 with target-dependent field order (repr(C),
// so field order is layout order). `lo` comes first on little-endian targets
// and also on AArch64/Arm/Arm64EC even when big-endian — presumably to match
// the register-pair order expected by those targets' asm implementations
// (NOTE(review): confirm against the arch-specific backends).
#[allow(dead_code)]
#[derive(Clone, Copy)]
#[repr(C)]
pub(crate) struct Pair<T: Copy> {
    #[cfg(any(
        target_endian = "little",
        target_arch = "aarch64",
        target_arch = "arm",
        target_arch = "arm64ec",
    ))]
    pub(crate) lo: T,
    pub(crate) hi: T,
    #[cfg(not(any(
        target_endian = "little",
        target_arch = "aarch64",
        target_arch = "arm",
        target_arch = "arm64ec",
    )))]
    pub(crate) lo: T,
}
412
// Word type used when emulating sub-word atomics with word-sized operations.
#[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
type MinWord = u32;
// Integer type returned for the shift/mask values.
#[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
type RetInt = u32;
// Computes `(aligned_ptr, shift, mask)` for emulating an atomic operation on
// the sub-word value `*ptr` via word-sized atomics:
// - `aligned_ptr`: `ptr` rounded down to `MinWord` alignment;
// - `shift`: bit offset of the value within that word, adjusted for
//   endianness (s390x is grouped with little-endian here);
// - `mask`: the value's bits, pre-shifted into position only when SHIFT_MASK.
#[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
#[allow(dead_code)]
#[inline]
pub(crate) fn create_sub_word_mask_values<T>(ptr: *mut T) -> (*mut MinWord, RetInt, RetInt) {
    #[cfg(portable_atomic_no_strict_provenance)]
    use self::ptr::PtrExt as _;
    use core::mem;
    // Whether the mask must be shifted into position here. On the listed
    // arches it is left unshifted — presumably because the generated code
    // shifts the value instead (NOTE(review): confirm against those
    // backends' usage).
    const SHIFT_MASK: bool = !cfg!(any(
        target_arch = "bpf",
        target_arch = "loongarch64",
        target_arch = "mips",
        target_arch = "mips32r6",
        target_arch = "mips64",
        target_arch = "mips64r6",
        target_arch = "riscv32",
        target_arch = "riscv64",
        target_arch = "s390x",
        target_arch = "sparc",
        target_arch = "sparc64",
        target_arch = "xtensa",
    ));
    let ptr_mask = mem::size_of::<MinWord>() - 1;
    // Round the pointer down to the containing aligned word.
    let aligned_ptr = ptr.with_addr(ptr.addr() & !ptr_mask) as *mut MinWord;
    let ptr_lsb = if SHIFT_MASK {
        ptr.addr() & ptr_mask
    } else {
        // Full address used; the excess high bits are presumably discarded
        // by the modular behavior of the shift instructions on these
        // targets (NOTE(review): confirm) — hence the wrapping_mul below.
        ptr.addr()
    };
    let shift = if cfg!(any(target_endian = "little", target_arch = "s390x")) {
        ptr_lsb.wrapping_mul(8)
    } else {
        // Big-endian: the value sits at the opposite end of the word, so
        // mirror the byte offset within the word before converting to bits.
        (ptr_lsb ^ (mem::size_of::<MinWord>() - mem::size_of::<T>())).wrapping_mul(8)
    };
    // Mask covering exactly `size_of::<T>() * 8` low bits.
    let mut mask: RetInt = (1 << (mem::size_of::<T>() * 8)) - 1;
    if SHIFT_MASK {
        mask <<= shift;
    }
    (aligned_ptr, shift as RetInt, mask)
}
468
// Strict-provenance helpers: re-exports the std/core functions on compilers
// that have them, and provides polyfills when
// cfg(portable_atomic_no_strict_provenance) indicates they are missing.
#[allow(dead_code)]
pub(crate) mod ptr {
    #[cfg(portable_atomic_no_strict_provenance)]
    use core::mem;
    #[cfg(not(portable_atomic_no_strict_provenance))]
    #[allow(unused_imports)]
    pub(crate) use core::ptr::{with_exposed_provenance, with_exposed_provenance_mut};

    // Polyfill: plain int-to-pointer cast.
    #[cfg(portable_atomic_no_strict_provenance)]
    #[inline(always)]
    #[must_use]
    #[cfg_attr(miri, track_caller)]
    pub(crate) fn with_exposed_provenance<T>(addr: usize) -> *const T {
        addr as *const T
    }
    #[cfg(portable_atomic_no_strict_provenance)]
    #[inline(always)]
    #[must_use]
    #[cfg_attr(miri, track_caller)]
    pub(crate) fn with_exposed_provenance_mut<T>(addr: usize) -> *mut T {
        addr as *mut T
    }

    // Polyfill trait for the `pointer::addr`/`pointer::with_addr` methods.
    #[cfg(portable_atomic_no_strict_provenance)]
    pub(crate) trait PtrExt<T: ?Sized>: Copy {
        #[must_use]
        fn addr(self) -> usize;
        #[must_use]
        fn with_addr(self, addr: usize) -> Self
        where
            T: Sized;
    }
    #[cfg(portable_atomic_no_strict_provenance)]
    impl<T: ?Sized> PtrExt<T> for *mut T {
        #[inline(always)]
        #[must_use]
        fn addr(self) -> usize {
            // Transmute rather than an `as` cast — presumably to extract the
            // address without exposing provenance, mirroring std's polyfill
            // guidance (NOTE(review): confirm).
            #[allow(clippy::transmutes_expressible_as_ptr_casts)]
            // SAFETY: `*mut ()` and `usize` have the same size, and any
            // pointer bit pattern is a valid usize.
            unsafe {
                mem::transmute(self as *mut ())
            }
        }
        #[allow(clippy::cast_possible_wrap)]
        #[inline]
        #[must_use]
        fn with_addr(self, addr: usize) -> Self
        where
            T: Sized,
        {
            // Apply the address change as a wrapping byte offset from `self`
            // so the result keeps `self`'s provenance.
            let self_addr = self.addr() as isize;
            let dest_addr = addr as isize;
            let offset = dest_addr.wrapping_sub(self_addr);
            (self as *mut u8).wrapping_offset(offset) as *mut T
        }
    }
}
534
// Minimal FFI type definitions used by the OS/arch backends, avoiding a
// dependency on the `libc` crate in non-test builds.
#[cfg(any(test, not(any(windows, target_arch = "x86", target_arch = "x86_64"))))]
#[cfg(any(not(portable_atomic_no_asm), portable_atomic_unstable_asm))]
#[allow(dead_code, non_camel_case_types, unused_macros)]
#[macro_use]
pub(crate) mod ffi {
    pub(crate) type c_void = core::ffi::c_void;
    // `int`/`unsigned int`: 16-bit on 16-bit-pointer targets, 32-bit
    // elsewhere (pointer width used as the ABI proxy — NOTE(review):
    // confirm this holds for all supported targets).
    #[cfg(target_pointer_width = "16")]
    pub(crate) type c_int = i16;
    #[cfg(target_pointer_width = "16")]
    pub(crate) type c_uint = u16;
    #[cfg(not(target_pointer_width = "16"))]
    pub(crate) type c_int = i32;
    #[cfg(not(target_pointer_width = "16"))]
    pub(crate) type c_uint = u32;
    // `long`: 64-bit on 64-bit non-Windows targets (LP64), 32-bit otherwise
    // (ILP32 and Windows LLP64).
    #[cfg(all(target_pointer_width = "64", not(windows)))]
    pub(crate) type c_long = i64;
    #[cfg(all(target_pointer_width = "64", not(windows)))]
    pub(crate) type c_ulong = u64;
    #[cfg(not(all(target_pointer_width = "64", not(windows))))]
    pub(crate) type c_long = i32;
    #[cfg(not(all(target_pointer_width = "64", not(windows))))]
    pub(crate) type c_ulong = u32;
    pub(crate) type c_size_t = usize;
    // `char` signedness: unsigned on the listed non-Apple/non-Windows
    // arches, signed everywhere else.
    #[cfg(all(
        not(any(target_vendor = "apple", windows)),
        any(
            target_arch = "aarch64",
            target_arch = "arm",
            target_arch = "csky",
            target_arch = "hexagon",
            target_arch = "msp430",
            target_arch = "powerpc",
            target_arch = "powerpc64",
            target_arch = "riscv32",
            target_arch = "riscv64",
            target_arch = "s390x",
            target_arch = "xtensa",
        ),
    ))]
    pub(crate) type c_char = u8;
    #[cfg(not(all(
        not(any(target_vendor = "apple", windows)),
        any(
            target_arch = "aarch64",
            target_arch = "arm",
            target_arch = "csky",
            target_arch = "hexagon",
            target_arch = "msp430",
            target_arch = "powerpc",
            target_arch = "powerpc64",
            target_arch = "riscv32",
            target_arch = "riscv64",
            target_arch = "s390x",
            target_arch = "xtensa",
        ),
    )))]
    pub(crate) type c_char = i8;

    // Compile-time check (test builds only) that these definitions agree
    // with std's `os::raw` types — and libc's `size_t` on unix — for the
    // host the tests run on.
    #[cfg(test)]
    const _: fn() = || {
        let _: c_int = 0 as std::os::raw::c_int;
        let _: c_uint = 0 as std::os::raw::c_uint;
        let _: c_long = 0 as std::os::raw::c_long;
        let _: c_ulong = 0 as std::os::raw::c_ulong;
        // std::os::raw has no c_size_t; compare against libc on unix.
        #[cfg(unix)]
        let _: c_size_t = 0 as libc::size_t;
        let _: c_char = 0 as std::os::raw::c_char;
    };

    // Minimal replacement for a C string slice type — presumably used
    // instead of core::ffi::CStr for MSRV reasons (NOTE(review): confirm).
    #[repr(transparent)]
    pub(crate) struct CStr([c_char]);
    impl CStr {
        // Returns a pointer to the start of the string, including the
        // trailing nul.
        #[inline]
        #[must_use]
        pub(crate) const fn as_ptr(&self) -> *const c_char {
            self.0.as_ptr()
        }
        // Safety contract: `bytes` must end with a nul byte and contain no
        // interior nul bytes (the `c!` macro checks this at compile time).
        #[inline]
        #[must_use]
        pub(crate) unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr {
            // SAFETY: CStr is repr(transparent) over [c_char], which has
            // the same layout as [u8] (c_char is u8 or i8).
            unsafe { &*(bytes as *const [u8] as *const CStr) }
        }
        #[cfg(test)]
        #[inline]
        #[must_use]
        pub(crate) fn to_bytes_with_nul(&self) -> &[u8] {
            // The cast is a no-op when c_char is u8, hence the allow.
            #[allow(clippy::unnecessary_cast)]
            // SAFETY: [c_char] and [u8] have the same layout.
            unsafe {
                &*(&self.0 as *const [c_char] as *const [u8])
            }
        }
    }

    // Builds a const `&CStr` from a string literal: appends the nul
    // terminator and statically verifies the result is a valid C string.
    macro_rules! c {
        ($s:expr) => {{
            const BYTES: &[u8] = concat!($s, "\0").as_bytes();
            const _: () = static_assert!(crate::utils::ffi::_const_is_c_str(BYTES));
            #[allow(unused_unsafe)]
            // SAFETY: the static_assert above proves BYTES is nul-terminated
            // with no interior nul.
            unsafe {
                crate::utils::ffi::CStr::from_bytes_with_nul_unchecked(BYTES)
            }
        }};
    }

    // Const validation that `bytes` is a well-formed C string: non-empty,
    // ends with nul, and has no interior nul bytes.
    #[must_use]
    pub(crate) const fn _const_is_c_str(bytes: &[u8]) -> bool {
        #[cfg(portable_atomic_no_track_caller)]
        {
            // Weaker fallback on old compilers — presumably the
            // track_caller cfg doubles as a proxy for const-eval features
            // the full check below needs (NOTE(review): confirm).
            !bytes.is_empty()
        }
        #[cfg(not(portable_atomic_no_track_caller))]
        {
            if bytes.is_empty() {
                return false;
            }
            // The last byte must be the nul terminator.
            let mut i = bytes.len() - 1;
            if bytes[i] != 0 {
                return false;
            }
            // Scan backwards: no interior nul bytes allowed.
            while i != 0 {
                i -= 1;
                if bytes[i] == 0 {
                    return false;
                }
            }
            true
        }
    }

    #[allow(
        clippy::alloc_instead_of_core,
        clippy::std_instead_of_alloc,
        clippy::std_instead_of_core,
        clippy::undocumented_unsafe_blocks,
        clippy::wildcard_imports
    )]
    #[cfg(test)]
    mod tests {
        #[test]
        fn test_c_macro() {
            #[track_caller]
            fn t(s: &crate::utils::ffi::CStr, raw: &[u8]) {
                assert_eq!(s.to_bytes_with_nul(), raw);
            }
            t(c!(""), b"\0");
            t(c!("a"), b"a\0");
            t(c!("abc"), b"abc\0");
            t(c!(concat!("abc", "d")), b"abcd\0");
        }

        // Checks _const_is_c_str against std's CStr validation.
        #[test]
        fn test_is_c_str() {
            #[track_caller]
            fn t(bytes: &[u8]) {
                assert_eq!(
                    super::_const_is_c_str(bytes),
                    std::ffi::CStr::from_bytes_with_nul(bytes).is_ok()
                );
            }
            t(b"\0");
            t(b"a\0");
            t(b"abc\0");
            t(b"");
            t(b"a");
            t(b"abc");
            t(b"\0a");
            t(b"\0a\0");
            t(b"ab\0c\0");
            t(b"\0\0");
        }
    }
}