use crate::cell::Cell;
use crate::ops::{Deref, DerefMut};
3
/// Pads and aligns a value to the length of a cache line.
///
/// Wrapping per-core data in `CachePadded` prevents false sharing:
/// two values touched by different cores never share a cache line.
// Reconstructed derive list: the original attribute had its macro
// expansion spliced into the source; the expanded impls were for
// Clone, Copy, Default, Hash, PartialEq and Eq.
#[derive(Clone, Copy, Default, Hash, PartialEq, Eq)]
// NOTE(review): the alignment table below mirrors crossbeam-utils'
// `CachePadded`. 128 bytes on x86_64/aarch64/powerpc64 accounts for
// the spatial prefetcher pulling cache lines in pairs.
#[cfg_attr(
    any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64",),
    repr(align(128))
)]
// arm and the mips family use 32-byte cache lines.
#[cfg_attr(
    any(
        target_arch = "arm",
        target_arch = "mips",
        target_arch = "mips32r6",
        target_arch = "mips64",
        target_arch = "mips64r6",
    ),
    repr(align(32))
)]
// s390x has 256-byte cache lines.
#[cfg_attr(target_arch = "s390x", repr(align(256)))]
// Every other architecture: assume the common 64-byte cache line.
#[cfg_attr(
    not(any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "arm",
        target_arch = "mips",
        target_arch = "mips32r6",
        target_arch = "mips64",
        target_arch = "mips64r6",
        target_arch = "s390x",
    )),
    repr(align(64))
)]
pub struct CachePadded<T> {
    value: T,
}
73
74impl<T> CachePadded<T> {
75 pub fn new(value: T) -> CachePadded<T> {
77 CachePadded::<T> { value }
78 }
79}
80
81impl<T> Deref for CachePadded<T> {
82 type Target = T;
83
84 fn deref(&self) -> &T {
85 &self.value
86 }
87}
88
89impl<T> DerefMut for CachePadded<T> {
90 fn deref_mut(&mut self) -> &mut T {
91 &mut self.value
92 }
93}
94
/// Step count after which `Backoff` stops escalating its spin count
/// (`spin_heavy` switches to yielding past this point); the spin count
/// per step is `step.pow(2)`, so at most 36 spin hints per call.
const SPIN_LIMIT: u32 = 6;
96
/// Exponential-backoff helper for spin loops.
///
/// Tracks how many times the caller has backed off; each successive
/// call spins (or yields) longer than the previous one.
pub struct Backoff {
    // Backoff steps taken so far. `Cell` lets the spin methods bump
    // the counter through `&self`.
    step: Cell<u32>,
}
101
102impl Backoff {
103 pub fn new() -> Self {
105 Backoff { step: Cell::new(0) }
106 }
107
108 #[inline]
113 pub fn spin_light(&self) {
114 let step = self.step.get().min(SPIN_LIMIT);
115 for _ in 0..step.pow(2) {
116 crate::hint::spin_loop();
117 }
118
119 self.step.set(self.step.get() + 1);
120 }
121
122 #[inline]
126 pub fn spin_heavy(&self) {
127 if self.step.get() <= SPIN_LIMIT {
128 for _ in 0..self.step.get().pow(2) {
129 crate::hint::spin_loop()
130 }
131 } else {
132 crate::thread::yield_now();
133 }
134
135 self.step.set(self.step.get() + 1);
136 }
137}