1 use std::cell::Cell;
2 use std::ptr;
3 use std::sync::atomic::{AtomicBool, AtomicPtr, AtomicUsize, Ordering};
4
5 use super::RefCnt;
6
/// Number of debt slots in each thread-local node.
const DEBT_SLOT_CNT: usize = 8;
8
/// One debt slot.
///
/// Holds either the address of a borrowed pointer (a "debt" ‒ a reference count
/// that still needs to be paid) or the `NO_DEBT` sentinel when the slot is free.
pub(crate) struct Debt(AtomicUsize);
11
12 impl Default for Debt {
default() -> Self13 fn default() -> Self {
14 Debt(AtomicUsize::new(NO_DEBT))
15 }
16 }
17
/// The debt slots of one node, aligned to 64 bytes (a typical cache-line size)
/// so neighbouring nodes don't share a cache line.
#[repr(align(64))]
#[derive(Default)]
struct Slots([Debt; DEBT_SLOT_CNT]);
21
/// One thread-local node for debts.
///
/// Nodes form a global singly-linked list headed by `DEBT_HEAD`. Nodes are never
/// removed from the list; a node is claimed by one thread at a time (tracked by
/// `in_use`) and recycled after that thread exits.
#[repr(C)]
struct Node {
    // The debt slots belonging to the owning thread.
    slots: Slots,
    // The next node in the global chain. Set once, before the node is published
    // into the chain, and never changed afterwards.
    next: Option<&'static Node>,
    // Whether some thread currently owns this node. Nothing is synchronized
    // through this flag ‒ it only tracks ownership claims.
    in_use: AtomicBool,
}
29
30 impl Default for Node {
default() -> Self31 fn default() -> Self {
32 Node {
33 next: None,
34 in_use: AtomicBool::new(true),
35 slots: Default::default(),
36 }
37 }
38 }
39
40 impl Node {
get() -> &'static Self41 fn get() -> &'static Self {
42 // Try to find an unused one in the chain and reuse it.
43 traverse(|node| {
44 // Try to claim this node. Nothing is synchronized through this atomic, we only
45 // track if someone claims ownership of it.
46 if !node.in_use.compare_and_swap(false, true, Ordering::Relaxed) {
47 Some(node)
48 } else {
49 None
50 }
51 })
52 // If that didn't work, create a new one and prepend to the list.
53 .unwrap_or_else(|| {
54 let node = Box::leak(Box::new(Node::default()));
55 // Not shared between threads yet, so ordinary write would be fine too.
56 node.in_use.store(true, Ordering::Relaxed);
57 // We don't want to read any data in addition to the head, Relaxed is fine
58 // here.
59 //
60 // We do need to release the data to others, but for that, we acquire in the
61 // compare_exchange below.
62 let mut head = DEBT_HEAD.load(Ordering::Relaxed);
63 loop {
64 node.next = unsafe { head.as_ref() };
65 if let Err(old) = DEBT_HEAD.compare_exchange_weak(
66 head,
67 node,
68 // We need to release *the whole chain* here. For that, we need to
69 // acquire it first.
70 Ordering::AcqRel,
71 Ordering::Relaxed, // Nothing changed, go next round of the loop.
72 ) {
73 head = old;
74 } else {
75 return node;
76 }
77 }
78 })
79 }
80 }
81
/// The sentinel value marking an empty debt slot.
///
/// The value of pointer `1` should be pretty safe, for two reasons:
///
/// * It's an odd number, but the pointers we have are likely aligned at least to the word size,
///   because the data at the end of the `Arc` has the counters.
/// * It's in the very first page where NULL lives, so it's not mapped.
pub(crate) const NO_DEBT: usize = 1;
88
/// The head of the debt chain.
///
/// Always written with Release (see `Node::get`) and read with Acquire (see
/// `traverse`), which is what synchronizes the rest of the chain.
static DEBT_HEAD: AtomicPtr<Node> = AtomicPtr::new(ptr::null_mut());
91
/// A wrapper around a node pointer, to un-claim the node on thread shutdown.
struct DebtHead {
    // Node for this thread, lazily assigned on the first `Debt::new` call.
    node: Cell<Option<&'static Node>>,
    // The next slot in round-robin rotation. Heuristically tries to balance the load across them
    // instead of having all of them stuffed towards the start of the array which gets
    // unsuccessfully iterated through every time.
    offset: Cell<usize>,
}
101
102 impl Drop for DebtHead {
drop(&mut self)103 fn drop(&mut self) {
104 if let Some(node) = self.node.get() {
105 // Nothing synchronized by this atomic.
106 assert!(node.in_use.swap(false, Ordering::Relaxed));
107 }
108 }
109 }
110
thread_local! {
    /// A debt node assigned to this thread.
    ///
    /// Starts without a node; one is assigned on the first `Debt::new` in this
    /// thread and released back to the global chain by `DebtHead::drop` when the
    /// thread exits.
    static THREAD_HEAD: DebtHead = DebtHead {
        node: Cell::new(None),
        offset: Cell::new(0),
    };
}
118
119 /// Goes through the debt linked list.
120 ///
121 /// This traverses the linked list, calling the closure on each node. If the closure returns
122 /// `Some`, it terminates with that value early, otherwise it runs to the end.
traverse<R, F: FnMut(&'static Node) -> Option<R>>(mut f: F) -> Option<R>123 fn traverse<R, F: FnMut(&'static Node) -> Option<R>>(mut f: F) -> Option<R> {
124 // Acquire ‒ we want to make sure we read the correct version of data at the end of the
125 // pointer. Any write to the DEBT_HEAD is with Release.
126 //
127 // Note that the other pointers in the chain never change and are *ordinary* pointers. The
128 // whole linked list is synchronized through the head.
129 let mut current = unsafe { DEBT_HEAD.load(Ordering::Acquire).as_ref() };
130 while let Some(node) = current {
131 let result = f(node);
132 if result.is_some() {
133 return result;
134 }
135 current = node.next;
136 }
137 None
138 }
139
impl Debt {
    /// Creates a new debt.
    ///
    /// This stores the debt of the given pointer (untyped, casted into an usize) and returns a
    /// reference to that slot, or gives up with `None` if all the slots are currently full.
    ///
    /// This is technically lock-free on the first call in a given thread and wait-free on all the
    /// other accesses.
    // Turn the lint off in clippy, but don't complain anywhere else. clippy::new_ret_no_self
    // doesn't work yet, that thing is not stabilized.
    #[allow(unknown_lints, renamed_and_removed_lints, new_ret_no_self)]
    #[inline]
    pub(crate) fn new(ptr: usize) -> Option<&'static Self> {
        THREAD_HEAD
            // try_with fails only if the thread-local was already destroyed (thread
            // shutdown in progress); in that case we simply can't take a debt.
            .try_with(|head| {
                let node = match head.node.get() {
                    // Already have my own node (most likely)?
                    Some(node) => node,
                    // No node yet, called for the first time in this thread. Set one up.
                    None => {
                        let new_node = Node::get();
                        head.node.set(Some(new_node));
                        new_node
                    }
                };
                // Check it is in use by *us*
                debug_assert!(node.in_use.load(Ordering::Relaxed));
                // Trick with offsets: we rotate through the slots (save the value from last time)
                // so successive leases are likely to succeed on the first attempt (or soon after)
                // instead of going through the list of already held ones.
                let offset = head.offset.get();
                let len = node.slots.0.len();
                for i in 0..len {
                    let i = (i + offset) % len;
                    // Note: the indexing check is almost certainly optimised out because the len
                    // is used above. And using .get_unchecked was actually *slower*.
                    let got_it = node.slots.0[i]
                        .0
                        // Try to acquire the slot. Relaxed if it doesn't work is fine, as we don't
                        // synchronize by it.
                        .compare_exchange(NO_DEBT, ptr, Ordering::SeqCst, Ordering::Relaxed)
                        .is_ok();
                    if got_it {
                        // Remember where to start next time, so the rotation continues.
                        head.offset.set(i + 1);
                        return Some(&node.slots.0[i]);
                    }
                }
                // All slots of our node are currently occupied.
                None
            })
            .ok()
            // Flatten the Option<Option<&Debt>> from try_with into a plain Option.
            .and_then(|new| new)
    }

    /// Tries to pay the given debt.
    ///
    /// If the debt is still there, for the given pointer, it is paid and `true` is returned. If it
    /// is empty or if there's some other pointer, it is not paid and `false` is returned, meaning
    /// the debt was paid previously by someone else.
    ///
    /// # Notes
    ///
    /// * It is possible that someone paid the debt and then someone else put a debt for the same
    ///   pointer in there. This is fine, as we'll just pay the debt for that someone else.
    /// * This relies on the fact that the same pointer must point to the same object and
    ///   specifically to the same type ‒ the caller provides the type, it's destructor, etc.
    /// * It also relies on the fact the same thing is not stuffed both inside an `Arc` and `Rc` or
    ///   something like that, but that sounds like a reasonable assumption. Someone storing it
    ///   through `ArcSwap<T>` and someone else with `ArcSwapOption<T>` will work.
    #[inline]
    pub(crate) fn pay<T: RefCnt>(&self, ptr: *const T::Base) -> bool {
        self.0
            // If we don't change anything because there's something else, Relaxed is fine.
            //
            // The Release works as kind of Mutex. We make sure nothing from the debt-protected
            // sections leaks below this point.
            .compare_exchange(ptr as usize, NO_DEBT, Ordering::Release, Ordering::Relaxed)
            .is_ok()
    }

    /// Pays all the debts on the given pointer.
    pub(crate) fn pay_all<T: RefCnt>(ptr: *const T::Base) {
        // NOTE(review): presumably `T::from_ptr` takes over one reference without
        // touching the count ‒ the inc right below then balances the drop of `val`
        // at the end of this function. Confirm against the `RefCnt` contract.
        let val = unsafe { T::from_ptr(ptr) };
        T::inc(&val);
        traverse::<(), _>(|node| {
            for slot in &node.slots.0 {
                if slot
                    .0
                    .compare_exchange(ptr as usize, NO_DEBT, Ordering::AcqRel, Ordering::Relaxed)
                    .is_ok()
                {
                    // The slot held a debt for this pointer; pay it by bumping the
                    // reference count on the debtor's behalf.
                    T::inc(&val);
                }
            }
            // Always walk the entire chain ‒ never terminate early.
            None
        });
    }
}
237
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    /// Checks the assumption that arcs to ZSTs have different pointer values.
    ///
    /// The debt slots identify objects solely by address, so two distinct
    /// zero-sized allocations must not alias.
    #[test]
    fn arc_zst() {
        struct A;
        struct B;

        let a = Arc::new(A);
        let b = Arc::new(B);

        let a_addr = &*a as *const A as usize;
        let b_addr = &*b as *const B as usize;
        assert_ne!(a_addr, b_addr);
    }
}
259