use std::cell::UnsafeCell;
use std::fmt;
use std::ops;

/// `AtomicUsize` providing an additional `unsync_load` function.
pub(crate) struct AtomicUsize {
    inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
}

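// Safety (an added justification sketch, not in the original source): the
// `UnsafeCell` only ever wraps an `AtomicUsize`, which is itself `Send` and
// `Sync`, and every unsynchronized access path below either takes `&mut self`
// or is an `unsafe` fn with documented conditions.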
unsafe impl Send for AtomicUsize {}
unsafe impl Sync for AtomicUsize {}

impl AtomicUsize {
    pub(crate) fn new(val: usize) -> AtomicUsize {
        let inner = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val));
        AtomicUsize { inner }
    }

    /// Performs an unsynchronized load.
    ///
    /// # Safety
    ///
    /// All mutations must have happened before the unsynchronized load.
    /// Additionally, there must be no concurrent mutations.
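    ///
    /// A minimal illustrative sketch (added; not from the original source)
    /// of a call that satisfies both conditions: the `&mut` borrow used for
    /// the mutation ends before the load, so nothing can race with it.
    ///
    /// ```ignore
    /// let mut a = AtomicUsize::new(1);
    /// a.with_mut(|v| *v = 2);              // mutation happens-before the load
    /// let v = unsafe { a.unsync_load() };  // no concurrent mutations possible
    /// assert_eq!(v, 2);
    /// ```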
    pub(crate) unsafe fn unsync_load(&self) -> usize {
        *(*self.inner.get()).get_mut()
    }

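    /// Runs `f` with plain mutable access to the wrapped value.
    ///
    /// No atomic operations are needed here: `&mut self` already
    /// guarantees exclusive access.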
    pub(crate) fn with_mut<R>(&mut self, f: impl FnOnce(&mut usize) -> R) -> R {
        // safety: we have mutable access
        f(unsafe { (*self.inner.get()).get_mut() })
    }
}

impl ops::Deref for AtomicUsize {
    type Target = std::sync::atomic::AtomicUsize;

    fn deref(&self) -> &Self::Target {
        // safety: it is always safe to access `&self` fns on the inner value as
        // we never perform unsafe mutations.
        unsafe { &*self.inner.get() }
    }
}

impl ops::DerefMut for AtomicUsize {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // safety: we hold `&mut self`
        unsafe { &mut *self.inner.get() }
    }
}

impl fmt::Debug for AtomicUsize {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(fmt)
    }
}
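
// A minimal usage sketch (added for illustration; the module and test names
// are hypothetical, not from the original source). It exercises the three
// access paths: `with_mut`, `unsync_load`, and the `Deref`-exposed atomic API.
#[cfg(test)]
mod tests {
    use super::AtomicUsize;
    use std::sync::atomic::Ordering;

    #[test]
    fn unsync_and_sync_access() {
        let mut a = AtomicUsize::new(0);

        // Plain mutable access: `&mut self` proves exclusivity, so no
        // atomic operations are involved.
        a.with_mut(|v| *v = 5);

        // Safety: the mutation above happened before this load and the
        // `&mut` borrow has ended, so no concurrent mutation is possible.
        assert_eq!(unsafe { a.unsync_load() }, 5);

        // `Deref` exposes the full `std` atomic API for synchronized use.
        a.fetch_add(1, Ordering::Relaxed);
        assert_eq!(a.load(Ordering::Relaxed), 6);
    }
}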