// atomic_refcell/lib.rs
//! Implements a container type providing RefCell-like semantics for objects
//! shared across threads.
//!
//! RwLock is traditionally considered to be the |Sync| analogue of RefCell.
//! However, for consumers that can guarantee that they will never mutably
//! borrow the contents concurrently with immutable borrows, an RwLock is
//! overkill, and has key disadvantages:
//! * Performance: Even the fastest existing implementation of RwLock (that of
//!   parking_lot) performs at least two atomic operations during immutable
//!   borrows. This makes mutable borrows significantly cheaper than immutable
//!   borrows, leading to weird incentives when writing performance-critical
//!   code.
//! * Features: Implementing AtomicRefCell on top of RwLock makes it impossible
//!   to implement useful things like AtomicRef{,Mut}::map.
//!
//! As such, we re-implement RefCell semantics from scratch with a single atomic
//! reference count. The primary complication of this scheme relates to keeping
//! things in a consistent state when one thread performs an illegal borrow and
//! panics. Since an AtomicRefCell can be accessed by multiple threads, and since
//! panics are recoverable, we need to ensure that an illegal (panicking) access by
//! one thread does not lead to undefined behavior on other, still-running threads.
//!
//! So we represent things as follows:
//! * Any value with the high bit set (so half the total refcount space) indicates
//!   a mutable borrow.
//! * Mutable borrows perform an atomic compare-and-swap, swapping in the high bit
//!   if the current value is zero. If the current value is non-zero, the thread
//!   panics and the value is left undisturbed.
//! * Immutable borrows perform an atomic increment. If the new value has the high
//!   bit set, the thread panics. The incremented refcount is left as-is, since it
//!   still represents a valid mutable borrow. When the mutable borrow is released,
//!   the refcount is set unconditionally to zero, clearing any stray increments by
//!   panicked threads.
//!
//! There are a few additional purely-academic complications to handle overflow,
//! which are documented in the implementation.
//!
//! The rest of this module is mostly derived by copy-pasting the implementation of
//! RefCell and fixing things up as appropriate. Certain non-threadsafe methods
//! have been removed. We segment the concurrency logic from the rest of the code to
//! keep the tricky parts small and easy to audit.

#![no_std]
#![allow(unsafe_code)]
#![deny(missing_docs)]

use core::cell::UnsafeCell;
use core::cmp;
use core::fmt;
use core::fmt::{Debug, Display};
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::sync::atomic;

// By default use the native `AtomicUsize` from `core`; when the
// `portable-atomic` feature is enabled, use that crate's drop-in
// replacement instead.
#[cfg(not(feature = "portable-atomic"))]
use core::sync::atomic::AtomicUsize;

#[cfg(feature = "portable-atomic")]
use portable_atomic::AtomicUsize;

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// A threadsafe analogue to RefCell.
pub struct AtomicRefCell<T: ?Sized> {
    // Borrow state: 0 = unborrowed; high bit set = mutably borrowed;
    // otherwise the count of outstanding immutable borrows. See the
    // module docs for the full scheme.
    borrow: AtomicUsize,
    // The wrapped value. Must come last: `T: ?Sized` and the unsized
    // field of a struct has to be the final one.
    value: UnsafeCell<T>,
}
70
/// An error returned by [`AtomicRefCell::try_borrow`](struct.AtomicRefCell.html#method.try_borrow).
pub struct BorrowError {
    // Private zero-sized field so callers cannot construct this type.
    _private: (),
}
75
76impl Debug for BorrowError {
77    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
78        f.debug_struct("BorrowError").finish()
79    }
80}
81
82impl Display for BorrowError {
83    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
84        Display::fmt("already mutably borrowed", f)
85    }
86}
87
/// An error returned by [`AtomicRefCell::try_borrow_mut`](struct.AtomicRefCell.html#method.try_borrow_mut).
pub struct BorrowMutError {
    // Private zero-sized field so callers cannot construct this type.
    _private: (),
}
92
93impl Debug for BorrowMutError {
94    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
95        f.debug_struct("BorrowMutError").finish()
96    }
97}
98
99impl Display for BorrowMutError {
100    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
101        Display::fmt("already borrowed", f)
102    }
103}
104
105impl<T> AtomicRefCell<T> {
106    /// Creates a new `AtomicRefCell` containing `value`.
107    #[inline]
108    pub const fn new(value: T) -> AtomicRefCell<T> {
109        AtomicRefCell {
110            borrow: AtomicUsize::new(0),
111            value: UnsafeCell::new(value),
112        }
113    }
114
115    /// Consumes the `AtomicRefCell`, returning the wrapped value.
116    #[inline]
117    pub fn into_inner(self) -> T {
118        debug_assert!(self.borrow.load(atomic::Ordering::Acquire) == 0);
119        self.value.into_inner()
120    }
121}
122
impl<T: ?Sized> AtomicRefCell<T> {
    /// Immutably borrows the wrapped value.
    ///
    /// # Panics
    ///
    /// Panics if the value is currently mutably borrowed.
    #[inline]
    pub fn borrow(&self) -> AtomicRef<'_, T> {
        match AtomicBorrowRef::try_new(&self.borrow) {
            Ok(borrow) => AtomicRef {
                // SAFETY: `UnsafeCell::get` never returns null.
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
            },
            Err(s) => panic!("{}", s),
        }
    }

    /// Attempts to immutably borrow the wrapped value, but instead of panicking
    /// on a failed borrow, returns `Err`.
    #[inline]
    pub fn try_borrow(&self) -> Result<AtomicRef<'_, T>, BorrowError> {
        match AtomicBorrowRef::try_new(&self.borrow) {
            Ok(borrow) => Ok(AtomicRef {
                // SAFETY: `UnsafeCell::get` never returns null.
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
            }),
            Err(_) => Err(BorrowError { _private: () }),
        }
    }

    /// Mutably borrows the wrapped value.
    ///
    /// # Panics
    ///
    /// Panics if the value is currently borrowed (mutably or immutably).
    #[inline]
    pub fn borrow_mut(&self) -> AtomicRefMut<'_, T> {
        match AtomicBorrowRefMut::try_new(&self.borrow) {
            Ok(borrow) => AtomicRefMut {
                // SAFETY: `UnsafeCell::get` never returns null.
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
                marker: PhantomData,
            },
            Err(s) => panic!("{}", s),
        }
    }

    /// Attempts to mutably borrow the wrapped value, but instead of panicking
    /// on a failed borrow, returns `Err`.
    #[inline]
    pub fn try_borrow_mut(&self) -> Result<AtomicRefMut<'_, T>, BorrowMutError> {
        match AtomicBorrowRefMut::try_new(&self.borrow) {
            Ok(borrow) => Ok(AtomicRefMut {
                // SAFETY: `UnsafeCell::get` never returns null.
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
                marker: PhantomData,
            }),
            Err(_) => Err(BorrowMutError { _private: () }),
        }
    }

    /// Returns a raw pointer to the underlying data in this cell.
    ///
    /// External synchronization is needed to avoid data races when dereferencing
    /// the pointer.
    #[inline]
    pub fn as_ptr(&self) -> *mut T {
        self.value.get()
    }

    /// Returns a mutable reference to the wrapped value.
    ///
    /// No runtime checks take place (unless debug assertions are enabled)
    /// because this call borrows `AtomicRefCell` mutably at compile-time.
    #[inline]
    pub fn get_mut(&mut self) -> &mut T {
        // `&mut self` proves no guards are outstanding, so the refcount
        // should be zero; verify that in debug builds.
        debug_assert!(self.borrow.load(atomic::Ordering::Acquire) == 0);
        // SAFETY: exclusive access to the cell implies exclusive access to
        // its contents.
        unsafe { &mut *self.value.get() }
    }
}
195
196//
197// Core synchronization logic. Keep this section small and easy to audit.
198//
199
// The most significant bit of the refcount marks an active mutable borrow;
// the lower bits count immutable borrows.
const HIGH_BIT: usize = !(usize::MAX >> 1);
// Threshold of failed immutable-borrow attempts (during one mutable borrow)
// at which we abort the process to rule out refcount wraparound; see
// `AtomicBorrowRef::check_overflow`.
const MAX_FAILED_BORROWS: usize = HIGH_BIT + (HIGH_BIT >> 1);
202
// Guard representing one successful immutable borrow; decrements the
// refcount on drop.
struct AtomicBorrowRef<'b> {
    borrow: &'b AtomicUsize,
}
206
impl<'b> AtomicBorrowRef<'b> {
    // Attempts to acquire an immutable borrow by incrementing the refcount.
    // Fails if a mutable borrow (high bit) is active.
    #[inline]
    fn try_new(borrow: &'b AtomicUsize) -> Result<Self, &'static str> {
        let new = borrow.fetch_add(1, atomic::Ordering::Acquire) + 1;
        if new & HIGH_BIT != 0 {
            // If the new count has the high bit set, that almost certainly
            // means there's a pre-existing mutable borrow. In that case,
            // we simply leave the increment as a benign side-effect and
            // return `Err`. Once the mutable borrow is released, the
            // count will be reset to zero unconditionally.
            //
            // The overflow check here ensures that an unbounded number of
            // immutable borrows during the scope of one mutable borrow
            // will soundly trigger a panic (or abort) rather than UB.
            Self::check_overflow(borrow, new);
            Err("already mutably borrowed")
        } else {
            Ok(AtomicBorrowRef { borrow })
        }
    }

    // Handles the two refcount-overflow scenarios; called only on the
    // (cold) failure path of `try_new`.
    #[cold]
    #[inline(never)]
    fn check_overflow(borrow: &'b AtomicUsize, new: usize) {
        if new == HIGH_BIT {
            // We overflowed into the reserved upper half of the refcount
            // space. Before panicking, decrement the refcount to leave things
            // in a consistent immutable-borrow state.
            //
            // This can basically only happen if somebody forget()s AtomicRefs
            // in a tight loop.
            borrow.fetch_sub(1, atomic::Ordering::Release);
            panic!("too many immutable borrows");
        } else if new >= MAX_FAILED_BORROWS {
            // During the mutable borrow, an absurd number of threads have
            // attempted to increment the refcount with immutable borrows.
            // To avoid hypothetically wrapping the refcount, we abort the
            // process once a certain threshold is reached.
            //
            // This requires billions of borrows to fail during the scope of
            // one mutable borrow, and so is very unlikely to happen in a real
            // program.
            //
            // To avoid a potential unsound state after overflowing, we make
            // sure the entire process aborts.
            //
            // Right now, there's no stable way to do that without `std`:
            // https://github.com/rust-lang/rust/issues/67952
            // As a workaround, we cause an abort by making this thread panic
            // during the unwinding of another panic.
            //
            // On platforms where the panic strategy is already 'abort', the
            // ForceAbort object here has no effect, as the program already
            // panics before it is dropped.
            struct ForceAbort;
            impl Drop for ForceAbort {
                fn drop(&mut self) {
                    panic!("Aborting to avoid unsound state of AtomicRefCell");
                }
            }
            let _abort = ForceAbort;
            panic!("Too many failed borrows");
        }
    }
}
272
impl<'b> Drop for AtomicBorrowRef<'b> {
    // Releases one immutable borrow.
    #[inline]
    fn drop(&mut self) {
        let old = self.borrow.fetch_sub(1, atomic::Ordering::Release);
        // This assertion is technically incorrect in the case where another
        // thread hits the hypothetical overflow case, since we might observe
        // the refcount before it fixes it up (and panics). But that will
        // never happen in a real program, and this is a debug_assert! anyway.
        debug_assert!(old & HIGH_BIT == 0);
    }
}
284
// Guard representing the (single) mutable borrow; resets the refcount to
// zero on drop.
struct AtomicBorrowRefMut<'b> {
    borrow: &'b AtomicUsize,
}
288
impl<'b> Drop for AtomicBorrowRefMut<'b> {
    #[inline]
    fn drop(&mut self) {
        // Reset unconditionally to zero rather than subtracting HIGH_BIT:
        // this also wipes out any stray increments left behind by immutable
        // borrows that failed (and panicked) while this mutable borrow was
        // live. See the module docs.
        self.borrow.store(0, atomic::Ordering::Release);
    }
}
295
impl<'b> AtomicBorrowRefMut<'b> {
    // Attempts to acquire the mutable borrow: succeeds only if the refcount
    // was exactly zero, in which case it is swapped to HIGH_BIT.
    #[inline]
    fn try_new(borrow: &'b AtomicUsize) -> Result<AtomicBorrowRefMut<'b>, &'static str> {
        // Use compare-and-swap to avoid corrupting the immutable borrow count
        // on illegal mutable borrows.
        let old = match borrow.compare_exchange(
            0,
            HIGH_BIT,
            atomic::Ordering::Acquire,
            atomic::Ordering::Relaxed,
        ) {
            Ok(x) => x,
            Err(x) => x,
        };

        if old == 0 {
            // We won: the cell was unborrowed and now carries the
            // mutable-borrow marker.
            Ok(AtomicBorrowRefMut { borrow })
        } else if old & HIGH_BIT == 0 {
            // Non-zero count without the high bit: immutable borrows exist.
            Err("already immutably borrowed")
        } else {
            // High bit set: another mutable borrow is active.
            Err("already mutably borrowed")
        }
    }
}
320
// SAFETY: Sending an `AtomicRefCell<T>` moves the `T` with it, so `T: Send`
// suffices. Sharing `&AtomicRefCell<T>` lets any thread obtain `&T` (borrow)
// or `&mut T` (borrow_mut), so `Sync` additionally requires `T: Send + Sync`
// — the same bounds `std::sync::RwLock` uses.
unsafe impl<T: ?Sized + Send> Send for AtomicRefCell<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for AtomicRefCell<T> {}
323
324//
325// End of core synchronization logic. No tricky thread stuff allowed below
326// this point.
327//
328
329impl<T: Clone> Clone for AtomicRefCell<T> {
330    #[inline]
331    fn clone(&self) -> AtomicRefCell<T> {
332        AtomicRefCell::new(self.borrow().clone())
333    }
334}
335
336impl<T: Default> Default for AtomicRefCell<T> {
337    #[inline]
338    fn default() -> AtomicRefCell<T> {
339        AtomicRefCell::new(Default::default())
340    }
341}
342
343impl<T: ?Sized + PartialEq> PartialEq for AtomicRefCell<T> {
344    #[inline]
345    fn eq(&self, other: &AtomicRefCell<T>) -> bool {
346        *self.borrow() == *other.borrow()
347    }
348}
349
// `Eq` has no methods; the total-equality guarantee carries over from `T: Eq`.
impl<T: ?Sized + Eq> Eq for AtomicRefCell<T> {}
351
352impl<T: ?Sized + PartialOrd> PartialOrd for AtomicRefCell<T> {
353    #[inline]
354    fn partial_cmp(&self, other: &AtomicRefCell<T>) -> Option<cmp::Ordering> {
355        self.borrow().partial_cmp(&*other.borrow())
356    }
357}
358
359impl<T: ?Sized + Ord> Ord for AtomicRefCell<T> {
360    #[inline]
361    fn cmp(&self, other: &AtomicRefCell<T>) -> cmp::Ordering {
362        self.borrow().cmp(&*other.borrow())
363    }
364}
365
366impl<T> From<T> for AtomicRefCell<T> {
367    fn from(t: T) -> AtomicRefCell<T> {
368        AtomicRefCell::new(t)
369    }
370}
371
impl<'b> Clone for AtomicBorrowRef<'b> {
    #[inline]
    fn clone(&self) -> AtomicBorrowRef<'b> {
        // `try_new` only returns `Err` when it observes the high
        // (mutable-borrow) bit. Since `self` is an outstanding immutable
        // borrow, no mutable borrow can be active, so that bit can only
        // appear via the overflow paths — and those panic/abort inside
        // `try_new` before returning. The unwrap is therefore effectively
        // unreachable.
        AtomicBorrowRef::try_new(self.borrow).unwrap()
    }
}
378
/// A wrapper type for an immutably borrowed value from an `AtomicRefCell<T>`.
pub struct AtomicRef<'b, T: ?Sized + 'b> {
    // Pointer into the cell's `UnsafeCell`; kept valid by `borrow` below.
    value: NonNull<T>,
    // Guard that releases the immutable borrow when dropped.
    borrow: AtomicBorrowRef<'b>,
}
384
// SAFETY: `AtomicRef<'_, T>` acts as a shared reference to `T`, so it can
// cross threads exactly when `&T` can. `AtomicBorrowRef` is just a
// reference to an atomic, which imposes no extra constraint.
unsafe impl<'b, T: ?Sized> Sync for AtomicRef<'b, T> where for<'a> &'a T: Sync {}
unsafe impl<'b, T: ?Sized> Send for AtomicRef<'b, T> where for<'a> &'a T: Send {}
389
impl<'b, T: ?Sized> Deref for AtomicRef<'b, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: We hold a shared borrow of the value (tracked by
        // `self.borrow`), so no mutable access can exist concurrently and
        // the pointer remains valid for `'b`.
        unsafe { self.value.as_ref() }
    }
}
399
400impl<'b, T: ?Sized> AtomicRef<'b, T> {
401    /// Copies an `AtomicRef`.
402    ///
403    /// Like its [std-counterpart](core::cell::Ref::clone), this type does not implement `Clone`
404    /// to not interfere with cloning the contained type.
405    #[allow(clippy::should_implement_trait)]
406    #[inline]
407    pub fn clone(orig: &AtomicRef<'b, T>) -> AtomicRef<'b, T> {
408        AtomicRef {
409            value: orig.value,
410            borrow: orig.borrow.clone(),
411        }
412    }
413
414    /// Make a new `AtomicRef` for a component of the borrowed data.
415    #[inline]
416    pub fn map<U: ?Sized, F>(orig: AtomicRef<'b, T>, f: F) -> AtomicRef<'b, U>
417    where
418        F: FnOnce(&T) -> &U,
419    {
420        AtomicRef {
421            value: NonNull::from(f(&*orig)),
422            borrow: orig.borrow,
423        }
424    }
425
426    /// Make a new `AtomicRef` for an optional component of the borrowed data.
427    #[inline]
428    pub fn filter_map<U: ?Sized, F>(orig: AtomicRef<'b, T>, f: F) -> Option<AtomicRef<'b, U>>
429    where
430        F: FnOnce(&T) -> Option<&U>,
431    {
432        Some(AtomicRef {
433            value: NonNull::from(f(&*orig)?),
434            borrow: orig.borrow,
435        })
436    }
437}
438
439impl<'b, T: ?Sized> AtomicRefMut<'b, T> {
440    /// Make a new `AtomicRefMut` for a component of the borrowed data, e.g. an enum
441    /// variant.
442    #[inline]
443    pub fn map<U: ?Sized, F>(mut orig: AtomicRefMut<'b, T>, f: F) -> AtomicRefMut<'b, U>
444    where
445        F: FnOnce(&mut T) -> &mut U,
446    {
447        AtomicRefMut {
448            value: NonNull::from(f(&mut *orig)),
449            borrow: orig.borrow,
450            marker: PhantomData,
451        }
452    }
453
454    /// Make a new `AtomicRefMut` for an optional component of the borrowed data.
455    #[inline]
456    pub fn filter_map<U: ?Sized, F>(
457        mut orig: AtomicRefMut<'b, T>,
458        f: F,
459    ) -> Option<AtomicRefMut<'b, U>>
460    where
461        F: FnOnce(&mut T) -> Option<&mut U>,
462    {
463        Some(AtomicRefMut {
464            value: NonNull::from(f(&mut *orig)?),
465            borrow: orig.borrow,
466            marker: PhantomData,
467        })
468    }
469}
470
/// A wrapper type for a mutably borrowed value from an `AtomicRefCell<T>`.
pub struct AtomicRefMut<'b, T: ?Sized + 'b> {
    // Pointer into the cell's `UnsafeCell`; kept valid by `borrow` below.
    value: NonNull<T>,
    // Guard that releases the mutable borrow when dropped.
    borrow: AtomicBorrowRefMut<'b>,
    // `NonNull` is covariant over `T`, but this is used in place of a mutable
    // reference so we need to be invariant over `T`.
    marker: PhantomData<&'b mut T>,
}
479
// SAFETY: `AtomicRefMut<'_, T>` acts as a mutable reference to `T`, so it
// can cross threads exactly when `&mut T` can. `AtomicBorrowRefMut` is just
// a reference to an atomic, which imposes no extra constraint.
unsafe impl<'b, T: ?Sized> Sync for AtomicRefMut<'b, T> where for<'a> &'a mut T: Sync {}
unsafe impl<'b, T: ?Sized> Send for AtomicRefMut<'b, T> where for<'a> &'a mut T: Send {}
484
impl<'b, T: ?Sized> Deref for AtomicRefMut<'b, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: We hold an exclusive borrow of the value (tracked by
        // `self.borrow`), so handing out a shared view is sound.
        unsafe { self.value.as_ref() }
    }
}
494
impl<'b, T: ?Sized> DerefMut for AtomicRefMut<'b, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: We hold an exclusive borrow of the value (tracked by
        // `self.borrow`), and `&mut self` prevents aliasing through this guard.
        unsafe { self.value.as_mut() }
    }
}
502
503impl<'b, T: ?Sized + Debug + 'b> Debug for AtomicRef<'b, T> {
504    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
505        <T as Debug>::fmt(self, f)
506    }
507}
508
509impl<'b, T: ?Sized + Debug + 'b> Debug for AtomicRefMut<'b, T> {
510    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
511        <T as Debug>::fmt(self, f)
512    }
513}
514
515impl<T: ?Sized + Debug> Debug for AtomicRefCell<T> {
516    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
517        match self.try_borrow() {
518            Ok(borrow) => f
519                .debug_struct("AtomicRefCell")
520                .field("value", &borrow)
521                .finish(),
522            Err(_) => {
523                // The RefCell is mutably borrowed so we can't look at its value
524                // here. Show a placeholder instead.
525                struct BorrowedPlaceholder;
526
527                impl Debug for BorrowedPlaceholder {
528                    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
529                        f.write_str("<borrowed>")
530                    }
531                }
532
533                f.debug_struct("AtomicRefCell")
534                    .field("value", &BorrowedPlaceholder)
535                    .finish()
536            }
537        }
538    }
539}
540
#[cfg(feature = "serde")]
impl<'de, T: Deserialize<'de>> Deserialize<'de> for AtomicRefCell<T> {
    /// Deserializes a `T` and wraps it in a fresh, unborrowed cell.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = T::deserialize(deserializer)?;
        Ok(Self::from(value))
    }
}
550
#[cfg(feature = "serde")]
impl<T: Serialize> Serialize for AtomicRefCell<T> {
    /// Serializes the wrapped value; errors if the cell is mutably borrowed,
    /// since its contents cannot be read safely in that state.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::Error;
        if let Ok(value) = self.try_borrow() {
            value.serialize(serializer)
        } else {
            Err(S::Error::custom("already mutably borrowed"))
        }
    }
}