//! A hybrid strategy.
//!
//! This is based on debts ‒ an Arc may owe a reference, but it is marked in the debt. It is either
//! put back (by stopping using it), or if the pointer is replaced, the writer bumps the reference
//! count and removes the debt.
//!
//! The strategy uses two different slots for the debts. The first ones are faster, but fallible.
//! If they fail (either because there's interference from a writer at the same time, or because
//! they are full), the secondary one that is slower, but always succeeds, is used. In the latter
//! case, the reference is bumped and this secondary debt slot is released, so it is available for
//! further loads.
//!
//! See the [crate::debt] module for the actual slot manipulation. Here we just wrap them into the
//! strategy.
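//!
//! This module is internal; the code below is normally exercised through the public `ArcSwap`
//! API. A minimal sketch of what ends up here (assuming the default strategy is this hybrid
//! one):
//!
//! ```
//! use std::sync::Arc;
//!
//! use arc_swap::ArcSwap;
//!
//! let shared = ArcSwap::from_pointee(42);
//!
//! // The load claims a debt slot instead of bumping the reference count up front.
//! let guard = shared.load();
//! assert_eq!(**guard, 42);
//!
//! // Replacing the value makes the writer pay the outstanding debts on the old pointer.
//! shared.store(Arc::new(43));
//! assert_eq!(**shared.load(), 43);
//! ```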

use std::borrow::Borrow;
use std::mem::{self, ManuallyDrop};
use std::ops::Deref;
use std::ptr;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::*;

use super::sealed::{CaS, InnerStrategy, Protected};
use crate::debt::{Debt, LocalNode};
use crate::ref_cnt::RefCnt;

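/// A loaded value protected in one of two ways ‒ either we hold a debt for it (and owe a
/// reference), or `debt` is `None` and we own a reference count of our own.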
pub struct HybridProtection<T: RefCnt> {
    debt: Option<&'static Debt>,
    ptr: ManuallyDrop<T>,
}

impl<T: RefCnt> HybridProtection<T> {
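    /// Wraps a raw pointer, optionally together with the debt that covers it.
    ///
    /// Informally, the caller must ensure that either the passed debt covers `ptr`, or that a
    /// reference count was already taken on its behalf ‒ nothing is incremented in here.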
    pub(super) unsafe fn new(ptr: *const T::Base, debt: Option<&'static Debt>) -> Self {
        Self {
            debt,
            ptr: ManuallyDrop::new(T::from_ptr(ptr)),
        }
    }

    /// Try getting a debt into a fast slot.
    #[inline]
    fn attempt(node: &LocalNode, storage: &AtomicPtr<T::Base>) -> Option<Self> {
        // Relaxed is good enough here, see the Acquire below
        let ptr = storage.load(Relaxed);
        // Try to get a debt slot. If not possible, fail.
        let debt = node.new_fast(ptr as usize)?;

        // Acquire to get the data.
        //
        // SeqCst to make sure the storage vs. the debt are well ordered.
        let confirm = storage.load(SeqCst);
        if ptr == confirm {
            // Successfully got a debt
            Some(unsafe { Self::new(ptr, Some(debt)) })
        } else if debt.pay::<T>(ptr) {
            // It changed in the meantime, we return the debt (that is on the outdated pointer,
            // possibly destroyed) and fail.
            None
        } else {
            // It changed in the meantime, but the debt for the previous pointer was already paid
            // for by someone else, so we are fine using it.
            Some(unsafe { Self::new(ptr, None) })
        }
    }

    /// Get a debt slot using the slower but always successful mechanism.
    fn fallback(node: &LocalNode, storage: &AtomicPtr<T::Base>) -> Self {
        // First, we claim a debt slot and store the address of the atomic pointer there, so the
        // writer can optionally help us out with loading and protecting something.
        let gen = node.new_helping(storage as *const _ as usize);
        // We already synchronized the start of the sequence by SeqCst in the new_helping vs swap on
        // the pointer. We just need to make sure to bring the pointee in (this can be newer than
        // what we got in the Debt)
        let candidate = storage.load(Acquire);

        // Try to replace the debt with our candidate. If it works, we get the debt slot to use. If
        // not, we get a replacement value, already protected and a debt to take care of.
        match node.confirm_helping(gen, candidate as usize) {
            Ok(debt) => {
                // The fast path -> we got the debt confirmed alright.
                Self::from_inner(unsafe { Self::new(candidate, Some(debt)).into_inner() })
            }
            Err((unused_debt, replacement)) => {
                // The debt is on the candidate we provided and it is unused, so we just pay it
                // back right away.
                if !unused_debt.pay::<T>(candidate) {
                    unsafe { T::dec(candidate) };
                }
                // We got a (possibly) different pointer out. But that one is already protected and
                // the slot is paid back.
                unsafe { Self::new(replacement as *mut _, None) }
            }
        }
    }

    #[inline]
    fn as_ptr(&self) -> *const T::Base {
        T::as_ptr(self.ptr.deref())
    }
}

impl<T: RefCnt> Drop for HybridProtection<T> {
    #[inline]
    fn drop(&mut self) {
        match self.debt.take() {
            // We have our own copy of Arc, so we don't need a protection. Do nothing (but release
            // the Arc below).
            None => (),
            // If we owed something, just return the debt. We don't have a pointer owned, so
            // nothing to release.
            Some(debt) => {
                let ptr = T::as_ptr(&self.ptr);
                if debt.pay::<T>(ptr) {
                    return;
                }
                // But if the debt was already paid for us, we need to release the pointer, as we
                // were effectively already in the Unprotected mode.
            }
        }
        // Equivalent to T::dec(ptr)
        unsafe { ManuallyDrop::drop(&mut self.ptr) };
    }
}

impl<T: RefCnt> Protected<T> for HybridProtection<T> {
    #[inline]
    fn from_inner(ptr: T) -> Self {
        Self {
            debt: None,
            ptr: ManuallyDrop::new(ptr),
        }
    }

    #[inline]
    fn into_inner(mut self) -> T {
        // Drop any debt and release any lock held by the given guard, and return a
        // full-featured value that can even outlive the ArcSwap it originated from.
        match self.debt.take() {
            None => (), // We have a fully loaded ref-counted pointer.
            Some(debt) => {
                let ptr = T::inc(&self.ptr);
                if !debt.pay::<T>(ptr) {
                    unsafe { T::dec(ptr) };
                }
            }
        }

        // The ptr::read & forget is something like a cheating move. We can't move it out, because
        // we have a destructor and Rust doesn't allow us to do that.
        let inner = unsafe { ptr::read(self.ptr.deref()) };
        mem::forget(self);
        inner
    }
}

impl<T: RefCnt> Borrow<T> for HybridProtection<T> {
    #[inline]
    fn borrow(&self) -> &T {
        &self.ptr
    }
}

pub trait Config {
    // Mostly for testing, a way to disable the fast slots
    const USE_FAST: bool;
}

#[derive(Clone, Default)]
pub struct DefaultConfig;

impl Config for DefaultConfig {
    const USE_FAST: bool = true;
}
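
// A hypothetical example (no such type exists in the crate): a config with the fast slots
// turned off forces every load through `fallback`, which is the testing use case the comment
// on `Config` above refers to.
#[cfg(test)]
#[allow(dead_code)]
#[derive(Clone, Default)]
struct ForceFallback;

#[cfg(test)]
impl Config for ForceFallback {
    const USE_FAST: bool = false;
}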

/// The strategy itself; the [Config] parameter exists mostly so tests can turn the fast slots
/// off.
#[derive(Clone, Default)]
pub struct HybridStrategy<Cfg> {
    pub(crate) _config: Cfg,
}

impl<T, Cfg> InnerStrategy<T> for HybridStrategy<Cfg>
where
    T: RefCnt,
    Cfg: Config,
{
    type Protected = HybridProtection<T>;
    unsafe fn load(&self, storage: &AtomicPtr<T::Base>) -> Self::Protected {
        LocalNode::with(|node| {
            let fast = if Cfg::USE_FAST {
                HybridProtection::attempt(node, storage)
            } else {
                None
            };
            fast.unwrap_or_else(|| HybridProtection::fallback(node, storage))
        })
    }
    unsafe fn wait_for_readers(&self, old: *const T::Base, storage: &AtomicPtr<T::Base>) {
        // The pay_all may need to provide fresh replacement values if someone else is loading from
        // this particular storage. We do so in the exact same way, by `load` ‒ it's OK, a writer
        // does not hold a slot and the reader doesn't recurse back into the writer, so we won't
        // run out of slots.
        let replacement = || self.load(storage).into_inner();
        Debt::pay_all::<T, _>(old, storage as *const _ as usize, replacement);
    }
}

impl<T: RefCnt, Cfg: Config> CaS<T> for HybridStrategy<Cfg> {
    unsafe fn compare_and_swap<C: crate::as_raw::AsRaw<T::Base>>(
        &self,
        storage: &AtomicPtr<T::Base>,
        current: C,
        new: T,
    ) -> Self::Protected {
        loop {
            let old = <Self as InnerStrategy<T>>::load(self, storage);
            // Observation of their inequality is enough to make a verdict
            if old.as_ptr() != current.as_raw() {
                return old;
            }
            // If they are still equal, put the new one in.
            let new_raw = T::as_ptr(&new);
            if storage
                .compare_exchange_weak(current.as_raw(), new_raw, SeqCst, Relaxed)
                .is_ok()
            {
                // We successfully put the new value in. The ref count went in there too.
                T::into_ptr(new);
                <Self as InnerStrategy<T>>::wait_for_readers(self, old.as_ptr(), storage);
                // We just got one ref count out of the storage and we have one in old. We don't
                // need two.
                T::dec(old.as_ptr());
                return old;
            }
        }
    }
}