// quick_cache — shard.rs

1use std::{
2    hash::{BuildHasher, Hash},
3    hint::unreachable_unchecked,
4    mem::{self, MaybeUninit},
5};
6
7use hashbrown::HashTable;
8
9use crate::{
10    linked_slab::{LinkedSlab, Token},
11    options::DEFAULT_HOT_ALLOCATION,
12    shim::sync::atomic::{self, AtomicU16},
13    Equivalent, Lifecycle, MemoryUsed, Weighter,
14};
15
16#[cfg(feature = "stats")]
17use crate::shim::sync::atomic::AtomicU64;
18
// Max value of the per-entry reference counter used by the eviction rings.
// For comparison, this cap is 1 in classic CLOCK-Pro and 3 in S3-FIFO.
const MAX_F: u16 = 2;
21
/// Handle shared between a shard and callers with a pending insertion.
/// Implementations carry the key hash and the slab `Token` they were created
/// for, so the shard can later locate and verify the placeholder entry.
pub trait SharedPlaceholder: Clone {
    /// Creates a placeholder for the entry with `hash` stored at slab slot `idx`.
    fn new(hash: u64, idx: Token) -> Self;
    /// Whether `self` and `other` refer to the same placeholder instance.
    fn same_as(&self, other: &Self) -> bool;
    /// Hash of the key this placeholder was created for.
    fn hash(&self) -> u64;
    /// Slab token of the entry this placeholder was created for.
    fn idx(&self) -> Token;
}
28
/// How an insertion treats an entry already present for the same key.
pub enum InsertStrategy {
    /// Insert unconditionally, replacing whatever entry is there.
    Insert,
    /// Only replace an existing *resident* entry; fails otherwise.
    /// When `soft` is true the replacement does not bump the entry's
    /// reference counter (see `insert_existing`).
    Replace { soft: bool },
}
33
/// Which CLOCK ring a resident entry currently belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ResidentState {
    /// Frequently used entries; evicted only when the hot section is overweight.
    Hot,
    /// Recently inserted / less-used entries; first candidates for eviction.
    Cold,
}
39
/// A cached key/value pair that is resident in the shard, linked into either
/// the hot or the cold CLOCK ring.
#[derive(Debug)]
pub struct Resident<Key, Val> {
    key: Key,
    value: Val,
    /// Ring this entry is currently linked into.
    state: ResidentState,
    /// CLOCK reference counter, capped at `MAX_F`. Atomic so shared `get`s
    /// can bump it without exclusive access to the shard.
    referenced: AtomicU16,
}
47
48impl<Key: Clone, Val: Clone> Clone for Resident<Key, Val> {
49    #[inline]
50    fn clone(&self) -> Self {
51        Self {
52            key: self.key.clone(),
53            value: self.value.clone(),
54            state: self.state,
55            referenced: self.referenced.load(atomic::Ordering::Relaxed).into(),
56        }
57    }
58}
59
/// Entry for an in-flight insertion: reserves the map/slab slot for `key`
/// until the value is produced.
#[derive(Debug, Clone)]
struct Placeholder<Key, Plh> {
    key: Key,
    /// Despite the name, this is the `ResidentState` the entry will enter as
    /// once the placeholder is fulfilled (see `insert_existing`).
    hot: ResidentState,
    /// Handle shared with the inserting caller(s).
    shared: Plh,
}
66
/// A slot in the entries slab.
#[derive(Clone)]
enum Entry<Key, Val, Plh> {
    /// A live key/value pair (hot or cold).
    Resident(Resident<Key, Val>),
    /// A reserved slot for a pending insertion.
    Placeholder(Placeholder<Key, Plh>),
    /// A non-resident "ghost" remembering only the evicted key's hash.
    Ghost(u64),
}
73
/// A bounded cache using a modified CLOCK-PRO eviction policy.
/// The implementation allows some parallelism as gets don't require exclusive access.
/// Any evicted items are returned so they can be dropped by the caller, outside the locks.
pub struct CacheShard<Key, Val, We, B, L, Plh> {
    /// Hasher for keys; also used to rehash entries when the map resizes.
    hash_builder: B,
    /// Map to an entry in the `entries` slab.
    /// Note that the actual key/value/hash are not stored in the map but in the slab.
    map: HashTable<Token>,
    /// Slab holding entries
    entries: LinkedSlab<Entry<Key, Val, Plh>>,
    /// Head of cold list, containing Cold entries.
    /// Only contains entries of kind `Resident`.
    cold_head: Option<Token>,
    /// Head of hot list, containing Hot entries.
    /// Only contains entries of kind `Resident`.
    hot_head: Option<Token>,
    /// Head of ghost list, containing non-resident/Hash entries.
    /// Only contains entries of kind `Ghost`.
    ghost_head: Option<Token>,
    /// Target max weight of the hot section; the hot ring is advanced while
    /// `weight_hot` exceeds this.
    weight_target_hot: u64,
    /// Total weight capacity (hot + cold) of this shard.
    weight_capacity: u64,
    /// Current combined weight of hot resident entries.
    weight_hot: u64,
    /// Current combined weight of cold resident entries.
    weight_cold: u64,
    /// Count of hot resident entries.
    num_hot: usize,
    /// Count of cold resident entries.
    num_cold: usize,
    /// Count of ghost (non-resident) entries.
    num_non_resident: usize,
    /// Max number of ghost entries retained.
    capacity_non_resident: usize,
    /// Hit counter; atomic so shared `get`s can record hits.
    #[cfg(feature = "stats")]
    hits: AtomicU64,
    /// Miss counter; atomic so shared `get`s can record misses.
    #[cfg(feature = "stats")]
    misses: AtomicU64,
    /// Computes the weight of each key/value pair.
    weighter: We,
    /// Lifecycle hooks (pinning checks and eviction callbacks).
    pub(crate) lifecycle: L,
}
108
impl<Key: Clone, Val: Clone, We: Clone, B: Clone, L: Clone, Plh: Clone> Clone
    for CacheShard<Key, Val, We, B, L, Plh>
{
    /// Manual impl because the (feature-gated) stat counters are atomics,
    /// which aren't `Clone`; their values are copied via relaxed loads.
    fn clone(&self) -> Self {
        Self {
            hash_builder: self.hash_builder.clone(),
            map: self.map.clone(),
            entries: self.entries.clone(),
            cold_head: self.cold_head,
            hot_head: self.hot_head,
            ghost_head: self.ghost_head,
            weight_target_hot: self.weight_target_hot,
            weight_capacity: self.weight_capacity,
            weight_hot: self.weight_hot,
            weight_cold: self.weight_cold,
            num_hot: self.num_hot,
            num_cold: self.num_cold,
            num_non_resident: self.num_non_resident,
            capacity_non_resident: self.capacity_non_resident,
            #[cfg(feature = "stats")]
            hits: self.hits.load(atomic::Ordering::Relaxed).into(),
            #[cfg(feature = "stats")]
            misses: self.misses.load(atomic::Ordering::Relaxed).into(),
            weighter: self.weighter.clone(),
            lifecycle: self.lifecycle.clone(),
        }
    }
}
137
// Hit/miss recording, compiled in only with the `stats` feature.
// The plain variants use atomic RMW ops (callers hold shared access);
// the `_mut` variants use `get_mut`, avoiding the atomic op when the
// caller already has exclusive access to the shard.
#[cfg(feature = "stats")]
macro_rules! record_hit {
    ($self: expr) => {{
        $self.hits.fetch_add(1, atomic::Ordering::Relaxed);
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_hit_mut {
    ($self: expr) => {{
        *$self.hits.get_mut() += 1;
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_miss {
    ($self: expr) => {{
        $self.misses.fetch_add(1, atomic::Ordering::Relaxed);
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_miss_mut {
    ($self: expr) => {{
        *$self.misses.get_mut() += 1;
    }};
}
162
// No-op stand-ins when the `stats` feature is disabled, so call sites don't
// need their own cfg guards.
#[cfg(not(feature = "stats"))]
macro_rules! record_hit {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_hit_mut {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_miss {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_miss_mut {
    ($self: expr) => {{}};
}
179
impl<Key, Val, We, B, L, Plh: SharedPlaceholder> CacheShard<Key, Val, We, B, L, Plh> {
    /// Removes `placeholder`'s map entry, but only if the map slot still
    /// points at this exact placeholder (same token and same shared handle).
    /// An entry for the same key inserted in the meantime is left untouched.
    pub fn remove_placeholder(&mut self, placeholder: &Plh) {
        if let Ok(entry) = self.map.find_entry(placeholder.hash(), |&idx| {
            if idx != placeholder.idx() {
                return false;
            }
            let (entry, _) = self.entries.get(idx).unwrap();
            matches!(entry, Entry::Placeholder(Placeholder { shared, .. }) if shared.same_as(placeholder))
        }) {
            entry.remove();
        }
    }

    /// Out-of-line slow path (for weight changes via `RefMut`): moves the
    /// resident entry's weight from `old_weight` to `new_weight` in the
    /// hot/cold totals. Zero-weight entries are kept out of the rings, so a
    /// transition to/from weight 0 also links/unlinks the entry.
    ///
    /// Precondition: `idx` must refer to a `Resident` entry — the
    /// `unreachable_unchecked` below is UB otherwise.
    #[cold]
    fn cold_change_weight(&mut self, idx: Token, old_weight: u64, new_weight: u64) {
        let Some((Entry::Resident(resident), _)) = self.entries.get_mut(idx) else {
            // SAFETY: callers only pass tokens of resident entries.
            unsafe { unreachable_unchecked() };
        };
        let (weight_ptr, target_head) = if resident.state == ResidentState::Hot {
            (&mut self.weight_hot, &mut self.hot_head)
        } else {
            (&mut self.weight_cold, &mut self.cold_head)
        };
        *weight_ptr -= old_weight;
        *weight_ptr += new_weight;

        if old_weight == 0 && new_weight != 0 {
            *target_head = Some(self.entries.link(idx, *target_head));
        } else if old_weight != 0 && new_weight == 0 {
            *target_head = self.entries.unlink(idx);
        }
    }
}
213
214impl<Key, Val, We, B, L, Plh> CacheShard<Key, Val, We, B, L, Plh> {
215    pub fn memory_used(&self) -> MemoryUsed {
216        MemoryUsed {
217            entries: self.entries.memory_used(),
218            map: self.map.allocation_size(),
219        }
220    }
221
222    pub fn weight(&self) -> u64 {
223        self.weight_hot + self.weight_cold
224    }
225
226    pub fn len(&self) -> usize {
227        self.num_hot + self.num_cold
228    }
229
230    pub fn capacity(&self) -> u64 {
231        self.weight_capacity
232    }
233
234    #[cfg(feature = "stats")]
235    pub fn hits(&self) -> u64 {
236        self.hits.load(atomic::Ordering::Relaxed)
237    }
238
239    #[cfg(feature = "stats")]
240    pub fn misses(&self) -> u64 {
241        self.misses.load(atomic::Ordering::Relaxed)
242    }
243
244    pub fn clear(&mut self) {
245        let _ = self.drain();
246    }
247
248    pub fn drain(&mut self) -> impl Iterator<Item = (Key, Val)> + '_ {
249        self.cold_head = None;
250        self.hot_head = None;
251        self.ghost_head = None;
252        self.num_hot = 0;
253        self.num_cold = 0;
254        self.num_non_resident = 0;
255        self.weight_hot = 0;
256        self.weight_cold = 0;
257        self.map.clear();
258        self.entries.drain().filter_map(|i| match i {
259            Entry::Resident(r) => Some((r.key, r.value)),
260            Entry::Placeholder(_) | Entry::Ghost(_) => None,
261        })
262    }
263
264    pub fn iter(&self) -> impl Iterator<Item = (&'_ Key, &'_ Val)> + '_ {
265        self.entries.iter().filter_map(|i| match i {
266            Entry::Resident(r) => Some((&r.key, &r.value)),
267            Entry::Placeholder(_) | Entry::Ghost(_) => None,
268        })
269    }
270
271    pub fn iter_from(
272        &self,
273        continuation: Option<Token>,
274    ) -> impl Iterator<Item = (Token, &'_ Key, &'_ Val)> + '_ {
275        self.entries
276            .iter_from(continuation)
277            .filter_map(|(token, i)| match i {
278                Entry::Resident(r) => Some((token, &r.key, &r.value)),
279                Entry::Placeholder(_) | Entry::Ghost(_) => None,
280            })
281    }
282}
283
284impl<
285        Key: Eq + Hash,
286        Val,
287        We: Weighter<Key, Val>,
288        B: BuildHasher,
289        L: Lifecycle<Key, Val>,
290        Plh: SharedPlaceholder,
291    > CacheShard<Key, Val, We, B, L, Plh>
292{
293    pub fn new(
294        hot_allocation: f64,
295        ghost_allocation: f64,
296        estimated_items_capacity: usize,
297        weight_capacity: u64,
298        weighter: We,
299        hash_builder: B,
300        lifecycle: L,
301    ) -> Self {
302        // Clamp to at least 1 when weight_capacity > 0, since the f64 multiplication
303        // can truncate to 0 for small capacities, which would reject all inserts as overweight.
304        let weight_target_hot = ((weight_capacity as f64 * hot_allocation) as u64)
305            .clamp(weight_capacity.min(1), weight_capacity);
306        let capacity_non_resident = (estimated_items_capacity as f64 * ghost_allocation) as usize;
307        Self {
308            hash_builder,
309            map: HashTable::with_capacity(0),
310            entries: LinkedSlab::with_capacity(0),
311            weight_capacity,
312            #[cfg(feature = "stats")]
313            hits: Default::default(),
314            #[cfg(feature = "stats")]
315            misses: Default::default(),
316            cold_head: None,
317            hot_head: None,
318            ghost_head: None,
319            capacity_non_resident,
320            weight_target_hot,
321            num_hot: 0,
322            num_cold: 0,
323            num_non_resident: 0,
324            weight_hot: 0,
325            weight_cold: 0,
326            weighter,
327            lifecycle,
328        }
329    }
330
331    #[cfg(any(fuzzing, test))]
332    pub fn validate(&self, accept_overweight: bool) {
333        self.entries.validate();
334        let mut num_hot = 0;
335        let mut num_cold = 0;
336        let mut num_non_resident = 0;
337        let mut weight_hot = 0;
338        let mut weight_hot_pinned = 0;
339        let mut weight_cold = 0;
340        let mut weight_cold_pinned = 0;
341        for e in self.entries.iter_entries() {
342            match e {
343                Entry::Resident(r) if r.state == ResidentState::Cold => {
344                    num_cold += 1;
345                    let weight = self.weighter.weight(&r.key, &r.value);
346                    if self.lifecycle.is_pinned(&r.key, &r.value) {
347                        weight_cold_pinned += weight;
348                    } else {
349                        weight_cold += weight;
350                    }
351                }
352                Entry::Resident(r) => {
353                    num_hot += 1;
354                    let weight = self.weighter.weight(&r.key, &r.value);
355                    if self.lifecycle.is_pinned(&r.key, &r.value) {
356                        weight_hot_pinned += weight;
357                    } else {
358                        weight_hot += weight;
359                    }
360                }
361                Entry::Ghost(_) => {
362                    num_non_resident += 1;
363                }
364                Entry::Placeholder(_) => (),
365            }
366        }
367        // eprintln!("-------------");
368        // dbg!(
369        //     num_hot,
370        //     num_cold,
371        //     num_non_resident,
372        //     weight_hot,
373        //     weight_hot_pinned,
374        //     weight_cold,
375        //     weight_cold_pinned,
376        //     self.num_hot,
377        //     self.num_cold,
378        //     self.num_non_resident,
379        //     self.weight_hot,
380        //     self.weight_cold,
381        //     self.weight_target_hot,
382        //     self.weight_capacity,
383        //     self.capacity_non_resident
384        // );
385        assert_eq!(num_hot, self.num_hot);
386        assert_eq!(num_cold, self.num_cold);
387        assert_eq!(num_non_resident, self.num_non_resident);
388        assert_eq!(weight_hot + weight_hot_pinned, self.weight_hot);
389        assert_eq!(weight_cold + weight_cold_pinned, self.weight_cold);
390        if !accept_overweight {
391            assert!(weight_hot + weight_cold <= self.weight_capacity);
392        }
393        assert!(num_non_resident <= self.capacity_non_resident);
394    }
395
    /// Reserves additional space for `additional` entries.
    /// Note that this is counted in entries, and is not weighted.
    pub fn reserve(&mut self, additional: usize) {
        // extra 50% for non-resident entries
        let additional = additional.saturating_add(additional / 2);
        // The rehash closure recomputes each bucket's hash: resident and
        // placeholder entries hash their key; ghost entries already store it.
        self.map.reserve(additional, |&idx| {
            let (entry, _) = self.entries.get(idx).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. }) => {
                    Self::hash_static(&self.hash_builder, key)
                }
                Entry::Ghost(non_resident_hash) => *non_resident_hash,
            }
        })
    }
412
413    pub fn retain<F>(&mut self, f: F)
414    where
415        F: Fn(&Key, &Val) -> bool,
416    {
417        let retained_tokens = self
418            .map
419            .iter()
420            .filter_map(|&idx| match self.entries.get(idx) {
421                Some((entry, _idx)) => match entry {
422                    Entry::Resident(r) => {
423                        if !f(&r.key, &r.value) {
424                            let hash = self.hash(&r.key);
425                            Some((idx, hash))
426                        } else {
427                            None
428                        }
429                    }
430                    Entry::Placeholder(_) | Entry::Ghost(_) => None,
431                },
432                None => None,
433            })
434            .collect::<Vec<_>>();
435        for (idx, hash) in retained_tokens {
436            self.remove_internal(hash, idx);
437        }
438    }
439
    /// Hashes `key` with `hasher`. Static so it can be called while other
    /// fields of `self` are mutably borrowed.
    #[inline]
    fn hash_static<Q>(hasher: &B, key: &Q) -> u64
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        hasher.hash_one(key)
    }
447
    /// Hashes `key` with this shard's hasher; callers pass the result to the
    /// hash-taking methods (`get`, `remove`, `contains`, ...).
    #[inline]
    pub fn hash<Q>(&self, key: &Q) -> u64
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        Self::hash_static(&self.hash_builder, key)
    }
455
    /// Finds the token for `k`: a Resident/Placeholder with an equivalent key
    /// wins; failing that, a Ghost entry with the same hash is returned.
    ///
    /// Ghost entries store only the hash, so a ghost result may actually be a
    /// hash collision with a different key.
    #[inline]
    fn search<Q>(&self, hash: u64, k: &Q) -> Option<Token>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let mut hash_match = None;
        for bucket in self.map.iter_hash(hash) {
            let idx = *bucket;
            let (entry, _) = self.entries.get(idx).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. })
                    if k.equivalent(key) =>
                {
                    return Some(idx);
                }
                Entry::Ghost(non_resident_hash) if *non_resident_hash == hash => {
                    // Remember the ghost, but keep scanning for a real key match.
                    hash_match = Some(idx);
                }
                _ => (),
            }
        }
        hash_match
    }
480
    /// Like `search`, but only matches resident entries, returning the token
    /// and a borrow of the resident from a single map probe.
    #[inline]
    fn search_resident<Q>(&self, hash: u64, k: &Q) -> Option<(Token, &Resident<Key, Val>)>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        // The match closure smuggles the `&Resident` out through this
        // `MaybeUninit` so the entry doesn't need a second slab lookup.
        let mut resident = MaybeUninit::uninit();
        self.map
            .find(hash, |&idx| {
                let (entry, _) = self.entries.get(idx).unwrap();
                match entry {
                    Entry::Resident(r) if k.equivalent(&r.key) => {
                        resident.write(r);
                        true
                    }
                    _ => false,
                }
            })
            .map(|idx| {
                // Safety: we found a successful entry in `get` above,
                // thus resident is initialized (the closure only returns true
                // after writing it).
                (*idx, unsafe { resident.assume_init() })
            })
    }
504
505    pub fn contains<Q>(&self, hash: u64, key: &Q) -> bool
506    where
507        Q: Hash + Equivalent<Key> + ?Sized,
508    {
509        self.map
510            .find(hash, |&idx| {
511                let (entry, _) = self.entries.get(idx).unwrap();
512                matches!(entry, Entry::Resident(r) if key.equivalent(&r.key))
513            })
514            .is_some()
515    }
516
    /// Returns a reference to the value for `key`, bumping the entry's
    /// reference counter for the eviction policy, and records hit/miss stats.
    /// Requires only shared access to the shard.
    pub fn get<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        if let Some((_, resident)) = self.search_resident(hash, key) {
            let referenced = resident.referenced.load(atomic::Ordering::Relaxed);
            // Attempt to avoid contention on hot items, which may have their counters maxed out
            if referenced < MAX_F {
                // While extremely unlikely, this increment can theoretically overflow.
                // Even if that happens there's no impact correctness wise.
                // Note the load/add pair is racy, so the counter can exceed
                // MAX_F; consumers cap it with `min(MAX_F)`.
                resident.referenced.fetch_add(1, atomic::Ordering::Relaxed);
            }
            record_hit!(self);
            Some(&resident.value)
        } else {
            record_miss!(self);
            None
        }
    }
536
    /// Mutable variant of `get`: returns a `RefMut` guard that re-syncs the
    /// entry's weight when dropped (via `old_weight`). Uses non-atomic
    /// counter access since we hold exclusive access.
    pub fn get_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let Some((idx, _)) = self.search_resident(hash, key) else {
            record_miss_mut!(self);
            return None;
        };
        // SAFETY: `idx` was just returned by `search_resident`, so it is a
        // valid token for a Resident entry.
        let (Entry::Resident(resident), _) = (unsafe { self.entries.get_mut_unchecked(idx) })
        else {
            unsafe { unreachable_unchecked() };
        };
        if *resident.referenced.get_mut() < MAX_F {
            *resident.referenced.get_mut() += 1;
        }
        record_hit_mut!(self);

        // Captured so RefMut can detect weight changes on drop.
        let old_weight = self.weighter.weight(&resident.key, &resident.value);
        Some(RefMut {
            idx,
            old_weight,
            cache: self,
        })
    }
561
562    #[inline]
563    pub fn peek_token(&self, token: Token) -> Option<&Val> {
564        if let Some((Entry::Resident(resident), _)) = self.entries.get(token) {
565            Some(&resident.value)
566        } else {
567            None
568        }
569    }
570
571    #[inline]
572    pub fn peek_token_mut(&mut self, token: Token) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>> {
573        if let Some((Entry::Resident(resident), _)) = self.entries.get_mut(token) {
574            let old_weight = self.weighter.weight(&resident.key, &resident.value);
575            Some(RefMut {
576                old_weight,
577                idx: token,
578                cache: self,
579            })
580        } else {
581            None
582        }
583    }
584
585    pub fn peek<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
586    where
587        Q: Hash + Equivalent<Key> + ?Sized,
588    {
589        let (_, resident) = self.search_resident(hash, key)?;
590        Some(&resident.value)
591    }
592
593    pub fn peek_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
594    where
595        Q: Hash + Equivalent<Key> + ?Sized,
596    {
597        let (idx, _) = self.search_resident(hash, key)?;
598        self.peek_token_mut(idx)
599    }
600
    /// Removes the entry for `key` and returns its key/value if it was
    /// resident. A hash-matching ghost entry is removed too (returning
    /// `None`), since removal signals the key is no longer interesting.
    pub fn remove<Q>(&mut self, hash: u64, key: &Q) -> Option<(Key, Val)>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        // This could be search_resident, but calling `remove` is likely a
        // strong indication that this input isn't important.
        let idx = self.search(hash, key)?;
        self.remove_internal(hash, idx)
    }
610
611    pub fn remove_if<Q, F>(&mut self, hash: u64, key: &Q, f: F) -> Option<(Key, Val)>
612    where
613        Q: Hash + Equivalent<Key> + ?Sized,
614        F: FnOnce(&Val) -> bool,
615    {
616        let (idx, resident) = self.search_resident(hash, key)?;
617        if f(&resident.value) {
618            self.remove_internal(hash, idx)
619        } else {
620            None
621        }
622    }
623
624    pub fn remove_token(&mut self, token: Token) -> Option<(Key, Val)> {
625        let Some((Entry::Resident(resident), _)) = self.entries.get(token) else {
626            return None;
627        };
628        let hash = Self::hash_static(&self.hash_builder, &resident.key);
629        self.remove_internal(hash, token)
630    }
631
    /// Removes the first resident entry found at/after `continuation` in slab
    /// order, returning its token plus the key/value. The token can be fed
    /// back as `continuation` for cursor-style batch removal.
    pub fn remove_next(&mut self, continuation: Option<Token>) -> Option<(Token, Key, Val)> {
        let (token, key, _) = self
            .entries
            .iter_from(continuation)
            .filter_map(|(token, i)| match i {
                Entry::Resident(r) => Some((token, &r.key, &r.value)),
                Entry::Placeholder(_) | Entry::Ghost(_) => None,
            })
            .next()?;
        let hash = Self::hash_static(&self.hash_builder, key);
        self.remove_internal(hash, token)
            .map(|(k, v)| (token, k, v))
    }
645
    /// Removes the entry at `idx` (key hashing to `hash`) from the map, the
    /// slab, and whichever ring it's linked into, fixing up counters/weights.
    /// Returns the key/value only when the entry was resident.
    fn remove_internal(&mut self, hash: u64, idx: Token) -> Option<(Key, Val)> {
        self.map_remove(hash, idx);
        let mut result = None;
        let (entry, next) = self.entries.remove(idx).unwrap();
        let list_head = match entry {
            Entry::Resident(r) => {
                let weight = self.weighter.weight(&r.key, &r.value);
                result = Some((r.key, r.value));
                if r.state == ResidentState::Hot {
                    self.num_hot -= 1;
                    self.weight_hot -= weight;
                    &mut self.hot_head
                } else {
                    debug_assert!(r.state == ResidentState::Cold);
                    self.num_cold -= 1;
                    self.weight_cold -= weight;
                    &mut self.cold_head
                }
            }
            Entry::Ghost(_) => {
                // Since this an user invoked remove we opt to remove even Ghost entries that could match it.
                self.num_non_resident -= 1;
                &mut self.ghost_head
            }
            Entry::Placeholder(_) => {
                // TODO: this is probably undesirable as it could lead to two placeholders for the same key.
                return None;
            }
        };
        // If the removed entry was its ring's head, advance the head.
        if *list_head == Some(idx) {
            *list_head = next;
        }
        result
    }
680
    /// Advance cold ring, promoting to hot and demoting as needed.
    ///
    /// Returns `true` when progress was made (promotion, unlink, or
    /// eviction); `false` means nothing could be advanced (the rings are
    /// empty or everything is pinned).
    #[must_use]
    fn advance_cold(&mut self, lcs: &mut L::RequestState) -> bool {
        let Some(mut idx) = self.cold_head else {
            // No cold entries: try the hot ring instead.
            return self.advance_hot(lcs);
        };
        loop {
            let (entry, next) = self.entries.get_mut(idx).unwrap();
            let Entry::Resident(resident) = entry else {
                // SAFETY: the cold ring only links Resident entries.
                unsafe { unreachable_unchecked() };
            };
            debug_assert_eq!(resident.state, ResidentState::Cold);
            if *resident.referenced.get_mut() != 0 {
                // Referenced while cold: second chance — promote to hot.
                *resident.referenced.get_mut() -= 1;
                resident.state = ResidentState::Hot;
                let weight = self.weighter.weight(&resident.key, &resident.value);
                self.weight_hot += weight;
                self.weight_cold -= weight;
                self.num_hot += 1;
                self.num_cold -= 1;
                self.cold_head = self.entries.unlink(idx);
                self.hot_head = Some(self.entries.link(idx, self.hot_head));
                // evict from hot if overweight
                while self.weight_hot > self.weight_target_hot && self.advance_hot(lcs) {}
                return true;
            }

            if self.lifecycle.is_pinned(&resident.key, &resident.value) {
                // Pinned entries can't be evicted; a full lap means the whole
                // cold ring is pinned, so fall back to the hot ring.
                if Some(next) == self.cold_head {
                    return self.advance_hot(lcs);
                }
                idx = next;
                continue;
            }

            // Evict. `before_evict` may mutate the value (and thus its
            // weight); a post-hook weight of 0 keeps the entry resident but
            // unlinked from the ring.
            self.weight_cold -= self.weighter.weight(&resident.key, &resident.value);
            self.lifecycle
                .before_evict(lcs, &resident.key, &mut resident.value);
            if self.weighter.weight(&resident.key, &resident.value) == 0 {
                self.cold_head = self.entries.unlink(idx);
                return true;
            }
            // Demote to a Ghost that remembers only the key's hash.
            let hash = Self::hash_static(&self.hash_builder, &resident.key);
            let Entry::Resident(evicted) = mem::replace(entry, Entry::Ghost(hash)) else {
                // Safety: we had a mut reference to the Resident inside entry until the previous line
                unsafe { core::hint::unreachable_unchecked() };
            };
            self.cold_head = self.entries.unlink(idx);
            self.ghost_head = Some(self.entries.link(idx, self.ghost_head));
            self.num_cold -= 1;
            self.num_non_resident += 1;
            // evict from ghost if oversized
            if self.num_non_resident > self.capacity_non_resident {
                self.advance_ghost();
            }
            self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
            return true;
        }
    }
740
    /// Advance hot ring evicting entries.
    ///
    /// Returns `false` only when the ring is empty or every entry is pinned.
    #[must_use]
    fn advance_hot(&mut self, lcs: &mut L::RequestState) -> bool {
        // Counts unpinned entries seen since the last pass over the head, to
        // detect a full lap where everything is pinned.
        let mut unpinned = 0usize;
        let Some(mut idx) = self.hot_head else {
            return false;
        };
        loop {
            let (entry, next) = self.entries.get_mut(idx).unwrap();
            let Entry::Resident(resident) = entry else {
                // SAFETY: the hot ring only links Resident entries.
                unsafe { unreachable_unchecked() };
            };
            debug_assert_eq!(resident.state, ResidentState::Hot);
            if self.lifecycle.is_pinned(&resident.key, &resident.value) {
                // Still decay the (possibly over-MAX_F, see `get`) counter.
                *resident.referenced.get_mut() = (*resident.referenced.get_mut())
                    .min(MAX_F)
                    .saturating_sub(1);
                if Some(next) == self.hot_head {
                    if unpinned == 0 {
                        // All entries are pinned
                        return false;
                    }
                    // Restart unpinned count
                    unpinned = 0;
                }
                idx = next;
                continue;
            }
            unpinned += 1;
            if *resident.referenced.get_mut() != 0 {
                // Second chance: decay and move on.
                *resident.referenced.get_mut() = (*resident.referenced.get_mut()).min(MAX_F) - 1;
                idx = next;
                continue;
            }
            // Evict. As in `advance_cold`, a post-`before_evict` weight of 0
            // keeps the entry resident but unlinked; otherwise it is removed
            // entirely (hot evictions leave no ghost).
            self.weight_hot -= self.weighter.weight(&resident.key, &resident.value);
            self.lifecycle
                .before_evict(lcs, &resident.key, &mut resident.value);
            if self.weighter.weight(&resident.key, &resident.value) == 0 {
                self.hot_head = self.entries.unlink(idx);
            } else {
                self.num_hot -= 1;
                let hash = Self::hash_static(&self.hash_builder, &resident.key);
                let Some((Entry::Resident(evicted), next)) = self.entries.remove(idx) else {
                    // Safety: we had a mut reference to the Resident under `idx` until the previous line
                    unsafe { core::hint::unreachable_unchecked() };
                };
                self.hot_head = next;
                self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
                self.map_remove(hash, idx);
            }
            return true;
        }
    }
794
    /// Evicts the ghost entry at the head of the ghost ring.
    /// Must only be called when at least one ghost entry exists.
    #[inline]
    fn advance_ghost(&mut self) {
        debug_assert_ne!(self.num_non_resident, 0);
        let idx = self.ghost_head.unwrap();
        let (entry, _) = self.entries.get_mut(idx).unwrap();
        let Entry::Ghost(hash) = *entry else {
            // SAFETY: the ghost ring only links Ghost entries.
            unsafe { unreachable_unchecked() };
        };
        self.num_non_resident -= 1;
        self.map_remove(hash, idx);
        let (_, next) = self.entries.remove(idx).unwrap();
        self.ghost_head = next;
    }
808
    /// Replaces the entry already stored at `idx` with a resident for
    /// `key`/`value`, fixing up ring membership, counters and weights.
    ///
    /// The previous entry may be a `Resident` (plain replace), a `Ghost`
    /// (recently evicted key re-entering as Hot), or a `Placeholder` (pending
    /// insert being fulfilled). Returns `Err((key, value))` when `strategy`
    /// is `Replace` but the entry is not resident.
    fn insert_existing(
        &mut self,
        lcs: &mut L::RequestState,
        idx: Token,
        key: Key,
        value: Val,
        weight: u64,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        // caller already handled overweight items, but it could have been pinned
        let (entry, _) = self.entries.get_mut(idx).unwrap();
        let referenced;
        let enter_state;
        match entry {
            Entry::Resident(resident) => {
                // Keep the current ring; bump the (capped) reference counter
                // unless this is a soft replace.
                enter_state = resident.state;
                referenced = resident
                    .referenced
                    .get_mut()
                    .saturating_add(
                        !matches!(strategy, InsertStrategy::Replace { soft: true }) as u16
                    )
                    .min(MAX_F);
            }
            _ if matches!(strategy, InsertStrategy::Replace { .. }) => {
                // Replace requires an existing resident entry.
                return Err((key, value));
            }
            Entry::Ghost(_) => {
                // Recently evicted key: admit it straight into the hot ring.
                referenced = 0;
                enter_state = ResidentState::Hot;
            }
            Entry::Placeholder(ph) => {
                referenced = 1; // Pretend it's a newly inserted Resident
                enter_state = ph.hot;
            }
        }

        let evicted = mem::replace(
            entry,
            Entry::Resident(Resident {
                key,
                value,
                state: enter_state,
                referenced: referenced.into(),
            }),
        );
        match evicted {
            Entry::Resident(evicted) => {
                debug_assert_eq!(evicted.state, enter_state);
                let evicted_weight = self.weighter.weight(&evicted.key, &evicted.value);
                let list_head = if enter_state == ResidentState::Hot {
                    self.weight_hot -= evicted_weight;
                    self.weight_hot += weight;
                    &mut self.hot_head
                } else {
                    self.weight_cold -= evicted_weight;
                    self.weight_cold += weight;
                    &mut self.cold_head
                };
                // Zero-weight entries stay out of the rings, so link/unlink
                // on transitions to/from weight 0.
                if evicted_weight == 0 && weight != 0 {
                    *list_head = Some(self.entries.link(idx, *list_head));
                } else if evicted_weight != 0 && weight == 0 {
                    *list_head = self.entries.unlink(idx);
                }
                self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
            }
            Entry::Ghost(_) => {
                self.weight_hot += weight;
                self.num_hot += 1;
                self.num_non_resident -= 1;
                // Move the slot from the ghost ring to the hot ring.
                let next_ghost = self.entries.unlink(idx);
                if self.ghost_head == Some(idx) {
                    self.ghost_head = next_ghost;
                }
                if weight != 0 {
                    self.hot_head = Some(self.entries.link(idx, self.hot_head));
                }
            }
            Entry::Placeholder(_) => {
                // Placeholders are not linked into any ring; just link the
                // new resident into the ring chosen at placeholder creation.
                let list_head = if enter_state == ResidentState::Hot {
                    self.num_hot += 1;
                    self.weight_hot += weight;
                    &mut self.hot_head
                } else {
                    self.num_cold += 1;
                    self.weight_cold += weight;
                    &mut self.cold_head
                };
                if weight != 0 {
                    *list_head = Some(self.entries.link(idx, *list_head));
                }
            }
        }

        // Evict from the cold ring while over total capacity.
        while self.weight_hot + self.weight_cold > self.weight_capacity && self.advance_cold(lcs) {}
        Ok(())
    }
906
907    #[inline]
908    fn map_insert(&mut self, hash: u64, idx: Token) {
909        self.map.insert_unique(hash, idx, |&i| {
910            let (entry, _) = self.entries.get(i).unwrap();
911            match entry {
912                Entry::Resident(Resident { key, .. })
913                | Entry::Placeholder(Placeholder { key, .. }) => {
914                    Self::hash_static(&self.hash_builder, key)
915                }
916                Entry::Ghost(hash) => *hash,
917            }
918        });
919    }
920
921    #[inline]
922    fn map_remove(&mut self, hash: u64, idx: Token) {
923        if let Ok(entry) = self.map.find_entry(hash, |&i| i == idx) {
924            entry.remove();
925            return;
926        }
927        #[cfg(debug_assertions)]
928        panic!("key not found");
929    }
930
    /// Fulfills an outstanding placeholder by installing `value` as a resident entry.
    ///
    /// Returns `Err(value)` if the slot no longer holds the exact placeholder the
    /// caller obtained (it was removed or superseded in the meantime).
    ///
    /// * `referenced` - whether the new resident starts with one reference mark.
    pub fn replace_placeholder(
        &mut self,
        lcs: &mut L::RequestState,
        placeholder: &Plh,
        referenced: bool,
        mut value: Val,
    ) -> Result<(), Val> {
        // Validate that the slot still contains *this* placeholder before touching it.
        let entry = match self.entries.get_mut(placeholder.idx()) {
            Some((entry, _)) if matches!(&*entry, Entry::Placeholder(p) if p.shared.same_as(placeholder)) => {
                entry
            }
            _ => return Err(value),
        };
        // Temporarily park a Ghost(0) in the slot so the key can be taken by value.
        // It is overwritten below, or the whole slot is removed on the overweight path.
        let Entry::Placeholder(Placeholder {
            key,
            hot: mut placeholder_hot,
            ..
        }) = mem::replace(entry, Entry::Ghost(0))
        else {
            // Safety: we had a mut reference to entry with a Placeholder inside as checked by the first match statement
            unsafe { core::hint::unreachable_unchecked() };
        };
        let mut weight = self.weighter.weight(&key, &value);
        // don't admit if it won't fit within the budget
        if weight > self.weight_target_hot && !self.lifecycle.is_pinned(&key, &value) {
            // Give the lifecycle a chance to shrink the value before rejecting it.
            self.lifecycle.before_evict(lcs, &key, &mut value);
            weight = self.weighter.weight(&key, &value);
            // check again, it could have changed weight
            if weight > self.weight_target_hot {
                return self.handle_overweight_replace_placeholder(lcs, placeholder, key, value);
            }
        }

        // cache is filling up, admit as hot if possible
        if self.weight_hot + weight <= self.weight_target_hot {
            placeholder_hot = ResidentState::Hot;
        }
        *entry = Entry::Resident(Resident {
            key,
            value,
            state: placeholder_hot,
            referenced: (referenced as u16).into(),
        });

        // Update counters/weights and pick the clock list matching the entry state.
        let list_head = if placeholder_hot == ResidentState::Hot {
            self.num_hot += 1;
            self.weight_hot += weight;
            &mut self.hot_head
        } else {
            self.num_cold += 1;
            self.weight_cold += weight;
            &mut self.cold_head
        };

        // Zero-weight entries stay out of the clock lists and cannot push the
        // cache over capacity, so linking and eviction are skipped for them.
        if weight != 0 {
            *list_head = Some(self.entries.link(placeholder.idx(), *list_head));
            while self.weight_hot + self.weight_cold > self.weight_capacity
                && self.advance_cold(lcs)
            {}
        }

        Ok(())
    }
994
995    #[cold]
996    fn handle_overweight_replace_placeholder(
997        &mut self,
998        lcs: &mut L::RequestState,
999        placeholder: &Plh,
1000        key: Key,
1001        value: Val,
1002    ) -> Result<(), Val> {
1003        self.entries.remove(placeholder.idx());
1004        self.map_remove(placeholder.hash(), placeholder.idx());
1005        self.lifecycle.on_evict(lcs, key, value);
1006        Ok(())
1007    }
1008
1009    pub fn insert(
1010        &mut self,
1011        lcs: &mut L::RequestState,
1012        hash: u64,
1013        key: Key,
1014        mut value: Val,
1015        strategy: InsertStrategy,
1016    ) -> Result<(), (Key, Val)> {
1017        let mut weight = self.weighter.weight(&key, &value);
1018        // don't admit if it won't fit within the budget
1019        if weight > self.weight_target_hot && !self.lifecycle.is_pinned(&key, &value) {
1020            self.lifecycle.before_evict(lcs, &key, &mut value);
1021            weight = self.weighter.weight(&key, &value);
1022            // check again, it could have changed weight
1023            if weight > self.weight_target_hot {
1024                return self.handle_insert_overweight(lcs, hash, key, value, strategy);
1025            }
1026        }
1027
1028        if let Some(idx) = self.search(hash, &key) {
1029            return self.insert_existing(lcs, idx, key, value, weight, strategy);
1030        } else if matches!(strategy, InsertStrategy::Replace { .. }) {
1031            return Err((key, value));
1032        }
1033
1034        // cache is filling up, admit as hot if possible
1035        let enter_hot = self.weight_hot + weight <= self.weight_target_hot;
1036        // pre-evict instead of post-evict, this gives sightly more priority to the new item
1037        while self.weight_hot + self.weight_cold + weight > self.weight_capacity
1038            && self.advance_cold(lcs)
1039        {}
1040
1041        let (state, list_head) = if enter_hot {
1042            self.num_hot += 1;
1043            self.weight_hot += weight;
1044            (ResidentState::Hot, &mut self.hot_head)
1045        } else {
1046            self.num_cold += 1;
1047            self.weight_cold += weight;
1048            (ResidentState::Cold, &mut self.cold_head)
1049        };
1050        let idx = self.entries.insert(Entry::Resident(Resident {
1051            key,
1052            value,
1053            state,
1054            referenced: Default::default(),
1055        }));
1056        if weight != 0 {
1057            *list_head = Some(self.entries.link(idx, *list_head));
1058        }
1059        self.map_insert(hash, idx);
1060        Ok(())
1061    }
1062
1063    #[cold]
1064    fn handle_insert_overweight(
1065        &mut self,
1066        lcs: &mut L::RequestState,
1067        hash: u64,
1068        key: Key,
1069        value: Val,
1070        strategy: InsertStrategy,
1071    ) -> Result<(), (Key, Val)> {
1072        // Make sure to remove any existing entry
1073        if let Some((idx, _)) = self.search_resident(hash, &key) {
1074            if let Some((ek, ev)) = self.remove_internal(hash, idx) {
1075                self.lifecycle.on_evict(lcs, ek, ev);
1076            }
1077        }
1078        if matches!(strategy, InsertStrategy::Replace { .. }) {
1079            return Err((key, value));
1080        }
1081        self.lifecycle.on_evict(lcs, key, value);
1082        Ok(())
1083    }
1084
    /// Looks up `key`, registering a placeholder on miss.
    ///
    /// Returns:
    /// * `Ok((token, &value))` on a resident hit (also bumps the reference counter),
    /// * `Err((placeholder, true))` when a brand-new placeholder was created for
    ///   this key (the caller is now responsible for filling it),
    /// * `Err((placeholder, false))` when another request already holds a
    ///   placeholder for this key.
    pub fn upsert_placeholder<Q>(
        &mut self,
        hash: u64,
        key: &Q,
    ) -> Result<(Token, &Val), (Plh, bool)>
    where
        Q: Hash + Equivalent<Key> + ToOwned<Owned = Key> + ?Sized,
    {
        let shared;
        if let Some(idx) = self.search(hash, key) {
            let (entry, _) = self.entries.get_mut(idx).unwrap();
            match entry {
                Entry::Resident(resident) => {
                    // Record the access, saturating at MAX_F.
                    if *resident.referenced.get_mut() < MAX_F {
                        *resident.referenced.get_mut() += 1;
                    }
                    record_hit_mut!(self);
                    unsafe {
                        // Rustc gets insanely confused returning references from mut borrows
                        // Safety: value will have the same lifetime as `resident`
                        let value_ptr: *const Val = &resident.value;
                        return Ok((idx, &*value_ptr));
                    }
                }
                Entry::Placeholder(p) => {
                    // Someone else is already computing this entry.
                    record_hit_mut!(self);
                    return Err((p.shared.clone(), false));
                }
                Entry::Ghost(_) => {
                    // Reuse the ghost's slot: a returning key is admitted as Hot.
                    shared = Plh::new(hash, idx);
                    *entry = Entry::Placeholder(Placeholder {
                        key: key.to_owned(),
                        hot: ResidentState::Hot,
                        shared: shared.clone(),
                    });
                    self.num_non_resident -= 1;
                    // Unlink the slot from the ghost list, advancing the head if needed.
                    let next = self.entries.unlink(idx);
                    if self.ghost_head == Some(idx) {
                        self.ghost_head = next;
                    }
                }
            }
        } else {
            // True miss: insert a fresh (Cold) placeholder into a new slot.
            let idx = self.entries.next_free();
            shared = Plh::new(hash, idx);
            let idx_ = self.entries.insert(Entry::Placeholder(Placeholder {
                key: key.to_owned(),
                hot: ResidentState::Cold,
                shared: shared.clone(),
            }));
            debug_assert_eq!(idx, idx_);
            self.map_insert(hash, idx);
        }
        record_miss_mut!(self);
        Err((shared, true))
    }
1141
1142    pub fn set_capacity(&mut self, new_weight_capacity: u64) {
1143        // Guard against division by zero when old capacity is 0 (produces inf/NaN ratios)
1144        if self.weight_capacity == 0 {
1145            self.weight_capacity = new_weight_capacity;
1146            self.weight_target_hot = ((new_weight_capacity as f64 * DEFAULT_HOT_ALLOCATION) as u64)
1147                .clamp(new_weight_capacity.min(1), new_weight_capacity);
1148            // capacity_non_resident stays 0 since we have no basis to estimate
1149        } else {
1150            let old_new_ratio = new_weight_capacity as f64 / self.weight_capacity as f64;
1151            let hot_ratio = self.weight_target_hot as f64 / self.weight_capacity as f64;
1152
1153            self.weight_capacity = new_weight_capacity;
1154            self.weight_target_hot = ((new_weight_capacity as f64 * hot_ratio) as u64)
1155                .clamp(new_weight_capacity.min(1), new_weight_capacity);
1156            self.capacity_non_resident =
1157                (self.capacity_non_resident as f64 * old_new_ratio) as usize;
1158        }
1159
1160        // Evict items if we're over the new capacity
1161        let mut lcs = self.lifecycle.begin_request();
1162        while self.weight_hot + self.weight_cold > self.weight_capacity
1163            && self.advance_cold(&mut lcs)
1164        {}
1165        self.lifecycle.end_request(lcs);
1166        // Trim ghost entries if needed
1167        while self.num_non_resident > self.capacity_non_resident {
1168            self.advance_ghost();
1169        }
1170    }
1171}
1172
/// Structure wrapping a mutable reference to a cached item.
///
/// On drop the item is re-weighed and the cache accounting is updated if the
/// weight changed (see the `Drop` impl below).
pub struct RefMut<'cache, Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> {
    // Owning shard, exclusively borrowed for the lifetime of this guard.
    cache: &'cache mut CacheShard<Key, Val, We, B, L, Plh>,
    // Slot of the Resident entry being accessed.
    idx: Token,
    // Weight when the guard was created, used to detect changes on drop.
    old_weight: u64,
}
1179
1180impl<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder>
1181    RefMut<'_, Key, Val, We, B, L, Plh>
1182{
1183    pub(crate) fn pair(&self) -> (&Key, &Val) {
1184        // Safety: RefMut was constructed correctly from a Resident entry in get_mut or peek_token_mut
1185        // and it couldn't be modified as we're holding a mutable reference to the cache
1186        unsafe {
1187            if let (Entry::Resident(Resident { key, value, .. }), _) =
1188                self.cache.entries.get_unchecked(self.idx)
1189            {
1190                (key, value)
1191            } else {
1192                core::hint::unreachable_unchecked()
1193            }
1194        }
1195    }
1196
1197    pub(crate) fn value_mut(&mut self) -> &mut Val {
1198        // Safety: RefMut was constructed correctly from a Resident entry in get_mut or peek_token_mut
1199        // and it couldn't be modified as we're holding a mutable reference to the cache
1200        unsafe {
1201            if let (Entry::Resident(Resident { value, .. }), _) =
1202                self.cache.entries.get_mut_unchecked(self.idx)
1203            {
1204                value
1205            } else {
1206                core::hint::unreachable_unchecked()
1207            }
1208        }
1209    }
1210}
1211
1212impl<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> Drop
1213    for RefMut<'_, Key, Val, We, B, L, Plh>
1214{
1215    #[inline]
1216    fn drop(&mut self) {
1217        let (key, value) = self.pair();
1218        let new_weight = self.cache.weighter.weight(key, value);
1219        if self.old_weight != new_weight {
1220            self.cache
1221                .cold_change_weight(self.idx, self.old_weight, new_weight);
1222        }
1223    }
1224}
1225
#[cfg(test)]
mod tests {
    use super::*;

    /// Guards against accidental growth of the per-entry memory overhead.
    #[test]
    fn entry_overhead() {
        use std::mem::size_of;
        // The Key + Val payload is two u64 words; anything beyond is overhead.
        let payload = size_of::<[u64; 2]>();
        let sync_entry =
            size_of::<Entry<u64, u64, crate::sync_placeholder::SharedPlaceholder<u64>>>();
        let unsync_entry = size_of::<Entry<u64, u64, crate::unsync::SharedPlaceholder>>();
        // 8 bytes from linked slab, 8 bytes from entry
        assert_eq!(sync_entry - payload, 16);
        assert_eq!(unsync_entry - payload, 16);
    }
}