1use std::{
2 hash::{BuildHasher, Hash},
3 hint::unreachable_unchecked,
4 mem::{self, MaybeUninit},
5};
6
7use hashbrown::HashTable;
8
9use crate::{
10 linked_slab::{LinkedSlab, Token},
11 options::DEFAULT_HOT_ALLOCATION,
12 shim::sync::atomic::{self, AtomicU16},
13 Equivalent, Lifecycle, MemoryUsed, Weighter,
14};
15
16#[cfg(feature = "stats")]
17use crate::shim::sync::atomic::AtomicU64;
18
// Cap for a `Resident`'s CLOCK `referenced` counter: lookups saturate it at
// this value and the eviction hands decrement it.
const MAX_F: u16 = 2;

/// Handle to an in-flight placeholder entry, shared between the shard and
/// concurrent waiters for the same key.
pub trait SharedPlaceholder: Clone {
    /// Creates a placeholder for the entry at `idx` whose key hashes to `hash`.
    fn new(hash: u64, idx: Token) -> Self;
    /// Whether `self` and `other` refer to the same placeholder instance.
    fn same_as(&self, other: &Self) -> bool;
    /// Hash of the key this placeholder stands for.
    fn hash(&self) -> u64;
    /// Slab token of the entry this placeholder occupies.
    fn idx(&self) -> Token;
}
28
/// How `insert` should behave w.r.t. an existing (or missing) entry.
pub enum InsertStrategy {
    /// Insert or overwrite unconditionally.
    Insert,
    /// Only overwrite an existing *resident* entry; fails otherwise.
    /// `soft: true` additionally skips bumping the CLOCK `referenced`
    /// counter on the overwritten entry.
    Replace { soft: bool },
}
33
/// Action returned by the `entry_or_placeholder` callback for a resident entry.
pub enum EntryAction<T> {
    /// Keep the entry (counts as a hit) and pass `T` back to the caller.
    Retain(T),
    /// Remove the entry, returning its key and value.
    Remove,
    /// Swap the entry for a placeholder, returning the old value alongside
    /// the placeholder handle.
    ReplaceWithGuard,
}
54
/// Outcome of `entry_or_placeholder`.
pub enum EntryOrPlaceholder<Key, Val, Plh, T> {
    /// Entry was resident and retained; carries the callback's payload.
    Kept(T),
    /// Entry was resident and removed.
    Removed(Key, Val),
    /// Entry was resident and replaced with a placeholder; carries the old value.
    Replaced(Plh, Val),
    /// A placeholder for this key already existed.
    ExistingPlaceholder(Plh),
    /// No resident entry or placeholder existed; a new placeholder was created.
    NewPlaceholder(Plh),
}
69
/// Which CLOCK list a resident entry currently belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ResidentState {
    Hot,
    Cold,
}
75
/// A cached key/value pair plus its CLOCK bookkeeping.
#[derive(Debug)]
pub struct Resident<Key, Val> {
    key: Key,
    value: Val,
    // Hot or cold list membership.
    state: ResidentState,
    // CLOCK reference counter (capped at `MAX_F`); atomic so shared `get`s
    // can bump it through a `&self` borrow.
    referenced: AtomicU16,
}
83
84impl<Key: Clone, Val: Clone> Clone for Resident<Key, Val> {
85 #[inline]
86 fn clone(&self) -> Self {
87 Self {
88 key: self.key.clone(),
89 value: self.value.clone(),
90 state: self.state,
91 referenced: self.referenced.load(atomic::Ordering::Relaxed).into(),
92 }
93 }
94}
95
/// An entry reserved for a value that is still being computed or loaded.
#[derive(Debug, Clone)]
struct Placeholder<Key, Plh> {
    key: Key,
    // List (Hot/Cold) the entry should enter once the value arrives;
    // named `hot` but stores the full `ResidentState`.
    hot: ResidentState,
    // Shared handle that waiters use to identify this placeholder.
    shared: Plh,
}
102
/// A slot in the shard: a live entry, an in-flight placeholder, or the
/// ghost of a recently evicted cold entry.
#[derive(Clone)]
enum Entry<Key, Val, Plh> {
    Resident(Resident<Key, Val>),
    Placeholder(Placeholder<Key, Plh>),
    // Ghosts keep only the evicted key's hash.
    Ghost(u64),
}
109
/// One shard of the cache: a hash index over a slab of entries organized
/// into circular hot / cold / ghost lists.
pub struct CacheShard<Key, Val, We, B, L, Plh> {
    hash_builder: B,
    // Maps key hashes to slab tokens; the entry data lives in `entries`.
    map: HashTable<Token>,
    // Slab holding all entries, including the intrusive list links.
    entries: LinkedSlab<Entry<Key, Val, Plh>>,
    // Heads of the circular lists (`None` when the list is empty).
    cold_head: Option<Token>,
    hot_head: Option<Token>,
    ghost_head: Option<Token>,
    // Weight budget reserved for the hot list.
    weight_target_hot: u64,
    // Total weight budget for the shard.
    weight_capacity: u64,
    weight_hot: u64,
    weight_cold: u64,
    num_hot: usize,
    num_cold: usize,
    // Number of ghost (non-resident) entries currently tracked, and its cap.
    num_non_resident: usize,
    capacity_non_resident: usize,
    #[cfg(feature = "stats")]
    hits: AtomicU64,
    #[cfg(feature = "stats")]
    misses: AtomicU64,
    weighter: We,
    pub(crate) lifecycle: L,
}
144
impl<Key: Clone, Val: Clone, We: Clone, B: Clone, L: Clone, Plh: Clone> Clone
    for CacheShard<Key, Val, We, B, L, Plh>
{
    /// Deep-clones the shard; the atomic stat counters are snapshotted
    /// with relaxed loads into fresh atomics.
    fn clone(&self) -> Self {
        Self {
            hash_builder: self.hash_builder.clone(),
            map: self.map.clone(),
            entries: self.entries.clone(),
            cold_head: self.cold_head,
            hot_head: self.hot_head,
            ghost_head: self.ghost_head,
            weight_target_hot: self.weight_target_hot,
            weight_capacity: self.weight_capacity,
            weight_hot: self.weight_hot,
            weight_cold: self.weight_cold,
            num_hot: self.num_hot,
            num_cold: self.num_cold,
            num_non_resident: self.num_non_resident,
            capacity_non_resident: self.capacity_non_resident,
            #[cfg(feature = "stats")]
            hits: self.hits.load(atomic::Ordering::Relaxed).into(),
            #[cfg(feature = "stats")]
            misses: self.misses.load(atomic::Ordering::Relaxed).into(),
            weighter: self.weighter.clone(),
            lifecycle: self.lifecycle.clone(),
        }
    }
}
173
// Stat-recording helpers. With the "stats" feature the `*_mut` variants
// bump the counter through `get_mut` (exclusive access, no atomic RMW)
// while the plain variants use a relaxed `fetch_add`; without the feature
// all of them expand to nothing.
#[cfg(feature = "stats")]
macro_rules! record_hit {
    ($self: expr) => {{
        $self.hits.fetch_add(1, atomic::Ordering::Relaxed);
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_hit_mut {
    ($self: expr) => {{
        *$self.hits.get_mut() += 1;
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_miss {
    ($self: expr) => {{
        $self.misses.fetch_add(1, atomic::Ordering::Relaxed);
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_miss_mut {
    ($self: expr) => {{
        *$self.misses.get_mut() += 1;
    }};
}

// No-op versions when stats are disabled.
#[cfg(not(feature = "stats"))]
macro_rules! record_hit {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_hit_mut {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_miss {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_miss_mut {
    ($self: expr) => {{}};
}
215
impl<Key, Val, We, B, L, Plh: SharedPlaceholder> CacheShard<Key, Val, We, B, L, Plh> {
    /// Removes the hash-table bucket that points at `placeholder`'s slot,
    /// if that slot still holds this exact placeholder.
    /// NOTE(review): only the `map` bucket is removed here; the slab entry
    /// itself is not touched — confirm callers clean it up separately.
    pub fn remove_placeholder(&mut self, placeholder: &Plh) {
        if let Ok(entry) = self.map.find_entry(placeholder.hash(), |&idx| {
            if idx != placeholder.idx() {
                return false;
            }
            let (entry, _) = self.entries.get(idx).unwrap();
            matches!(entry, Entry::Placeholder(Placeholder { shared, .. }) if shared.same_as(placeholder))
        }) {
            entry.remove();
        }
    }

    /// Slow path for a weight change of the resident at `idx`: moves the
    /// weight delta onto the right counter and links/unlinks the entry when
    /// its weight crosses zero (zero-weight entries stay off the lists).
    ///
    /// `idx` must refer to a resident entry; anything else is UB via
    /// `unreachable_unchecked`.
    #[cold]
    fn cold_change_weight(&mut self, idx: Token, old_weight: u64, new_weight: u64) {
        let Some((Entry::Resident(resident), _)) = self.entries.get_mut(idx) else {
            // SAFETY: callers only pass tokens of resident entries.
            unsafe { unreachable_unchecked() };
        };
        let (weight_ptr, target_head) = if resident.state == ResidentState::Hot {
            (&mut self.weight_hot, &mut self.hot_head)
        } else {
            (&mut self.weight_cold, &mut self.cold_head)
        };
        *weight_ptr -= old_weight;
        *weight_ptr += new_weight;

        if old_weight == 0 && new_weight != 0 {
            // Entry became weighted: put it (back) on its list.
            *target_head = Some(self.entries.link(idx, *target_head));
        } else if old_weight != 0 && new_weight == 0 {
            // Entry became weightless: take it off its list.
            *target_head = self.entries.unlink(idx);
        }
    }
}
249
250impl<Key, Val, We, B, L, Plh> CacheShard<Key, Val, We, B, L, Plh> {
251 pub fn memory_used(&self) -> MemoryUsed {
252 MemoryUsed {
253 entries: self.entries.memory_used(),
254 map: self.map.allocation_size(),
255 }
256 }
257
258 pub fn weight(&self) -> u64 {
259 self.weight_hot + self.weight_cold
260 }
261
262 pub fn len(&self) -> usize {
263 self.num_hot + self.num_cold
264 }
265
266 pub fn capacity(&self) -> u64 {
267 self.weight_capacity
268 }
269
270 #[cfg(feature = "stats")]
271 pub fn hits(&self) -> u64 {
272 self.hits.load(atomic::Ordering::Relaxed)
273 }
274
275 #[cfg(feature = "stats")]
276 pub fn misses(&self) -> u64 {
277 self.misses.load(atomic::Ordering::Relaxed)
278 }
279
280 pub fn clear(&mut self) {
281 let _ = self.drain();
282 }
283
284 pub fn drain(&mut self) -> impl Iterator<Item = (Key, Val)> + '_ {
285 self.cold_head = None;
286 self.hot_head = None;
287 self.ghost_head = None;
288 self.num_hot = 0;
289 self.num_cold = 0;
290 self.num_non_resident = 0;
291 self.weight_hot = 0;
292 self.weight_cold = 0;
293 self.map.clear();
294 self.entries.drain().filter_map(|i| match i {
295 Entry::Resident(r) => Some((r.key, r.value)),
296 Entry::Placeholder(_) | Entry::Ghost(_) => None,
297 })
298 }
299
300 pub fn iter(&self) -> impl Iterator<Item = (&'_ Key, &'_ Val)> + '_ {
301 self.entries.iter().filter_map(|i| match i {
302 Entry::Resident(r) => Some((&r.key, &r.value)),
303 Entry::Placeholder(_) | Entry::Ghost(_) => None,
304 })
305 }
306
307 pub fn iter_from(
308 &self,
309 continuation: Option<Token>,
310 ) -> impl Iterator<Item = (Token, &'_ Key, &'_ Val)> + '_ {
311 self.entries
312 .iter_from(continuation)
313 .filter_map(|(token, i)| match i {
314 Entry::Resident(r) => Some((token, &r.key, &r.value)),
315 Entry::Placeholder(_) | Entry::Ghost(_) => None,
316 })
317 }
318}
319
320impl<
321 Key: Eq + Hash,
322 Val,
323 We: Weighter<Key, Val>,
324 B: BuildHasher,
325 L: Lifecycle<Key, Val>,
326 Plh: SharedPlaceholder,
327 > CacheShard<Key, Val, We, B, L, Plh>
328{
    /// Builds an empty shard.
    ///
    /// `hot_allocation` is the fraction of `weight_capacity` reserved for
    /// the hot list; `ghost_allocation` scales `estimated_items_capacity`
    /// into the number of ghost (non-resident) entries tracked.
    pub fn new(
        hot_allocation: f64,
        ghost_allocation: f64,
        estimated_items_capacity: usize,
        weight_capacity: u64,
        weighter: We,
        hash_builder: B,
        lifecycle: L,
    ) -> Self {
        // Clamp so the hot target is at least 1 (when capacity allows) and
        // never exceeds the total capacity.
        let weight_target_hot = ((weight_capacity as f64 * hot_allocation) as u64)
            .clamp(weight_capacity.min(1), weight_capacity);
        let capacity_non_resident = (estimated_items_capacity as f64 * ghost_allocation) as usize;
        Self {
            hash_builder,
            map: HashTable::with_capacity(0),
            entries: LinkedSlab::with_capacity(0),
            weight_capacity,
            #[cfg(feature = "stats")]
            hits: Default::default(),
            #[cfg(feature = "stats")]
            misses: Default::default(),
            cold_head: None,
            hot_head: None,
            ghost_head: None,
            capacity_non_resident,
            weight_target_hot,
            num_hot: 0,
            num_cold: 0,
            num_non_resident: 0,
            weight_hot: 0,
            weight_cold: 0,
            weighter,
            lifecycle,
        }
    }
366
    /// Test/fuzzing-only consistency check: recomputes counts and weights
    /// from the slab and asserts they match the cached counters.
    #[cfg(any(fuzzing, test))]
    pub fn validate(&self, accept_overweight: bool) {
        self.entries.validate();
        let mut num_hot = 0;
        let mut num_cold = 0;
        let mut num_non_resident = 0;
        let mut weight_hot = 0;
        let mut weight_hot_pinned = 0;
        let mut weight_cold = 0;
        let mut weight_cold_pinned = 0;
        for e in self.entries.iter_entries() {
            match e {
                Entry::Resident(r) if r.state == ResidentState::Cold => {
                    num_cold += 1;
                    let weight = self.weighter.weight(&r.key, &r.value);
                    // Pinned weight is tracked separately so the capacity
                    // assertion below can exclude it.
                    if self.lifecycle.is_pinned(&r.key, &r.value) {
                        weight_cold_pinned += weight;
                    } else {
                        weight_cold += weight;
                    }
                }
                Entry::Resident(r) => {
                    num_hot += 1;
                    let weight = self.weighter.weight(&r.key, &r.value);
                    if self.lifecycle.is_pinned(&r.key, &r.value) {
                        weight_hot_pinned += weight;
                    } else {
                        weight_hot += weight;
                    }
                }
                Entry::Ghost(_) => {
                    num_non_resident += 1;
                }
                Entry::Placeholder(_) => (),
            }
        }
        assert_eq!(num_hot, self.num_hot);
        assert_eq!(num_cold, self.num_cold);
        assert_eq!(num_non_resident, self.num_non_resident);
        assert_eq!(weight_hot + weight_hot_pinned, self.weight_hot);
        assert_eq!(weight_cold + weight_cold_pinned, self.weight_cold);
        // Only unpinned weight must respect the capacity.
        if !accept_overweight {
            assert!(weight_hot + weight_cold <= self.weight_capacity);
        }
        assert!(num_non_resident <= self.capacity_non_resident);
    }
431
    /// Reserves map capacity for `additional` entries plus ~50% headroom
    /// (presumably for placeholder/ghost slots — the factor is not
    /// explained here; TODO confirm). The closure re-derives each existing
    /// bucket's hash for the rehash.
    pub fn reserve(&mut self, additional: usize) {
        let additional = additional.saturating_add(additional / 2);
        self.map.reserve(additional, |&idx| {
            let (entry, _) = self.entries.get(idx).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. }) => {
                    Self::hash_static(&self.hash_builder, key)
                }
                // Ghosts store their hash directly.
                Entry::Ghost(non_resident_hash) => *non_resident_hash,
            }
        })
    }
448
449 pub fn retain<F>(&mut self, f: F)
450 where
451 F: Fn(&Key, &Val) -> bool,
452 {
453 let retained_tokens = self
454 .map
455 .iter()
456 .filter_map(|&idx| match self.entries.get(idx) {
457 Some((entry, _idx)) => match entry {
458 Entry::Resident(r) => {
459 if !f(&r.key, &r.value) {
460 let hash = self.hash(&r.key);
461 Some((idx, hash))
462 } else {
463 None
464 }
465 }
466 Entry::Placeholder(_) | Entry::Ghost(_) => None,
467 },
468 None => None,
469 })
470 .collect::<Vec<_>>();
471 for (idx, hash) in retained_tokens {
472 self.remove_internal(hash, idx);
473 }
474 }
475
    /// Hashes `key` with `hasher`. An associated fn (rather than a method)
    /// so it can be called inside closures that already borrow other
    /// fields of `self` (see `map_insert` / `reserve`).
    #[inline]
    fn hash_static<Q>(hasher: &B, key: &Q) -> u64
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        hasher.hash_one(key)
    }
483
484 #[inline]
485 pub fn hash<Q>(&self, key: &Q) -> u64
486 where
487 Q: Hash + Equivalent<Key> + ?Sized,
488 {
489 Self::hash_static(&self.hash_builder, key)
490 }
491
    /// Finds the slab token for `k`: an exact key match on a resident or
    /// placeholder entry wins; otherwise falls back to a ghost entry with
    /// the same hash (ghosts keep only the hash, so this can be a
    /// hash-colliding different key).
    #[inline]
    fn search<Q>(&self, hash: u64, k: &Q) -> Option<Token>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let mut hash_match = None;
        for bucket in self.map.iter_hash(hash) {
            let idx = *bucket;
            let (entry, _) = self.entries.get(idx).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. })
                    if k.equivalent(key) =>
                {
                    return Some(idx);
                }
                // `iter_hash` can yield colliding buckets, so re-check the
                // stored hash before accepting a ghost.
                Entry::Ghost(non_resident_hash) if *non_resident_hash == hash => {
                    hash_match = Some(idx);
                }
                _ => (),
            }
        }
        hash_match
    }
516
    /// Finds the resident entry matching `k`, returning its token and a
    /// reference to it.
    #[inline]
    fn search_resident<Q>(&self, hash: u64, k: &Q) -> Option<(Token, &Resident<Key, Val>)>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        // Stash the matching `&Resident` found inside the closure so it
        // can be returned alongside the token without a second lookup.
        let mut resident = MaybeUninit::uninit();
        self.map
            .find(hash, |&idx| {
                let (entry, _) = self.entries.get(idx).unwrap();
                match entry {
                    Entry::Resident(r) if k.equivalent(&r.key) => {
                        resident.write(r);
                        true
                    }
                    _ => false,
                }
            })
            .map(|idx| {
                // SAFETY: `find` returns `Some` only if the closure
                // returned `true`, which happens only after `resident`
                // was written.
                (*idx, unsafe { resident.assume_init() })
            })
    }
540
541 pub fn contains<Q>(&self, hash: u64, key: &Q) -> bool
542 where
543 Q: Hash + Equivalent<Key> + ?Sized,
544 {
545 self.map
546 .find(hash, |&idx| {
547 let (entry, _) = self.entries.get(idx).unwrap();
548 matches!(entry, Entry::Resident(r) if key.equivalent(&r.key))
549 })
550 .is_some()
551 }
552
    /// Looks up `key`; on a hit bumps the CLOCK `referenced` counter and
    /// records hit/miss stats. The load-then-add is not atomic as a pair,
    /// so racing getters may push the counter slightly past `MAX_F` — the
    /// counter is approximate by design.
    pub fn get_key_value<Q>(&self, hash: u64, key: &Q) -> Option<(&Key, &Val)>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        if let Some((_, resident)) = self.search_resident(hash, key) {
            let referenced = resident.referenced.load(atomic::Ordering::Relaxed);
            if referenced < MAX_F {
                resident.referenced.fetch_add(1, atomic::Ordering::Relaxed);
            }
            record_hit!(self);
            Some((&resident.key, &resident.value))
        } else {
            record_miss!(self);
            None
        }
    }
572
573 #[inline]
574 pub fn get<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
575 where
576 Q: Hash + Equivalent<Key> + ?Sized,
577 {
578 self.get_key_value(hash, key).map(|(_k, v)| v)
579 }
580
    /// Looks up `key` and returns a guard for in-place mutation. The guard
    /// stores the pre-mutation weight and a raw pointer back to the shard
    /// so weight bookkeeping can be reconciled when it is dropped.
    pub fn get_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let Some((idx, _)) = self.search_resident(hash, key) else {
            record_miss_mut!(self);
            return None;
        };
        // SAFETY: `idx` came from `search_resident`, so it refers to a
        // live resident entry.
        let (Entry::Resident(resident), _) = (unsafe { self.entries.get_mut_unchecked(idx) })
        else {
            unsafe { unreachable_unchecked() };
        };
        // Exclusive access: bump the CLOCK counter without an atomic RMW.
        if *resident.referenced.get_mut() < MAX_F {
            *resident.referenced.get_mut() += 1;
        }
        record_hit_mut!(self);

        let old_weight = self.weighter.weight(&resident.key, &resident.value);
        Some(RefMut {
            guard: WeightGuard {
                shard: self as *mut _,
                idx,
                old_weight,
            },
            _phantom: std::marker::PhantomData,
        })
    }
608
609 #[inline]
610 pub fn peek_token(&self, token: Token) -> Option<&Val> {
611 if let Some((Entry::Resident(resident), _)) = self.entries.get(token) {
612 Some(&resident.value)
613 } else {
614 None
615 }
616 }
617
    /// Returns a mutation guard for the entry at `token` without touching
    /// CLOCK or stat counters.
    #[inline]
    pub fn peek_token_mut(&mut self, token: Token) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>> {
        if let Some((Entry::Resident(resident), _)) = self.entries.get_mut(token) {
            // Snapshot the current weight so the guard can reconcile any
            // change on drop.
            let old_weight = self.weighter.weight(&resident.key, &resident.value);
            Some(RefMut {
                guard: WeightGuard {
                    shard: self as *mut _,
                    idx: token,
                    old_weight,
                },
                _phantom: std::marker::PhantomData,
            })
        } else {
            None
        }
    }
634
635 pub fn peek<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
636 where
637 Q: Hash + Equivalent<Key> + ?Sized,
638 {
639 let (_, resident) = self.search_resident(hash, key)?;
640 Some(&resident.value)
641 }
642
643 pub fn peek_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
644 where
645 Q: Hash + Equivalent<Key> + ?Sized,
646 {
647 let (idx, _) = self.search_resident(hash, key)?;
648 self.peek_token_mut(idx)
649 }
650
    /// Removes the entry for `key`, returning the pair if a resident was
    /// removed. Because `search` may fall back to a ghost that merely
    /// shares the hash, this can also discard such a ghost (returning
    /// `None`).
    pub fn remove<Q>(&mut self, hash: u64, key: &Q) -> Option<(Key, Val)>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let idx = self.search(hash, key)?;
        self.remove_internal(hash, idx)
    }
660
661 pub fn remove_if<Q, F>(&mut self, hash: u64, key: &Q, f: F) -> Option<(Key, Val)>
662 where
663 Q: Hash + Equivalent<Key> + ?Sized,
664 F: FnOnce(&Val) -> bool,
665 {
666 let (idx, resident) = self.search_resident(hash, key)?;
667 if f(&resident.value) {
668 self.remove_internal(hash, idx)
669 } else {
670 None
671 }
672 }
673
674 pub fn remove_token(&mut self, token: Token) -> Option<(Key, Val)> {
675 let Some((Entry::Resident(resident), _)) = self.entries.get(token) else {
676 return None;
677 };
678 let hash = Self::hash_static(&self.hash_builder, &resident.key);
679 self.remove_internal(hash, token)
680 }
681
    /// Removes the first resident entry at or after `continuation`,
    /// returning its token too so callers can resume scanning from there.
    pub fn remove_next(&mut self, continuation: Option<Token>) -> Option<(Token, Key, Val)> {
        let (token, key, _) = self
            .entries
            .iter_from(continuation)
            .filter_map(|(token, i)| match i {
                Entry::Resident(r) => Some((token, &r.key, &r.value)),
                Entry::Placeholder(_) | Entry::Ghost(_) => None,
            })
            .next()?;
        // Hash first: the `key` borrow must end before the mutable call.
        let hash = Self::hash_static(&self.hash_builder, key);
        self.remove_internal(hash, token)
            .map(|(k, v)| (token, k, v))
    }
695
    /// Removes the entry at `idx` from the map, the slab, and whichever
    /// list it was on, fixing up the matching counters. Only residents
    /// yield a `(key, value)` pair; ghosts and placeholders are dropped
    /// silently (placeholders are not linked into any list).
    fn remove_internal(&mut self, hash: u64, idx: Token) -> Option<(Key, Val)> {
        self.map_remove(hash, idx);
        let mut result = None;
        let (entry, next) = self.entries.remove(idx).unwrap();
        let list_head = match entry {
            Entry::Resident(r) => {
                let weight = self.weighter.weight(&r.key, &r.value);
                result = Some((r.key, r.value));
                if r.state == ResidentState::Hot {
                    self.num_hot -= 1;
                    self.weight_hot -= weight;
                    &mut self.hot_head
                } else {
                    debug_assert!(r.state == ResidentState::Cold);
                    self.num_cold -= 1;
                    self.weight_cold -= weight;
                    &mut self.cold_head
                }
            }
            Entry::Ghost(_) => {
                self.num_non_resident -= 1;
                &mut self.ghost_head
            }
            Entry::Placeholder(_) => {
                // Placeholder slot already unmapped/removed above; there is
                // no list link or pair to return.
                return None;
            }
        };
        // If the removed entry was its list's head, advance the head.
        if *list_head == Some(idx) {
            *list_head = next;
        }
        result
    }
730
    /// Advances the cold CLOCK hand once: promotes a referenced cold entry
    /// to hot, or evicts the first unreferenced, unpinned cold entry to
    /// the ghost list. Returns whether progress was made; callers loop on
    /// this while over capacity.
    #[must_use]
    fn advance_cold(&mut self, lcs: &mut L::RequestState) -> bool {
        // No cold entries: try to make room from the hot list instead.
        let Some(mut idx) = self.cold_head else {
            return self.advance_hot(lcs);
        };
        loop {
            let (entry, next) = self.entries.get_mut(idx).unwrap();
            let Entry::Resident(resident) = entry else {
                // SAFETY: the cold list links only resident entries.
                unsafe { unreachable_unchecked() };
            };
            debug_assert_eq!(resident.state, ResidentState::Cold);
            if *resident.referenced.get_mut() != 0 {
                // Referenced cold entry: second chance — promote to hot.
                *resident.referenced.get_mut() -= 1;
                resident.state = ResidentState::Hot;
                let weight = self.weighter.weight(&resident.key, &resident.value);
                self.weight_hot += weight;
                self.weight_cold -= weight;
                self.num_hot += 1;
                self.num_cold -= 1;
                self.cold_head = self.entries.unlink(idx);
                self.hot_head = Some(self.entries.link(idx, self.hot_head));
                // The promotion may overflow the hot budget; trim it.
                while self.weight_hot > self.weight_target_hot && self.advance_hot(lcs) {}
                return true;
            }

            if self.lifecycle.is_pinned(&resident.key, &resident.value) {
                // Pinned entries cannot be evicted; after a full lap of the
                // cold ring, fall back to the hot list.
                if Some(next) == self.cold_head {
                    return self.advance_hot(lcs);
                }
                idx = next;
                continue;
            }

            self.weight_cold -= self.weighter.weight(&resident.key, &resident.value);
            self.lifecycle
                .before_evict(lcs, &resident.key, &mut resident.value);
            if self.weighter.weight(&resident.key, &resident.value) == 0 {
                // `before_evict` zeroed the weight: keep the entry resident
                // but drop it from the cold list.
                self.cold_head = self.entries.unlink(idx);
                return true;
            }
            // Evict: replace the resident with a ghost recording its hash.
            let hash = Self::hash_static(&self.hash_builder, &resident.key);
            let Entry::Resident(evicted) = mem::replace(entry, Entry::Ghost(hash)) else {
                // SAFETY: `entry` was matched as `Resident` just above.
                unsafe { core::hint::unreachable_unchecked() };
            };
            self.cold_head = self.entries.unlink(idx);
            self.ghost_head = Some(self.entries.link(idx, self.ghost_head));
            self.num_cold -= 1;
            self.num_non_resident += 1;
            if self.num_non_resident > self.capacity_non_resident {
                // Ghost list over capacity: drop the oldest ghost.
                self.advance_ghost();
            }
            self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
            return true;
        }
    }
790
    /// Advances the hot CLOCK hand once: ages reference counts and evicts
    /// the first unreferenced, unpinned hot entry (hot evictions leave no
    /// ghost). Returns `false` only when every hot entry is pinned.
    #[must_use]
    fn advance_hot(&mut self, lcs: &mut L::RequestState) -> bool {
        // Counts unpinned entries seen in the current lap so a ring of
        // all-pinned entries terminates instead of spinning forever.
        let mut unpinned = 0usize;
        let Some(mut idx) = self.hot_head else {
            return false;
        };
        loop {
            let (entry, next) = self.entries.get_mut(idx).unwrap();
            let Entry::Resident(resident) = entry else {
                // SAFETY: the hot list links only resident entries.
                unsafe { unreachable_unchecked() };
            };
            debug_assert_eq!(resident.state, ResidentState::Hot);
            if self.lifecycle.is_pinned(&resident.key, &resident.value) {
                // Pinned: age its counter but never evict it.
                *resident.referenced.get_mut() = (*resident.referenced.get_mut())
                    .min(MAX_F)
                    .saturating_sub(1);
                if Some(next) == self.hot_head {
                    // Completed a full lap without finding a victim.
                    if unpinned == 0 {
                        return false;
                    }
                    unpinned = 0;
                }
                idx = next;
                continue;
            }
            unpinned += 1;
            if *resident.referenced.get_mut() != 0 {
                // Recently referenced: second chance.
                *resident.referenced.get_mut() = (*resident.referenced.get_mut()).min(MAX_F) - 1;
                idx = next;
                continue;
            }
            self.weight_hot -= self.weighter.weight(&resident.key, &resident.value);
            self.lifecycle
                .before_evict(lcs, &resident.key, &mut resident.value);
            if self.weighter.weight(&resident.key, &resident.value) == 0 {
                // `before_evict` zeroed the weight: keep it resident but
                // off the hot list.
                self.hot_head = self.entries.unlink(idx);
            } else {
                // Evict for real; hot entries are not turned into ghosts.
                self.num_hot -= 1;
                let hash = Self::hash_static(&self.hash_builder, &resident.key);
                let Some((Entry::Resident(evicted), next)) = self.entries.remove(idx) else {
                    // SAFETY: `idx` was just observed to hold a resident.
                    unsafe { core::hint::unreachable_unchecked() };
                };
                self.hot_head = next;
                self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
                self.map_remove(hash, idx);
            }
            return true;
        }
    }
844
    /// Drops the oldest ghost (at `ghost_head`). The caller must ensure at
    /// least one ghost exists (`unwrap` on the head).
    #[inline]
    fn advance_ghost(&mut self) {
        debug_assert_ne!(self.num_non_resident, 0);
        let idx = self.ghost_head.unwrap();
        let (entry, _) = self.entries.get_mut(idx).unwrap();
        let Entry::Ghost(hash) = *entry else {
            // SAFETY: the ghost list links only ghost entries.
            unsafe { unreachable_unchecked() };
        };
        self.num_non_resident -= 1;
        self.map_remove(hash, idx);
        let (_, next) = self.entries.remove(idx).unwrap();
        self.ghost_head = next;
    }
858
    /// Overwrites the existing entry at `idx` (resident, ghost, or
    /// placeholder) with `key`/`value`, then evicts cold entries while the
    /// shard is over capacity.
    ///
    /// `Replace` strategies fail (`Err` hands back the pair) unless the
    /// slot currently holds a resident.
    fn insert_existing(
        &mut self,
        lcs: &mut L::RequestState,
        idx: Token,
        key: Key,
        value: Val,
        weight: u64,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        let (entry, _) = self.entries.get_mut(idx).unwrap();
        let referenced;
        let enter_state;
        match entry {
            Entry::Resident(resident) => {
                // Stay on the same list; bump the CLOCK counter unless
                // this is a "soft" replace.
                enter_state = resident.state;
                referenced = resident
                    .referenced
                    .get_mut()
                    .saturating_add(
                        !matches!(strategy, InsertStrategy::Replace { soft: true }) as u16
                    )
                    .min(MAX_F);
            }
            _ if matches!(strategy, InsertStrategy::Replace { .. }) => {
                // Replace requires a resident entry.
                return Err((key, value));
            }
            Entry::Ghost(_) => {
                // A recently evicted key returning: admit straight to hot.
                referenced = 0;
                enter_state = ResidentState::Hot;
            }
            Entry::Placeholder(ph) => {
                // Enter the list the placeholder was destined for.
                referenced = 1;
                enter_state = ph.hot;
            }
        }

        let evicted = mem::replace(
            entry,
            Entry::Resident(Resident {
                key,
                value,
                state: enter_state,
                referenced: referenced.into(),
            }),
        );
        // Fix up counters and list links according to what was displaced.
        match evicted {
            Entry::Resident(evicted) => {
                debug_assert_eq!(evicted.state, enter_state);
                let evicted_weight = self.weighter.weight(&evicted.key, &evicted.value);
                let list_head = if enter_state == ResidentState::Hot {
                    self.weight_hot -= evicted_weight;
                    self.weight_hot += weight;
                    &mut self.hot_head
                } else {
                    self.weight_cold -= evicted_weight;
                    self.weight_cold += weight;
                    &mut self.cold_head
                };
                // Link/unlink only when the weight crosses zero
                // (zero-weight entries stay off the lists).
                if evicted_weight == 0 && weight != 0 {
                    *list_head = Some(self.entries.link(idx, *list_head));
                } else if evicted_weight != 0 && weight == 0 {
                    *list_head = self.entries.unlink(idx);
                }
                self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
            }
            Entry::Ghost(_) => {
                self.weight_hot += weight;
                self.num_hot += 1;
                self.num_non_resident -= 1;
                // Move the slot from the ghost list to the hot list.
                let next_ghost = self.entries.unlink(idx);
                if self.ghost_head == Some(idx) {
                    self.ghost_head = next_ghost;
                }
                if weight != 0 {
                    self.hot_head = Some(self.entries.link(idx, self.hot_head));
                }
            }
            Entry::Placeholder(_) => {
                // Placeholders were never linked; just link the new entry.
                let list_head = if enter_state == ResidentState::Hot {
                    self.num_hot += 1;
                    self.weight_hot += weight;
                    &mut self.hot_head
                } else {
                    self.num_cold += 1;
                    self.weight_cold += weight;
                    &mut self.cold_head
                };
                if weight != 0 {
                    *list_head = Some(self.entries.link(idx, *list_head));
                }
            }
        }

        // Evict until back under capacity (or nothing more can be evicted).
        while self.weight_hot + self.weight_cold > self.weight_capacity && self.advance_cold(lcs) {}
        Ok(())
    }
956
    /// Inserts `idx` into the hash index under `hash`. The closure
    /// re-derives every existing bucket's hash for a potential rehash
    /// (ghosts store their hash directly).
    #[inline]
    fn map_insert(&mut self, hash: u64, idx: Token) {
        self.map.insert_unique(hash, idx, |&i| {
            let (entry, _) = self.entries.get(i).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. }) => {
                    Self::hash_static(&self.hash_builder, key)
                }
                Entry::Ghost(hash) => *hash,
            }
        });
    }
970
    /// Removes the map bucket pairing `hash` with `idx`. A missing bucket
    /// indicates a bookkeeping bug: it panics under debug assertions and
    /// is silently ignored in release builds.
    #[inline]
    fn map_remove(&mut self, hash: u64, idx: Token) {
        if let Ok(entry) = self.map.find_entry(hash, |&i| i == idx) {
            entry.remove();
            return;
        }
        #[cfg(debug_assertions)]
        panic!("key not found");
    }
980
    /// Fulfills `placeholder` with `value`, turning its slot into a
    /// resident entry. Fails (returning the value) if the slot no longer
    /// holds this exact placeholder.
    pub fn replace_placeholder(
        &mut self,
        lcs: &mut L::RequestState,
        placeholder: &Plh,
        referenced: bool,
        mut value: Val,
    ) -> Result<(), Val> {
        // Re-validate that the slot still holds *this* placeholder.
        let entry = match self.entries.get_mut(placeholder.idx()) {
            Some((entry, _)) if matches!(&*entry, Entry::Placeholder(p) if p.shared.same_as(placeholder)) => {
                entry
            }
            _ => return Err(value),
        };
        // Park a dummy ghost in the slot so the key can be moved out.
        let Entry::Placeholder(Placeholder {
            key,
            hot: mut placeholder_hot,
            ..
        }) = mem::replace(entry, Entry::Ghost(0))
        else {
            // SAFETY: matched as a placeholder just above.
            unsafe { core::hint::unreachable_unchecked() };
        };
        let mut weight = self.weighter.weight(&key, &value);
        if weight > self.weight_target_hot && !self.lifecycle.is_pinned(&key, &value) {
            // Too heavy for the hot budget: give the lifecycle a chance to
            // shrink it, otherwise discard immediately.
            self.lifecycle.before_evict(lcs, &key, &mut value);
            weight = self.weighter.weight(&key, &value);
            if weight > self.weight_target_hot {
                return self.handle_overweight_replace_placeholder(lcs, placeholder, key, value);
            }
        }

        // Admit straight to hot while the hot budget still has room.
        if self.weight_hot + weight <= self.weight_target_hot {
            placeholder_hot = ResidentState::Hot;
        }
        *entry = Entry::Resident(Resident {
            key,
            value,
            state: placeholder_hot,
            referenced: (referenced as u16).into(),
        });

        let list_head = if placeholder_hot == ResidentState::Hot {
            self.num_hot += 1;
            self.weight_hot += weight;
            &mut self.hot_head
        } else {
            self.num_cold += 1;
            self.weight_cold += weight;
            &mut self.cold_head
        };

        // Zero-weight entries stay off the lists and cannot overflow the
        // capacity, so linking and eviction are skipped.
        if weight != 0 {
            *list_head = Some(self.entries.link(placeholder.idx(), *list_head));
            while self.weight_hot + self.weight_cold > self.weight_capacity
                && self.advance_cold(lcs)
            {}
        }

        Ok(())
    }
1044
    /// Fulfillment path for a value too heavy to ever be cached: drop the
    /// (already ghost-stubbed) slot and its map bucket, and hand the pair
    /// straight to `on_evict`. Returns `Ok` — the value is reported as
    /// evicted rather than as an error.
    #[cold]
    fn handle_overweight_replace_placeholder(
        &mut self,
        lcs: &mut L::RequestState,
        placeholder: &Plh,
        key: Key,
        value: Val,
    ) -> Result<(), Val> {
        self.entries.remove(placeholder.idx());
        self.map_remove(placeholder.hash(), placeholder.idx());
        self.lifecycle.on_evict(lcs, key, value);
        Ok(())
    }
1058
    /// Inserts or replaces `key`/`value` per `strategy`. `Err` returns the
    /// pair when a `Replace` finds no existing entry, or when an
    /// over-weight `Replace` cannot be stored.
    pub fn insert(
        &mut self,
        lcs: &mut L::RequestState,
        hash: u64,
        key: Key,
        mut value: Val,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        let mut weight = self.weighter.weight(&key, &value);
        if weight > self.weight_target_hot && !self.lifecycle.is_pinned(&key, &value) {
            // Too heavy for the hot budget: let the lifecycle shrink it,
            // otherwise take the overweight slow path.
            self.lifecycle.before_evict(lcs, &key, &mut value);
            weight = self.weighter.weight(&key, &value);
            if weight > self.weight_target_hot {
                return self.handle_insert_overweight(lcs, hash, key, value, strategy);
            }
        }

        if let Some(idx) = self.search(hash, &key) {
            return self.insert_existing(lcs, idx, key, value, weight, strategy);
        } else if matches!(strategy, InsertStrategy::Replace { .. }) {
            // Replace requires an existing entry.
            return Err((key, value));
        }

        // New entries go hot while the hot budget has room, else cold.
        let enter_hot = self.weight_hot + weight <= self.weight_target_hot;
        // Evict until the new entry fits (or nothing more can be evicted).
        while self.weight_hot + self.weight_cold + weight > self.weight_capacity
            && self.advance_cold(lcs)
        {}

        let (state, list_head) = if enter_hot {
            self.num_hot += 1;
            self.weight_hot += weight;
            (ResidentState::Hot, &mut self.hot_head)
        } else {
            self.num_cold += 1;
            self.weight_cold += weight;
            (ResidentState::Cold, &mut self.cold_head)
        };
        let idx = self.entries.insert(Entry::Resident(Resident {
            key,
            value,
            state,
            referenced: Default::default(),
        }));
        // Zero-weight entries stay off the CLOCK lists.
        if weight != 0 {
            *list_head = Some(self.entries.link(idx, *list_head));
        }
        self.map_insert(hash, idx);
        Ok(())
    }
1112
    /// Insert path for a value too heavy to ever fit the hot budget: the
    /// pair cannot be cached, so any existing resident for the key is
    /// removed (and reported via `on_evict`); the new pair is then either
    /// handed back (`Replace` fails) or itself passed to `on_evict`.
    #[cold]
    fn handle_insert_overweight(
        &mut self,
        lcs: &mut L::RequestState,
        hash: u64,
        key: Key,
        value: Val,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        if let Some((idx, _)) = self.search_resident(hash, &key) {
            if let Some((ek, ev)) = self.remove_internal(hash, idx) {
                self.lifecycle.on_evict(lcs, ek, ev);
            }
        }
        if matches!(strategy, InsertStrategy::Replace { .. }) {
            return Err((key, value));
        }
        self.lifecycle.on_evict(lcs, key, value);
        Ok(())
    }
1134
    /// Gets the value for `key` (bumping its CLOCK counter) or registers a
    /// placeholder: `Err((plh, is_new))` where `is_new` is `false` when a
    /// placeholder for this key already existed.
    pub fn get_or_placeholder<Q>(
        &mut self,
        hash: u64,
        key: &Q,
    ) -> Result<(Token, &Val), (Plh, bool)>
    where
        Q: Hash + Equivalent<Key> + ToOwned<Owned = Key> + ?Sized,
    {
        let idx = self.search(hash, key);
        if let Some(idx) = idx {
            if let Some((Entry::Resident(resident), _)) = self.entries.get_mut(idx) {
                if *resident.referenced.get_mut() < MAX_F {
                    *resident.referenced.get_mut() += 1;
                }
                record_hit_mut!(self);
                // Launder the reference through a raw pointer to decouple
                // its lifetime from this `if let` borrow (borrow-checker
                // limitation workaround; nothing aliases it afterwards).
                unsafe {
                    let value_ptr: *const Val = &resident.value;
                    return Ok((idx, &*value_ptr));
                }
            }
        }
        // SAFETY: `idx` came from `search` for this key/hash and the
        // resident case returned above, so it is not a resident entry.
        let (shared, is_new) = unsafe { self.non_resident_to_placeholder(hash, key, idx) };
        Err((shared, is_new))
    }
1161
    /// Looks up `key`; if resident, invokes `on_occupied` (under a weight
    /// guard so the callback may mutate the value) and applies the
    /// returned `EntryAction`. Otherwise creates or reuses a placeholder.
    pub fn entry_or_placeholder<Q, T, F>(
        &mut self,
        hash: u64,
        key: &Q,
        on_occupied: &mut F,
    ) -> EntryOrPlaceholder<Key, Val, Plh, T>
    where
        Q: Hash + Equivalent<Key> + ToOwned<Owned = Key> + ?Sized,
        F: FnMut(&Key, &mut Val) -> EntryAction<T>,
    {
        let idx = self.search(hash, key);
        if let Some(idx) = idx {
            let shard = self as *mut _;
            if let Some((Entry::Resident(r), _)) = self.entries.get_mut(idx) {
                let action = {
                    // Raw pointers let the callback borrow key/value while
                    // the guard holds a second (raw) handle to the shard;
                    // the guard reconciles weight bookkeeping on drop.
                    let (key_ptr, val_ptr) = (&r.key as *const Key, &mut r.value as *mut Val);
                    let _guard = WeightGuard::<Key, Val, We, B, L, Plh> {
                        idx,
                        old_weight: self.weighter.weight(&r.key, &r.value),
                        shard,
                    };
                    on_occupied(unsafe { &*key_ptr }, unsafe { &mut *val_ptr })
                };

                return match action {
                    EntryAction::Retain(t) => {
                        record_hit_mut!(self);
                        // Re-borrow the entry; the earlier borrow ended
                        // with the guard scope above.
                        let Some((Entry::Resident(resident), _)) = self.entries.get_mut(idx) else {
                            // SAFETY: the slot held a resident above and
                            // nothing removed it since.
                            unsafe { unreachable_unchecked() };
                        };
                        if *resident.referenced.get_mut() < MAX_F {
                            *resident.referenced.get_mut() += 1;
                        }
                        EntryOrPlaceholder::Kept(t)
                    }
                    EntryAction::Remove => {
                        let (key, val) = self.remove_internal(hash, idx).unwrap();
                        EntryOrPlaceholder::Removed(key, val)
                    }
                    EntryAction::ReplaceWithGuard => {
                        let Some((Entry::Resident(r), _)) = self.entries.get_mut(idx) else {
                            // SAFETY: same reasoning as in `Retain` above.
                            unsafe { unreachable_unchecked() };
                        };
                        // Detach the resident from its list and counters...
                        let state = r.state;
                        let current_weight = self.weighter.weight(&r.key, &r.value);
                        let list_head = if state == ResidentState::Hot {
                            self.num_hot -= 1;
                            self.weight_hot -= current_weight;
                            &mut self.hot_head
                        } else {
                            self.num_cold -= 1;
                            self.weight_cold -= current_weight;
                            &mut self.cold_head
                        };
                        if current_weight != 0 {
                            let next = self.entries.unlink(idx);
                            if *list_head == Some(idx) {
                                *list_head = next;
                            }
                        }
                        // ...then swap in a placeholder keeping the key
                        // and the list it should re-enter on fulfillment.
                        let shared = Plh::new(hash, idx);
                        let (entry, _) = unsafe { self.entries.get_mut_unchecked(idx) };
                        let Entry::Resident(r) = mem::replace(entry, Entry::Ghost(0)) else {
                            unsafe { unreachable_unchecked() }
                        };
                        *entry = Entry::Placeholder(Placeholder {
                            key: r.key,
                            hot: state,
                            shared: shared.clone(),
                        });
                        EntryOrPlaceholder::Replaced(shared, r.value)
                    }
                };
            }
        }
        // SAFETY: `idx` came from `search` and the resident case returned
        // above, so it is not a resident entry.
        let (shared, is_new) = unsafe { self.non_resident_to_placeholder(hash, key, idx) };
        if is_new {
            EntryOrPlaceholder::NewPlaceholder(shared)
        } else {
            EntryOrPlaceholder::ExistingPlaceholder(shared)
        }
    }
1258
    /// Turns a non-resident map slot into a placeholder entry, returning the
    /// shared placeholder plus `true` if it was newly created (recorded as a
    /// miss) or `false` if an existing placeholder was reused (recorded as a
    /// hit).
    ///
    /// Behavior by current slot state:
    /// * `Some(idx)` + `Placeholder` — hand out a clone of the existing
    ///   shared placeholder.
    /// * `Some(idx)` + `Ghost` — revive the ghost: it becomes a placeholder
    ///   destined for the hot section and is unlinked from the ghost list.
    /// * `None` — insert a brand-new placeholder destined for the cold
    ///   section and register its token in the hash map.
    ///
    /// # Safety
    ///
    /// If `idx` is `Some`, it must be a valid token into `self.entries` and
    /// the entry there must NOT be `Entry::Resident` (that case is treated
    /// as `unreachable_unchecked`).
    unsafe fn non_resident_to_placeholder<Q>(
        &mut self,
        hash: u64,
        key: &Q,
        idx: Option<Token>,
    ) -> (Plh, bool)
    where
        Q: Hash + Equivalent<Key> + ToOwned<Owned = Key> + ?Sized,
    {
        if let Some(idx) = idx {
            let (entry, _) = unsafe { self.entries.get_mut_unchecked(idx) };
            match entry {
                Entry::Placeholder(p) => {
                    // Another caller already installed a placeholder here;
                    // share it instead of creating a second one.
                    record_hit_mut!(self);
                    (p.shared.clone(), false)
                }
                Entry::Ghost(_) => {
                    // Ghost hit: the key was seen recently, so the revived
                    // entry is marked for the hot section.
                    let shared = Plh::new(hash, idx);
                    *entry = Entry::Placeholder(Placeholder {
                        key: key.to_owned(),
                        hot: ResidentState::Hot,
                        shared: shared.clone(),
                    });
                    self.num_non_resident -= 1;
                    // Unlink the slot from the ghost list, fixing up the
                    // list head if it pointed at this slot.
                    let next = self.entries.unlink(idx);
                    if self.ghost_head == Some(idx) {
                        self.ghost_head = next;
                    }
                    record_miss_mut!(self);
                    (shared, true)
                }
                Entry::Resident(_) => unsafe { unreachable_unchecked() },
            }
        } else {
            // No slot for this key yet: predict the next free token so the
            // shared placeholder can carry it, then perform the insert and
            // check the prediction in debug builds.
            let idx = self.entries.next_free();
            let shared = Plh::new(hash, idx);
            let idx_ = self.entries.insert(Entry::Placeholder(Placeholder {
                key: key.to_owned(),
                hot: ResidentState::Cold,
                shared: shared.clone(),
            }));
            debug_assert_eq!(idx, idx_);
            self.map_insert(hash, idx);
            record_miss_mut!(self);
            (shared, true)
        }
    }
1309
1310 pub fn set_capacity(&mut self, new_weight_capacity: u64) {
1311 if self.weight_capacity == 0 {
1313 self.weight_capacity = new_weight_capacity;
1314 self.weight_target_hot = ((new_weight_capacity as f64 * DEFAULT_HOT_ALLOCATION) as u64)
1315 .clamp(new_weight_capacity.min(1), new_weight_capacity);
1316 } else {
1318 let old_new_ratio = new_weight_capacity as f64 / self.weight_capacity as f64;
1319 let hot_ratio = self.weight_target_hot as f64 / self.weight_capacity as f64;
1320
1321 self.weight_capacity = new_weight_capacity;
1322 self.weight_target_hot = ((new_weight_capacity as f64 * hot_ratio) as u64)
1323 .clamp(new_weight_capacity.min(1), new_weight_capacity);
1324 self.capacity_non_resident =
1325 (self.capacity_non_resident as f64 * old_new_ratio) as usize;
1326 }
1327
1328 let mut lcs = self.lifecycle.begin_request();
1330 while self.weight_hot + self.weight_cold > self.weight_capacity
1331 && self.advance_cold(&mut lcs)
1332 {}
1333 self.lifecycle.end_request(lcs);
1334 while self.num_non_resident > self.capacity_non_resident {
1336 self.advance_ghost();
1337 }
1338 }
1339}
1340
/// Guard that tracks the weight of a resident entry across a caller-visible
/// mutation: on drop (see the `Drop` impl below) the entry is re-weighed and
/// any change relative to `old_weight` is applied to the shard's accounting.
struct WeightGuard<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> {
    // Raw pointer back to the owning shard; must stay valid for the guard's
    // lifetime. NOTE(review): validity is the constructor's responsibility —
    // confirm every construction site keeps the shard borrow alive.
    shard: *mut CacheShard<Key, Val, We, B, L, Plh>,
    // Slab token of the resident entry being guarded.
    idx: Token,
    // Entry weight captured before the caller could mutate the value.
    old_weight: u64,
}
1348
1349impl<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> Drop
1350 for WeightGuard<Key, Val, We, B, L, Plh>
1351{
1352 fn drop(&mut self) {
1353 unsafe {
1356 let shard = &mut *self.shard;
1357 let (entry, _) = shard.entries.get_unchecked(self.idx);
1358 let Entry::Resident(r) = entry else {
1359 unreachable_unchecked()
1360 };
1361 let new_weight = shard.weighter.weight(&r.key, &r.value);
1362 if self.old_weight != new_weight {
1363 shard.cold_change_weight(self.idx, self.old_weight, new_weight);
1364 }
1365 }
1366 }
1367}
1368
/// Mutable handle to a resident cache entry. Wraps a `WeightGuard` so that
/// when the handle is dropped the entry is re-weighed and the shard's weight
/// totals are updated.
pub struct RefMut<'cache, Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> {
    // Holds the raw shard pointer plus the entry's token and prior weight.
    guard: WeightGuard<Key, Val, We, B, L, Plh>,
    // Ties the raw pointer inside `guard` to a `'cache` mutable borrow of the
    // shard, so the borrow checker enforces exclusivity for this lifetime.
    _phantom: std::marker::PhantomData<&'cache mut CacheShard<Key, Val, We, B, L, Plh>>,
}
1375
1376impl<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder>
1377 RefMut<'_, Key, Val, We, B, L, Plh>
1378{
1379 pub(crate) fn pair(&self) -> (&Key, &Val) {
1380 unsafe {
1383 let shard = &*self.guard.shard;
1384 let (entry, _) = shard.entries.get_unchecked(self.guard.idx);
1385 let Entry::Resident(Resident { key, value, .. }) = entry else {
1386 core::hint::unreachable_unchecked()
1387 };
1388 (key, value)
1389 }
1390 }
1391
1392 pub(crate) fn value_mut(&mut self) -> &mut Val {
1393 unsafe {
1395 let shard = &mut *self.guard.shard;
1396 let (entry, _) = shard.entries.get_mut_unchecked(self.guard.idx);
1397 let Entry::Resident(Resident { value, .. }) = entry else {
1398 core::hint::unreachable_unchecked()
1399 };
1400 value
1401 }
1402 }
1403}
1404
#[cfg(test)]
mod tests {
    use super::*;

    /// Guards against accidental growth of the per-entry overhead: an
    /// `Entry<u64, u64, _>` must cost exactly 16 bytes beyond its key/value
    /// payload, for both the sync and unsync placeholder types.
    #[test]
    fn entry_overhead() {
        use std::mem::size_of;
        let payload = size_of::<[u64; 2]>();
        let sync_overhead =
            size_of::<Entry<u64, u64, crate::sync_placeholder::SharedPlaceholder<u64>>>() - payload;
        let unsync_overhead =
            size_of::<Entry<u64, u64, crate::unsync::SharedPlaceholder>>() - payload;
        assert_eq!(sync_overhead, 16);
        assert_eq!(unsync_overhead, 16);
    }
}
1422}