1use std::{
2 hash::{BuildHasher, Hash},
3 hint::unreachable_unchecked,
4 mem::{self, MaybeUninit},
5};
6
7use hashbrown::HashTable;
8
9use crate::{
10 linked_slab::{LinkedSlab, Token},
11 shim::sync::atomic::{self, AtomicU16},
12 Equivalent, Lifecycle, MemoryUsed, Weighter,
13};
14
15#[cfg(feature = "stats")]
16use crate::shim::sync::atomic::AtomicU64;
17
// Upper bound for an entry's `referenced` access-frequency counter.
const MAX_F: u16 = 2;
20
/// Handle shared between a shard and the waiters of an in-flight
/// (placeholder) entry. It records the entry's hash and its slab token so
/// both sides can find and identify the same slot later.
pub trait SharedPlaceholder: Clone {
    fn new(hash: u64, idx: Token) -> Self;
    /// Whether `self` and `other` refer to the same placeholder instance.
    fn same_as(&self, other: &Self) -> bool;
    /// The hash recorded at creation.
    fn hash(&self) -> u64;
    /// The slab token recorded at creation.
    fn idx(&self) -> Token;
}
27
/// How `insert` treats an existing or missing entry.
pub enum InsertStrategy {
    /// Insert unconditionally, creating the entry if absent.
    Insert,
    /// Only replace an existing resident entry; fails otherwise.
    /// `soft: true` skips bumping the entry's `referenced` counter
    /// (see `insert_existing`).
    Replace { soft: bool },
}
32
/// Which clock list a resident entry currently belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ResidentState {
    Hot,
    Cold,
}
38
/// A key/value pair currently resident in the shard.
#[derive(Debug)]
pub struct Resident<Key, Val> {
    key: Key,
    value: Val,
    // Hot or cold list membership.
    state: ResidentState,
    // Access-frequency counter, capped at MAX_F. Atomic so shared-reference
    // reads (`get`) can still record accesses.
    referenced: AtomicU16,
}
46
47impl<Key: Clone, Val: Clone> Clone for Resident<Key, Val> {
48 #[inline]
49 fn clone(&self) -> Self {
50 Self {
51 key: self.key.clone(),
52 value: self.value.clone(),
53 state: self.state,
54 referenced: self.referenced.load(atomic::Ordering::Relaxed).into(),
55 }
56 }
57}
58
/// An entry reserved for a value that is still being computed/fetched.
#[derive(Debug, Clone)]
struct Placeholder<Key, Plh> {
    key: Key,
    // The list (hot/cold) the entry will enter once the value arrives.
    hot: ResidentState,
    // Shared handle used to match this placeholder with its waiters.
    shared: Plh,
}
65
/// A slot in the shard's entry slab.
#[derive(Clone)]
enum Entry<Key, Val, Plh> {
    Resident(Resident<Key, Val>),
    Placeholder(Placeholder<Key, Plh>),
    /// Non-resident ("ghost") entry: only the evicted key's hash is kept.
    Ghost(u64),
}
72
/// One shard of the cache.
///
/// Residents live in two circular ("clock") lists — hot and cold — plus a
/// third list of ghost entries that remember only the hashes of recently
/// evicted keys. Zero-weight residents are kept in the map but are not
/// linked into any list (see the `weight != 0` checks throughout).
pub struct CacheShard<Key, Val, We, B, L, Plh> {
    hash_builder: B,
    // hash -> Token index into `entries`.
    map: HashTable<Token>,
    // Slab holding every entry; also provides the intrusive linked lists.
    entries: LinkedSlab<Entry<Key, Val, Plh>>,
    // Heads of the three circular lists (None when a list is empty).
    cold_head: Option<Token>,
    hot_head: Option<Token>,
    ghost_head: Option<Token>,
    // Weight budget reserved for the hot section.
    weight_target_hot: u64,
    // Total weight budget of the shard.
    weight_capacity: u64,
    weight_hot: u64,
    weight_cold: u64,
    num_hot: usize,
    num_cold: usize,
    num_non_resident: usize,
    // Maximum number of ghost entries retained.
    capacity_non_resident: usize,
    #[cfg(feature = "stats")]
    hits: AtomicU64,
    #[cfg(feature = "stats")]
    misses: AtomicU64,
    weighter: We,
    pub(crate) lifecycle: L,
}
107
/// Manual `Clone`: the stats counters (when enabled) are atomics, so they
/// are snapshotted with relaxed loads rather than derived.
impl<Key: Clone, Val: Clone, We: Clone, B: Clone, L: Clone, Plh: Clone> Clone
    for CacheShard<Key, Val, We, B, L, Plh>
{
    fn clone(&self) -> Self {
        Self {
            hash_builder: self.hash_builder.clone(),
            map: self.map.clone(),
            entries: self.entries.clone(),
            cold_head: self.cold_head,
            hot_head: self.hot_head,
            ghost_head: self.ghost_head,
            weight_target_hot: self.weight_target_hot,
            weight_capacity: self.weight_capacity,
            weight_hot: self.weight_hot,
            weight_cold: self.weight_cold,
            num_hot: self.num_hot,
            num_cold: self.num_cold,
            num_non_resident: self.num_non_resident,
            capacity_non_resident: self.capacity_non_resident,
            #[cfg(feature = "stats")]
            hits: self.hits.load(atomic::Ordering::Relaxed).into(),
            #[cfg(feature = "stats")]
            misses: self.misses.load(atomic::Ordering::Relaxed).into(),
            weighter: self.weighter.clone(),
            lifecycle: self.lifecycle.clone(),
        }
    }
}
136
// Stats-recording helpers. The `*_mut` variants use `get_mut` (no atomic
// RMW needed when the shard is held exclusively). With the "stats" feature
// disabled they all expand to nothing.
#[cfg(feature = "stats")]
macro_rules! record_hit {
    ($self: expr) => {{
        $self.hits.fetch_add(1, atomic::Ordering::Relaxed);
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_hit_mut {
    ($self: expr) => {{
        *$self.hits.get_mut() += 1;
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_miss {
    ($self: expr) => {{
        $self.misses.fetch_add(1, atomic::Ordering::Relaxed);
    }};
}
#[cfg(feature = "stats")]
macro_rules! record_miss_mut {
    ($self: expr) => {{
        *$self.misses.get_mut() += 1;
    }};
}

// No-op versions when stats are disabled.
#[cfg(not(feature = "stats"))]
macro_rules! record_hit {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_hit_mut {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_miss {
    ($self: expr) => {{}};
}
#[cfg(not(feature = "stats"))]
macro_rules! record_miss_mut {
    ($self: expr) => {{}};
}
178
impl<Key, Val, We, B, L, Plh: SharedPlaceholder> CacheShard<Key, Val, We, B, L, Plh> {
    /// Removes the map entry for `placeholder`, but only if the slot still
    /// holds that exact placeholder (it may have been replaced meanwhile).
    /// The slab entry itself is not touched here.
    pub fn remove_placeholder(&mut self, placeholder: &Plh) {
        if let Ok(entry) = self.map.find_entry(placeholder.hash(), |&idx| {
            if idx != placeholder.idx() {
                return false;
            }
            let (entry, _) = self.entries.get(idx).unwrap();
            matches!(entry, Entry::Placeholder(Placeholder { shared, .. }) if shared.same_as(placeholder))
        }) {
            entry.remove();
        }
    }

    /// Slow path of `RefMut::drop`: the resident at `idx` changed weight
    /// from `old_weight` to `new_weight`. Adjusts the hot/cold weight
    /// counter and links/unlinks the entry when its weight transitions
    /// between zero and non-zero (zero-weight residents stay out of the
    /// clock lists).
    #[cold]
    fn cold_change_weight(&mut self, idx: Token, old_weight: u64, new_weight: u64) {
        // SAFETY: a RefMut is only ever constructed for a resident entry.
        let Some((Entry::Resident(resident), _)) = self.entries.get_mut(idx) else {
            unsafe { unreachable_unchecked() };
        };
        let (weight_ptr, target_head) = if resident.state == ResidentState::Hot {
            (&mut self.weight_hot, &mut self.hot_head)
        } else {
            (&mut self.weight_cold, &mut self.cold_head)
        };
        *weight_ptr -= old_weight;
        *weight_ptr += new_weight;

        // Entering/leaving the zero-weight regime changes list membership.
        if old_weight == 0 && new_weight != 0 {
            *target_head = Some(self.entries.link(idx, *target_head));
        } else if old_weight != 0 && new_weight == 0 {
            *target_head = self.entries.unlink(idx);
        }
    }
}
212
impl<Key, Val, We, B, L, Plh> CacheShard<Key, Val, We, B, L, Plh> {
    /// Heap memory used by the entry slab and the hash table.
    pub fn memory_used(&self) -> MemoryUsed {
        MemoryUsed {
            entries: self.entries.memory_used(),
            map: self.map.allocation_size(),
        }
    }

    /// Total weight of all resident entries (hot + cold).
    pub fn weight(&self) -> u64 {
        self.weight_hot + self.weight_cold
    }

    /// Number of resident entries (placeholders and ghosts excluded).
    pub fn len(&self) -> usize {
        self.num_hot + self.num_cold
    }

    /// The shard's configured weight capacity.
    pub fn capacity(&self) -> u64 {
        self.weight_capacity
    }

    #[cfg(feature = "stats")]
    pub fn hits(&self) -> u64 {
        self.hits.load(atomic::Ordering::Relaxed)
    }

    #[cfg(feature = "stats")]
    pub fn misses(&self) -> u64 {
        self.misses.load(atomic::Ordering::Relaxed)
    }

    /// Removes all entries, dropping them in place.
    pub fn clear(&mut self) {
        let _ = self.drain();
    }

    /// Removes all entries, yielding the resident key/value pairs.
    /// Counters, list heads and the map are reset eagerly before the
    /// slab's drain iterator is returned; placeholders and ghosts are
    /// filtered out of the yielded items.
    pub fn drain(&mut self) -> impl Iterator<Item = (Key, Val)> + '_ {
        self.cold_head = None;
        self.hot_head = None;
        self.ghost_head = None;
        self.num_hot = 0;
        self.num_cold = 0;
        self.num_non_resident = 0;
        self.weight_hot = 0;
        self.weight_cold = 0;
        self.map.clear();
        self.entries.drain().filter_map(|i| match i {
            Entry::Resident(r) => Some((r.key, r.value)),
            Entry::Placeholder(_) | Entry::Ghost(_) => None,
        })
    }

    /// Iterates over resident key/value pairs in slab order.
    pub fn iter(&self) -> impl Iterator<Item = (&'_ Key, &'_ Val)> + '_ {
        self.entries.iter().filter_map(|i| match i {
            Entry::Resident(r) => Some((&r.key, &r.value)),
            Entry::Placeholder(_) | Entry::Ghost(_) => None,
        })
    }

    /// Like `iter`, but resumable: starts after `continuation` and also
    /// yields each entry's token so iteration can be continued later.
    pub fn iter_from(
        &self,
        continuation: Option<Token>,
    ) -> impl Iterator<Item = (Token, &'_ Key, &'_ Val)> + '_ {
        self.entries
            .iter_from(continuation)
            .filter_map(|(token, i)| match i {
                Entry::Resident(r) => Some((token, &r.key, &r.value)),
                Entry::Placeholder(_) | Entry::Ghost(_) => None,
            })
    }
}
282
283impl<
284 Key: Eq + Hash,
285 Val,
286 We: Weighter<Key, Val>,
287 B: BuildHasher,
288 L: Lifecycle<Key, Val>,
289 Plh: SharedPlaceholder,
290 > CacheShard<Key, Val, We, B, L, Plh>
291{
292 pub fn new(
293 hot_allocation: f64,
294 ghost_allocation: f64,
295 estimated_items_capacity: usize,
296 weight_capacity: u64,
297 weighter: We,
298 hash_builder: B,
299 lifecycle: L,
300 ) -> Self {
301 let weight_target_hot = (weight_capacity as f64 * hot_allocation) as u64;
302 let capacity_non_resident = (estimated_items_capacity as f64 * ghost_allocation) as usize;
303 Self {
304 hash_builder,
305 map: HashTable::with_capacity(0),
306 entries: LinkedSlab::with_capacity(0),
307 weight_capacity,
308 #[cfg(feature = "stats")]
309 hits: Default::default(),
310 #[cfg(feature = "stats")]
311 misses: Default::default(),
312 cold_head: None,
313 hot_head: None,
314 ghost_head: None,
315 capacity_non_resident,
316 weight_target_hot,
317 num_hot: 0,
318 num_cold: 0,
319 num_non_resident: 0,
320 weight_hot: 0,
321 weight_cold: 0,
322 weighter,
323 lifecycle,
324 }
325 }
326
    /// Test/fuzzing-only invariant check: recomputes all counters and
    /// weights from scratch and asserts they match the shard's bookkeeping.
    #[cfg(any(fuzzing, test))]
    pub fn validate(&self, accept_overweight: bool) {
        self.entries.validate();
        let mut num_hot = 0;
        let mut num_cold = 0;
        let mut num_non_resident = 0;
        let mut weight_hot = 0;
        let mut weight_hot_pinned = 0;
        let mut weight_cold = 0;
        let mut weight_cold_pinned = 0;
        for e in self.entries.iter_entries() {
            match e {
                Entry::Resident(r) if r.state == ResidentState::Cold => {
                    num_cold += 1;
                    let weight = self.weighter.weight(&r.key, &r.value);
                    if self.lifecycle.is_pinned(&r.key, &r.value) {
                        weight_cold_pinned += weight;
                    } else {
                        weight_cold += weight;
                    }
                }
                Entry::Resident(r) => {
                    num_hot += 1;
                    let weight = self.weighter.weight(&r.key, &r.value);
                    if self.lifecycle.is_pinned(&r.key, &r.value) {
                        weight_hot_pinned += weight;
                    } else {
                        weight_hot += weight;
                    }
                }
                Entry::Ghost(_) => {
                    num_non_resident += 1;
                }
                Entry::Placeholder(_) => (),
            }
        }
        assert_eq!(num_hot, self.num_hot);
        assert_eq!(num_cold, self.num_cold);
        assert_eq!(num_non_resident, self.num_non_resident);
        assert_eq!(weight_hot + weight_hot_pinned, self.weight_hot);
        assert_eq!(weight_cold + weight_cold_pinned, self.weight_cold);
        // Pinned weight may legitimately keep the shard over capacity, so
        // the capacity assertion only covers unpinned weight.
        if !accept_overweight {
            assert!(weight_hot + weight_cold <= self.weight_capacity);
        }
        assert!(num_non_resident <= self.capacity_non_resident);
    }
391
    /// Reserves map capacity for `additional` more entries.
    ///
    /// The request is inflated by 50% — NOTE(review): presumably headroom
    /// for ghost/placeholder slots sharing the table; confirm with callers.
    pub fn reserve(&mut self, additional: usize) {
        let additional = additional.saturating_add(additional / 2);
        // Growing the table re-hashes buckets: recompute each entry's hash
        // from its key, or reuse the stored hash for ghost entries.
        self.map.reserve(additional, |&idx| {
            let (entry, _) = self.entries.get(idx).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. }) => {
                    Self::hash_static(&self.hash_builder, key)
                }
                Entry::Ghost(non_resident_hash) => *non_resident_hash,
            }
        })
    }
408
409 pub fn retain<F>(&mut self, f: F)
410 where
411 F: Fn(&Key, &Val) -> bool,
412 {
413 let retained_tokens = self
414 .map
415 .iter()
416 .filter_map(|&idx| match self.entries.get(idx) {
417 Some((entry, _idx)) => match entry {
418 Entry::Resident(r) => {
419 if !f(&r.key, &r.value) {
420 let hash = self.hash(&r.key);
421 Some((idx, hash))
422 } else {
423 None
424 }
425 }
426 Entry::Placeholder(_) | Entry::Ghost(_) => None,
427 },
428 None => None,
429 })
430 .collect::<Vec<_>>();
431 for (idx, hash) in retained_tokens {
432 self.remove_internal(hash, idx);
433 }
434 }
435
    /// Hashes `key` with the given hasher. Static (no `&self`) so it can be
    /// called while `self` is partially borrowed, e.g. inside the rehash
    /// closures passed to the map.
    #[inline]
    fn hash_static<Q>(hasher: &B, key: &Q) -> u64
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        hasher.hash_one(key)
    }

    /// Hashes `key` with this shard's hash builder.
    #[inline]
    pub fn hash<Q>(&self, key: &Q) -> u64
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        Self::hash_static(&self.hash_builder, key)
    }
451
    /// Finds the token for `k`, matching residents and placeholders by key
    /// equality. Ghost entries store no key, so they are matched by hash
    /// alone (a false positive is possible on a hash collision); a key
    /// match always takes precedence over a ghost hash match.
    #[inline]
    fn search<Q>(&self, hash: u64, k: &Q) -> Option<Token>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let mut hash_match = None;
        for bucket in self.map.iter_hash(hash) {
            let idx = *bucket;
            let (entry, _) = self.entries.get(idx).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. })
                    if k.equivalent(key) =>
                {
                    return Some(idx);
                }
                Entry::Ghost(non_resident_hash) if *non_resident_hash == hash => {
                    // Remember the ghost; keep scanning for a real key match.
                    hash_match = Some(idx);
                }
                _ => (),
            }
        }
        hash_match
    }
476
    /// Finds a resident entry for `k`, returning its token and a reference.
    /// Placeholders and ghost entries are ignored.
    #[inline]
    fn search_resident<Q>(&self, hash: u64, k: &Q) -> Option<(Token, &Resident<Key, Val>)>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        // The closure smuggles the matching `&Resident` out through this
        // MaybeUninit, since `find` only yields the `&Token` bucket value.
        let mut resident = MaybeUninit::uninit();
        self.map
            .find(hash, |&idx| {
                let (entry, _) = self.entries.get(idx).unwrap();
                match entry {
                    Entry::Resident(r) if k.equivalent(&r.key) => {
                        resident.write(r);
                        true
                    }
                    _ => false,
                }
            })
            .map(|idx| {
                // SAFETY: `find` returned Some, so the closure returned true
                // for this bucket, which implies `resident` was written.
                (*idx, unsafe { resident.assume_init() })
            })
    }
500
501 pub fn contains<Q>(&self, hash: u64, key: &Q) -> bool
502 where
503 Q: Hash + Equivalent<Key> + ?Sized,
504 {
505 self.map
506 .find(hash, |&idx| {
507 let (entry, _) = self.entries.get(idx).unwrap();
508 matches!(entry, Entry::Resident(r) if key.equivalent(&r.key))
509 })
510 .is_some()
511 }
512
    /// Looks up `key`; on a hit, records the access by bumping the entry's
    /// `referenced` counter (capped at `MAX_F`).
    pub fn get<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        if let Some((_, resident)) = self.search_resident(hash, key) {
            let referenced = resident.referenced.load(atomic::Ordering::Relaxed);
            if referenced < MAX_F {
                // Racy check-then-add: concurrent gets may overshoot MAX_F
                // slightly; `advance_hot` re-clamps with `.min(MAX_F)`.
                resident.referenced.fetch_add(1, atomic::Ordering::Relaxed);
            }
            record_hit!(self);
            Some(&resident.value)
        } else {
            record_miss!(self);
            None
        }
    }
532
    /// Like `get`, but returns a `RefMut` guard that re-weighs the entry on
    /// drop. With exclusive access the counter is updated non-atomically.
    pub fn get_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
    where
        Q: Hash + Equivalent<Key> + ?Sized,
    {
        let Some((idx, _)) = self.search_resident(hash, key) else {
            record_miss_mut!(self);
            return None;
        };
        // SAFETY: `idx` was just returned by `search_resident`, which only
        // matches resident entries.
        let (Entry::Resident(resident), _) = (unsafe { self.entries.get_mut_unchecked(idx) })
        else {
            unsafe { unreachable_unchecked() };
        };
        if *resident.referenced.get_mut() < MAX_F {
            *resident.referenced.get_mut() += 1;
        }
        record_hit_mut!(self);

        // Remember the pre-mutation weight so `RefMut::drop` can detect a change.
        let old_weight = self.weighter.weight(&resident.key, &resident.value);
        Some(RefMut {
            idx,
            old_weight,
            cache: self,
        })
    }
557
558 #[inline]
559 pub fn peek_token(&self, token: Token) -> Option<&Val> {
560 if let Some((Entry::Resident(resident), _)) = self.entries.get(token) {
561 Some(&resident.value)
562 } else {
563 None
564 }
565 }
566
567 #[inline]
568 pub fn peek_token_mut(&mut self, token: Token) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>> {
569 if let Some((Entry::Resident(resident), _)) = self.entries.get_mut(token) {
570 let old_weight = self.weighter.weight(&resident.key, &resident.value);
571 Some(RefMut {
572 old_weight,
573 idx: token,
574 cache: self,
575 })
576 } else {
577 None
578 }
579 }
580
581 pub fn peek<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
582 where
583 Q: Hash + Equivalent<Key> + ?Sized,
584 {
585 let (_, resident) = self.search_resident(hash, key)?;
586 Some(&resident.value)
587 }
588
589 pub fn peek_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
590 where
591 Q: Hash + Equivalent<Key> + ?Sized,
592 {
593 let (idx, _) = self.search_resident(hash, key)?;
594 self.peek_token_mut(idx)
595 }
596
597 pub fn remove<Q>(&mut self, hash: u64, key: &Q) -> Option<(Key, Val)>
598 where
599 Q: Hash + Equivalent<Key> + ?Sized,
600 {
601 let idx = self.search(hash, key)?;
604 self.remove_internal(hash, idx)
605 }
606
607 pub fn remove_if<Q, F>(&mut self, hash: u64, key: &Q, f: F) -> Option<(Key, Val)>
608 where
609 Q: Hash + Equivalent<Key> + ?Sized,
610 F: FnOnce(&Val) -> bool,
611 {
612 let (idx, resident) = self.search_resident(hash, key)?;
613 if f(&resident.value) {
614 self.remove_internal(hash, idx)
615 } else {
616 None
617 }
618 }
619
620 pub fn remove_token(&mut self, token: Token) -> Option<(Key, Val)> {
621 let Some((Entry::Resident(resident), _)) = self.entries.get(token) else {
622 return None;
623 };
624 let hash = Self::hash_static(&self.hash_builder, &resident.key);
625 self.remove_internal(hash, token)
626 }
627
628 pub fn remove_next(&mut self, continuation: Option<Token>) -> Option<(Token, Key, Val)> {
629 let (token, key, _) = self
630 .entries
631 .iter_from(continuation)
632 .filter_map(|(token, i)| match i {
633 Entry::Resident(r) => Some((token, &r.key, &r.value)),
634 Entry::Placeholder(_) | Entry::Ghost(_) => None,
635 })
636 .next()?;
637 let hash = Self::hash_static(&self.hash_builder, key);
638 self.remove_internal(hash, token)
639 .map(|(k, v)| (token, k, v))
640 }
641
    /// Removes the entry at `idx` from both the map and the slab, fixing up
    /// counters, weights and the relevant list head. Returns the key/value
    /// pair for residents; placeholders and ghosts yield `None` (a
    /// placeholder removed here is simply dropped).
    fn remove_internal(&mut self, hash: u64, idx: Token) -> Option<(Key, Val)> {
        self.map_remove(hash, idx);
        let mut result = None;
        let (entry, next) = self.entries.remove(idx).unwrap();
        let list_head = match entry {
            Entry::Resident(r) => {
                let weight = self.weighter.weight(&r.key, &r.value);
                result = Some((r.key, r.value));
                if r.state == ResidentState::Hot {
                    self.num_hot -= 1;
                    self.weight_hot -= weight;
                    &mut self.hot_head
                } else {
                    debug_assert!(r.state == ResidentState::Cold);
                    self.num_cold -= 1;
                    self.weight_cold -= weight;
                    &mut self.cold_head
                }
            }
            Entry::Ghost(_) => {
                self.num_non_resident -= 1;
                &mut self.ghost_head
            }
            Entry::Placeholder(_) => {
                // Placeholders are not linked into any list; nothing to fix.
                return None;
            }
        };
        // If the removed entry was its list's head, advance the head.
        if *list_head == Some(idx) {
            *list_head = next;
        }
        result
    }
676
    /// Advances the cold clock hand one step, trying to free weight.
    /// Returns `false` only when nothing could be evicted or promoted.
    #[must_use]
    fn advance_cold(&mut self, lcs: &mut L::RequestState) -> bool {
        // No cold entries: try evicting from the hot section instead.
        let Some(mut idx) = self.cold_head else {
            return self.advance_hot(lcs);
        };
        loop {
            let (entry, next) = self.entries.get_mut(idx).unwrap();
            // SAFETY: only resident entries are linked in the cold list.
            let Entry::Resident(resident) = entry else {
                unsafe { unreachable_unchecked() };
            };
            debug_assert_eq!(resident.state, ResidentState::Cold);
            if *resident.referenced.get_mut() != 0 {
                // Accessed since entering cold: promote to hot instead of
                // evicting, then trim hot if it exceeds its target.
                *resident.referenced.get_mut() -= 1;
                resident.state = ResidentState::Hot;
                let weight = self.weighter.weight(&resident.key, &resident.value);
                self.weight_hot += weight;
                self.weight_cold -= weight;
                self.num_hot += 1;
                self.num_cold -= 1;
                self.cold_head = self.entries.unlink(idx);
                self.hot_head = Some(self.entries.link(idx, self.hot_head));
                while self.weight_hot > self.weight_target_hot && self.advance_hot(lcs) {}
                return true;
            }

            if self.lifecycle.is_pinned(&resident.key, &resident.value) {
                // Pinned entries cannot be evicted; after a full lap of the
                // cold list, fall back to the hot list.
                if Some(next) == self.cold_head {
                    return self.advance_hot(lcs);
                }
                idx = next;
                continue;
            }

            // Evict this entry. `before_evict` may shrink the value...
            self.weight_cold -= self.weighter.weight(&resident.key, &resident.value);
            self.lifecycle
                .before_evict(lcs, &resident.key, &mut resident.value);
            if self.weighter.weight(&resident.key, &resident.value) == 0 {
                // ...and if its weight dropped to zero the entry stays
                // resident but leaves the clock list.
                self.cold_head = self.entries.unlink(idx);
                return true;
            }
            // Otherwise turn the slot into a ghost remembering only the hash.
            let hash = Self::hash_static(&self.hash_builder, &resident.key);
            let Entry::Resident(evicted) = mem::replace(entry, Entry::Ghost(hash)) else {
                // SAFETY: `entry` was matched as a resident above.
                unsafe { core::hint::unreachable_unchecked() };
            };
            self.cold_head = self.entries.unlink(idx);
            self.ghost_head = Some(self.entries.link(idx, self.ghost_head));
            self.num_cold -= 1;
            self.num_non_resident += 1;
            // Keep the ghost list within its capacity.
            if self.num_non_resident > self.capacity_non_resident {
                self.advance_ghost();
            }
            self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
            return true;
        }
    }
736
    /// Advances the hot clock hand one step, trying to free weight.
    /// Returns `false` when the hot list is empty or fully pinned.
    #[must_use]
    fn advance_hot(&mut self, lcs: &mut L::RequestState) -> bool {
        // Counts non-pinned entries seen since the last wrap, to detect a
        // fully pinned list and avoid looping forever.
        let mut unpinned = 0usize;
        let Some(mut idx) = self.hot_head else {
            return false;
        };
        loop {
            let (entry, next) = self.entries.get_mut(idx).unwrap();
            // SAFETY: only resident entries are linked in the hot list.
            let Entry::Resident(resident) = entry else {
                unsafe { unreachable_unchecked() };
            };
            debug_assert_eq!(resident.state, ResidentState::Hot);
            if self.lifecycle.is_pinned(&resident.key, &resident.value) {
                // Pinned: cannot evict, but still age its counter
                // (clamped to MAX_F, since `get` can overshoot).
                *resident.referenced.get_mut() = (*resident.referenced.get_mut())
                    .min(MAX_F)
                    .saturating_sub(1);
                if Some(next) == self.hot_head {
                    // Completed a lap; give up if everything was pinned.
                    if unpinned == 0 {
                        return false;
                    }
                    unpinned = 0;
                }
                idx = next;
                continue;
            }
            unpinned += 1;
            if *resident.referenced.get_mut() != 0 {
                // Recently referenced: age it and keep scanning.
                *resident.referenced.get_mut() = (*resident.referenced.get_mut()).min(MAX_F) - 1;
                idx = next;
                continue;
            }
            // Evict. `before_evict` may shrink the value to zero weight, in
            // which case the entry stays resident but leaves the clock list.
            self.weight_hot -= self.weighter.weight(&resident.key, &resident.value);
            self.lifecycle
                .before_evict(lcs, &resident.key, &mut resident.value);
            if self.weighter.weight(&resident.key, &resident.value) == 0 {
                self.hot_head = self.entries.unlink(idx);
            } else {
                // Hot evictions are removed outright (no ghost is kept).
                self.num_hot -= 1;
                let hash = Self::hash_static(&self.hash_builder, &resident.key);
                let Some((Entry::Resident(evicted), next)) = self.entries.remove(idx) else {
                    // SAFETY: `idx` was just confirmed to hold a resident.
                    unsafe { core::hint::unreachable_unchecked() };
                };
                self.hot_head = next;
                self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
                self.map_remove(hash, idx);
            }
            return true;
        }
    }
790
    /// Drops the ghost entry at the head of the ghost list.
    /// Must only be called when at least one ghost entry exists.
    #[inline]
    fn advance_ghost(&mut self) {
        debug_assert_ne!(self.num_non_resident, 0);
        let idx = self.ghost_head.unwrap();
        let (entry, _) = self.entries.get_mut(idx).unwrap();
        // SAFETY: only ghost entries are linked in the ghost list.
        let Entry::Ghost(hash) = *entry else {
            unsafe { unreachable_unchecked() };
        };
        self.num_non_resident -= 1;
        self.map_remove(hash, idx);
        let (_, next) = self.entries.remove(idx).unwrap();
        self.ghost_head = next;
    }
804
    /// Replaces the entry at `idx` (found via `search`) with a resident
    /// holding `key`/`value`. Fails with `Err((key, value))` when
    /// `strategy` is `Replace` but the slot does not hold a resident.
    fn insert_existing(
        &mut self,
        lcs: &mut L::RequestState,
        idx: Token,
        key: Key,
        value: Val,
        weight: u64,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        let (entry, _) = self.entries.get_mut(idx).unwrap();
        let referenced;
        let enter_state;
        match entry {
            Entry::Resident(resident) => {
                // Keep the slot's list membership; bump the frequency
                // counter unless this is a soft replace (clamped to MAX_F).
                enter_state = resident.state;
                referenced = resident
                    .referenced
                    .get_mut()
                    .saturating_add(
                        !matches!(strategy, InsertStrategy::Replace { soft: true }) as u16
                    )
                    .min(MAX_F);
            }
            // Arm order matters: `Replace` succeeds only against residents,
            // so this must come after the resident arm.
            _ if matches!(strategy, InsertStrategy::Replace { .. }) => {
                return Err((key, value));
            }
            Entry::Ghost(_) => {
                // Ghost hit: the key was evicted recently, re-enter hot.
                referenced = 0;
                enter_state = ResidentState::Hot;
            }
            Entry::Placeholder(ph) => {
                referenced = 1; enter_state = ph.hot;
            }
        }

        let evicted = mem::replace(
            entry,
            Entry::Resident(Resident {
                key,
                value,
                state: enter_state,
                referenced: referenced.into(),
            }),
        );
        // Fix weights/counts/list membership based on what the slot held.
        match evicted {
            Entry::Resident(evicted) => {
                debug_assert_eq!(evicted.state, enter_state);
                let evicted_weight = self.weighter.weight(&evicted.key, &evicted.value);
                let list_head = if enter_state == ResidentState::Hot {
                    self.weight_hot -= evicted_weight;
                    self.weight_hot += weight;
                    &mut self.hot_head
                } else {
                    self.weight_cold -= evicted_weight;
                    self.weight_cold += weight;
                    &mut self.cold_head
                };
                // Zero-weight residents are not linked in the clock lists.
                if evicted_weight == 0 && weight != 0 {
                    *list_head = Some(self.entries.link(idx, *list_head));
                } else if evicted_weight != 0 && weight == 0 {
                    *list_head = self.entries.unlink(idx);
                }
                self.lifecycle.on_evict(lcs, evicted.key, evicted.value);
            }
            Entry::Ghost(_) => {
                // Move the slot from the ghost list into the hot list.
                self.weight_hot += weight;
                self.num_hot += 1;
                self.num_non_resident -= 1;
                let next_ghost = self.entries.unlink(idx);
                if self.ghost_head == Some(idx) {
                    self.ghost_head = next_ghost;
                }
                if weight != 0 {
                    self.hot_head = Some(self.entries.link(idx, self.hot_head));
                }
            }
            Entry::Placeholder(_) => {
                // Placeholders were never linked; link into the target list.
                let list_head = if enter_state == ResidentState::Hot {
                    self.num_hot += 1;
                    self.weight_hot += weight;
                    &mut self.hot_head
                } else {
                    self.num_cold += 1;
                    self.weight_cold += weight;
                    &mut self.cold_head
                };
                if weight != 0 {
                    *list_head = Some(self.entries.link(idx, *list_head));
                }
            }
        }

        // Evict until the shard is back within its weight budget.
        while self.weight_hot + self.weight_cold > self.weight_capacity && self.advance_cold(lcs) {}
        Ok(())
    }
902
    /// Inserts `idx` under `hash`, supplying the re-hash closure the table
    /// needs if it has to grow.
    #[inline]
    fn map_insert(&mut self, hash: u64, idx: Token) {
        self.map.insert_unique(hash, idx, |&i| {
            let (entry, _) = self.entries.get(i).unwrap();
            match entry {
                Entry::Resident(Resident { key, .. })
                | Entry::Placeholder(Placeholder { key, .. }) => {
                    Self::hash_static(&self.hash_builder, key)
                }
                Entry::Ghost(hash) => *hash,
            }
        });
    }

    /// Removes the bucket mapping `hash` to `idx`. A missing mapping is a
    /// bookkeeping bug: panics under debug_assertions, ignored in release.
    #[inline]
    fn map_remove(&mut self, hash: u64, idx: Token) {
        if let Ok(entry) = self.map.find_entry(hash, |&i| i == idx) {
            entry.remove();
            return;
        }
        #[cfg(debug_assertions)]
        panic!("key not found");
    }
926
    /// Fulfills a placeholder with `value`, turning the slot into a
    /// resident. Fails with `Err(value)` if the slot no longer holds this
    /// exact placeholder (it was removed or replaced in the meantime).
    pub fn replace_placeholder(
        &mut self,
        lcs: &mut L::RequestState,
        placeholder: &Plh,
        referenced: bool,
        mut value: Val,
    ) -> Result<(), Val> {
        let entry = match self.entries.get_mut(placeholder.idx()) {
            Some((entry, _)) if matches!(&*entry, Entry::Placeholder(p) if p.shared.same_as(placeholder)) => {
                entry
            }
            _ => return Err(value),
        };
        // Take the key out; `Ghost(0)` is a temporary filler that is either
        // overwritten below or removed by the overweight handler.
        let Entry::Placeholder(Placeholder {
            key,
            hot: mut placeholder_hot,
            ..
        }) = mem::replace(entry, Entry::Ghost(0))
        else {
            // SAFETY: the match above guarantees this is a placeholder.
            unsafe { core::hint::unreachable_unchecked() };
        };
        let mut weight = self.weighter.weight(&key, &value);
        if weight > self.weight_target_hot && !self.lifecycle.is_pinned(&key, &value) {
            // Give the lifecycle a chance to shrink the value first.
            self.lifecycle.before_evict(lcs, &key, &mut value);
            weight = self.weighter.weight(&key, &value);
            if weight > self.weight_target_hot {
                // Still too heavy to ever cache: drop the slot entirely.
                return self.handle_overweight_replace_placeholder(lcs, placeholder, key, value);
            }
        }

        // Admit straight into hot if the hot section has room.
        if self.weight_hot + weight <= self.weight_target_hot {
            placeholder_hot = ResidentState::Hot;
        }
        *entry = Entry::Resident(Resident {
            key,
            value,
            state: placeholder_hot,
            referenced: (referenced as u16).into(),
        });

        let list_head = if placeholder_hot == ResidentState::Hot {
            self.num_hot += 1;
            self.weight_hot += weight;
            &mut self.hot_head
        } else {
            self.num_cold += 1;
            self.weight_cold += weight;
            &mut self.cold_head
        };

        // Zero-weight residents stay out of the lists and cannot put the
        // shard over budget, so both steps are skipped in that case.
        if weight != 0 {
            *list_head = Some(self.entries.link(placeholder.idx(), *list_head));
            while self.weight_hot + self.weight_cold > self.weight_capacity
                && self.advance_cold(lcs)
            {}
        }

        Ok(())
    }
990
    /// Slow path of `replace_placeholder` for values too heavy to cache:
    /// the slot is dropped from slab and map, and the never-stored value is
    /// reported straight to `on_evict`.
    #[cold]
    fn handle_overweight_replace_placeholder(
        &mut self,
        lcs: &mut L::RequestState,
        placeholder: &Plh,
        key: Key,
        value: Val,
    ) -> Result<(), Val> {
        self.entries.remove(placeholder.idx());
        self.map_remove(placeholder.hash(), placeholder.idx());
        self.lifecycle.on_evict(lcs, key, value);
        Ok(())
    }
1004
    /// Inserts or replaces `key`/`value` according to `strategy`.
    ///
    /// Values heavier than the hot-section target are never admitted
    /// (see `handle_insert_overweight`), unless the lifecycle pins them.
    pub fn insert(
        &mut self,
        lcs: &mut L::RequestState,
        hash: u64,
        key: Key,
        mut value: Val,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        let mut weight = self.weighter.weight(&key, &value);
        if weight > self.weight_target_hot && !self.lifecycle.is_pinned(&key, &value) {
            // Let the lifecycle shrink the value before rejecting it.
            self.lifecycle.before_evict(lcs, &key, &mut value);
            weight = self.weighter.weight(&key, &value);
            if weight > self.weight_target_hot {
                return self.handle_insert_overweight(lcs, hash, key, value, strategy);
            }
        }

        if let Some(idx) = self.search(hash, &key) {
            return self.insert_existing(lcs, idx, key, value, weight, strategy);
        } else if matches!(strategy, InsertStrategy::Replace { .. }) {
            // Replace requires an existing entry.
            return Err((key, value));
        }

        // Hot admission is decided against the pre-eviction hot weight, and
        // evictions run before inserting so the new entry is not a victim.
        let enter_hot = self.weight_hot + weight <= self.weight_target_hot;
        while self.weight_hot + self.weight_cold + weight > self.weight_capacity
            && self.advance_cold(lcs)
        {}

        let (state, list_head) = if enter_hot {
            self.num_hot += 1;
            self.weight_hot += weight;
            (ResidentState::Hot, &mut self.hot_head)
        } else {
            self.num_cold += 1;
            self.weight_cold += weight;
            (ResidentState::Cold, &mut self.cold_head)
        };
        let idx = self.entries.insert(Entry::Resident(Resident {
            key,
            value,
            state,
            referenced: Default::default(),
        }));
        // Zero-weight entries stay out of the clock lists.
        if weight != 0 {
            *list_head = Some(self.entries.link(idx, *list_head));
        }
        self.map_insert(hash, idx);
        Ok(())
    }
1058
    /// Slow path of `insert` for values too heavy to ever cache: any
    /// existing resident version of the key is removed (and reported
    /// evicted), and the new value is handed straight to `on_evict`
    /// instead of being stored.
    #[cold]
    fn handle_insert_overweight(
        &mut self,
        lcs: &mut L::RequestState,
        hash: u64,
        key: Key,
        value: Val,
        strategy: InsertStrategy,
    ) -> Result<(), (Key, Val)> {
        // Drop any stale resident version of this key first.
        if let Some((idx, _)) = self.search_resident(hash, &key) {
            if let Some((ek, ev)) = self.remove_internal(hash, idx) {
                self.lifecycle.on_evict(lcs, ek, ev);
            }
        }
        if matches!(strategy, InsertStrategy::Replace { .. }) {
            return Err((key, value));
        }
        self.lifecycle.on_evict(lcs, key, value);
        Ok(())
    }
1080
    /// Gets the value for `key` or installs a placeholder for it.
    ///
    /// Returns `Ok((token, value))` on a resident hit. Otherwise returns
    /// `Err((shared, new))`: `new == false` means another placeholder
    /// already exists (the caller should wait on it); `new == true` means
    /// a placeholder was created here (the caller should produce the value
    /// and call `replace_placeholder`).
    pub fn upsert_placeholder<Q>(
        &mut self,
        hash: u64,
        key: &Q,
    ) -> Result<(Token, &Val), (Plh, bool)>
    where
        Q: Hash + Equivalent<Key> + ToOwned<Owned = Key> + ?Sized,
    {
        let shared;
        if let Some(idx) = self.search(hash, key) {
            let (entry, _) = self.entries.get_mut(idx).unwrap();
            match entry {
                Entry::Resident(resident) => {
                    // Hit: record the access (capped at MAX_F).
                    if *resident.referenced.get_mut() < MAX_F {
                        *resident.referenced.get_mut() += 1;
                    }
                    record_hit_mut!(self);
                    // SAFETY: works around a borrow-checker limitation when
                    // returning a borrow out of one branch of a `&mut self`
                    // match. The pointer comes from a live entry and is
                    // returned immediately with no intervening mutation.
                    unsafe {
                        let value_ptr: *const Val = &resident.value;
                        return Ok((idx, &*value_ptr));
                    }
                }
                Entry::Placeholder(p) => {
                    // Someone else is already computing this value.
                    record_hit_mut!(self);
                    return Err((p.shared.clone(), false));
                }
                Entry::Ghost(_) => {
                    // Ghost hit: reuse the slot; the future resident will
                    // enter the hot section.
                    shared = Plh::new(hash, idx);
                    *entry = Entry::Placeholder(Placeholder {
                        key: key.to_owned(),
                        hot: ResidentState::Hot,
                        shared: shared.clone(),
                    });
                    self.num_non_resident -= 1;
                    let next = self.entries.unlink(idx);
                    if self.ghost_head == Some(idx) {
                        self.ghost_head = next;
                    }
                }
            }
        } else {
            // Full miss: the future resident will enter the cold section.
            let idx = self.entries.next_free();
            shared = Plh::new(hash, idx);
            let idx_ = self.entries.insert(Entry::Placeholder(Placeholder {
                key: key.to_owned(),
                hot: ResidentState::Cold,
                shared: shared.clone(),
            }));
            debug_assert_eq!(idx, idx_);
            self.map_insert(hash, idx);
        }
        record_miss_mut!(self);
        Err((shared, true))
    }
1137
    /// Changes the shard's weight capacity, rescaling the hot target and
    /// the ghost capacity proportionally, then evicts down to the new
    /// budget and trims the ghost list.
    pub fn set_capacity(&mut self, new_weight_capacity: u64) {
        let old_new_ratio = new_weight_capacity as f64 / self.weight_capacity as f64;
        let hot_ratio = self.weight_target_hot as f64 / self.weight_capacity as f64;

        self.weight_capacity = new_weight_capacity;
        self.weight_target_hot = (new_weight_capacity as f64 * hot_ratio) as u64;
        self.capacity_non_resident = (self.capacity_non_resident as f64 * old_new_ratio) as usize;

        let mut lcs = self.lifecycle.begin_request();
        while self.weight_hot + self.weight_cold > self.weight_capacity
            && self.advance_cold(&mut lcs)
        {}
        self.lifecycle.end_request(lcs);
        while self.num_non_resident > self.capacity_non_resident {
            self.advance_ghost();
        }
    }
1159}
1160
/// Exclusive guard over a resident entry; re-weighs the entry when dropped
/// (see the `Drop` impl).
pub struct RefMut<'cache, Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> {
    cache: &'cache mut CacheShard<Key, Val, We, B, L, Plh>,
    idx: Token,
    // Weight at guard creation; compared against the new weight on drop.
    old_weight: u64,
}
1167
impl<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder>
    RefMut<'_, Key, Val, We, B, L, Plh>
{
    /// Returns the entry's key and value.
    pub(crate) fn pair(&self) -> (&Key, &Val) {
        // SAFETY: a RefMut is only constructed for a valid resident token
        // and holds the shard exclusively, so the slot cannot change.
        unsafe {
            if let (Entry::Resident(Resident { key, value, .. }), _) =
                self.cache.entries.get_unchecked(self.idx)
            {
                (key, value)
            } else {
                core::hint::unreachable_unchecked()
            }
        }
    }

    /// Returns a mutable reference to the entry's value.
    pub(crate) fn value_mut(&mut self) -> &mut Val {
        // SAFETY: same invariant as `pair`.
        unsafe {
            if let (Entry::Resident(Resident { value, .. }), _) =
                self.cache.entries.get_mut_unchecked(self.idx)
            {
                value
            } else {
                core::hint::unreachable_unchecked()
            }
        }
    }
}
1199
impl<Key, Val, We: Weighter<Key, Val>, B, L, Plh: SharedPlaceholder> Drop
    for RefMut<'_, Key, Val, We, B, L, Plh>
{
    #[inline]
    fn drop(&mut self) {
        // If the guarded mutation changed the entry's weight, fix the
        // shard's accounting on the cold slow path.
        let (key, value) = self.pair();
        let new_weight = self.cache.weighter.weight(key, value);
        if self.old_weight != new_weight {
            self.cache
                .cold_change_weight(self.idx, self.old_weight, new_weight);
        }
    }
}
1213
#[cfg(test)]
mod tests {
    use super::*;

    /// Guards the per-entry overhead on top of the key+value payload:
    /// 16 bytes for both the sync and unsync placeholder types.
    #[test]
    fn entry_overhead() {
        use std::mem::size_of;
        assert_eq!(
            size_of::<Entry<u64, u64, crate::sync_placeholder::SharedPlaceholder<u64>>>()
                - size_of::<[u64; 2]>(),
            16 );
        assert_eq!(
            size_of::<Entry<u64, u64, crate::unsync::SharedPlaceholder>>() - size_of::<[u64; 2]>(),
            16 );
    }
}
1231}