pub struct CacheShard<Key, Val, We, B, L, Plh> {
    hash_builder: B,
    map: HashTable<NonZeroU32>,
    entries: LinkedSlab<Entry<Key, Val, Plh>>,
    cold_head: Option<NonZeroU32>,
    hot_head: Option<NonZeroU32>,
    ghost_head: Option<NonZeroU32>,
    weight_target_hot: u64,
    weight_capacity: u64,
    weight_hot: u64,
    weight_cold: u64,
    num_hot: usize,
    num_cold: usize,
    num_non_resident: usize,
    capacity_non_resident: usize,
    weighter: We,
    pub(crate) lifecycle: L,
}
A bounded cache using a modified CLOCK-PRO eviction policy. The implementation allows some parallelism as gets don’t require exclusive access. Any evicted items are returned so they can be dropped by the caller, outside the locks.
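To make that last point concrete, the ToyShard below is a hypothetical stand-in (not the real CacheShard API): insert returns whatever it evicted instead of dropping it in place, and the caller lets those values drop only after the mutex guard has gone out of scope.

use std::collections::VecDeque;
use std::sync::Mutex;

// Hypothetical stand-in for a shard: only the locking pattern matters here.
struct ToyShard {
    items: VecDeque<(u32, String)>,
    capacity: usize,
}

impl ToyShard {
    // Instead of dropping evicted entries in place, hand them back to the caller.
    fn insert(&mut self, key: u32, value: String) -> Vec<(u32, String)> {
        self.items.push_back((key, value));
        let mut evicted = Vec::new();
        while self.items.len() > self.capacity {
            evicted.push(self.items.pop_front().expect("non-empty"));
        }
        evicted
    }
}

fn main() {
    let shard = Mutex::new(ToyShard { items: VecDeque::new(), capacity: 2 });

    let evicted = {
        let mut guard = shard.lock().unwrap();
        guard.insert(1, "a".into());
        guard.insert(2, "b".into());
        guard.insert(3, "c".into()) // evicts (1, "a") to stay within capacity
    }; // the guard (and thus the lock) is released here

    // Any potentially expensive Drop impls for evicted values now run outside the lock.
    assert_eq!(evicted, vec![(1, "a".to_string())]);
}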
Fields

hash_builder: B

map: HashTable<NonZeroU32>
    Map to an entry in the entries slab.
    Note that the actual key/value/hash are not stored in the map but in the slab
    (see the sketch after this field list).

entries: LinkedSlab<Entry<Key, Val, Plh>>
    Slab holding entries.

cold_head: Option<NonZeroU32>
    Head of the cold list, containing Cold entries.
    Only contains entries of kind Resident.

hot_head: Option<NonZeroU32>
    Head of the hot list, containing Hot entries.
    Only contains entries of kind Resident.

ghost_head: Option<NonZeroU32>
    Head of the ghost list, containing non-resident (hash-only) entries.
    Only contains entries of kind Ghost.
weight_target_hot: u64
weight_capacity: u64
weight_hot: u64
weight_cold: u64
num_hot: usize
num_cold: usize
num_non_resident: usize
capacity_non_resident: usize
weighter: We
lifecycle: L
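As a rough illustration of that map/slab split, the hypothetical IndexedCache below keeps only slab indices in a hashbrown::HashTable and stores the actual keys and values in a Vec acting as the slab; lookups and rehashing follow the index back into the slab to reach the key. This is a simplified sketch of the idea, not the crate's implementation (no linked lists, weights, ghosts, or removal handling).

use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hash};

use hashbrown::HashTable;

// Hypothetical structure: the table stores bare indices; keys, values and
// hashes live in the slab, so every probe follows the index to compare keys.
struct IndexedCache<K, V> {
    hash_builder: RandomState,
    map: HashTable<usize>, // values are slab indices, not key/value pairs
    entries: Vec<(K, V)>,  // the "slab" that owns the actual keys and values
}

impl<K: Eq + Hash, V> IndexedCache<K, V> {
    fn new() -> Self {
        Self {
            hash_builder: RandomState::new(),
            map: HashTable::new(),
            entries: Vec::new(),
        }
    }

    fn insert(&mut self, key: K, value: V) {
        let hash = self.hash_builder.hash_one(&key);
        let idx = self.entries.len();
        self.entries.push((key, value));
        // On resize the table must re-derive hashes, which it does by going
        // back through the slab rather than storing them alongside the index.
        let (hasher, entries) = (&self.hash_builder, &self.entries);
        self.map
            .insert_unique(hash, idx, |&i| hasher.hash_one(&entries[i].0));
    }

    fn get(&self, key: &K) -> Option<&V> {
        let hash = self.hash_builder.hash_one(key);
        let idx = *self.map.find(hash, |&i| self.entries[i].0 == *key)?;
        Some(&self.entries[idx].1)
    }
}

fn main() {
    let mut cache = IndexedCache::new();
    cache.insert("alpha".to_string(), 1u32);
    cache.insert("beta".to_string(), 2u32);
    assert_eq!(cache.get(&"beta".to_string()), Some(&2));
}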
Implementations

impl<Key, Val, We, B, L, Plh: SharedPlaceholder> CacheShard<Key, Val, We, B, L, Plh>
pub fn remove_placeholder(&mut self, placeholder: &Plh)
fn cold_change_weight(&mut self, idx: NonZeroU32, old_weight: u64, new_weight: u64)
impl<Key, Val, We, B, L, Plh> CacheShard<Key, Val, We, B, L, Plh>
pub fn memory_used(&self) -> MemoryUsed
pub fn weight(&self) -> u64
pub fn len(&self) -> usize
pub fn capacity(&self) -> u64
pub fn clear(&mut self)
pub fn drain(&mut self) -> impl Iterator<Item = (Key, Val)> + '_
pub fn iter(&self) -> impl Iterator<Item = (&Key, &Val)> + '_
pub fn iter_from(&self, continuation: Option<NonZeroU32>) -> impl Iterator<Item = (NonZeroU32, &Key, &Val)> + '_
impl<Key: Eq + Hash, Val, We: Weighter<Key, Val>, B: BuildHasher, L: Lifecycle<Key, Val>, Plh: SharedPlaceholder> CacheShard<Key, Val, We, B, L, Plh>
pub fn new(hot_allocation: f64, ghost_allocation: f64, estimated_items_capacity: usize, weight_capacity: u64, weighter: We, hash_builder: B, lifecycle: L) -> Self
pub fn reserve(&mut self, additional: usize)
    Reserves space for additional entries.
    Note that this is counted in entries, and is not weighted.
pub fn retain<F>(&mut self, f: F)
fn hash_static<Q>(hasher: &B, key: &Q) -> u64
pub fn hash<Q>(&self, key: &Q) -> u64
fn search<Q>(&self, hash: u64, k: &Q) -> Option<NonZeroU32>
fn search_resident<Q>(&self, hash: u64, k: &Q) -> Option<(NonZeroU32, &Resident<Key, Val>)>
pub fn contains<Q>(&self, hash: u64, key: &Q) -> bool
pub fn get<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
pub fn get_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
pub fn peek_token(&self, token: NonZeroU32) -> Option<&Val>
pub fn peek_token_mut(&mut self, token: NonZeroU32) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
pub fn peek<Q>(&self, hash: u64, key: &Q) -> Option<&Val>
pub fn peek_mut<Q>(&mut self, hash: u64, key: &Q) -> Option<RefMut<'_, Key, Val, We, B, L, Plh>>
pub fn remove<Q>(&mut self, hash: u64, key: &Q) -> Option<(Key, Val)>
pub fn remove_if<Q, F>(&mut self, hash: u64, key: &Q, f: F) -> Option<(Key, Val)>
pub fn remove_token(&mut self, token: NonZeroU32) -> Option<(Key, Val)>
pub fn remove_next(&mut self, continuation: Option<NonZeroU32>) -> Option<(NonZeroU32, Key, Val)>
fn remove_internal(&mut self, hash: u64, idx: NonZeroU32) -> Option<(Key, Val)>
fn advance_cold(&mut self, lcs: &mut L::RequestState) -> bool
    Advance the cold ring, promoting to hot and demoting as needed.
fn advance_hot(&mut self, lcs: &mut L::RequestState) -> bool
    Advance the hot ring, evicting entries.
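To illustrate the two hands, here is a heavily simplified, hypothetical sketch of CLOCK-Pro-style ring advancement. It is not the crate's modified policy (weights, the lifecycle hook, ghost capacity, and the slab links are all omitted): a referenced cold entry is promoted to hot, an unreferenced one loses residency and becomes a ghost; a referenced hot entry gets a second chance, an unreferenced one is evicted.

use std::collections::VecDeque;

// Simplified model of the two clock hands. Each resident entry carries a
// "referenced" bit that is set on access; real entries also carry values,
// weights, and list links.
struct Entry {
    key: u64,
    referenced: bool,
}

struct Rings {
    cold: VecDeque<Entry>, // resident cold entries
    hot: VecDeque<Entry>,  // resident hot entries
    ghost: VecDeque<u64>,  // non-resident entries, tracked by key (or hash) only
}

impl Rings {
    // Cold hand: a referenced cold entry is promoted to hot; an unreferenced
    // one becomes a ghost. Returns the key whose value was evicted, if any,
    // so the caller can drop the value outside the lock.
    fn advance_cold(&mut self) -> Option<u64> {
        let mut entry = self.cold.pop_front()?;
        if entry.referenced {
            entry.referenced = false;
            self.hot.push_back(entry);
            None
        } else {
            let key = entry.key;
            self.ghost.push_back(key);
            Some(key)
        }
    }

    // Hot hand: a referenced hot entry gets its bit cleared and a second
    // chance; an unreferenced one is evicted outright.
    fn advance_hot(&mut self) -> Option<u64> {
        let mut entry = self.hot.pop_front()?;
        if entry.referenced {
            entry.referenced = false;
            self.hot.push_back(entry);
            None
        } else {
            Some(entry.key)
        }
    }
}

fn main() {
    let mut rings = Rings {
        cold: VecDeque::from([
            Entry { key: 1, referenced: false },
            Entry { key: 2, referenced: true },
        ]),
        hot: VecDeque::new(),
        ghost: VecDeque::new(),
    };
    // Key 1 was not referenced: it loses residency and becomes a ghost.
    assert_eq!(rings.advance_cold(), Some(1));
    // Key 2 was referenced: it is promoted to hot, nothing is evicted.
    assert_eq!(rings.advance_cold(), None);
    // Key 2's bit was cleared on promotion; the hot hand now evicts it.
    assert_eq!(rings.advance_hot(), Some(2));
}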