use std::{
    alloc::Layout,
    ptr::{self, NonNull},
    sync::{Arc, Mutex},
};

use allocator_api2::alloc::{AllocError, Allocator, Global};

/// Alignment of the chunk header and of every item allocated from a chunk.
const CHUNK_ALIGNMENT: usize = 32;
/// Default size of a chunk (including its header), in bytes.
const DEFAULT_CHUNK_SIZE: usize = 128 * 1024;

/// A bump allocator that serves allocations from a chain of chunks obtained
/// from (and returned to) a shared `ChunkPool`.
pub struct BumpAllocator {
    /// The chunk new allocations are carved out of. Previously filled chunks
    /// remain reachable through `Chunk::previous`.
    current_chunk: NonNull<Chunk>,
    /// Number of live allocations, checked against zero when the allocator is dropped.
    allocation_count: i32,

    /// Pool that chunks are allocated from and recycled into.
    chunk_pool: Arc<ChunkPool>,

    /// Counters reported by `get_stats`.
    stats: Stats,
}

impl BumpAllocator {
    pub fn new(chunk_pool: Arc<ChunkPool>) -> Self {
        let mut stats = Stats::default();

        let first_chunk = chunk_pool.allocate_chunk(DEFAULT_CHUNK_SIZE).unwrap();
        stats.chunks = 1;
        stats.reserved_bytes += DEFAULT_CHUNK_SIZE;

        BumpAllocator {
            current_chunk: first_chunk,
            chunk_pool,
            allocation_count: 0,
            stats,
        }
    }

    pub fn get_stats(&mut self) -> Stats {
        // All chunks except the current one are counted as fully utilized.
        self.stats.chunk_utilization =
            self.stats.chunks as f32 - 1.0 + Chunk::utilization(self.current_chunk);
        self.stats
    }

    pub fn reset_stats(&mut self) {
        // Chunks (and the bytes reserved for them) outlive a stats period,
        // so carry those counters over instead of zeroing them.
        let chunks = self.stats.chunks;
        let reserved_bytes = self.stats.reserved_bytes;
        self.stats = Stats::default();
        self.stats.chunks = chunks;
        self.stats.reserved_bytes = reserved_bytes;
    }

    pub fn allocate_item(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.stats.allocations += 1;
        self.stats.allocated_bytes += layout.size();

        // Fast path: bump-allocate from the current chunk.
        if let Ok(alloc) = Chunk::allocate_item(self.current_chunk, layout) {
            self.allocation_count += 1;
            return Ok(alloc);
        }

        // The current chunk is full: grab a new one and retry.
        self.alloc_chunk(layout.size())?;

        match Chunk::allocate_item(self.current_chunk, layout) {
            Ok(alloc) => {
                self.allocation_count += 1;
                Ok(alloc)
            }
            Err(_) => Err(AllocError),
        }
    }

    pub fn deallocate_item(&mut self, ptr: NonNull<u8>, layout: Layout) {
        self.stats.deallocations += 1;

        // Space is only reclaimed if the item is the most recent allocation of
        // the current chunk; anything else stays in place until its chunk is
        // recycled.
        if Chunk::contains_item(self.current_chunk, ptr) {
            unsafe { Chunk::deallocate_item(self.current_chunk, ptr, layout); }
        }

        self.allocation_count -= 1;
        debug_assert!(self.allocation_count >= 0);
    }

    pub unsafe fn grow_item(&mut self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        self.stats.reallocations += 1;

        // Try to grow in place (only possible if the item is the last
        // allocation of the current chunk).
        if Chunk::contains_item(self.current_chunk, ptr) {
            if let Ok(alloc) = Chunk::grow_item(self.current_chunk, ptr, old_layout, new_layout) {
                self.stats.in_place_reallocations += 1;
                return Ok(alloc);
            }
        }

        // Otherwise allocate a new item and copy the old contents into it.
        let new_alloc = if let Ok(alloc) = Chunk::allocate_item(self.current_chunk, new_layout) {
            alloc
        } else {
            self.alloc_chunk(new_layout.size())?;
            Chunk::allocate_item(self.current_chunk, new_layout).map_err(|_| AllocError)?
        };

        self.stats.reallocated_bytes += old_layout.size();

        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_alloc.as_ptr().cast(), old_layout.size());
        }

        Ok(new_alloc)
    }

    pub unsafe fn shrink_item(&mut self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        if Chunk::contains_item(self.current_chunk, ptr) {
            return unsafe { Ok(Chunk::shrink_item(self.current_chunk, ptr, old_layout, new_layout)) };
        }

        // The item lives in a previous chunk and cannot be shrunk in place;
        // keep the existing block, which is large enough for the new layout.
        Ok(NonNull::slice_from_raw_parts(ptr, old_layout.size()))
    }

    fn alloc_chunk(&mut self, item_size: usize) -> Result<(), AllocError> {
        // Make sure the new chunk is large enough for the item plus the chunk header.
        let chunk_size = DEFAULT_CHUNK_SIZE.max(align(item_size, CHUNK_ALIGNMENT) + CHUNK_ALIGNMENT);
        self.stats.reserved_bytes += chunk_size;

        let chunk = self.chunk_pool.allocate_chunk(chunk_size)?;

        unsafe {
            (*chunk.as_ptr()).previous = Some(self.current_chunk);
        }
        self.current_chunk = chunk;

        self.stats.chunks += 1;

        Ok(())
    }
}

impl Drop for BumpAllocator {
    fn drop(&mut self) {
        // Every allocation must have been deallocated before the allocator goes away.
        assert!(self.allocation_count == 0);

        unsafe {
            self.chunk_pool.recycle_chunks(self.current_chunk);
        }
    }
}

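// A minimal sketch (not part of the original code): one way a caller might expose
// this bump allocator through the `allocator_api2::alloc::Allocator` trait by
// wrapping it in `Arc<Mutex<..>>`. The `SharedBumpAllocator` name is hypothetical,
// and the sketch assumes requested alignments never exceed `CHUNK_ALIGNMENT`.
#[derive(Clone)]
pub struct SharedBumpAllocator(pub Arc<Mutex<BumpAllocator>>);

unsafe impl Allocator for SharedBumpAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.0.lock().unwrap().allocate_item(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        self.0.lock().unwrap().deallocate_item(ptr, layout)
    }

    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { self.0.lock().unwrap().grow_item(ptr, old_layout, new_layout) }
    }

    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { self.0.lock().unwrap().shrink_item(ptr, old_layout, new_layout) }
    }
}
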
pub struct Chunk {
    /// The previously filled chunk of the same allocator, if any.
    previous: Option<NonNull<Chunk>>,
    /// Start of the free space in this chunk.
    cursor: *mut u8,
    /// End of this chunk's memory.
    chunk_end: *mut u8,
    /// Total size of the chunk in bytes, including the header.
    size: usize,
}

impl Chunk {
    pub fn allocate_item(this: NonNull<Chunk>, layout: Layout) -> Result<NonNull<[u8]>, ()> {
        // The chunk's payload and every item size are aligned to CHUNK_ALIGNMENT,
        // so any requested alignment that divides CHUNK_ALIGNMENT is satisfied
        // automatically.
        debug_assert!(CHUNK_ALIGNMENT % layout.align() == 0);
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align().is_power_of_two());

        let size = align(layout.size(), CHUNK_ALIGNMENT);

        unsafe {
            let cursor = (*this.as_ptr()).cursor;
            let end = (*this.as_ptr()).chunk_end;
            let available_size = end.offset_from(cursor);

            if size as isize > available_size {
                return Err(());
            }

            let next = cursor.add(size);

            (*this.as_ptr()).cursor = next;

            let cursor = NonNull::new(cursor).unwrap();
            let suballocation: NonNull<[u8]> = NonNull::slice_from_raw_parts(cursor, size);

            Ok(suballocation)
        }
    }

    pub unsafe fn deallocate_item(this: NonNull<Chunk>, item: NonNull<u8>, layout: Layout) {
        debug_assert!(Chunk::contains_item(this, item));

        unsafe {
            let size = align(layout.size(), CHUNK_ALIGNMENT);
            let item_end = item.as_ptr().add(size);

            // Only reclaim the space if the item was the most recent allocation.
            if item_end == (*this.as_ptr()).cursor {
                (*this.as_ptr()).cursor = item.as_ptr();
            }
        }
    }

    pub unsafe fn grow_item(this: NonNull<Chunk>, item: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, ()> {
        debug_assert!(Chunk::contains_item(this, item));

        let old_size = align(old_layout.size(), CHUNK_ALIGNMENT);
        let new_size = align(new_layout.size(), CHUNK_ALIGNMENT);
        let old_item_end = item.as_ptr().add(old_size);

        // Growing in place is only possible for the most recent allocation.
        if old_item_end != (*this.as_ptr()).cursor {
            return Err(());
        }

        let chunk_end = (*this.as_ptr()).chunk_end;
        let available_size = chunk_end.offset_from(item.as_ptr());

        if new_size as isize > available_size {
            return Err(());
        }

        let new_item_end = item.as_ptr().add(new_size);
        (*this.as_ptr()).cursor = new_item_end;

        Ok(NonNull::slice_from_raw_parts(item, new_size))
    }

    pub unsafe fn shrink_item(this: NonNull<Chunk>, item: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> NonNull<[u8]> {
        debug_assert!(Chunk::contains_item(this, item));

        let old_size = align(old_layout.size(), CHUNK_ALIGNMENT);
        let new_size = align(new_layout.size(), CHUNK_ALIGNMENT);
        let old_item_end = item.as_ptr().add(old_size);

        // The freed space can only be reclaimed if the item was the most
        // recent allocation.
        if old_item_end == (*this.as_ptr()).cursor {
            let new_item_end = item.as_ptr().add(new_size);
            (*this.as_ptr()).cursor = new_item_end;
        }

        NonNull::slice_from_raw_parts(item, new_size)
    }

    pub fn contains_item(this: NonNull<Chunk>, item: NonNull<u8>) -> bool {
        unsafe {
            // Items start right after the chunk header, which occupies the
            // first CHUNK_ALIGNMENT bytes of the chunk.
            let start: *mut u8 = this.cast::<u8>().as_ptr().add(CHUNK_ALIGNMENT);
            let end: *mut u8 = (*this.as_ptr()).chunk_end;
            let item = item.as_ptr();

            start <= item && item < end
        }
    }

    fn available_size(this: NonNull<Chunk>) -> usize {
        unsafe {
            let this = this.as_ptr();
            (*this).chunk_end.offset_from((*this).cursor) as usize
        }
    }

    fn utilization(this: NonNull<Chunk>) -> f32 {
        let size = unsafe { (*this.as_ptr()).size } as f32;
        (size - Chunk::available_size(this) as f32) / size
    }
}

/// Round `val` up to the nearest multiple of `alignment`.
fn align(val: usize, alignment: usize) -> usize {
    let rem = val % alignment;
    if rem == 0 {
        return val;
    }

    val.checked_add(alignment).unwrap() - rem
}

#[derive(Copy, Clone, Debug, Default)]
pub struct Stats {
    pub chunks: u32,
    pub chunk_utilization: f32,
    pub allocations: u32,
    pub deallocations: u32,
    pub reallocations: u32,
    pub in_place_reallocations: u32,

    pub reallocated_bytes: usize,
    pub allocated_bytes: usize,
    pub reserved_bytes: usize,
}

/// A thread-safe pool of recycled default-sized chunks that can be shared
/// between bump allocators.
pub struct ChunkPool {
    inner: Mutex<ChunkPoolInner>,
}

struct ChunkPoolInner {
    /// Head of the free list of recycled chunks.
    first: Option<NonNull<RecycledChunk>>,
    /// Number of chunks in the free list.
    count: i32,
}

/// Free-list node written over a chunk's header while it sits in the pool.
struct RecycledChunk {
    next: Option<NonNull<RecycledChunk>>,
}

impl ChunkPool {
    pub fn new() -> Self {
        ChunkPool {
            inner: Mutex::new(ChunkPoolInner {
                first: None,
                count: 0,
            }),
        }
    }

    pub fn allocate_chunk(&self, size: usize) -> Result<NonNull<Chunk>, AllocError> {
        // Only default-sized chunks are recycled; other sizes always come from
        // the global allocator.
        let chunk: Option<NonNull<RecycledChunk>> = if size == DEFAULT_CHUNK_SIZE {
            let mut inner = self.inner.lock().unwrap();
            let mut chunk = inner.first.take();
            inner.first = chunk.as_mut().and_then(|chunk| unsafe { chunk.as_mut().next.take() });

            if chunk.is_some() {
                inner.count -= 1;
                debug_assert!(inner.count >= 0);
            }

            chunk
        } else {
            None
        };

        let chunk: NonNull<Chunk> = match chunk {
            Some(chunk) => chunk.cast(),
            None => {
                let layout = match Layout::from_size_align(size, CHUNK_ALIGNMENT) {
                    Ok(layout) => layout,
                    Err(_) => {
                        return Err(AllocError);
                    }
                };

                let alloc = Global.allocate(layout)?;

                alloc.cast()
            }
        };

        let chunk_start: *mut u8 = chunk.cast().as_ptr();

        unsafe {
            // The chunk header occupies the first CHUNK_ALIGNMENT bytes; the
            // cursor starts right after it.
            let chunk_end = chunk_start.add(size);
            let cursor = chunk_start.add(CHUNK_ALIGNMENT);
            ptr::write(
                chunk.as_ptr(),
                Chunk {
                    previous: None,
                    chunk_end,
                    cursor,
                    size,
                },
            );
        }

        Ok(chunk)
    }

    unsafe fn recycle_chunks(&self, chunk: NonNull<Chunk>) {
        let mut inner = self.inner.lock().unwrap();
        let mut iter = Some(chunk);
        while let Some(mut chunk) = iter {
            iter = unsafe { chunk.as_mut().previous.take() };

            unsafe {
                // Chunks with a non-default size are deallocated right away
                // instead of being recycled.
                let size = chunk.as_ref().size;
                if size != DEFAULT_CHUNK_SIZE {
                    let layout = Layout::from_size_align(size, CHUNK_ALIGNMENT).unwrap();
                    Global.deallocate(chunk.cast(), layout);
                    continue;
                }
            }

            // Push the chunk onto the pool's free list.
            let recycled: NonNull<RecycledChunk> = chunk.cast();

            unsafe {
                ptr::write(recycled.as_ptr(), RecycledChunk {
                    next: inner.first,
                });
            }
            inner.first = Some(recycled);

            inner.count += 1;
        }
    }

    /// Deallocate recycled chunks until the pool holds at most `target` chunks,
    /// removing at most `count` chunks per call (`count` is expected to be non-zero).
    ///
    /// Returns `false` if the call stopped because `count` chunks were removed,
    /// `true` if the target was reached.
    #[inline(never)]
    pub fn purge_chunks(&self, target: u32, mut count: u32) -> bool {
        let mut inner = self.inner.lock().unwrap();
        assert!(inner.count >= 0);

        while inner.count as u32 > target {
            unsafe {
                let chunk = inner.first.unwrap();

                inner.first = chunk.as_ref().next;

                let layout = Layout::from_size_align(
                    DEFAULT_CHUNK_SIZE,
                    CHUNK_ALIGNMENT
                ).unwrap();
                Global.deallocate(chunk.cast(), layout);
            }

            inner.count -= 1;
            count -= 1;

            if count == 0 {
                return false;
            }
        }

        true
    }

    pub fn purge_all_chunks(&self) {
        self.purge_chunks(0, u32::MAX);
    }
}

impl Drop for ChunkPool {
    fn drop(&mut self) {
        self.purge_all_chunks();
    }
}

// The raw pointers in the free list refer to chunks owned exclusively by the
// pool, so its inner state can safely be sent to another thread.
unsafe impl Send for ChunkPoolInner {}
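
// A minimal usage sketch (not part of the original code) exercising the public
// API above: a pool shared via `Arc`, a `BumpAllocator` allocating and freeing a
// single item, and the pool being purged afterwards. Test names are hypothetical.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn align_rounds_up() {
        assert_eq!(align(0, 32), 0);
        assert_eq!(align(1, 32), 32);
        assert_eq!(align(32, 32), 32);
        assert_eq!(align(33, 32), 64);
    }

    #[test]
    fn bump_allocator_roundtrip() {
        let pool = Arc::new(ChunkPool::new());
        let mut allocator = BumpAllocator::new(pool.clone());

        // Allocate a 64-byte item and make sure the memory is writable.
        let layout = Layout::from_size_align(64, 8).unwrap();
        let alloc = allocator.allocate_item(layout).unwrap();
        unsafe { ptr::write_bytes(alloc.cast::<u8>().as_ptr(), 0xAB, layout.size()) };

        allocator.deallocate_item(alloc.cast(), layout);

        let stats = allocator.get_stats();
        assert_eq!(stats.chunks, 1);
        assert_eq!(stats.allocations, 1);
        assert_eq!(stats.deallocations, 1);

        // Dropping the allocator hands its chunk back to the pool; purge it.
        drop(allocator);
        pool.purge_all_chunks();
    }
}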