use std::{
    alloc::Layout,
    ptr::{self, NonNull},
    sync::{Arc, Mutex},
};

use allocator_api2::alloc::{AllocError, Allocator, Global};

/// Alignment of chunk headers and of every suballocation.
const CHUNK_ALIGNMENT: usize = 32;
/// Default chunk size. Only chunks of this size are recycled by the pool.
const DEFAULT_CHUNK_SIZE: usize = 128 * 1024;

/// A bump allocator that suballocates from fixed-size chunks obtained from a
/// shared `ChunkPool`.
pub struct BumpAllocator {
    /// The chunk currently being allocated from. Chunks form a singly linked
    /// list; each chunk points to the previous one.
    current_chunk: NonNull<Chunk>,
    /// The number of live allocations. Must be zero when the allocator drops.
    allocation_count: i32,

    chunk_pool: Arc<ChunkPool>,

    stats: Stats,
}

impl BumpAllocator {
    pub fn new(chunk_pool: Arc<ChunkPool>) -> Self {
        let mut stats = Stats::default();

        // Start with a single default-sized chunk.
        let first_chunk = chunk_pool.allocate_chunk(DEFAULT_CHUNK_SIZE).unwrap();
        stats.chunks = 1;
        stats.reserved_bytes += DEFAULT_CHUNK_SIZE;

        BumpAllocator {
            current_chunk: first_chunk,
            chunk_pool,
            allocation_count: 0,
            stats,
        }
    }

    pub fn get_stats(&mut self) -> Stats {
        // Utilization is expressed in chunks: every chunk but the current one
        // is counted as full, plus the fill ratio of the current chunk.
        self.stats.chunk_utilization =
            self.stats.chunks as f32 - 1.0 + Chunk::utilization(self.current_chunk);
        self.stats
    }

    pub fn allocate_item(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.stats.allocations += 1;
        self.stats.allocated_bytes += layout.size();

        // Fast path: bump-allocate from the current chunk.
        if let Ok(alloc) = Chunk::allocate_item(self.current_chunk, layout) {
            self.allocation_count += 1;
            return Ok(alloc);
        }

        // Slow path: the current chunk is full. Fetch a new chunk from the
        // pool and retry.
        self.alloc_chunk(layout.size())?;

        match Chunk::allocate_item(self.current_chunk, layout) {
            Ok(alloc) => {
                self.allocation_count += 1;
                Ok(alloc)
            }
            Err(_) => Err(AllocError),
        }
    }

    pub fn deallocate_item(&mut self, ptr: NonNull<u8>, layout: Layout) {
        self.stats.deallocations += 1;

        // Memory is only reclaimed eagerly if the item is in the current
        // chunk (and, inside `Chunk::deallocate_item`, only if it was the
        // most recent allocation). Everything else is reclaimed in bulk when
        // the chunks are recycled.
        if Chunk::contains_item(self.current_chunk, ptr) {
            unsafe {
                Chunk::deallocate_item(self.current_chunk, ptr, layout);
            }
        }

        self.allocation_count -= 1;
        debug_assert!(self.allocation_count >= 0);
    }

    pub unsafe fn grow_item(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        self.stats.reallocations += 1;

        // Try to grow in place, which only works if the item is the most
        // recent allocation in the current chunk.
        if Chunk::contains_item(self.current_chunk, ptr) {
            if let Ok(alloc) = Chunk::grow_item(self.current_chunk, ptr, old_layout, new_layout) {
                self.stats.in_place_reallocations += 1;
                return Ok(alloc);
            }
        }

        // Otherwise allocate anew and copy. The old allocation is not
        // reclaimed; its memory is freed when its chunk is recycled.
        let new_alloc = if let Ok(alloc) = Chunk::allocate_item(self.current_chunk, new_layout) {
            alloc
        } else {
            self.alloc_chunk(new_layout.size())?;
            Chunk::allocate_item(self.current_chunk, new_layout).map_err(|_| AllocError)?
        };

        self.stats.reallocated_bytes += old_layout.size();

        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_alloc.as_ptr().cast(), old_layout.size());
        }

        Ok(new_alloc)
    }

    pub unsafe fn shrink_item(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        if Chunk::contains_item(self.current_chunk, ptr) {
            return unsafe { Ok(Chunk::shrink_item(self.current_chunk, ptr, old_layout, new_layout)) };
        }

        // The item is in an older chunk: shrinking is a no-op and the block
        // keeps its original size.
        Ok(NonNull::slice_from_raw_parts(ptr, old_layout.size()))
    }

    fn alloc_chunk(&mut self, item_size: usize) -> Result<(), AllocError> {
        // The chunk must hold the item plus the chunk header, which occupies
        // the first `CHUNK_ALIGNMENT` bytes.
        let chunk_size = DEFAULT_CHUNK_SIZE
            .max(align(item_size, CHUNK_ALIGNMENT) + CHUNK_ALIGNMENT);
        self.stats.reserved_bytes += chunk_size;

        let chunk = self.chunk_pool.allocate_chunk(chunk_size)?;

        unsafe {
            (*chunk.as_ptr()).previous = Some(self.current_chunk);
        }
        self.current_chunk = chunk;

        self.stats.chunks += 1;

        Ok(())
    }
}

impl Drop for BumpAllocator {
    fn drop(&mut self) {
        // Every allocation must have been deallocated before the allocator
        // is dropped.
        assert!(self.allocation_count == 0);

        unsafe {
            self.chunk_pool.recycle_chunks(self.current_chunk);
        }
    }
}

/// A chunk of memory for the bump allocator to suballocate from.
///
/// The `Chunk` header is stored in the first `CHUNK_ALIGNMENT` bytes of the
/// chunk's own memory.
pub struct Chunk {
    /// The previously active chunk, forming a singly linked list.
    previous: Option<NonNull<Chunk>>,
    /// Pointer to the first free byte in this chunk.
    cursor: *mut u8,
    /// Pointer one past the end of this chunk's memory.
    chunk_end: *mut u8,
    /// Total size of the chunk in bytes, including the header.
    size: usize,
}

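// A compile-time guard we add here (not in the original module): the header
// must fit in the `CHUNK_ALIGNMENT` bytes reserved at the start of each
// chunk, since the cursor starts at `chunk_start + CHUNK_ALIGNMENT`.
const _: () = assert!(std::mem::size_of::<Chunk>() <= CHUNK_ALIGNMENT);
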
impl Chunk {
    pub fn allocate_item(this: NonNull<Chunk>, layout: Layout) -> Result<NonNull<[u8]>, ()> {
        // The chunk alignment must be a multiple of the item's alignment so
        // that rounding sizes up to `CHUNK_ALIGNMENT` keeps items aligned.
        debug_assert!(CHUNK_ALIGNMENT % layout.align() == 0);
        debug_assert!(layout.align() > 0);
        debug_assert!(layout.align().is_power_of_two());

        let size = align(layout.size(), CHUNK_ALIGNMENT);

        unsafe {
            let cursor = (*this.as_ptr()).cursor;
            let end = (*this.as_ptr()).chunk_end;
            let available_size = end.offset_from(cursor);

            if size as isize > available_size {
                return Err(());
            }

            // Bump the cursor past the new item.
            let next = cursor.add(size);
            (*this.as_ptr()).cursor = next;

            let cursor = NonNull::new(cursor).unwrap();
            let suballocation: NonNull<[u8]> = NonNull::slice_from_raw_parts(cursor, size);

            Ok(suballocation)
        }
    }

    pub unsafe fn deallocate_item(this: NonNull<Chunk>, item: NonNull<u8>, layout: Layout) {
        debug_assert!(Chunk::contains_item(this, item));

        unsafe {
            let size = align(layout.size(), CHUNK_ALIGNMENT);
            let item_end = item.as_ptr().add(size);

            // Memory is only reclaimed if the item was the most recent
            // allocation; everything else is freed when the chunk is
            // recycled.
            if item_end == (*this.as_ptr()).cursor {
                (*this.as_ptr()).cursor = item.as_ptr();
            }
        }
    }

    pub unsafe fn grow_item(this: NonNull<Chunk>, item: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, ()> {
        debug_assert!(Chunk::contains_item(this, item));

        let old_size = align(old_layout.size(), CHUNK_ALIGNMENT);
        let new_size = align(new_layout.size(), CHUNK_ALIGNMENT);
        let old_item_end = item.as_ptr().add(old_size);

        // Growing in place is only possible for the most recent allocation.
        if old_item_end != (*this.as_ptr()).cursor {
            return Err(());
        }

        let chunk_end = (*this.as_ptr()).chunk_end;
        let available_size = chunk_end.offset_from(item.as_ptr());

        if new_size as isize > available_size {
            return Err(());
        }

        let new_item_end = item.as_ptr().add(new_size);
        (*this.as_ptr()).cursor = new_item_end;

        Ok(NonNull::slice_from_raw_parts(item, new_size))
    }

    pub unsafe fn shrink_item(this: NonNull<Chunk>, item: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> NonNull<[u8]> {
        debug_assert!(Chunk::contains_item(this, item));

        let old_size = align(old_layout.size(), CHUNK_ALIGNMENT);
        let new_size = align(new_layout.size(), CHUNK_ALIGNMENT);
        let old_item_end = item.as_ptr().add(old_size);

        // Memory is only reclaimed if the item was the most recent
        // allocation.
        if old_item_end == (*this.as_ptr()).cursor {
            let new_item_end = item.as_ptr().add(new_size);
            (*this.as_ptr()).cursor = new_item_end;
        }

        NonNull::slice_from_raw_parts(item, new_size)
    }

    pub fn contains_item(this: NonNull<Chunk>, item: NonNull<u8>) -> bool {
        unsafe {
            // The first `CHUNK_ALIGNMENT` bytes hold the chunk header.
            let start: *mut u8 = this.cast::<u8>().as_ptr().add(CHUNK_ALIGNMENT);
            let end: *mut u8 = (*this.as_ptr()).chunk_end;
            let item = item.as_ptr();

            start <= item && item < end
        }
    }

    fn available_size(this: NonNull<Chunk>) -> usize {
        unsafe {
            let this = this.as_ptr();
            (*this).chunk_end.offset_from((*this).cursor) as usize
        }
    }

    fn utilization(this: NonNull<Chunk>) -> f32 {
        let size = unsafe { (*this.as_ptr()).size } as f32;
        (size - Chunk::available_size(this) as f32) / size
    }
}

/// Rounds `val` up to the next multiple of `alignment`.
fn align(val: usize, alignment: usize) -> usize {
    let rem = val % alignment;
    if rem == 0 {
        return val;
    }

    val.checked_add(alignment).unwrap() - rem
}

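// A quick sanity check of the rounding helper (a test we add for
// illustration; it is not part of the original module).
#[cfg(test)]
#[test]
fn align_rounds_up() {
    assert_eq!(align(0, 32), 0);
    assert_eq!(align(1, 32), 32);
    assert_eq!(align(32, 32), 32);
    assert_eq!(align(33, 32), 64);
}
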
#[derive(Copy, Clone, Debug, Default)]
pub struct Stats {
    pub chunks: u32,
    pub chunk_utilization: f32,
    pub allocations: u32,
    pub deallocations: u32,
    pub reallocations: u32,
    pub in_place_reallocations: u32,

    pub reallocated_bytes: usize,
    pub allocated_bytes: usize,
    pub reserved_bytes: usize,
}

/// A thread-safe pool of recycled chunks, shared by bump allocators.
pub struct ChunkPool {
    inner: Mutex<ChunkPoolInner>,
}

struct ChunkPoolInner {
    /// Head of a singly linked free list of recycled chunks.
    first: Option<NonNull<RecycledChunk>>,
    /// Number of chunks in the free list.
    count: i32,
}

/// Header written over the `Chunk` header when a chunk enters the free list.
struct RecycledChunk {
    next: Option<NonNull<RecycledChunk>>,
}

impl ChunkPool {
    pub fn new() -> Self {
        ChunkPool {
            inner: Mutex::new(ChunkPoolInner {
                first: None,
                count: 0,
            }),
        }
    }

    pub fn allocate_chunk(&self, size: usize) -> Result<NonNull<Chunk>, AllocError> {
        // Only default-sized chunks are recycled, so the free list can only
        // serve requests for exactly `DEFAULT_CHUNK_SIZE`.
        let chunk: Option<NonNull<RecycledChunk>> = if size == DEFAULT_CHUNK_SIZE {
            let mut inner = self.inner.lock().unwrap();
            let mut chunk = inner.first.take();
            inner.first = chunk.as_mut().and_then(|chunk| unsafe { chunk.as_mut().next.take() });

            if chunk.is_some() {
                inner.count -= 1;
                debug_assert!(inner.count >= 0);
            }

            chunk
        } else {
            None
        };

        let chunk: NonNull<Chunk> = match chunk {
            Some(chunk) => chunk.cast(),
            None => {
                // Nothing to recycle; allocate a fresh chunk.
                let layout = match Layout::from_size_align(size, CHUNK_ALIGNMENT) {
                    Ok(layout) => layout,
                    Err(_) => {
                        return Err(AllocError);
                    }
                };

                let alloc = Global.allocate(layout)?;

                alloc.cast()
            }
        };

        let chunk_start: *mut u8 = chunk.cast().as_ptr();

        unsafe {
            // Write the chunk header at the start of the chunk's memory; the
            // cursor starts right after it.
            let chunk_end = chunk_start.add(size);
            let cursor = chunk_start.add(CHUNK_ALIGNMENT);
            ptr::write(
                chunk.as_ptr(),
                Chunk {
                    previous: None,
                    chunk_end,
                    cursor,
                    size,
                },
            );
        }

        Ok(chunk)
    }

    /// Walks the chunk list, returning default-sized chunks to the free list
    /// and deallocating all others.
    unsafe fn recycle_chunks(&self, chunk: NonNull<Chunk>) {
        let mut inner = self.inner.lock().unwrap();
        let mut iter = Some(chunk);
        while let Some(mut chunk) = iter {
            iter = unsafe { chunk.as_mut().previous.take() };

            unsafe {
                let size = chunk.as_ref().size;
                // Only default-sized chunks are recycled.
                if size != DEFAULT_CHUNK_SIZE {
                    let layout = Layout::from_size_align(size, CHUNK_ALIGNMENT).unwrap();
                    Global.deallocate(chunk.cast(), layout);
                    continue;
                }
            }

            // Overwrite the chunk header with a free-list node and push it
            // to the front of the list.
            let recycled: NonNull<RecycledChunk> = chunk.cast();

            unsafe {
                ptr::write(recycled.as_ptr(), RecycledChunk {
                    next: inner.first,
                });
            }
            inner.first = Some(recycled);

            inner.count += 1;
        }
    }

    /// Deallocates recycled chunks until at most `target` remain in the
    /// pool, purging no more than `count` chunks per call. `count` must be
    /// non-zero.
    ///
    /// Returns `false` if the budget ran out before the target was reached.
    #[inline(never)]
    pub fn purge_chunks(&self, target: u32, mut count: u32) -> bool {
        let mut inner = self.inner.lock().unwrap();
        assert!(inner.count >= 0);

        while inner.count as u32 > target {
            unsafe {
                let chunk = inner.first.unwrap();

                inner.first = chunk.as_ref().next;

                // Only default-sized chunks are ever recycled.
                let layout = Layout::from_size_align(
                    DEFAULT_CHUNK_SIZE,
                    CHUNK_ALIGNMENT,
                ).unwrap();
                Global.deallocate(chunk.cast(), layout);
            }

            inner.count -= 1;
            count -= 1;

            if count == 0 {
                return false;
            }
        }

        true
    }

    pub fn purge_all_chunks(&self) {
        self.purge_chunks(0, u32::MAX);
    }
}

impl Drop for ChunkPool {
    fn drop(&mut self) {
        self.purge_all_chunks();
    }
}

// SAFETY: the raw chunk pointers in `ChunkPoolInner` refer to memory owned
// exclusively by the pool, so the inner state can be sent across threads.
unsafe impl Send for ChunkPoolInner {}
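
// A minimal usage sketch (tests we add for illustration; they are not part
// of the original module). A pool is shared by allocators, every item must
// be deallocated before its allocator drops, and recycled chunks can then be
// purged back to the global allocator.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn basic_usage() {
        let pool = Arc::new(ChunkPool::new());
        let mut allocator = BumpAllocator::new(pool.clone());

        let layout = Layout::from_size_align(64, 8).unwrap();
        let alloc = allocator.allocate_item(layout).unwrap();
        assert_eq!(allocator.get_stats().allocations, 1);

        allocator.deallocate_item(alloc.cast(), layout);
        drop(allocator);

        // The allocator's chunk was recycled on drop; release it for real.
        pool.purge_all_chunks();
    }

    #[test]
    fn grow_and_shrink_in_place() {
        let pool = Arc::new(ChunkPool::new());
        let mut allocator = BumpAllocator::new(pool);

        let small = Layout::from_size_align(32, 8).unwrap();
        let large = Layout::from_size_align(128, 8).unwrap();

        let alloc = allocator.allocate_item(small).unwrap();
        // The item is the most recent allocation in the current chunk, so
        // growing happens in place and the address does not change.
        let grown = unsafe { allocator.grow_item(alloc.cast(), small, large).unwrap() };
        assert_eq!(grown.cast::<u8>(), alloc.cast::<u8>());

        let shrunk = unsafe { allocator.shrink_item(grown.cast(), large, small).unwrap() };
        allocator.deallocate_item(shrunk.cast(), small);
    }
}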