use allocator_api2::alloc::{Allocator, AllocError, Layout, Global};

use std::{cell::UnsafeCell, ptr::NonNull, sync::{atomic::{AtomicI32, Ordering}, Arc}};

use crate::{bump_allocator::{BumpAllocator, ChunkPool, Stats}, internal_types::{FrameId, FrameVec}};

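/// A cheap-to-clone handle that routes allocations either to its frame's
/// bump allocator or, for the null fallback, to the global allocator.
///
/// In debug builds the handle also carries the frame's id so that it can
/// assert it is not used across frames.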
pub struct FrameAllocator {
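    /// Points to the allocator internals owned by a `FrameMemory`, or is null
    /// for the fallback allocator, in which case every operation defers to
    /// the global allocator.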
    inner: *mut FrameInnerAllocator,

    #[cfg(debug_assertions)]
    frame_id: Option<FrameId>,
}

impl FrameAllocator {
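    /// Creates a `FrameAllocator` that defers to the global allocator instead
    /// of pointing at a frame's memory.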
    pub fn fallback() -> Self {
        FrameAllocator {
            inner: std::ptr::null_mut(),
            #[cfg(debug_assertions)]
            frame_id: None,
        }
    }

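    /// Shorthand for creating an empty `FrameVec` in this allocator.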
    #[inline]
    pub fn new_vec<T>(self) -> FrameVec<T> {
        FrameVec::new_in(self)
    }

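    /// Shorthand for creating a `FrameVec` with the given capacity.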
    #[inline]
    pub fn new_vec_with_capacity<T>(self, cap: usize) -> FrameVec<T> {
        FrameVec::with_capacity_in(cap, self)
    }

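    // The `*_impl` methods below handle the common case where `inner` points
    // to the frame's allocator. The cold `*_fallback` methods handle the null
    // (fallback) case by deferring to the global allocator.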
    #[inline]
    fn allocate_impl(mem: *mut FrameInnerAllocator, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe {
            // Track the allocation so that leaks can be detected at the end
            // of the frame.
            (*mem).live_alloc_count.fetch_add(1, Ordering::Relaxed);
            (*mem).bump.allocate_item(layout)
        }
    }

    #[inline]
    unsafe fn deallocate_impl(mem: *mut FrameInnerAllocator, ptr: NonNull<u8>, layout: Layout) {
        (*mem).live_alloc_count.fetch_sub(1, Ordering::Relaxed);
        (*mem).bump.deallocate_item(ptr, layout)
    }

    #[inline]
    unsafe fn grow_impl(mem: *mut FrameInnerAllocator, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (*mem).bump.grow_item(ptr, old_layout, new_layout)
    }

    #[inline]
    unsafe fn shrink_impl(mem: *mut FrameInnerAllocator, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (*mem).bump.shrink_item(ptr, old_layout, new_layout)
    }

    #[cold]
    #[inline(never)]
    fn allocate_fallback(layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }

    #[cold]
    #[inline(never)]
    fn deallocate_fallback(ptr: NonNull<u8>, layout: Layout) {
        unsafe { Global.deallocate(ptr, layout) }
    }

    #[cold]
    #[inline(never)]
    fn grow_fallback(ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { Global.grow(ptr, old_layout, new_layout) }
    }

    #[cold]
    #[inline(never)]
    fn shrink_fallback(ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { Global.shrink(ptr, old_layout, new_layout) }
    }

    #[cfg(not(debug_assertions))]
    fn check_frame_id(&self) {}

    #[cfg(debug_assertions)]
    fn check_frame_id(&self) {
        if self.inner.is_null() {
            return;
        }
        unsafe {
            assert_eq!(self.frame_id, (*self.inner).frame_id);
        }
    }
}

impl Clone for FrameAllocator {
    fn clone(&self) -> Self {
        unsafe {
            if let Some(inner) = self.inner.as_mut() {
                // Every handle increments `references_dropped` when it is
                // dropped, but only handles obtained from
                // `FrameMemory::allocator` are counted in
                // `references_created`. Decrementing here keeps the two
                // counters balanced for clones.
                inner.references_dropped.fetch_sub(1, Ordering::Relaxed);
            }
        }

        FrameAllocator {
            inner: self.inner,
            #[cfg(debug_assertions)]
            frame_id: self.frame_id,
        }
    }
}

impl Drop for FrameAllocator {
    fn drop(&mut self) {
        unsafe {
            if let Some(inner) = self.inner.as_mut() {
                inner.references_dropped.fetch_add(1, Ordering::Release);
            }
        }
    }
}

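// FrameAllocator handles can be sent to other threads while the frame is being
// built; the shared counters they touch are atomic.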
unsafe impl Send for FrameAllocator {}

unsafe impl Allocator for FrameAllocator {
    #[inline(never)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if self.inner.is_null() {
            return FrameAllocator::allocate_fallback(layout);
        }

        self.check_frame_id();

        FrameAllocator::allocate_impl(self.inner, layout)
    }

    #[inline(never)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if self.inner.is_null() {
            return FrameAllocator::deallocate_fallback(ptr, layout);
        }

        self.check_frame_id();

        FrameAllocator::deallocate_impl(self.inner, ptr, layout)
    }

    #[inline(never)]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.inner.is_null() {
            return FrameAllocator::grow_fallback(ptr, old_layout, new_layout);
        }

        self.check_frame_id();

        FrameAllocator::grow_impl(self.inner, ptr, old_layout, new_layout)
    }

    #[inline(never)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.inner.is_null() {
            // `Allocator::grow` requires the new layout to be at least as
            // large as the old one, so the fallback must use `shrink` here.
            return FrameAllocator::shrink_fallback(ptr, old_layout, new_layout);
        }

        self.check_frame_id();

        FrameAllocator::shrink_impl(self.inner, ptr, old_layout, new_layout)
    }
}

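// Capture/replay: the allocator itself has no serializable state. Captures
// write a unit value and replay restores a fallback allocator.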
#[cfg(feature = "capture")]
impl serde::Serialize for FrameAllocator {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: serde::Serializer
    {
        ().serialize(serializer)
    }
}

#[cfg(feature = "replay")]
impl<'de> serde::Deserialize<'de> for FrameAllocator {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let _ = <() as serde::Deserialize>::deserialize(deserializer)?;
        Ok(FrameAllocator::fallback())
    }
}

#[cfg(feature = "replay")]
// Replay code needs a default value, and the fallback allocator is the only
// one that makes sense outside of a live frame.
impl Default for FrameAllocator {
    fn default() -> Self {
        Self::fallback()
    }
}

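/// Owns the memory of a single frame and hands out `FrameAllocator` handles
/// pointing into it.
///
/// A minimal usage sketch (illustrative only; assumes a `ChunkPool` and a
/// `FrameId` are provided by the surrounding code):
///
/// ```ignore
/// let mut memory = FrameMemory::new(pool);
/// memory.begin_frame(frame_id);
///
/// let mut v = memory.new_vec::<u32>();
/// v.push(1);
/// drop(v);
///
/// // Every allocation and every allocator handle is gone; the memory can be
/// // reused for the next frame.
/// memory.assert_memory_reusable();
/// ```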
pub struct FrameMemory {
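    /// The frame's allocator internals, or `None` for fallback memory that
    /// defers to the global allocator.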
    allocator: Option<NonNull<FrameInnerAllocator>>,
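    /// The number of `FrameAllocator` handles created via `allocator()`.
    ///
    /// Compared against `FrameInnerAllocator::references_dropped` to detect
    /// handles that outlive the frame. Non-atomic: `FrameMemory` is not
    /// `Sync`, so the counter is only touched by one thread at a time.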
    references_created: UnsafeCell<i32>,
}

impl FrameMemory {
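    /// Creates a `FrameMemory` that defers to the global allocator.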
    #[allow(unused)]
    pub fn fallback() -> Self {
        FrameMemory {
            allocator: None,
            references_created: UnsafeCell::new(0)
        }
    }

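    /// Creates the memory for a frame, drawing chunks from the provided pool.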
    pub fn new(pool: Arc<ChunkPool>) -> Self {
        // Allocate the inner allocator manually so that `FrameAllocator`
        // handles can point to it via a stable address.
        let layout = Layout::from_size_align(
            std::mem::size_of::<FrameInnerAllocator>(),
            std::mem::align_of::<FrameInnerAllocator>(),
        ).unwrap();

        let uninit_u8 = Global.allocate(layout).unwrap();

        unsafe {
            let allocator: NonNull<FrameInnerAllocator> = uninit_u8.cast();
            allocator.as_ptr().write(FrameInnerAllocator {
                bump: BumpAllocator::new(pool),

                live_alloc_count: AtomicI32::new(0),
                references_dropped: AtomicI32::new(0),
                #[cfg(debug_assertions)]
                frame_id: None,
            });

            FrameMemory {
                allocator: Some(allocator),
                references_created: UnsafeCell::new(0),
            }
        }
    }

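    /// Creates a `FrameAllocator` handle pointing into this frame's memory
    /// (or a fallback handle if this is fallback memory).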
    pub fn allocator(&self) -> FrameAllocator {
        if let Some(alloc) = &self.allocator {
            // Count the handle being handed out. `FrameMemory` is not `Sync`,
            // so this non-atomic counter cannot be mutated concurrently.
            unsafe { *self.references_created.get() += 1 };

            return FrameAllocator {
                inner: alloc.as_ptr(),
                #[cfg(debug_assertions)]
                frame_id: unsafe { alloc.as_ref().frame_id },
            };
        }

        FrameAllocator::fallback()
    }

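    /// Shorthand for creating an empty `FrameVec` in this frame's memory.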
    #[inline]
    pub fn new_vec<T>(&self) -> FrameVec<T> {
        FrameVec::new_in(self.allocator())
    }

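    /// Shorthand for creating a `FrameVec` with the given capacity.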
    #[inline]
    pub fn new_vec_with_capacity<T>(&self, cap: usize) -> FrameVec<T> {
        FrameVec::with_capacity_in(cap, self.allocator())
    }

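    /// Asserts that no allocation and no `FrameAllocator` handle from this
    /// frame is still alive.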
    pub fn assert_memory_reusable(&self) {
        if let Some(ptr) = self.allocator {
            unsafe {
                // Every allocation made for the frame must have been freed.
                assert_eq!(ptr.as_ref().live_alloc_count.load(Ordering::Acquire), 0);

                // Every handle, including clones, must have been dropped.
                let references_created = *self.references_created.get();
                assert_eq!(ptr.as_ref().references_dropped.load(Ordering::Acquire), references_created);
            }
        }
    }

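    /// Prepares the memory for a new frame, resetting the reference counters
    /// and allocation stats.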
    pub fn begin_frame(&mut self, id: FrameId) {
        // Make sure nothing from the previous frame still uses this memory.
        self.assert_memory_reusable();

        if let Some(mut ptr) = self.allocator {
            unsafe {
                let allocator = ptr.as_mut();
                allocator.references_dropped.store(0, Ordering::Release);
                self.references_created = UnsafeCell::new(0);

                allocator.bump.reset_stats();

                allocator.set_frame_id(id);
            }
        }
    }

    #[allow(unused)]
    pub fn get_stats(&self) -> Stats {
        unsafe {
            self.allocator.map(|ptr| (*ptr.as_ptr()).bump.get_stats()).unwrap_or_else(Stats::default)
        }
    }
}

impl Drop for FrameMemory {
    fn drop(&mut self) {
        self.assert_memory_reusable();

        let layout = Layout::new::<FrameInnerAllocator>();

        unsafe {
            if let Some(ptr) = &mut self.allocator {
                std::ptr::drop_in_place(ptr.as_ptr());
                Global.deallocate(ptr.cast(), layout);
            }
        }
    }
}

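// FrameMemory moves between threads along with the frame it owns. It contains
// an UnsafeCell, so it is Send but not Sync.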
unsafe impl Send for FrameMemory {}

#[cfg(feature = "capture")]
impl serde::Serialize for FrameMemory {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: serde::Serializer
    {
        ().serialize(serializer)
    }
}

#[cfg(feature = "replay")]
impl<'de> serde::Deserialize<'de> for FrameMemory {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let _ = <() as serde::Deserialize>::deserialize(deserializer)?;
        Ok(FrameMemory::fallback())
    }
}

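/// The allocator internals shared, via a stable heap address, by a
/// `FrameMemory` and all of the `FrameAllocator` handles it hands out.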
struct FrameInnerAllocator {
    bump: BumpAllocator,

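    /// The number of currently live allocations served by the bump allocator.
    ///
    /// Must be zero when the frame's memory is reused or dropped.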
    live_alloc_count: AtomicI32,
    /// The number of dropped `FrameAllocator` handles minus the number of
    /// handles created via `Clone`. At the end of a frame this must match
    /// `FrameMemory::references_created`.
    references_dropped: AtomicI32,
    /// Lets handles assert, in debug builds, that they are not used across
    /// frames.
    #[cfg(debug_assertions)]
    frame_id: Option<FrameId>,
}

impl FrameInnerAllocator {
    #[cfg(not(debug_assertions))]
    fn set_frame_id(&mut self, _: FrameId) {}

    #[cfg(debug_assertions)]
    fn set_frame_id(&mut self, id: FrameId) {
        self.frame_id = Some(id);
    }
}