//! wgpu_hal/noop/buffer.rs
1use alloc::{sync::Arc, vec::Vec};
2use core::{cell::UnsafeCell, ops::Range, ptr};
3
#[derive(Clone, Debug)]
pub struct Buffer {
    /// Backing storage for the buffer's contents.
    ///
    /// This data is potentially accessed mutably in arbitrary non-overlapping slices,
    /// so we must store it in `UnsafeCell` to avoid making any too-strong no-aliasing claims.
    /// Cloning the `Arc` is how mapped ranges share the same allocation.
    storage: Arc<UnsafeCell<[u8]>>,

    /// Size of the allocation, in bytes.
    ///
    /// This is redundant with `storage.get().len()`, but that method is not
    /// available until our MSRV is 1.79 or greater.
    size: usize,
}
16
/// SAFETY:
/// This shared mutable data will not be accessed in a way which causes data races;
/// the obligation to do so is on the caller of the HAL API.
/// For safe code, `wgpu-core` validation manages appropriate access.
/// (Without these impls, the `UnsafeCell` in `storage` would make `Buffer`
/// neither `Send` nor `Sync`.)
unsafe impl Send for Buffer {}
unsafe impl Sync for Buffer {}
23
24impl Buffer {
25 pub(super) fn new(desc: &crate::BufferDescriptor) -> Result<Self, crate::DeviceError> {
26 let &crate::BufferDescriptor {
27 label: _,
28 size,
29 usage: _,
30 memory_flags: _,
31 } = desc;
32
33 let size = usize::try_from(size).map_err(|_| crate::DeviceError::OutOfMemory)?;
34
35 let mut vector: Vec<u8> = Vec::new();
36 vector
37 .try_reserve_exact(size)
38 .map_err(|_| crate::DeviceError::OutOfMemory)?;
39 vector.resize(size, 0);
40 let storage: Arc<[u8]> = Arc::from(vector);
41 debug_assert_eq!(storage.len(), size);
42
43 // SAFETY: `UnsafeCell<[u8]>` and `[u8]` have the same layout.
44 // This is just adding a wrapper type without changing any layout,
45 // because there is not currently a safe language/`std` way to accomplish this.
46 let storage: Arc<UnsafeCell<[u8]>> =
47 unsafe { Arc::from_raw(Arc::into_raw(storage) as *mut UnsafeCell<[u8]>) };
48
49 Ok(Buffer { storage, size })
50 }
51
52 /// Returns a pointer to the memory owned by this buffer within the given `range`.
53 ///
54 /// This may be used to create any number of simultaneous pointers;
55 /// aliasing is only a concern when actually reading, writing, or converting the pointer
56 /// to a reference.
57 pub(super) fn get_slice_ptr(&self, range: crate::MemoryRange) -> *mut [u8] {
58 let base_ptr = self.storage.get();
59 let range = range_to_usize(range, self.size);
60
61 // We must obtain a slice pointer without ever creating a slice reference
62 // that could alias with another slice.
63 ptr::slice_from_raw_parts_mut(
64 // SAFETY: `range_to_usize` bounds checks this addition.
65 unsafe { base_ptr.cast::<u8>().add(range.start) },
66 range.len(),
67 )
68 }
69}
70
71/// Convert a [`crate::MemoryRange`] to `Range<usize>` and bounds check it.
72fn range_to_usize(range: crate::MemoryRange, upper_bound: usize) -> Range<usize> {
73 // Note: these assertions should be impossible to trigger from safe code.
74 // We're doing them anyway since this entire backend is for testing
75 // (except for when it is an unused placeholder)
76 let start = usize::try_from(range.start).expect("range too large");
77 let end = usize::try_from(range.end).expect("range too large");
78 assert!(start <= end && end <= upper_bound, "range out of bounds");
79 start..end
80}