1use std::ptr;
4
5use glib::{prelude::*, subclass::prelude::*, translate::*};
6
7use super::prelude::*;
8use crate::{AllocationParams, Allocator, ffi};
9
/// Trait to implement in [`Allocator`] subclasses.
///
/// # Safety
///
/// `alloc` must return either `NULL` or a valid, newly allocated
/// `GstMemory`, and `free` must only be called with a memory previously
/// produced by the same allocator.
pub unsafe trait AllocatorImpl:
    GstObjectImpl + ObjectSubclass<Type: IsA<Allocator>>
{
    /// Allocates a new memory of at least `size` bytes honouring `params`.
    ///
    /// The default implementation chains up to the parent class.
    unsafe fn alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
        unsafe { self.parent_alloc(size, params) }
    }

    /// Frees `memory`.
    ///
    /// The default implementation chains up to the parent class.
    unsafe fn free(&self, memory: *mut ffi::GstMemory) {
        unsafe { self.parent_free(memory) }
    }
}
21
/// Extension trait giving [`AllocatorImpl`] implementations access to the
/// parent class' vfuncs; implemented automatically for every `AllocatorImpl`.
pub trait AllocatorImplExt: AllocatorImpl {
    /// Chains up to the parent class' `alloc` vfunc.
    ///
    /// Returns `NULL` when the parent class does not implement `alloc`.
    unsafe fn parent_alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
        unsafe {
            let data = Self::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GstAllocatorClass;

            if let Some(f) = (*parent_class).alloc {
                f(
                    self.obj().unsafe_cast_ref::<Allocator>().to_glib_none().0,
                    size,
                    // The C vfunc takes a mutable pointer even though the
                    // params are conceptually read-only.
                    mut_override(params.to_glib_none().0),
                )
            } else {
                ptr::null_mut()
            }
        }
    }

    /// Chains up to the parent class' `free` vfunc.
    ///
    /// Does nothing when the parent class does not implement `free`.
    unsafe fn parent_free(&self, memory: *mut ffi::GstMemory) {
        unsafe {
            let data = Self::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GstAllocatorClass;

            if let Some(f) = (*parent_class).free {
                f(
                    self.obj().unsafe_cast_ref::<Allocator>().to_glib_none().0,
                    memory,
                )
            }
        }
    }
}
54
// Blanket implementation so every `AllocatorImpl` can chain up via the
// `parent_*` methods.
impl<T: AllocatorImpl> AllocatorImplExt for T {}
56
unsafe impl<T: AllocatorImpl> IsSubclassable<T> for Allocator {
    // Overrides the `GstAllocatorClass` vfuncs with C trampolines that
    // dispatch into the Rust implementation.
    fn class_init(klass: &mut glib::Class<Self>) {
        Self::parent_class_init::<T>(klass);
        let klass = klass.as_mut();
        klass.alloc = Some(alloc::<T>);
        klass.free = Some(free::<T>);
    }
}
65
/// C trampoline for the `alloc` vfunc: recovers the Rust instance from the
/// C pointer and forwards to `AllocatorImpl::alloc`.
unsafe extern "C" fn alloc<T: AllocatorImpl>(
    ptr: *mut ffi::GstAllocator,
    size: usize,
    params: *mut ffi::GstAllocationParams,
) -> *mut ffi::GstMemory {
    unsafe {
        let instance = &*(ptr as *mut T::Instance);
        let imp = instance.imp();

        // Cast assumes `AllocationParams` is layout-compatible with
        // `GstAllocationParams` (the usual glib wrapper convention).
        let params = &*(params as *mut AllocationParams);

        imp.alloc(size, params)
    }
}
80
/// C trampoline for the `free` vfunc: recovers the Rust instance from the
/// C pointer and forwards to `AllocatorImpl::free`.
unsafe extern "C" fn free<T: AllocatorImpl>(
    ptr: *mut ffi::GstAllocator,
    memory: *mut ffi::GstMemory,
) {
    unsafe {
        // A memory must only reach `free` once its last reference is gone.
        debug_assert_eq!((*memory).mini_object.refcount, 0);

        let instance = &*(ptr as *mut T::Instance);
        let imp = instance.imp();

        imp.free(memory);
    }
}
94
95#[cfg(test)]
96mod tests {
97 use super::*;
98 use crate::prelude::*;
99
100 pub mod imp {
105 use glib::translate::*;
106 use std::alloc;
107
108 use super::*;
109
        // Custom memory record: the `GstMemory` header is embedded first so
        // `*mut GstMemory` can be cast to `*mut Memory` and back.
        #[repr(C)]
        struct Memory {
            mem: ffi::GstMemory,
            // Layout used to allocate `data`; only meaningful for root
            // memories (parent == NULL).
            layout: alloc::Layout,
            // Backing storage; NULL for shared sub-memories.
            data: *mut u8,
        }
116
        // Layout of the `Memory` header itself (not of the data it points to).
        const LAYOUT: alloc::Layout = alloc::Layout::new::<Memory>();
118
        // Minimal allocator subclass used by the tests below.
        #[derive(Default)]
        pub struct TestAllocator;

        impl ObjectImpl for TestAllocator {}
        impl GstObjectImpl for TestAllocator {}
124 unsafe impl AllocatorImpl for TestAllocator {
125 unsafe fn alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
126 unsafe {
127 let Some(maxsize) = size
128 .checked_add(params.prefix())
129 .and_then(|s| s.checked_add(params.padding()))
130 else {
131 return ptr::null_mut();
132 };
133
134 let align = params.align() | crate::Memory::default_alignment();
135 let Ok(layout) = alloc::Layout::from_size_align(maxsize, align + 1) else {
136 return ptr::null_mut();
137 };
138
139 let mem = alloc::alloc(LAYOUT) as *mut Memory;
140
141 let data = alloc::alloc(layout);
142
143 if params.prefix() > 0
144 && params.flags().contains(crate::MemoryFlags::ZERO_PREFIXED)
145 {
146 ptr::write_bytes(data, 0, params.prefix());
147 }
148
149 if params.flags().contains(crate::MemoryFlags::ZERO_PADDED) {
150 ptr::write_bytes(data.add(params.prefix()).add(size), 0, params.padding());
151 }
152
153 ffi::gst_memory_init(
154 ptr::addr_of_mut!((*mem).mem),
155 params.flags().into_glib(),
156 self.obj().as_ptr() as *mut ffi::GstAllocator,
157 ptr::null_mut(),
158 maxsize,
159 params.align(),
160 params.prefix(),
161 size,
162 );
163 ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
164 ptr::write(ptr::addr_of_mut!((*mem).data), data);
165
166 mem as *mut ffi::GstMemory
167 }
168 }
169
170 unsafe fn free(&self, mem: *mut ffi::GstMemory) {
171 unsafe {
172 let mem = mem as *mut Memory;
173
174 if (*mem).mem.parent.is_null() {
175 alloc::dealloc((*mem).data, (*mem).layout);
176 ptr::drop_in_place(ptr::addr_of_mut!((*mem).layout));
177 }
178 alloc::dealloc(mem as *mut u8, LAYOUT);
179 }
180 }
181 }
182
        #[glib::object_subclass]
        impl ObjectSubclass for TestAllocator {
            const NAME: &'static str = "TestAllocator";
            type Type = super::TestAllocator;
            type ParentType = Allocator;

            // Per-instance initialization: wires up the per-memory function
            // pointers of the underlying `GstAllocator` struct.
            fn instance_init(obj: &glib::subclass::InitializingObject<Self>) {
                // NUL-terminated C string naming the memory type.
                static ALLOCATOR_TYPE: &[u8] = b"TestAllocatorMemory\0";

                unsafe {
                    let allocator = obj.as_ptr() as *mut ffi::GstAllocator;

                    (*allocator).mem_type = ALLOCATOR_TYPE.as_ptr() as *const _;
                    (*allocator).mem_map = Some(TestAllocator::mem_map);
                    (*allocator).mem_unmap = Some(TestAllocator::mem_unmap);
                    (*allocator).mem_share = Some(TestAllocator::mem_share);
                    (*allocator).mem_is_span = Some(TestAllocator::mem_is_span);
                }
            }
        }
205
        impl TestAllocator {
            /// `mem_map` vfunc: returns a pointer to the start of the backing
            /// data. Shared sub-memories resolve to their parent's data since
            /// they own no storage of their own.
            unsafe extern "C" fn mem_map(
                mem: *mut ffi::GstMemory,
                _maxsize: usize,
                _flags: ffi::GstMapFlags,
            ) -> glib::ffi::gpointer {
                unsafe {
                    let mem = mem as *mut Memory;

                    let parent = if (*mem).mem.parent.is_null() {
                        mem
                    } else {
                        (*mem).mem.parent as *mut Memory
                    };

                    (*parent).data as *mut _
                }
            }

            /// `mem_unmap` vfunc: nothing to do, the backing data stays valid
            /// for the lifetime of the memory.
            unsafe extern "C" fn mem_unmap(_mem: *mut ffi::GstMemory) {}

            /// `mem_share` vfunc: creates a read-only sub-memory referencing
            /// the root memory's data instead of copying it.
            ///
            /// A `size` of -1 (== `usize::MAX` after the cast) means "to the
            /// end of the source memory".
            unsafe extern "C" fn mem_share(
                mem: *mut ffi::GstMemory,
                offset: isize,
                size: isize,
            ) -> *mut ffi::GstMemory {
                unsafe {
                    let mem = mem as *mut Memory;

                    // Always parent on the root memory so chains of shares
                    // stay one level deep.
                    let parent = if (*mem).mem.parent.is_null() {
                        mem
                    } else {
                        (*mem).mem.parent as *mut Memory
                    };

                    let offset = offset as usize;
                    let mut size = size as usize;

                    // New offset relative to the backing data.
                    let new_offset = (*mem).mem.offset.wrapping_add(offset);
                    debug_assert!(new_offset < (*mem).mem.maxsize);

                    if size == usize::MAX {
                        size = (*mem).mem.size.wrapping_sub(offset);
                    }
                    debug_assert!(new_offset <= usize::MAX - size);
                    debug_assert!(new_offset + size <= (*mem).mem.maxsize);

                    // NOTE(review): this allocation is not checked for NULL
                    // before `gst_memory_init` writes through it.
                    let sub = alloc::alloc(LAYOUT) as *mut Memory;

                    ffi::gst_memory_init(
                        sub as *mut ffi::GstMemory,
                        (*mem).mem.mini_object.flags | ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
                        (*mem).mem.allocator,
                        parent as *mut ffi::GstMemory,
                        (*mem).mem.maxsize,
                        (*mem).mem.align,
                        new_offset,
                        size,
                    );
                    // Sub-memories own no data; `free` checks `parent` and
                    // never reads `data`/`layout` for them.
                    ptr::write(ptr::addr_of_mut!((*sub).data), ptr::null_mut());

                    sub as *mut ffi::GstMemory
                }
            }

            /// `mem_is_span` vfunc: two memories form a span when they share
            /// the same parent and the first ends exactly where the second
            /// begins.
            unsafe extern "C" fn mem_is_span(
                mem1: *mut ffi::GstMemory,
                mem2: *mut ffi::GstMemory,
                offset: *mut usize,
            ) -> glib::ffi::gboolean {
                unsafe {
                    let mem1 = mem1 as *mut Memory;
                    let mem2 = mem2 as *mut Memory;

                    let parent1 = (*mem1).mem.parent as *mut Memory;
                    let parent2 = (*mem2).mem.parent as *mut Memory;
                    debug_assert_eq!(parent1, parent2);

                    if !offset.is_null() {
                        // Offset of `mem1` relative to its parent's start.
                        *offset = (*mem1).mem.offset.wrapping_sub((*parent1).mem.offset);
                    }

                    let is_span = ((*mem1).mem.offset + ((*mem1).mem.size)) == (*mem2).mem.offset;

                    is_span.into_glib()
                }
            }
        }
306 }
307
    glib::wrapper! {
        // Public wrapper type around the `imp::TestAllocator` subclass.
        pub struct TestAllocator(ObjectSubclass<imp::TestAllocator>) @extends Allocator, crate::Object;
    }
311
    impl Default for TestAllocator {
        // Constructs a new instance through the GObject machinery.
        fn default() -> Self {
            glib::Object::new()
        }
    }
317
318 #[test]
319 fn test_allocator_registration() {
320 crate::init().unwrap();
321
322 const TEST_ALLOCATOR_NAME: &str = "TestAllocator";
323
324 let allocator = TestAllocator::default();
325 Allocator::register(TEST_ALLOCATOR_NAME, allocator);
326
327 let allocator = Allocator::find(Some(TEST_ALLOCATOR_NAME));
328
329 assert!(allocator.is_some());
330 }
331
332 #[test]
333 fn test_allocator_alloc() {
334 crate::init().unwrap();
335
336 const SIZE: usize = 1024;
337
338 let allocator = TestAllocator::default();
339
340 let memory = allocator.alloc(SIZE, None).unwrap();
341
342 assert_eq!(memory.size(), SIZE);
343 }
344
    #[test]
    fn test_allocator_mem_ops() {
        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        let allocator = TestAllocator::default();

        let mut memory = allocator.alloc(data.len(), None).unwrap();
        assert_eq!(memory.size(), data.len());

        // Fill the memory through a writable map.
        {
            let memory = memory.get_mut().unwrap();
            let mut map = memory.map_writable().unwrap();
            map.copy_from_slice(&data);
        }

        // A copy is independent (no parent) but has identical contents.
        let copy = memory.copy();
        assert!(copy.parent().is_none());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = copy.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // A full share references the original memory as its parent.
        let share = memory.share(..);
        assert_eq!(share.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = share.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // Partial share of the first two bytes.
        let sub1 = memory.share(..2);
        assert_eq!(sub1.size(), 2);
        assert_eq!(sub1.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub1.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[..2]);
        }

        // Partial share of the remaining six bytes.
        let sub2 = memory.share(2..);
        assert_eq!(sub2.size(), 6);
        assert_eq!(sub2.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub2.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..]);
        }

        // sub1 ends exactly where sub2 begins, so they form a span.
        let offset = sub1.is_span(&sub2).unwrap();
        assert_eq!(offset, 0);

        // Sharing a share must still parent on the root memory, not on sub2.
        let sub3 = sub2.share(2..);
        assert_eq!(sub3.size(), 4);
        assert_eq!(sub3.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub3.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[4..]);
        }
    }
410}