gstreamer/memory.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::{
4    fmt,
5    marker::PhantomData,
6    mem,
7    ops::{Bound, Deref, DerefMut, RangeBounds},
8    ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{ffi, AllocationParams, Allocator, MemoryFlags};
14
// Generates the refcounted `Memory` owner type and the borrowed `MemoryRef`
// view around the C `GstMemory` mini-object, registered under its GType.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18
// RAII guard for a mapping of a borrowed `MemoryRef`; the mapping is released
// in `Drop`. `T` is one of the `Readable`/`Writable` marker types and selects
// which slice accessors are available.
pub struct MemoryMap<'a, T> {
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<(&'a MemoryRef, T)>,
}

// RAII guard for a mapping that owns its `Memory`; the mapping and the
// reference are released in `Drop` (or handed back by `into_memory`).
// `T` is one of the `Readable`/`Writable` marker types.
pub struct MappedMemory<T> {
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<(Memory, T)>,
}
28
29impl fmt::Debug for Memory {
30    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
31        MemoryRef::fmt(self, f)
32    }
33}
34
35impl fmt::Debug for MemoryRef {
36    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
37        f.debug_struct("Memory")
38            .field("ptr", &self.as_ptr())
39            .field("allocator", &self.allocator())
40            .field("parent", &self.parent())
41            .field("maxsize", &self.maxsize())
42            .field("align", &self.align())
43            .field("offset", &self.offset())
44            .field("size", &self.size())
45            .field("flags", &self.flags())
46            .finish()
47    }
48}
49
// Uninhabited marker types encoding the map mode of `MemoryMap`/`MappedMemory`
// in the type system: mutable accessors exist only for `Writable` maps.
pub enum Readable {}
pub enum Writable {}
52
53impl Memory {
54    #[inline]
55    pub fn with_size(size: usize) -> Self {
56        assert_initialized_main_thread!();
57        unsafe {
58            from_glib_full(ffi::gst_allocator_alloc(
59                ptr::null_mut(),
60                size,
61                ptr::null_mut(),
62            ))
63        }
64    }
65
66    #[inline]
67    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
68        assert_initialized_main_thread!();
69        unsafe {
70            from_glib_full(ffi::gst_allocator_alloc(
71                ptr::null_mut(),
72                size,
73                params.as_ptr() as *mut _,
74            ))
75        }
76    }
77
78    #[inline]
79    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
80        unsafe {
81            let s = mem::ManuallyDrop::new(self);
82            let mut map_info = mem::MaybeUninit::uninit();
83            let res: bool = from_glib(ffi::gst_memory_map(
84                s.as_mut_ptr(),
85                map_info.as_mut_ptr(),
86                ffi::GST_MAP_READ,
87            ));
88            if res {
89                Ok(MappedMemory {
90                    map_info: map_info.assume_init(),
91                    phantom: PhantomData,
92                })
93            } else {
94                Err(mem::ManuallyDrop::into_inner(s))
95            }
96        }
97    }
98
99    #[inline]
100    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
101        unsafe {
102            let s = mem::ManuallyDrop::new(self);
103            let mut map_info = mem::MaybeUninit::uninit();
104            let res: bool = from_glib(ffi::gst_memory_map(
105                s.as_mut_ptr(),
106                map_info.as_mut_ptr(),
107                ffi::GST_MAP_READWRITE,
108            ));
109            if res {
110                Ok(MappedMemory {
111                    map_info: map_info.assume_init(),
112                    phantom: PhantomData,
113                })
114            } else {
115                Err(mem::ManuallyDrop::into_inner(s))
116            }
117        }
118    }
119}
120
impl MemoryRef {
    /// Returns the allocator this memory was allocated with, if any.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                // Borrow straight out of the struct field; no additional
                // reference is taken.
                Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator))
            }
        }
    }

    /// Returns the parent memory this memory is a sub-region of, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Returns the maximum size of the underlying allocation.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Returns the alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Returns the offset of the visible region inside the allocation.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Returns the size of the visible region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns the memory flags, read from the underlying mini-object.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Translates a Rust range relative to `self.size()` into the
    // `(offset, size)` pair the C API expects.
    //
    // Panics if the range does not lie inside `0..size`.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let size = self.size();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");

        // Cast from usize to isize because that's literally how this works in the
        // implementation and how the upper half of the usize range can be made use of.
        //
        // The implementation works exploiting wraparounds.
        let new_offset = start_offset as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Like `calculate_offset_size`, but relative to `self.maxsize()`: the
    // returned offset is rebased against the current `offset()` so it can
    // address the full allocation, not only the visible region.
    fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let maxsize = self.maxsize();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");

        // Cast from usize to isize because that's literally how this works in the
        // implementation and how the upper half of the usize range can be made use of.
        //
        // The implementation works by exploiting wraparounds.
        let offset = self.offset();

        let new_offset = start_offset.wrapping_sub(offset) as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    /// Returns a copy of the given range of this memory, relative to the
    /// visible region.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Returns a copy of the given range of this memory, relative to the
    /// full allocation (`maxsize`).
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Checks whether `self` and `mem2` are contiguous; on success returns
    /// the offset reported by GStreamer.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    /// Checks whether this memory is of the named memory type.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_mut_ptr(),
                mem_type.to_glib_none().0,
            ))
        }
    }

    /// Maps the memory readable; the mapping is released when the returned
    /// guard is dropped.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    /// Maps the memory readable and writable; the mapping is released when
    /// the returned guard is dropped. Requires `&mut self` so no other
    /// borrow can observe the data while mapped writable.
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    /// Creates a new memory sharing the given range of this memory,
    /// relative to the visible region.
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Creates a new memory sharing the given range of this memory,
    /// relative to the full allocation (`maxsize`).
    #[doc(alias = "gst_memory_share")]
    pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Resizes the visible region to the given range, relative to the
    /// current visible region.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Resizes the visible region to the given range, relative to the full
    /// allocation (`maxsize`).
    #[doc(alias = "gst_memory_resize")]
    pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Returns a helper that hex-dumps the whole memory via
    /// `Display`/`Debug` formatting.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump(&self) -> Dump {
        Dump {
            memory: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Returns a helper that hex-dumps the given range of the memory via
    /// `Display`/`Debug` formatting.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump {
        Dump {
            memory: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
350
351impl<T> MemoryMap<'_, T> {
352    #[doc(alias = "get_size")]
353    #[inline]
354    pub fn size(&self) -> usize {
355        self.map_info.size
356    }
357
358    #[doc(alias = "get_memory")]
359    #[inline]
360    pub fn memory(&self) -> &MemoryRef {
361        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
362    }
363
364    #[inline]
365    pub fn as_slice(&self) -> &[u8] {
366        if self.map_info.size == 0 {
367            return &[];
368        }
369        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
370    }
371}
372
373impl MemoryMap<'_, Writable> {
374    #[inline]
375    pub fn as_mut_slice(&mut self) -> &mut [u8] {
376        if self.map_info.size == 0 {
377            return &mut [];
378        }
379        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
380    }
381}
382
383impl<T> AsRef<[u8]> for MemoryMap<'_, T> {
384    #[inline]
385    fn as_ref(&self) -> &[u8] {
386        self.as_slice()
387    }
388}
389
390impl AsMut<[u8]> for MemoryMap<'_, Writable> {
391    #[inline]
392    fn as_mut(&mut self) -> &mut [u8] {
393        self.as_mut_slice()
394    }
395}
396
397impl<T> Deref for MemoryMap<'_, T> {
398    type Target = [u8];
399
400    #[inline]
401    fn deref(&self) -> &[u8] {
402        self.as_slice()
403    }
404}
405
406impl DerefMut for MemoryMap<'_, Writable> {
407    #[inline]
408    fn deref_mut(&mut self) -> &mut [u8] {
409        self.as_mut_slice()
410    }
411}
412
impl<T> fmt::Debug for MemoryMap<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MemoryMap").field(&self.memory()).finish()
    }
}

// Maps compare by their mapped bytes, not by memory identity.
impl<'a, T> PartialEq for MemoryMap<'a, T> {
    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MemoryMap<'_, T> {}

impl<T> Drop for MemoryMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // Release the mapping established by `map_readable`/`map_writable`.
        unsafe {
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}

// SAFETY(review): relies on GstMemory mappings being usable from any thread
// (GStreamer mini-objects are atomically refcounted) — matches upstream
// gstreamer-rs; confirm against the GstMemory threading documentation.
unsafe impl<T> Send for MemoryMap<'_, T> {}
unsafe impl<T> Sync for MemoryMap<'_, T> {}
438
impl<T> MappedMemory<T> {
    /// Returns the mapped data as an immutable byte slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // An empty mapping may carry a null data pointer, which
        // `slice::from_raw_parts` forbids.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Returns the size in bytes of the mapped region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns a borrowed view of the owned, mapped memory.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    /// Unmaps and returns the underlying memory.
    #[inline]
    pub fn into_memory(self) -> Memory {
        // Skip `Drop` so the reference is not released twice: ownership of
        // the reference held by the mapping moves into `memory` first, then
        // the mapping itself is released.
        let mut s = mem::ManuallyDrop::new(self);
        let memory = unsafe { from_glib_full(s.map_info.memory) };
        unsafe {
            ffi::gst_memory_unmap(s.map_info.memory, &mut s.map_info);
        }

        memory
    }
}
471
472impl MappedMemory<Writable> {
473    #[inline]
474    pub fn as_mut_slice(&mut self) -> &mut [u8] {
475        if self.map_info.size == 0 {
476            return &mut [];
477        }
478        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
479    }
480}
481
482impl<T> AsRef<[u8]> for MappedMemory<T> {
483    #[inline]
484    fn as_ref(&self) -> &[u8] {
485        self.as_slice()
486    }
487}
488
489impl AsMut<[u8]> for MappedMemory<Writable> {
490    #[inline]
491    fn as_mut(&mut self) -> &mut [u8] {
492        self.as_mut_slice()
493    }
494}
495
496impl<T> Deref for MappedMemory<T> {
497    type Target = [u8];
498
499    #[inline]
500    fn deref(&self) -> &[u8] {
501        self.as_slice()
502    }
503}
504
505impl DerefMut for MappedMemory<Writable> {
506    #[inline]
507    fn deref_mut(&mut self) -> &mut [u8] {
508        self.as_mut_slice()
509    }
510}
511
impl<T> Drop for MappedMemory<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Take back ownership of the reference held by this mapping so
            // it is released when `_memory` goes out of scope, after the
            // memory has been unmapped.
            let _memory = Memory::from_glib_full(self.map_info.memory);
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}

impl<T> fmt::Debug for MappedMemory<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedMemory").field(&self.memory()).finish()
    }
}

// Maps compare by their mapped bytes, not by memory identity.
impl<T> PartialEq for MappedMemory<T> {
    fn eq(&self, other: &MappedMemory<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MappedMemory<T> {}

// SAFETY(review): relies on GstMemory mappings being usable from any thread
// (GStreamer mini-objects are atomically refcounted) — matches upstream
// gstreamer-rs; confirm against the GstMemory threading documentation.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
538
// Lazily hex-dumps a range of a memory when formatted with
// `Display` (plain) or `Debug` (annotated); created by
// `MemoryRef::dump`/`dump_range`.
pub struct Dump<'a> {
    memory: &'a MemoryRef,
    start: Bound<usize>,
    end: Bound<usize>,
}
544
545impl Dump<'_> {
546    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
547        let map = self.memory.map_readable().expect("Failed to map memory");
548        let data = map.as_slice();
549
550        let dump = crate::slice::Dump {
551            data,
552            start: self.start,
553            end: self.end,
554        };
555
556        if debug {
557            <crate::slice::Dump as fmt::Debug>::fmt(&dump, f)
558        } else {
559            <crate::slice::Dump as fmt::Display>::fmt(&dump, f)
560        }
561    }
562}
563
564impl fmt::Display for Dump<'_> {
565    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
566        self.fmt(f, false)
567    }
568}
569
570impl fmt::Debug for Dump<'_> {
571    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
572        self.fmt(f, true)
573    }
574}
575
// Implemented by memory wrapper types (e.g. via `memory_object_wrapper!`)
// so a plain `Memory` can be checked and downcast to the concrete type.
//
// Safety: implementors must only report `true` from `check_memory_type`
// when the pointer reinterpretation done by the downcast functions is valid.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    // Returns `true` if `mem` is of this memory type.
    fn check_memory_type(mem: &MemoryRef) -> bool;
}

// Error returned when extracting a typed memory from a `glib::Value` fails:
// either the value is not a memory at all, or it holds a memory of a
// different concrete type.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
590
// `glib::value::ValueTypeChecker` that verifies a `Value` holds a memory of
// concrete type `M` before extraction.
pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
592
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::prelude::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    // Checks that `value` holds a non-`None` memory whose concrete type is `M`.
    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // First make sure the value holds a `Memory` at all, rewrapping the
        // error into this checker's richer error type.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        // Then check the concrete memory type.
        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
624
// Reflexive conversions so `Memory`/`MemoryRef` satisfy the `MemoryType`
// trait bounds that generic code places on memory types.

impl AsRef<MemoryRef> for MemoryRef {
    #[inline]
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}

impl AsMut<MemoryRef> for MemoryRef {
    #[inline]
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}

impl AsRef<Memory> for Memory {
    #[inline]
    fn as_ref(&self) -> &Memory {
        self
    }
}

// SAFETY: every `GstMemory` is trivially a plain `Memory`, so the check
// always passes.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
653
impl Memory {
    // Downcasts this memory into the concrete memory type `M`, returning
    // `self` unchanged on type mismatch.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // The check above guarantees the pointer reinterpretation is valid;
            // ownership of the reference is transferred to the new wrapper.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
667
impl MemoryRef {
    // Returns `true` if this memory is of the concrete memory type `M`.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    // Downcasts this reference to a reference of the concrete memory type
    // `M`, or `None` on type mismatch.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // Valid because the type check passed and the ref types are
            // transparent wrappers around the same FFI struct.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    // Mutable variant of `downcast_memory_ref`.
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
701
/// Declares a typed memory wrapper (`$name` owned, `$ref_name` borrowed) around
/// `$ffi_name`, implements `MemoryType` via `$mem_type_check`, and wires up
/// deref/conversion to the parent memory types plus the `glib::Value` traits.
///
/// The second arm additionally generates `AsRef`/`AsMut` impls for each
/// grandparent type pair so upcasts across multiple levels work.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Downcasts into an even more specific memory type `M`,
            // returning `self` unchanged on mismatch.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to a parent memory type (checked statically
            // via the `Self: AsRef<M>` bound).
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Infallible reference upcast to a parent memory ref type.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            // Mutable variant of `upcast_memory_ref`.
            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        impl $crate::glib::types::StaticType for $name {
            #[inline]
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                // Borrowing out of the GValue's storage is only sound if the
                // wrapper is pointer-sized; guard that invariant explicitly.
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as $crate::glib::prelude::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

        // Can't have SetValue/SetValueOptional impls as otherwise one could use it to get
        // immutable references from a mutable reference without borrowing via the value
    };
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
948
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
#[doc(alias = "GstMemory")]
// A non-owning, borrow-only view of a `GstMemory` used by tracing hooks.
//
// `repr(transparent)` guarantees this newtype has exactly the layout of
// `ffi::GstMemory`; the raw-pointer casts in `from_ptr()`, `as_ptr()` and
// `is_type()` depend on that. Without it the layout of a default-repr tuple
// struct is formally unspecified.
#[repr(transparent)]
pub struct MemoryRefTrace(ffi::GstMemory);
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
impl MemoryRefTrace {
    /// Borrows a `MemoryRefTrace` from a raw `GstMemory` pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, properly aligned, and point to a valid
    /// `GstMemory` that stays alive — and is not mutated through other
    /// aliases — for the whole caller-chosen lifetime `'a`.
    #[inline]
    pub unsafe fn from_ptr<'a>(ptr: *mut ffi::GstMemory) -> &'a MemoryRefTrace {
        assert!(!ptr.is_null());

        // SAFETY: layout matches thanks to `repr(transparent)`; validity and
        // lifetime of the pointee are the caller's obligation (see `# Safety`).
        &*(ptr as *const Self)
    }

    /// Returns a raw pointer to the underlying `GstMemory`.
    #[inline]
    pub fn as_ptr(&self) -> *const ffi::GstMemory {
        self as *const Self as *const ffi::GstMemory
    }

    /// Returns the allocator this memory was allocated from, or `None` if
    /// the `allocator` field is null.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                // SAFETY: relies on `Allocator` wrapping a single
                // `*mut ffi::GstAllocator`, as elsewhere in these bindings,
                // so a reference to the non-null field can be reinterpreted
                // as `&Allocator`.
                Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator))
            }
        }
    }

    /// Returns the parent memory this memory was sub-allocated from, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Returns the maximum size (in bytes) this memory can hold.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Returns the alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Returns the offset (in bytes) of the usable region inside the
    /// allocated block.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Returns the size (in bytes) of the usable region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns the memory flags.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> crate::MemoryFlags {
        // The flags are stored on the embedded `GstMiniObject` header.
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    /// Checks whether this memory is of the given memory type, as reported
    /// by `gst_memory_is_type()`.
    #[doc(alias = "gst_memory_is_type")]
    #[inline]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            // NOTE(review): the C API takes a non-const pointer; presumably
            // it only reads the memory — the cast away from `*const` assumes
            // so.
            from_glib(ffi::gst_memory_is_type(
                self as *const Self as *mut ffi::GstMemory,
                mem_type.to_glib_none().0,
            ))
        }
    }
}
1030
#[cfg(test)]
mod tests {
    #[test]
    fn test_map() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        {
            // Plain readable map of the original memory.
            let map = mem.map_readable().unwrap();
            assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        }

        // Consuming map: the memory is moved into the mapping...
        let mapped = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);

        // ...and can be turned back into a `Memory` and mapped again.
        let mem = mapped.into_memory();
        let remap = mem.map_readable().unwrap();
        assert_eq!(remap.as_slice(), &[1, 2, 3, 4]);
    }

    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let sub = mem.share(1..=2); // [2, 3]
        let sub_sub1 = sub.share(1..=1); // [3]
        let sub_sub2 = sub.share_maxsize(0..4); // [1, 2, 3, 4]

        // Sharing must not disturb the original memory.
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        }

        // First-level share sees the selected sub-range.
        {
            let map = sub.map_readable().unwrap();
            assert_eq!(map.as_slice(), &[2, 3]);
        }

        // Nested share is relative to its parent share.
        {
            let map = sub_sub1.map_readable().unwrap();
            assert_eq!(map.as_slice(), &[3]);
        }

        // `share_maxsize` can reach beyond the parent's visible range,
        // up to the full allocated block.
        {
            let map = sub_sub2.map_readable().unwrap();
            assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        }
    }

    #[test]
    fn test_dump() {
        crate::init().unwrap();

        // Debug formatting renders a hexdump, Display a plain byte list.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        assert_eq!(
            format!("{:?}", mem.dump()),
            "0000:  01 02 03 04                                       ...."
        );
        assert_eq!(format!("{}", mem.dump()), "01 02 03 04");

        // Range-limited dumps, including out-of-range error markers.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        assert_eq!(
            format!("{:?}", mem.dump_range(..)),
            "0000:  01 02 03 04                                       ...."
        );
        assert_eq!(
            format!("{:?}", mem.dump_range(..2)),
            "0000:  01 02                                             .."
        );
        assert_eq!(
            format!("{:?}", mem.dump_range(2..=3)),
            "0002:  03 04                                             .."
        );
        assert_eq!(format!("{:?}", mem.dump_range(..100)), "<end out of range>");
        assert_eq!(
            format!("{:?}", mem.dump_range(90..100)),
            "<start out of range>"
        );

        // More than 16 bytes wraps onto additional hexdump lines.
        let mem = crate::Memory::from_slice(vec![0; 19]);
        assert_eq!(
            format!("{:?}", mem.dump()),
            "0000:  00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................\n\
             0010:  00 00 00                                          ..."
        );
    }

    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        // A `None` memory round-trips through `glib::Value` as `Ok(None)`.
        let none_value = None::<&crate::Memory>.to_value();
        assert!(matches!(none_value.get::<Option<crate::Memory>>(), Ok(None)));

        // A real memory round-trips both as `Option` and directly.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let some_value = mem.to_value();
        assert!(matches!(
            some_value.get::<Option<crate::Memory>>(),
            Ok(Some(_))
        ));
        assert!(some_value.get::<crate::Memory>().is_ok());
    }
}