gstreamer/
memory.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::{
4    fmt,
5    marker::PhantomData,
6    mem,
7    ops::{Bound, Deref, DerefMut, RangeBounds},
8    ptr, slice,
9};
10
11use glib::{prelude::*, translate::*};
12
13use crate::{AllocationParams, Allocator, MemoryFlags, ffi};
14
// Generates the owned `Memory` wrapper and borrowed `MemoryRef` wrapper around
// `ffi::GstMemory`, with the GType supplied by the closure.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18
// RAII guard for a mapped `MemoryRef`. Holds the FFI map info for the lifetime
// of the borrow; `T` is the `Readable`/`Writable` marker selecting the API
// surface. The mapping is released in `Drop`.
pub struct MemoryMap<'a, T> {
    // Raw map info as filled in by `gst_memory_map`.
    map_info: ffi::GstMapInfo,
    // Ties the guard to the borrowed memory and the access marker.
    phantom: PhantomData<(&'a MemoryRef, T)>,
}
23
// Like `MemoryMap`, but owns the underlying `Memory` (created by the
// `Memory::into_mapped_memory_*` methods). Unmapped and unreffed in `Drop`,
// or convertible back via `into_memory`.
pub struct MappedMemory<T> {
    // Raw map info as filled in by `gst_memory_map`.
    map_info: ffi::GstMapInfo,
    // Marks ownership of the `Memory` and carries the access marker `T`.
    phantom: PhantomData<(Memory, T)>,
}
28
29impl fmt::Debug for Memory {
30    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
31        MemoryRef::fmt(self, f)
32    }
33}
34
impl fmt::Debug for MemoryRef {
    // Struct-style dump of the raw pointer plus all accessor values.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Memory")
            .field("ptr", &self.as_ptr())
            .field("allocator", &self.allocator())
            .field("parent", &self.parent())
            .field("maxsize", &self.maxsize())
            .field("align", &self.align())
            .field("offset", &self.offset())
            .field("size", &self.size())
            .field("flags", &self.flags())
            .finish()
    }
}
49
/// Marker type for read-only memory maps. Uninhabited; used only as a type
/// parameter of `MemoryMap`/`MappedMemory`.
pub enum Readable {}
/// Marker type for read-write memory maps. Uninhabited; gates the mutable API.
pub enum Writable {}
52
impl Memory {
    /// Returns GStreamer's global memory alignment value.
    ///
    /// On non-Windows targets this reads the `gst_memory_alignment` variable
    /// exported by libgstreamer; on Windows a hard-coded fallback is returned
    /// (see the FIXME below).
    #[inline]
    pub fn default_alignment() -> usize {
        #[cfg(not(windows))]
        {
            unsafe extern "C" {
                static gst_memory_alignment: usize;
            }
            // SAFETY: reading an immutable C global exported by libgstreamer.
            unsafe { gst_memory_alignment }
        }
        #[cfg(windows)]
        {
            // FIXME: Windows is special and accessing variables exported from
            // shared libraries need different handling than from static libraries
            // so just return the default MEMORY_ALIGNMENT_MALLOC here until someone
            // figures out how to do this properly on Windows.
            7
        }
    }

    /// Allocates `size` bytes with the default allocator and default
    /// allocation parameters (both passed as NULL to the C API).
    #[inline]
    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
        }
    }

    /// Allocates `size` bytes with the default allocator and the given
    /// allocation `params`.
    #[inline]
    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    /// Consumes the memory and maps it readable.
    ///
    /// On failure the original `Memory` is handed back in `Err`. `ManuallyDrop`
    /// keeps the reference alive in both branches: on success ownership moves
    /// into the `MappedMemory`, on failure it is recovered via `into_inner`.
    #[inline]
    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    // SAFETY: gst_memory_map returned TRUE, so map_info is
                    // fully initialized.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }

    /// Consumes the memory and maps it read-write; mirrors
    /// `into_mapped_memory_readable` but with `GST_MAP_READWRITE`.
    #[inline]
    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }
}
139
impl MemoryRef {
    /// Returns the allocator this memory was allocated from, or `None` if the
    /// C struct's allocator pointer is NULL.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                Some(Allocator::from_glib_ptr_borrow(&self.0.allocator))
            }
        }
    }

    /// Returns the parent memory this one shares data with, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Maximum size (in bytes) this memory can be resized to.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Alignment of the memory, as stored in the C struct.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Offset of the visible region into the backing allocation.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Size (in bytes) of the visible region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns `(size, offset, maxsize)` in one call via the C API.
    #[doc(alias = "gst_memory_get_sizes")]
    #[inline]
    pub fn sizes(&self) -> (usize, usize, usize) {
        unsafe {
            let mut offset = 0;
            let mut maxsize = 0;
            let total_size =
                ffi::gst_memory_get_sizes(mut_override(self.as_ptr()), &mut offset, &mut maxsize);

            (total_size, offset, maxsize)
        }
    }

    /// Memory flags stored on the underlying mini-object.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Converts a Rust range over the *visible* region (0..size()) into the
    // (offset, size) pair expected by the C copy/share/resize functions.
    // Panics if the range falls outside the visible region.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let size = self.size();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");

        // Cast from usize to isize because that's literally how this works in the
        // implementation and how the upper half of the usize range can be made use of.
        //
        // The implementation works exploiting wraparounds.
        let new_offset = start_offset as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Like `calculate_offset_size`, but the range is relative to the whole
    // backing allocation (0..maxsize()); the resulting offset is expressed
    // relative to the current `offset()` and may wrap negative on purpose.
    fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let maxsize = self.maxsize();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");

        // Cast from usize to isize because that's literally how this works in the
        // implementation and how the upper half of the usize range can be made use of.
        //
        // The implementation works by exploiting wraparounds.
        let offset = self.offset();

        let new_offset = start_offset.wrapping_sub(offset) as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    /// Copies the given sub-range of the visible region into a new `Memory`.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Like `copy_range`, but the range addresses the whole backing
    /// allocation (`maxsize`) rather than the visible region.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// If `self` and `mem2` are contiguous in the same parent allocation,
    /// returns the offset of `self` within the parent; otherwise `None`.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    /// Checks whether this memory is of the given registered memory type.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: impl IntoGStr) -> bool {
        unsafe {
            mem_type.run_with_gstr(|mem_type| {
                from_glib(ffi::gst_memory_is_type(
                    self.as_mut_ptr(),
                    mem_type.as_ptr(),
                ))
            })
        }
    }

    /// Maps the memory readable; the returned guard borrows `self`.
    ///
    /// # Errors
    /// Fails if the underlying `gst_memory_map` call fails.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<'_, Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    // SAFETY: map succeeded, so map_info is initialized.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    /// Maps the memory read-write; requires `&mut self` so the writable
    /// mapping is exclusive on the Rust side.
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<'_, Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    /// Creates a new `Memory` sharing (not copying) the given sub-range of
    /// the visible region.
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Like `share`, but the range addresses the whole backing allocation.
    #[doc(alias = "gst_memory_share")]
    pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Resizes the visible region to the given sub-range of the current one.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size(range);
        // The isize -> usize cast is intentional; see calculate_offset_size.
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Like `resize`, but the range addresses the whole backing allocation.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Returns a lazy hexdump formatter over the whole visible region.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump(&self) -> Dump<'_> {
        Dump {
            memory: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Returns a lazy hexdump formatter over the given sub-range.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            memory: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
384
impl<T> MemoryMap<'_, T> {
    /// Number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// The memory this map points into.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    /// The mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Empty maps may have a NULL data pointer, which
        // `slice::from_raw_parts` forbids — return a static empty slice.
        if self.map_info.size == 0 {
            return &[];
        }
        // SAFETY: data/size come from a successful gst_memory_map and stay
        // valid until this guard is dropped.
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }
}
406
impl MemoryMap<'_, Writable> {
    /// The mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // Avoid constructing a slice from a possibly-NULL pointer.
        if self.map_info.size == 0 {
            return &mut [];
        }
        // SAFETY: the map was created with GST_MAP_READWRITE and is exclusive
        // through &mut self.
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
416
// Standard byte-slice conversions: a map behaves like `[u8]`.
impl<T> AsRef<[u8]> for MemoryMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for MemoryMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> Deref for MemoryMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutable deref is only available for writable maps.
impl DerefMut for MemoryMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
446
impl<T> fmt::Debug for MemoryMap<'_, T> {
    // Shows the underlying memory, not the mapped bytes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MemoryMap").field(&self.memory()).finish()
    }
}
452
453impl<'a, T> PartialEq for MemoryMap<'a, T> {
454    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
455        self.as_slice().eq(other.as_slice())
456    }
457}
458
// Byte-slice equality is a total equivalence relation, so `Eq` holds.
impl<T> Eq for MemoryMap<'_, T> {}
460
impl<T> Drop for MemoryMap<'_, T> {
    // Releases the mapping; the memory itself is only borrowed and is not
    // unreffed here.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
469
// SAFETY: NOTE(review): assumes a GstMemory mapping may be used and unmapped
// from any thread, matching the thread-safety of the underlying C API —
// confirm against the GstMemory documentation.
unsafe impl<T> Send for MemoryMap<'_, T> {}
unsafe impl<T> Sync for MemoryMap<'_, T> {}
472
impl<T> MappedMemory<T> {
    /// The mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Empty maps may have a NULL data pointer; avoid from_raw_parts(NULL).
        if self.map_info.size == 0 {
            return &[];
        }
        // SAFETY: data/size come from a successful gst_memory_map and remain
        // valid for the lifetime of this owner.
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Borrow of the owned memory.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    /// Unmaps and returns the owned `Memory` without dropping its reference.
    #[inline]
    pub fn into_memory(self) -> Memory {
        // ManuallyDrop prevents Drop from unreffing; ownership is instead
        // transferred to `memory` via from_glib_full before unmapping.
        let mut s = mem::ManuallyDrop::new(self);
        let memory = unsafe { from_glib_full(s.map_info.memory) };
        unsafe {
            ffi::gst_memory_unmap(s.map_info.memory, &mut s.map_info);
        }

        memory
    }
}
505
impl MappedMemory<Writable> {
    /// The mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // Avoid constructing a slice from a possibly-NULL pointer.
        if self.map_info.size == 0 {
            return &mut [];
        }
        // SAFETY: mapped with GST_MAP_READWRITE; exclusive via &mut self.
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
515
// Standard byte-slice conversions, mirroring the `MemoryMap` impls.
impl<T> AsRef<[u8]> for MappedMemory<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for MappedMemory<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> Deref for MappedMemory<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutable deref only for writable maps.
impl DerefMut for MappedMemory<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
545
impl<T> Drop for MappedMemory<T> {
    // Unlike `MemoryMap`, this owns the memory: reclaim the reference (so it
    // is unreffed when `_memory` drops) and release the mapping.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            let _memory = Memory::from_glib_full(self.map_info.memory);
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
555
impl<T> fmt::Debug for MappedMemory<T> {
    // Shows the underlying memory, not the mapped bytes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedMemory").field(&self.memory()).finish()
    }
}
561
562impl<T> PartialEq for MappedMemory<T> {
563    fn eq(&self, other: &MappedMemory<T>) -> bool {
564        self.as_slice().eq(other.as_slice())
565    }
566}
567
// Byte-slice equality is a total equivalence relation, so `Eq` holds.
impl<T> Eq for MappedMemory<T> {}
569
// SAFETY: NOTE(review): same assumption as the MemoryMap impls — mappings of
// GstMemory are usable from any thread; confirm against GstMemory docs.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
572
// Lazy hexdump of a memory range; created by `MemoryRef::dump`/`dump_range`
// and rendered via its `Display`/`Debug` impls.
pub struct Dump<'a> {
    // The memory being dumped (mapped readable on each format call).
    memory: &'a MemoryRef,
    // Requested sub-range, stored as raw bounds.
    start: Bound<usize>,
    end: Bound<usize>,
}
578
impl Dump<'_> {
    // Shared rendering for Display (debug = false) and Debug (debug = true).
    // Maps the memory readable for the duration of the call; panics if
    // mapping fails.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let map = self.memory.map_readable().expect("Failed to map memory");
        let data = map.as_slice();

        // Delegate the actual byte formatting to the generic slice dumper.
        let dump = crate::slice::Dump {
            data,
            start: self.start,
            end: self.end,
        };

        if debug {
            <crate::slice::Dump as fmt::Debug>::fmt(&dump, f)
        } else {
            <crate::slice::Dump as fmt::Display>::fmt(&dump, f)
        }
    }
}
597
598impl fmt::Display for Dump<'_> {
599    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
600        self.fmt(f, false)
601    }
602}
603
604impl fmt::Debug for Dump<'_> {
605    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
606        self.fmt(f, true)
607    }
608}
609
/// Trait implemented by wrapper types around specific `GstMemory` subtypes
/// (see the `memory_object_wrapper!` macro).
///
/// # Safety
/// `check_memory_type` must only return `true` for memory that really is of
/// the implementing wrapper's type, since downcasts rely on it.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    // Returns true if `mem` is of this wrapper's memory type.
    fn check_memory_type(mem: &MemoryRef) -> bool;
}
616
/// Error returned when extracting a typed memory from a `glib::Value` fails.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    // The value did not contain a Memory at all.
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    // The value contained a Memory, but not of the requested subtype.
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
624
// Value checker verifying both that a `glib::Value` holds a `Memory` and that
// the memory is of subtype `M`.
pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
626
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::prelude::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    // Two-stage check: first that the value holds a (non-None) Memory, then
    // that the memory passes `M`'s type check.
    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // Re-wrap the generic value error into our richer error type.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    // Report the Rust type name of the requested wrapper.
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
658
// Reflexive conversions so the `MemoryType` bounds are satisfied by the base
// types themselves.
impl AsRef<MemoryRef> for MemoryRef {
    #[inline]
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}

impl AsMut<MemoryRef> for MemoryRef {
    #[inline]
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}

impl AsRef<Memory> for Memory {
    #[inline]
    fn as_ref(&self) -> &Memory {
        self
    }
}
679
// The base `Memory` type matches every memory, so the check is trivially true.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
687
impl Memory {
    /// Attempts to downcast this memory to the concrete wrapper type `M`.
    ///
    /// Returns the original memory in `Err` if `M`'s type check fails.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // SAFETY: the type check passed, so reinterpreting the pointer as
            // M's FFI type is valid; ownership is transferred untouched.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
701
impl MemoryRef {
    /// Returns true if this memory passes `M`'s type check.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    /// Attempts to downcast this reference to `M`'s reference type.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: type check passed; the ref types are layout-compatible
            // wrappers around the same FFI struct.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    /// Mutable variant of `downcast_memory_ref`.
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: as above, plus exclusivity is inherited from &mut self.
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
735
/// Generates a full wrapper type pair (`$name` owned / `$ref_name` borrowed)
/// for a `GstMemory` subtype, including:
/// - the `mini_object_wrapper!` boilerplate and a `MemoryType` impl driven by
///   `$mem_type_check`,
/// - `downcast_memory`/`upcast_memory` conversions,
/// - `Deref`/`AsRef`/`AsMut` up to the parent memory type,
/// - GValue integration (`StaticType`, `ValueType`, `FromValue`, `ToValue`, …).
///
/// The second arm additionally emits `AsRef`/`AsMut` impls for each extra
/// ancestor type pair, for hierarchies deeper than one level.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        // Type membership is delegated to the caller-supplied check.
        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Downcast to a more specific wrapper; returns self on failure.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to an ancestor type (guarded by Self: AsRef<M>).
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Borrowed upcasts, delegating to the AsRef/AsMut impls below.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        // Deref to the parent ref type via pointer reinterpretation; the
        // wrappers are layout-compatible newtypes over the same FFI struct.
        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        // GValue integration: both wrappers share the GstMemory GType.
        impl $crate::glib::types::StaticType for $name {
            #[inline]
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                // The borrow-from-value trick below requires the wrapper to be
                // exactly one pointer wide.
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as $crate::glib::prelude::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

        // Can't have SetValue/SetValueOptional impls as otherwise one could use it to get
        // immutable references from a mutable reference without borrowing via the value
    };
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
982
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
#[doc(alias = "GstMemory")]
/// Borrowed, non-owning view onto a raw [`ffi::GstMemory`].
///
/// Unlike [`Memory`]/[`MemoryRef`], this type only offers read-only accessors
/// and never touches the mini-object refcount.
// NOTE(review): presumably intended for the 1.26 tracer hooks (hence the name
// and the feature gate) — confirm against the callers that construct it.
pub struct MemoryRefTrace(ffi::GstMemory);
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
impl MemoryRefTrace {
    /// Reinterprets a raw `GstMemory` pointer as a `MemoryRefTrace` reference.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and point to a valid `GstMemory` that stays
    /// alive and unmodified for the chosen lifetime `'a`.
    pub unsafe fn from_ptr<'a>(ptr: *mut ffi::GstMemory) -> &'a MemoryRefTrace {
        assert!(!ptr.is_null());

        // SAFETY: checked non-null above; validity/lifetime are the caller's
        // contract per the function-level safety docs.
        unsafe { &*(ptr as *const Self) }
    }

    /// Returns the underlying raw `GstMemory` pointer.
    pub fn as_ptr(&self) -> *const ffi::GstMemory {
        self as *const Self as *const ffi::GstMemory
    }

    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        if self.0.allocator.is_null() {
            return None;
        }

        // SAFETY: just checked that the allocator pointer is non-null.
        unsafe { Some(Allocator::from_glib_ptr_borrow(&self.0.allocator)) }
    }

    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        if self.0.parent.is_null() {
            return None;
        }

        // SAFETY: just checked that the parent pointer is non-null.
        unsafe { Some(MemoryRef::from_ptr(self.0.parent)) }
    }

    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> crate::MemoryFlags {
        // The flags live on the embedded mini-object header.
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    /// Checks whether this memory is of the given memory type.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: impl IntoGStr) -> bool {
        let raw = self as *const Self as *mut ffi::GstMemory;
        mem_type.run_with_gstr(|mem_type| {
            // SAFETY: `raw` points at our embedded, valid GstMemory and the
            // GStr pointer is valid for the duration of the call.
            unsafe { from_glib(ffi::gst_memory_is_type(raw, mem_type.as_ptr())) }
        })
    }
}
1068
#[cfg(test)]
mod tests {
    #[test]
    fn test_map() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        {
            // Plain read mapping; unmaps at end of scope.
            let map = mem.map_readable().unwrap();
            assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        }

        // Consuming mapping, then convert back and map once more.
        let mapped = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);

        let mem = mapped.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }

    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let sub = mem.share(1..=2); // [2, 3]
        let sub_sub1 = sub.share(1..=1); // [3]
        let sub_sub2 = sub.share_maxsize(0..4); // [1, 2, 3, 4]

        // Each shared region must expose its own view of the parent data.
        assert_eq!(mem.map_readable().unwrap().as_slice(), &[1, 2, 3, 4]);
        assert_eq!(sub.map_readable().unwrap().as_slice(), &[2, 3]);
        assert_eq!(sub_sub1.map_readable().unwrap().as_slice(), &[3]);
        assert_eq!(sub_sub2.map_readable().unwrap().as_slice(), &[1, 2, 3, 4]);
    }

    #[test]
    fn test_dump() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        // Debug formatting produces a hexdump, Display a plain byte listing.
        assert_eq!(
            format!("{:?}", mem.dump()),
            "0000:  01 02 03 04                                       ...."
        );
        assert_eq!(format!("{}", mem.dump()), "01 02 03 04");

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        assert_eq!(
            format!("{:?}", mem.dump_range(..)),
            "0000:  01 02 03 04                                       ...."
        );
        assert_eq!(
            format!("{:?}", mem.dump_range(..2)),
            "0000:  01 02                                             .."
        );
        assert_eq!(
            format!("{:?}", mem.dump_range(2..=3)),
            "0002:  03 04                                             .."
        );
        // Out-of-range requests render an error marker instead of panicking.
        assert_eq!(format!("{:?}", mem.dump_range(..100)), "<end out of range>");
        assert_eq!(
            format!("{:?}", mem.dump_range(90..100)),
            "<start out of range>"
        );

        // More than 16 bytes wraps onto a second hexdump row.
        let mem = crate::Memory::from_slice(vec![0; 19]);
        assert_eq!(
            format!("{:?}", mem.dump()),
            "0000:  00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................\n\
             0010:  00 00 00                                          ..."
        );
    }

    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        // `None` round-trips through a GValue as `None`.
        let none_value = None::<&crate::Memory>.to_value();
        assert!(matches!(none_value.get::<Option<crate::Memory>>(), Ok(None)));

        // A real memory round-trips as `Some` and as a plain `Memory`.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let value = mem.to_value();
        assert!(matches!(value.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(value.get::<crate::Memory>().is_ok());
    }
}