1use std::{
4 cmp, fmt,
5 marker::PhantomData,
6 mem, ops,
7 ops::{Bound, ControlFlow, Range, RangeBounds},
8 ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{
14 ffi, meta::*, BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef,
15};
16
// Type-level marker: the map/cursor grants read-only access.
pub enum Readable {}
// Type-level marker: the map/cursor grants read-write access.
pub enum Writable {}

// Action returned from the `foreach_meta_mut()` closure: keep the currently
// visited meta on the buffer, or remove it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    Keep,
    Remove,
}
25
// Generates the owned `Buffer` and borrowed `BufferRef` wrapper types around
// the `ffi::GstBuffer` mini object (refcounting, Deref, get_mut, ...).
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});

// RAII guard over a mapped buffer borrowed from a `BufferRef`.
// `T` is the `Readable`/`Writable` access marker; the buffer is unmapped
// again in `Drop`.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

// Like `BufferMap`, but owns the underlying `Buffer`; the buffer can be
// recovered via `into_buffer()`.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41
impl Buffer {
    // Creates a new, empty buffer without any memory attached.
    #[doc(alias = "gst_buffer_new")]
    #[inline]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }

    // Creates a buffer with a single newly allocated memory of `size` bytes
    // (default allocator, default params). Errors if allocation fails.
    #[doc(alias = "gst_buffer_new_allocate")]
    #[doc(alias = "gst_buffer_new_and_alloc")]
    #[inline]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer"))
        }
    }

    // Wraps an owned mutable byte container into a buffer without copying
    // the data.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_mut_slice(slice);
        let mut buffer = Buffer::new();
        {
            // Freshly created buffer has a single reference, so get_mut() succeeds.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            // Clear the TAG_MEMORY flag that was set by appending memory.
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    // Wraps an owned immutable byte container into a buffer without copying
    // the data.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_slice(slice);
        let mut buffer = Buffer::new();
        {
            // Freshly created buffer has a single reference, so get_mut() succeeds.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            // Clear the TAG_MEMORY flag that was set by appending memory.
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    // Maps the buffer readable, consuming it. On failure the unmapped buffer
    // is handed back in the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // SAFETY: initialized by the successful gst_buffer_map() above.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    // Maps the buffer read-write, consuming it. On failure the unmapped
    // buffer is handed back in the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // SAFETY: initialized by the successful gst_buffer_map() above.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    // Consumes the buffer into a read cursor.
    #[inline]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    // Consumes the buffer into a write cursor; fails if the buffer is not
    // suitable for writing (see BufferCursor::new_writable).
    #[inline]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }

    // Appends all memory of `other` to this buffer.
    #[doc(alias = "gst_buffer_append")]
    pub fn append(&mut self, other: Self) {
        unsafe {
            // gst_buffer_append() may return a different buffer pointer,
            // so re-seat our wrapper on the returned one.
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
}
163
impl Default for Buffer {
    // The default buffer is a new, empty buffer (same as `Buffer::new()`).
    fn default() -> Self {
        Self::new()
    }
}
169
170impl BufferRef {
171 #[doc(alias = "gst_buffer_map")]
172 #[inline]
173 pub fn map_readable(&self) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
174 unsafe {
175 let mut map_info = mem::MaybeUninit::uninit();
176 let res =
177 ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
178 if res == glib::ffi::GTRUE {
179 Ok(BufferMap {
180 buffer: self,
181 map_info: map_info.assume_init(),
182 phantom: PhantomData,
183 })
184 } else {
185 Err(glib::bool_error!("Failed to map buffer readable"))
186 }
187 }
188 }
189
190 #[doc(alias = "gst_buffer_map")]
191 #[inline]
192 pub fn map_writable(&mut self) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
193 unsafe {
194 let mut map_info = mem::MaybeUninit::uninit();
195 let res = ffi::gst_buffer_map(
196 self.as_mut_ptr(),
197 map_info.as_mut_ptr(),
198 ffi::GST_MAP_READWRITE,
199 );
200 if res == glib::ffi::GTRUE {
201 Ok(BufferMap {
202 buffer: self,
203 map_info: map_info.assume_init(),
204 phantom: PhantomData,
205 })
206 } else {
207 Err(glib::bool_error!("Failed to map buffer writable"))
208 }
209 }
210 }
211
212 fn memory_range_into_idx_len(
213 &self,
214 range: impl RangeBounds<usize>,
215 ) -> Result<(u32, i32), glib::BoolError> {
216 let n_memory = self.n_memory();
217 debug_assert!(n_memory <= u32::MAX as usize);
218
219 let start_idx = match range.start_bound() {
220 ops::Bound::Included(idx) if *idx >= n_memory => {
221 return Err(glib::bool_error!("Invalid range start"));
222 }
223 ops::Bound::Included(idx) => *idx,
224 ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= n_memory) => {
225 return Err(glib::bool_error!("Invalid range start"));
226 }
227 ops::Bound::Excluded(idx) => *idx + 1,
228 ops::Bound::Unbounded => 0,
229 };
230
231 let end_idx = match range.end_bound() {
232 ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > n_memory) => {
233 return Err(glib::bool_error!("Invalid range end"));
234 }
235 ops::Bound::Included(idx) => *idx + 1,
236 ops::Bound::Excluded(idx) if *idx > n_memory => {
237 return Err(glib::bool_error!("Invalid range end"));
238 }
239 ops::Bound::Excluded(idx) => *idx,
240 ops::Bound::Unbounded => n_memory,
241 };
242
243 Ok((
244 start_idx as u32,
245 i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range"))?,
246 ))
247 }
248
249 #[doc(alias = "gst_buffer_map_range")]
250 #[inline]
251 pub fn map_range_readable(
252 &self,
253 range: impl RangeBounds<usize>,
254 ) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
255 let (idx, len) = self.memory_range_into_idx_len(range)?;
256 unsafe {
257 let mut map_info = mem::MaybeUninit::uninit();
258 let res = ffi::gst_buffer_map_range(
259 self.as_mut_ptr(),
260 idx,
261 len,
262 map_info.as_mut_ptr(),
263 ffi::GST_MAP_READ,
264 );
265 if res == glib::ffi::GTRUE {
266 Ok(BufferMap {
267 buffer: self,
268 map_info: map_info.assume_init(),
269 phantom: PhantomData,
270 })
271 } else {
272 Err(glib::bool_error!("Failed to map buffer readable"))
273 }
274 }
275 }
276
277 #[doc(alias = "gst_buffer_map_range")]
278 #[inline]
279 pub fn map_range_writable(
280 &mut self,
281 range: impl RangeBounds<usize>,
282 ) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
283 let (idx, len) = self.memory_range_into_idx_len(range)?;
284 unsafe {
285 let mut map_info = mem::MaybeUninit::uninit();
286 let res = ffi::gst_buffer_map_range(
287 self.as_mut_ptr(),
288 idx,
289 len,
290 map_info.as_mut_ptr(),
291 ffi::GST_MAP_READWRITE,
292 );
293 if res == glib::ffi::GTRUE {
294 Ok(BufferMap {
295 buffer: self,
296 map_info: map_info.assume_init(),
297 phantom: PhantomData,
298 })
299 } else {
300 Err(glib::bool_error!("Failed to map buffer writable"))
301 }
302 }
303 }
304
305 pub(crate) fn byte_range_into_offset_len(
306 &self,
307 range: impl RangeBounds<usize>,
308 ) -> Result<(usize, usize), glib::BoolError> {
309 let size = self.size();
310
311 let start_idx = match range.start_bound() {
312 ops::Bound::Included(idx) if *idx >= size => {
313 return Err(glib::bool_error!("Invalid range start"));
314 }
315 ops::Bound::Included(idx) => *idx,
316 ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= size) => {
317 return Err(glib::bool_error!("Invalid range start"));
318 }
319 ops::Bound::Excluded(idx) => *idx + 1,
320 ops::Bound::Unbounded => 0,
321 };
322
323 let end_idx = match range.end_bound() {
324 ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > size) => {
325 return Err(glib::bool_error!("Invalid range end"));
326 }
327 ops::Bound::Included(idx) => *idx + 1,
328 ops::Bound::Excluded(idx) if *idx > size => {
329 return Err(glib::bool_error!("Invalid range end"));
330 }
331 ops::Bound::Excluded(idx) => *idx,
332 ops::Bound::Unbounded => size,
333 };
334
335 Ok((start_idx, end_idx - start_idx))
336 }
337
338 #[doc(alias = "gst_buffer_copy_region")]
339 pub fn copy_region(
340 &self,
341 flags: crate::BufferCopyFlags,
342 range: impl RangeBounds<usize>,
343 ) -> Result<Buffer, glib::BoolError> {
344 let (offset, size) = self.byte_range_into_offset_len(range)?;
345
346 unsafe {
347 Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
348 self.as_mut_ptr(),
349 flags.into_glib(),
350 offset,
351 size,
352 ))
353 .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer"))
354 }
355 }
356
357 #[doc(alias = "gst_buffer_copy_into")]
358 pub fn copy_into(
359 &self,
360 dest: &mut BufferRef,
361 flags: crate::BufferCopyFlags,
362 range: impl RangeBounds<usize>,
363 ) -> Result<(), glib::BoolError> {
364 let (offset, size) = self.byte_range_into_offset_len(range)?;
365
366 unsafe {
367 glib::result_from_gboolean!(
368 ffi::gst_buffer_copy_into(
369 dest.as_mut_ptr(),
370 self.as_mut_ptr(),
371 flags.into_glib(),
372 offset,
373 size,
374 ),
375 "Failed to copy into destination buffer",
376 )
377 }
378 }
379
380 #[doc(alias = "gst_buffer_fill")]
381 pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
382 let maxsize = self.maxsize();
383 let size = slice.len();
384
385 assert!(maxsize >= offset && maxsize - offset >= size);
386
387 let copied = unsafe {
388 let src = slice.as_ptr();
389 ffi::gst_buffer_fill(
390 self.as_mut_ptr(),
391 offset,
392 src as glib::ffi::gconstpointer,
393 size,
394 )
395 };
396
397 if copied == size {
398 Ok(())
399 } else {
400 Err(copied)
401 }
402 }
403
404 #[doc(alias = "gst_buffer_extract")]
405 pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> {
406 let maxsize = self.size();
407 let size = slice.len();
408
409 assert!(maxsize >= offset && maxsize - offset >= size);
410
411 let copied = unsafe {
412 let dest = slice.as_mut_ptr();
413 ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size)
414 };
415
416 if copied == size {
417 Ok(())
418 } else {
419 Err(copied)
420 }
421 }
422
423 #[doc(alias = "gst_buffer_copy_deep")]
424 pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
425 unsafe {
426 Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
427 .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer"))
428 }
429 }
430
431 #[doc(alias = "get_size")]
432 #[doc(alias = "gst_buffer_get_size")]
433 pub fn size(&self) -> usize {
434 unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
435 }
436
437 #[doc(alias = "get_maxsize")]
438 pub fn maxsize(&self) -> usize {
439 unsafe {
440 let mut maxsize = mem::MaybeUninit::uninit();
441 ffi::gst_buffer_get_sizes_range(
442 self.as_mut_ptr(),
443 0,
444 -1,
445 ptr::null_mut(),
446 maxsize.as_mut_ptr(),
447 );
448
449 maxsize.assume_init()
450 }
451 }
452
453 #[doc(alias = "gst_buffer_set_size")]
454 pub fn set_size(&mut self, size: usize) {
455 assert!(self.maxsize() >= size);
456
457 unsafe {
458 ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
459 }
460 }
461
462 #[doc(alias = "get_offset")]
463 #[doc(alias = "GST_BUFFER_OFFSET")]
464 #[inline]
465 pub fn offset(&self) -> u64 {
466 self.0.offset
467 }
468
469 #[inline]
470 pub fn set_offset(&mut self, offset: u64) {
471 self.0.offset = offset;
472 }
473
474 #[doc(alias = "get_offset_end")]
475 #[doc(alias = "GST_BUFFER_OFFSET_END")]
476 #[inline]
477 pub fn offset_end(&self) -> u64 {
478 self.0.offset_end
479 }
480
481 #[inline]
482 pub fn set_offset_end(&mut self, offset_end: u64) {
483 self.0.offset_end = offset_end;
484 }
485
486 #[doc(alias = "get_pts")]
487 #[doc(alias = "GST_BUFFER_PTS")]
488 #[inline]
489 pub fn pts(&self) -> Option<ClockTime> {
490 unsafe { from_glib(self.0.pts) }
491 }
492
493 #[inline]
494 pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
495 self.0.pts = pts.into().into_glib();
496 }
497
498 #[doc(alias = "get_dts")]
499 #[doc(alias = "GST_BUFFER_DTS")]
500 #[inline]
501 pub fn dts(&self) -> Option<ClockTime> {
502 unsafe { from_glib(self.0.dts) }
503 }
504
505 #[inline]
506 pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
507 self.0.dts = dts.into().into_glib();
508 }
509
510 #[doc(alias = "get_dts_or_pts")]
511 #[doc(alias = "GST_BUFFER_DTS_OR_PTS")]
512 #[inline]
513 pub fn dts_or_pts(&self) -> Option<ClockTime> {
514 let val = self.dts();
515 if val.is_none() {
516 self.pts()
517 } else {
518 val
519 }
520 }
521
522 #[doc(alias = "get_duration")]
523 #[doc(alias = "GST_BUFFER_DURATION")]
524 #[inline]
525 pub fn duration(&self) -> Option<ClockTime> {
526 unsafe { from_glib(self.0.duration) }
527 }
528
529 #[inline]
530 pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
531 self.0.duration = duration.into().into_glib();
532 }
533
534 #[doc(alias = "get_flags")]
535 #[doc(alias = "GST_BUFFER_FLAGS")]
536 #[inline]
537 pub fn flags(&self) -> BufferFlags {
538 BufferFlags::from_bits_truncate(self.0.mini_object.flags)
539 }
540
541 #[doc(alias = "GST_BUFFER_FLAG_SET")]
542 #[inline]
543 pub fn set_flags(&mut self, flags: BufferFlags) {
544 self.0.mini_object.flags |= flags.bits();
545 }
546
547 #[doc(alias = "GST_BUFFER_FLAG_UNSET")]
548 #[inline]
549 pub fn unset_flags(&mut self, flags: BufferFlags) {
550 self.0.mini_object.flags &= !flags.bits();
551 }
552
553 #[doc(alias = "get_meta")]
554 #[doc(alias = "gst_buffer_get_meta")]
555 #[inline]
556 pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<'_, T>> {
557 unsafe {
558 let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
559 if meta.is_null() {
560 None
561 } else {
562 Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
563 }
564 }
565 }
566
567 #[doc(alias = "get_meta_mut")]
568 #[inline]
569 pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<'_, T, crate::meta::Standalone>> {
570 unsafe {
571 let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
572 if meta.is_null() {
573 None
574 } else {
575 Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
576 }
577 }
578 }
579
580 pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<'_, T> {
581 MetaIter::new(self)
582 }
583
584 pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<'_, T> {
585 MetaIterMut::new(self)
586 }
587
588 #[doc(alias = "gst_buffer_foreach_meta")]
589 pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
590 unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
591 buffer: *mut ffi::GstBuffer,
592 meta: *mut *mut ffi::GstMeta,
593 user_data: glib::ffi::gpointer,
594 ) -> glib::ffi::gboolean {
595 let func = user_data as *mut F;
596 let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));
597
598 matches!(res, ControlFlow::Continue(_)).into_glib()
599 }
600
601 unsafe {
602 let mut func = func;
603 let func_ptr: &mut F = &mut func;
604
605 from_glib(ffi::gst_buffer_foreach_meta(
606 mut_override(self.as_ptr()),
607 Some(trampoline::<F>),
608 func_ptr as *mut _ as *mut _,
609 ))
610 }
611 }
612
613 #[doc(alias = "gst_buffer_foreach_meta")]
614 pub fn foreach_meta_mut<
615 F: FnMut(
616 MetaRefMut<Meta, crate::meta::Iterated>,
617 ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
618 >(
619 &mut self,
620 func: F,
621 ) -> bool {
622 unsafe extern "C" fn trampoline<
623 F: FnMut(
624 MetaRefMut<Meta, crate::meta::Iterated>,
625 ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
626 >(
627 buffer: *mut ffi::GstBuffer,
628 meta: *mut *mut ffi::GstMeta,
629 user_data: glib::ffi::gpointer,
630 ) -> glib::ffi::gboolean {
631 let func = user_data as *mut F;
632 let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));
633
634 let (cont, action) = match res {
635 ControlFlow::Continue(action) => (true, action),
636 ControlFlow::Break(action) => (false, action),
637 };
638
639 if action == BufferMetaForeachAction::Remove {
640 *meta = ptr::null_mut();
641 }
642
643 cont.into_glib()
644 }
645
646 unsafe {
647 let mut func = func;
648 let func_ptr: &mut F = &mut func;
649
650 from_glib(ffi::gst_buffer_foreach_meta(
651 mut_override(self.as_ptr()),
652 Some(trampoline::<F>),
653 func_ptr as *mut _ as *mut _,
654 ))
655 }
656 }
657
658 #[doc(alias = "gst_buffer_append_memory")]
659 pub fn append_memory(&mut self, mem: Memory) {
660 unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
661 }
662
663 #[doc(alias = "gst_buffer_find_memory")]
664 pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
665 let (offset, size) = self.byte_range_into_offset_len(range).ok()?;
666
667 unsafe {
668 let mut idx = mem::MaybeUninit::uninit();
669 let mut length = mem::MaybeUninit::uninit();
670 let mut skip = mem::MaybeUninit::uninit();
671
672 let res = from_glib(ffi::gst_buffer_find_memory(
673 self.as_mut_ptr(),
674 offset,
675 size,
676 idx.as_mut_ptr(),
677 length.as_mut_ptr(),
678 skip.as_mut_ptr(),
679 ));
680
681 if res {
682 let idx = idx.assume_init() as usize;
683 let length = length.assume_init() as usize;
684 let skip = skip.assume_init();
685 Some((idx..(idx + length), skip))
686 } else {
687 None
688 }
689 }
690 }
691
692 #[doc(alias = "get_all_memory")]
693 #[doc(alias = "gst_buffer_get_all_memory")]
694 pub fn all_memory(&self) -> Option<Memory> {
695 unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
696 }
697
698 #[doc(alias = "get_max_memory")]
699 #[doc(alias = "gst_buffer_get_max_memory")]
700 pub fn max_memory() -> usize {
701 unsafe { ffi::gst_buffer_get_max_memory() as usize }
702 }
703
704 #[doc(alias = "get_memory")]
705 #[doc(alias = "gst_buffer_get_memory")]
706 pub fn memory(&self, idx: usize) -> Option<Memory> {
707 if idx >= self.n_memory() {
708 return None;
709 }
710 unsafe {
711 let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32);
712 Some(from_glib_full(res))
713 }
714 }
715
716 #[doc(alias = "get_memory_range")]
717 #[doc(alias = "gst_buffer_get_memory_range")]
718 pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
719 let (idx, len) = self.memory_range_into_idx_len(range).ok()?;
720
721 unsafe {
722 let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
723 from_glib_full(res)
724 }
725 }
726
727 #[doc(alias = "gst_buffer_insert_memory")]
728 pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) {
729 let n_memory = self.n_memory();
730 let idx = idx.into();
731 let idx = idx.unwrap_or(n_memory);
732 assert!(idx <= self.n_memory());
733 unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) }
734 }
735
736 #[doc(alias = "gst_buffer_is_all_memory_writable")]
737 pub fn is_all_memory_writable(&self) -> bool {
738 unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
739 }
740
741 #[doc(alias = "gst_buffer_is_memory_range_writable")]
742 pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
743 let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
744 return false;
745 };
746
747 unsafe {
748 from_glib(ffi::gst_buffer_is_memory_range_writable(
749 self.as_mut_ptr(),
750 idx,
751 len,
752 ))
753 }
754 }
755
756 #[doc(alias = "gst_buffer_n_memory")]
757 pub fn n_memory(&self) -> usize {
758 unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize }
759 }
760
761 #[doc(alias = "gst_buffer_peek_memory")]
762 pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
763 assert!(idx < self.n_memory());
764 unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
765 }
766
767 #[doc(alias = "gst_buffer_peek_memory")]
768 pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
769 assert!(idx < self.n_memory());
770 unsafe {
771 let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
772 if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
773 Err(glib::bool_error!("Memory not writable"))
774 } else {
775 Ok(MemoryRef::from_mut_ptr(mem))
776 }
777 }
778 }
779
780 #[doc(alias = "gst_buffer_prepend_memory")]
781 pub fn prepend_memory(&mut self, mem: Memory) {
782 unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
783 }
784
785 #[doc(alias = "gst_buffer_remove_all_memory")]
786 pub fn remove_all_memory(&mut self) {
787 unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
788 }
789
790 #[doc(alias = "gst_buffer_remove_memory")]
791 pub fn remove_memory(&mut self, idx: usize) {
792 assert!(idx < self.n_memory());
793 unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
794 }
795
796 #[doc(alias = "gst_buffer_remove_memory_range")]
797 pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
798 let (idx, len) = self
799 .memory_range_into_idx_len(range)
800 .expect("Invalid memory range");
801
802 unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
803 }
804
805 #[doc(alias = "gst_buffer_replace_all_memory")]
806 pub fn replace_all_memory(&mut self, mem: Memory) {
807 unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
808 }
809
810 #[doc(alias = "gst_buffer_replace_memory")]
811 pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
812 assert!(idx < self.n_memory());
813 unsafe {
814 ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
815 }
816 }
817
818 #[doc(alias = "gst_buffer_replace_memory_range")]
819 pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
820 let (idx, len) = self
821 .memory_range_into_idx_len(range)
822 .expect("Invalid memory range");
823
824 unsafe {
825 ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
826 }
827 }
828
    // Iterates over the buffer's memories by reference.
    pub fn iter_memories(&self) -> Iter<'_> {
        Iter::new(self)
    }

    // Iterates mutably over the buffer's memories; fails unless all memories
    // are writable.
    pub fn iter_memories_mut(&mut self) -> Result<IterMut<'_>, glib::BoolError> {
        if !self.is_all_memory_writable() {
            Err(glib::bool_error!("Not all memory are writable"))
        } else {
            Ok(IterMut::new(self))
        }
    }

    // Iterates over the buffer's memories, yielding owned references.
    pub fn iter_memories_owned(&self) -> IterOwned<'_> {
        IterOwned::new(self)
    }

    // Creates a read cursor borrowing this buffer.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }

    // Creates a write cursor borrowing this buffer mutably.
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }

    // Returns a helper that hex-dumps the entire buffer contents.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump(&self) -> Dump<'_> {
        Dump {
            buffer: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    // Returns a helper that hex-dumps the given byte range of the buffer.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            buffer: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
872}
873
// Generates a meta iterator type over a buffer's metas: `$name` is the
// iterator type, `$typ` the (borrowed) buffer type it holds, `$mtyp` the
// yielded item type, `$prepare_buffer` re-derives the buffer reference for an
// item and `$from_ptr` converts a raw meta pointer into the item.
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
        #[must_use = "iterators are lazy and do nothing unless consumed"]
        pub struct $name<'a, T: MetaAPI + 'a> {
            buffer: $typ,
            // Opaque iteration state for gst_buffer_iterate_meta().
            state: glib::ffi::gpointer,
            meta_api: glib::Type,
            items: PhantomData<$mtyp>,
        }

        // NOTE(review): these rely on buffer metas being safely shareable
        // across threads — confirm against the Meta API contract.
        unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
        unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

        impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_struct(stringify!($name))
                    .field("buffer", &self.buffer)
                    .field("state", &self.state)
                    .field("meta_api", &self.meta_api)
                    .field("items", &self.items)
                    .finish()
            }
        }

        impl<'a, T: MetaAPI> $name<'a, T> {
            fn new(buffer: $typ) -> $name<'a, T> {
                skip_assert_initialized!();

                $name {
                    buffer,
                    state: ptr::null_mut(),
                    meta_api: T::meta_api(),
                    items: PhantomData,
                }
            }
        }

        #[allow(clippy::redundant_closure_call)]
        impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
            type Item = $mtyp;

            fn next(&mut self) -> Option<Self::Item> {
                loop {
                    unsafe {
                        let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                        if meta.is_null() {
                            return None;
                        } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                            // Matching meta type (or wildcard): convert and yield it.
                            let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                            let item = $from_ptr(buffer, meta);
                            return Some(item);
                        }
                    }
                }
            }
        }

        impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
936
// Immutable meta iterator yielding `MetaRef` items.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable meta iterator yielding `MetaRefMut` items in the `Iterated` mode.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
951
// Thin wrapper over the crate's fixed-size iterator macro: the length is
// always the buffer's number of memories.
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
        crate::utils::define_fixed_size_iter!(
            $name, $typ, $mtyp,
            |buffer: &BufferRef| buffer.n_memory() as usize,
            $get_item
        );
    }
);

// Iterator over `&MemoryRef` items of a buffer.
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_ptr(ptr as *const ffi::GstMemory)
    }
);

// Iterator over `&mut MemoryRef` items; only constructed when all memories
// are writable (see `iter_memories_mut`).
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_mut_ptr(ptr)
    }
);
981
982impl<'a> IntoIterator for &'a BufferRef {
983 type IntoIter = Iter<'a>;
984 type Item = &'a MemoryRef;
985
986 fn into_iter(self) -> Self::IntoIter {
987 self.iter_memories()
988 }
989}
990
991impl From<Memory> for Buffer {
992 fn from(value: Memory) -> Self {
993 skip_assert_initialized!();
994
995 let mut buffer = Buffer::new();
996 {
997 let buffer = buffer.get_mut().unwrap();
998 buffer.append_memory(value);
999 }
1000 buffer
1001 }
1002}
1003
1004impl<const N: usize> From<[Memory; N]> for Buffer {
1005 fn from(value: [Memory; N]) -> Self {
1006 skip_assert_initialized!();
1007
1008 let mut buffer = Buffer::new();
1009 {
1010 let buffer = buffer.get_mut().unwrap();
1011 value.into_iter().for_each(|b| buffer.append_memory(b));
1012 }
1013 buffer
1014 }
1015}
1016
1017impl std::iter::FromIterator<Memory> for Buffer {
1018 fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self {
1019 skip_assert_initialized!();
1020 let iter = iter.into_iter();
1021
1022 let mut buffer = Buffer::new();
1023
1024 {
1025 let buffer = buffer.get_mut().unwrap();
1026 iter.for_each(|m| buffer.append_memory(m));
1027 }
1028
1029 buffer
1030 }
1031}
1032
1033impl std::iter::Extend<Memory> for BufferRef {
1034 fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) {
1035 iter.into_iter().for_each(|m| self.append_memory(m));
1036 }
1037}
1038
// Iterator yielding owned `Memory` references (each item takes a new ref).
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_get_memory(buffer.as_mut_ptr(), idx as u32);
        from_glib_full(ptr)
    }
);
1048
impl fmt::Debug for Buffer {
    // Delegates to the `BufferRef` Debug implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        BufferRef::fmt(self, f)
    }
}

impl PartialEq for Buffer {
    // Content equality, delegated to `BufferRef::eq` (byte comparison).
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(self, other)
    }
}

impl Eq for Buffer {}

impl PartialEq<BufferRef> for Buffer {
    fn eq(&self, other: &BufferRef) -> bool {
        BufferRef::eq(self, other)
    }
}
impl PartialEq<Buffer> for BufferRef {
    // Argument order is swapped; valid because the comparison is symmetric.
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(other, self)
    }
}
1073
impl fmt::Debug for BufferRef {
    // Debug output with pointer, timestamps, size, offsets, flags and the
    // API types of all attached metas.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter printing an iterator as a debug list; RefCell is needed
        // because Debug::fmt only gives us `&self`.
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1108
1109impl PartialEq for BufferRef {
1110 fn eq(&self, other: &BufferRef) -> bool {
1111 if self.size() != other.size() {
1112 return false;
1113 }
1114
1115 let self_map = self.map_readable();
1116 let other_map = other.map_readable();
1117
1118 match (self_map, other_map) {
1119 (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1120 _ => false,
1121 }
1122 }
1123}
1124
1125impl Eq for BufferRef {}
1126
impl<T> BufferMap<'_, T> {
    // Returns the size of the mapped region in bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Returns the buffer this map borrows from.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer
    }

    // Returns the mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Avoid from_raw_parts with a possibly-dangling pointer for empty maps.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }
}

impl BufferMap<'_, Writable> {
    // Returns the mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // Avoid from_raw_parts_mut with a possibly-dangling pointer for empty maps.
        if self.map_info.size == 0 {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
1158
// Standard slice-like conversions and Deref for the map guard.
impl<T> AsRef<[u8]> for BufferMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for BufferMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> ops::Deref for BufferMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutable deref is only available for writable maps.
impl ops::DerefMut for BufferMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> fmt::Debug for BufferMap<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("BufferMap").field(&self.buffer()).finish()
    }
}
1194
1195impl<'a, T> PartialEq for BufferMap<'a, T> {
1196 fn eq(&self, other: &BufferMap<'a, T>) -> bool {
1197 self.as_slice().eq(other.as_slice())
1198 }
1199}
1200
// Byte-wise slice equality is reflexive, so `Eq` holds.
impl<T> Eq for BufferMap<'_, T> {}
1202
impl<T> Drop for BufferMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was filled in when this mapping was created
        // (constructor outside this chunk) and is unmapped exactly once here,
        // when the map wrapper goes away.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1211
// SAFETY: NOTE(review) — these rely on a mapped buffer's data being usable
// from any thread, mirroring the thread-safety of the underlying GstBuffer;
// confirm against the mapping constructors outside this chunk.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1214
impl<T> MappedBuffer<T> {
    // Returns the mapped bytes; empty slice when nothing was mapped so that
    // `slice::from_raw_parts` is never called with a potentially null pointer.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            return &[];
        }
        // SAFETY: size is non-zero, so `data` points at `size` valid bytes
        // for as long as the mapping (owned by `self`) is alive.
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Number of bytes covered by this mapping.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Borrows the underlying buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    // Unmaps and returns the underlying buffer.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        // `ManuallyDrop` suppresses our `Drop` impl, which would otherwise
        // unmap a second time; the buffer is moved out with `ptr::read`.
        let mut s = mem::ManuallyDrop::new(self);
        let buffer = unsafe { ptr::read(&s.buffer) };
        // SAFETY: `map_info` is still the active mapping of `buffer` and,
        // with `Drop` disabled above, is unmapped exactly once here.
        unsafe {
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1247
impl MappedBuffer<Readable> {
    // Returns an additional strong reference to the mapped buffer. Only
    // offered for read mappings, where sharing the buffer cannot race with
    // writes through the mapping.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer_owned(&self) -> Buffer {
        self.buffer.clone()
    }
}
1255
1256impl MappedBuffer<Writable> {
1257 #[inline]
1258 pub fn as_mut_slice(&mut self) -> &mut [u8] {
1259 if self.map_info.size == 0 {
1260 return &mut [];
1261 }
1262 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1263 }
1264}
1265
// Cheap borrow of the mapped bytes; delegates to `as_slice`.
impl<T> AsRef<[u8]> for MappedBuffer<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1272
// Mutable borrow of the mapped bytes; only available for writable mappings.
impl AsMut<[u8]> for MappedBuffer<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1279
// Lets a `MappedBuffer` be used wherever a `&[u8]` is expected.
impl<T> ops::Deref for MappedBuffer<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1288
// Mutable deref, restricted to writable mappings.
impl ops::DerefMut for MappedBuffer<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1295
impl<T> Drop for MappedBuffer<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` belongs to this buffer's active mapping (created
        // outside this chunk); it is unmapped exactly once here. Note that
        // `into_buffer` bypasses this via `ManuallyDrop`.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1304
// Debug shows the underlying buffer, not the (possibly large) byte contents.
impl<T> fmt::Debug for MappedBuffer<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
    }
}
1310
1311impl<T> PartialEq for MappedBuffer<T> {
1312 fn eq(&self, other: &MappedBuffer<T>) -> bool {
1313 self.as_slice().eq(other.as_slice())
1314 }
1315}
1316
// Byte-wise slice equality is reflexive, so `Eq` holds.
impl<T> Eq for MappedBuffer<T> {}
1318
// SAFETY: NOTE(review) — these rely on a mapped buffer's data being usable
// from any thread, mirroring the thread-safety of the underlying GstBuffer;
// confirm against the mapping constructors outside this chunk.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1321
// Flag set selecting the buffer's metadata when copying (see the C
// GST_BUFFER_COPY_METADATA definition for the exact fields it covers).
#[doc(alias = "GST_BUFFER_COPY_METADATA")]
pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
// Flag set selecting everything, metadata and content alike.
#[doc(alias = "GST_BUFFER_COPY_ALL")]
pub const BUFFER_COPY_ALL: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1328
// Hex-dump helper over a byte range of a buffer; the actual output is
// produced by its `Display` (plain) and `Debug` (offset + ASCII column)
// impls below.
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Start bound of the byte range to dump.
    start: Bound<usize>,
    // End bound of the byte range to dump.
    end: Bound<usize>,
}
1334
// Iterates over a buffer's bytes in chunks of up to 16, transparently
// crossing memory boundaries; used by `Dump` to build hex-dump lines.
#[must_use = "iterators are lazy and do nothing unless consumed"]
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Index of the memory currently being read.
    mem_idx: usize,
    // Total number of memories in the buffer (exclusive upper bound).
    mem_len: usize,
    // Read mapping of the current memory, created lazily in `next`.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    // Byte offset into the current mapping.
    map_offset: usize,
    // Remaining number of bytes to yield in total.
    len: usize,
}
1344
impl Iterator for BufferChunked16Iter<'_> {
    // A chunk of up to 16 bytes plus the number of valid bytes in it.
    type Item = ([u8; 16], usize);

    fn next(&mut self) -> Option<Self::Item> {
        // Done once all memories are consumed or the requested length is
        // exhausted.
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        let mut data = item.as_mut_slice();

        // A chunk may span several memories: keep copying until the chunk is
        // full, memories run out, or the requested length is reached.
        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            if self.map.is_none() {
                // Lazily map the current memory; the map is kept across calls
                // so each memory is mapped only once.
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            // NOTE(review): assumes no memory is zero-sized — an empty memory
            // would trip this debug assertion; confirm callers guarantee it.
            debug_assert!(self.map_offset < map.len());
            // Copy as much as fits in the chunk, the current map, and the
            // remaining requested length.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            // Current memory fully consumed: drop the map and advance.
            if self.map_offset == map.len() {
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1382
impl Dump<'_> {
    // Shared implementation for `Display` (debug == false: bare hex) and
    // `Debug` (debug == true: offset prefix and ASCII column per line).
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let n_memory = self.buffer.n_memory();
        if n_memory == 0 {
            write!(f, "<empty>")?;
            return Ok(());
        }

        use std::fmt::Write;

        let len = self.buffer.size();

        // Resolve the start bound into a concrete byte index, rejecting
        // out-of-range starts (including overflow of an excluded bound).
        let mut start_idx = match self.start {
            Bound::Included(idx) if idx >= len => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= len) => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx,
            Bound::Excluded(idx) => idx + 1,
            Bound::Unbounded => 0,
        };

        // Resolve the end bound into an exclusive byte index.
        let end_idx = match self.end {
            Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > len) => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx > len => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx + 1,
            Bound::Excluded(idx) => idx,
            Bound::Unbounded => len,
        };

        if start_idx >= end_idx {
            write!(f, "<empty range>")?;
            return Ok(());
        }

        // Locate the memory and intra-memory offset where the range starts.
        let (memory_range, skip) = self
            .buffer
            .find_memory(start_idx..)
            .expect("can't find memory");

        let chunks = BufferChunked16Iter {
            buffer: self.buffer,
            mem_idx: memory_range.start,
            mem_len: n_memory,
            map: None,
            map_offset: skip,
            len: end_idx - start_idx,
        };

        if debug {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Offset column width grows with the largest offset printed.
                match end_idx {
                    0x00_00..=0xff_ff => write!(f, "{start_idx:04x}: ")?,
                    0x01_00_00..=0xff_ff_ff => write!(f, "{start_idx:06x}: ")?,
                    0x01_00_00_00..=0xff_ff_ff_ff => write!(f, "{start_idx:08x}: ")?,
                    _ => write!(f, "{start_idx:016x}: ")?,
                }

                // Hex bytes, space-separated.
                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {v:02x}")?;
                    } else {
                        write!(f, "{v:02x}")?;
                    }
                }

                // Pad short (final) lines so the ASCII column lines up.
                for _ in line.len()..16 {
                    write!(f, " ")?;
                }
                write!(f, " ")?;

                // ASCII column: printable characters as-is, rest as '.'.
                for v in line {
                    if v.is_ascii() && !v.is_ascii_control() {
                        f.write_char((*v).into())?;
                    } else {
                        f.write_char('.')?;
                    }
                }

                // Newline only between lines, never trailing.
                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        } else {
            // Plain variant: hex bytes only, no offsets or ASCII column.
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {v:02x}")?;
                    } else {
                        write!(f, "{v:02x}")?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        }
    }
}
1506
// Plain hex dump, no offsets or ASCII column (delegates to the inherent
// `Dump::fmt` with debug == false).
impl fmt::Display for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
1512
// Full hex dump with offset prefix and ASCII column (debug == true).
impl fmt::Debug for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
1518
1519#[cfg(test)]
1520mod tests {
1521 use super::*;
1522
    // Field setters on a writable buffer must be readable back unchanged.
    #[test]
    fn test_fields() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();

        {
            // `get_mut` succeeds because the buffer has a single owner here.
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(ClockTime::NSECOND);
            buffer.set_dts(2 * ClockTime::NSECOND);
            buffer.set_offset(3);
            buffer.set_offset_end(4);
            buffer.set_duration(Some(5 * ClockTime::NSECOND));
        }
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
        assert_eq!(buffer.offset(), 3);
        assert_eq!(buffer.offset_end(), 4);
        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
    }
1543
    // Copy-on-write semantics: `get_mut` on a shared buffer fails,
    // `make_mut` detaches a private copy that can be written independently.
    #[test]
    fn test_writability() {
        crate::init().unwrap();

        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
        }
        // Sole owner: in-place mutation is allowed.
        assert_ne!(buffer.get_mut(), None);
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(Some(ClockTime::NSECOND));
        }

        // Cloning shares the buffer, so `get_mut` must now fail.
        let mut buffer2 = buffer.clone();
        assert_eq!(buffer.get_mut(), None);

        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());

        {
            // `make_mut` copies the shared buffer: distinct pointer.
            let buffer2 = buffer2.make_mut();
            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());

            buffer2.set_pts(Some(2 * ClockTime::NSECOND));

            let mut data = buffer2.map_writable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
            data.as_mut_slice()[0] = 0;
        }

        // The original buffer is unaffected by writes to the copy.
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));

        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());

            let data = buffer2.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
        }
    }
1586
    // Exercises all ways of accessing a buffer's memories: indexed access,
    // peeking, and the three iterator flavors; four 5-byte memories plus one
    // 10-byte memory, 30 bytes total.
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_memories() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
        }

        assert!(buffer.is_all_memory_writable());
        assert_eq!(buffer.n_memory(), 5);
        assert_eq!(buffer.size(), 30);

        // Indexed and peeked access, read and write mappings.
        for i in 0..5 {
            {
                let mem = buffer.memory(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let mem = buffer.peek_memory(i);
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let buffer = buffer.get_mut().unwrap();
                let mem = buffer.peek_memory_mut(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_writable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }
        }

        // Mutable iteration over memories.
        {
            let buffer = buffer.get_mut().unwrap();
            let mut last = 0;
            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_writable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                last = i;
            }

            // All five memories must have been visited.
            assert_eq!(last, 4);
        }

        // Borrowing iteration.
        let mut last = 0;
        for (i, mem) in buffer.iter_memories().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);

        // Owning iteration.
        let mut last = 0;
        for (i, mem) in buffer.iter_memories_owned().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);
    }
1696
    // `foreach_meta` must visit metas in insertion order.
    #[test]
    fn test_meta_foreach() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
    }
1729
    // `foreach_meta_mut` must visit all metas and honor the `Remove` action:
    // the second meta is removed and a subsequent pass only sees the first.
    #[test]
    fn test_meta_foreach_mut() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            // Drop the 1-second meta, keep the rest.
            if meta.timestamp() == ClockTime::SECOND {
                ControlFlow::Continue(BufferMetaForeachAction::Remove)
            } else {
                ControlFlow::Continue(BufferMetaForeachAction::Keep)
            }
        });

        // Both metas were visited during the mutable pass.
        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        // Only the kept meta remains.
        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
    }
1777
    // `ptr_eq` is identity comparison: true for the same buffer, false for
    // two distinct allocations.
    #[test]
    fn test_ptr_eq() {
        crate::init().unwrap();

        let buffer1 = Buffer::new();
        assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
        let buffer2 = Buffer::new();
        assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
    }
1787
    // `copy_region` must accept every range flavor covering the full buffer,
    // reject out-of-bounds ranges, and copy partial ranges exactly.
    #[test]
    fn test_copy_region() {
        crate::init().unwrap();

        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
        // Full-range equivalents: .., 0..8, 0..=7, ..=7, ..8, 0..
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );

        // Ranges past the 8-byte buffer must fail.
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());

        // Partial ranges copy exactly the selected bytes.
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
    }
1847
    // Pins the exact `Debug`/`Display` output of `Dump` for a single-memory
    // buffer, including range variants, out-of-range errors, and line
    // wrapping after 16 bytes.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        // Debug format: offset, hex bytes, ASCII column.
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        // Display format: bare hex bytes.
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 .."
        );
        s.clear();
        // Out-of-range bounds render error markers instead of data.
        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        // 19 bytes wrap onto a second line after 16.
        let buffer = crate::Buffer::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00 ..."
        );
        s.clear();
    }
1901
    // Same as `test_dump` but with the 19 bytes spread over five memories —
    // dump lines must cross memory boundaries seamlessly.
    #[test]
    fn test_dump_multi_memories() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut buffer = crate::Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();

            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
            buffer.append_memory(mem);
        }

        let mut s = String::new();
        // A 16-byte debug line spans the first four memories.
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 ................\n\
             0010: 11 12 13 ..."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10\n11 12 13"
        );
        s.clear();

        // A range starting mid-memory shifts the offset column accordingly.
        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12 ................\n\
             0012: 13 ."
        );
        s.clear();

        // A short range crossing a memory boundary (bytes 14..17).
        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
        assert_eq!(
            s,
            "000e: 0f 10 11 ..."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();

        // Reversed range: the start check fires before the emptiness check.
        #[allow(clippy::reversed_empty_ranges)]
        {
            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
            assert_eq!(s, "<start out of range>");
            s.clear();
        }
    }
1969}