1use std::{
4 cmp, fmt,
5 marker::PhantomData,
6 mem, ops,
7 ops::{Bound, ControlFlow, Range, RangeBounds},
8 ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{
14 BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef, ffi, meta::*,
15};
16
// Type-level markers used as the access-mode parameter of `BufferMap`,
// `MappedBuffer` and `BufferCursor`: `Readable` grants read-only access,
// `Writable` additionally allows mutation. Both are uninhabited — they exist
// only at the type level.
pub enum Readable {}
pub enum Writable {}

// Decision returned by the `foreach_meta_mut` callback: keep the currently
// visited meta attached to the buffer, or remove it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    Keep,
    Remove,
}
25
// Generates the reference-counted `Buffer` wrapper and the borrowed
// `BufferRef` view over the C `GstBuffer` mini-object, using the buffer's
// GType for runtime type checks.
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});
29
// RAII guard over a mapped buffer borrowed from a `BufferRef`; the mapping is
// released when the guard is dropped. `T` is `Readable` or `Writable` and
// restricts which accessors are available.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
35
// Like `BufferMap`, but owns the underlying `Buffer` instead of borrowing it,
// so the mapping can outlive the original binding. `T` is `Readable` or
// `Writable`.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41
impl Buffer {
    /// Creates a new, empty buffer with no memory blocks attached.
    #[doc(alias = "gst_buffer_new")]
    #[inline]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }

    /// Creates a buffer backed by a single, newly allocated memory block of
    /// `size` bytes (default allocator, default allocation parameters).
    ///
    /// # Errors
    ///
    /// Returns an error when the allocation fails.
    #[doc(alias = "gst_buffer_new_allocate")]
    #[doc(alias = "gst_buffer_new_and_alloc")]
    #[inline]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer"))
        }
    }

    /// Wraps an owned, mutable byte container into a buffer without copying.
    ///
    /// The `TAG_MEMORY` flag is unset on the new buffer afterwards.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_mut_slice(slice);
        let mut buffer = Buffer::new();
        {
            // The buffer was just created, so `get_mut()` cannot fail here.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    /// Wraps an owned, read-only byte container into a buffer without
    /// copying.
    ///
    /// The `TAG_MEMORY` flag is unset on the new buffer afterwards.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_slice(slice);
        let mut buffer = Buffer::new();
        {
            // The buffer was just created, so `get_mut()` cannot fail here.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    /// Attempts to recover the value that was wrapped into this buffer, e.g.
    /// via [`Buffer::from_slice`] / [`Buffer::from_mut_slice`].
    ///
    /// This only succeeds when the buffer contains exactly one memory block,
    /// is writable, and that memory block actually wraps a `T`; in every
    /// other case the untouched buffer is handed back together with the
    /// failure reason.
    #[inline]
    pub fn try_into_inner<T: 'static>(self) -> Result<T, (Self, crate::MemoryIntoInnerError)> {
        if self.n_memory() != 1 {
            return Err((self, crate::MemoryIntoInnerError::MultipleMemoryBlocks));
        }

        if !self.is_writable() {
            return Err((self, crate::MemoryIntoInnerError::NotWritable));
        }

        unsafe {
            let mem_ptr = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), 0);

            // We validated above that there is exactly one memory block, so a
            // null pointer here would indicate an internal inconsistency.
            assert!(
                !mem_ptr.is_null(),
                "peek_memory returned null after validation - this is a bug"
            );

            match crate::memory_wrapped::try_into_from_memory_ptr(mem_ptr) {
                Ok(value) => {
                    // The wrapped value has been moved out; detach the memory
                    // block from the buffer before returning.
                    ffi::gst_buffer_remove_memory(self.as_mut_ptr(), 0);

                    Ok(value)
                }
                Err(err) => Err((self, err)),
            }
        }
    }

    /// Consumes the buffer and maps it for reading. On failure the unmapped
    /// buffer is returned in the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // Safe: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Consumes the buffer and maps it for reading and writing. On failure
    /// the unmapped buffer is returned in the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // Safe: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Consumes the buffer into a read cursor.
    #[inline]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    /// Consumes the buffer into a write cursor; fails if the buffer is not
    /// writable.
    #[inline]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }

    /// Appends all memory blocks of `other` to this buffer.
    #[doc(alias = "gst_buffer_append")]
    pub fn append(&mut self, other: Self) {
        unsafe {
            // gst_buffer_append() may return a different buffer than the one
            // passed in, so the wrapped pointer must be replaced by the
            // result.
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
}
221
222impl Default for Buffer {
223 fn default() -> Self {
224 Self::new()
225 }
226}
227
228impl BufferRef {
    /// Maps the whole buffer for read access for the lifetime of the
    /// returned guard; unmapping happens when the guard is dropped.
    ///
    /// # Errors
    ///
    /// Fails when the underlying memory cannot be mapped readable.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_readable(&self) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // Safe: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }

    /// Maps the whole buffer for read/write access for the lifetime of the
    /// returned guard; requires a mutable borrow as the contents may change.
    ///
    /// # Errors
    ///
    /// Fails when the underlying memory cannot be mapped writable.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_writable(&mut self) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // Safe: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }
269
    /// Converts a Rust range over memory-block indices into the
    /// `(start_index, length)` pair expected by the C "range" APIs,
    /// validating the bounds against the current number of memory blocks.
    fn memory_range_into_idx_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(u32, i32), glib::BoolError> {
        let n_memory = self.n_memory();
        debug_assert!(n_memory <= u32::MAX as usize);

        let start_idx = match range.start_bound() {
            // An inclusive start must refer to an existing block.
            ops::Bound::Included(idx) if *idx >= n_memory => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // Guards both against `idx + 1` overflowing and against an
            // out-of-range start.
            ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= n_memory) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // An inclusive end becomes exclusive (`idx + 1`); it may reach at
            // most `n_memory`.
            ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > n_memory) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > n_memory => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => n_memory,
        };

        Ok((
            start_idx as u32,
            // The C API takes the block count as a signed int.
            i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range"))?,
        ))
    }
306
    /// Maps only the given range of memory blocks for read access.
    ///
    /// # Errors
    ///
    /// Fails when the range is invalid or the memory cannot be mapped.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_readable(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // Safe: gst_buffer_map_range() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }

    /// Maps only the given range of memory blocks for read/write access.
    ///
    /// # Errors
    ///
    /// Fails when the range is invalid or the memory cannot be mapped
    /// writable.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_writable(
        &mut self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // Safe: gst_buffer_map_range() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }
362
    /// Converts a Rust byte range into the `(offset, length)` pair expected
    /// by the C byte-based APIs, validating the bounds against the buffer's
    /// total size. Mirrors `memory_range_into_idx_len` but for bytes.
    pub(crate) fn byte_range_into_offset_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(usize, usize), glib::BoolError> {
        let size = self.size();

        let start_idx = match range.start_bound() {
            // An inclusive start must point inside the buffer.
            ops::Bound::Included(idx) if *idx >= size => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // Guards both against `idx + 1` overflowing and against an
            // out-of-range start.
            ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= size) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // An inclusive end becomes exclusive (`idx + 1`); it may reach at
            // most `size`.
            ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > size) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > size => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => size,
        };

        Ok((start_idx, end_idx - start_idx))
    }
395
    /// Creates a new buffer containing a copy of the given byte range,
    /// copying the fields selected by `flags` (memory, metas, timestamps,
    /// ...).
    ///
    /// # Errors
    ///
    /// Fails when the range is invalid or the copy fails.
    #[doc(alias = "gst_buffer_copy_region")]
    pub fn copy_region(
        &self,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<Buffer, glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
                self.as_mut_ptr(),
                flags.into_glib(),
                offset,
                size,
            ))
            .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer"))
        }
    }

    /// Copies the fields selected by `flags` from the given byte range of
    /// this buffer into `dest`.
    ///
    /// # Errors
    ///
    /// Fails when the range is invalid or the copy fails.
    #[doc(alias = "gst_buffer_copy_into")]
    pub fn copy_into(
        &self,
        dest: &mut BufferRef,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<(), glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            glib::result_from_gboolean!(
                ffi::gst_buffer_copy_into(
                    dest.as_mut_ptr(),
                    self.as_mut_ptr(),
                    flags.into_glib(),
                    offset,
                    size,
                ),
                "Failed to copy into destination buffer",
            )
        }
    }
437
    /// Copies `slice` into the buffer contents starting at byte `offset`.
    ///
    /// # Panics
    ///
    /// Panics if `offset + slice.len()` exceeds the buffer's maximum size.
    ///
    /// # Errors
    ///
    /// On partial copies, returns the number of bytes actually copied.
    #[doc(alias = "gst_buffer_fill")]
    pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
        let maxsize = self.maxsize();
        let size = slice.len();

        // Ordered so that `maxsize - offset` cannot underflow.
        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let src = slice.as_ptr();
            ffi::gst_buffer_fill(
                self.as_mut_ptr(),
                offset,
                src as glib::ffi::gconstpointer,
                size,
            )
        };

        if copied == size { Ok(()) } else { Err(copied) }
    }

    /// Copies buffer contents starting at byte `offset` into `slice`.
    ///
    /// # Panics
    ///
    /// Panics if `offset + slice.len()` exceeds the buffer's size.
    ///
    /// # Errors
    ///
    /// On partial copies, returns the number of bytes actually copied.
    #[doc(alias = "gst_buffer_extract")]
    pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> {
        // NOTE: unlike `copy_from_slice` this checks against the current
        // size, not the maximum size, since only `size` bytes are readable.
        let maxsize = self.size();
        let size = slice.len();

        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let dest = slice.as_mut_ptr();
            ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size)
        };

        if copied == size { Ok(()) } else { Err(copied) }
    }
472
    /// Fills the given byte range of the buffer with `val`, returning the
    /// number of bytes actually written.
    ///
    /// # Errors
    ///
    /// Fails when the range is invalid.
    #[doc(alias = "gst_buffer_memset")]
    pub fn memset(
        &mut self,
        range: impl RangeBounds<usize>,
        val: u8,
    ) -> Result<usize, glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe { Ok(ffi::gst_buffer_memset(self.as_mut_ptr(), offset, val, size)) }
    }
483
484 #[doc(alias = "gst_buffer_memcmp")]
485 pub fn memcmp(
486 &mut self,
487 range: impl RangeBounds<usize>,
488 slice: &[u8],
489 ) -> Result<cmp::Ordering, glib::BoolError> {
490 let (offset, size) = self.byte_range_into_offset_len(range)?;
491
492 assert!(slice.len() >= size);
493
494 unsafe {
495 let res =
496 ffi::gst_buffer_memcmp(self.as_mut_ptr(), offset, slice.as_ptr() as *const _, size);
497
498 Ok(from_glib(res))
499 }
500 }
501
    /// Creates a deep copy of the buffer: the memory contents themselves are
    /// duplicated, not just referenced.
    ///
    /// # Errors
    ///
    /// Fails when the copy could not be made.
    #[doc(alias = "gst_buffer_copy_deep")]
    pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
                .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer"))
        }
    }

    /// Returns `(total_size, offset, maxsize)` of the buffer as reported by
    /// `gst_buffer_get_sizes()`.
    #[doc(alias = "get_sizes")]
    #[doc(alias = "gst_buffer_get_sizes")]
    pub fn sizes(&self) -> (usize, usize, usize) {
        unsafe {
            let mut offset = 0;
            let mut maxsize = 0;
            let total_size =
                ffi::gst_buffer_get_sizes(mut_override(self.as_ptr()), &mut offset, &mut maxsize);

            (total_size, offset, maxsize)
        }
    }

    /// Like [`Self::sizes`], but restricted to the given range of memory
    /// blocks.
    ///
    /// # Panics
    ///
    /// Panics if the memory range is invalid.
    #[doc(alias = "get_sizes_range")]
    #[doc(alias = "gst_buffer_get_sizes_range")]
    pub fn sizes_range(&self, range: impl RangeBounds<usize>) -> (usize, usize, usize) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe {
            let mut offset = 0;
            let mut maxsize = 0;
            let total_size = ffi::gst_buffer_get_sizes_range(
                mut_override(self.as_ptr()),
                idx,
                len,
                &mut offset,
                &mut maxsize,
            );

            (total_size, offset, maxsize)
        }
    }
544
    /// Returns the total size of the buffer contents in bytes.
    #[doc(alias = "get_size")]
    #[doc(alias = "gst_buffer_get_size")]
    pub fn size(&self) -> usize {
        unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
    }

    /// Returns the maximum size the buffer contents can grow to without
    /// reallocating, queried over all memory blocks (`0..-1` range).
    #[doc(alias = "get_maxsize")]
    pub fn maxsize(&self) -> usize {
        unsafe {
            let mut maxsize = mem::MaybeUninit::uninit();
            ffi::gst_buffer_get_sizes_range(
                self.as_mut_ptr(),
                0,
                -1,
                ptr::null_mut(),
                maxsize.as_mut_ptr(),
            );

            // Safe: the call above always writes the maxsize out-parameter.
            maxsize.assume_init()
        }
    }
566
    /// Sets the total size of the buffer contents.
    ///
    /// # Panics
    ///
    /// Panics if `size` exceeds the buffer's maximum size.
    #[doc(alias = "gst_buffer_set_size")]
    pub fn set_size(&mut self, size: usize) {
        assert!(self.maxsize() >= size);

        unsafe {
            ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
        }
    }

    /// Shrinks the buffer contents to the given byte range.
    ///
    /// # Errors
    ///
    /// Fails when the range is invalid.
    #[doc(alias = "gst_buffer_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) -> Result<(), glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            ffi::gst_buffer_resize(self.as_mut_ptr(), offset as isize, size as isize);
        }

        Ok(())
    }
586
587 #[doc(alias = "gst_buffer_resize_range")]
588 pub fn resize_range(
589 &mut self,
590 mem_range: impl RangeBounds<usize>,
591 byte_range: impl RangeBounds<usize>,
592 ) -> Result<(), glib::BoolError> {
593 let (idx, len) = self
594 .memory_range_into_idx_len(mem_range)
595 .expect("Invalid memory range");
596 let (offset, size) = self.byte_range_into_offset_len(byte_range)?;
597
598 unsafe {
599 glib::result_from_gboolean!(
600 ffi::gst_buffer_resize_range(
601 self.as_mut_ptr(),
602 idx,
603 len,
604 offset as isize,
605 size as isize,
606 ),
607 "Failed to resize buffer with ranges"
608 )
609 }
610 }
611
    /// Returns the media-specific offset of the buffer (direct field read).
    #[doc(alias = "get_offset")]
    #[doc(alias = "GST_BUFFER_OFFSET")]
    #[inline]
    pub fn offset(&self) -> u64 {
        self.0.offset
    }

    /// Sets the media-specific offset of the buffer.
    #[inline]
    pub fn set_offset(&mut self, offset: u64) {
        self.0.offset = offset;
    }

    /// Returns the media-specific end offset of the buffer.
    #[doc(alias = "get_offset_end")]
    #[doc(alias = "GST_BUFFER_OFFSET_END")]
    #[inline]
    pub fn offset_end(&self) -> u64 {
        self.0.offset_end
    }

    /// Sets the media-specific end offset of the buffer.
    #[inline]
    pub fn set_offset_end(&mut self, offset_end: u64) {
        self.0.offset_end = offset_end;
    }

    /// Returns the presentation timestamp, or `None` when unset.
    #[doc(alias = "get_pts")]
    #[doc(alias = "GST_BUFFER_PTS")]
    #[inline]
    pub fn pts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.pts) }
    }

    /// Sets the presentation timestamp; `None` clears it.
    #[inline]
    pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
        self.0.pts = pts.into().into_glib();
    }

    /// Returns the decoding timestamp, or `None` when unset.
    #[doc(alias = "get_dts")]
    #[doc(alias = "GST_BUFFER_DTS")]
    #[inline]
    pub fn dts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.dts) }
    }

    /// Sets the decoding timestamp; `None` clears it.
    #[inline]
    pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
        self.0.dts = dts.into().into_glib();
    }
659
660 #[doc(alias = "get_dts_or_pts")]
661 #[doc(alias = "GST_BUFFER_DTS_OR_PTS")]
662 #[inline]
663 pub fn dts_or_pts(&self) -> Option<ClockTime> {
664 let val = self.dts();
665 if val.is_none() { self.pts() } else { val }
666 }
667
    /// Returns the duration of the buffer contents, or `None` when unset.
    #[doc(alias = "get_duration")]
    #[doc(alias = "GST_BUFFER_DURATION")]
    #[inline]
    pub fn duration(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.duration) }
    }

    /// Sets the duration of the buffer contents; `None` clears it.
    #[inline]
    pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
        self.0.duration = duration.into().into_glib();
    }

    /// Returns the buffer flags, dropping any unknown bits.
    #[doc(alias = "get_flags")]
    #[doc(alias = "GST_BUFFER_FLAGS")]
    #[inline]
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_truncate(self.0.mini_object.flags)
    }

    /// Sets (ORs in) the given flags, keeping any flags already set.
    #[doc(alias = "GST_BUFFER_FLAG_SET")]
    #[inline]
    pub fn set_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags |= flags.bits();
    }

    /// Clears the given flags, keeping all others.
    #[doc(alias = "GST_BUFFER_FLAG_UNSET")]
    #[inline]
    pub fn unset_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags &= !flags.bits();
    }
698
    /// Returns the first meta of type `T` attached to the buffer, if any.
    #[doc(alias = "get_meta")]
    #[doc(alias = "gst_buffer_get_meta")]
    #[inline]
    pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<'_, T>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
            }
        }
    }

    /// Returns the first meta of type `T` attached to the buffer as a
    /// mutable reference, if any.
    #[doc(alias = "get_meta_mut")]
    #[inline]
    pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<'_, T, crate::meta::Standalone>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
            }
        }
    }

    /// Iterates over all metas of type `T` attached to the buffer.
    pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<'_, T> {
        MetaIter::new(self)
    }

    /// Iterates mutably over all metas of type `T` attached to the buffer.
    pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<'_, T> {
        MetaIterMut::new(self)
    }
733
    /// Calls `func` for every meta attached to the buffer. Returning
    /// `ControlFlow::Break` stops the iteration early; the return value is
    /// what the C call reported (`false` when the callback aborted).
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
        // C callback shim: recovers the Rust closure from `user_data` and
        // translates its `ControlFlow` result to the gboolean the C API
        // expects ("continue iterating?").
        unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            unsafe {
                let func = user_data as *mut F;
                let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));

                matches!(res, ControlFlow::Continue(_)).into_glib()
            }
        }

        unsafe {
            // The closure lives on this stack frame for the whole duration
            // of the (synchronous) foreach call below.
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }

    /// Mutable variant of [`Self::foreach_meta`]: additionally to
    /// continuing/breaking, the callback decides per meta whether it is kept
    /// or removed from the buffer.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta_mut<
        F: FnMut(
            MetaRefMut<Meta, crate::meta::Iterated>,
        ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
    >(
        &mut self,
        func: F,
    ) -> bool {
        // C callback shim: translates the `ControlFlow<Action, Action>`
        // result into the C protocol, where setting `*meta` to null requests
        // removal of the current meta.
        unsafe extern "C" fn trampoline<
            F: FnMut(
                MetaRefMut<Meta, crate::meta::Iterated>,
            ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
        >(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            unsafe {
                let func = user_data as *mut F;
                let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));

                let (cont, action) = match res {
                    ControlFlow::Continue(action) => (true, action),
                    ControlFlow::Break(action) => (false, action),
                };

                if action == BufferMetaForeachAction::Remove {
                    // Null out the meta pointer to ask the C iteration to
                    // drop this meta from the buffer.
                    *meta = ptr::null_mut();
                }

                cont.into_glib()
            }
        }

        unsafe {
            // The closure lives on this stack frame for the whole duration
            // of the (synchronous) foreach call below.
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
807
    /// Appends `mem` as the last memory block of the buffer, taking
    /// ownership of it.
    #[doc(alias = "gst_buffer_append_memory")]
    pub fn append_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }
812
    /// Finds the memory blocks that span the given byte range.
    ///
    /// On success returns the block index range covering the bytes and the
    /// number of bytes to skip inside the first block; `None` when the byte
    /// range is invalid or not covered.
    #[doc(alias = "gst_buffer_find_memory")]
    pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
        let (offset, size) = self.byte_range_into_offset_len(range).ok()?;

        unsafe {
            let mut idx = mem::MaybeUninit::uninit();
            let mut length = mem::MaybeUninit::uninit();
            let mut skip = mem::MaybeUninit::uninit();

            let res = from_glib(ffi::gst_buffer_find_memory(
                self.as_mut_ptr(),
                offset,
                size,
                idx.as_mut_ptr(),
                length.as_mut_ptr(),
                skip.as_mut_ptr(),
            ));

            if res {
                // Safe: on success the out-parameters have been written.
                let idx = idx.assume_init() as usize;
                let length = length.assume_init() as usize;
                let skip = skip.assume_init();
                Some((idx..(idx + length), skip))
            } else {
                None
            }
        }
    }
841
    /// Returns all memory of the buffer merged into a single memory block,
    /// or `None` on failure.
    #[doc(alias = "get_all_memory")]
    #[doc(alias = "gst_buffer_get_all_memory")]
    pub fn all_memory(&self) -> Option<Memory> {
        unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
    }

    /// Returns the global maximum number of memory blocks a buffer can hold.
    #[doc(alias = "get_max_memory")]
    #[doc(alias = "gst_buffer_get_max_memory")]
    pub fn max_memory() -> usize {
        unsafe { ffi::gst_buffer_get_max_memory() as usize }
    }
853
854 #[doc(alias = "get_memory")]
855 #[doc(alias = "gst_buffer_get_memory")]
856 pub fn memory(&self, idx: usize) -> Option<Memory> {
857 if idx >= self.n_memory() {
858 return None;
859 }
860 unsafe {
861 let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32);
862 Some(from_glib_full(res))
863 }
864 }
865
    /// Returns the memory blocks in the given range merged into one memory
    /// block, or `None` when the range is invalid or the merge fails.
    #[doc(alias = "get_memory_range")]
    #[doc(alias = "gst_buffer_get_memory_range")]
    pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
        let (idx, len) = self.memory_range_into_idx_len(range).ok()?;

        unsafe {
            let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
            from_glib_full(res)
        }
    }
876
877 #[doc(alias = "gst_buffer_insert_memory")]
878 pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) {
879 let n_memory = self.n_memory();
880 let idx = idx.into();
881 let idx = idx.unwrap_or(n_memory);
882 assert!(idx <= self.n_memory());
883 unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) }
884 }
885
    /// Returns `true` if every memory block of the buffer is writable.
    #[doc(alias = "gst_buffer_is_all_memory_writable")]
    pub fn is_all_memory_writable(&self) -> bool {
        unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
    }
890
891 #[doc(alias = "gst_buffer_is_memory_range_writable")]
892 pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
893 let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
894 return false;
895 };
896
897 unsafe {
898 from_glib(ffi::gst_buffer_is_memory_range_writable(
899 self.as_mut_ptr(),
900 idx,
901 len,
902 ))
903 }
904 }
905
    /// Returns the number of memory blocks currently attached to the buffer.
    #[doc(alias = "gst_buffer_n_memory")]
    pub fn n_memory(&self) -> usize {
        unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize }
    }

    /// Borrows the memory block at `idx` without increasing its refcount.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
        assert!(idx < self.n_memory());
        unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
    }

    /// Mutably borrows the memory block at `idx`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    ///
    /// # Errors
    ///
    /// Fails when that memory block is not writable (shared refcount).
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
        assert!(idx < self.n_memory());
        unsafe {
            let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
            if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
                Err(glib::bool_error!("Memory not writable"))
            } else {
                Ok(MemoryRef::from_mut_ptr(mem))
            }
        }
    }
929
    /// Prepends `mem` as the first memory block of the buffer, taking
    /// ownership of it.
    #[doc(alias = "gst_buffer_prepend_memory")]
    pub fn prepend_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Removes all memory blocks from the buffer.
    #[doc(alias = "gst_buffer_remove_all_memory")]
    pub fn remove_all_memory(&mut self) {
        unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
    }

    /// Removes the memory block at `idx`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_remove_memory")]
    pub fn remove_memory(&mut self, idx: usize) {
        assert!(idx < self.n_memory());
        unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
    }

    /// Removes all memory blocks in the given range.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid.
    #[doc(alias = "gst_buffer_remove_memory_range")]
    pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
    }

    /// Replaces all memory blocks of the buffer with `mem`.
    #[doc(alias = "gst_buffer_replace_all_memory")]
    pub fn replace_all_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Replaces the memory block at `idx` with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_replace_memory")]
    pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
        assert!(idx < self.n_memory());
        unsafe {
            ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
        }
    }

    /// Replaces all memory blocks in the given range with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid.
    #[doc(alias = "gst_buffer_replace_memory_range")]
    pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe {
            ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
        }
    }
978
979 pub fn iter_memories(&self) -> Iter<'_> {
980 Iter::new(self)
981 }
982
983 pub fn iter_memories_mut(&mut self) -> Result<IterMut<'_>, glib::BoolError> {
984 if !self.is_all_memory_writable() {
985 Err(glib::bool_error!("Not all memory are writable"))
986 } else {
987 Ok(IterMut::new(self))
988 }
989 }
990
991 pub fn iter_memories_owned(&self) -> IterOwned<'_> {
992 IterOwned::new(self)
993 }
994
    /// Creates a read cursor over this buffer reference.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }

    /// Creates a write cursor over this buffer reference; fails if the
    /// buffer's memory cannot be written.
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }
1004
    /// Returns a helper that hex-dumps the complete buffer contents when
    /// formatted.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump(&self) -> Dump<'_> {
        Dump {
            buffer: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Like [`Self::dump`], but restricted to the given byte range.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            buffer: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
1022}
1023
// Generates the meta iterator types (`MetaIter` / `MetaIterMut`). The macro
// parameters are: iterator name, borrowed buffer type, yielded item type, a
// closure converting the raw buffer pointer back to the borrow, and a closure
// converting a raw meta pointer into the item.
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
    #[must_use = "iterators are lazy and do nothing unless consumed"]
    pub struct $name<'a, T: MetaAPI + 'a> {
        buffer: $typ,
        // Opaque iteration state owned by gst_buffer_iterate_meta().
        state: glib::ffi::gpointer,
        meta_api: glib::Type,
        items: PhantomData<$mtyp>,
    }

    unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
    unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

    impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.debug_struct(stringify!($name))
                .field("buffer", &self.buffer)
                .field("state", &self.state)
                .field("meta_api", &self.meta_api)
                .field("items", &self.items)
                .finish()
        }
    }

    impl<'a, T: MetaAPI> $name<'a, T> {
        fn new(buffer: $typ) -> $name<'a, T> {
            skip_assert_initialized!();

            $name {
                buffer,
                state: ptr::null_mut(),
                meta_api: T::meta_api(),
                items: PhantomData,
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
        type Item = $mtyp;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
                unsafe {
                    let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                    if meta.is_null() {
                        return None;
                    } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                        // INVALID means "match any meta type"; otherwise only
                        // yield metas whose API type matches `T`.
                        let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                        let item = $from_ptr(buffer, meta);
                        return Some(item);
                    }
                }
            }
        }
    }

    impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
1086
// Shared-borrow meta iterator yielding `MetaRef`s.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable-borrow meta iterator yielding `MetaRefMut`s in "iterated" mode.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
1101
// Generates a fixed-size memory-block iterator: the length is the number of
// memory blocks and `$get_item` fetches the item at a given index.
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
    crate::utils::define_fixed_size_iter!(
        $name, $typ, $mtyp,
        |buffer: &BufferRef| buffer.n_memory() as usize,
        $get_item
    );
    }
);
1111
// Iterator over borrowed `&MemoryRef`s, one per memory block.
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_ptr(ptr as *const ffi::GstMemory)
    }
);

// Iterator over mutable `&mut MemoryRef`s; only constructed via
// `iter_memories_mut()`, which first checks that all memory is writable.
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_mut_ptr(ptr)
    }
);
1131
impl<'a> IntoIterator for &'a BufferRef {
    type IntoIter = Iter<'a>;
    type Item = &'a MemoryRef;

    // Allows `for mem in &buffer_ref { ... }` over the memory blocks.
    fn into_iter(self) -> Self::IntoIter {
        self.iter_memories()
    }
}
1140
1141impl From<Memory> for Buffer {
1142 fn from(value: Memory) -> Self {
1143 skip_assert_initialized!();
1144
1145 let mut buffer = Buffer::new();
1146 {
1147 let buffer = buffer.get_mut().unwrap();
1148 buffer.append_memory(value);
1149 }
1150 buffer
1151 }
1152}
1153
1154impl<const N: usize> From<[Memory; N]> for Buffer {
1155 fn from(value: [Memory; N]) -> Self {
1156 skip_assert_initialized!();
1157
1158 let mut buffer = Buffer::new();
1159 {
1160 let buffer = buffer.get_mut().unwrap();
1161 value.into_iter().for_each(|b| buffer.append_memory(b));
1162 }
1163 buffer
1164 }
1165}
1166
1167impl std::iter::FromIterator<Memory> for Buffer {
1168 fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self {
1169 skip_assert_initialized!();
1170 let iter = iter.into_iter();
1171
1172 let mut buffer = Buffer::new();
1173
1174 {
1175 let buffer = buffer.get_mut().unwrap();
1176 iter.for_each(|m| buffer.append_memory(m));
1177 }
1178
1179 buffer
1180 }
1181}
1182
1183impl std::iter::Extend<Memory> for BufferRef {
1184 fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) {
1185 iter.into_iter().for_each(|m| self.append_memory(m));
1186 }
1187}
1188
// Iterator over owned `Memory` handles (each block's refcount is increased).
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_get_memory(buffer.as_mut_ptr(), idx as u32);
        from_glib_full(ptr)
    }
);
1198
impl fmt::Debug for Buffer {
    // Delegates to the `BufferRef` implementation (via Deref coercion).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        BufferRef::fmt(self, f)
    }
}
1204
impl PartialEq for Buffer {
    // Delegates to the content-based comparison on `BufferRef`.
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(self, other)
    }
}

impl Eq for Buffer {}
1212
// Symmetric cross-type comparisons between owned `Buffer` and borrowed
// `BufferRef`; both delegate to the content-based `BufferRef::eq`.
impl PartialEq<BufferRef> for Buffer {
    fn eq(&self, other: &BufferRef) -> bool {
        BufferRef::eq(self, other)
    }
}
impl PartialEq<Buffer> for BufferRef {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(other, self)
    }
}
1223
impl fmt::Debug for BufferRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter that lets a mutable iterator be rendered with
        // `debug_list()` from behind the immutable `&self` of `fmt`.
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                // Only the API type of each attached meta is listed, not the
                // full meta contents.
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1258
1259impl PartialEq for BufferRef {
1260 fn eq(&self, other: &BufferRef) -> bool {
1261 if self.size() != other.size() {
1262 return false;
1263 }
1264
1265 let self_map = self.map_readable();
1266 let other_map = other.map_readable();
1267
1268 match (self_map, other_map) {
1269 (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1270 _ => false,
1271 }
1272 }
1273}
1274
1275impl Eq for BufferRef {}
1276
impl<T> BufferMap<'_, T> {
    /// Returns the number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns the buffer this map borrows from.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer
    }

    /// Returns the mapped bytes as an immutable slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Empty maps get a static empty slice rather than building one
        // from `map_info.data`, which need not be a valid pointer then.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }
}
1298
impl BufferMap<'_, Writable> {
    /// Returns the mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // Avoid constructing a slice from `map_info.data` when nothing is
        // mapped; the pointer need not be valid then.
        if self.map_info.size == 0 {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
1308
// Byte-slice view of the mapped data.
impl<T> AsRef<[u8]> for BufferMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1315
// Mutable byte-slice view; only writable maps provide it.
impl AsMut<[u8]> for BufferMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1322
// Lets a map be used directly where `&[u8]` is expected.
impl<T> ops::Deref for BufferMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1331
// Mutable deref; only writable maps provide it.
impl ops::DerefMut for BufferMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1338
impl<T> fmt::Debug for BufferMap<'_, T> {
    // Shows the underlying buffer, not the mapped bytes themselves.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("BufferMap").field(&self.buffer()).finish()
    }
}
1344
1345impl<'a, T> PartialEq for BufferMap<'a, T> {
1346 fn eq(&self, other: &BufferMap<'a, T>) -> bool {
1347 self.as_slice().eq(other.as_slice())
1348 }
1349}
1350
1351impl<T> Eq for BufferMap<'_, T> {}
1352
impl<T> Drop for BufferMap<'_, T> {
    // Unmaps the buffer when the guard goes out of scope.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `map_info` was filled by a successful map of `buffer`
            // and is unmapped exactly once, here.
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1361
// SAFETY: presumably sound because the map only holds a borrowed buffer plus
// plain map-info data and exposes bytes; mutation requires `&mut self`.
// NOTE(review): invariant relies on GStreamer's thread-safety — confirm.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1364
impl<T> MappedBuffer<T> {
    /// Returns the mapped bytes as an immutable slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Empty maps get a static empty slice rather than building one
        // from `map_info.data`, which need not be a valid pointer then.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Returns the number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns a reference to the underlying buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    /// Unmaps the buffer and returns ownership of it.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        // Suppress `Drop` so the buffer is not unmapped twice: move the
        // `Buffer` field out by raw read, then perform the single unmap
        // here. `map_info` stays valid inside the `ManuallyDrop` wrapper.
        let mut s = mem::ManuallyDrop::new(self);
        let buffer = unsafe { ptr::read(&s.buffer) };
        unsafe {
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1397
impl MappedBuffer<Readable> {
    /// Returns an additional owned reference to the mapped buffer.
    /// Only available for readable maps, where extra references are safe.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer_owned(&self) -> Buffer {
        self.buffer.clone()
    }
}
1405
impl MappedBuffer<Writable> {
    /// Returns the mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // Avoid constructing a slice from `map_info.data` when nothing is
        // mapped; the pointer need not be valid then.
        if self.map_info.size == 0 {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
1415
// Byte-slice view of the mapped data.
impl<T> AsRef<[u8]> for MappedBuffer<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1422
// Mutable byte-slice view; only writable maps provide it.
impl AsMut<[u8]> for MappedBuffer<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1429
// Lets a mapped buffer be used directly where `&[u8]` is expected.
impl<T> ops::Deref for MappedBuffer<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1438
// Mutable deref; only writable maps provide it.
impl ops::DerefMut for MappedBuffer<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1445
impl<T> Drop for MappedBuffer<T> {
    // Unmaps the buffer when the guard goes out of scope. `into_buffer`
    // bypasses this via `ManuallyDrop` and unmaps itself.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `map_info` was filled by a successful map of `buffer`
            // and is unmapped exactly once, here.
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1454
impl<T> fmt::Debug for MappedBuffer<T> {
    // Shows the underlying buffer, not the mapped bytes themselves.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
    }
}
1460
impl<T> PartialEq for MappedBuffer<T> {
    // Byte-content comparison of the mapped regions.
    fn eq(&self, other: &MappedBuffer<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MappedBuffer<T> {}
1468
// SAFETY: presumably sound because the mapped buffer owns its `Buffer` and
// map info, and mutation requires `&mut self`.
// NOTE(review): invariant relies on GStreamer's thread-safety — confirm.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1471
// Convenience copy-flag masks mirroring the C-level defines, exposed as
// `BufferCopyFlags` values.
#[doc(alias = "GST_BUFFER_COPY_METADATA")]
pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
#[doc(alias = "GST_BUFFER_COPY_ALL")]
pub const BUFFER_COPY_ALL: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1478
// Hexdump helper over a byte range of a buffer; rendered via its `Display`
// (hex only) and `Debug` (offsets + hex + ASCII) impls below.
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Requested range, kept as raw bounds and resolved at format time.
    start: Bound<usize>,
    end: Bound<usize>,
}
1484
// Iterates over a buffer's bytes in chunks of up to 16, crossing memory
// boundaries as needed; yields `(chunk, valid_len)` pairs.
#[must_use = "iterators are lazy and do nothing unless consumed"]
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Index of the memory currently being read.
    mem_idx: usize,
    // Total number of memories in the buffer.
    mem_len: usize,
    // Readable map of the current memory, created lazily.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    // Read offset into the current map.
    map_offset: usize,
    // Bytes still to be produced overall.
    len: usize,
}
1494
impl Iterator for BufferChunked16Iter<'_> {
    // A 16-byte chunk plus the number of valid bytes in it (the final
    // chunk may be shorter than 16).
    type Item = ([u8; 16], usize);

    fn next(&mut self) -> Option<Self::Item> {
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        let mut data = item.as_mut_slice();

        // Fill the chunk, mapping one memory at a time and advancing to the
        // next memory whenever the current one is exhausted.
        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            if self.map.is_none() {
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            debug_assert!(self.map_offset < map.len());
            // Copy the minimum of: bytes left in this map, space left in
            // the chunk, and bytes still requested overall.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            if self.map_offset == map.len() {
                // Current memory fully consumed; drop its map and move on.
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1532
impl Dump<'_> {
    // Shared implementation behind `Display` (`debug == false`: hex bytes
    // only) and `Debug` (`debug == true`: offset prefix, hex bytes and an
    // ASCII column), 16 bytes per output line.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let n_memory = self.buffer.n_memory();
        if n_memory == 0 {
            write!(f, "<empty>")?;
            return Ok(());
        }

        use std::fmt::Write;

        let len = self.buffer.size();

        // Resolve the start bound to an inclusive byte index; reject
        // out-of-range starts (checked_add guards usize overflow for
        // excluded bounds).
        let mut start_idx = match self.start {
            Bound::Included(idx) if idx >= len => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= len) => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx,
            Bound::Excluded(idx) => idx + 1,
            Bound::Unbounded => 0,
        };

        // Resolve the end bound to an exclusive byte index, same guards.
        let end_idx = match self.end {
            Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > len) => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx > len => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx + 1,
            Bound::Excluded(idx) => idx,
            Bound::Unbounded => len,
        };

        if start_idx >= end_idx {
            write!(f, "<empty range>")?;
            return Ok(());
        }

        // Locate the memory containing the first requested byte and the
        // byte's offset within that memory.
        let (memory_range, skip) = self
            .buffer
            .find_memory(start_idx..)
            .expect("can't find memory");

        let chunks = BufferChunked16Iter {
            buffer: self.buffer,
            mem_idx: memory_range.start,
            mem_len: n_memory,
            map: None,
            map_offset: skip,
            len: end_idx - start_idx,
        };

        if debug {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Offset prefix width grows with the largest offset printed.
                match end_idx {
                    0x00_00..=0xff_ff => write!(f, "{start_idx:04x}: ")?,
                    0x01_00_00..=0xff_ff_ff => write!(f, "{start_idx:06x}: ")?,
                    0x01_00_00_00..=0xff_ff_ff_ff => write!(f, "{start_idx:08x}: ")?,
                    _ => write!(f, "{start_idx:016x}: ")?,
                }

                // Hex column, space-separated.
                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {v:02x}")?;
                    } else {
                        write!(f, "{v:02x}")?;
                    }
                }

                // Pad short final lines so the ASCII column stays aligned.
                for _ in line.len()..16 {
                    write!(f, " ")?;
                }
                write!(f, " ")?;

                // ASCII column: printable characters as-is, rest as '.'.
                for v in line {
                    if v.is_ascii() && !v.is_ascii_control() {
                        f.write_char((*v).into())?;
                    } else {
                        f.write_char('.')?;
                    }
                }

                // No trailing newline after the last line.
                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        } else {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {v:02x}")?;
                    } else {
                        write!(f, "{v:02x}")?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        }
    }
}
1656
impl fmt::Display for Dump<'_> {
    // Compact form: hex bytes only, no offsets or ASCII column.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
1662
impl fmt::Debug for Dump<'_> {
    // Full hexdump form: offsets, hex bytes and ASCII column.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
1668
#[cfg(test)]
mod tests {
    use super::*;

    // Setting and reading back the basic buffer fields (pts/dts/offsets/duration).
    #[test]
    fn test_fields() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();

        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(ClockTime::NSECOND);
            buffer.set_dts(2 * ClockTime::NSECOND);
            buffer.set_offset(3);
            buffer.set_offset_end(4);
            buffer.set_duration(Some(5 * ClockTime::NSECOND));
        }
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
        assert_eq!(buffer.offset(), 3);
        assert_eq!(buffer.offset_end(), 4);
        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
    }

    // Copy-on-write semantics: `get_mut` on shared buffers fails, `make_mut`
    // copies, and the copy's data is independent of the original.
    #[test]
    fn test_writability() {
        crate::init().unwrap();

        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
        }
        assert_ne!(buffer.get_mut(), None);
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(Some(ClockTime::NSECOND));
        }

        let mut buffer2 = buffer.clone();
        assert_eq!(buffer.get_mut(), None);

        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());

        {
            // Shared buffer: `make_mut` must produce a new copy.
            let buffer2 = buffer2.make_mut();
            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());

            buffer2.set_pts(Some(2 * ClockTime::NSECOND));

            let mut data = buffer2.map_writable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
            data.as_mut_slice()[0] = 0;
        }

        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));

        {
            // The original buffer must be unaffected by the copy's write.
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());

            let data = buffer2.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
        }
    }

    // All ways of accessing a multi-memory buffer: indexed access, peeking,
    // and the borrowed/mutable/owned iterators.
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_memories() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
        }

        assert!(buffer.is_all_memory_writable());
        assert_eq!(buffer.n_memory(), 5);
        assert_eq!(buffer.size(), 30);

        for i in 0..5 {
            {
                let mem = buffer.memory(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let mem = buffer.peek_memory(i);
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let buffer = buffer.get_mut().unwrap();
                let mem = buffer.peek_memory_mut(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_writable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }
        }

        {
            let buffer = buffer.get_mut().unwrap();
            let mut last = 0;
            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_writable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                last = i;
            }

            assert_eq!(last, 4);
        }

        let mut last = 0;
        for (i, mem) in buffer.iter_memories().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);

        let mut last = 0;
        for (i, mem) in buffer.iter_memories_owned().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);
    }

    // Read-only meta iteration visits metas in insertion order.
    #[test]
    fn test_meta_foreach() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
    }

    // Mutable meta iteration can selectively remove metas via
    // `BufferMetaForeachAction::Remove`.
    #[test]
    fn test_meta_foreach_mut() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            if meta.timestamp() == ClockTime::SECOND {
                ControlFlow::Continue(BufferMetaForeachAction::Remove)
            } else {
                ControlFlow::Continue(BufferMetaForeachAction::Keep)
            }
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);

        // Only the SECOND-timestamped meta should have been removed.
        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
    }

    // `ptr_eq` compares buffer identity, not content.
    #[test]
    fn test_ptr_eq() {
        crate::init().unwrap();

        let buffer1 = Buffer::new();
        assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
        let buffer2 = Buffer::new();
        assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
    }

    // `copy_region` with every supported range form, including out-of-range
    // error cases and partial copies.
    #[test]
    fn test_copy_region() {
        crate::init().unwrap();

        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );

        // Ranges beyond the 8-byte buffer must fail.
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
    }

    // `Debug` and `Display` dump output for a single-memory buffer,
    // including range clamping and multi-line output.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00 ..."
        );
        s.clear();
    }

    // Dump output over a buffer composed of several memories: chunking must
    // cross memory boundaries transparently.
    #[test]
    fn test_dump_multi_memories() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut buffer = crate::Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();

            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
            buffer.append_memory(mem);
        }

        let mut s = String::new();
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 ................\n\
             0010: 11 12 13 ..."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10\n11 12 13"
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12 ................\n\
             0012: 13 ."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
        assert_eq!(
            s,
            "000e: 0f 10 11 ..."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();

        #[allow(clippy::reversed_empty_ranges)]
        {
            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
            assert_eq!(s, "<start out of range>");
            s.clear();
        }
    }

    // Round-trip: wrap a Vec<u8> and extract it again via `try_into_inner`.
    #[test]
    fn test_buffer_wrap_vec_u8() {
        crate::init().unwrap();

        let data = vec![1u8, 2, 3, 4, 5];
        let expected = data.clone();

        let buf = Buffer::from_slice(data);
        assert_eq!(buf.size(), 5);
        assert_eq!(buf.n_memory(), 1);

        let converted: Vec<u8> = buf.try_into_inner().unwrap();
        assert_eq!(converted, expected);
    }

    // Extracting with the wrong inner type must fail with `TypeMismatch`
    // and return the buffer.
    #[test]
    fn test_buffer_into_wrong_type() {
        crate::init().unwrap();

        let buf = Buffer::from_slice(vec![1u8, 2, 3, 4, 5]);
        assert_eq!(buf.size(), 5);
        assert_eq!(buf.n_memory(), 1);

        let res = buf.try_into_inner::<Vec<u32>>();
        assert!(res.is_err());
        let (_buf, err) = res.err().unwrap();
        assert!(matches!(
            err,
            crate::MemoryIntoInnerError::TypeMismatch { .. }
        ));
    }

    // Writes through a writable map must be visible in the extracted inner
    // Vec afterwards.
    #[test]
    fn test_buffer_modify_and_extract() {
        crate::init().unwrap();

        let data = vec![0u8; 10];
        let mut buf = Buffer::from_mut_slice(data);

        {
            let bufref = buf.make_mut();
            let mut mapped = bufref.map_writable().unwrap();
            let slice = mapped.as_mut_slice();
            for (i, byte) in slice.iter_mut().enumerate() {
                *byte = (i * 2) as u8;
            }
        }

        let extracted: Vec<u8> = buf.try_into_inner().unwrap();
        assert_eq!(extracted, vec![0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
    }
}