1 // Take a look at the license at the top of the repository in the LICENSE file.
2 
3 use std::fmt;
4 use std::marker::PhantomData;
5 use std::mem;
6 use std::ops;
7 use std::ptr;
8 use std::slice;
9 
10 use glib::translate::{from_glib, from_glib_full, from_glib_none, ToGlibPtr};
11 
12 use crate::AllocationParams;
13 use crate::Allocator;
14 use crate::MemoryFlags;
15 
// Generates the refcounted `Memory` (owned) and `MemoryRef` (borrowed)
// wrapper types around the `ffi::GstMemory` mini-object.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
19 
// RAII guard for a mapped `MemoryRef`: keeps the borrow alive for `'a` and
// unmaps in `Drop`. `T` is the `Readable`/`Writable` marker and only selects
// which accessors are available.
pub struct MemoryMap<'a, T> {
    memory: &'a MemoryRef,
    // Map info filled by `gst_memory_map()`; needed again for the unmap.
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
25 
// Owned variant of `MemoryMap`: consumes the `Memory` while mapped.
// `memory` is an `Option` so `into_memory()` can take it back out before
// `Drop` runs (see `Drop for MappedMemory` below).
pub struct MappedMemory<T> {
    memory: Option<Memory>,
    // Map info filled by `gst_memory_map()`; needed again for the unmap.
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
31 
32 impl fmt::Debug for Memory {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result33     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
34         MemoryRef::fmt(self, f)
35     }
36 }
37 
38 impl fmt::Debug for MemoryRef {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result39     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
40         f.debug_struct("Memory")
41             .field("ptr", unsafe { &self.as_ptr() })
42             .field("allocator", &self.allocator())
43             .field("parent", &self.parent())
44             .field("maxsize", &self.maxsize())
45             .field("align", &self.align())
46             .field("offset", &self.offset())
47             .field("size", &self.size())
48             .field("flags", &self.flags())
49             .finish()
50     }
51 }
52 
// Uninhabited marker types used as the `T` parameter of `MemoryMap` /
// `MappedMemory` to encode the map mode in the type system.
pub enum Readable {}
pub enum Writable {}
55 
56 impl Memory {
drop_box<T>(vec: glib::ffi::gpointer)57     unsafe extern "C" fn drop_box<T>(vec: glib::ffi::gpointer) {
58         let slice: Box<T> = Box::from_raw(vec as *mut T);
59         drop(slice);
60     }
61 
with_size(size: usize) -> Self62     pub fn with_size(size: usize) -> Self {
63         assert_initialized_main_thread!();
64         unsafe {
65             from_glib_full(ffi::gst_allocator_alloc(
66                 ptr::null_mut(),
67                 size,
68                 ptr::null_mut(),
69             ))
70         }
71     }
72 
with_size_and_params(size: usize, params: &AllocationParams) -> Self73     pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
74         assert_initialized_main_thread!();
75         unsafe {
76             from_glib_full(ffi::gst_allocator_alloc(
77                 ptr::null_mut(),
78                 size,
79                 params.as_ptr() as *mut _,
80             ))
81         }
82     }
83 
from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self84     pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
85         assert_initialized_main_thread!();
86         unsafe {
87             let b = Box::new(slice);
88             let (size, data) = {
89                 let slice = (*b).as_ref();
90                 (slice.len(), slice.as_ptr())
91             };
92             let user_data = Box::into_raw(b);
93             from_glib_full(ffi::gst_memory_new_wrapped(
94                 ffi::GST_MEMORY_FLAG_READONLY,
95                 data as glib::ffi::gpointer,
96                 size,
97                 0,
98                 size,
99                 user_data as glib::ffi::gpointer,
100                 Some(Self::drop_box::<T>),
101             ))
102         }
103     }
104 
from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self105     pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
106         assert_initialized_main_thread!();
107 
108         unsafe {
109             let mut b = Box::new(slice);
110             let (size, data) = {
111                 let slice = (*b).as_mut();
112                 (slice.len(), slice.as_mut_ptr())
113             };
114             let user_data = Box::into_raw(b);
115             from_glib_full(ffi::gst_memory_new_wrapped(
116                 0,
117                 data as glib::ffi::gpointer,
118                 size,
119                 0,
120                 size,
121                 user_data as glib::ffi::gpointer,
122                 Some(Self::drop_box::<T>),
123             ))
124         }
125     }
126 
into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self>127     pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
128         unsafe {
129             let mut map_info = mem::MaybeUninit::zeroed();
130             let res: bool = from_glib(ffi::gst_memory_map(
131                 self.as_mut_ptr(),
132                 map_info.as_mut_ptr(),
133                 ffi::GST_MAP_READ,
134             ));
135             if res {
136                 Ok(MappedMemory {
137                     memory: Some(self),
138                     map_info: map_info.assume_init(),
139                     phantom: PhantomData,
140                 })
141             } else {
142                 Err(self)
143             }
144         }
145     }
146 
into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self>147     pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
148         unsafe {
149             let mut map_info = mem::MaybeUninit::zeroed();
150             let res: bool = from_glib(ffi::gst_memory_map(
151                 self.as_mut_ptr(),
152                 map_info.as_mut_ptr(),
153                 ffi::GST_MAP_READWRITE,
154             ));
155             if res {
156                 Ok(MappedMemory {
157                     memory: Some(self),
158                     map_info: map_info.assume_init(),
159                     phantom: PhantomData,
160                 })
161             } else {
162                 Err(self)
163             }
164         }
165     }
166 }
167 
168 impl MemoryRef {
169     #[doc(alias = "get_allocator")]
allocator(&self) -> Option<Allocator>170     pub fn allocator(&self) -> Option<Allocator> {
171         unsafe { from_glib_none(self.0.allocator) }
172     }
173 
174     #[doc(alias = "get_parent")]
parent(&self) -> Option<&MemoryRef>175     pub fn parent(&self) -> Option<&MemoryRef> {
176         unsafe {
177             if self.0.parent.is_null() {
178                 None
179             } else {
180                 Some(MemoryRef::from_ptr(self.0.parent))
181             }
182         }
183     }
184 
185     #[doc(alias = "get_maxsize")]
maxsize(&self) -> usize186     pub fn maxsize(&self) -> usize {
187         self.0.maxsize
188     }
189 
190     #[doc(alias = "get_align")]
align(&self) -> usize191     pub fn align(&self) -> usize {
192         self.0.align
193     }
194 
195     #[doc(alias = "get_offset")]
offset(&self) -> usize196     pub fn offset(&self) -> usize {
197         self.0.offset
198     }
199 
200     #[doc(alias = "get_size")]
size(&self) -> usize201     pub fn size(&self) -> usize {
202         self.0.size
203     }
204 
205     #[doc(alias = "get_flags")]
flags(&self) -> MemoryFlags206     pub fn flags(&self) -> MemoryFlags {
207         unsafe { from_glib(self.0.mini_object.flags) }
208     }
209 
copy_part(&self, offset: isize, size: Option<usize>) -> Memory210     pub fn copy_part(&self, offset: isize, size: Option<usize>) -> Memory {
211         let pos_sz = match size {
212             Some(val) => val as isize,
213             None => 0,
214         };
215         assert!(offset + pos_sz < (self.maxsize() as isize));
216         unsafe {
217             from_glib_full(ffi::gst_memory_copy(
218                 self.as_mut_ptr(),
219                 offset,
220                 match size {
221                     Some(val) => val as isize,
222                     None => -1,
223                 },
224             ))
225         }
226     }
227 
228     #[doc(alias = "gst_memory_is_span")]
is_span(&self, mem2: &MemoryRef) -> Option<usize>229     pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
230         unsafe {
231             let mut offset = mem::MaybeUninit::uninit();
232             let res = from_glib(ffi::gst_memory_is_span(
233                 self.as_mut_ptr(),
234                 mem2.as_mut_ptr(),
235                 offset.as_mut_ptr(),
236             ));
237             if res {
238                 Some(offset.assume_init())
239             } else {
240                 None
241             }
242         }
243     }
244 
245     #[doc(alias = "gst_memory_is_type")]
is_type(&self, mem_type: &str) -> bool246     pub fn is_type(&self, mem_type: &str) -> bool {
247         unsafe {
248             from_glib(ffi::gst_memory_is_type(
249                 self.as_mut_ptr(),
250                 mem_type.to_glib_none().0,
251             ))
252         }
253     }
254 
map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError>255     pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
256         unsafe {
257             let mut map_info = mem::MaybeUninit::zeroed();
258             let res =
259                 ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
260             if res == glib::ffi::GTRUE {
261                 Ok(MemoryMap {
262                     memory: self,
263                     map_info: map_info.assume_init(),
264                     phantom: PhantomData,
265                 })
266             } else {
267                 Err(glib::bool_error!("Failed to map memory readable"))
268             }
269         }
270     }
271 
map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError>272     pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
273         unsafe {
274             let mut map_info = mem::MaybeUninit::zeroed();
275             let res = ffi::gst_memory_map(
276                 self.as_mut_ptr(),
277                 map_info.as_mut_ptr(),
278                 ffi::GST_MAP_READWRITE,
279             );
280             if res == glib::ffi::GTRUE {
281                 Ok(MemoryMap {
282                     memory: self,
283                     map_info: map_info.assume_init(),
284                     phantom: PhantomData,
285                 })
286             } else {
287                 Err(glib::bool_error!("Failed to map memory writable"))
288             }
289         }
290     }
291 
292     #[doc(alias = "gst_memory_share")]
share(&self, offset: isize, size: Option<usize>) -> Memory293     pub fn share(&self, offset: isize, size: Option<usize>) -> Memory {
294         let pos_sz = match size {
295             Some(val) => val as isize,
296             None => 0,
297         };
298         assert!(offset + pos_sz < (self.maxsize() as isize));
299         unsafe {
300             from_glib_full(ffi::gst_memory_share(
301                 self.as_ptr() as *mut _,
302                 offset,
303                 match size {
304                     Some(val) => val as isize,
305                     None => -1,
306                 },
307             ))
308         }
309     }
310 
311     #[doc(alias = "gst_memory_resize")]
resize(&mut self, offset: isize, size: usize)312     pub fn resize(&mut self, offset: isize, size: usize) {
313         assert!(offset + (size as isize) < (self.maxsize() as isize));
314         unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size) }
315     }
316 
dump(&self, size: Option<usize>) -> Dump317     pub fn dump(&self, size: Option<usize>) -> Dump {
318         Dump { memory: self, size }
319     }
320 }
321 
322 impl<'a, T> MemoryMap<'a, T> {
323     #[doc(alias = "get_size")]
size(&self) -> usize324     pub fn size(&self) -> usize {
325         self.map_info.size
326     }
327 
328     #[doc(alias = "get_memory")]
memory(&self) -> &MemoryRef329     pub fn memory(&self) -> &MemoryRef {
330         self.memory
331     }
332 
as_slice(&self) -> &[u8]333     pub fn as_slice(&self) -> &[u8] {
334         unsafe { slice::from_raw_parts(self.map_info.data as *const u8, self.map_info.size) }
335     }
336 }
337 
338 impl<'a> MemoryMap<'a, Writable> {
as_mut_slice(&mut self) -> &mut [u8]339     pub fn as_mut_slice(&mut self) -> &mut [u8] {
340         unsafe { slice::from_raw_parts_mut(self.map_info.data as *mut u8, self.map_info.size) }
341     }
342 }
343 
// Borrow the mapped bytes.
impl<'a, T> AsRef<[u8]> for MemoryMap<'a, T> {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutably borrow the mapped bytes; only available for writable maps.
impl<'a> AsMut<[u8]> for MemoryMap<'a, Writable> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

// Let a map be used directly wherever a byte slice is expected.
impl<'a, T> ops::Deref for MemoryMap<'a, T> {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutable deref; only available for writable maps.
impl<'a> ops::DerefMut for MemoryMap<'a, Writable> {
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
369 
// Debug-prints the underlying memory rather than the raw map info.
impl<'a, T> fmt::Debug for MemoryMap<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MemoryMap").field(&self.memory()).finish()
    }
}

// Two maps compare equal iff their mapped bytes are equal.
impl<'a, T> PartialEq for MemoryMap<'a, T> {
    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<'a, T> Eq for MemoryMap<'a, T> {}
383 
impl<'a, T> Drop for MemoryMap<'a, T> {
    fn drop(&mut self) {
        // SAFETY: `map_info` was produced by a successful `gst_memory_map()`
        // on `self.memory` and is unmapped exactly once, here.
        unsafe {
            ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}

// SAFETY: NOTE(review): assumes the `GstMapInfo` and the mapped data may be
// accessed from any thread — confirm against GStreamer's threading rules.
unsafe impl<'a, T> Send for MemoryMap<'a, T> {}
unsafe impl<'a, T> Sync for MemoryMap<'a, T> {}
394 
395 impl<T> MappedMemory<T> {
as_slice(&self) -> &[u8]396     pub fn as_slice(&self) -> &[u8] {
397         unsafe { slice::from_raw_parts(self.map_info.data as *const u8, self.map_info.size) }
398     }
399 
400     #[doc(alias = "get_size")]
size(&self) -> usize401     pub fn size(&self) -> usize {
402         self.map_info.size
403     }
404 
405     #[doc(alias = "get_memory")]
memory(&self) -> &MemoryRef406     pub fn memory(&self) -> &MemoryRef {
407         self.memory.as_ref().unwrap().as_ref()
408     }
409 
into_memory(mut self) -> Memory410     pub fn into_memory(mut self) -> Memory {
411         let memory = self.memory.take().unwrap();
412         unsafe {
413             ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut self.map_info);
414         }
415 
416         memory
417     }
418 }
419 
420 impl MappedMemory<Writable> {
as_mut_slice(&mut self) -> &mut [u8]421     pub fn as_mut_slice(&mut self) -> &mut [u8] {
422         unsafe { slice::from_raw_parts_mut(self.map_info.data as *mut u8, self.map_info.size) }
423     }
424 }
425 
// Borrow the mapped bytes.
impl<T> AsRef<[u8]> for MappedMemory<T> {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutably borrow the mapped bytes; only available for writable maps.
impl AsMut<[u8]> for MappedMemory<Writable> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

// Let a mapped memory be used directly wherever a byte slice is expected.
impl<T> ops::Deref for MappedMemory<T> {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutable deref; only available for writable maps.
impl ops::DerefMut for MappedMemory<Writable> {
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
451 
impl<T> Drop for MappedMemory<T> {
    fn drop(&mut self) {
        // `memory` is `None` if `into_memory()` already unmapped and
        // returned it; only unmap when we still own the memory.
        if let Some(ref memory) = self.memory {
            // SAFETY: `map_info` was produced by a successful
            // `gst_memory_map()` on this memory and is unmapped once, here.
            unsafe {
                ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut self.map_info);
            }
        }
    }
}

// Debug-prints the underlying memory rather than the raw map info.
impl<T> fmt::Debug for MappedMemory<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedMemory").field(&self.memory()).finish()
    }
}

// Two mapped memories compare equal iff their mapped bytes are equal.
impl<T> PartialEq for MappedMemory<T> {
    fn eq(&self, other: &MappedMemory<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MappedMemory<T> {}

// SAFETY: NOTE(review): assumes the `GstMapInfo` and the mapped data may be
// accessed from any thread — confirm against GStreamer's threading rules.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
478 
// Helper returned by `MemoryRef::dump()`: hex-dumps the memory contents via
// its `Display` / `Debug` implementations.
pub struct Dump<'a> {
    memory: &'a MemoryRef,
    // Number of bytes to dump; `None` dumps `memory.size()` bytes.
    size: Option<usize>,
}
483 
484 impl<'a> Dump<'a> {
fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result485     fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
486         use pretty_hex::*;
487 
488         let map = self.memory.map_readable().expect("Failed to map memory");
489         let data = map.as_slice();
490         let size = self.size.unwrap_or_else(|| self.memory.size());
491         let data = &data[0..size];
492 
493         if debug {
494             write!(f, "{:?}", data.hex_dump())
495         } else {
496             write!(f, "{}", data.hex_dump())
497         }
498     }
499 }
500 
// `{}` renders the plain hex dump.
impl<'a> fmt::Display for Dump<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}

// `{:?}` renders the hex dump in debug form.
impl<'a> fmt::Debug for Dump<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
512 
#[cfg(test)]
mod tests {
    // Smoke-test that `Dump` renders via Display, Debug and `dbg!` for a
    // full range, a partial range, and the default (whole-memory) range.
    #[test]
    fn test_dump() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        println!("{}", mem.dump(Some(mem.size())));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        println!("{:?}", mem.dump(Some(2)));

        let mem = crate::Memory::from_slice(vec![0; 64]);
        dbg!(mem.dump(None));
    }
}
529