1 // Copyright (C) 2019 Vivia Nikolaidou <vivia@ahiru.eu>
2 //
3 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
4 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
5 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
6 // option. This file may not be copied, modified, or distributed
7 // except according to those terms.
8 
9 use std::fmt;
10 use std::marker::PhantomData;
11 use std::mem;
12 use std::ops;
13 use std::ptr;
14 use std::slice;
15 
16 use gst_sys;
17 
18 use glib;
19 use glib::translate::{from_glib, from_glib_full, from_glib_none, ToGlibPtr};
20 
21 use miniobject::MiniObject;
22 
23 use AllocationParams;
24 use Allocator;
25 use MemoryFlags;
26 
// Generates the owned `Memory` wrapper and borrowed `MemoryRef` wrapper
// around the C `GstMemory` mini-object, including refcounting glue and the
// GType registration closure.
gst_define_mini_object_wrapper!(Memory, MemoryRef, gst_sys::GstMemory, [Debug,], || {
    gst_sys::gst_memory_get_type()
});
30 
/// A borrowed mapping of a `MemoryRef`'s bytes, unmapped on drop.
///
/// `T` is the `Readable` or `Writable` marker and controls which slice
/// accessors are available.
pub struct MemoryMap<'a, T> {
    memory: &'a MemoryRef,         // mapped memory, kept alive by the borrow
    map_info: gst_sys::GstMapInfo, // C-side map info (data pointer + size)
    phantom: PhantomData<T>,       // zero-sized read/write marker
}
36 
/// An owned mapping of a `Memory`'s bytes, unmapped on drop.
///
/// `memory` is only `None` after `into_memory()` has taken it back out;
/// `Drop` uses that to skip the redundant unmap.
pub struct MappedMemory<T> {
    memory: Option<Memory>,        // owned memory; None once into_memory() ran
    map_info: gst_sys::GstMapInfo, // C-side map info (data pointer + size)
    phantom: PhantomData<T>,       // zero-sized read/write marker
}
42 
impl fmt::Debug for MemoryRef {
    // Renders all directly accessible GstMemory fields plus the raw pointer;
    // deliberately does not map the memory, so this is cheap and side-effect
    // free.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Memory")
            .field("ptr", unsafe { &self.as_ptr() })
            .field("allocator", &self.get_allocator())
            .field("parent", &self.get_parent())
            .field("maxsize", &self.get_maxsize())
            .field("align", &self.get_align())
            .field("offset", &self.get_offset())
            .field("size", &self.get_size())
            .field("flags", &self.get_flags())
            .finish()
    }
}
57 
/// Type-level marker for read-only mappings (uninhabited, zero-sized).
pub enum Readable {}
/// Type-level marker for read-write mappings (uninhabited, zero-sized).
pub enum Writable {}
60 
61 impl Memory {
drop_box<T>(vec: glib_sys::gpointer)62     unsafe extern "C" fn drop_box<T>(vec: glib_sys::gpointer) {
63         let slice: Box<T> = Box::from_raw(vec as *mut T);
64         drop(slice);
65     }
66 
with_size(size: usize) -> Self67     pub fn with_size(size: usize) -> Self {
68         unsafe {
69             from_glib_full(gst_sys::gst_allocator_alloc(
70                 ptr::null_mut(),
71                 size,
72                 ptr::null_mut(),
73             ))
74         }
75     }
76 
with_size_and_params(size: usize, params: &AllocationParams) -> Self77     pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
78         unsafe {
79             from_glib_full(gst_sys::gst_allocator_alloc(
80                 ptr::null_mut(),
81                 size,
82                 params.as_ptr() as *mut _,
83             ))
84         }
85     }
86 
from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self87     pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
88         assert_initialized_main_thread!();
89         unsafe {
90             let b = Box::new(slice);
91             let (size, data) = {
92                 let slice = (*b).as_ref();
93                 (slice.len(), slice.as_ptr())
94             };
95             let user_data = Box::into_raw(b);
96             from_glib_full(gst_sys::gst_memory_new_wrapped(
97                 gst_sys::GST_MEMORY_FLAG_READONLY,
98                 data as glib_sys::gpointer,
99                 size,
100                 0,
101                 size,
102                 user_data as glib_sys::gpointer,
103                 Some(Self::drop_box::<T>),
104             ))
105         }
106     }
107 
from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self108     pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
109         assert_initialized_main_thread!();
110 
111         unsafe {
112             let mut b = Box::new(slice);
113             let (size, data) = {
114                 let slice = (*b).as_mut();
115                 (slice.len(), slice.as_mut_ptr())
116             };
117             let user_data = Box::into_raw(b);
118             from_glib_full(gst_sys::gst_memory_new_wrapped(
119                 0,
120                 data as glib_sys::gpointer,
121                 size,
122                 0,
123                 size,
124                 user_data as glib_sys::gpointer,
125                 Some(Self::drop_box::<T>),
126             ))
127         }
128     }
129 
into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self>130     pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
131         unsafe {
132             let mut map_info = mem::MaybeUninit::zeroed();
133             let res: bool = from_glib(gst_sys::gst_memory_map(
134                 self.as_mut_ptr(),
135                 map_info.as_mut_ptr(),
136                 gst_sys::GST_MAP_READ,
137             ));
138             if res {
139                 Ok(MappedMemory {
140                     memory: Some(self),
141                     map_info: map_info.assume_init(),
142                     phantom: PhantomData,
143                 })
144             } else {
145                 Err(self)
146             }
147         }
148     }
149 
into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self>150     pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
151         unsafe {
152             let mut map_info = mem::MaybeUninit::zeroed();
153             let res: bool = from_glib(gst_sys::gst_memory_map(
154                 self.as_mut_ptr(),
155                 map_info.as_mut_ptr(),
156                 gst_sys::GST_MAP_READWRITE,
157             ));
158             if res {
159                 Ok(MappedMemory {
160                     memory: Some(self),
161                     map_info: map_info.assume_init(),
162                     phantom: PhantomData,
163                 })
164             } else {
165                 Err(self)
166             }
167         }
168     }
169 }
170 
171 impl MemoryRef {
get_allocator(&self) -> Option<Allocator>172     pub fn get_allocator(&self) -> Option<Allocator> {
173         unsafe { from_glib_none(self.0.allocator) }
174     }
175 
get_parent(&self) -> Option<&MemoryRef>176     pub fn get_parent(&self) -> Option<&MemoryRef> {
177         unsafe {
178             if self.0.parent.is_null() {
179                 None
180             } else {
181                 Some(MemoryRef::from_ptr(self.0.parent))
182             }
183         }
184     }
185 
get_maxsize(&self) -> usize186     pub fn get_maxsize(&self) -> usize {
187         self.0.maxsize
188     }
189 
get_align(&self) -> usize190     pub fn get_align(&self) -> usize {
191         self.0.align
192     }
193 
get_offset(&self) -> usize194     pub fn get_offset(&self) -> usize {
195         self.0.offset
196     }
197 
get_size(&self) -> usize198     pub fn get_size(&self) -> usize {
199         self.0.size
200     }
201 
get_flags(&self) -> MemoryFlags202     pub fn get_flags(&self) -> MemoryFlags {
203         from_glib(self.0.mini_object.flags)
204     }
205 
copy_part(&self, offset: isize, size: Option<usize>) -> Memory206     pub fn copy_part(&self, offset: isize, size: Option<usize>) -> Memory {
207         let pos_sz = match size {
208             Some(val) => val as isize,
209             None => 0,
210         };
211         assert!(offset + pos_sz < (self.get_maxsize() as isize));
212         unsafe {
213             from_glib_full(gst_sys::gst_memory_copy(
214                 self.as_mut_ptr(),
215                 offset,
216                 match size {
217                     Some(val) => val as isize,
218                     None => -1,
219                 },
220             ))
221         }
222     }
223 
is_span(&self, mem2: &MemoryRef) -> Option<usize>224     pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
225         unsafe {
226             let mut offset = mem::MaybeUninit::uninit();
227             let res = from_glib(gst_sys::gst_memory_is_span(
228                 self.as_mut_ptr(),
229                 mem2.as_mut_ptr(),
230                 offset.as_mut_ptr(),
231             ));
232             if res {
233                 Some(offset.assume_init())
234             } else {
235                 None
236             }
237         }
238     }
239 
is_type(&self, mem_type: &str) -> bool240     pub fn is_type(&self, mem_type: &str) -> bool {
241         unsafe {
242             from_glib(gst_sys::gst_memory_is_type(
243                 self.as_mut_ptr(),
244                 mem_type.to_glib_none().0,
245             ))
246         }
247     }
248 
map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError>249     pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
250         unsafe {
251             let mut map_info = mem::MaybeUninit::zeroed();
252             let res = gst_sys::gst_memory_map(
253                 self.as_mut_ptr(),
254                 map_info.as_mut_ptr(),
255                 gst_sys::GST_MAP_READ,
256             );
257             if res == glib_sys::GTRUE {
258                 Ok(MemoryMap {
259                     memory: self,
260                     map_info: map_info.assume_init(),
261                     phantom: PhantomData,
262                 })
263             } else {
264                 Err(glib_bool_error!("Failed to map memory readable"))
265             }
266         }
267     }
268 
map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError>269     pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
270         unsafe {
271             let mut map_info = mem::MaybeUninit::zeroed();
272             let res = gst_sys::gst_memory_map(
273                 self.as_mut_ptr(),
274                 map_info.as_mut_ptr(),
275                 gst_sys::GST_MAP_READWRITE,
276             );
277             if res == glib_sys::GTRUE {
278                 Ok(MemoryMap {
279                     memory: self,
280                     map_info: map_info.assume_init(),
281                     phantom: PhantomData,
282                 })
283             } else {
284                 Err(glib_bool_error!("Failed to map memory readable"))
285             }
286         }
287     }
288 
share(&self, offset: isize, size: Option<usize>) -> Memory289     pub fn share(&self, offset: isize, size: Option<usize>) -> Memory {
290         let pos_sz = match size {
291             Some(val) => val as isize,
292             None => 0,
293         };
294         assert!(offset + pos_sz < (self.get_maxsize() as isize));
295         unsafe {
296             from_glib_full(gst_sys::gst_memory_share(
297                 self.as_ptr() as *mut _,
298                 offset,
299                 match size {
300                     Some(val) => val as isize,
301                     None => -1,
302                 },
303             ))
304         }
305     }
306 
resize(&mut self, offset: isize, size: usize)307     pub fn resize(&mut self, offset: isize, size: usize) {
308         assert!(offset + (size as isize) < (self.get_maxsize() as isize));
309         unsafe { gst_sys::gst_memory_resize(self.as_mut_ptr(), offset, size) }
310     }
311 }
312 
313 impl<'a, T> MemoryMap<'a, T> {
get_size(&self) -> usize314     pub fn get_size(&self) -> usize {
315         self.map_info.size
316     }
317 
get_memory(&self) -> &MemoryRef318     pub fn get_memory(&self) -> &MemoryRef {
319         self.memory
320     }
321 
as_slice(&self) -> &[u8]322     pub fn as_slice(&self) -> &[u8] {
323         unsafe { slice::from_raw_parts(self.map_info.data as *const u8, self.map_info.size) }
324     }
325 }
326 
impl<'a> MemoryMap<'a, Writable> {
    // Mutable access is only offered for writable mappings.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        unsafe { slice::from_raw_parts_mut(self.map_info.data as *mut u8, self.map_info.size) }
    }
}

impl<'a, T> AsRef<[u8]> for MemoryMap<'a, T> {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl<'a> AsMut<[u8]> for MemoryMap<'a, Writable> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

// Deref lets a map be used wherever a byte slice is expected.
impl<'a, T> ops::Deref for MemoryMap<'a, T> {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl<'a> ops::DerefMut for MemoryMap<'a, Writable> {
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<'a, T> fmt::Debug for MemoryMap<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MemoryMap")
            .field(&self.get_memory())
            .finish()
    }
}

// Equality compares the mapped bytes, not the underlying memory objects.
impl<'a, T> PartialEq for MemoryMap<'a, T> {
    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<'a, T> Eq for MemoryMap<'a, T> {}

// Unmaps on drop so the mapping cannot outlive the borrow of the memory.
impl<'a, T> Drop for MemoryMap<'a, T> {
    fn drop(&mut self) {
        unsafe {
            gst_sys::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}

// SAFETY: NOTE(review) — presumably sound because GstMemory mapping is not
// tied to the mapping thread; confirm against the GstMemory thread-safety
// guarantees before relying on cross-thread unmap.
unsafe impl<'a, T> Send for MemoryMap<'a, T> {}
unsafe impl<'a, T> Sync for MemoryMap<'a, T> {}
385 
386 impl<T> MappedMemory<T> {
as_slice(&self) -> &[u8]387     pub fn as_slice(&self) -> &[u8] {
388         unsafe { slice::from_raw_parts(self.map_info.data as *const u8, self.map_info.size) }
389     }
390 
get_size(&self) -> usize391     pub fn get_size(&self) -> usize {
392         self.map_info.size
393     }
394 
get_memory(&self) -> &MemoryRef395     pub fn get_memory(&self) -> &MemoryRef {
396         self.memory.as_ref().unwrap().as_ref()
397     }
398 
into_memory(mut self) -> Memory399     pub fn into_memory(mut self) -> Memory {
400         let memory = self.memory.take().unwrap();
401         unsafe {
402             gst_sys::gst_memory_unmap(memory.as_mut_ptr(), &mut self.map_info);
403         }
404 
405         memory
406     }
407 }
408 
impl MappedMemory<Writable> {
    // Mutable access is only offered for writable mappings.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        unsafe { slice::from_raw_parts_mut(self.map_info.data as *mut u8, self.map_info.size) }
    }
}

impl<T> AsRef<[u8]> for MappedMemory<T> {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for MappedMemory<Writable> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

// Deref lets a mapped memory be used wherever a byte slice is expected.
impl<T> ops::Deref for MappedMemory<T> {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl ops::DerefMut for MappedMemory<Writable> {
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

// Unmaps on drop, unless into_memory() already took the memory (then
// `self.memory` is None and the unmap already happened there).
impl<T> Drop for MappedMemory<T> {
    fn drop(&mut self) {
        if let Some(ref memory) = self.memory {
            unsafe {
                gst_sys::gst_memory_unmap(memory.as_mut_ptr(), &mut self.map_info);
            }
        }
    }
}

impl<T> fmt::Debug for MappedMemory<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedMemory")
            .field(&self.get_memory())
            .finish()
    }
}

// Equality compares the mapped bytes, not the underlying memory objects.
impl<T> PartialEq for MappedMemory<T> {
    fn eq(&self, other: &MappedMemory<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MappedMemory<T> {}

// SAFETY: NOTE(review) — presumably sound because GstMemory mapping is not
// tied to the mapping thread; confirm against the GstMemory thread-safety
// guarantees before relying on cross-thread unmap.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
469