// Take a look at the license at the top of the repository in the LICENSE file.

//! # Examples
//!
//! ```
//! use glib::prelude::*; // or `use gtk::prelude::*;`
//! use glib::ByteArray;
//!
//! let ba = ByteArray::from(b"def");
//! ba.append(b"ghi").prepend(b"abc");
//! ba.remove_range(3, 3);
//! assert_eq!(ba, "abcghi".as_bytes());
//! ```

use crate::translate::*;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::mem;
use std::ops::Deref;
use std::ptr::NonNull;
use std::slice;

use crate::Bytes;

wrapper! {
    #[doc(alias = "GByteArray")]
    pub struct ByteArray(Shared<ffi::GByteArray>);

    match fn {
        ref => |ptr| ffi::g_byte_array_ref(ptr),
        unref => |ptr| ffi::g_byte_array_unref(ptr),
        type_ => || ffi::g_byte_array_get_type(),
    }
}

impl ByteArray {
    #[doc(alias = "g_byte_array_new")]
    pub fn new() -> Self {
        unsafe { from_glib_full(ffi::g_byte_array_new()) }
    }

    #[doc(alias = "g_byte_array_sized_new")]
    pub fn with_capacity(size: usize) -> Self {
        unsafe { from_glib_full(ffi::g_byte_array_sized_new(size as u32)) }
    }

    #[doc(alias = "g_byte_array_free_to_bytes")]
    pub fn into_gbytes(self) -> Bytes {
        unsafe {
            let s = mem::ManuallyDrop::new(self);
            from_glib_full(ffi::g_byte_array_free_to_bytes(mut_override(
                s.to_glib_none().0,
            )))
        }
    }

    #[doc(alias = "g_byte_array_append")]
    pub fn append<T: ?Sized + AsRef<[u8]>>(&self, data: &T) -> &Self {
        let bytes = data.as_ref();
        unsafe {
            ffi::g_byte_array_append(
                self.to_glib_none().0,
                bytes.as_ptr() as *const _,
                bytes.len() as u32,
            );
        }
        self
    }

    #[doc(alias = "g_byte_array_prepend")]
    pub fn prepend<T: ?Sized + AsRef<[u8]>>(&self, data: &T) -> &Self {
        let bytes = data.as_ref();
        unsafe {
            ffi::g_byte_array_prepend(
                self.to_glib_none().0,
                bytes.as_ptr() as *const _,
                bytes.len() as u32,
            );
        }
        self
    }

    #[doc(alias = "g_byte_array_remove_index")]
    pub fn remove_index(&self, index: usize) {
        unsafe {
            ffi::g_byte_array_remove_index(self.to_glib_none().0, index as u32);
        }
    }

    #[doc(alias = "g_byte_array_remove_index_fast")]
    pub fn remove_index_fast(&self, index: usize) {
        unsafe {
            ffi::g_byte_array_remove_index_fast(self.to_glib_none().0, index as u32);
        }
    }

    #[doc(alias = "g_byte_array_remove_range")]
    pub fn remove_range(&self, index: usize, length: usize) {
        unsafe {
            ffi::g_byte_array_remove_range(self.to_glib_none().0, index as u32, length as u32);
        }
    }

    #[doc(alias = "g_byte_array_set_size")]
    pub unsafe fn set_size(&self, size: usize) {
        ffi::g_byte_array_set_size(self.to_glib_none().0, size as u32);
    }

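    // A minimal usage sketch for `sort` (added for illustration, not part of the
    // upstream sources): the closure receives references to individual bytes and
    // returns a standard `Ordering`, mirroring `slice::sort_by`.
    //
    //     let ba = ByteArray::from(b"cba");
    //     ba.sort(|a, b| a.cmp(b));
    //     assert_eq!(ba, b"abc" as &[u8]);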
    #[doc(alias = "g_byte_array_sort_with_data")]
    pub fn sort<F: FnMut(&u8, &u8) -> Ordering>(&self, compare_func: F) {
        unsafe extern "C" fn compare_func_trampoline(
            a: ffi::gconstpointer,
            b: ffi::gconstpointer,
            func: ffi::gpointer,
        ) -> i32 {
            // `func` is the user data pointer passed below: it points at the
            // `&mut dyn FnMut` comparator living on the caller's stack.
            let func = func as *mut &mut (dyn FnMut(&u8, &u8) -> Ordering);

            let a = &*(a as *const u8);
            let b = &*(b as *const u8);

            // Translate the Rust `Ordering` into the negative/zero/positive
            // integer convention expected by the C comparison function.
            match (*func)(a, b) {
                Ordering::Less => -1,
                Ordering::Equal => 0,
                Ordering::Greater => 1,
            }
        }
        unsafe {
            let mut func = compare_func;
            let func_obj: &mut (dyn FnMut(&u8, &u8) -> Ordering) = &mut func;
            let func_ptr =
                &func_obj as *const &mut (dyn FnMut(&u8, &u8) -> Ordering) as ffi::gpointer;

            ffi::g_byte_array_sort_with_data(
                self.to_glib_none().0,
                Some(compare_func_trampoline),
                func_ptr,
            );
        }
    }
}

impl AsRef<[u8]> for ByteArray {
    fn as_ref(&self) -> &[u8] {
        &*self
    }
}

impl<'a, T: ?Sized + Borrow<[u8]> + 'a> From<&'a T> for ByteArray {
    fn from(value: &'a T) -> ByteArray {
        let ba = ByteArray::new();
        ba.append(value.borrow());
        ba
    }
}

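// Because `ByteArray` dereferences to `[u8]` (see the `Deref` impl below), slice
// methods can be called on it directly. A small sketch, illustrative only and not
// taken from the upstream sources:
//
//     let ba = ByteArray::from(b"abc");
//     assert_eq!(ba.len(), 3);
//     assert!(ba.contains(&b'b'));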
impl Deref for ByteArray {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        unsafe {
            let mut ptr = (*self.to_glib_none().0).data;
            let len = (*self.to_glib_none().0).len as usize;
            debug_assert!(!ptr.is_null() || len == 0);
            if ptr.is_null() {
                ptr = NonNull::dangling().as_ptr();
            }
            slice::from_raw_parts(ptr as *const u8, len)
        }
    }
}

impl Default for ByteArray {
    fn default() -> Self {
        Self::new()
    }
}

impl fmt::Debug for ByteArray {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("ByteArray")
            .field("ptr", &self.to_glib_none().0)
            .field("data", &&self[..])
            .finish()
    }
}

macro_rules! impl_cmp {
    ($lhs:ty, $rhs: ty) => {
        #[allow(clippy::redundant_slicing)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        impl<'a, 'b> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        impl<'a, 'b> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }
    };
}

impl_cmp!(ByteArray, [u8]);
impl_cmp!(ByteArray, &'a [u8]);
impl_cmp!(&'a ByteArray, [u8]);
impl_cmp!(ByteArray, Vec<u8>);
impl_cmp!(&'a ByteArray, Vec<u8>);

impl PartialEq for ByteArray {
    fn eq(&self, other: &Self) -> bool {
        self[..] == other[..]
    }
}

impl Eq for ByteArray {}

impl Hash for ByteArray {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.len().hash(state);
        Hash::hash_slice(&self[..], state)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashSet;

    #[test]
    fn various() {
        let ba: ByteArray = Default::default();
        ba.append("foo").append("bar").prepend("baz");
        ba.remove_index(0);
        ba.remove_index_fast(1);
        ba.remove_range(1, 2);
        ba.sort(|a, b| a.cmp(b));
        unsafe { ba.set_size(3) };
        assert_eq!(ba, b"aab" as &[u8]);
        let abc: &[u8] = b"abc";
        assert_eq!(ByteArray::from(abc), b"abc" as &[u8]);
    }

    #[test]
    fn hash() {
        let b1 = ByteArray::from(b"this is a test");
        let b2 = ByteArray::from(b"this is a test");
        let b3 = ByteArray::from(b"test");
        let mut set = HashSet::new();
        set.insert(b1);
        assert!(set.contains(&b2));
        assert!(!set.contains(&b3));
    }
}