use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::{ptr, slice};

use crate::Result;
use crate::{PartialAllocStrategy, PhysallocFlags};

/// An RAII guard of a physical memory allocation. Currently all physically allocated memory is
/// page-aligned and takes up at least 4 KiB of space (on x86_64).
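///
/// A minimal usage sketch (assuming the crate is used as `syscall`; the syscalls are Redox-only,
/// so the doc test is ignored):
///
/// ```ignore
/// use syscall::PhysBox;
///
/// // Allocate one page of physical memory; it is freed again when `phys` goes out of scope.
/// let phys = PhysBox::new(4096).expect("physalloc failed");
/// println!("physical address {:#x}, size {}", phys.address(), phys.size());
/// ```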
#[derive(Debug)]
pub struct PhysBox {
    address: usize,
    size: usize
}

impl PhysBox {
    /// Construct a PhysBox from an address and a size.
    ///
    /// # Safety
    /// This function is unsafe because the address and size must describe a valid physical
    /// allocation, since `Self` frees that allocation when dropped.
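    ///
    /// A hedged sketch of the intended round trip (Redox-only, so the doc test is ignored):
    ///
    /// ```ignore
    /// // Take ownership of the raw parts without freeing the allocation...
    /// let (address, size) = (phys.address(), phys.size());
    /// core::mem::forget(phys);
    /// // ...and later reassemble the guard so the memory is freed on drop again.
    /// let phys = unsafe { PhysBox::from_raw_parts(address, size) };
    /// ```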
    pub unsafe fn from_raw_parts(address: usize, size: usize) -> Self {
        Self {
            address,
            size,
        }
    }

    /// Retrieve the byte address in physical memory of this allocation.
    pub fn address(&self) -> usize {
        self.address
    }

    /// Retrieve the size in bytes of this allocation.
    pub fn size(&self) -> usize {
        self.size
    }

    /// Allocate physical memory that must reside in 32-bit space.
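    ///
    /// For instance, a device whose DMA engine only takes 32-bit bus addresses could use
    /// (illustrative sketch, Redox-only, so the doc test is ignored):
    ///
    /// ```ignore
    /// let phys = PhysBox::new_in_32bit_space(4096)?;
    /// assert!(phys.address() + phys.size() <= 1 << 32);
    /// ```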
    pub fn new_in_32bit_space(size: usize) -> Result<Self> {
        Self::new_with_flags(size, PhysallocFlags::SPACE_32)
    }

    /// Allocate physical memory with the given flags. The `PARTIAL_ALLOC` flag is not allowed
    /// here; use [`Self::new_partial_allocation`] for that.
    pub fn new_with_flags(size: usize, flags: PhysallocFlags) -> Result<Self> {
        assert!(!flags.contains(PhysallocFlags::PARTIAL_ALLOC));

        let address = unsafe { crate::physalloc2(size, flags.bits())? };
        Ok(Self {
            address,
            size,
        })
    }

    /// "Partially" allocate physical memory, in the sense that the allocation may be smaller
    /// than requested, but never smaller than a given minimum. This is particularly useful when
    /// physical memory is fragmented and the device supports scatter-gather I/O. In that case,
    /// the driver can optimistically request e.g. one allocation of 1 MiB with a minimum of
    /// 512 KiB; if that first allocation only returns half the requested size, the driver can
    /// make another allocation and let the device use both buffers.
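    ///
    /// A hedged sketch of that pattern (Redox-only, so the doc test is ignored; the 1 MiB and
    /// 512 KiB figures and the `Optimal` strategy are illustrative choices, not requirements):
    ///
    /// ```ignore
    /// use syscall::{PartialAllocStrategy, PhysallocFlags, PhysBox};
    ///
    /// const MIB: usize = 1024 * 1024;
    ///
    /// // Ask for 1 MiB in total, accepting as little as 512 KiB per allocation.
    /// let mut buffers = Vec::new();
    /// let mut remaining = MIB;
    /// while remaining > 0 {
    ///     let phys = PhysBox::new_partial_allocation(
    ///         remaining,
    ///         PhysallocFlags::PARTIAL_ALLOC,
    ///         Some(PartialAllocStrategy::Optimal),
    ///         (MIB / 2).min(remaining),
    ///     )?;
    ///     remaining = remaining.saturating_sub(phys.size());
    ///     // Hand each buffer to the device's scatter-gather list.
    ///     buffers.push(phys);
    /// }
    /// ```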
    pub fn new_partial_allocation(size: usize, flags: PhysallocFlags, strategy: Option<PartialAllocStrategy>, mut min: usize) -> Result<Self> {
        debug_assert!(!(flags.contains(PhysallocFlags::PARTIAL_ALLOC) && strategy.is_none()));

        let address = unsafe { crate::physalloc3(size, flags.bits() | strategy.map(|s| s as usize).unwrap_or(0), &mut min)? };
        Ok(Self {
            address,
            // physalloc3 writes the size it actually allocated back into `min`.
            size: min,
        })
    }

    /// Allocate `size` bytes of physical memory with the default flags.
    pub fn new(size: usize) -> Result<Self> {
        let address = unsafe { crate::physalloc(size)? };
        Ok(Self {
            address,
            size,
        })
    }
}

impl Drop for PhysBox {
    fn drop(&mut self) {
        let _ = unsafe { crate::physfree(self.address, self.size) };
    }
}

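/// A physical allocation together with a virtual-memory mapping of it, so that the CPU can
/// access the memory while a device addresses it physically.
///
/// A minimal usage sketch (Redox-only, so the doc test is ignored; `CommandRing` is a
/// hypothetical device structure):
///
/// ```ignore
/// #[repr(C)]
/// struct CommandRing([u32; 256]);
///
/// // Allocate, map and zero-initialize one structure in physical memory.
/// let ring = unsafe { Dma::<CommandRing>::zeroed()?.assume_init() };
/// // The device gets the physical address; the driver accesses it through Deref.
/// println!("ring at physical {:#x}", ring.physical());
/// ```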
pub struct Dma<T: ?Sized> {
    phys: PhysBox,
    virt: *mut T,
}

impl<T> Dma<T> {
    /// Map the physical allocation into virtual memory, without initializing its contents.
    pub fn from_physbox_uninit(phys: PhysBox) -> Result<Dma<MaybeUninit<T>>> {
        let virt = unsafe { crate::physmap(phys.address, phys.size, crate::PHYSMAP_WRITE)? } as *mut MaybeUninit<T>;

        Ok(Dma {
            phys,
            virt,
        })
    }

    /// Map the physical allocation into virtual memory, and zero its contents.
    pub fn from_physbox_zeroed(phys: PhysBox) -> Result<Dma<MaybeUninit<T>>> {
        let this = Self::from_physbox_uninit(phys)?;
        unsafe { ptr::write_bytes(this.virt as *mut MaybeUninit<u8>, 0, this.phys.size) }
        Ok(this)
    }

    /// Map the physical allocation into virtual memory, and initialize it with `value`.
    pub fn from_physbox(phys: PhysBox, value: T) -> Result<Self> {
        let this = Self::from_physbox_uninit(phys)?;

        Ok(unsafe {
            ptr::write(this.virt, MaybeUninit::new(value));
            this.assume_init()
        })
    }

    /// Allocate physical memory for a `T`, map it, and initialize it with `value`.
    pub fn new(value: T) -> Result<Self> {
        let phys = PhysBox::new(mem::size_of::<T>())?;
        Self::from_physbox(phys, value)
    }

    /// Allocate physical memory for a `T`, map it, and zero it.
    pub fn zeroed() -> Result<Dma<MaybeUninit<T>>> {
        let phys = PhysBox::new(mem::size_of::<T>())?;
        Self::from_physbox_zeroed(phys)
    }
}

impl<T> Dma<MaybeUninit<T>> {
    /// Assume that the inner value has been initialized.
    ///
    /// # Safety
    /// The caller must guarantee that the value is actually initialized.
    pub unsafe fn assume_init(self) -> Dma<T> {
        let &Dma { phys: PhysBox { address, size }, virt } = &self;
        mem::forget(self);

        Dma {
            phys: PhysBox { address, size },
            virt: virt as *mut T,
        }
    }
}
impl<T: ?Sized> Dma<T> {
    /// Retrieve the physical address of the underlying allocation.
    pub fn physical(&self) -> usize {
        self.phys.address()
    }

    /// Retrieve the size in bytes of the underlying allocation.
    pub fn size(&self) -> usize {
        self.phys.size()
    }

    /// Borrow the underlying PhysBox.
    pub fn phys(&self) -> &PhysBox {
        &self.phys
    }
}

impl<T> Dma<[T]> {
    /// Map the physical allocation as a slice of `len` uninitialized elements.
    pub fn from_physbox_uninit_unsized(phys: PhysBox, len: usize) -> Result<Dma<[MaybeUninit<T>]>> {
        let max_len = phys.size() / mem::size_of::<T>();
        assert!(len <= max_len);

        Ok(Dma {
            virt: unsafe { slice::from_raw_parts_mut(crate::physmap(phys.address, phys.size, crate::PHYSMAP_WRITE)? as *mut MaybeUninit<T>, len) } as *mut [MaybeUninit<T>],
            phys,
        })
    }

    /// Map the physical allocation as a slice of `len` elements, and zero it.
    pub fn from_physbox_zeroed_unsized(phys: PhysBox, len: usize) -> Result<Dma<[MaybeUninit<T>]>> {
        let this = Self::from_physbox_uninit_unsized(phys, len)?;
        unsafe { ptr::write_bytes(this.virt as *mut MaybeUninit<u8>, 0, this.phys.size()) }
        Ok(this)
    }
    /// Creates a new DMA buffer with a size only known at runtime.
    ///
    /// # Safety
    /// * `T` must be properly aligned.
    /// * `T` must be valid when zeroed (i.e. it contains no `NonNull` pointers).
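    ///
    /// A minimal sketch (Redox-only, so the doc test is ignored):
    ///
    /// ```ignore
    /// // u64 is valid when zeroed, so this upholds the safety contract.
    /// let buf: Dma<[u64]> = unsafe { Dma::zeroed_unsized(512)? };
    /// assert_eq!(buf.len(), 512);
    /// ```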
    pub unsafe fn zeroed_unsized(count: usize) -> Result<Self> {
        let phys = PhysBox::new(mem::size_of::<T>() * count)?;
        Ok(Self::from_physbox_zeroed_unsized(phys, count)?.assume_init())
    }
}
impl<T> Dma<[MaybeUninit<T>]> {
    /// Assume that every element of the slice has been initialized.
    ///
    /// # Safety
    /// The caller must guarantee that each element is actually initialized.
    pub unsafe fn assume_init(self) -> Dma<[T]> {
        let &Dma { phys: PhysBox { address, size }, virt } = &self;
        mem::forget(self);

        Dma {
            phys: PhysBox { address, size },
            virt: virt as *mut [T],
        }
    }
}

impl<T: ?Sized> Deref for Dma<T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { &*self.virt }
    }
}

impl<T: ?Sized> DerefMut for Dma<T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.virt }
    }
}

impl<T: ?Sized> Drop for Dma<T> {
    fn drop(&mut self) {
        // Drop the contents in place before unmapping the backing pages.
        unsafe { ptr::drop_in_place(self.virt) }
        let _ = unsafe { crate::physunmap(self.virt as *mut u8 as usize) };
    }
}