1 use crate::{Backend, RawDevice, ROUGH_MAX_ATTACHMENT_COUNT};
2 use ash::{version::DeviceV1_0, vk};
3 use hal::{
4     device::OutOfMemory,
5     image::{Extent, SubresourceRange},
6     pso,
7 };
8 use inplace_it::inplace_or_alloc_from_iter;
9 use parking_lot::Mutex;
10 use smallvec::SmallVec;
11 use std::{collections::HashMap, sync::Arc};
12 
/// Thin wrapper around a raw Vulkan semaphore handle.
#[derive(Debug, Hash)]
pub struct Semaphore(pub vk::Semaphore);
15 
/// Thin wrapper around a raw Vulkan fence handle.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct Fence(pub vk::Fence);
18 
/// Thin wrapper around a raw Vulkan event handle.
#[derive(Debug, Hash)]
pub struct Event(pub vk::Event);
21 
/// Raw Vulkan pipeline handle used for graphics work.
#[derive(Debug, Hash)]
pub struct GraphicsPipeline(pub vk::Pipeline);
24 
/// Raw Vulkan pipeline handle used for compute work.
#[derive(Debug, Hash)]
pub struct ComputePipeline(pub vk::Pipeline);
27 
/// A raw Vulkan device-memory allocation.
#[derive(Debug, Hash)]
pub struct Memory {
    pub(crate) raw: vk::DeviceMemory,
}
32 
/// A raw Vulkan buffer handle.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Buffer {
    pub(crate) raw: vk::Buffer,
}
37 
// SAFETY: `Buffer` only holds a `vk::Buffer` handle and has no interior
// mutability of its own; synchronization of the underlying Vulkan object is
// the caller's responsibility per the Vulkan threading model.
// NOTE(review): ash handle types are typically already Send + Sync — confirm
// whether these manual impls are still required.
unsafe impl Sync for Buffer {}
unsafe impl Send for Buffer {}
40 
/// A raw Vulkan buffer-view handle.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct BufferView {
    pub(crate) raw: vk::BufferView,
}
45 
/// A raw Vulkan image together with the creation parameters the backend
/// needs to keep around (type, flags, and full extent).
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct Image {
    pub(crate) raw: vk::Image,
    pub(crate) ty: vk::ImageType,
    pub(crate) flags: vk::ImageCreateFlags,
    pub(crate) extent: vk::Extent3D,
}
53 
/// A raw Vulkan image view, paired with the image it views and the
/// subresource range it covers.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct ImageView {
    /// The image this view was created from.
    pub(crate) image: vk::Image,
    pub(crate) raw: vk::ImageView,
    /// Subresource range (aspects/levels/layers) selected by this view.
    pub(crate) range: SubresourceRange,
}
60 
/// Thin wrapper around a raw Vulkan sampler handle.
#[derive(Debug, Hash)]
pub struct Sampler(pub vk::Sampler);
63 
/// A raw Vulkan render pass plus the number of attachments it was
/// created with.
#[derive(Debug, Hash)]
pub struct RenderPass {
    pub raw: vk::RenderPass,
    pub attachment_count: usize,
}
69 
/// Cache key for legacy framebuffers: the set of attachment image views,
/// kept inline up to `ROUGH_MAX_ATTACHMENT_COUNT` entries to avoid heap
/// allocation in the common case.
pub type FramebufferKey = SmallVec<[vk::ImageView; ROUGH_MAX_ATTACHMENT_COUNT]>;
71 
/// A framebuffer in one of two modes, depending on device capabilities.
#[derive(Debug)]
pub enum Framebuffer {
    /// A single raw framebuffer — presumably created imageless
    /// (attachments bound at render time); confirm against the creation
    /// code elsewhere in this backend.
    ImageLess(vk::Framebuffer),
    /// Classic path: raw framebuffers cached per set of attachment views.
    Legacy {
        /// Debug name carried over from the hal-level framebuffer.
        name: String,
        /// Cache of raw framebuffers keyed by their attachment image views.
        map: Mutex<HashMap<FramebufferKey, vk::Framebuffer>>,
        /// Extent shared by all framebuffers in `map`.
        extent: Extent,
    },
}
81 
/// Shared, immutable list of descriptor-set layout bindings; `Arc` lets a
/// layout and every set allocated from it share one allocation.
pub(crate) type SortedBindings = Arc<Vec<pso::DescriptorSetLayoutBinding>>;
83 
/// A raw Vulkan descriptor-set layout plus the hal-level bindings it was
/// built from (shared with the sets allocated against it).
#[derive(Debug)]
pub struct DescriptorSetLayout {
    pub(crate) raw: vk::DescriptorSetLayout,
    pub(crate) bindings: SortedBindings,
}
89 
/// A raw Vulkan descriptor set plus the bindings of the layout it was
/// allocated with.
#[derive(Debug)]
pub struct DescriptorSet {
    pub(crate) raw: vk::DescriptorSet,
    pub(crate) bindings: SortedBindings,
}
95 
/// A raw Vulkan pipeline layout handle.
#[derive(Debug, Hash)]
pub struct PipelineLayout {
    pub(crate) raw: vk::PipelineLayout,
}
100 
/// A raw Vulkan pipeline cache handle.
#[derive(Debug)]
pub struct PipelineCache {
    pub(crate) raw: vk::PipelineCache,
}
105 
/// A raw Vulkan shader module handle.
#[derive(Debug, Eq, Hash, PartialEq)]
pub struct ShaderModule {
    pub(crate) raw: vk::ShaderModule,
}
110 
/// A Vulkan descriptor pool paired with the device that owns it, plus
/// scratch vectors that are reused across calls to avoid re-allocating.
#[derive(Debug)]
pub struct DescriptorPool {
    /// The raw Vulkan pool handle.
    raw: vk::DescriptorPool,
    /// Device used to allocate, free, and reset sets from this pool.
    device: Arc<RawDevice>,
    /// This vec only exists to re-use allocations when `DescriptorSet`s are freed.
    temp_raw_sets: Vec<vk::DescriptorSet>,
    /// This vec only exists for collecting the layouts when allocating new sets.
    temp_raw_layouts: Vec<vk::DescriptorSetLayout>,
    /// This vec only exists for collecting the bindings when allocating new sets.
    temp_layout_bindings: Vec<SortedBindings>,
}
122 
123 impl DescriptorPool {
new(raw: vk::DescriptorPool, device: &Arc<RawDevice>) -> Self124     pub(crate) fn new(raw: vk::DescriptorPool, device: &Arc<RawDevice>) -> Self {
125         DescriptorPool {
126             raw,
127             device: Arc::clone(device),
128             temp_raw_sets: Vec::new(),
129             temp_raw_layouts: Vec::new(),
130             temp_layout_bindings: Vec::new(),
131         }
132     }
133 
finish(self) -> vk::DescriptorPool134     pub(crate) fn finish(self) -> vk::DescriptorPool {
135         self.raw
136     }
137 }
138 
139 impl pso::DescriptorPool<Backend> for DescriptorPool {
allocate_one( &mut self, layout: &DescriptorSetLayout, ) -> Result<DescriptorSet, pso::AllocationError>140     unsafe fn allocate_one(
141         &mut self,
142         layout: &DescriptorSetLayout,
143     ) -> Result<DescriptorSet, pso::AllocationError> {
144         let raw_layouts = [layout.raw];
145         let info = vk::DescriptorSetAllocateInfo::builder()
146             .descriptor_pool(self.raw)
147             .set_layouts(&raw_layouts);
148 
149         self.device
150             .raw
151             .allocate_descriptor_sets(&info)
152             //Note: https://github.com/MaikKlein/ash/issues/358
153             .map(|mut sets| DescriptorSet {
154                 raw: sets.pop().unwrap(),
155                 bindings: Arc::clone(&layout.bindings),
156             })
157             .map_err(|err| match err {
158                 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
159                     pso::AllocationError::OutOfMemory(OutOfMemory::Host)
160                 }
161                 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
162                     pso::AllocationError::OutOfMemory(OutOfMemory::Device)
163                 }
164                 vk::Result::ERROR_OUT_OF_POOL_MEMORY => pso::AllocationError::OutOfPoolMemory,
165                 _ => pso::AllocationError::FragmentedPool,
166             })
167     }
168 
allocate<'a, I, E>( &mut self, layouts: I, list: &mut E, ) -> Result<(), pso::AllocationError> where I: Iterator<Item = &'a DescriptorSetLayout>, E: Extend<DescriptorSet>,169     unsafe fn allocate<'a, I, E>(
170         &mut self,
171         layouts: I,
172         list: &mut E,
173     ) -> Result<(), pso::AllocationError>
174     where
175         I: Iterator<Item = &'a DescriptorSetLayout>,
176         E: Extend<DescriptorSet>,
177     {
178         self.temp_raw_layouts.clear();
179         self.temp_layout_bindings.clear();
180         for layout in layouts {
181             self.temp_raw_layouts.push(layout.raw);
182             self.temp_layout_bindings.push(Arc::clone(&layout.bindings));
183         }
184 
185         let info = vk::DescriptorSetAllocateInfo::builder()
186             .descriptor_pool(self.raw)
187             .set_layouts(&self.temp_raw_layouts);
188 
189         self.device
190             .raw
191             .allocate_descriptor_sets(&info)
192             .map(|sets| {
193                 list.extend(
194                     sets.into_iter()
195                         .zip(self.temp_layout_bindings.drain(..))
196                         .map(|(raw, bindings)| DescriptorSet { raw, bindings }),
197                 )
198             })
199             .map_err(|err| match err {
200                 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
201                     pso::AllocationError::OutOfMemory(OutOfMemory::Host)
202                 }
203                 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
204                     pso::AllocationError::OutOfMemory(OutOfMemory::Device)
205                 }
206                 vk::Result::ERROR_OUT_OF_POOL_MEMORY => pso::AllocationError::OutOfPoolMemory,
207                 _ => pso::AllocationError::FragmentedPool,
208             })
209     }
210 
free<I>(&mut self, descriptor_sets: I) where I: Iterator<Item = DescriptorSet>,211     unsafe fn free<I>(&mut self, descriptor_sets: I)
212     where
213         I: Iterator<Item = DescriptorSet>,
214     {
215         let sets_iter = descriptor_sets.map(|d| d.raw);
216         inplace_or_alloc_from_iter(sets_iter, |sets| {
217             if !sets.is_empty() {
218                 if let Err(e) = self.device.raw.free_descriptor_sets(self.raw, sets) {
219                     error!("free_descriptor_sets error {}", e);
220                 }
221             }
222         })
223     }
224 
reset(&mut self)225     unsafe fn reset(&mut self) {
226         assert_eq!(
227             Ok(()),
228             self.device
229                 .raw
230                 .reset_descriptor_pool(self.raw, vk::DescriptorPoolResetFlags::empty())
231         );
232     }
233 }
234 
/// Thin wrapper around a raw Vulkan query pool handle.
#[derive(Debug, Hash)]
pub struct QueryPool(pub vk::QueryPool);
237