1 use crate::{window::FramebufferCachePtr, Backend, RawDevice};
2 use ash::{version::DeviceV1_0, vk};
3 use hal::{image::SubresourceRange, pso};
4 use std::{borrow::Borrow, sync::Arc};
5 
/// Wrapper around a raw Vulkan semaphore handle.
#[derive(Debug, Hash)]
pub struct Semaphore(pub vk::Semaphore);
8 
/// Wrapper around a raw Vulkan fence handle.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct Fence(pub vk::Fence);
11 
/// Wrapper around a raw Vulkan event handle.
#[derive(Debug, Hash)]
pub struct Event(pub vk::Event);
14 
/// Wrapper around a raw Vulkan pipeline handle used as a graphics pipeline.
#[derive(Debug, Hash)]
pub struct GraphicsPipeline(pub vk::Pipeline);
17 
/// Wrapper around a raw Vulkan pipeline handle used as a compute pipeline.
#[derive(Debug, Hash)]
pub struct ComputePipeline(pub vk::Pipeline);
20 
/// Wrapper around a raw Vulkan device-memory allocation handle.
#[derive(Debug, Hash)]
pub struct Memory {
    pub(crate) raw: vk::DeviceMemory,
}
25 
/// Wrapper around a raw Vulkan buffer handle.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Buffer {
    pub(crate) raw: vk::Buffer,
}
30 
// SAFETY: `Buffer` only holds a plain Vulkan handle with no interior
// mutability visible here, so sharing/sending it across threads is presumed
// sound. NOTE(review): these impls look redundant if `vk::Buffer` is already
// `Send + Sync` in the `ash` version in use — confirm.
unsafe impl Sync for Buffer {}
unsafe impl Send for Buffer {}
33 
/// Wrapper around a raw Vulkan buffer-view handle.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct BufferView {
    pub(crate) raw: vk::BufferView,
}
38 
/// Wrapper around a raw Vulkan image handle, together with the creation
/// parameters that later operations (e.g. view creation, copies) need to know.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct Image {
    pub(crate) raw: vk::Image,
    // Image dimensionality (1D/2D/3D) as passed at creation.
    pub(crate) ty: vk::ImageType,
    // Creation flags as passed at creation.
    pub(crate) flags: vk::ImageCreateFlags,
    // Full extent of the base mip level.
    pub(crate) extent: vk::Extent3D,
}
46 
/// Who owns an [`ImageView`]'s backing resources.
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum ImageViewOwner {
    /// The view was created by the user and is managed by them.
    User,
    /// The view belongs to a presentation surface; the attached cache
    /// presumably keeps surface-related framebuffers alive — confirm in
    /// `window::FramebufferCachePtr`.
    Surface(FramebufferCachePtr),
}
52 
/// Wrapper around a raw Vulkan image-view handle, retaining the image it was
/// created from, the subresource range it covers, and its ownership kind.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct ImageView {
    // The image this view was created from.
    pub(crate) image: vk::Image,
    pub(crate) view: vk::ImageView,
    // Subresource range (aspects/levels/layers) covered by the view.
    pub(crate) range: SubresourceRange,
    pub(crate) owner: ImageViewOwner,
}
60 
/// Wrapper around a raw Vulkan sampler handle.
#[derive(Debug, Hash)]
pub struct Sampler(pub vk::Sampler);
63 
/// Wrapper around a raw Vulkan render-pass handle.
#[derive(Debug, Hash)]
pub struct RenderPass {
    pub raw: vk::RenderPass,
    // Bitmask over the pass's attachments; presumably bit `i` is set when
    // attachment `i` needs a clear value — confirm at the creation site.
    pub clear_attachments_mask: u64,
}
69 
/// Wrapper around a raw Vulkan framebuffer handle.
#[derive(Debug, Hash)]
pub struct Framebuffer {
    pub(crate) raw: vk::Framebuffer,
    // NOTE(review): presumably `true` when this wrapper is responsible for
    // destroying `raw` (as opposed to a cache owning it) — confirm at the
    // destruction site.
    pub(crate) owned: bool,
}
75 
/// Wrapper around a raw Vulkan descriptor-set-layout handle, retaining the
/// binding descriptions so they can be shared with sets allocated from it.
#[derive(Debug)]
pub struct DescriptorSetLayout {
    pub(crate) raw: vk::DescriptorSetLayout,
    // Shared with every `DescriptorSet` allocated against this layout.
    pub(crate) bindings: Arc<Vec<pso::DescriptorSetLayoutBinding>>,
}
81 
/// Wrapper around a raw Vulkan descriptor-set handle, carrying the binding
/// descriptions of the layout it was allocated from.
#[derive(Debug)]
pub struct DescriptorSet {
    pub(crate) raw: vk::DescriptorSet,
    // Cloned from the originating `DescriptorSetLayout` at allocation time.
    pub(crate) bindings: Arc<Vec<pso::DescriptorSetLayoutBinding>>,
}
87 
/// Wrapper around a raw Vulkan pipeline-layout handle.
#[derive(Debug, Hash)]
pub struct PipelineLayout {
    pub(crate) raw: vk::PipelineLayout,
}
92 
/// Wrapper around a raw Vulkan pipeline-cache handle.
#[derive(Debug)]
pub struct PipelineCache {
    pub(crate) raw: vk::PipelineCache,
}
97 
/// Wrapper around a raw Vulkan shader-module handle.
#[derive(Debug, Eq, Hash, PartialEq)]
pub struct ShaderModule {
    pub(crate) raw: vk::ShaderModule,
}
102 
/// Wrapper around a raw Vulkan descriptor pool plus the device needed to
/// allocate, free, and reset sets through it.
#[derive(Debug)]
pub struct DescriptorPool {
    pub(crate) raw: vk::DescriptorPool,
    // Device handle used for the allocate/free/reset calls.
    pub(crate) device: Arc<RawDevice>,
    /// This vec only exists to re-use allocations when `DescriptorSet`s are freed.
    pub(crate) set_free_vec: Vec<vk::DescriptorSet>,
}
110 
111 impl pso::DescriptorPool<Backend> for DescriptorPool {
allocate<I, E>( &mut self, layout_iter: I, list: &mut E, ) -> Result<(), pso::AllocationError> where I: IntoIterator, I::Item: Borrow<DescriptorSetLayout>, E: Extend<DescriptorSet>,112     unsafe fn allocate<I, E>(
113         &mut self,
114         layout_iter: I,
115         list: &mut E,
116     ) -> Result<(), pso::AllocationError>
117     where
118         I: IntoIterator,
119         I::Item: Borrow<DescriptorSetLayout>,
120         E: Extend<DescriptorSet>,
121     {
122         use std::ptr;
123 
124         let mut raw_layouts = Vec::new();
125         let mut layout_bindings = Vec::new();
126         for layout in layout_iter {
127             raw_layouts.push(layout.borrow().raw);
128             layout_bindings.push(layout.borrow().bindings.clone());
129         }
130 
131         let info = vk::DescriptorSetAllocateInfo {
132             s_type: vk::StructureType::DESCRIPTOR_SET_ALLOCATE_INFO,
133             p_next: ptr::null(),
134             descriptor_pool: self.raw,
135             descriptor_set_count: raw_layouts.len() as u32,
136             p_set_layouts: raw_layouts.as_ptr(),
137         };
138 
139         self.device
140             .raw
141             .allocate_descriptor_sets(&info)
142             .map(|sets| {
143                 list.extend(
144                     sets.into_iter()
145                         .zip(layout_bindings)
146                         .map(|(raw, bindings)| DescriptorSet { raw, bindings }),
147                 )
148             })
149             .map_err(|err| match err {
150                 vk::Result::ERROR_OUT_OF_HOST_MEMORY => pso::AllocationError::Host,
151                 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => pso::AllocationError::Device,
152                 vk::Result::ERROR_OUT_OF_POOL_MEMORY => pso::AllocationError::OutOfPoolMemory,
153                 _ => pso::AllocationError::FragmentedPool,
154             })
155     }
156 
free_sets<I>(&mut self, descriptor_sets: I) where I: IntoIterator<Item = DescriptorSet>,157     unsafe fn free_sets<I>(&mut self, descriptor_sets: I)
158     where
159         I: IntoIterator<Item = DescriptorSet>,
160     {
161         self.set_free_vec.clear();
162         self.set_free_vec
163             .extend(descriptor_sets.into_iter().map(|d| d.raw));
164         self.device
165             .raw
166             .free_descriptor_sets(self.raw, &self.set_free_vec);
167     }
168 
reset(&mut self)169     unsafe fn reset(&mut self) {
170         assert_eq!(
171             Ok(()),
172             self.device
173                 .raw
174                 .reset_descriptor_pool(self.raw, vk::DescriptorPoolResetFlags::empty())
175         );
176     }
177 }
178 
/// Wrapper around a raw Vulkan query-pool handle.
#[derive(Debug, Hash)]
pub struct QueryPool(pub vk::QueryPool);
181