use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::khr, vk};
use inplace_it::inplace_or_alloc_from_iter;
use parking_lot::Mutex;

use std::{
    borrow::Cow, collections::hash_map::Entry, ffi::CString, num::NonZeroU32, ptr, sync::Arc,
};

impl super::DeviceShared {
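    /// Attaches a human-readable `name` to a Vulkan object via the
    /// `VK_EXT_debug_utils` extension. This is a no-op when the instance was
    /// created without debug-utils support, so callers may use it
    /// unconditionally.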
    pub(super) unsafe fn set_object_name(
        &self,
        object_type: vk::ObjectType,
        object: impl vk::Handle,
        name: &str,
    ) {
        use std::ffi::CStr;

        let extension = match self.instance.debug_utils {
            Some(ref debug_utils) => &debug_utils.extension,
            None => return,
        };

        // Keep variables outside the if-else block to ensure they do not
        // go out of scope while we hold a pointer to them
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        // Append a null terminator to the string
        let name_bytes = if name.len() < buffer.len() {
            // Common case, string is very small. Allocate a copy on the stack.
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            // Add null terminator
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            // Less common case, the string is large.
            // This requires a heap allocation.
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(std::iter::once(0))
                .collect();
            &buffer_vec
        };

        let _result = extension.debug_utils_set_object_name(
            self.raw.handle(),
            &vk::DebugUtilsObjectNameInfoEXT::builder()
                .object_type(object_type)
                .object_handle(object.as_raw())
                .object_name(CStr::from_bytes_with_nul_unchecked(name_bytes)),
        );
    }

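    /// Returns a `vk::RenderPass` compatible with `key`, creating and caching
    /// it on first use. Subsequent calls with an equal key reuse the cached
    /// handle; the whole cache is destroyed by `free_resources`.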
    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(e.key().colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(e.key().sample_count);

                for cat in e.key().colors.iter() {
                    color_refs.push(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout: cat.base.layout,
                    });
                    vk_attachments.push({
                        let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
                        vk::AttachmentDescription::builder()
                            .format(cat.base.format)
                            .samples(samples)
                            .load_op(load_op)
                            .store_op(store_op)
                            .initial_layout(cat.base.layout)
                            .final_layout(cat.base.layout)
                            .build()
                    });
                    let at_ref = if let Some(ref rat) = cat.resolve {
                        let at_ref = vk::AttachmentReference {
                            attachment: vk_attachments.len() as u32,
                            layout: rat.layout,
                        };
                        let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
                        let vk_attachment = vk::AttachmentDescription::builder()
                            .format(rat.format)
                            .samples(vk::SampleCountFlags::TYPE_1)
                            .load_op(load_op)
                            .store_op(store_op)
                            .initial_layout(rat.layout)
                            .final_layout(rat.layout)
                            .build();
                        vk_attachments.push(vk_attachment);
                        at_ref
                    } else {
                        vk::AttachmentReference {
                            attachment: vk::ATTACHMENT_UNUSED,
                            layout: vk::ImageLayout::UNDEFINED,
                        }
                    };
                    resolve_refs.push(at_ref);
                }

                if let Some(ref ds) = e.key().depth_stencil {
                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout: ds.base.layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
                    let (stencil_load_op, stencil_store_op) =
                        conv::map_attachment_ops(ds.stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::builder()
                        .format(ds.base.format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(ds.base.layout)
                        .final_layout(ds.base.layout)
                        .build();
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::builder()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass.build()
                }];

                let mut vk_info = vk::RenderPassCreateInfo::builder()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview) = e.key().multiview {
                    // Sanity checks, better to panic here than cause a driver crash
                    assert!(multiview.get() <= 8);
                    assert!(multiview.get() > 1);

                    // Right now we enable all bits on the view masks and correlation masks.
                    // This means we're rendering to all views in the subpass, and that all views
                    // can be rendered concurrently.
                    mask = [(1 << multiview.get()) - 1];

                    // On Vulkan 1.1 or later, this is an alias for core functionality
                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::builder()
                        .view_masks(&mask)
                        .correlation_masks(&mask)
                        .build();
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe { self.raw.create_render_pass(&vk_info, None)? };

                *e.insert(raw)
            }
        })
    }

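    /// Returns a cached (or freshly created) framebuffer for `key` and
    /// `raw_pass`. With imageless framebuffers the attachment views in the
    /// key are null handles (see `create_texture_view`), so the cache
    /// effectively keys on attachment metadata rather than concrete views.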
    pub fn make_framebuffer(
        &self,
        key: super::FramebufferKey,
        raw_pass: vk::RenderPass,
        pass_label: crate::Label,
    ) -> Result<vk::Framebuffer, crate::DeviceError> {
        Ok(match self.framebuffers.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let vk_views = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw)
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| self.private_caps.map_texture_format(at.view_format))
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_image_infos = e
                    .key()
                    .attachments
                    .iter()
                    .enumerate()
                    .map(|(i, at)| {
                        vk::FramebufferAttachmentImageInfo::builder()
                            .usage(conv::map_texture_usage(at.view_usage))
                            .flags(at.raw_image_flags)
                            .width(e.key().extent.width)
                            .height(e.key().extent.height)
                            .layer_count(e.key().extent.depth_or_array_layers)
                            .view_formats(&vk_view_formats[i..i + 1])
                            .build()
                    })
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::builder()
                    .attachment_image_infos(&vk_image_infos)
                    .build();
                let mut vk_info = vk::FramebufferCreateInfo::builder()
                    .render_pass(raw_pass)
                    .width(e.key().extent.width)
                    .height(e.key().extent.height)
                    .layers(e.key().extent.depth_or_array_layers);

                if self.private_caps.imageless_framebuffers {
                    //TODO: https://github.com/MaikKlein/ash/issues/450
                    vk_info = vk_info
                        .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
                        .push_next(&mut vk_attachment_info);
                    vk_info.attachment_count = e.key().attachments.len() as u32;
                } else {
                    vk_info = vk_info.attachments(&vk_views);
                }

                *e.insert(unsafe {
                    let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
                    if let Some(label) = pass_label {
                        self.set_object_name(vk::ObjectType::FRAMEBUFFER, raw, label);
                    }
                    raw
                })
            }
        })
    }

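    /// Widens each user range to the device's non-coherent atom boundaries:
    /// the offset is rounded down and the size rounded up by
    /// `non_coherent_map_mask`, as `vkFlushMappedMemoryRanges` and
    /// `vkInvalidateMappedMemoryRanges` require.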
    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> impl 'a + Iterator<Item = vk::MappedMemoryRange> {
        let block = buffer.block.lock();
        let mask = self.private_caps.non_coherent_map_mask;
        ranges.map(move |range| {
            vk::MappedMemoryRange::builder()
                .memory(*block.memory())
                .offset((block.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
                .build()
        })
    }

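    /// Destroys everything cached on the shared device, and the raw device
    /// itself if we own its handle. Only safe to call once all resources
    /// created from the device are gone.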
    unsafe fn free_resources(&self) {
        for &raw in self.render_passes.lock().values() {
            self.raw.destroy_render_pass(raw, None);
        }
        for &raw in self.framebuffers.lock().values() {
            self.raw.destroy_framebuffer(raw, None);
        }
        if self.handle_is_owned {
            self.raw.destroy_device(None);
        }
    }
}

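// Implementing `gpu_alloc::MemoryDevice` lets the generic `gpu-alloc` crate
// drive raw Vulkan memory: it sub-allocates blocks out of device memory
// objects that it allocates and maps through these callbacks.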
impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: gpu_alloc::AllocationFlags,
    ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
        let mut info = vk::MemoryAllocateInfo::builder()
            .allocation_size(size)
            .memory_type_index(memory_type);

        let mut info_flags;

        if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
            info_flags = vk::MemoryAllocateFlagsInfo::builder()
                .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
            info = info.push_next(&mut info_flags);
        }

        match self.raw.allocate_memory(&info, None) {
            Ok(memory) => Ok(memory),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => panic!("Too many objects"),
            Err(err) => panic!("Unexpected Vulkan error: `{}`", err),
        }
    }

    unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
        self.raw.free_memory(memory, None);
    }

    unsafe fn map_memory(
        &self,
        memory: &mut vk::DeviceMemory,
        offset: u64,
        size: u64,
    ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
        match self
            .raw
            .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
        {
            Ok(ptr) => Ok(ptr::NonNull::new(ptr as *mut u8)
                .expect("Pointer to memory mapping must not be null")),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
            Err(err) => panic!("Unexpected Vulkan error: `{}`", err),
        }
    }

    unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
        self.raw.unmap_memory(*memory);
    }

    unsafe fn invalidate_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }

    unsafe fn flush_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }
}

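// Similarly, implementing `gpu_descriptor::DescriptorDevice` lets the
// `gpu-descriptor` crate manage descriptor pools for us; it calls back into
// these methods to create pools and allocate sets out of them.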
impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        //Note: ignoring other types, since they can't appear here
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
        ];

        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::builder()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts)
            .build();

        match self.raw.create_descriptor_pool(&vk_info, None) {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(other) => {
                log::error!("create_descriptor_pool: {:?}", other);
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        self.raw.destroy_descriptor_pool(pool, None)
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = inplace_or_alloc_from_iter(layouts.cloned(), |layouts_slice| {
            let vk_info = vk::DescriptorSetAllocateInfo::builder()
                .descriptor_pool(*pool)
                .set_layouts(layouts_slice)
                .build();
            self.raw.allocate_descriptor_sets(&vk_info)
        });

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(other) => {
                log::error!("allocate_descriptor_sets: {:?}", other);
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = inplace_or_alloc_from_iter(sets, |sets_slice| {
            self.raw.free_descriptor_sets(*pool, sets_slice)
        });
        match result {
            Ok(()) => {}
            Err(err) => log::error!("free_descriptor_sets: {:?}", err),
        }
    }
}

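/// Holds a compiled shader stage while a pipeline is being created. The
/// `_entry_point` field keeps the `CString` alive because `create_info`
/// stores a raw pointer to it; `temp_raw_module` is `Some` for modules
/// compiled here from naga IR, which the pipeline-creation code is expected
/// to destroy once the pipeline is built.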
struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
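    /// Creates a swapchain for `surface`. If `provided_old_swapchain` is
    /// given, its raw handle is passed to Vulkan as `old_swapchain` to aid
    /// resource reuse, and it is destroyed once creation returns, whether or
    /// not creation succeeded.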
    pub(super) unsafe fn create_swapchain(
        &self,
        surface: &mut super::Surface,
        config: &crate::SurfaceConfiguration,
        provided_old_swapchain: Option<super::Swapchain>,
    ) -> Result<super::Swapchain, crate::SurfaceError> {
        profiling::scope!("Device::create_swapchain");
        let functor = khr::Swapchain::new(&surface.instance.raw, &self.shared.raw);

        let old_swapchain = match provided_old_swapchain {
            Some(osc) => osc.raw,
            None => vk::SwapchainKHR::null(),
        };

        let info = vk::SwapchainCreateInfoKHR::builder()
            .flags(vk::SwapchainCreateFlagsKHR::empty())
            .surface(surface.raw)
            .min_image_count(config.swap_chain_size)
            .image_format(self.shared.private_caps.map_texture_format(config.format))
            .image_color_space(vk::ColorSpaceKHR::SRGB_NONLINEAR)
            .image_extent(vk::Extent2D {
                width: config.extent.width,
                height: config.extent.height,
            })
            .image_array_layers(config.extent.depth_or_array_layers)
            .image_usage(conv::map_texture_usage(config.usage))
            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
            .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
            .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
            .present_mode(conv::map_present_mode(config.present_mode))
            .clipped(true)
            .old_swapchain(old_swapchain);

        let result = {
            profiling::scope!("vkCreateSwapchainKHR");
            functor.create_swapchain(&info, None)
        };

        // Destroy the old swapchain before potentially bailing out on error,
        // so it is not leaked on the failure path.
        if old_swapchain != vk::SwapchainKHR::null() {
            functor.destroy_swapchain(old_swapchain, None)
        }

        let raw = match result {
            Ok(swapchain) => swapchain,
            Err(error) => {
                return Err(match error {
                    vk::Result::ERROR_SURFACE_LOST_KHR => crate::SurfaceError::Lost,
                    vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
                        crate::SurfaceError::Other("Native window is in use")
                    }
                    other => crate::DeviceError::from(other).into(),
                })
            }
        };

        let images = functor
            .get_swapchain_images(raw)
            .map_err(crate::DeviceError::from)?;

        let vk_info = vk::FenceCreateInfo::builder().build();
        let fence = self
            .shared
            .raw
            .create_fence(&vk_info, None)
            .map_err(crate::DeviceError::from)?;

        Ok(super::Swapchain {
            raw,
            functor,
            device: Arc::clone(&self.shared),
            fence,
            images,
            config: config.clone(),
        })
    }

    /// # Safety
    ///
    /// - `vk_image` must be created respecting `desc`
    /// - If `drop_guard` is `Some`, the application must manually destroy the image handle. This
    ///   can be done inside the `Drop` impl of `drop_guard`.
    pub unsafe fn texture_from_raw(
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_guard: Option<super::DropGuard>,
    ) -> super::Texture {
        super::Texture {
            raw: vk_image,
            drop_guard,
            block: None,
            usage: desc.usage,
            aspects: crate::FormatAspects::from(desc.format),
            format_info: desc.format.describe(),
            raw_flags: vk::ImageCreateFlags::empty(),
            copy_size: conv::map_extent_to_copy_size(&desc.size, desc.dimension),
        }
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::builder()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared.raw.create_shader_module(&vk_info, None)?
        };
        Ok(raw)
    }

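    // Lowers one programmable stage to a `vk::PipelineShaderStageCreateInfo`.
    // For naga-IR modules this runs the SPIR-V backend on the fly; when
    // `runtime_checks` is disabled, all bounds-check policies are switched to
    // `Unchecked` to skip the generated guards.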
    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::Api>,
        naga_stage: naga::ShaderStage,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_string(),
                    shader_stage: naga_stage,
                };
                let temp_options;
                let options = if !runtime_checks {
                    temp_options = naga::back::spv::Options {
                        bounds_check_policies: naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image: naga::proc::BoundsCheckPolicy::Unchecked,
                        },
                        ..self.naga_options.clone()
                    };
                    &temp_options
                } else {
                    &self.naga_options
                };
                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(
                        &naga_shader.module,
                        &naga_shader.info,
                        options,
                        Some(&pipeline_options),
                    )
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{}", e)))?;
                self.create_shader_module_impl(&spv)?
            }
        };

        let entry_point = CString::new(stage.entry_point).unwrap();
        let create_info = vk::PipelineShaderStageCreateInfo::builder()
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module)
            .name(&entry_point)
            .build();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }
}

impl crate::Device<super::Api> for super::Device {
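    // Tears the device down: both allocators are drained first, then the
    // queue's relay semaphores, and finally the cached render passes,
    // framebuffers, and (if owned) the raw device via `free_resources`.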
    unsafe fn exit(self, queue: super::Queue) {
        self.mem_allocator.into_inner().cleanup(&*self.shared);
        self.desc_allocator.into_inner().cleanup(&*self.shared);
        for &sem in queue.relay_semaphores.iter() {
            self.shared.raw.destroy_semaphore(sem, None);
        }
        self.shared.free_resources();
    }

    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::builder()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = self.shared.raw.create_buffer(&vk_info, None)?;
        let req = self.shared.raw.get_buffer_memory_requirements(raw);

        let mut alloc_usage = if desc
            .usage
            .intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE)
        {
            let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
            //TODO: find a way to use `crate::MemoryFlags::PREFER_COHERENT`
            flags.set(
                gpu_alloc::UsageFlags::DOWNLOAD,
                desc.usage.contains(crate::BufferUses::MAP_READ),
            );
            flags.set(
                gpu_alloc::UsageFlags::UPLOAD,
                desc.usage.contains(crate::BufferUses::MAP_WRITE),
            );
            flags
        } else {
            gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
        };
        alloc_usage.set(
            gpu_alloc::UsageFlags::TRANSIENT,
            desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
        );

        let block = self.mem_allocator.lock().alloc(
            &*self.shared,
            gpu_alloc::Request {
                size: req.size,
                align_mask: req.alignment - 1,
                usage: alloc_usage,
                memory_types: req.memory_type_bits & self.valid_ash_memory_types,
            },
        )?;

        self.shared
            .raw
            .bind_buffer_memory(raw, *block.memory(), block.offset())?;

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::BUFFER, raw, label);
        }

        Ok(super::Buffer {
            raw,
            block: Mutex::new(block),
        })
    }
    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
        self.shared.raw.destroy_buffer(buffer.raw, None);
        self.mem_allocator
            .lock()
            .dealloc(&*self.shared, buffer.block.into_inner());
    }

    unsafe fn map_buffer(
        &self,
        buffer: &super::Buffer,
        range: crate::MemoryRange,
    ) -> Result<crate::BufferMapping, crate::DeviceError> {
        let size = range.end - range.start;
        let mut block = buffer.block.lock();
        let ptr = block.map(&*self.shared, range.start, size as usize)?;
        let is_coherent = block
            .props()
            .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
        Ok(crate::BufferMapping { ptr, is_coherent })
    }
    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) -> Result<(), crate::DeviceError> {
        buffer.block.lock().unmap(&*self.shared);
        Ok(())
    }

    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        let vk_ranges = self.shared.make_memory_ranges(buffer, ranges);
        inplace_or_alloc_from_iter(vk_ranges, |array| {
            self.shared.raw.flush_mapped_memory_ranges(array).unwrap()
        });
    }
    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        let vk_ranges = self.shared.make_memory_ranges(buffer, ranges);
        inplace_or_alloc_from_iter(vk_ranges, |array| {
            self.shared
                .raw
                .invalidate_mapped_memory_ranges(array)
                .unwrap()
        });
    }

    unsafe fn create_texture(
        &self,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        let array_layer_count = match desc.dimension {
            wgt::TextureDimension::D3 => 1,
            _ => desc.size.depth_or_array_layers,
        };
        let copy_size = conv::map_extent_to_copy_size(&desc.size, desc.dimension);

        let mut raw_flags = vk::ImageCreateFlags::empty();
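        // A square, single-sampled 2D image whose layer count is a multiple
        // of 6 can later be viewed as a cube or cube array, so mark it as
        // cube-compatible up front.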
        if desc.dimension == wgt::TextureDimension::D2
            && desc.size.depth_or_array_layers % 6 == 0
            && desc.sample_count == 1
            && desc.size.width == desc.size.height
        {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let vk_info = vk::ImageCreateInfo::builder()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(self.shared.private_caps.map_texture_format(desc.format))
            .extent(vk::Extent3D {
                width: copy_size.width,
                height: copy_size.height,
                depth: copy_size.depth,
            })
            .mip_levels(desc.mip_level_count)
            .array_layers(array_layer_count)
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let raw = self.shared.raw.create_image(&vk_info, None)?;
        let req = self.shared.raw.get_image_memory_requirements(raw);

        let block = self.mem_allocator.lock().alloc(
            &*self.shared,
            gpu_alloc::Request {
                size: req.size,
                align_mask: req.alignment - 1,
                usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
                memory_types: req.memory_type_bits & self.valid_ash_memory_types,
            },
        )?;

        self.shared
            .raw
            .bind_image_memory(raw, *block.memory(), block.offset())?;

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::IMAGE, raw, label);
        }

        Ok(super::Texture {
            raw,
            drop_guard: None,
            block: Some(block),
            usage: desc.usage,
            aspects: crate::FormatAspects::from(desc.format),
            format_info: desc.format.describe(),
            raw_flags,
            copy_size,
        })
    }
    unsafe fn destroy_texture(&self, texture: super::Texture) {
        if texture.drop_guard.is_none() {
            self.shared.raw.destroy_image(texture.raw, None);
        }
        if let Some(block) = texture.block {
            self.mem_allocator.lock().dealloc(&*self.shared, block);
        }
    }

    unsafe fn create_texture_view(
        &self,
        texture: &super::Texture,
        desc: &crate::TextureViewDescriptor,
    ) -> Result<super::TextureView, crate::DeviceError> {
        let subresource_range = conv::map_subresource_range(&desc.range, texture.aspects);
        let mut vk_info = vk::ImageViewCreateInfo::builder()
            .flags(vk::ImageViewCreateFlags::empty())
            .image(texture.raw)
            .view_type(conv::map_view_dimension(desc.dimension))
            .format(self.shared.private_caps.map_texture_format(desc.format))
            .subresource_range(subresource_range);
        let layers =
            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");

        let mut image_view_info;
        let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
            image_view_info = vk::ImageViewUsageCreateInfo::builder()
                .usage(conv::map_texture_usage(desc.usage))
                .build();
            vk_info = vk_info.push_next(&mut image_view_info);
            desc.usage
        } else {
            texture.usage
        };

        let raw = self.shared.raw.create_image_view(&vk_info, None)?;

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::IMAGE_VIEW, raw, label);
        }

        let attachment = super::FramebufferAttachment {
            raw: if self.shared.private_caps.imageless_framebuffers {
                vk::ImageView::null()
            } else {
                raw
            },
            raw_image_flags: texture.raw_flags,
            view_usage,
            view_format: desc.format,
        };

        Ok(super::TextureView {
            raw,
            layers,
            attachment,
        })
    }
    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
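        // Cached framebuffers hold raw image views, so every cached entry
        // referencing this view must be destroyed before the view itself.
        // Imageless framebuffers store no views, so their cache is untouched.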
        if !self.shared.private_caps.imageless_framebuffers {
            let mut fbuf_lock = self.shared.framebuffers.lock();
            for (key, &raw_fbuf) in fbuf_lock.iter() {
                if key.attachments.iter().any(|at| at.raw == view.raw) {
                    self.shared.raw.destroy_framebuffer(raw_fbuf, None);
                }
            }
            fbuf_lock.retain(|key, _| !key.attachments.iter().any(|at| at.raw == view.raw));
        }
        self.shared.raw.destroy_image_view(view.raw, None);
    }

    unsafe fn create_sampler(
        &self,
        desc: &crate::SamplerDescriptor,
    ) -> Result<super::Sampler, crate::DeviceError> {
        let lod_range = desc.lod_clamp.clone().unwrap_or(0.0..16.0);

        let mut vk_info = vk::SamplerCreateInfo::builder()
            .flags(vk::SamplerCreateFlags::empty())
            .mag_filter(conv::map_filter_mode(desc.mag_filter))
            .min_filter(conv::map_filter_mode(desc.min_filter))
            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
            .min_lod(lod_range.start)
            .max_lod(lod_range.end);

        if let Some(fun) = desc.compare {
            vk_info = vk_info
                .compare_enable(true)
                .compare_op(conv::map_comparison(fun));
        }

        if let Some(aniso) = desc.anisotropy_clamp {
            if self
                .shared
                .downlevel_flags
                .contains(wgt::DownlevelFlags::ANISOTROPIC_FILTERING)
            {
                vk_info = vk_info
                    .anisotropy_enable(true)
                    .max_anisotropy(aniso.get() as f32);
            }
        }
        if let Some(color) = desc.border_color {
            vk_info = vk_info.border_color(conv::map_border_color(color));
        }

        let raw = self.shared.raw.create_sampler(&vk_info, None)?;

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::SAMPLER, raw, label);
        }

        Ok(super::Sampler { raw })
    }
    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
        self.shared.raw.destroy_sampler(sampler.raw, None);
    }

    unsafe fn create_command_encoder(
        &self,
        desc: &crate::CommandEncoderDescriptor<super::Api>,
    ) -> Result<super::CommandEncoder, crate::DeviceError> {
        let vk_info = vk::CommandPoolCreateInfo::builder()
            .queue_family_index(desc.queue.family_index)
            .build();
        let raw = self.shared.raw.create_command_pool(&vk_info, None)?;

        Ok(super::CommandEncoder {
            raw,
            device: Arc::clone(&self.shared),
            active: vk::CommandBuffer::null(),
            bind_point: vk::PipelineBindPoint::default(),
            temp: super::Temp::default(),
            free: Vec::new(),
            discarded: Vec::new(),
            rpass_debug_marker_active: false,
        })
    }
    unsafe fn destroy_command_encoder(&self, cmd_encoder: super::CommandEncoder) {
        if !cmd_encoder.free.is_empty() {
            self.shared
                .raw
                .free_command_buffers(cmd_encoder.raw, &cmd_encoder.free);
        }
        if !cmd_encoder.discarded.is_empty() {
            self.shared
                .raw
                .free_command_buffers(cmd_encoder.raw, &cmd_encoder.discarded);
        }
        self.shared.raw.destroy_command_pool(cmd_encoder.raw, None);
    }

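    // Builds the descriptor set layout together with the per-type totals
    // (`desc_count`) that gpu-descriptor uses to size its pools, and a dense
    // `types` table indexed by binding number that `create_bind_group` later
    // consults when writing descriptors.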
    unsafe fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor,
    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
        let mut types = Vec::new();
        for entry in desc.entries {
            let count = entry.count.map_or(1, |c| c.get());
            if entry.binding as usize >= types.len() {
                types.resize(
                    entry.binding as usize + 1,
                    (vk::DescriptorType::INPUT_ATTACHMENT, 0),
                );
            }
            types[entry.binding as usize] = (
                conv::map_binding_type(entry.ty),
                entry.count.map_or(1, |c| c.get()),
            );

            match entry.ty {
                wgt::BindingType::Buffer {
                    ty,
                    has_dynamic_offset,
                    ..
                } => match ty {
                    wgt::BufferBindingType::Uniform => {
                        if has_dynamic_offset {
                            desc_count.uniform_buffer_dynamic += count;
                        } else {
                            desc_count.uniform_buffer += count;
                        }
                    }
                    wgt::BufferBindingType::Storage { .. } => {
                        if has_dynamic_offset {
                            desc_count.storage_buffer_dynamic += count;
                        } else {
                            desc_count.storage_buffer += count;
                        }
                    }
                },
                wgt::BindingType::Sampler { .. } => {
                    desc_count.sampler += count;
                }
                wgt::BindingType::Texture { .. } => {
                    desc_count.sampled_image += count;
                }
                wgt::BindingType::StorageTexture { .. } => {
                    desc_count.storage_image += count;
                }
            }
        }

        //Note: not bothering with inplace_or_alloc_from_iter here as it's low frequency
        let vk_bindings = desc
            .entries
            .iter()
            .map(|entry| vk::DescriptorSetLayoutBinding {
                binding: entry.binding,
                descriptor_type: types[entry.binding as usize].0,
                descriptor_count: types[entry.binding as usize].1,
                stage_flags: conv::map_shader_stage(entry.visibility),
                p_immutable_samplers: ptr::null(),
            })
            .collect::<Vec<_>>();

        let vk_info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(&vk_bindings);

        let mut binding_flag_info;
        let binding_flag_vec;
        let mut requires_update_after_bind = false;

        let partially_bound = desc
            .flags
            .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);

        let vk_info = if !self.shared.uab_types.is_empty() || partially_bound {
            binding_flag_vec = desc
                .entries
                .iter()
                .map(|entry| {
                    let mut flags = vk::DescriptorBindingFlags::empty();

                    if partially_bound && entry.count.is_some() {
                        flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
                    }

                    let uab_type = match entry.ty {
                        wgt::BindingType::Buffer {
                            ty: wgt::BufferBindingType::Uniform,
                            ..
                        } => super::UpdateAfterBindTypes::UNIFORM_BUFFER,
                        wgt::BindingType::Buffer {
                            ty: wgt::BufferBindingType::Storage { .. },
                            ..
                        } => super::UpdateAfterBindTypes::STORAGE_BUFFER,
                        wgt::BindingType::Texture { .. } => {
                            super::UpdateAfterBindTypes::SAMPLED_TEXTURE
                        }
                        wgt::BindingType::StorageTexture { .. } => {
                            super::UpdateAfterBindTypes::STORAGE_TEXTURE
                        }
                        _ => super::UpdateAfterBindTypes::empty(),
                    };

                    if !uab_type.is_empty() && self.shared.uab_types.contains(uab_type) {
                        flags |= vk::DescriptorBindingFlags::UPDATE_AFTER_BIND;
                        requires_update_after_bind = true;
                    }

                    flags
                })
                .collect::<Vec<_>>();

            binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::builder()
                .binding_flags(&binding_flag_vec);

            vk_info.push_next(&mut binding_flag_info)
        } else {
            vk_info
        };

        let dsl_create_flags = if requires_update_after_bind {
            vk::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL
        } else {
            vk::DescriptorSetLayoutCreateFlags::empty()
        };

        let vk_info = vk_info.flags(dsl_create_flags);

        let raw = self
            .shared
            .raw
            .create_descriptor_set_layout(&vk_info, None)?;

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::DESCRIPTOR_SET_LAYOUT, raw, label);
        }

        Ok(super::BindGroupLayout {
            raw,
            desc_count,
            types: types.into_boxed_slice(),
            requires_update_after_bind,
        })
    }
    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
        self.shared
            .raw
            .destroy_descriptor_set_layout(bg_layout.raw, None);
    }

    unsafe fn create_pipeline_layout(
        &self,
        desc: &crate::PipelineLayoutDescriptor<super::Api>,
    ) -> Result<super::PipelineLayout, crate::DeviceError> {
        //Note: not bothering with inplace_or_alloc_from_iter here as it's low frequency
        let vk_set_layouts = desc
            .bind_group_layouts
            .iter()
            .map(|bgl| bgl.raw)
            .collect::<Vec<_>>();
        let vk_push_constant_ranges = desc
            .push_constant_ranges
            .iter()
            .map(|pcr| vk::PushConstantRange {
                stage_flags: conv::map_shader_stage(pcr.stages),
                offset: pcr.range.start,
                size: pcr.range.end - pcr.range.start,
            })
            .collect::<Vec<_>>();

        let vk_info = vk::PipelineLayoutCreateInfo::builder()
            .flags(vk::PipelineLayoutCreateFlags::empty())
            .set_layouts(&vk_set_layouts)
            .push_constant_ranges(&vk_push_constant_ranges);

        let raw = {
            profiling::scope!("vkCreatePipelineLayout");
            self.shared.raw.create_pipeline_layout(&vk_info, None)?
        };

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::PIPELINE_LAYOUT, raw, label);
        }

        Ok(super::PipelineLayout { raw })
    }
    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
        self.shared
            .raw
            .destroy_pipeline_layout(pipeline_layout.raw, None);
    }

    unsafe fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<super::Api>,
    ) -> Result<super::BindGroup, crate::DeviceError> {
        let mut vk_sets = self.desc_allocator.lock().allocate(
            &*self.shared,
            &desc.layout.raw,
            if desc.layout.requires_update_after_bind {
                gpu_descriptor::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND
            } else {
                gpu_descriptor::DescriptorSetLayoutCreateFlags::empty()
            },
            &desc.layout.desc_count,
            1,
        )?;

        let set = vk_sets.pop().unwrap();
        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::DESCRIPTOR_SET, *set.raw(), label);
        }

        let mut writes = Vec::with_capacity(desc.entries.len());
        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
        let mut sampler_infos = Vec::with_capacity(desc.samplers.len());
        let mut image_infos = Vec::with_capacity(desc.textures.len());
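        // Note: the `with_capacity` reservations above are load-bearing. Each
        // `vk::WriteDescriptorSet` built below stores a raw pointer into one
        // of these info vectors, so they must never reallocate while the
        // writes are being assembled.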
        for entry in desc.entries {
            let (ty, size) = desc.layout.types[entry.binding as usize];
            if size == 0 {
                continue; // empty slot
            }
            let mut write = vk::WriteDescriptorSet::builder()
                .dst_set(*set.raw())
                .dst_binding(entry.binding)
                .descriptor_type(ty);
            write = match ty {
                vk::DescriptorType::SAMPLER => {
                    let index = sampler_infos.len();
                    let binding = desc.samplers[entry.resource_index as usize];
                    let vk_info = vk::DescriptorImageInfo::builder()
                        .sampler(binding.raw)
                        .build();
                    sampler_infos.push(vk_info);
                    write.image_info(&sampler_infos[index..])
                }
                vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
                    let index = image_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
                        |binding| {
                            let layout =
                                conv::derive_image_layout(binding.usage, binding.view.aspects());
                            vk::DescriptorImageInfo::builder()
                                .image_view(binding.view.raw)
                                .image_layout(layout)
                                .build()
                        },
                    ));
                    write.image_info(&image_infos[index..])
                }
                vk::DescriptorType::UNIFORM_BUFFER
                | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
                | vk::DescriptorType::STORAGE_BUFFER
                | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
                    let index = buffer_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
                        |binding| {
                            vk::DescriptorBufferInfo::builder()
                                .buffer(binding.buffer.raw)
                                .offset(binding.offset)
                                .range(binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get))
                                .build()
                        },
                    ));
                    write.buffer_info(&buffer_infos[index..])
                }
                _ => unreachable!(),
            };
            writes.push(write.build());
        }

        self.shared.raw.update_descriptor_sets(&writes, &[]);
        Ok(super::BindGroup { set })
    }

    unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
        self.desc_allocator
            .lock()
            .free(&*self.shared, Some(group.set));
    }

    unsafe fn create_shader_module(
        &self,
        desc: &crate::ShaderModuleDescriptor,
        shader: crate::ShaderInput,
    ) -> Result<super::ShaderModule, crate::ShaderError> {
        let spv = match shader {
            crate::ShaderInput::Naga(naga_shader) => {
                if self
                    .shared
                    .workarounds
                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
                {
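                    // This workaround defers SPIR-V generation to pipeline
                    // creation, so each entry point can be compiled into its
                    // own module.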
                    return Ok(super::ShaderModule::Intermediate {
                        naga_shader,
                        runtime_checks: desc.runtime_checks,
                    });
                }
                let mut naga_options = self.naga_options.clone();
                if !desc.runtime_checks {
                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                        index: naga::proc::BoundsCheckPolicy::Unchecked,
                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                        image: naga::proc::BoundsCheckPolicy::Unchecked,
                    };
                }
                Cow::Owned(
                    naga::back::spv::write_vec(
                        &naga_shader.module,
                        &naga_shader.info,
                        &naga_options,
                        None,
                    )
                    .map_err(|e| crate::ShaderError::Compilation(format!("{}", e)))?,
                )
            }
            crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
        };

        let raw = self.create_shader_module_impl(&*spv)?;

        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::SHADER_MODULE, raw, label);
        }

        Ok(super::ShaderModule::Raw(raw))
    }

    unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
        match module {
            super::ShaderModule::Raw(raw) => {
                self.shared.raw.destroy_shader_module(raw, None);
            }
            super::ShaderModule::Intermediate { .. } => {}
        }
    }

    unsafe fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<super::Api>,
    ) -> Result<super::RenderPipeline, crate::PipelineError> {
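        // These states are supplied at encode time (vkCmdSetViewport and
        // friends), so changing them never requires rebuilding the pipeline.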
        let dynamic_states = [
            vk::DynamicState::VIEWPORT,
            vk::DynamicState::SCISSOR,
            vk::DynamicState::BLEND_CONSTANTS,
            vk::DynamicState::STENCIL_REFERENCE,
        ];
        let mut compatible_rp_key = super::RenderPassKey {
            sample_count: desc.multisample.count,
            multiview: desc.multiview,
            ..Default::default()
        };
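        // The pipeline must be created against a render pass compatible with
        // the one used at draw time; the key is filled in below and resolved
        // through the shared render-pass cache.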
        let mut stages = ArrayVec::<_, 2>::new();
        let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
        let mut vertex_attributes = Vec::new();

        for (i, vb) in desc.vertex_buffers.iter().enumerate() {
            vertex_buffers.push(vk::VertexInputBindingDescription {
                binding: i as u32,
                stride: vb.array_stride as u32,
                input_rate: match vb.step_mode {
                    wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
                    wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
                },
            });
            for at in vb.attributes {
                vertex_attributes.push(vk::VertexInputAttributeDescription {
                    location: at.shader_location,
                    binding: i as u32,
                    format: conv::map_vertex_format(at.format),
                    offset: at.offset as u32,
                });
            }
        }

        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::builder()
            .vertex_binding_descriptions(&vertex_buffers)
            .vertex_attribute_descriptions(&vertex_attributes)
            .build();

        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::builder()
            .topology(conv::map_topology(desc.primitive.topology))
            .primitive_restart_enable(desc.primitive.strip_index_format.is_some())
            .build();

        let compiled_vs = self.compile_stage(&desc.vertex_stage, naga::ShaderStage::Vertex)?;
        stages.push(compiled_vs.create_info);
        let compiled_fs = match desc.fragment_stage {
            Some(ref stage) => {
                let compiled = self.compile_stage(stage, naga::ShaderStage::Fragment)?;
                stages.push(compiled.create_info);
                Some(compiled)
            }
            None => None,
        };

        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::builder()
            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
            .front_face(conv::map_front_face(desc.primitive.front_face))
            .line_width(1.0);
        if let Some(face) = desc.primitive.cull_mode {
            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face));
        }
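        // Structs chained in via `push_next` are borrowed by the builder, so
        // they are declared here, ahead of the branches that may attach them.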
        let mut vk_rasterization_conservative_state =
            vk::PipelineRasterizationConservativeStateCreateInfoEXT::builder()
                .conservative_rasterization_mode(vk::ConservativeRasterizationModeEXT::OVERESTIMATE)
                .build();
        if desc.primitive.conservative {
            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
        }
        let mut vk_depth_clip_state =
            vk::PipelineRasterizationDepthClipStateCreateInfoEXT::builder()
                .depth_clip_enable(false)
                .build();
        if desc.primitive.unclipped_depth {
            vk_rasterization = vk_rasterization.push_next(&mut vk_depth_clip_state);
        }

        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::builder();
        if let Some(ref ds) = desc.depth_stencil {
            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
            let vk_layout = if ds.is_read_only() {
                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
            } else {
                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
            };
            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: super::AttachmentKey::compatible(vk_format, vk_layout),
                stencil_ops: crate::AttachmentOps::all(),
            });

            if ds.is_depth_enabled() {
                vk_depth_stencil = vk_depth_stencil
                    .depth_test_enable(true)
                    .depth_write_enable(ds.depth_write_enabled)
                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
            }
            if ds.stencil.is_enabled() {
                let s = &ds.stencil;
                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
                vk_depth_stencil = vk_depth_stencil
                    .stencil_test_enable(true)
                    .front(front)
                    .back(back);
            }

            if ds.bias.is_enabled() {
                vk_rasterization = vk_rasterization
                    .depth_bias_enable(true)
                    .depth_bias_constant_factor(ds.bias.constant as f32)
                    .depth_bias_clamp(ds.bias.clamp)
                    .depth_bias_slope_factor(ds.bias.slope_scale);
            }
        }

        let vk_viewport = vk::PipelineViewportStateCreateInfo::builder()
            .flags(vk::PipelineViewportStateCreateFlags::empty())
            .scissor_count(1)
            .viewport_count(1)
            .build();

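        // Vulkan takes the sample mask as an array of 32-bit words; split
        // wgpu's 64-bit mask into its low and high halves.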
        let vk_sample_mask = [
            desc.multisample.mask as u32,
            (desc.multisample.mask >> 32) as u32,
        ];
        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::builder()
            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
            .sample_mask(&vk_sample_mask)
            .build();

        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
        for cat in desc.color_targets {
            let vk_format = self.shared.private_caps.map_texture_format(cat.format);
            compatible_rp_key.colors.push(super::ColorAttachmentKey {
                base: super::AttachmentKey::compatible(
                    vk_format,
                    vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                ),
                resolve: None,
            });

            let mut vk_attachment = vk::PipelineColorBlendAttachmentState::builder()
                .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
            if let Some(ref blend) = cat.blend {
                let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
                let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
                vk_attachment = vk_attachment
                    .blend_enable(true)
                    .color_blend_op(color_op)
                    .src_color_blend_factor(color_src)
                    .dst_color_blend_factor(color_dst)
                    .alpha_blend_op(alpha_op)
                    .src_alpha_blend_factor(alpha_src)
                    .dst_alpha_blend_factor(alpha_dst);
            }
            vk_attachments.push(vk_attachment.build());
        }

        let vk_color_blend = vk::PipelineColorBlendStateCreateInfo::builder()
            .attachments(&vk_attachments)
            .build();

        let vk_dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()
            .dynamic_states(&dynamic_states)
            .build();

        let raw_pass = self
            .shared
            .make_render_pass(compatible_rp_key)
            .map_err(crate::DeviceError::from)?;

        let vk_infos = [{
            vk::GraphicsPipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stages(&stages)
                .vertex_input_state(&vk_vertex_input)
                .input_assembly_state(&vk_input_assembly)
                .rasterization_state(&vk_rasterization)
                .viewport_state(&vk_viewport)
                .multisample_state(&vk_multisample)
                .depth_stencil_state(&vk_depth_stencil)
                .color_blend_state(&vk_color_blend)
                .dynamic_state(&vk_dynamic_state)
                .render_pass(raw_pass)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateGraphicsPipelines");
            self.shared
                .raw
                .create_graphics_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                .map_err(|(_, e)| crate::DeviceError::from(e))?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::PIPELINE, raw, label);
        }

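        // Stages compiled on the fly from an intermediate naga module hold a
        // temporary vk::ShaderModule, no longer needed once the pipeline exists.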
        if let Some(raw_module) = compiled_vs.temp_raw_module {
            self.shared.raw.destroy_shader_module(raw_module, None);
        }
        if let Some(CompiledStage {
            temp_raw_module: Some(raw_module),
            ..
        }) = compiled_fs
        {
            self.shared.raw.destroy_shader_module(raw_module, None);
        }

        Ok(super::RenderPipeline { raw })
    }

    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
        self.shared.raw.destroy_pipeline(pipeline.raw, None);
    }

    unsafe fn create_compute_pipeline(
        &self,
        desc: &crate::ComputePipelineDescriptor<super::Api>,
    ) -> Result<super::ComputePipeline, crate::PipelineError> {
        let compiled = self.compile_stage(&desc.stage, naga::ShaderStage::Compute)?;

        let vk_infos = [{
            vk::ComputePipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stage(compiled.create_info)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateComputePipelines");
            self.shared
                .raw
                .create_compute_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                .map_err(|(_, e)| crate::DeviceError::from(e))?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::PIPELINE, raw, label);
        }

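        // As with render pipelines, a temporary module compiled from an
        // intermediate shader can be freed once the pipeline exists.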
        if let Some(raw_module) = compiled.temp_raw_module {
            self.shared.raw.destroy_shader_module(raw_module, None);
        }

        Ok(super::ComputePipeline { raw })
    }

    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
        self.shared.raw.destroy_pipeline(pipeline.raw, None);
    }

    unsafe fn create_query_set(
        &self,
        desc: &wgt::QuerySetDescriptor<crate::Label>,
    ) -> Result<super::QuerySet, crate::DeviceError> {
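        // Only pipeline-statistics pools carry a set of counters to collect;
        // occlusion and timestamp pools leave the flags empty.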
        let (vk_type, pipeline_statistics) = match desc.ty {
            wgt::QueryType::Occlusion => (
                vk::QueryType::OCCLUSION,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
            wgt::QueryType::PipelineStatistics(statistics) => (
                vk::QueryType::PIPELINE_STATISTICS,
                conv::map_pipeline_statistics(statistics),
            ),
            wgt::QueryType::Timestamp => (
                vk::QueryType::TIMESTAMP,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

        let vk_info = vk::QueryPoolCreateInfo::builder()
            .query_type(vk_type)
            .query_count(desc.count)
            .pipeline_statistics(pipeline_statistics)
            .build();

        let raw = self.shared.raw.create_query_pool(&vk_info, None)?;
        if let Some(label) = desc.label {
            self.shared
                .set_object_name(vk::ObjectType::QUERY_POOL, raw, label);
        }

        Ok(super::QuerySet { raw })
    }

    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
        self.shared.raw.destroy_query_pool(set.raw, None);
    }

    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
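        // Prefer a timeline semaphore when supported: one object whose 64-bit
        // counter tracks the fence value directly. Otherwise fall back to a
        // pool of binary VkFences, one per signaled value.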
        Ok(if self.shared.private_caps.timeline_semaphores {
            let mut sem_type_info =
                vk::SemaphoreTypeCreateInfo::builder().semaphore_type(vk::SemaphoreType::TIMELINE);
            let vk_info = vk::SemaphoreCreateInfo::builder().push_next(&mut sem_type_info);
            let raw = self.shared.raw.create_semaphore(&vk_info, None)?;
            super::Fence::TimelineSemaphore(raw)
        } else {
            super::Fence::FencePool {
                last_completed: 0,
                active: Vec::new(),
                free: Vec::new(),
            }
        })
    }

    unsafe fn destroy_fence(&self, fence: super::Fence) {
        match fence {
            super::Fence::TimelineSemaphore(raw) => {
                self.shared.raw.destroy_semaphore(raw, None);
            }
            super::Fence::FencePool {
                active,
                free,
                last_completed: _,
            } => {
                for (_, raw) in active {
                    self.shared.raw.destroy_fence(raw, None);
                }
                for raw in free {
                    self.shared.raw.destroy_fence(raw, None);
                }
            }
        }
    }

    unsafe fn get_fence_value(
        &self,
        fence: &super::Fence,
    ) -> Result<crate::FenceValue, crate::DeviceError> {
        fence.get_latest(
            &self.shared.raw,
            self.shared.extension_fns.timeline_semaphore.as_ref(),
        )
    }

    unsafe fn wait(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ms: u32,
    ) -> Result<bool, crate::DeviceError> {
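        // Vulkan wait calls take their timeout in nanoseconds.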
        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::builder()
                    .semaphores(&semaphores)
                    .values(&values);
                let result = match self.shared.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    }
                    Some(super::ExtensionFn::Promoted) => {
                        self.shared.raw.wait_semaphores(&vk_info, timeout_ns)
                    }
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(other.into()),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
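                    // Find an active fence whose associated value is at or
                    // past the target; once it signals, the target is reached.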
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match self.shared.raw.wait_for_fences(&[raw], true, timeout_ns) {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(other.into()),
                            }
                        }
                        None => {
                            log::error!("No signals reached value {}", wait_value);
                            Err(crate::DeviceError::Lost)
                        }
                    }
                }
            }
        }
    }

    unsafe fn start_capture(&self) -> bool {
        #[cfg(feature = "renderdoc")]
        {
            // RenderDoc requires us to give it the pointer that the
            // VkInstance handle _points to_ (the dispatch table).
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = *raw_vk_instance;
            self.render_doc
                .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
        }
        #[cfg(not(feature = "renderdoc"))]
        false
    }

    unsafe fn stop_capture(&self) {
        #[cfg(feature = "renderdoc")]
        {
            // RenderDoc requires us to give it the pointer that the
            // VkInstance handle _points to_ (the dispatch table).
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = *raw_vk_instance;

            self.render_doc
                .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
        }
    }
}

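// Fold allocator errors into wgpu-hal's coarse `DeviceError` categories:
// genuine out-of-memory is reported as such; anything the backend cannot
// express is logged and mapped to the nearest variant.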
impl From<gpu_alloc::AllocationError> for crate::DeviceError {
    fn from(error: gpu_alloc::AllocationError) -> Self {
        use gpu_alloc::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory allocation: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_alloc::MapError> for crate::DeviceError {
    fn from(error: gpu_alloc::MapError) -> Self {
        use gpu_alloc::MapError as Me;
        match error {
            Me::OutOfDeviceMemory | Me::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory mapping: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        log::error!("descriptor allocation: {:?}", error);
        Self::OutOfMemory
    }
}