use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::ext, vk};
use inplace_it::inplace_or_alloc_from_iter;

use std::{mem, ops::Range, slice};

const ALLOCATION_GRANULARITY: u32 = 16;
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
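    /// Map `wgpu-hal` buffer<->texture copy regions to `vk::BufferImageCopy`.
    ///
    /// Vulkan measures `buffer_row_length` and `buffer_image_height` in
    /// texels (whole blocks for compressed formats), while `wgpu-hal`
    /// supplies bytes per row and rows per image, so the closure below
    /// rescales by the format's block dimensions. Rough worked example
    /// (values assumed for illustration, not taken from the source): a
    /// BC1-style format with 4x4-texel, 8-byte blocks and
    /// `bytes_per_row = 256` yields `4 * (256 / 8) = 128` texels per row.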
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let aspects = self.aspects;
        let fi = self.format_info;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) =
                conv::map_subresource_layers(&r.texture_base, aspects);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    fi.block_dimensions.0 as u32 * (bpr.get() / fi.block_size as u32)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi.get() * fi.block_dimensions.1 as u32),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::DeviceShared {
    fn debug_messenger(&self) -> Option<&ext::DebugUtils> {
        Some(&self.instance.debug_utils.as_ref()?.extension)
    }
}

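// A rough sketch of how the trait below is typically driven (illustrative
// only; encoder setup and error handling elided):
//
//     encoder.begin_encoding(Some("my pass"))?;  // grab or allocate a raw vk::CommandBuffer
//     /* record transitions, copies, passes ... */
//     let cmd_buf = encoder.end_encoding()?;     // handed off for queue submission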
impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        if self.free.is_empty() {
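            // Allocating in batches of ALLOCATION_GRANULARITY amortizes the
            // cost of `vkAllocateCommandBuffers` across several encodings.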
            let vk_info = vk::CommandBufferAllocateInfo::builder()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY)
                .build();
            let cmd_buf_vec = self.device.raw.allocate_command_buffers(&vk_info)?;
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        // Set the name unconditionally, since a previous name may still be
        // attached to this command buffer.
        self.device.set_object_name(
            vk::ObjectType::COMMAND_BUFFER,
            raw,
            label.unwrap_or_default(),
        );

        // Reset this in case the last render pass was never ended.
        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::builder()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
            .build();
        self.device.raw.begin_command_buffer(raw, &vk_info)?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        self.device.raw.end_command_buffer(raw)?;
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = self
            .device
            .raw
            .reset_command_pool(self.raw, vk::CommandPoolResetFlags::RELEASE_RESOURCES);
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        // Note: seed the stage masks with TOP/BOTTOM_OF_PIPE so that we never
        // end up with empty stage flags, which `vkCmdPipelineBarrier` forbids.
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.start);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.end);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::builder()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .build(),
            )
        }

        if !vk_barriers.is_empty() {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stages,
                dst_stages,
                vk::DependencyFlags::empty(),
                &[],
                vk_barriers,
                &[],
            );
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range(&bar.range, bar.texture.aspects);
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.start);
            let src_layout = conv::derive_image_layout(bar.usage.start, bar.texture.aspects);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.end);
            let dst_layout = conv::derive_image_layout(bar.usage.end, bar.texture.aspects);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::builder()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout)
                    .build(),
            );
        }

        if !vk_barriers.is_empty() {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stages,
                dst_stages,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                vk_barriers,
            );
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        self.device.raw.cmd_fill_buffer(
            self.active,
            buffer.raw,
            range.start,
            range.end - range.start,
            0,
        );
    }

    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

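        // `inplace_or_alloc_from_iter` collects the regions on the stack when
        // the count is small, only falling back to a heap allocation for
        // larger batches.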
        inplace_or_alloc_from_iter(vk_regions_iter, |vk_regions| {
            self.device
                .raw
                .cmd_copy_buffer(self.active, src.raw, dst.raw, vk_regions)
        })
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.aspects);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) =
                conv::map_subresource_layers(&r.src_base, src.aspects);
            let (dst_subresource, dst_offset) =
                conv::map_subresource_layers(&r.dst_base, dst.aspects);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        inplace_or_alloc_from_iter(vk_regions_iter, |vk_regions| {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                vk_regions,
            );
        });
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        inplace_or_alloc_from_iter(vk_regions_iter, |vk_regions| {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                vk_regions,
            );
        });
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.aspects);
        let vk_regions_iter = src.map_buffer_copies(regions);

        inplace_or_alloc_from_iter(vk_regions_iter, |vk_regions| {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                vk_regions,
            );
        });
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        self.device.raw.cmd_begin_query(
            self.active,
            set.raw,
            index,
            vk::QueryControlFlags::empty(),
        );
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        self.device.raw.cmd_end_query(self.active, set.raw, index);
    }
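    // Writing the timestamp at BOTTOM_OF_PIPE makes it record once all
    // previously submitted commands have finished executing.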
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        self.device.raw.cmd_write_timestamp(
            self.active,
            vk::PipelineStageFlags::BOTTOM_OF_PIPE,
            set.raw,
            index,
        );
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        self.device.raw.cmd_reset_query_pool(
            self.active,
            set.raw,
            range.start,
            range.end - range.start,
        );
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        self.device.raw.cmd_copy_query_pool_results(
            self.active,
            set.raw,
            range.start,
            range.end - range.start,
            buffer.raw,
            offset,
            stride.get(),
            vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
        );
    }

    // render

    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            vk_clear_values.push(vk::ClearValue {
                color: cat.make_vk_clear_color(),
            });
            vk_image_views.push(cat.target.view.raw);
            rp_key.colors.push(super::ColorAttachmentKey {
                base: cat.target.make_attachment_key(cat.ops, caps),
                resolve: cat
                    .resolve_target
                    .as_ref()
                    .map(|target| target.make_attachment_key(crate::AttachmentOps::STORE, caps)),
            });
            fb_key.attachments.push(cat.target.view.attachment.clone());
            if let Some(ref at) = cat.resolve_target {
                vk_clear_values.push(mem::zeroed());
                vk_image_views.push(at.view.raw);
                fb_key.attachments.push(at.view.attachment.clone());
            }

            // Sanity-check that this attachment is valid for the detected
            // multiview; the resulting driver crash on AMD is bad enough to
            // make the check worth it.
            if let Some(multiview) = desc.multiview {
                assert_eq!(cat.target.view.layers, multiview);
                if let Some(ref resolve_target) = cat.resolve_target {
                    assert_eq!(resolve_target.view.layers, multiview);
                }
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            // Sanity-check that this attachment is valid for the detected
            // multiview; the resulting driver crash on AMD is bad enough to
            // make the check worth it.
            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
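        // Flip Y via a negative viewport height (core in Vulkan 1.1, or
        // VK_KHR_maintenance1); some implementations additionally need the
        // origin shifted down by the full height, hence the capability check.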
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];
        let vk_scissors = [render_area];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();

        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_attachment_info = vk::RenderPassAttachmentBeginInfo::builder()
            .attachments(&vk_image_views)
            .build();
        let mut vk_info = vk::RenderPassBeginInfo::builder()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
        if caps.imageless_framebuffers {
            vk_info = vk_info.push_next(&mut vk_attachment_info);
        }

        if let Some(label) = desc.label {
            self.begin_debug_marker(label);
            self.rpass_debug_marker_active = true;
        }

        self.device
            .raw
            .cmd_set_viewport(self.active, 0, &vk_viewports);
        self.device
            .raw
            .cmd_set_scissor(self.active, 0, &vk_scissors);
        self.device
            .raw
            .cmd_begin_render_pass(self.active, &vk_info, vk::SubpassContents::INLINE);

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        self.device.raw.cmd_end_render_pass(self.active);
        if self.rpass_debug_marker_active {
            self.end_debug_marker();
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        self.device.raw.cmd_bind_descriptor_sets(
            self.active,
            self.bind_point,
            layout.raw,
            index,
            &sets,
            dynamic_offsets,
        );
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset: u32,
        data: &[u32],
    ) {
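        // `data` is a slice of u32 words, while Vulkan takes a byte slice,
        // hence the reinterpreting cast and the `* 4` length below.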
        self.device.raw.cmd_push_constants(
            self.active,
            layout.raw,
            conv::map_shader_stage(stages),
            offset,
            slice::from_raw_parts(data.as_ptr() as _, data.len() * 4),
        );
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            ext.cmd_insert_debug_utils_label(self.active, &vk_label);
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            ext.cmd_begin_debug_utils_label(self.active, &vk_label);
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.debug_messenger() {
            ext.cmd_end_debug_utils_label(self.active);
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.device.raw.cmd_bind_pipeline(
            self.active,
            vk::PipelineBindPoint::GRAPHICS,
            pipeline.raw,
        );
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        self.device.raw.cmd_bind_index_buffer(
            self.active,
            binding.buffer.raw,
            binding.offset,
            conv::map_index_format(format),
        );
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        self.device
            .raw
            .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets);
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h, // flip Y
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        self.device
            .raw
            .cmd_set_viewport(self.active, 0, &vk_viewports);
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        self.device
            .raw
            .cmd_set_scissor(self.active, 0, &vk_scissors);
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        self.device.raw.cmd_set_stencil_reference(
            self.active,
            vk::StencilFaceFlags::FRONT_AND_BACK,
            value,
        );
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.device.raw.cmd_set_blend_constants(self.active, color);
    }

    unsafe fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.device.raw.cmd_draw(
            self.active,
            vertex_count,
            instance_count,
            start_vertex,
            start_instance,
        );
    }
    unsafe fn draw_indexed(
        &mut self,
        start_index: u32,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.device.raw.cmd_draw_indexed(
            self.active,
            index_count,
            instance_count,
            start_index,
            base_vertex,
            start_instance,
        );
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.device.raw.cmd_draw_indirect(
            self.active,
            buffer.raw,
            offset,
            draw_count,
            mem::size_of::<wgt::DrawIndirectArgs>() as u32,
        );
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.device.raw.cmd_draw_indexed_indirect(
            self.active,
            buffer.raw,
            offset,
            draw_count,
            mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
        );
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
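        // The count variant may come either from the
        // VK_KHR_draw_indirect_count extension or, on Vulkan 1.2+, from the
        // promoted core entry point.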
        let stride = mem::size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(super::ExtensionFn::Extension(ref t)) => {
                t.cmd_draw_indirect_count(
                    self.active,
                    buffer.raw,
                    offset,
                    count_buffer.raw,
                    count_offset,
                    max_count,
                    stride,
                );
            }
            Some(super::ExtensionFn::Promoted) => {
                self.device.raw.cmd_draw_indirect_count(
                    self.active,
                    buffer.raw,
                    offset,
                    count_buffer.raw,
                    count_offset,
                    max_count,
                    stride,
                );
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(super::ExtensionFn::Extension(ref t)) => {
                t.cmd_draw_indexed_indirect_count(
                    self.active,
                    buffer.raw,
                    offset,
                    count_buffer.raw,
                    count_offset,
                    max_count,
                    stride,
                );
            }
            Some(super::ExtensionFn::Promoted) => {
                self.device.raw.cmd_draw_indexed_indirect_count(
                    self.active,
                    buffer.raw,
                    offset,
                    count_buffer.raw,
                    count_offset,
                    max_count,
                    stride,
                );
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }

    // compute

    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            self.begin_debug_marker(label);
            self.rpass_debug_marker_active = true;
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        if self.rpass_debug_marker_active {
            self.end_debug_marker();
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.device.raw.cmd_bind_pipeline(
            self.active,
            vk::PipelineBindPoint::COMPUTE,
            pipeline.raw,
        );
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.device
            .raw
            .cmd_dispatch(self.active, count[0], count[1], count[2]);
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        self.device
            .raw
            .cmd_dispatch_indirect(self.active, buffer.raw, offset)
    }
}

#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(crate::TextureUses::COPY_DST, crate::FormatAspects::empty()),
        DST_IMAGE_LAYOUT
    );
}