1 use super::{conv, Command as C};
2 use arrayvec::ArrayVec;
3 use std::{mem, ops::Range};
4 
/// Tracked binding state of one texture unit: the target the texture is
/// bound to, plus the sampler slot (if any) currently paired with it.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    // Binding target (e.g. a `glow` texture target) used for this slot.
    tex_target: super::BindTarget,
    // Index into `State::samplers`, or `None` when no sampler is paired
    // with this texture unit.
    sampler_index: Option<u8>,
}
10 
/// Encoder-side mirror of the GL state, used to diff incoming pipeline and
/// binding changes so only the necessary commands are recorded.
#[derive(Default)]
pub(super) struct State {
    // GL primitive topology, derived from the pipeline's topology.
    topology: u32,
    // Last primitive state pushed; compared against to skip redundant sets.
    primitive: super::PrimitiveState,
    // Index buffer format/offset recorded by `set_index_buffer`.
    index_format: wgt::IndexFormat,
    index_offset: wgt::BufferAddress,
    // Per-slot vertex buffer layout and the currently bound buffer (if any).
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    // Attributes copied from the pipeline when the separate
    // VERTEX_BUFFER_LAYOUT capability is unavailable.
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    // Color target descriptors of the current pipeline, for blend diffing.
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_TARGETS }>,
    // Stencil state of the current pipeline.
    stencil: super::StencilState,
    // Depth bias of the current pipeline, for change detection.
    depth_bias: wgt::DepthBiasState,
    // Samplers by slot, filled in by `set_bind_group`.
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    // Per-texture-unit binding info (target + paired sampler index).
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    // Extent of the current render pass (viewport/scissor/resolve size).
    render_size: wgt::Extent3d,
    // Pending MSAA resolves, drained when the pass ends.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_TARGETS }>,
    // Attachments to invalidate at the end of the pass (non-STORE ops).
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_TARGETS + 2 }>,
    // Whether a debug group was pushed for the pass label.
    has_pass_label: bool,
    // Bit mask of vertex buffer slots stepped per instance.
    instance_vbuf_mask: usize,
    // Bit mask of vertex buffer slots that need rebinding before a draw.
    dirty_vbuf_mask: usize,
}
32 
33 impl super::CommandBuffer {
clear(&mut self)34     fn clear(&mut self) {
35         self.label = None;
36         self.commands.clear();
37         self.data_bytes.clear();
38         self.queries.clear();
39     }
40 
add_marker(&mut self, marker: &str) -> Range<u32>41     fn add_marker(&mut self, marker: &str) -> Range<u32> {
42         let start = self.data_bytes.len() as u32;
43         self.data_bytes.extend(marker.as_bytes());
44         start..self.data_bytes.len() as u32
45     }
46 }
47 
impl super::CommandEncoder {
    /// Emit `SetStencilFunc` commands matching the tracked stencil state.
    ///
    /// When front and back faces agree on function, read mask and
    /// reference, a single `FRONT_AND_BACK` command is emitted; otherwise
    /// one command per face.
    fn rebind_stencil_func(&mut self) {
        // Build the command for one face from its stencil side.
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Re-issue vertex buffer/attribute bindings for every slot flagged in
    /// `state.dirty_vbuf_mask`. For instance-stepped buffers,
    /// `first_instance` is folded into the binding offset.
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Capable path: bind whole vertex buffers; the attribute layout
            // comes separately from the pipeline.
            for (index, &(ref vb_desc, ref vb)) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let vb = vb.as_ref().unwrap();
                // Instance-rate buffers are shifted by whole strides.
                let instance_offset = match vb_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * vb_desc.stride,
                };
                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc: vb_desc.clone(),
                });
            }
        } else {
            // Fallback path: bind each attribute individually, with the
            // buffer offset baked into the attribute offset.
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, buffer) =
                    self.state.vertex_buffers[attribute.buffer_index as usize].clone();

                let mut attribute_desc = attribute.clone();
                let vb = buffer.unwrap();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
            }
        }
    }

    /// Push `BindSampler` commands for every texture slot whose texture
    /// bit or associated sampler bit is set in the dirty masks.
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            // A slot needs refreshing if its own texture is dirty, or the
            // sampler it references is dirty.
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .map_or(false, |si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Flush pending vertex-buffer bindings before a draw.
    ///
    /// A non-zero `first_instance` forces instance-rate buffers to be
    /// rebound, since their effective binding offset depends on it.
    fn prepare_draw(&mut self, first_instance: u32) {
        if first_instance != 0 {
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(first_instance);
            // Clear only the vertex-rate bits; instance-rate buffers stay
            // dirty because their offset changes with `first_instance`.
            let vertex_rate_mask = self.state.dirty_vbuf_mask & !self.state.instance_vbuf_mask;
            self.state.dirty_vbuf_mask ^= vertex_rate_mask;
        }
    }

    /// Bind a pipeline's program and refresh texture/sampler pairings.
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        //TODO: push constants
        let _ = &inner.uniforms;

        // Rebind textures if needed: diff the pipeline's texture->sampler
        // map against the currently tracked slots.
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
173 
174 impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError>175     unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
176         self.state = State::default();
177         self.cmd_buffer.label = label.map(str::to_string);
178         Ok(())
179     }
discard_encoding(&mut self)180     unsafe fn discard_encoding(&mut self) {
181         self.cmd_buffer.clear();
182     }
end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError>183     unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
184         Ok(mem::take(&mut self.cmd_buffer))
185     }
reset_all<I>(&mut self, _command_buffers: I)186     unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
187         //TODO: could re-use the allocations in all these command buffers
188     }
189 
transition_buffers<'a, T>(&mut self, barriers: T) where T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,190     unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
191     where
192         T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
193     {
194         if !self
195             .private_caps
196             .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
197         {
198             return;
199         }
200         for bar in barriers {
201             // GLES only synchronizes storage -> anything explicitly
202             if !bar.usage.start.contains(crate::BufferUses::STORAGE_WRITE) {
203                 continue;
204             }
205             self.cmd_buffer
206                 .commands
207                 .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.end));
208         }
209     }
210 
transition_textures<'a, T>(&mut self, barriers: T) where T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,211     unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
212     where
213         T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
214     {
215         if !self
216             .private_caps
217             .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
218         {
219             return;
220         }
221 
222         let mut combined_usage = crate::TextureUses::empty();
223         for bar in barriers {
224             // GLES only synchronizes storage -> anything explicitly
225             if !bar.usage.start.contains(crate::TextureUses::STORAGE_WRITE) {
226                 continue;
227             }
228             // unlike buffers, there is no need for a concrete texture
229             // object to be bound anywhere for a barrier
230             combined_usage |= bar.usage.end;
231         }
232 
233         if !combined_usage.is_empty() {
234             self.cmd_buffer
235                 .commands
236                 .push(C::TextureBarrier(combined_usage));
237         }
238     }
239 
clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange)240     unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
241         self.cmd_buffer.commands.push(C::ClearBuffer {
242             dst: buffer.clone(),
243             dst_target: buffer.target,
244             range,
245         });
246     }
247 
copy_buffer_to_buffer<T>( &mut self, src: &super::Buffer, dst: &super::Buffer, regions: T, ) where T: Iterator<Item = crate::BufferCopy>,248     unsafe fn copy_buffer_to_buffer<T>(
249         &mut self,
250         src: &super::Buffer,
251         dst: &super::Buffer,
252         regions: T,
253     ) where
254         T: Iterator<Item = crate::BufferCopy>,
255     {
256         let (src_target, dst_target) = if src.target == dst.target {
257             (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
258         } else {
259             (src.target, dst.target)
260         };
261         for copy in regions {
262             self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
263                 src: src.clone(),
264                 src_target,
265                 dst: dst.clone(),
266                 dst_target,
267                 copy,
268             })
269         }
270     }
271 
copy_texture_to_texture<T>( &mut self, src: &super::Texture, _src_usage: crate::TextureUses, dst: &super::Texture, regions: T, ) where T: Iterator<Item = crate::TextureCopy>,272     unsafe fn copy_texture_to_texture<T>(
273         &mut self,
274         src: &super::Texture,
275         _src_usage: crate::TextureUses,
276         dst: &super::Texture,
277         regions: T,
278     ) where
279         T: Iterator<Item = crate::TextureCopy>,
280     {
281         let (src_raw, src_target) = src.inner.as_native();
282         let (dst_raw, dst_target) = dst.inner.as_native();
283         for mut copy in regions {
284             copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
285             self.cmd_buffer.commands.push(C::CopyTextureToTexture {
286                 src: src_raw,
287                 src_target,
288                 dst: dst_raw,
289                 dst_target,
290                 copy,
291             })
292         }
293     }
294 
copy_buffer_to_texture<T>( &mut self, src: &super::Buffer, dst: &super::Texture, regions: T, ) where T: Iterator<Item = crate::BufferTextureCopy>,295     unsafe fn copy_buffer_to_texture<T>(
296         &mut self,
297         src: &super::Buffer,
298         dst: &super::Texture,
299         regions: T,
300     ) where
301         T: Iterator<Item = crate::BufferTextureCopy>,
302     {
303         let (dst_raw, dst_target) = dst.inner.as_native();
304 
305         for mut copy in regions {
306             copy.clamp_size_to_virtual(&dst.copy_size);
307             self.cmd_buffer.commands.push(C::CopyBufferToTexture {
308                 src: src.clone(),
309                 src_target: src.target,
310                 dst: dst_raw,
311                 dst_target,
312                 dst_format: dst.format,
313                 copy,
314             })
315         }
316     }
317 
copy_texture_to_buffer<T>( &mut self, src: &super::Texture, _src_usage: crate::TextureUses, dst: &super::Buffer, regions: T, ) where T: Iterator<Item = crate::BufferTextureCopy>,318     unsafe fn copy_texture_to_buffer<T>(
319         &mut self,
320         src: &super::Texture,
321         _src_usage: crate::TextureUses,
322         dst: &super::Buffer,
323         regions: T,
324     ) where
325         T: Iterator<Item = crate::BufferTextureCopy>,
326     {
327         let (src_raw, src_target) = src.inner.as_native();
328         for mut copy in regions {
329             copy.clamp_size_to_virtual(&src.copy_size);
330             self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
331                 src: src_raw,
332                 src_target,
333                 src_format: src.format,
334                 dst: dst.clone(),
335                 dst_target: dst.target,
336                 copy,
337             })
338         }
339     }
340 
begin_query(&mut self, set: &super::QuerySet, index: u32)341     unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
342         let query = set.queries[index as usize];
343         self.cmd_buffer
344             .commands
345             .push(C::BeginQuery(query, set.target));
346     }
end_query(&mut self, set: &super::QuerySet, _index: u32)347     unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
348         self.cmd_buffer.commands.push(C::EndQuery(set.target));
349     }
write_timestamp(&mut self, _set: &super::QuerySet, _index: u32)350     unsafe fn write_timestamp(&mut self, _set: &super::QuerySet, _index: u32) {
351         unimplemented!()
352     }
reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>)353     unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
354         //TODO: what do we do here?
355     }
copy_query_results( &mut self, set: &super::QuerySet, range: Range<u32>, buffer: &super::Buffer, offset: wgt::BufferAddress, _stride: wgt::BufferSize, )356     unsafe fn copy_query_results(
357         &mut self,
358         set: &super::QuerySet,
359         range: Range<u32>,
360         buffer: &super::Buffer,
361         offset: wgt::BufferAddress,
362         _stride: wgt::BufferSize,
363     ) {
364         let start = self.cmd_buffer.queries.len();
365         self.cmd_buffer
366             .queries
367             .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
368         let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
369         self.cmd_buffer.commands.push(C::CopyQueryResults {
370             query_range,
371             dst: buffer.clone(),
372             dst_target: buffer.target,
373             dst_offset: offset,
374         });
375     }
376 
377     // render
378 
begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>)379     unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
380         self.state.render_size = desc.extent;
381         self.state.resolve_attachments.clear();
382         self.state.invalidate_attachments.clear();
383         if let Some(label) = desc.label {
384             let range = self.cmd_buffer.add_marker(label);
385             self.cmd_buffer.commands.push(C::PushDebugGroup(range));
386             self.state.has_pass_label = true;
387         }
388 
389         match desc
390             .color_attachments
391             .first()
392             .map(|at| &at.target.view.inner)
393         {
394             // default framebuffer (provided externally)
395             Some(&super::TextureInner::DefaultRenderbuffer) => {
396                 self.cmd_buffer
397                     .commands
398                     .push(C::ResetFramebuffer { is_default: true });
399             }
400             _ => {
401                 // set the framebuffer
402                 self.cmd_buffer
403                     .commands
404                     .push(C::ResetFramebuffer { is_default: false });
405 
406                 for (i, cat) in desc.color_attachments.iter().enumerate() {
407                     let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
408                     self.cmd_buffer.commands.push(C::BindAttachment {
409                         attachment,
410                         view: cat.target.view.clone(),
411                     });
412                     if let Some(ref rat) = cat.resolve_target {
413                         self.state
414                             .resolve_attachments
415                             .push((attachment, rat.view.clone()));
416                     }
417                     if !cat.ops.contains(crate::AttachmentOps::STORE) {
418                         self.state.invalidate_attachments.push(attachment);
419                     }
420                 }
421                 if let Some(ref dsat) = desc.depth_stencil_attachment {
422                     let aspects = dsat.target.view.aspects;
423                     let attachment = match aspects {
424                         crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
425                         crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
426                         _ => glow::DEPTH_STENCIL_ATTACHMENT,
427                     };
428                     self.cmd_buffer.commands.push(C::BindAttachment {
429                         attachment,
430                         view: dsat.target.view.clone(),
431                     });
432                     if aspects.contains(crate::FormatAspects::DEPTH)
433                         && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
434                     {
435                         self.state
436                             .invalidate_attachments
437                             .push(glow::DEPTH_ATTACHMENT);
438                     }
439                     if aspects.contains(crate::FormatAspects::STENCIL)
440                         && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
441                     {
442                         self.state
443                             .invalidate_attachments
444                             .push(glow::STENCIL_ATTACHMENT);
445                     }
446                 }
447 
448                 // set the draw buffers and states
449                 self.cmd_buffer
450                     .commands
451                     .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
452             }
453         }
454 
455         let rect = crate::Rect {
456             x: 0,
457             y: 0,
458             w: desc.extent.width as i32,
459             h: desc.extent.height as i32,
460         };
461         self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
462         self.cmd_buffer.commands.push(C::SetViewport {
463             rect,
464             depth: 0.0..1.0,
465         });
466 
467         // issue the clears
468         for (i, cat) in desc.color_attachments.iter().enumerate() {
469             if !cat.ops.contains(crate::AttachmentOps::LOAD) {
470                 let c = &cat.clear_value;
471                 self.cmd_buffer
472                     .commands
473                     .push(match cat.target.view.sample_type {
474                         wgt::TextureSampleType::Float { .. } => C::ClearColorF {
475                             draw_buffer: i as u32,
476                             color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
477                             is_srgb: cat.target.view.format.describe().srgb,
478                         },
479                         wgt::TextureSampleType::Depth => unimplemented!(),
480                         wgt::TextureSampleType::Uint => C::ClearColorU(
481                             i as u32,
482                             [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
483                         ),
484                         wgt::TextureSampleType::Sint => C::ClearColorI(
485                             i as u32,
486                             [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
487                         ),
488                     });
489             }
490         }
491         if let Some(ref dsat) = desc.depth_stencil_attachment {
492             if !dsat.depth_ops.contains(crate::AttachmentOps::LOAD) {
493                 self.cmd_buffer
494                     .commands
495                     .push(C::ClearDepth(dsat.clear_value.0));
496             }
497             if !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD) {
498                 self.cmd_buffer
499                     .commands
500                     .push(C::ClearStencil(dsat.clear_value.1));
501             }
502         }
503     }
end_render_pass(&mut self)504     unsafe fn end_render_pass(&mut self) {
505         for (attachment, dst) in self.state.resolve_attachments.drain(..) {
506             self.cmd_buffer.commands.push(C::ResolveAttachment {
507                 attachment,
508                 dst,
509                 size: self.state.render_size,
510             });
511         }
512         if !self.state.invalidate_attachments.is_empty() {
513             self.cmd_buffer.commands.push(C::InvalidateAttachments(
514                 self.state.invalidate_attachments.clone(),
515             ));
516             self.state.invalidate_attachments.clear();
517         }
518         if self.state.has_pass_label {
519             self.cmd_buffer.commands.push(C::PopDebugGroup);
520             self.state.has_pass_label = false;
521         }
522         self.state.instance_vbuf_mask = 0;
523         self.state.dirty_vbuf_mask = 0;
524         self.state.color_targets.clear();
525         self.state.vertex_attributes.clear();
526         self.state.primitive = super::PrimitiveState::default();
527     }
528 
set_bind_group( &mut self, layout: &super::PipelineLayout, index: u32, group: &super::BindGroup, dynamic_offsets: &[wgt::DynamicOffset], )529     unsafe fn set_bind_group(
530         &mut self,
531         layout: &super::PipelineLayout,
532         index: u32,
533         group: &super::BindGroup,
534         dynamic_offsets: &[wgt::DynamicOffset],
535     ) {
536         let mut do_index = 0;
537         let mut dirty_textures = 0u32;
538         let mut dirty_samplers = 0u32;
539         let group_info = &layout.group_infos[index as usize];
540 
541         for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
542             let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
543             match *raw_binding {
544                 super::RawBinding::Buffer {
545                     raw,
546                     offset: base_offset,
547                     size,
548                 } => {
549                     let mut offset = base_offset;
550                     let target = match binding_layout.ty {
551                         wgt::BindingType::Buffer {
552                             ty,
553                             has_dynamic_offset,
554                             min_binding_size: _,
555                         } => {
556                             if has_dynamic_offset {
557                                 offset += dynamic_offsets[do_index] as i32;
558                                 do_index += 1;
559                             }
560                             match ty {
561                                 wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
562                                 wgt::BufferBindingType::Storage { .. } => {
563                                     glow::SHADER_STORAGE_BUFFER
564                                 }
565                             }
566                         }
567                         _ => unreachable!(),
568                     };
569                     self.cmd_buffer.commands.push(C::BindBuffer {
570                         target,
571                         slot,
572                         buffer: raw,
573                         offset,
574                         size,
575                     });
576                 }
577                 super::RawBinding::Sampler(sampler) => {
578                     dirty_samplers |= 1 << slot;
579                     self.state.samplers[slot as usize] = Some(sampler);
580                 }
581                 super::RawBinding::Texture { raw, target } => {
582                     dirty_textures |= 1 << slot;
583                     self.state.texture_slots[slot as usize].tex_target = target;
584                     self.cmd_buffer.commands.push(C::BindTexture {
585                         slot,
586                         texture: raw,
587                         target,
588                     });
589                 }
590                 super::RawBinding::Image(ref binding) => {
591                     self.cmd_buffer.commands.push(C::BindImage {
592                         slot,
593                         binding: binding.clone(),
594                     });
595                 }
596             }
597         }
598 
599         self.rebind_sampler_states(dirty_textures, dirty_samplers);
600     }
601 
set_push_constants( &mut self, _layout: &super::PipelineLayout, _stages: wgt::ShaderStages, _offset: u32, _data: &[u32], )602     unsafe fn set_push_constants(
603         &mut self,
604         _layout: &super::PipelineLayout,
605         _stages: wgt::ShaderStages,
606         _offset: u32,
607         _data: &[u32],
608     ) {
609         unimplemented!()
610     }
611 
insert_debug_marker(&mut self, label: &str)612     unsafe fn insert_debug_marker(&mut self, label: &str) {
613         let range = self.cmd_buffer.add_marker(label);
614         self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
615     }
begin_debug_marker(&mut self, group_label: &str)616     unsafe fn begin_debug_marker(&mut self, group_label: &str) {
617         let range = self.cmd_buffer.add_marker(group_label);
618         self.cmd_buffer.commands.push(C::PushDebugGroup(range));
619     }
end_debug_marker(&mut self)620     unsafe fn end_debug_marker(&mut self) {
621         self.cmd_buffer.commands.push(C::PopDebugGroup);
622     }
623 
set_render_pipeline(&mut self, pipeline: &super::RenderPipeline)624     unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
625         self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);
626 
627         if self
628             .private_caps
629             .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
630         {
631             for vat in pipeline.vertex_attributes.iter() {
632                 let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
633                 // set the layout
634                 self.cmd_buffer.commands.push(C::SetVertexAttribute {
635                     buffer: None,
636                     buffer_desc: vb.clone(),
637                     attribute_desc: vat.clone(),
638                 });
639             }
640         } else {
641             for index in 0..self.state.vertex_attributes.len() {
642                 self.cmd_buffer
643                     .commands
644                     .push(C::UnsetVertexAttribute(index as u32));
645             }
646             self.state.vertex_attributes.clear();
647 
648             self.state.dirty_vbuf_mask = 0;
649             // copy vertex attributes
650             for vat in pipeline.vertex_attributes.iter() {
651                 //Note: we can invalidate more carefully here.
652                 self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
653                 self.state.vertex_attributes.push(vat.clone());
654             }
655         }
656 
657         self.state.instance_vbuf_mask = 0;
658         // copy vertex state
659         for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
660             .state
661             .vertex_buffers
662             .iter_mut()
663             .zip(pipeline.vertex_buffers.iter())
664             .enumerate()
665         {
666             if pipe_desc.step == wgt::VertexStepMode::Instance {
667                 self.state.instance_vbuf_mask |= 1 << index;
668             }
669             if state_desc != pipe_desc {
670                 self.state.dirty_vbuf_mask |= 1 << index;
671                 *state_desc = pipe_desc.clone();
672             }
673         }
674 
675         self.set_pipeline_inner(&pipeline.inner);
676 
677         // set primitive state
678         let prim_state = conv::map_primitive_state(&pipeline.primitive);
679         if prim_state != self.state.primitive {
680             self.cmd_buffer
681                 .commands
682                 .push(C::SetPrimitive(prim_state.clone()));
683             self.state.primitive = prim_state;
684         }
685 
686         // set depth/stencil states
687         let mut aspects = crate::FormatAspects::empty();
688         if pipeline.depth_bias != self.state.depth_bias {
689             self.state.depth_bias = pipeline.depth_bias;
690             self.cmd_buffer
691                 .commands
692                 .push(C::SetDepthBias(pipeline.depth_bias));
693         }
694         if let Some(ref depth) = pipeline.depth {
695             aspects |= crate::FormatAspects::DEPTH;
696             self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
697         }
698         if let Some(ref stencil) = pipeline.stencil {
699             aspects |= crate::FormatAspects::STENCIL;
700             self.state.stencil = stencil.clone();
701             self.rebind_stencil_func();
702             if stencil.front.ops == stencil.back.ops
703                 && stencil.front.mask_write == stencil.back.mask_write
704             {
705                 self.cmd_buffer.commands.push(C::SetStencilOps {
706                     face: glow::FRONT_AND_BACK,
707                     write_mask: stencil.front.mask_write,
708                     ops: stencil.front.ops.clone(),
709                 });
710             } else {
711                 self.cmd_buffer.commands.push(C::SetStencilOps {
712                     face: glow::FRONT,
713                     write_mask: stencil.front.mask_write,
714                     ops: stencil.front.ops.clone(),
715                 });
716                 self.cmd_buffer.commands.push(C::SetStencilOps {
717                     face: glow::BACK,
718                     write_mask: stencil.back.mask_write,
719                     ops: stencil.back.ops.clone(),
720                 });
721             }
722         }
723         self.cmd_buffer
724             .commands
725             .push(C::ConfigureDepthStencil(aspects));
726 
727         // set blend states
728         if self.state.color_targets[..] != pipeline.color_targets[..] {
729             if pipeline
730                 .color_targets
731                 .iter()
732                 .skip(1)
733                 .any(|ct| *ct != pipeline.color_targets[0])
734             {
735                 for (index, ct) in pipeline.color_targets.iter().enumerate() {
736                     self.cmd_buffer.commands.push(C::SetColorTarget {
737                         draw_buffer_index: Some(index as u32),
738                         desc: ct.clone(),
739                     });
740                 }
741             } else {
742                 self.cmd_buffer.commands.push(C::SetColorTarget {
743                     draw_buffer_index: None,
744                     desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
745                 });
746             }
747         }
748         self.state.color_targets.clear();
749         for ct in pipeline.color_targets.iter() {
750             self.state.color_targets.push(ct.clone());
751         }
752     }
753 
set_index_buffer<'a>( &mut self, binding: crate::BufferBinding<'a, super::Api>, format: wgt::IndexFormat, )754     unsafe fn set_index_buffer<'a>(
755         &mut self,
756         binding: crate::BufferBinding<'a, super::Api>,
757         format: wgt::IndexFormat,
758     ) {
759         self.state.index_offset = binding.offset;
760         self.state.index_format = format;
761         self.cmd_buffer
762             .commands
763             .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
764     }
set_vertex_buffer<'a>( &mut self, index: u32, binding: crate::BufferBinding<'a, super::Api>, )765     unsafe fn set_vertex_buffer<'a>(
766         &mut self,
767         index: u32,
768         binding: crate::BufferBinding<'a, super::Api>,
769     ) {
770         self.state.dirty_vbuf_mask |= 1 << index;
771         let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
772         *vb = Some(super::BufferBinding {
773             raw: binding.buffer.raw.unwrap(),
774             offset: binding.offset,
775         });
776     }
set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>)777     unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
778         self.cmd_buffer.commands.push(C::SetViewport {
779             rect: crate::Rect {
780                 x: rect.x as i32,
781                 y: rect.y as i32,
782                 w: rect.w as i32,
783                 h: rect.h as i32,
784             },
785             depth,
786         });
787     }
set_scissor_rect(&mut self, rect: &crate::Rect<u32>)788     unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
789         self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
790             x: rect.x as i32,
791             y: rect.y as i32,
792             w: rect.w as i32,
793             h: rect.h as i32,
794         }));
795     }
set_stencil_reference(&mut self, value: u32)796     unsafe fn set_stencil_reference(&mut self, value: u32) {
797         self.state.stencil.front.reference = value;
798         self.state.stencil.back.reference = value;
799         self.rebind_stencil_func();
800     }
set_blend_constants(&mut self, color: &[f32; 4])801     unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
802         self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
803     }
804 
draw( &mut self, start_vertex: u32, vertex_count: u32, start_instance: u32, instance_count: u32, )805     unsafe fn draw(
806         &mut self,
807         start_vertex: u32,
808         vertex_count: u32,
809         start_instance: u32,
810         instance_count: u32,
811     ) {
812         self.prepare_draw(start_instance);
813         self.cmd_buffer.commands.push(C::Draw {
814             topology: self.state.topology,
815             start_vertex,
816             vertex_count,
817             instance_count,
818         });
819     }
draw_indexed( &mut self, start_index: u32, index_count: u32, base_vertex: i32, start_instance: u32, instance_count: u32, )820     unsafe fn draw_indexed(
821         &mut self,
822         start_index: u32,
823         index_count: u32,
824         base_vertex: i32,
825         start_instance: u32,
826         instance_count: u32,
827     ) {
828         self.prepare_draw(start_instance);
829         let (index_size, index_type) = match self.state.index_format {
830             wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
831             wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
832         };
833         let index_offset = self.state.index_offset + index_size * start_index as wgt::BufferAddress;
834         self.cmd_buffer.commands.push(C::DrawIndexed {
835             topology: self.state.topology,
836             index_type,
837             index_offset,
838             index_count,
839             base_vertex,
840             instance_count,
841         });
842     }
draw_indirect( &mut self, buffer: &super::Buffer, offset: wgt::BufferAddress, draw_count: u32, )843     unsafe fn draw_indirect(
844         &mut self,
845         buffer: &super::Buffer,
846         offset: wgt::BufferAddress,
847         draw_count: u32,
848     ) {
849         self.prepare_draw(0);
850         for draw in 0..draw_count as wgt::BufferAddress {
851             let indirect_offset =
852                 offset + draw * mem::size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
853             self.cmd_buffer.commands.push(C::DrawIndirect {
854                 topology: self.state.topology,
855                 indirect_buf: buffer.raw.unwrap(),
856                 indirect_offset,
857             });
858         }
859     }
draw_indexed_indirect( &mut self, buffer: &super::Buffer, offset: wgt::BufferAddress, draw_count: u32, )860     unsafe fn draw_indexed_indirect(
861         &mut self,
862         buffer: &super::Buffer,
863         offset: wgt::BufferAddress,
864         draw_count: u32,
865     ) {
866         self.prepare_draw(0);
867         let index_type = match self.state.index_format {
868             wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
869             wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
870         };
871         for draw in 0..draw_count as wgt::BufferAddress {
872             let indirect_offset = offset
873                 + draw * mem::size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
874             self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
875                 topology: self.state.topology,
876                 index_type,
877                 indirect_buf: buffer.raw.unwrap(),
878                 indirect_offset,
879             });
880         }
881     }
    /// Indirect multi-draw with a GPU-side draw count.
    ///
    /// NOTE(review): the body is `unreachable!()`, so this backend evidently
    /// does not support this path — presumably the corresponding feature is
    /// never advertised and wgpu-core never routes a call here; confirm at
    /// the adapter/feature level.
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    /// Indexed indirect multi-draw with a GPU-side draw count.
    ///
    /// NOTE(review): like `draw_indirect_count`, this is a deliberate stub —
    /// presumably the feature is never advertised by this backend, so this
    /// entry point is never reached; confirm at the adapter/feature level.
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
902 
903     // compute
904 
begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor)905     unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {
906         if let Some(label) = desc.label {
907             let range = self.cmd_buffer.add_marker(label);
908             self.cmd_buffer.commands.push(C::PushDebugGroup(range));
909             self.state.has_pass_label = true;
910         }
911     }
end_compute_pass(&mut self)912     unsafe fn end_compute_pass(&mut self) {
913         if self.state.has_pass_label {
914             self.cmd_buffer.commands.push(C::PopDebugGroup);
915             self.state.has_pass_label = false;
916         }
917     }
918 
    /// Binds a compute pipeline by delegating to the shared
    /// `set_pipeline_inner` helper for the pipeline's inner state.
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }
922 
    /// Records a compute dispatch; `count` is forwarded verbatim as the
    /// three-dimensional workgroup count.
    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.cmd_buffer.commands.push(C::Dispatch(count));
    }
dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress)926     unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
927         self.cmd_buffer.commands.push(C::DispatchIndirect {
928             indirect_buf: buffer.raw.unwrap(),
929             indirect_offset: offset,
930         });
931     }
932 }
933