1 use super::{conv, HResult as _};
2 use std::{mem, ops::Range, ptr};
3 use winapi::um::d3d12;
4
make_box(origin: &wgt::Origin3d, size: &crate::CopyExtent) -> d3d12::D3D12_BOX5 fn make_box(origin: &wgt::Origin3d, size: &crate::CopyExtent) -> d3d12::D3D12_BOX {
6 d3d12::D3D12_BOX {
7 left: origin.x,
8 top: origin.y,
9 right: origin.x + size.width,
10 bottom: origin.y + size.height,
11 front: origin.z,
12 back: origin.z + size.depth,
13 }
14 }
15
16 impl super::Temp {
prepare_marker(&mut self, marker: &str) -> (&[u16], u32)17 fn prepare_marker(&mut self, marker: &str) -> (&[u16], u32) {
18 self.marker.clear();
19 self.marker.extend(marker.encode_utf16());
20 self.marker.push(0);
21 (&self.marker, self.marker.len() as u32 * 2)
22 }
23 }
24
impl super::CommandEncoder {
    /// Begins a pass of the given `kind`: optionally opens a PIX debug event
    /// for `label`, records the pass kind, clears the per-pass dirty bitmasks,
    /// and binds the shared view + sampler descriptor heaps.
    unsafe fn begin_pass(&mut self, kind: super::PassKind, label: crate::Label) {
        let list = self.list.unwrap();
        self.pass.kind = kind;
        if let Some(label) = label {
            // `prepare_marker` yields a NUL-terminated UTF-16 string and its byte size.
            let (wide_label, size) = self.temp.prepare_marker(label);
            list.BeginEvent(0, wide_label.as_ptr() as *const _, size);
            self.pass.has_label = true;
        }
        self.pass.dirty_root_elements = 0;
        self.pass.dirty_vertex_buffers = 0;
        list.set_descriptor_heaps(&[self.shared.heap_views.raw, self.shared.heap_samplers.raw]);
    }

    /// Ends the current pass: unbinds the descriptor heaps, closes the PIX
    /// event opened in `begin_pass` (if any), and resets the pass state.
    unsafe fn end_pass(&mut self) {
        let list = self.list.unwrap();
        list.set_descriptor_heaps(&[]);
        if self.pass.has_label {
            list.EndEvent();
        }
        self.pass.clear();
    }

    /// Flushes lazily-recorded state before a draw call: rebinds every dirty
    /// vertex-buffer slot, refreshes the special constant buffer (base
    /// vertex/instance) if the current layout uses one, then flushes dirty
    /// root elements via `update_root_elements`.
    unsafe fn prepare_draw(&mut self, base_vertex: i32, base_instance: u32) {
        // Each set bit in `dirty_vertex_buffers` marks a slot whose binding changed.
        while self.pass.dirty_vertex_buffers != 0 {
            let list = self.list.unwrap();
            let index = self.pass.dirty_vertex_buffers.trailing_zeros();
            self.pass.dirty_vertex_buffers ^= 1 << index;
            list.IASetVertexBuffers(
                index,
                1,
                self.pass.vertex_buffers.as_ptr().offset(index as isize),
            );
        }
        if let Some(root_index) = self.pass.layout.special_constants_root_index {
            // Only mark the root element dirty when the constants actually changed;
            // any non-SpecialConstantBuffer occupant forces an update.
            let needs_update = match self.pass.root_elements[root_index as usize] {
                super::RootElement::SpecialConstantBuffer {
                    base_vertex: other_vertex,
                    base_instance: other_instance,
                    other: _,
                } => base_vertex != other_vertex || base_instance != other_instance,
                _ => true,
            };
            if needs_update {
                self.pass.dirty_root_elements |= 1 << root_index;
                self.pass.root_elements[root_index as usize] =
                    super::RootElement::SpecialConstantBuffer {
                        base_vertex,
                        base_instance,
                        other: 0,
                    };
            }
        }
        self.update_root_elements();
    }

    /// Compute-side analogue of `prepare_draw`: stores the dispatch group
    /// counts in the special constant buffer (if the layout has one) and
    /// flushes dirty root elements.
    fn prepare_dispatch(&mut self, count: [u32; 3]) {
        if let Some(root_index) = self.pass.layout.special_constants_root_index {
            // The three dispatch counts are packed into the same variant that
            // draws use for (base_vertex, base_instance, other).
            let needs_update = match self.pass.root_elements[root_index as usize] {
                super::RootElement::SpecialConstantBuffer {
                    base_vertex,
                    base_instance,
                    other,
                } => [base_vertex as u32, base_instance, other] != count,
                _ => true,
            };
            if needs_update {
                self.pass.dirty_root_elements |= 1 << root_index;
                self.pass.root_elements[root_index as usize] =
                    super::RootElement::SpecialConstantBuffer {
                        base_vertex: count[0] as i32,
                        base_instance: count[1],
                        other: count[2],
                    };
            }
        }
        self.update_root_elements();
    }

    //Note: we have to call this lazily before draw calls. Otherwise, D3D complains
    // about the root parameters being incompatible with root signature.
    /// Re-binds every root element whose bit is set in `dirty_root_elements`,
    /// dispatching to the graphics or compute variant of each D3D12 call
    /// depending on the current pass kind. Transfer passes bind nothing.
    fn update_root_elements(&mut self) {
        use super::{BufferViewKind as Bvk, PassKind as Pk};

        while self.pass.dirty_root_elements != 0 {
            let list = self.list.unwrap();
            let index = self.pass.dirty_root_elements.trailing_zeros();
            self.pass.dirty_root_elements ^= 1 << index;

            match self.pass.root_elements[index as usize] {
                super::RootElement::Empty => log::error!("Root index {} is not bound", index),
                super::RootElement::SpecialConstantBuffer {
                    base_vertex,
                    base_instance,
                    other,
                } => match self.pass.kind {
                    Pk::Render => {
                        // Render passes only consume the first two constants;
                        // `other` is unused here (it carries dispatch Z for compute).
                        list.set_graphics_root_constant(index, base_vertex as u32, 0);
                        list.set_graphics_root_constant(index, base_instance, 1);
                    }
                    Pk::Compute => {
                        list.set_compute_root_constant(index, base_vertex as u32, 0);
                        list.set_compute_root_constant(index, base_instance, 1);
                        list.set_compute_root_constant(index, other, 2);
                    }
                    Pk::Transfer => (),
                },
                super::RootElement::Table(descriptor) => match self.pass.kind {
                    Pk::Render => list.set_graphics_root_descriptor_table(index, descriptor),
                    Pk::Compute => list.set_compute_root_descriptor_table(index, descriptor),
                    Pk::Transfer => (),
                },
                super::RootElement::DynamicOffsetBuffer { kind, address } => {
                    match (self.pass.kind, kind) {
                        (Pk::Render, Bvk::Constant) => {
                            list.set_graphics_root_constant_buffer_view(index, address)
                        }
                        (Pk::Compute, Bvk::Constant) => {
                            list.set_compute_root_constant_buffer_view(index, address)
                        }
                        (Pk::Render, Bvk::ShaderResource) => {
                            list.set_graphics_root_shader_resource_view(index, address)
                        }
                        (Pk::Compute, Bvk::ShaderResource) => {
                            list.set_compute_root_shader_resource_view(index, address)
                        }
                        (Pk::Render, Bvk::UnorderedAccess) => {
                            list.set_graphics_root_unordered_access_view(index, address)
                        }
                        (Pk::Compute, Bvk::UnorderedAccess) => {
                            list.set_compute_root_unordered_access_view(index, address)
                        }
                        (Pk::Transfer, _) => (),
                    }
                }
            }
        }
    }

    /// Adopts a new pipeline layout after a root-signature change: resets the
    /// special constant slot (if present) and marks every root element of the
    /// new layout dirty so it gets re-bound before the next draw/dispatch.
    fn reset_signature(&mut self, layout: &super::PipelineLayoutShared) {
        log::trace!("Reset signature {:?}", layout.signature);
        if let Some(root_index) = layout.special_constants_root_index {
            self.pass.root_elements[root_index as usize] =
                super::RootElement::SpecialConstantBuffer {
                    base_vertex: 0,
                    base_instance: 0,
                    other: 0,
                };
        }
        self.pass.layout = layout.clone();
        // Set the low `total_root_elements` bits so all elements re-bind lazily.
        self.pass.dirty_root_elements = (1 << layout.total_root_elements) - 1;
    }
}
178
impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
    /// Acquires a graphics command list — reusing one from `free_lists` when
    /// possible, otherwise creating a fresh direct list — optionally names it
    /// for debugging, and resets the temporary + pass state.
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        let list = match self.free_lists.pop() {
            Some(list) => {
                // Recycled lists must be reset against the allocator before reuse.
                list.reset(self.allocator, native::PipelineState::null());
                list
            }
            None => self
                .device
                .create_graphics_command_list(
                    native::CmdListType::Direct,
                    self.allocator,
                    native::PipelineState::null(),
                    0,
                )
                .into_device_result("Create command list")?,
        };

        if let Some(label) = label {
            let cwstr = conv::map_label(label);
            list.SetName(cwstr.as_ptr());
        }

        self.list = Some(list);
        self.temp.clear();
        self.pass.clear();
        Ok(())
    }
    /// Abandons the current recording: closes the list and returns it to the
    /// free pool for reuse instead of submitting it.
    unsafe fn discard_encoding(&mut self) {
        if let Some(list) = self.list.take() {
            list.close();
            self.free_lists.push(list);
        }
    }
    /// Finishes recording and hands the closed list over as a command buffer.
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.list.take().unwrap();
        raw.close();
        Ok(super::CommandBuffer { raw })
    }
    /// Reclaims submitted command buffers into the free pool and resets the
    /// shared allocator (the caller must guarantee the GPU is done with them).
    unsafe fn reset_all<I: Iterator<Item = super::CommandBuffer>>(&mut self, command_buffers: I) {
        for cmd_buf in command_buffers {
            self.free_lists.push(cmd_buf.raw);
        }
        self.allocator.reset();
    }

    /// Records resource state transitions for buffers. A usage change becomes
    /// a transition barrier; a STORAGE_WRITE -> STORAGE_WRITE "change" becomes
    /// a UAV barrier so successive writes are ordered.
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        self.temp.barriers.clear();

        log::trace!("List {:p} buffer transitions", self.list.unwrap().as_ptr());
        for barrier in barriers {
            log::trace!(
                "\t{:p}: usage {:?}..{:?}",
                barrier.buffer.resource.as_ptr(),
                barrier.usage.start,
                barrier.usage.end
            );
            let s0 = conv::map_buffer_usage_to_state(barrier.usage.start);
            let s1 = conv::map_buffer_usage_to_state(barrier.usage.end);
            if s0 != s1 {
                let mut raw = d3d12::D3D12_RESOURCE_BARRIER {
                    Type: d3d12::D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,
                    Flags: d3d12::D3D12_RESOURCE_BARRIER_FLAG_NONE,
                    u: mem::zeroed(),
                };
                // The union payload must be written through the winapi accessor.
                *raw.u.Transition_mut() = d3d12::D3D12_RESOURCE_TRANSITION_BARRIER {
                    pResource: barrier.buffer.resource.as_mut_ptr(),
                    Subresource: d3d12::D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
                    StateBefore: s0,
                    StateAfter: s1,
                };
                self.temp.barriers.push(raw);
            } else if barrier.usage.start == crate::BufferUses::STORAGE_WRITE {
                // Same-state storage writes still need a UAV barrier for ordering.
                let mut raw = d3d12::D3D12_RESOURCE_BARRIER {
                    Type: d3d12::D3D12_RESOURCE_BARRIER_TYPE_UAV,
                    Flags: d3d12::D3D12_RESOURCE_BARRIER_FLAG_NONE,
                    u: mem::zeroed(),
                };
                *raw.u.UAV_mut() = d3d12::D3D12_RESOURCE_UAV_BARRIER {
                    pResource: barrier.buffer.resource.as_mut_ptr(),
                };
                self.temp.barriers.push(raw);
            }
        }

        if !self.temp.barriers.is_empty() {
            self.list
                .unwrap()
                .ResourceBarrier(self.temp.barriers.len() as u32, self.temp.barriers.as_ptr());
        }
    }

    /// Records resource state transitions for textures. A whole-image barrier
    /// is emitted when the range covers every mip/layer/aspect; otherwise one
    /// barrier per affected subresource (mip x layer x plane) is generated.
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        self.temp.barriers.clear();

        log::trace!("List {:p} texture transitions", self.list.unwrap().as_ptr());
        for barrier in barriers {
            log::trace!(
                "\t{:p}: usage {:?}..{:?}, range {:?}",
                barrier.texture.resource.as_ptr(),
                barrier.usage.start,
                barrier.usage.end,
                barrier.range
            );
            let s0 = conv::map_texture_usage_to_state(barrier.usage.start);
            let s1 = conv::map_texture_usage_to_state(barrier.usage.end);
            if s0 != s1 {
                let mut raw = d3d12::D3D12_RESOURCE_BARRIER {
                    Type: d3d12::D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,
                    Flags: d3d12::D3D12_RESOURCE_BARRIER_FLAG_NONE,
                    u: mem::zeroed(),
                };
                *raw.u.Transition_mut() = d3d12::D3D12_RESOURCE_TRANSITION_BARRIER {
                    pResource: barrier.texture.resource.as_mut_ptr(),
                    Subresource: d3d12::D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
                    StateBefore: s0,
                    StateAfter: s1,
                };

                // `None` counts mean "to the end of the resource".
                let mip_level_count = match barrier.range.mip_level_count {
                    Some(count) => count.get(),
                    None => barrier.texture.mip_level_count - barrier.range.base_mip_level,
                };
                let array_layer_count = match barrier.range.array_layer_count {
                    Some(count) => count.get(),
                    None => barrier.texture.array_layer_count() - barrier.range.base_array_layer,
                };

                if barrier.range.aspect == wgt::TextureAspect::All
                    && barrier.range.base_mip_level == 0
                    && mip_level_count == barrier.texture.mip_level_count
                    && barrier.range.base_array_layer == 0
                    && array_layer_count == barrier.texture.array_layer_count()
                {
                    // Only one barrier if it affects the whole image.
                    self.temp.barriers.push(raw);
                } else {
                    // Selected texture aspect is relevant if the texture format has both depth _and_ stencil aspects.
                    // Plane 0 is depth, plane 1 is stencil in D3D12's subresource layout.
                    let planes = if crate::FormatAspects::from(barrier.texture.format)
                        .contains(crate::FormatAspects::DEPTH | crate::FormatAspects::STENCIL)
                    {
                        match barrier.range.aspect {
                            wgt::TextureAspect::All => 0..2,
                            wgt::TextureAspect::StencilOnly => 1..2,
                            wgt::TextureAspect::DepthOnly => 0..1,
                        }
                    } else {
                        0..1
                    };

                    // One barrier per (mip, layer, plane) in the selected sub-range.
                    for rel_mip_level in 0..mip_level_count {
                        for rel_array_layer in 0..array_layer_count {
                            for plane in planes.clone() {
                                raw.u.Transition_mut().Subresource =
                                    barrier.texture.calc_subresource(
                                        barrier.range.base_mip_level + rel_mip_level,
                                        barrier.range.base_array_layer + rel_array_layer,
                                        plane,
                                    );
                                self.temp.barriers.push(raw);
                            }
                        }
                    }
                }
            } else if barrier.usage.start == crate::TextureUses::STORAGE_WRITE {
                // Same-state storage writes still need a UAV barrier for ordering.
                let mut raw = d3d12::D3D12_RESOURCE_BARRIER {
                    Type: d3d12::D3D12_RESOURCE_BARRIER_TYPE_UAV,
                    Flags: d3d12::D3D12_RESOURCE_BARRIER_FLAG_NONE,
                    u: mem::zeroed(),
                };
                *raw.u.UAV_mut() = d3d12::D3D12_RESOURCE_UAV_BARRIER {
                    pResource: barrier.texture.resource.as_mut_ptr(),
                };
                self.temp.barriers.push(raw);
            }
        }

        if !self.temp.barriers.is_empty() {
            self.list
                .unwrap()
                .ResourceBarrier(self.temp.barriers.len() as u32, self.temp.barriers.as_ptr());
        }
    }

    /// Zeroes `range` of `buffer` by repeatedly copying from the shared
    /// pre-zeroed buffer, `ZERO_BUFFER_SIZE` bytes at a time.
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let list = self.list.unwrap();
        let mut offset = range.start;
        while offset < range.end {
            let size = super::ZERO_BUFFER_SIZE.min(range.end - offset);
            list.CopyBufferRegion(
                buffer.resource.as_mut_ptr(),
                offset,
                self.shared.zero_buffer.as_mut_ptr(),
                0,
                size,
            );
            offset += size;
        }
    }

    /// Records buffer-to-buffer copies, one `CopyBufferRegion` per region.
    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let list = self.list.unwrap();
        for r in regions {
            list.CopyBufferRegion(
                dst.resource.as_mut_ptr(),
                r.dst_offset,
                src.resource.as_mut_ptr(),
                r.src_offset,
                r.size.get(),
            );
        }
    }

    /// Records texture-to-texture copies. Both locations are addressed by
    /// subresource index; the copied volume is described by a source box.
    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let list = self.list.unwrap();
        // The copy-location structs are reused across regions; only the union
        // payload (subresource index) changes per region.
        let mut src_location = d3d12::D3D12_TEXTURE_COPY_LOCATION {
            pResource: src.resource.as_mut_ptr(),
            Type: d3d12::D3D12_TEXTURE_COPY_TYPE_SUBRESOURCE_INDEX,
            u: mem::zeroed(),
        };
        let mut dst_location = d3d12::D3D12_TEXTURE_COPY_LOCATION {
            pResource: dst.resource.as_mut_ptr(),
            Type: d3d12::D3D12_TEXTURE_COPY_TYPE_SUBRESOURCE_INDEX,
            u: mem::zeroed(),
        };

        for r in regions {
            let src_box = make_box(&r.src_base.origin, &r.size);
            *src_location.u.SubresourceIndex_mut() = src.calc_subresource_for_copy(&r.src_base);
            *dst_location.u.SubresourceIndex_mut() = dst.calc_subresource_for_copy(&r.dst_base);

            list.CopyTextureRegion(
                &dst_location,
                r.dst_base.origin.x,
                r.dst_base.origin.y,
                r.dst_base.origin.z,
                &src_location,
                &src_box,
            );
        }
    }

    /// Records buffer-to-texture copies. The buffer side is described by a
    /// placed footprint (offset + per-row/per-image layout), the texture side
    /// by a subresource index.
    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let list = self.list.unwrap();
        let mut src_location = d3d12::D3D12_TEXTURE_COPY_LOCATION {
            pResource: src.resource.as_mut_ptr(),
            Type: d3d12::D3D12_TEXTURE_COPY_TYPE_PLACED_FOOTPRINT,
            u: mem::zeroed(),
        };
        let mut dst_location = d3d12::D3D12_TEXTURE_COPY_LOCATION {
            pResource: dst.resource.as_mut_ptr(),
            Type: d3d12::D3D12_TEXTURE_COPY_TYPE_SUBRESOURCE_INDEX,
            u: mem::zeroed(),
        };
        let raw_format = conv::map_texture_format(dst.format);

        // NOTE(review): `block_dimensions.0` is the block *width*; using it to
        // scale rows assumes square compressed blocks — confirm for non-square formats.
        let block_size = dst.format.describe().block_dimensions.0 as u32;
        for r in regions {
            let src_box = make_box(&wgt::Origin3d::ZERO, &r.size);
            *src_location.u.PlacedFootprint_mut() = d3d12::D3D12_PLACED_SUBRESOURCE_FOOTPRINT {
                Offset: r.buffer_layout.offset,
                Footprint: d3d12::D3D12_SUBRESOURCE_FOOTPRINT {
                    Format: raw_format,
                    Width: r.size.width,
                    // Footprint height is in texels: rows-of-blocks * block height.
                    Height: r
                        .buffer_layout
                        .rows_per_image
                        .map_or(r.size.height, |count| count.get() * block_size),
                    Depth: r.size.depth,
                    // D3D12 requires the row pitch to be 256-byte aligned, hence the clamp.
                    RowPitch: r.buffer_layout.bytes_per_row.map_or(0, |count| {
                        count.get().max(d3d12::D3D12_TEXTURE_DATA_PITCH_ALIGNMENT)
                    }),
                },
            };
            *dst_location.u.SubresourceIndex_mut() = dst.calc_subresource_for_copy(&r.texture_base);

            list.CopyTextureRegion(
                &dst_location,
                r.texture_base.origin.x,
                r.texture_base.origin.y,
                r.texture_base.origin.z,
                &src_location,
                &src_box,
            );
        }
    }

    /// Records texture-to-buffer copies: mirror of `copy_buffer_to_texture`
    /// with the placed footprint on the destination side.
    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let list = self.list.unwrap();
        let mut src_location = d3d12::D3D12_TEXTURE_COPY_LOCATION {
            pResource: src.resource.as_mut_ptr(),
            Type: d3d12::D3D12_TEXTURE_COPY_TYPE_SUBRESOURCE_INDEX,
            u: mem::zeroed(),
        };
        let mut dst_location = d3d12::D3D12_TEXTURE_COPY_LOCATION {
            pResource: dst.resource.as_mut_ptr(),
            Type: d3d12::D3D12_TEXTURE_COPY_TYPE_PLACED_FOOTPRINT,
            u: mem::zeroed(),
        };
        let raw_format = conv::map_texture_format(src.format);

        // NOTE(review): same square-block assumption as in `copy_buffer_to_texture`.
        let block_size = src.format.describe().block_dimensions.0 as u32;
        for r in regions {
            let src_box = make_box(&r.texture_base.origin, &r.size);
            *src_location.u.SubresourceIndex_mut() = src.calc_subresource_for_copy(&r.texture_base);
            *dst_location.u.PlacedFootprint_mut() = d3d12::D3D12_PLACED_SUBRESOURCE_FOOTPRINT {
                Offset: r.buffer_layout.offset,
                Footprint: d3d12::D3D12_SUBRESOURCE_FOOTPRINT {
                    Format: raw_format,
                    Width: r.size.width,
                    Height: r
                        .buffer_layout
                        .rows_per_image
                        .map_or(r.size.height, |count| count.get() * block_size),
                    Depth: r.size.depth,
                    // NOTE(review): unlike the buffer->texture path, no 256-byte
                    // clamp is applied here — presumably validated upstream; confirm.
                    RowPitch: r.buffer_layout.bytes_per_row.map_or(0, |count| count.get()),
                },
            };

            list.CopyTextureRegion(&dst_location, 0, 0, 0, &src_location, &src_box);
        }
    }

    /// Starts query `index` of `set` (occlusion/pipeline-stats style queries).
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        self.list
            .unwrap()
            .BeginQuery(set.raw.as_mut_ptr(), set.raw_ty, index);
    }
    /// Ends query `index` of `set`.
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        self.list
            .unwrap()
            .EndQuery(set.raw.as_mut_ptr(), set.raw_ty, index);
    }
    /// Writes a GPU timestamp; timestamps are end-only queries in D3D12.
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        self.list.unwrap().EndQuery(
            set.raw.as_mut_ptr(),
            d3d12::D3D12_QUERY_TYPE_TIMESTAMP,
            index,
        );
    }
    /// D3D12 queries need no explicit reset before reuse.
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        // nothing to do here
    }
    /// Resolves query results in `range` into `buffer` at `offset`.
    /// The stride is fixed by `ResolveQueryData` (8 bytes), so `_stride` is unused.
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        self.list.unwrap().ResolveQueryData(
            set.raw.as_mut_ptr(),
            set.raw_ty,
            range.start,
            range.end - range.start,
            buffer.resource.as_mut_ptr(),
            offset,
        );
    }

    // render

    /// Begins a render pass: binds render targets and depth-stencil, performs
    /// clears for non-LOAD attachments, records pending MSAA resolves for
    /// `end_render_pass`, and sets a full-extent viewport and scissor.
    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        self.begin_pass(super::PassKind::Render, desc.label);

        let mut color_views = [native::CpuDescriptor { ptr: 0 }; crate::MAX_COLOR_TARGETS];
        for (rtv, cat) in color_views.iter_mut().zip(desc.color_attachments.iter()) {
            *rtv = cat.target.view.handle_rtv.unwrap().raw;
        }
        // Pick the read-write or read-only DSV depending on the attachment usage.
        let ds_view = match desc.depth_stencil_attachment {
            None => ptr::null(),
            Some(ref ds) => {
                if ds.target.usage == crate::TextureUses::DEPTH_STENCIL_WRITE {
                    &ds.target.view.handle_dsv_rw.as_ref().unwrap().raw
                } else {
                    &ds.target.view.handle_dsv_ro.as_ref().unwrap().raw
                }
            }
        };

        let list = self.list.unwrap();
        list.OMSetRenderTargets(
            desc.color_attachments.len() as u32,
            color_views.as_ptr(),
            0,
            ds_view,
        );

        self.pass.resolves.clear();
        for (rtv, cat) in color_views.iter().zip(desc.color_attachments.iter()) {
            // A missing LOAD op means the attachment must be cleared on begin.
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let value = [
                    cat.clear_value.r as f32,
                    cat.clear_value.g as f32,
                    cat.clear_value.b as f32,
                    cat.clear_value.a as f32,
                ];
                list.clear_render_target_view(*rtv, value, &[]);
            }
            if let Some(ref target) = cat.resolve_target {
                // Resolves are deferred to `end_render_pass`.
                self.pass.resolves.push(super::PassResolve {
                    src: cat.target.view.target_base,
                    dst: target.view.target_base,
                    format: target.view.raw_format,
                });
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            let mut flags = native::ClearFlags::empty();
            let aspects = ds.target.view.format_aspects;
            if !ds.depth_ops.contains(crate::AttachmentOps::LOAD)
                && aspects.contains(crate::FormatAspects::DEPTH)
            {
                flags |= native::ClearFlags::DEPTH;
            }
            if !ds.stencil_ops.contains(crate::AttachmentOps::LOAD)
                && aspects.contains(crate::FormatAspects::STENCIL)
            {
                flags |= native::ClearFlags::STENCIL;
            }

            if !ds_view.is_null() && !flags.is_empty() {
                list.clear_depth_stencil_view(
                    *ds_view,
                    flags,
                    ds.clear_value.0,
                    ds.clear_value.1 as u8,
                    &[],
                );
            }
        }

        // Default the viewport and scissor to the full pass extent.
        let raw_vp = d3d12::D3D12_VIEWPORT {
            TopLeftX: 0.0,
            TopLeftY: 0.0,
            Width: desc.extent.width as f32,
            Height: desc.extent.height as f32,
            MinDepth: 0.0,
            MaxDepth: 1.0,
        };
        let raw_rect = d3d12::D3D12_RECT {
            left: 0,
            top: 0,
            right: desc.extent.width as i32,
            bottom: desc.extent.height as i32,
        };
        list.RSSetViewports(1, &raw_vp);
        list.RSSetScissorRects(1, &raw_rect);
    }

    /// Ends a render pass. If resolves are pending: transition src/dst into
    /// the dedicated resolve states, run `ResolveSubresource` for each pair,
    /// then flip the same barriers to return everything to `RENDER_TARGET`.
    unsafe fn end_render_pass(&mut self) {
        if !self.pass.resolves.is_empty() {
            let list = self.list.unwrap();
            self.temp.barriers.clear();

            // All the targets are expected to be in `COLOR_TARGET` state,
            // but D3D12 has special source/destination states for the resolves.
            for resolve in self.pass.resolves.iter() {
                let mut barrier = d3d12::D3D12_RESOURCE_BARRIER {
                    Type: d3d12::D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,
                    Flags: d3d12::D3D12_RESOURCE_BARRIER_FLAG_NONE,
                    u: mem::zeroed(),
                };
                //Note: this assumes `D3D12_RESOURCE_STATE_RENDER_TARGET`.
                // If it's not the case, we can include the `TextureUses` in `PassResolve`.
                *barrier.u.Transition_mut() = d3d12::D3D12_RESOURCE_TRANSITION_BARRIER {
                    pResource: resolve.src.0.as_mut_ptr(),
                    Subresource: resolve.src.1,
                    StateBefore: d3d12::D3D12_RESOURCE_STATE_RENDER_TARGET,
                    StateAfter: d3d12::D3D12_RESOURCE_STATE_RESOLVE_SOURCE,
                };
                self.temp.barriers.push(barrier);
                *barrier.u.Transition_mut() = d3d12::D3D12_RESOURCE_TRANSITION_BARRIER {
                    pResource: resolve.dst.0.as_mut_ptr(),
                    Subresource: resolve.dst.1,
                    StateBefore: d3d12::D3D12_RESOURCE_STATE_RENDER_TARGET,
                    StateAfter: d3d12::D3D12_RESOURCE_STATE_RESOLVE_DEST,
                };
                self.temp.barriers.push(barrier);
            }

            if !self.temp.barriers.is_empty() {
                profiling::scope!("ID3D12GraphicsCommandList::ResourceBarrier");
                list.ResourceBarrier(self.temp.barriers.len() as u32, self.temp.barriers.as_ptr());
            }

            for resolve in self.pass.resolves.iter() {
                profiling::scope!("ID3D12GraphicsCommandList::ResolveSubresource");
                list.ResolveSubresource(
                    resolve.dst.0.as_mut_ptr(),
                    resolve.dst.1,
                    resolve.src.0.as_mut_ptr(),
                    resolve.src.1,
                    resolve.format,
                );
            }

            // Flip all the barriers to reverse, back into `COLOR_TARGET`.
            for barrier in self.temp.barriers.iter_mut() {
                let transition = barrier.u.Transition_mut();
                mem::swap(&mut transition.StateBefore, &mut transition.StateAfter);
            }
            if !self.temp.barriers.is_empty() {
                profiling::scope!("ID3D12GraphicsCommandList::ResourceBarrier");
                list.ResourceBarrier(self.temp.barriers.len() as u32, self.temp.barriers.as_ptr());
            }
        }

        self.end_pass();
    }

    /// Stores the group's descriptor tables and dynamic-offset root
    /// descriptors into the pass's root-element cache. If the root signature
    /// matches the active one, only the touched range is marked dirty;
    /// otherwise the whole signature state is reset.
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        log::trace!("Set group[{}]", index);
        let info = &layout.bind_group_infos[index as usize];
        let mut root_index = info.base_root_index as usize;

        // Bind CBV/SRC/UAV descriptor tables
        if info.tables.contains(super::TableTypes::SRV_CBV_UAV) {
            log::trace!("\tBind element[{}] = view", root_index);
            self.pass.root_elements[root_index] =
                super::RootElement::Table(group.handle_views.unwrap().gpu);
            root_index += 1;
        }

        // Bind Sampler descriptor tables.
        if info.tables.contains(super::TableTypes::SAMPLERS) {
            log::trace!("\tBind element[{}] = sampler", root_index);
            self.pass.root_elements[root_index] =
                super::RootElement::Table(group.handle_samplers.unwrap().gpu);
            root_index += 1;
        }

        // Bind root descriptors
        for ((&kind, &gpu_base), &offset) in info
            .dynamic_buffers
            .iter()
            .zip(group.dynamic_buffers.iter())
            .zip(dynamic_offsets)
        {
            log::trace!("\tBind element[{}] = dynamic", root_index);
            self.pass.root_elements[root_index] = super::RootElement::DynamicOffsetBuffer {
                kind,
                address: gpu_base + offset as native::GpuAddress,
            };
            root_index += 1;
        }

        if self.pass.layout.signature == layout.shared.signature {
            // Mark just this group's root-element range dirty.
            self.pass.dirty_root_elements |= (1 << root_index) - (1 << info.base_root_index);
        } else {
            // D3D12 requires full reset on signature change
            self.reset_signature(&layout.shared);
        };
    }
    /// Push constants are not implemented by this backend; intentionally a no-op.
    unsafe fn set_push_constants(
        &mut self,
        _layout: &super::PipelineLayout,
        _stages: wgt::ShaderStages,
        _offset: u32,
        _data: &[u32],
    ) {
    }

    /// Emits a one-shot PIX marker with the given label.
    unsafe fn insert_debug_marker(&mut self, label: &str) {
        let (wide_label, size) = self.temp.prepare_marker(label);
        self.list
            .unwrap()
            .SetMarker(0, wide_label.as_ptr() as *const _, size);
    }
    /// Opens a nested PIX event; paired with `end_debug_marker`.
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        let (wide_label, size) = self.temp.prepare_marker(group_label);
        self.list
            .unwrap()
            .BeginEvent(0, wide_label.as_ptr() as *const _, size);
    }
    /// Closes the innermost PIX event opened by `begin_debug_marker`.
    unsafe fn end_debug_marker(&mut self) {
        self.list.unwrap().EndEvent()
    }

    /// Binds a render pipeline: resets signature state if the root signature
    /// changed, sets PSO and topology, and dirties any vertex-buffer slot
    /// whose stride differs from the new pipeline's expectation.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        let list = self.list.unwrap();

        if self.pass.layout.signature != pipeline.layout.signature {
            // D3D12 requires full reset on signature change
            list.set_graphics_root_signature(pipeline.layout.signature);
            self.reset_signature(&pipeline.layout);
        };

        list.set_pipeline_state(pipeline.raw);
        list.IASetPrimitiveTopology(pipeline.topology);

        // Strides live in the pipeline in wgpu, but in the VB binding in D3D12,
        // so stride changes force a lazy re-bind of the affected slots.
        for (index, (vb, &stride)) in self
            .pass
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_strides.iter())
            .enumerate()
        {
            if let Some(stride) = stride {
                if vb.StrideInBytes != stride.get() {
                    vb.StrideInBytes = stride.get();
                    self.pass.dirty_vertex_buffers |= 1 << index;
                }
            }
        }
    }

    /// Binds the index buffer immediately (no lazy tracking needed).
    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        self.list.unwrap().set_index_buffer(
            binding.resolve_address(),
            binding.resolve_size() as u32,
            conv::map_index_format(format),
        );
    }
    /// Records a vertex-buffer binding lazily; the actual `IASetVertexBuffers`
    /// call happens in `prepare_draw` once the stride is known.
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        let vb = &mut self.pass.vertex_buffers[index as usize];
        vb.BufferLocation = binding.resolve_address();
        vb.SizeInBytes = binding.resolve_size() as u32;
        self.pass.dirty_vertex_buffers |= 1 << index;
    }

    /// Sets the viewport from a logical rect plus a depth range.
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let raw_vp = d3d12::D3D12_VIEWPORT {
            TopLeftX: rect.x,
            TopLeftY: rect.y,
            Width: rect.w,
            Height: rect.h,
            MinDepth: depth_range.start,
            MaxDepth: depth_range.end,
        };
        self.list.unwrap().RSSetViewports(1, &raw_vp);
    }
    /// Sets the scissor rect (D3D12 uses exclusive right/bottom edges).
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let raw_rect = d3d12::D3D12_RECT {
            left: rect.x as i32,
            top: rect.y as i32,
            right: (rect.x + rect.w) as i32,
            bottom: (rect.y + rect.h) as i32,
        };
        self.list.unwrap().RSSetScissorRects(1, &raw_rect);
    }
    /// Sets the stencil reference value for both faces.
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        self.list.unwrap().set_stencil_reference(value);
    }
    /// Sets the blend factor (RGBA).
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.list.unwrap().set_blend_factor(*color);
    }

    /// Non-indexed draw; flushes lazy state with the first vertex/instance
    /// so the special-constant shim sees the right offsets.
    unsafe fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(start_vertex as i32, start_instance);
        self.list
            .unwrap()
            .draw(vertex_count, instance_count, start_vertex, start_instance);
    }
    /// Indexed draw; same lazy-state flush as `draw`.
    unsafe fn draw_indexed(
        &mut self,
        start_index: u32,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(base_vertex, start_instance);
        self.list.unwrap().draw_indexed(
            index_count,
            instance_count,
            start_index,
            base_vertex,
            start_instance,
        );
    }
    /// Indirect non-indexed draws via the shared `draw` command signature.
    /// Special constants are flushed with zeros (offsets come from the GPU args).
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0, 0);
        self.list.unwrap().ExecuteIndirect(
            self.shared.cmd_signatures.draw.as_mut_ptr(),
            draw_count,
            buffer.resource.as_mut_ptr(),
            offset,
            ptr::null_mut(),
            0,
        );
    }
    /// Indirect indexed draws via the shared `draw_indexed` command signature.
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0, 0);
        self.list.unwrap().ExecuteIndirect(
            self.shared.cmd_signatures.draw_indexed.as_mut_ptr(),
            draw_count,
            buffer.resource.as_mut_ptr(),
            offset,
            ptr::null_mut(),
            0,
        );
    }
    /// Count-buffer variant of `draw_indirect`: the GPU reads the actual draw
    /// count from `count_buffer`, capped at `max_count`.
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        self.prepare_draw(0, 0);
        self.list.unwrap().ExecuteIndirect(
            self.shared.cmd_signatures.draw.as_mut_ptr(),
            max_count,
            buffer.resource.as_mut_ptr(),
            offset,
            count_buffer.resource.as_mut_ptr(),
            count_offset,
        );
    }
    /// Count-buffer variant of `draw_indexed_indirect`.
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        self.prepare_draw(0, 0);
        self.list.unwrap().ExecuteIndirect(
            self.shared.cmd_signatures.draw_indexed.as_mut_ptr(),
            max_count,
            buffer.resource.as_mut_ptr(),
            offset,
            count_buffer.resource.as_mut_ptr(),
            count_offset,
        );
    }

    // compute

    /// Begins a compute pass (shared pass setup, compute kind).
    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {
        self.begin_pass(super::PassKind::Compute, desc.label);
    }
    /// Ends the compute pass (shared pass teardown).
    unsafe fn end_compute_pass(&mut self) {
        self.end_pass();
    }

    /// Binds a compute pipeline, resetting signature state on root-signature change.
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        let list = self.list.unwrap();

        if self.pass.layout.signature != pipeline.layout.signature {
            // D3D12 requires full reset on signature change
            list.set_compute_root_signature(pipeline.layout.signature);
            self.reset_signature(&pipeline.layout);
        };

        list.set_pipeline_state(pipeline.raw);
    }

    /// Direct dispatch; the group counts also feed the special constants.
    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.prepare_dispatch(count);
        self.list.unwrap().dispatch(count);
    }
    /// Indirect dispatch via the shared `dispatch` command signature.
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        // Special constants are flushed as zeros; the real counts live in GPU memory.
        self.prepare_dispatch([0; 3]);
        //TODO: update special constants indirectly
        self.list.unwrap().ExecuteIndirect(
            self.shared.cmd_signatures.dispatch.as_mut_ptr(),
            1,
            buffer.resource.as_mut_ptr(),
            offset,
            ptr::null_mut(),
            0,
        );
    }
}
1014