1 /* Copyright (c) 2015-2021 The Khronos Group Inc.
2  * Copyright (c) 2015-2021 Valve Corporation
3  * Copyright (c) 2015-2021 LunarG, Inc.
4  * Copyright (C) 2015-2021 Google Inc.
5  * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  *     http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  *
19  * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
20  * Author: Tobin Ehlis <tobine@google.com>
21  * Author: Chris Forbes <chrisf@ijw.co.nz>
22  * Author: Mark Lobodzinski <mark@lunarg.com>
23  * Author: Dave Houlton <daveh@lunarg.com>
24  * Author: John Zulauf <jzulauf@lunarg.com>
25  * Author: Tobias Hector <tobias.hector@amd.com>
26  */
27 #include "cmd_buffer_state.h"
28 #include "render_pass_state.h"
29 #include "state_tracker.h"
30 #include "image_state.h"
31 
// Construct command-pool state from the app's create info.
// 'flags' is a VkQueueFlags value supplied by the caller alongside the pool's
// queueFamilyIndex — presumably the capability flags of that queue family; confirm at call site.
COMMAND_POOL_STATE::COMMAND_POOL_STATE(ValidationStateTracker *dev, VkCommandPool cp, const VkCommandPoolCreateInfo *pCreateInfo,
                                       VkQueueFlags flags)
    : BASE_NODE(cp, kVulkanObjectTypeCommandPool),
      dev_data(dev),
      createFlags(pCreateInfo->flags),
      queueFamilyIndex(pCreateInfo->queueFamilyIndex),
      queue_flags(flags),
      // A pool created with VK_COMMAND_POOL_CREATE_PROTECTED_BIT allocates protected command buffers
      unprotected((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0) {}
40 
Allocate(const VkCommandBufferAllocateInfo * create_info,const VkCommandBuffer * command_buffers)41 void COMMAND_POOL_STATE::Allocate(const VkCommandBufferAllocateInfo *create_info, const VkCommandBuffer *command_buffers) {
42     for (uint32_t i = 0; i < create_info->commandBufferCount; i++) {
43         auto new_cb = dev_data->CreateCmdBufferState(command_buffers[i], create_info, this);
44         commandBuffers.emplace(command_buffers[i], new_cb.get());
45         dev_data->Add(std::move(new_cb));
46     }
47 }
48 
Free(uint32_t count,const VkCommandBuffer * command_buffers)49 void COMMAND_POOL_STATE::Free(uint32_t count, const VkCommandBuffer *command_buffers) {
50     for (uint32_t i = 0; i < count; i++) {
51         auto iter = commandBuffers.find(command_buffers[i]);
52         if (iter != commandBuffers.end()) {
53             dev_data->Destroy<CMD_BUFFER_STATE>(iter->first);
54             commandBuffers.erase(iter);
55         }
56     }
57 }
58 
Reset()59 void COMMAND_POOL_STATE::Reset() {
60     for (auto &entry : commandBuffers) {
61         entry.second->Reset();
62     }
63 }
64 
Destroy()65 void COMMAND_POOL_STATE::Destroy() {
66     for (auto &entry : commandBuffers) {
67         dev_data->Destroy<CMD_BUFFER_STATE>(entry.first);
68     }
69     commandBuffers.clear();
70     BASE_NODE::Destroy();
71 }
72 
// Map a CMD_TYPE enum value to its printable command name (used in validation messages).
const char *CommandTypeString(CMD_TYPE type) {
    // Autogenerated as part of the command_validation.h codegen
    return kGeneratedCommandNameList[type];
}
77 
ConvertToDynamicState(CBStatusFlagBits flag)78 VkDynamicState ConvertToDynamicState(CBStatusFlagBits flag) {
79     switch (flag) {
80         case CBSTATUS_LINE_WIDTH_SET:
81             return VK_DYNAMIC_STATE_LINE_WIDTH;
82         case CBSTATUS_DEPTH_BIAS_SET:
83             return VK_DYNAMIC_STATE_DEPTH_BIAS;
84         case CBSTATUS_BLEND_CONSTANTS_SET:
85             return VK_DYNAMIC_STATE_BLEND_CONSTANTS;
86         case CBSTATUS_DEPTH_BOUNDS_SET:
87             return VK_DYNAMIC_STATE_DEPTH_BOUNDS;
88         case CBSTATUS_STENCIL_READ_MASK_SET:
89             return VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK;
90         case CBSTATUS_STENCIL_WRITE_MASK_SET:
91             return VK_DYNAMIC_STATE_STENCIL_WRITE_MASK;
92         case CBSTATUS_STENCIL_REFERENCE_SET:
93             return VK_DYNAMIC_STATE_STENCIL_REFERENCE;
94         case CBSTATUS_VIEWPORT_SET:
95             return VK_DYNAMIC_STATE_VIEWPORT;
96         case CBSTATUS_SCISSOR_SET:
97             return VK_DYNAMIC_STATE_SCISSOR;
98         case CBSTATUS_EXCLUSIVE_SCISSOR_SET:
99             return VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV;
100         case CBSTATUS_SHADING_RATE_PALETTE_SET:
101             return VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
102         case CBSTATUS_LINE_STIPPLE_SET:
103             return VK_DYNAMIC_STATE_LINE_STIPPLE_EXT;
104         case CBSTATUS_VIEWPORT_W_SCALING_SET:
105             return VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV;
106         case CBSTATUS_CULL_MODE_SET:
107             return VK_DYNAMIC_STATE_CULL_MODE_EXT;
108         case CBSTATUS_FRONT_FACE_SET:
109             return VK_DYNAMIC_STATE_FRONT_FACE_EXT;
110         case CBSTATUS_PRIMITIVE_TOPOLOGY_SET:
111             return VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT;
112         case CBSTATUS_VIEWPORT_WITH_COUNT_SET:
113             return VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
114         case CBSTATUS_SCISSOR_WITH_COUNT_SET:
115             return VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
116         case CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET:
117             return VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
118         case CBSTATUS_DEPTH_TEST_ENABLE_SET:
119             return VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT;
120         case CBSTATUS_DEPTH_WRITE_ENABLE_SET:
121             return VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT;
122         case CBSTATUS_DEPTH_COMPARE_OP_SET:
123             return VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT;
124         case CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET:
125             return VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT;
126         case CBSTATUS_STENCIL_TEST_ENABLE_SET:
127             return VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT;
128         case CBSTATUS_STENCIL_OP_SET:
129             return VK_DYNAMIC_STATE_STENCIL_OP_EXT;
130         case CBSTATUS_DISCARD_RECTANGLE_SET:
131             return VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT;
132         case CBSTATUS_SAMPLE_LOCATIONS_SET:
133             return VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT;
134         case CBSTATUS_COARSE_SAMPLE_ORDER_SET:
135             return VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV;
136         case CBSTATUS_PATCH_CONTROL_POINTS_SET:
137             return VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT;
138         case CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET:
139             return VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT;
140         case CBSTATUS_DEPTH_BIAS_ENABLE_SET:
141             return VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT;
142         case CBSTATUS_LOGIC_OP_SET:
143             return VK_DYNAMIC_STATE_LOGIC_OP_EXT;
144         case CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET:
145             return VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT;
146         case CBSTATUS_VERTEX_INPUT_SET:
147             return VK_DYNAMIC_STATE_VERTEX_INPUT_EXT;
148         default:
149             // CBSTATUS_INDEX_BUFFER_BOUND is not in VkDynamicState
150             return VK_DYNAMIC_STATE_MAX_ENUM;
151     }
152     return VK_DYNAMIC_STATE_MAX_ENUM;
153 }
154 
ConvertToCBStatusFlagBits(VkDynamicState state)155 CBStatusFlagBits ConvertToCBStatusFlagBits(VkDynamicState state) {
156     switch (state) {
157         case VK_DYNAMIC_STATE_VIEWPORT:
158             return CBSTATUS_VIEWPORT_SET;
159         case VK_DYNAMIC_STATE_SCISSOR:
160             return CBSTATUS_SCISSOR_SET;
161         case VK_DYNAMIC_STATE_LINE_WIDTH:
162             return CBSTATUS_LINE_WIDTH_SET;
163         case VK_DYNAMIC_STATE_DEPTH_BIAS:
164             return CBSTATUS_DEPTH_BIAS_SET;
165         case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
166             return CBSTATUS_BLEND_CONSTANTS_SET;
167         case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
168             return CBSTATUS_DEPTH_BOUNDS_SET;
169         case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
170             return CBSTATUS_STENCIL_READ_MASK_SET;
171         case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
172             return CBSTATUS_STENCIL_WRITE_MASK_SET;
173         case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
174             return CBSTATUS_STENCIL_REFERENCE_SET;
175         case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
176             return CBSTATUS_VIEWPORT_W_SCALING_SET;
177         case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
178             return CBSTATUS_DISCARD_RECTANGLE_SET;
179         case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
180             return CBSTATUS_SAMPLE_LOCATIONS_SET;
181         case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
182             return CBSTATUS_SHADING_RATE_PALETTE_SET;
183         case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
184             return CBSTATUS_COARSE_SAMPLE_ORDER_SET;
185         case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
186             return CBSTATUS_EXCLUSIVE_SCISSOR_SET;
187         case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
188             return CBSTATUS_LINE_STIPPLE_SET;
189         case VK_DYNAMIC_STATE_CULL_MODE_EXT:
190             return CBSTATUS_CULL_MODE_SET;
191         case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
192             return CBSTATUS_FRONT_FACE_SET;
193         case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
194             return CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
195         case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
196             return CBSTATUS_VIEWPORT_WITH_COUNT_SET;
197         case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
198             return CBSTATUS_SCISSOR_WITH_COUNT_SET;
199         case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
200             return CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
201         case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
202             return CBSTATUS_DEPTH_TEST_ENABLE_SET;
203         case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
204             return CBSTATUS_DEPTH_WRITE_ENABLE_SET;
205         case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
206             return CBSTATUS_DEPTH_COMPARE_OP_SET;
207         case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
208             return CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
209         case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
210             return CBSTATUS_STENCIL_TEST_ENABLE_SET;
211         case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
212             return CBSTATUS_STENCIL_OP_SET;
213         case VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT:
214             return CBSTATUS_PATCH_CONTROL_POINTS_SET;
215         case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT:
216             return CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
217         case VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT:
218             return CBSTATUS_DEPTH_BIAS_ENABLE_SET;
219         case VK_DYNAMIC_STATE_LOGIC_OP_EXT:
220             return CBSTATUS_LOGIC_OP_SET;
221         case VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT:
222             return CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
223         case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT:
224             return CBSTATUS_VERTEX_INPUT_SET;
225         default:
226             return CBSTATUS_NONE;
227     }
228     return CBSTATUS_NONE;
229 }
230 
// Command-buffer state, created at vkAllocateCommandBuffers time. Caches the
// allocate info and inherits the protected-memory property of its parent pool.
CMD_BUFFER_STATE::CMD_BUFFER_STATE(ValidationStateTracker *dev, VkCommandBuffer cb, const VkCommandBufferAllocateInfo *pCreateInfo,
                                   const COMMAND_POOL_STATE *pool)
    : REFCOUNTED_NODE(cb, kVulkanObjectTypeCommandBuffer),
      createInfo(*pCreateInfo),
      command_pool(pool),
      dev_data(dev),
      unprotected(pool->unprotected) {
    // Establish the clean CB_NEW state; same path as vkResetCommandBuffer.
    Reset();
}
240 
// Get the image viewstate for a given framebuffer attachment.
// Non-const variant: the index must be valid and in range (asserted, not checked) —
// callers needing a null-safe lookup use the const overload below.
IMAGE_VIEW_STATE *CMD_BUFFER_STATE::GetActiveAttachmentImageViewState(uint32_t index) {
    assert(active_attachments && index != VK_ATTACHMENT_UNUSED && (index < active_attachments->size()));
    return active_attachments->at(index);
}
246 
247 // Get the image viewstate for a given framebuffer attachment
GetActiveAttachmentImageViewState(uint32_t index) const248 const IMAGE_VIEW_STATE *CMD_BUFFER_STATE::GetActiveAttachmentImageViewState(uint32_t index) const {
249     if (!active_attachments || index == VK_ATTACHMENT_UNUSED || (index >= active_attachments->size())) {
250         return nullptr;
251     }
252     return active_attachments->at(index);
253 }
254 
AddChild(BASE_NODE * child_node)255 void CMD_BUFFER_STATE::AddChild(BASE_NODE *child_node) {
256     assert(child_node);
257     if (child_node->AddParent(this)) {
258         object_bindings.insert(child_node);
259     }
260 }
261 
// Sever the parent/child link with child_node and stop tracking it in object_bindings.
void CMD_BUFFER_STATE::RemoveChild(BASE_NODE *child_node) {
    assert(child_node);
    child_node->RemoveParent(this);
    object_bindings.erase(child_node);
}
267 
268 // Reset the command buffer state
269 //  Maintain the createInfo and set state to CB_NEW, but clear all other state
Reset()270 void CMD_BUFFER_STATE::Reset() {
271     ResetUse();
272     // Reset CB state (note that createInfo is not cleared)
273     memset(&beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
274     memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
275     hasDrawCmd = false;
276     hasTraceRaysCmd = false;
277     hasBuildAccelerationStructureCmd = false;
278     hasDispatchCmd = false;
279     state = CB_NEW;
280     commandCount = 0;
281     submitCount = 0;
282     image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
283     status = 0;
284     static_status = 0;
285     inheritedViewportDepths.clear();
286     usedViewportScissorCount = 0;
287     pipelineStaticViewportCount = 0;
288     pipelineStaticScissorCount = 0;
289     viewportMask = 0;
290     viewportWithCountMask = 0;
291     viewportWithCountCount = 0;
292     scissorMask = 0;
293     scissorWithCountMask = 0;
294     scissorWithCountCount = 0;
295     trashedViewportMask = 0;
296     trashedScissorMask = 0;
297     trashedViewportCount = false;
298     trashedScissorCount = false;
299     usedDynamicViewportCount = false;
300     usedDynamicScissorCount = false;
301     primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;
302 
303     activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
304     activeRenderPass = nullptr;
305     active_attachments = nullptr;
306     active_subpasses = nullptr;
307     attachments_view_states.clear();
308     activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
309     activeSubpass = 0;
310     broken_bindings.clear();
311     waitedEvents.clear();
312     events.clear();
313     writeEventsBeforeWait.clear();
314     activeQueries.clear();
315     startedQueries.clear();
316     image_layout_map.clear();
317     current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
318     vertex_buffer_used = false;
319     primaryCommandBuffer = VK_NULL_HANDLE;
320 
321     linkedCommandBuffers.clear();
322     // Remove reverse command buffer links.
323     Invalidate(true);
324 
325     queue_submit_functions.clear();
326     queue_submit_functions_after_render_pass.clear();
327     cmd_execute_commands_functions.clear();
328     eventUpdates.clear();
329     queryUpdates.clear();
330 
331     // Remove object bindings
332     for (const auto &obj : object_bindings) {
333         obj->RemoveParent(this);
334     }
335     object_bindings.clear();
336 
337     for (auto &item : lastBound) {
338         item.Reset();
339     }
340     // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
341     for (auto &framebuffer : framebuffers) {
342         framebuffer->RemoveParent(this);
343     }
344     framebuffers.clear();
345     activeFramebuffer = VK_NULL_HANDLE;
346     index_buffer_binding.reset();
347 
348     qfo_transfer_image_barriers.Reset();
349     qfo_transfer_buffer_barriers.Reset();
350 
351     // Clean up the label data
352     debug_label.Reset();
353     validate_descriptorsets_in_queuesubmit.clear();
354 
355     // Best practices info
356     small_indexed_draw_call_count = 0;
357 
358     transform_feedback_active = false;
359 
360     // Remove object bindings
361     for (auto *base_obj : object_bindings) {
362         RemoveChild(base_obj);
363     }
364     object_bindings.clear();
365 
366     // Clean up the label data
367     ResetCmdDebugUtilsLabel(dev_data->report_data, commandBuffer());
368 
369     if (dev_data->command_buffer_reset_callback) {
370         (*dev_data->command_buffer_reset_callback)(commandBuffer());
371     }
372 }
373 
// Track which resources are in-flight by atomically incrementing their "in_use" count.
// Called at submit time: bumps this CB's submit count and the write_in_use count of
// every event this CB writes before waiting on it.
void CMD_BUFFER_STATE::IncrementResources() {
    submitCount++;

    // TODO : We should be able to remove the NULL look-up checks from the code below as long as
    //  all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    //  should then be flagged prior to calling this function
    for (auto event : writeEventsBeforeWait) {
        auto event_state = dev_data->Get<EVENT_STATE>(event);
        if (event_state) event_state->write_in_use++;
    }
}
386 
387 // Discussed in details in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
388 // Internal discussion and CTS were written to prove that this is not called after an incompatible vkCmdBindPipeline
389 // "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
390 //
391 // vkCmdBindDescriptorSet has nothing to do with push constants and don't need to call this after neither
392 //
393 // Part of this assumes apps at draw/dispath/traceRays/etc time will have it properly compatabile or else other VU will be triggered
ResetPushConstantDataIfIncompatible(const PIPELINE_LAYOUT_STATE * pipeline_layout_state)394 void CMD_BUFFER_STATE::ResetPushConstantDataIfIncompatible(const PIPELINE_LAYOUT_STATE *pipeline_layout_state) {
395     if (pipeline_layout_state == nullptr) {
396         return;
397     }
398     if (push_constant_data_ranges == pipeline_layout_state->push_constant_ranges) {
399         return;
400     }
401 
402     push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
403     push_constant_data.clear();
404     push_constant_data_update.clear();
405     uint32_t size_needed = 0;
406     for (const auto &push_constant_range : *push_constant_data_ranges) {
407         auto size = push_constant_range.offset + push_constant_range.size;
408         size_needed = std::max(size_needed, size);
409 
410         auto stage_flags = push_constant_range.stageFlags;
411         uint32_t bit_shift = 0;
412         while (stage_flags) {
413             if (stage_flags & 1) {
414                 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
415                 const auto it = push_constant_data_update.find(flag);
416 
417                 if (it != push_constant_data_update.end()) {
418                     if (it->second.size() < push_constant_range.offset) {
419                         it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
420                     }
421                     if (it->second.size() < size) {
422                         it->second.resize(size, PC_Byte_Not_Updated);
423                     }
424                 } else {
425                     std::vector<uint8_t> bytes;
426                     bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
427                     bytes.resize(size, PC_Byte_Not_Updated);
428                     push_constant_data_update[flag] = bytes;
429                 }
430             }
431             stage_flags = stage_flags >> 1;
432             ++bit_shift;
433         }
434     }
435     push_constant_data.resize(size_needed, 0);
436 }
437 
// Destroy this command buffer's tracked state (vkFreeCommandBuffers / pool destruction).
// Fires the reset and free callbacks (if registered) so derived trackers can clean up,
// erases debug-utils labels, resets all recorded state, then destroys the base node.
void CMD_BUFFER_STATE::Destroy() {
    // Allow any derived class to clean up command buffer state
    if (dev_data->command_buffer_reset_callback) {
        (*dev_data->command_buffer_reset_callback)(commandBuffer());
    }
    if (dev_data->command_buffer_free_callback) {
        (*dev_data->command_buffer_free_callback)(commandBuffer());
    }

    // Remove the cb debug labels
    EraseCmdDebugUtilsLabel(dev_data->report_data, commandBuffer());
    Reset();
    BASE_NODE::Destroy();
}
452 
// Called when a bound/linked object is destroyed or otherwise invalidated.
// Moves this CB into the corresponding invalid state, records the offending
// object chain in broken_bindings (for later error reporting), and — when
// 'unlink' — drops all direct references to the invalidated objects.
void CMD_BUFFER_STATE::NotifyInvalidate(const BASE_NODE::NodeList &invalid_nodes, bool unlink) {
    if (state == CB_RECORDING) {
        state = CB_INVALID_INCOMPLETE;
    } else if (state == CB_RECORDED) {
        state = CB_INVALID_COMPLETE;
    }
    assert(!invalid_nodes.empty());
    // Keep the whole invalidation chain so the eventual error message can name
    // every object involved; keyed by the root (first) node's handle.
    LogObjectList log_list;
    for (auto *obj : invalid_nodes) {
        log_list.object_list.emplace_back(obj->Handle());
    }
    broken_bindings.emplace(invalid_nodes[0]->Handle(), log_list);

    if (unlink) {
        for (auto *obj : invalid_nodes) {
            object_bindings.erase(obj);
            // Some object types are also tracked in dedicated maps; purge those too.
            switch (obj->Type()) {
                case kVulkanObjectTypeCommandBuffer:
                    linkedCommandBuffers.erase(static_cast<CMD_BUFFER_STATE *>(obj));
                    break;
                case kVulkanObjectTypeImage:
                    image_layout_map.erase(static_cast<IMAGE_STATE *>(obj));
                    break;
                default:
                    break;
            }
        }
    }
    // Propagate to parents (e.g. primary CBs that executed this secondary).
    BASE_NODE::NotifyInvalidate(invalid_nodes, unlink);
}
483 
// Return the full per-image subresource layout map recorded for this command buffer.
const CommandBufferImageLayoutMap& CMD_BUFFER_STATE::GetImageSubresourceLayoutMap() const { return image_layout_map; }
485 
486 // The const variant only need the image as it is the key for the map
GetImageSubresourceLayoutMap(const IMAGE_STATE & image_state) const487 const ImageSubresourceLayoutMap *CMD_BUFFER_STATE::GetImageSubresourceLayoutMap(const IMAGE_STATE &image_state) const {
488     auto it = image_layout_map.find(&image_state);
489     if (it == image_layout_map.cend()) {
490         return nullptr;
491     }
492     return &it->second;
493 }
494 
// The non-const variant only needs the image state, as the factory requires it to construct a new entry
ImageSubresourceLayoutMap *CMD_BUFFER_STATE::GetImageSubresourceLayoutMap(const IMAGE_STATE &image_state) {
    // operator[] default-constructs an empty entry on first lookup for this image
    auto &layout_map = image_layout_map[&image_state];
    if (!layout_map) {
        // Was an empty slot... fill it in.
        // NOTE(review): the mapped type appears optional-like (bool test + emplace) — confirm against its declaration
        layout_map.emplace(image_state);
    }
    return &layout_map;
}
504 
SetQueryState(QueryObject object,QueryState value,QueryMap * localQueryToStateMap)505 static bool SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
506     (*localQueryToStateMap)[object] = value;
507     return false;
508 }
509 
// Record vkCmdBeginQuery state: mark the query active and started now, and defer
// the QUERYSTATE_RUNNING transition to queue-submit time, when the performance
// query pass index (perfQueryPass) is actually known.
void CMD_BUFFER_STATE::BeginQuery(const QueryObject &query_obj) {
    activeQueries.insert(query_obj);
    startedQueries.insert(query_obj);
    queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                          VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass, QueryMap *localQueryToStateMap) {
        SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
        return false;
    });
}
519 
// Record vkCmdEndQuery state: the query is no longer active in this CB; the
// QUERYSTATE_ENDED transition is deferred to queue-submit time (per perf pass).
void CMD_BUFFER_STATE::EndQuery(const QueryObject &query_obj) {
    activeQueries.erase(query_obj);
    queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
                                          VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass, QueryMap *localQueryToStateMap) {
        return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
    });
}
527 
SetQueryStateMulti(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,uint32_t perfPass,QueryState value,QueryMap * localQueryToStateMap)528 static bool SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass, QueryState value,
529                                QueryMap *localQueryToStateMap) {
530     for (uint32_t i = 0; i < queryCount; i++) {
531         QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
532         (*localQueryToStateMap)[object] = value;
533     }
534     return false;
535 }
536 
EndQueries(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)537 void CMD_BUFFER_STATE::EndQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {
538     for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
539         QueryObject query = {queryPool, slot};
540         activeQueries.erase(query);
541     }
542     queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data, bool do_validate,
543                                                                   VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
544                                                                   QueryMap *localQueryToStateMap) {
545         return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_ENDED, localQueryToStateMap);
546     });
547 }
548 
// Record vkCmdResetQueryPool: remember which queries were reset in this CB and
// defer the QUERYSTATE_RESET transitions to queue-submit time (per perf pass).
void CMD_BUFFER_STATE::ResetQueryPool(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {
    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        resetQueries.insert(query);
    }

    queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data, bool do_validate,
                                                                  VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
                                                                  QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
    });
}
561 
UpdateSubpassAttachments(const safe_VkSubpassDescription2 & subpass,std::vector<SUBPASS_INFO> & subpasses)562 void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
563     for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
564         const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
565         if (attachment_index != VK_ATTACHMENT_UNUSED) {
566             subpasses[attachment_index].used = true;
567             subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
568             subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
569         }
570     }
571 
572     for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
573         const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
574         if (attachment_index != VK_ATTACHMENT_UNUSED) {
575             subpasses[attachment_index].used = true;
576             subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
577             subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
578         }
579         if (subpass.pResolveAttachments) {
580             const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
581             if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
582                 subpasses[attachment_index2].used = true;
583                 subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
584                 subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
585             }
586         }
587     }
588 
589     if (subpass.pDepthStencilAttachment) {
590         const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
591         if (attachment_index != VK_ATTACHMENT_UNUSED) {
592             subpasses[attachment_index].used = true;
593             subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
594             subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
595         }
596     }
597 }
598 
// Populate *active_attachments with the IMAGE_VIEW_STATE pointers for the bound framebuffer.
// Precondition: activeFramebuffer and active_attachments are non-null — the caller
// (BeginRenderPass) only invokes this inside its `if (activeFramebuffer)` branch.
// For imageless framebuffers the views come from the VkRenderPassAttachmentBeginInfo
// chained on pRenderPassBegin; otherwise from the framebuffer's own attachment list.
void CMD_BUFFER_STATE::UpdateAttachmentsView(const VkRenderPassBeginInfo *pRenderPassBegin) {
    auto &attachments = *(active_attachments.get());
    const bool imageless = (activeFramebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) ? true : false;
    const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
    if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);

    for (uint32_t i = 0; i < attachments.size(); ++i) {
        if (imageless) {
            if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
                // attachments_view_states keeps a reference alive; store the raw pointer for fast access
                auto res = attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
                attachments[i] = res.first->get();
            }
        } else {
            auto res = attachments_view_states.insert(activeFramebuffer->attachments_view_state[i]);
            attachments[i] = res.first->get();
        }
    }
}
617 
// Record vkCmdBeginRenderPass[2]: latch the render pass, framebuffer, begin info and
// device mask, then build the per-subpass usage info and active attachment views.
void CMD_BUFFER_STATE::BeginRenderPass(CMD_TYPE cmd_type, const VkRenderPassBeginInfo *pRenderPassBegin,
                                       const VkSubpassContents contents) {
    RecordCmd(cmd_type);
    activeFramebuffer = dev_data->Get<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer);
    activeRenderPass = dev_data->Get<RENDER_PASS_STATE>(pRenderPassBegin->renderPass);
    activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
    activeSubpass = 0;
    activeSubpassContents = contents;

    // Connect this RP to cmdBuffer
    if (!dev_data->disabled[command_buffer_state] && activeRenderPass) {
        AddChild(activeRenderPass.get());
    }

    // Device-group rendering may restrict this render pass to a subset of devices
    auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
    if (chained_device_group_struct) {
        active_render_pass_device_mask = chained_device_group_struct->deviceMask;
    } else {
        active_render_pass_device_mask = initial_device_mask;
    }

    active_subpasses = nullptr;
    active_attachments = nullptr;

    if (activeFramebuffer) {
        framebuffers.insert(activeFramebuffer);

        // Set cb_state->active_subpasses
        active_subpasses = std::make_shared<std::vector<SUBPASS_INFO>>(activeFramebuffer->createInfo.attachmentCount);
        const auto &subpass = activeRenderPass->createInfo.pSubpasses[activeSubpass];
        UpdateSubpassAttachments(subpass, *active_subpasses);

        // Set cb_state->active_attachments & cb_state->attachments_view_states
        active_attachments = std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(activeFramebuffer->createInfo.attachmentCount);
        UpdateAttachmentsView(pRenderPassBegin);

        // Connect this framebuffer and its children to this cmdBuffer
        AddChild(activeFramebuffer.get());
    }
}
658 
NextSubpass(CMD_TYPE cmd_type,VkSubpassContents contents)659 void CMD_BUFFER_STATE::NextSubpass(CMD_TYPE cmd_type, VkSubpassContents contents) {
660     RecordCmd(cmd_type);
661     activeSubpass++;
662     activeSubpassContents = contents;
663 
664     // Update cb_state->active_subpasses
665     if (activeRenderPass && activeFramebuffer) {
666         active_subpasses = nullptr;
667         active_subpasses = std::make_shared<std::vector<SUBPASS_INFO>>(activeFramebuffer->createInfo.attachmentCount);
668 
669         const auto &subpass = activeRenderPass->createInfo.pSubpasses[activeSubpass];
670         UpdateSubpassAttachments(subpass, *active_subpasses);
671     }
672 }
673 
EndRenderPass(CMD_TYPE cmd_type)674 void CMD_BUFFER_STATE::EndRenderPass(CMD_TYPE cmd_type) {
675     RecordCmd(cmd_type);
676     activeRenderPass = nullptr;
677     active_attachments = nullptr;
678     active_subpasses = nullptr;
679     activeSubpass = 0;
680     activeFramebuffer = VK_NULL_HANDLE;
681 }
682 
BeginRendering(CMD_TYPE cmd_type,const VkRenderingInfoKHR * pRenderingInfo)683 void CMD_BUFFER_STATE::BeginRendering(CMD_TYPE cmd_type, const VkRenderingInfoKHR *pRenderingInfo) {
684     RecordCmd(cmd_type);
685     activeRenderPass = std::make_shared<RENDER_PASS_STATE>(pRenderingInfo);
686 
687     auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderingInfo->pNext);
688     if (chained_device_group_struct) {
689         active_render_pass_device_mask = chained_device_group_struct->deviceMask;
690     } else {
691         active_render_pass_device_mask = initial_device_mask;
692     }
693 
694     activeSubpassContents = ((pRenderingInfo->flags & VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR) ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS : VK_SUBPASS_CONTENTS_INLINE);
695 
696     active_attachments = nullptr;
697     uint32_t attachment_count = (pRenderingInfo->colorAttachmentCount + 2) * 2;
698 
699     // Set cb_state->active_attachments & cb_state->attachments_view_states
700     active_attachments = std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(attachment_count);
701     auto &attachments = *(active_attachments.get());
702 
703     for (uint32_t i = 0; i < pRenderingInfo->colorAttachmentCount; ++i) {
704         auto& colorAttachment = attachments[GetDynamicColorAttachmentImageIndex(i)];
705         auto& colorResolveAttachment = attachments[GetDynamicColorResolveAttachmentImageIndex(i)];
706         colorAttachment = nullptr;
707         colorResolveAttachment = nullptr;
708 
709         if (pRenderingInfo->pColorAttachments[i].imageView != VK_NULL_HANDLE) {
710             auto res =
711                 attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(pRenderingInfo->pColorAttachments[i].imageView));
712             colorAttachment = res.first->get();
713             if (pRenderingInfo->pColorAttachments[i].resolveMode != VK_RESOLVE_MODE_NONE &&
714                 pRenderingInfo->pColorAttachments[i].resolveImageView != VK_NULL_HANDLE) {
715                 colorResolveAttachment = res.first->get();
716             }
717         }
718     }
719 
720     if (pRenderingInfo->pDepthAttachment && pRenderingInfo->pDepthAttachment->imageView != VK_NULL_HANDLE) {
721         auto& depthAttachment = attachments[GetDynamicDepthAttachmentImageIndex()];
722         auto& depthResolveAttachment = attachments[GetDynamicDepthResolveAttachmentImageIndex()];
723         depthAttachment = nullptr;
724         depthResolveAttachment = nullptr;
725 
726         auto res = attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(pRenderingInfo->pDepthAttachment->imageView));
727         depthAttachment = res.first->get();
728         if (pRenderingInfo->pDepthAttachment->resolveMode != VK_RESOLVE_MODE_NONE &&
729             pRenderingInfo->pDepthAttachment->resolveImageView != VK_NULL_HANDLE) {
730             depthResolveAttachment = res.first->get();
731         }
732     }
733 
734     if (pRenderingInfo->pStencilAttachment && pRenderingInfo->pStencilAttachment->imageView != VK_NULL_HANDLE) {
735         auto& stencilAttachment = attachments[GetDynamicStencilAttachmentImageIndex()];
736         auto& stencilResolveAttachment = attachments[GetDynamicStencilResolveAttachmentImageIndex()];
737         stencilAttachment = nullptr;
738         stencilResolveAttachment = nullptr;
739 
740         auto res = attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(pRenderingInfo->pStencilAttachment->imageView));
741         stencilAttachment = res.first->get();
742         if (pRenderingInfo->pStencilAttachment->resolveMode != VK_RESOLVE_MODE_NONE &&
743             pRenderingInfo->pStencilAttachment->resolveImageView != VK_NULL_HANDLE) {
744             stencilResolveAttachment = res.first->get();
745         }
746     }
747 }
748 
Begin(const VkCommandBufferBeginInfo * pBeginInfo)749 void CMD_BUFFER_STATE::Begin(const VkCommandBufferBeginInfo *pBeginInfo) {
750     if (CB_RECORDED == state || CB_INVALID_COMPLETE == state) {
751         Reset();
752     }
753     // Set updated state here in case implicit reset occurs above
754     state = CB_RECORDING;
755     beginInfo = *pBeginInfo;
756     if (beginInfo.pInheritanceInfo && (createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
757         inheritanceInfo = *(beginInfo.pInheritanceInfo);
758         beginInfo.pInheritanceInfo = &inheritanceInfo;
759         // If we are a secondary command-buffer and inheriting.  Update the items we should inherit.
760         if ((createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
761             (beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
762             if (beginInfo.pInheritanceInfo->renderPass) {
763                 activeRenderPass = dev_data->Get<RENDER_PASS_STATE>(beginInfo.pInheritanceInfo->renderPass);
764                 activeSubpass = beginInfo.pInheritanceInfo->subpass;
765 
766                 if (beginInfo.pInheritanceInfo->framebuffer) {
767                     activeFramebuffer = dev_data->Get<FRAMEBUFFER_STATE>(beginInfo.pInheritanceInfo->framebuffer);
768                     active_subpasses = nullptr;
769                     active_attachments = nullptr;
770 
771                     if (activeFramebuffer) {
772                         framebuffers.insert(activeFramebuffer);
773 
774                         // Set active_subpasses
775                         active_subpasses = std::make_shared<std::vector<SUBPASS_INFO>>(activeFramebuffer->createInfo.attachmentCount);
776                         const auto& subpass = activeRenderPass->createInfo.pSubpasses[activeSubpass];
777                         UpdateSubpassAttachments(subpass, *active_subpasses);
778 
779                         // Set active_attachments & attachments_view_states
780                         active_attachments =
781                             std::make_shared<std::vector<IMAGE_VIEW_STATE*>>(activeFramebuffer->createInfo.attachmentCount);
782                         UpdateAttachmentsView(nullptr);
783 
784                         // Connect this framebuffer and its children to this cmdBuffer
785                         if (!dev_data->disabled[command_buffer_state]) {
786                             AddChild(activeFramebuffer.get());
787                         }
788                     }
789                 }
790             }
791             else
792             {
793                 auto inheritance_rendering_info = lvl_find_in_chain<VkCommandBufferInheritanceRenderingInfoKHR>(beginInfo.pInheritanceInfo->pNext);
794                 if (inheritance_rendering_info) {
795                     activeRenderPass = std::make_shared<RENDER_PASS_STATE>(inheritance_rendering_info);
796                 }
797             }
798 
799             // Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
800             auto p_inherited_viewport_scissor_info =
801                 LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(beginInfo.pInheritanceInfo->pNext);
802             if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
803                 auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
804                 inheritedViewportDepths.assign(pViewportDepths,
805                                                pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
806             }
807         }
808     }
809 
810     auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
811     if (chained_device_group_struct) {
812         initial_device_mask = chained_device_group_struct->deviceMask;
813     } else {
814         initial_device_mask = (1 << dev_data->physical_device_count) - 1;
815     }
816     performance_lock_acquired = dev_data->performance_lock_acquired;
817 }
818 
End(VkResult result)819 void CMD_BUFFER_STATE::End(VkResult result) {
820     // Cached validation is specific to a specific recording of a specific command buffer.
821     descriptorset_cache.clear();
822     validated_descriptor_sets.clear();
823     if (VK_SUCCESS == result) {
824         state = CB_RECORDED;
825     }
826 }
827 
// Record state for vkCmdExecuteCommands: link each secondary command buffer's
// state into this primary command buffer and propagate its tracked state
// (image layouts, query/queue-submit callbacks) upward.
void CMD_BUFFER_STATE::ExecuteCommands(uint32_t commandBuffersCount, const VkCommandBuffer *pCommandBuffers) {
    RecordCmd(CMD_EXECUTECOMMANDS);
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        auto sub_cb_state = dev_data->Get<CMD_BUFFER_STATE>(pCommandBuffers[i]);
        assert(sub_cb_state);
        // Executing a non-simultaneous-use secondary clears this primary's
        // simultaneous-use flag (it can no longer be pending more than once)
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto *image_state = sub_layout_map_entry.first;

            auto *cb_subres_map = GetImageSubresourceLayoutMap(*image_state);
            const auto *sub_cb_subres_map = &sub_layout_map_entry.second;
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        // Link the secondary into this primary's state graph
        sub_cb_state->primaryCommandBuffer = commandBuffer();
        linkedCommandBuffers.insert(sub_cb_state.get());
        AddChild(sub_cb_state.get());
        // Inherit the secondary's deferred query-update and queue-submit work
        for (auto &function : sub_cb_state->queryUpdates) {
            queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            queue_submit_functions.push_back(function);
        }

        // State is trashed after executing secondary command buffers.
        // Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
        trashedViewportMask = ~uint32_t(0);
        trashedScissorMask = ~uint32_t(0);
        trashedViewportCount = true;
        trashedScissorCount = true;
    }
}
872 
// Record state for vkCmdPushDescriptorSetKHR: (re)create the transient push
// descriptor set for this bind point / set index if needed, bind it, and apply
// the descriptor writes to it.
void CMD_BUFFER_STATE::PushDescriptorSetState(VkPipelineBindPoint pipelineBindPoint, PIPELINE_LAYOUT_STATE *pipeline_layout,
                                              uint32_t set, uint32_t descriptorWriteCount,
                                              const VkWriteDescriptorSet *pDescriptorWrites) {
    // Short circuit invalid updates: `set` must refer to a valid push-descriptor
    // layout within the pipeline layout
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
        return;
    }

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto &dsl = pipeline_layout->set_layouts[set];
    const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
    auto &last_bound = lastBound[lv_bind_point];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set clear it and install a
    // fresh set built from the new layout
    // NOTE(review): the raw `new` appears to transfer ownership to
    // UnbindAndResetPushDescriptorSet (push_descriptor_set is a smart pointer) — confirm
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(this, new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, dev_data));
    }

    // Bind the (new or extant) push descriptor set at index `set`
    UpdateLastBoundDescriptorSets(pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0, nullptr);
    last_bound.pipeline_layout = pipeline_layout->layout();

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(dev_data, descriptorWriteCount, pDescriptorWrites);
}
898 
899 // Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
UpdateStateCmdDrawDispatchType(CMD_TYPE cmd_type,VkPipelineBindPoint bind_point)900 void CMD_BUFFER_STATE::UpdateStateCmdDrawDispatchType(CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
901     UpdateDrawState(cmd_type, bind_point);
902     hasDispatchCmd = true;
903 }
904 
905 // Generic function to handle state update for all CmdDraw* type functions
UpdateStateCmdDrawType(CMD_TYPE cmd_type,VkPipelineBindPoint bind_point)906 void CMD_BUFFER_STATE::UpdateStateCmdDrawType(CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
907     UpdateStateCmdDrawDispatchType(cmd_type, bind_point);
908     hasDrawCmd = true;
909 
910     // Update the consumed viewport/scissor count.
911     uint32_t &used = usedViewportScissorCount;
912     used = std::max(used, pipelineStaticViewportCount);
913     used = std::max(used, pipelineStaticScissorCount);
914     usedDynamicViewportCount |= !!(dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET);  // !! silences MSVC warn
915     usedDynamicScissorCount |= !!(dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
916 }
917 
// Record state for a draw/dispatch: bring per-set descriptor validation caches
// up to date for the pipeline bound at `bind_point`, binding each active
// descriptor set's resources to this command buffer only when something changed
// since the last validation.
void CMD_BUFFER_STATE::UpdateDrawState(CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point) {
    RecordCmd(cmd_type);

    const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
    auto &state = lastBound[lv_bind_point];
    // NOTE(review): `pipe` is dereferenced below whenever a pipeline layout is
    // bound — this assumes a bound layout implies a bound pipeline; confirm callers
    PIPELINE_STATE *pipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pipe->active_slots) {
            uint32_t set_index = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[set_index].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
            // Validate pass.  Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*this, *pipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*this, *pipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[set_index].validated_set != descriptor_set ||
                state.per_set[set_index].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!dev_data->disabled[image_layout_validation] &&
                 state.per_set[set_index].validated_set_image_layout_change_count != image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[set_index].validated_set_binding_req_map.begin(),
                                              state.per_set[set_index].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[set_index].validated_set_binding_req_map.begin(),
                                        state.per_set[set_index].validated_set_binding_req_map.end(),
                                        layer_data::insert_iterator<BindingReqMap>(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(dev_data, this, cmd_type, pipe, delta_reqs);
                } else {
                    descriptor_set->UpdateDrawState(dev_data, this, cmd_type, pipe, binding_req_map);
                }

                // Remember what was just validated so unchanged re-draws can skip work
                state.per_set[set_index].validated_set = descriptor_set;
                state.per_set[set_index].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[set_index].validated_set_image_layout_change_count = image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[set_index].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[set_index].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[set_index].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    // Track that vertex buffers were consumed by this pipeline's draw
    if (pipe && !pipe->vertex_binding_descriptions_.empty()) {
        vertex_buffer_used = true;
    }
}
990 
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
// Sets outside [first_set, first_set + set_count) that are incompatible with the
// new layout are "disturbed" and invalidated; a disturbed push descriptor set is
// released via push_descriptor_cleanup. Dynamic offsets are consumed from
// p_dynamic_offsets in set order.
void CMD_BUFFER_STATE::UpdateLastBoundDescriptorSets(VkPipelineBindPoint pipeline_bind_point,
                                                     const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                     uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                     cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                     uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    const auto lv_bind_point = ConvertToLvlBindPoint(pipeline_bind_point);
    auto &last_bound = lastBound[lv_bind_point];
    last_bound.pipeline_layout = pipeline_layout->layout();
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    // Resize binding arrays
    uint32_t last_set_index = first_set + set_count - 1;
    if (last_set_index >= last_bound.per_set.size()) {
        last_bound.per_set.resize(last_set_index + 1);
    }
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    // Releases the push descriptor set if `ds` is it; returns true when released
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        // need to hold a reference for the iteration of the loop if we use Get<>.
        std::shared_ptr<cvdescriptorset::DescriptorSet> shared_ds;
        cvdescriptorset::DescriptorSet *descriptor_set;
        if (!push_descriptor_set) {
            shared_ds = dev_data->Get<cvdescriptorset::DescriptorSet>(pDescriptorSets[input_idx]);
            descriptor_set = shared_ds.get();
        } else {
            descriptor_set = push_descriptor_set;
        }

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's share of the flat dynamic-offset array
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
1098 
1099 // Set image layout for given VkImageSubresourceRange struct
SetImageLayout(const IMAGE_STATE & image_state,const VkImageSubresourceRange & image_subresource_range,VkImageLayout layout,VkImageLayout expected_layout)1100 void CMD_BUFFER_STATE::SetImageLayout(const IMAGE_STATE &image_state, const VkImageSubresourceRange &image_subresource_range,
1101                                       VkImageLayout layout, VkImageLayout expected_layout) {
1102     auto *subresource_map = GetImageSubresourceLayoutMap(image_state);
1103     assert(subresource_map);  // the non-const getter must return a valid pointer
1104     if (subresource_map->SetSubresourceRangeLayout(*this, image_subresource_range, layout, expected_layout)) {
1105         image_layout_change_count++;  // Change the version of this data to force revalidation
1106     }
1107     for (const auto *alias_state : image_state.aliasing_images) {
1108         assert(alias_state);
1109         // The map state of the aliases should all be in sync, so no need to check the return value
1110         subresource_map = GetImageSubresourceLayoutMap(*alias_state);
1111         assert(subresource_map);
1112         subresource_map->SetSubresourceRangeLayout(*this, image_subresource_range, layout, expected_layout);
1113     }
1114 }
1115 
1116 // Set the initial image layout for all slices of an image view
SetImageViewInitialLayout(const IMAGE_VIEW_STATE & view_state,VkImageLayout layout)1117 void CMD_BUFFER_STATE::SetImageViewInitialLayout(const IMAGE_VIEW_STATE &view_state, VkImageLayout layout) {
1118     if (dev_data->disabled[image_layout_validation]) {
1119         return;
1120     }
1121     IMAGE_STATE *image_state = view_state.image_state.get();
1122     auto *subresource_map = GetImageSubresourceLayoutMap(*image_state);
1123     subresource_map->SetSubresourceRangeInitialLayout(*this, layout, view_state);
1124     for (const auto *alias_state : image_state->aliasing_images) {
1125         assert(alias_state);
1126         subresource_map = GetImageSubresourceLayoutMap(*alias_state);
1127         subresource_map->SetSubresourceRangeInitialLayout(*this, layout, view_state);
1128     }
1129 }
1130 
1131 // Set the initial image layout for a passed non-normalized subresource range
SetImageInitialLayout(const IMAGE_STATE & image_state,const VkImageSubresourceRange & range,VkImageLayout layout)1132 void CMD_BUFFER_STATE::SetImageInitialLayout(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range,
1133                                              VkImageLayout layout) {
1134     auto *subresource_map = GetImageSubresourceLayoutMap(image_state);
1135     assert(subresource_map);
1136     subresource_map->SetSubresourceRangeInitialLayout(*this, image_state.NormalizeSubresourceRange(range), layout);
1137     for (const auto *alias_state : image_state.aliasing_images) {
1138         assert(alias_state);
1139         subresource_map = GetImageSubresourceLayoutMap(*alias_state);
1140         assert(subresource_map);
1141         subresource_map->SetSubresourceRangeInitialLayout(*this, alias_state->NormalizeSubresourceRange(range), layout);
1142     }
1143 }
1144 
SetImageInitialLayout(VkImage image,const VkImageSubresourceRange & range,VkImageLayout layout)1145 void CMD_BUFFER_STATE::SetImageInitialLayout(VkImage image, const VkImageSubresourceRange &range, VkImageLayout layout) {
1146     const auto image_state = dev_data->Get<IMAGE_STATE>(image);
1147     if (!image_state) return;
1148     SetImageInitialLayout(*image_state, range, layout);
1149 }
1150 
SetImageInitialLayout(const IMAGE_STATE & image_state,const VkImageSubresourceLayers & layers,VkImageLayout layout)1151 void CMD_BUFFER_STATE::SetImageInitialLayout(const IMAGE_STATE &image_state, const VkImageSubresourceLayers &layers,
1152                                              VkImageLayout layout) {
1153     SetImageInitialLayout(image_state, RangeFromLayers(layers), layout);
1154 }
1155 
1156 // Set image layout for all slices of an image view
SetImageViewLayout(const IMAGE_VIEW_STATE & view_state,VkImageLayout layout,VkImageLayout layoutStencil)1157 void CMD_BUFFER_STATE::SetImageViewLayout(const IMAGE_VIEW_STATE &view_state, VkImageLayout layout, VkImageLayout layoutStencil) {
1158     const IMAGE_STATE *image_state = view_state.image_state.get();
1159 
1160     VkImageSubresourceRange sub_range = view_state.normalized_subresource_range;
1161 
1162     if (sub_range.aspectMask == (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT) && layoutStencil != kInvalidLayout) {
1163         sub_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1164         SetImageLayout(*image_state, sub_range, layout);
1165         sub_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1166         SetImageLayout(*image_state, sub_range, layoutStencil);
1167     } else {
1168         SetImageLayout(*image_state, sub_range, layout);
1169     }
1170 }
1171 
RecordCmd(CMD_TYPE cmd_type)1172 void CMD_BUFFER_STATE::RecordCmd(CMD_TYPE cmd_type) { commandCount++; }
1173 
RecordStateCmd(CMD_TYPE cmd_type,CBStatusFlags state_bits)1174 void CMD_BUFFER_STATE::RecordStateCmd(CMD_TYPE cmd_type, CBStatusFlags state_bits) {
1175     RecordCmd(cmd_type);
1176     status |= state_bits;
1177     static_status &= ~state_bits;
1178 }
1179 
RecordTransferCmd(CMD_TYPE cmd_type,std::shared_ptr<BINDABLE> && buf1,std::shared_ptr<BINDABLE> && buf2)1180 void CMD_BUFFER_STATE::RecordTransferCmd(CMD_TYPE cmd_type, std::shared_ptr<BINDABLE> &&buf1, std::shared_ptr<BINDABLE> &&buf2) {
1181     RecordCmd(cmd_type);
1182     if (buf1) {
1183         AddChild(buf1.get());
1184     }
1185     if (buf2) {
1186         AddChild(buf2.get());
1187     }
1188 }
1189 
SetEventStageMask(VkEvent event,VkPipelineStageFlags2KHR stageMask,EventToStageMap * localEventToStageMap)1190 static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags2KHR stageMask, EventToStageMap *localEventToStageMap) {
1191     (*localEventToStageMap)[event] = stageMask;
1192     return false;
1193 }
1194 
RecordSetEvent(CMD_TYPE cmd_type,VkEvent event,VkPipelineStageFlags2KHR stageMask)1195 void CMD_BUFFER_STATE::RecordSetEvent(CMD_TYPE cmd_type, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
1196     RecordCmd(cmd_type);
1197     if (!dev_data->disabled[command_buffer_state]) {
1198         auto event_state = dev_data->Get<EVENT_STATE>(event);
1199         if (event_state) {
1200             AddChild(event_state.get());
1201         }
1202     }
1203     events.push_back(event);
1204     if (!waitedEvents.count(event)) {
1205         writeEventsBeforeWait.push_back(event);
1206     }
1207     eventUpdates.emplace_back(
1208         [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
1209             return SetEventStageMask(event, stageMask, localEventToStageMap);
1210         });
1211 }
1212 
RecordResetEvent(CMD_TYPE cmd_type,VkEvent event,VkPipelineStageFlags2KHR stageMask)1213 void CMD_BUFFER_STATE::RecordResetEvent(CMD_TYPE cmd_type, VkEvent event, VkPipelineStageFlags2KHR stageMask) {
1214     RecordCmd(cmd_type);
1215     if (!dev_data->disabled[command_buffer_state]) {
1216         auto event_state = dev_data->Get<EVENT_STATE>(event);
1217         if (event_state) {
1218             AddChild(event_state.get());
1219         }
1220     }
1221     events.push_back(event);
1222     if (!waitedEvents.count(event)) {
1223         writeEventsBeforeWait.push_back(event);
1224     }
1225 
1226     eventUpdates.emplace_back([event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
1227         return SetEventStageMask(event, VkPipelineStageFlags2KHR(0), localEventToStageMap);
1228     });
1229 }
1230 
RecordWaitEvents(CMD_TYPE cmd_type,uint32_t eventCount,const VkEvent * pEvents)1231 void CMD_BUFFER_STATE::RecordWaitEvents(CMD_TYPE cmd_type, uint32_t eventCount, const VkEvent *pEvents) {
1232     RecordCmd(cmd_type);
1233     for (uint32_t i = 0; i < eventCount; ++i) {
1234         if (!dev_data->disabled[command_buffer_state]) {
1235             auto event_state = dev_data->Get<EVENT_STATE>(pEvents[i]);
1236             if (event_state) {
1237                 AddChild(event_state.get());
1238             }
1239         }
1240         waitedEvents.insert(pEvents[i]);
1241         events.push_back(pEvents[i]);
1242     }
1243 }
1244 
RecordBarriers(uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)1245 void CMD_BUFFER_STATE::RecordBarriers(uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1246                                       uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
1247                                       uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
1248     if (dev_data->disabled[command_buffer_state]) return;
1249 
1250     for (uint32_t i = 0; i < bufferMemoryBarrierCount; i++) {
1251         auto buffer_state = dev_data->Get<BUFFER_STATE>(pBufferMemoryBarriers[i].buffer);
1252         if (buffer_state) {
1253             AddChild(buffer_state.get());
1254         }
1255     }
1256     for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
1257         auto image_state = dev_data->Get<IMAGE_STATE>(pImageMemoryBarriers[i].image);
1258         if (image_state) {
1259             AddChild(image_state.get());
1260         }
1261     }
1262 }
1263 
RecordBarriers(const VkDependencyInfoKHR & dep_info)1264 void CMD_BUFFER_STATE::RecordBarriers(const VkDependencyInfoKHR &dep_info) {
1265     if (dev_data->disabled[command_buffer_state]) return;
1266 
1267     for (uint32_t i = 0; i < dep_info.bufferMemoryBarrierCount; i++) {
1268         auto buffer_state = dev_data->Get<BUFFER_STATE>(dep_info.pBufferMemoryBarriers[i].buffer);
1269         if (buffer_state) {
1270             AddChild(buffer_state.get());
1271         }
1272     }
1273     for (uint32_t i = 0; i < dep_info.imageMemoryBarrierCount; i++) {
1274         auto image_state = dev_data->Get<IMAGE_STATE>(dep_info.pImageMemoryBarriers[i].image);
1275         if (image_state) {
1276             AddChild(image_state.get());
1277         }
1278     }
1279 }
1280 
RecordWriteTimestamp(CMD_TYPE cmd_type,VkPipelineStageFlags2KHR pipelineStage,VkQueryPool queryPool,uint32_t slot)1281 void CMD_BUFFER_STATE::RecordWriteTimestamp(CMD_TYPE cmd_type, VkPipelineStageFlags2KHR pipelineStage, VkQueryPool queryPool,
1282                                             uint32_t slot) {
1283     RecordCmd(cmd_type);
1284     if (dev_data->disabled[query_validation]) return;
1285 
1286     if (!dev_data->disabled[command_buffer_state]) {
1287         auto pool_state = dev_data->Get<QUERY_POOL_STATE>(queryPool);
1288         AddChild(pool_state.get());
1289     }
1290     QueryObject query = {queryPool, slot};
1291     EndQuery(query);
1292 }
1293 
Submit(uint32_t perf_submit_pass)1294 void CMD_BUFFER_STATE::Submit(uint32_t perf_submit_pass) {
1295     VkQueryPool first_pool = VK_NULL_HANDLE;
1296     EventToStageMap local_event_to_stage_map;
1297     QueryMap local_query_to_state_map;
1298     for (auto &function : queryUpdates) {
1299         function(nullptr, /*do_validate*/ false, first_pool, perf_submit_pass, &local_query_to_state_map);
1300     }
1301 
1302     for (const auto &query_state_pair : local_query_to_state_map) {
1303         auto query_pool_state = dev_data->Get<QUERY_POOL_STATE>(query_state_pair.first.pool);
1304         query_pool_state->SetQueryState(query_state_pair.first.query, query_state_pair.first.perf_pass, query_state_pair.second);
1305     }
1306 
1307     for (const auto &function : eventUpdates) {
1308         function(nullptr, /*do_validate*/ false, &local_event_to_stage_map);
1309     }
1310 
1311     for (const auto &eventStagePair : local_event_to_stage_map) {
1312         auto event_state = dev_data->Get<EVENT_STATE>(eventStagePair.first);
1313         event_state->stageMask = eventStagePair.second;
1314     }
1315 }
1316 
Retire(uint32_t perf_submit_pass)1317 void CMD_BUFFER_STATE::Retire(uint32_t perf_submit_pass) {
1318     // First perform decrement on general case bound objects
1319     for (auto event : writeEventsBeforeWait) {
1320         auto event_state = dev_data->Get<EVENT_STATE>(event);
1321         if (event_state) {
1322             event_state->write_in_use--;
1323         }
1324     }
1325     QueryMap local_query_to_state_map;
1326     VkQueryPool first_pool = VK_NULL_HANDLE;
1327     for (auto &function : queryUpdates) {
1328         function(nullptr, /*do_validate*/ false, first_pool, perf_submit_pass, &local_query_to_state_map);
1329     }
1330 
1331     for (const auto &query_state_pair : local_query_to_state_map) {
1332         if (query_state_pair.second == QUERYSTATE_ENDED) {
1333             auto query_pool_state = dev_data->Get<QUERY_POOL_STATE>(query_state_pair.first.pool);
1334             query_pool_state->SetQueryState(query_state_pair.first.query, query_state_pair.first.perf_pass, QUERYSTATE_AVAILABLE);
1335         }
1336     }
1337 }
1338