1 /* Copyright (c) 2015-2020 The Khronos Group Inc.
2 * Copyright (c) 2015-2020 Valve Corporation
3 * Copyright (c) 2015-2020 LunarG, Inc.
4 * Copyright (C) 2015-2020 Google Inc.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Cody Northrop <cnorthrop@google.com>
19 * Author: Michael Lentine <mlentine@google.com>
20 * Author: Tobin Ehlis <tobine@google.com>
21 * Author: Chia-I Wu <olv@google.com>
22 * Author: Chris Forbes <chrisf@ijw.co.nz>
23 * Author: Mark Lobodzinski <mark@lunarg.com>
24 * Author: Ian Elliott <ianelliott@google.com>
25 * Author: Dave Houlton <daveh@lunarg.com>
26 * Author: Dustin Graves <dustin@lunarg.com>
27 * Author: Jeremy Hayes <jeremy@lunarg.com>
28 * Author: Jon Ashburn <jon@lunarg.com>
29 * Author: Karl Schultz <karl@lunarg.com>
30 * Author: Mark Young <marky@lunarg.com>
31 * Author: Mike Schuchardt <mikes@lunarg.com>
32 * Author: Mike Weiblen <mikew@lunarg.com>
33 * Author: Tony Barbour <tony@LunarG.com>
34 * Author: John Zulauf <jzulauf@lunarg.com>
35 * Author: Shannon McPherson <shannon@lunarg.com>
36 * Author: Jeremy Kniager <jeremyk@lunarg.com>
37 */
38
39 #include <algorithm>
40 #include <array>
41 #include <assert.h>
42 #include <cmath>
43 #include <iostream>
44 #include <list>
45 #include <map>
46 #include <memory>
47 #include <mutex>
48 #include <set>
49 #include <sstream>
50 #include <stdio.h>
51 #include <stdlib.h>
52 #include <string.h>
53 #include <string>
54 #include <valarray>
55
56 #include "vk_loader_platform.h"
57 #include "vk_enum_string_helper.h"
58 #include "chassis.h"
59 #include "convert_to_renderpass2.h"
60 #include "core_validation.h"
61 #include "buffer_validation.h"
62 #include "shader_validation.h"
63 #include "vk_layer_utils.h"
64 #include "command_counter.h"
65
NormalizeImageLayout(VkImageLayout layout,VkImageLayout non_normal,VkImageLayout normal)66 static VkImageLayout NormalizeImageLayout(VkImageLayout layout, VkImageLayout non_normal, VkImageLayout normal) {
67 return (layout == non_normal) ? normal : layout;
68 }
69
// Normalization used when only the depth aspect is referenced (see ImageLayoutMatches):
// DEPTH_STENCIL_READ_ONLY_OPTIMAL is treated as DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL.
static VkImageLayout NormalizeDepthImageLayout(VkImageLayout layout) {
    return NormalizeImageLayout(layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
                                VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL);
}
74
// Normalization used when only the stencil aspect is referenced (see ImageLayoutMatches):
// DEPTH_STENCIL_READ_ONLY_OPTIMAL is treated as DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL.
static VkImageLayout NormalizeStencilImageLayout(VkImageLayout layout) {
    return NormalizeImageLayout(layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
                                VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL);
}
79
ImageLayoutMatches(const VkImageAspectFlags aspect_mask,VkImageLayout a,VkImageLayout b)80 bool ImageLayoutMatches(const VkImageAspectFlags aspect_mask, VkImageLayout a, VkImageLayout b) {
81 bool matches = (a == b);
82 if (!matches) {
83 // Relaxed rules when referencing *only* the depth or stencil aspects
84 if (aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT) {
85 matches = NormalizeDepthImageLayout(a) == NormalizeDepthImageLayout(b);
86 } else if (aspect_mask == VK_IMAGE_ASPECT_STENCIL_BIT) {
87 matches = NormalizeStencilImageLayout(a) == NormalizeStencilImageLayout(b);
88 }
89 }
90 return matches;
91 }
92
93 // These functions are defined *outside* the core_validation namespace as their type
94 // is also defined outside that namespace
hash() const95 size_t PipelineLayoutCompatDef::hash() const {
96 hash_util::HashCombiner hc;
97 // The set number is integral to the CompatDef's distinctiveness
98 hc << set << push_constant_ranges.get();
99 const auto &descriptor_set_layouts = *set_layouts_id.get();
100 for (uint32_t i = 0; i <= set; i++) {
101 hc << descriptor_set_layouts[i].get();
102 }
103 return hc.Value();
104 }
105
operator ==(const PipelineLayoutCompatDef & other) const106 bool PipelineLayoutCompatDef::operator==(const PipelineLayoutCompatDef &other) const {
107 if ((set != other.set) || (push_constant_ranges != other.push_constant_ranges)) {
108 return false;
109 }
110
111 if (set_layouts_id == other.set_layouts_id) {
112 // if it's the same set_layouts_id, then *any* subset will match
113 return true;
114 }
115
116 // They aren't exactly the same PipelineLayoutSetLayouts, so we need to check if the required subsets match
117 const auto &descriptor_set_layouts = *set_layouts_id.get();
118 assert(set < descriptor_set_layouts.size());
119 const auto &other_ds_layouts = *other.set_layouts_id.get();
120 assert(set < other_ds_layouts.size());
121 for (uint32_t i = 0; i <= set; i++) {
122 if (descriptor_set_layouts[i] != other_ds_layouts[i]) {
123 return false;
124 }
125 }
126 return true;
127 }
128
129 using std::max;
130 using std::string;
131 using std::stringstream;
132 using std::unique_ptr;
133 using std::unordered_map;
134 using std::unordered_set;
135 using std::vector;
136
137 // Get the global maps of pending releases
// Tag-dispatch overloads: the (unused) Tag parameter selects the image- vs. buffer-barrier map
// at compile time, so templated QFO-transfer code can fetch the right map generically.
const GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) const {
    return qfo_release_image_barrier_map;
}
const GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) const {
    return qfo_release_buffer_barrier_map;
}
// Non-const variants of the same tag-dispatch accessors.
GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
    return qfo_release_image_barrier_map;
}
GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
    const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
    return qfo_release_buffer_barrier_map;
}
154
LayoutMapFactory(const IMAGE_STATE & image_state)155 static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactory(const IMAGE_STATE &image_state) {
156 std::unique_ptr<ImageSubresourceLayoutMap> map(new ImageSubresourceLayoutMap(image_state));
157 return map;
158 }
159
// The const variant only needs the image as it is the key for the map
GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE * cb_state,VkImage image)161 const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image) {
162 auto it = cb_state->image_layout_map.find(image);
163 if (it == cb_state->image_layout_map.cend()) {
164 return nullptr;
165 }
166 return it->second.get();
167 }
168
169 // The non-const variant only needs the image state, as the factory requires it to construct a new entry
GetImageSubresourceLayoutMap(CMD_BUFFER_STATE * cb_state,const IMAGE_STATE & image_state)170 ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state) {
171 auto it = cb_state->image_layout_map.find(image_state.image);
172 if (it == cb_state->image_layout_map.end()) {
173 // Empty slot... fill it in.
174 auto insert_pair = cb_state->image_layout_map.insert(std::make_pair(image_state.image, LayoutMapFactory(image_state)));
175 assert(insert_pair.second);
176 ImageSubresourceLayoutMap *new_map = insert_pair.first->second.get();
177 assert(new_map);
178 return new_map;
179 }
180 return it->second.get();
181 }
182
AddInitialLayoutintoImageLayoutMap(const IMAGE_STATE & image_state,GlobalImageLayoutMap & image_layout_map)183 void AddInitialLayoutintoImageLayoutMap(const IMAGE_STATE &image_state, GlobalImageLayoutMap &image_layout_map) {
184 auto *range_map = GetLayoutRangeMap(&image_layout_map, image_state);
185 auto range_gen = subresource_adapter::RangeGenerator(image_state.subresource_encoder, image_state.full_range);
186 for (; range_gen->non_empty(); ++range_gen) {
187 range_map->insert(range_map->end(), std::make_pair(*range_gen, image_state.createInfo.initialLayout));
188 }
189 }
190
191 // Override base class, we have some extra work to do here
// Override base class, we have some extra work to do here
void CoreChecks::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        // VK_KHR_performance_query needs per-command-buffer command counts, so register an extra
        // interception object that performs the counting (see IncrementCommandCount).
        if (dev_obj->device_extensions.vk_khr_performance_query) {
            auto command_counter = new CommandCounter(this);
            // NOTE(review): raw pointer handed to object_dispatch -- presumably it owns and frees
            // the CommandCounter on teardown; confirm against the chassis implementation.
            dev_obj->object_dispatch.emplace_back(command_counter);
        }
        ValidationStateTracker::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}
201
202 // Tracks the number of commands recorded in a command buffer.
IncrementCommandCount(VkCommandBuffer commandBuffer)203 void CoreChecks::IncrementCommandCount(VkCommandBuffer commandBuffer) {
204 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
205 cb_state->commandCount++;
206 }
207
208 // For given mem object, verify that it is not null or UNBOUND, if it is, report error. Return skip value.
template <typename T1>
bool CoreChecks::VerifyBoundMemoryIsValid(const DEVICE_MEMORY_STATE *mem_state, const T1 object,
                                          const VulkanTypedHandle &typed_handle, const char *api_name,
                                          const char *error_code) const {
    bool result = false;
    // type_name comes from object_string; "+ 2" below presumably skips a leading "Vk" so the
    // message reads e.g. "vkBindBufferMemory()" -- confirm object_string entries start with "Vk".
    auto type_name = object_string[typed_handle.type];
    if (!mem_state) {
        // Never bound
        result |=
            LogError(object, error_code, "%s: %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().",
                     api_name, report_data->FormatHandle(typed_handle).c_str(), type_name + 2);
    } else if (mem_state->destroyed) {
        // Was bound once, but the backing allocation has since been freed
        result |= LogError(object, error_code,
                           "%s: %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
                           "prior to this operation.",
                           api_name, report_data->FormatHandle(typed_handle).c_str());
    }
    return result;
}
227
228 // Check to see if memory was ever bound to this image
bool CoreChecks::ValidateMemoryIsBoundToImage(const IMAGE_STATE *image_state, const char *api_name, const char *error_code) const {
    bool result = false;
    if (image_state->create_from_swapchain != VK_NULL_HANDLE) {
        // Swapchain-created image: it must be bound (via vkBindImageMemory2 +
        // VkBindImageMemorySwapchainInfoKHR) back to the same swapchain it was created from.
        if (image_state->bind_swapchain == VK_NULL_HANDLE) {
            LogObjectList objlist(image_state->image);
            objlist.add(image_state->create_from_swapchain);
            result |= LogError(
                objlist, error_code,
                "%s: %s is created by %s, and the image should be bound by calling vkBindImageMemory2(), and the pNext chain "
                "includes VkBindImageMemorySwapchainInfoKHR.",
                api_name, report_data->FormatHandle(image_state->image).c_str(),
                report_data->FormatHandle(image_state->create_from_swapchain).c_str());
        } else if (image_state->create_from_swapchain != image_state->bind_swapchain) {
            // Bound to a different swapchain than the one it was created from
            LogObjectList objlist(image_state->image);
            objlist.add(image_state->create_from_swapchain);
            objlist.add(image_state->bind_swapchain);
            result |=
                LogError(objlist, error_code,
                         "%s: %s is created by %s, but the image is bound by %s. The image should be created and bound by the same "
                         "swapchain",
                         api_name, report_data->FormatHandle(image_state->image).c_str(),
                         report_data->FormatHandle(image_state->create_from_swapchain).c_str(),
                         report_data->FormatHandle(image_state->bind_swapchain).c_str());
        }
    } else if (image_state->external_ahb) {
        // TODO look into how to properly check for a valid bound memory for an external AHB
    } else if (0 == (static_cast<uint32_t>(image_state->createInfo.flags) & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
        // Non-sparse, non-swapchain image: plain device memory must be bound and still alive
        result |= VerifyBoundMemoryIsValid(image_state->binding.mem_state.get(), image_state->image,
                                           VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage), api_name, error_code);
    }
    return result;
}
261
262 // Check to see if memory was bound to this buffer
ValidateMemoryIsBoundToBuffer(const BUFFER_STATE * buffer_state,const char * api_name,const char * error_code) const263 bool CoreChecks::ValidateMemoryIsBoundToBuffer(const BUFFER_STATE *buffer_state, const char *api_name,
264 const char *error_code) const {
265 bool result = false;
266 if (0 == (static_cast<uint32_t>(buffer_state->createInfo.flags) & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)) {
267 result |= VerifyBoundMemoryIsValid(buffer_state->binding.mem_state.get(), buffer_state->buffer,
268 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), api_name, error_code);
269 }
270 return result;
271 }
272
273 // Check to see if memory was bound to this acceleration structure
ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE * as_state,const char * api_name,const char * error_code) const274 bool CoreChecks::ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE *as_state, const char *api_name,
275 const char *error_code) const {
276 return VerifyBoundMemoryIsValid(as_state->binding.mem_state.get(), as_state->acceleration_structure,
277 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV),
278 api_name, error_code);
279 }
280
281 // Valid usage checks for a call to SetMemBinding().
282 // For NULL mem case, output warning
283 // Make sure given object is in global object map
284 // IF a previous binding existed, output validation error
285 // Otherwise, add reference from objectInfo to memoryInfo
286 // Add reference off of objInfo
287 // TODO: We may need to refactor or pass in multiple valid usage statements to handle multiple valid usage conditions.
bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *apiName) const {
    bool skip = false;
    // It's an error to bind an object to NULL memory
    if (mem != VK_NULL_HANDLE) {
        const BINDABLE *mem_binding = ValidationStateTracker::GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        // Sparse resources must not be bound via the plain vkBind*Memory path.
        // The VUID chosen depends on whether the caller is the plain entry point or the *2/Info variant.
        if (mem_binding->sparse) {
            const char *error_code = nullptr;
            const char *handle_type = nullptr;
            if (typed_handle.type == kVulkanObjectTypeBuffer) {
                handle_type = "BUFFER";
                if (strcmp(apiName, "vkBindBufferMemory()") == 0) {
                    error_code = "VUID-vkBindBufferMemory-buffer-01030";
                } else {
                    error_code = "VUID-VkBindBufferMemoryInfo-buffer-01030";
                }
            } else if (typed_handle.type == kVulkanObjectTypeImage) {
                handle_type = "IMAGE";
                if (strcmp(apiName, "vkBindImageMemory()") == 0) {
                    error_code = "VUID-vkBindImageMemory-image-01045";
                } else {
                    error_code = "VUID-VkBindImageMemoryInfo-image-01045";
                }
            } else {
                // Unsupported object type
                assert(false);
            }

            LogObjectList objlist(mem);
            objlist.add(typed_handle);
            skip |= LogError(objlist, error_code,
                             "In %s, attempting to bind %s to %s which was created with sparse memory flags "
                             "(VK_%s_CREATE_SPARSE_*_BIT).",
                             apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
                             handle_type);
        }
        const DEVICE_MEMORY_STATE *mem_info = ValidationStateTracker::GetDevMemState(mem);
        if (mem_info) {
            // Memory bindings are immutable in Vulkan: rebinding is an error whether the previously
            // bound memory is still alive (spec VUID) or has since been freed (layer-defined VUID).
            const DEVICE_MEMORY_STATE *prev_binding = mem_binding->binding.mem_state.get();
            if (prev_binding) {
                if (!prev_binding->destroyed) {
                    const char *error_code = nullptr;
                    if (typed_handle.type == kVulkanObjectTypeBuffer) {
                        if (strcmp(apiName, "vkBindBufferMemory()") == 0) {
                            error_code = "VUID-vkBindBufferMemory-buffer-01029";
                        } else {
                            error_code = "VUID-VkBindBufferMemoryInfo-buffer-01029";
                        }
                    } else if (typed_handle.type == kVulkanObjectTypeImage) {
                        if (strcmp(apiName, "vkBindImageMemory()") == 0) {
                            error_code = "VUID-vkBindImageMemory-image-01044";
                        } else {
                            error_code = "VUID-VkBindImageMemoryInfo-image-01044";
                        }
                    } else {
                        // Unsupported object type
                        assert(false);
                    }

                    LogObjectList objlist(mem);
                    objlist.add(typed_handle);
                    objlist.add(prev_binding->mem);
                    skip |=
                        LogError(objlist, error_code, "In %s, attempting to bind %s to %s which has already been bound to %s.",
                                 apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
                                 report_data->FormatHandle(prev_binding->mem).c_str());
                } else {
                    LogObjectList objlist(mem);
                    objlist.add(typed_handle);
                    skip |=
                        LogError(objlist, kVUID_Core_MemTrack_RebindObject,
                                 "In %s, attempting to bind %s to %s which was previous bound to memory that has "
                                 "since been freed. Memory bindings are immutable in "
                                 "Vulkan so this attempt to bind to new memory is not allowed.",
                                 apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str());
                }
            }
        }
    }
    return skip;
}
369
// Verify |queue_family| is one of the families this device was created with (queue_family_index_map).
// When |optional| is true, VK_QUEUE_FAMILY_IGNORED is accepted as "no family specified".
// NOTE(review): the default argument on |optional| appears on this out-of-class definition; it must
// not also appear on the in-class declaration or this redeclaration is ill-formed -- confirm the header.
bool CoreChecks::ValidateDeviceQueueFamily(uint32_t queue_family, const char *cmd_name, const char *parameter_name,
                                           const char *error_code, bool optional = false) const {
    bool skip = false;
    if (!optional && queue_family == VK_QUEUE_FAMILY_IGNORED) {
        skip |= LogError(device, error_code,
                         "%s: %s is VK_QUEUE_FAMILY_IGNORED, but it is required to provide a valid queue family index value.",
                         cmd_name, parameter_name);
    } else if (queue_family_index_map.find(queue_family) == queue_family_index_map.end()) {
        // Not among the families passed via VkDeviceQueueCreateInfo at device creation
        skip |=
            LogError(device, error_code,
                     "%s: %s (= %" PRIu32
                     ") is not one of the queue families given via VkDeviceQueueCreateInfo structures when the device was created.",
                     cmd_name, parameter_name, queue_family);
    }

    return skip;
}
387
388 // Validate the specified queue families against the families supported by the physical device that owns this device
bool CoreChecks::ValidatePhysicalDeviceQueueFamilies(uint32_t queue_family_count, const uint32_t *queue_families,
                                                     const char *cmd_name, const char *array_parameter_name,
                                                     const char *vuid) const {
    bool skip = false;
    // A null array is valid input here; there is nothing to check.
    if (queue_families) {
        std::unordered_set<uint32_t> set;
        for (uint32_t i = 0; i < queue_family_count; ++i) {
            std::string parameter_name = std::string(array_parameter_name) + "[" + std::to_string(i) + "]";

            if (set.count(queue_families[i])) {
                // Duplicate family index within the array
                skip |= LogError(device, vuid, "%s: %s (=%" PRIu32 ") is not unique within %s array.", cmd_name,
                                 parameter_name.c_str(), queue_families[i], array_parameter_name);
            } else {
                set.insert(queue_families[i]);
                if (queue_families[i] == VK_QUEUE_FAMILY_IGNORED) {
                    skip |= LogError(
                        device, vuid,
                        "%s: %s is VK_QUEUE_FAMILY_IGNORED, but it is required to provide a valid queue family index value.",
                        cmd_name, parameter_name.c_str());
                } else if (queue_families[i] >= physical_device_state->queue_family_known_count) {
                    // Index beyond what the parent physical device reports
                    LogObjectList obj_list(physical_device);
                    obj_list.add(device);
                    skip |=
                        LogError(obj_list, vuid,
                                 "%s: %s (= %" PRIu32
                                 ") is not one of the queue families supported by the parent PhysicalDevice %s of this device %s.",
                                 cmd_name, parameter_name.c_str(), queue_families[i],
                                 report_data->FormatHandle(physical_device).c_str(), report_data->FormatHandle(device).c_str());
                }
            }
        }
    }
    return skip;
}
423
424 // Check object status for selected flag state
ValidateStatus(const CMD_BUFFER_STATE * pNode,CBStatusFlags status_mask,const char * fail_msg,const char * msg_code) const425 bool CoreChecks::ValidateStatus(const CMD_BUFFER_STATE *pNode, CBStatusFlags status_mask, const char *fail_msg,
426 const char *msg_code) const {
427 if (!(pNode->status & status_mask)) {
428 return LogError(pNode->commandBuffer, msg_code, "%s: %s..", report_data->FormatHandle(pNode->commandBuffer).c_str(),
429 fail_msg);
430 }
431 return false;
432 }
433
434 // Return true if for a given PSO, the given state enum is dynamic, else return false
IsDynamic(const PIPELINE_STATE * pPipeline,const VkDynamicState state)435 static bool IsDynamic(const PIPELINE_STATE *pPipeline, const VkDynamicState state) {
436 if (pPipeline && pPipeline->graphicsPipelineCI.pDynamicState) {
437 for (uint32_t i = 0; i < pPipeline->graphicsPipelineCI.pDynamicState->dynamicStateCount; i++) {
438 if (state == pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i]) return true;
439 }
440 }
441 return false;
442 }
443
444 // Validate state stored as flags at time of draw call
ValidateDrawStateFlags(const CMD_BUFFER_STATE * pCB,const PIPELINE_STATE * pPipe,bool indexed,const char * msg_code) const445 bool CoreChecks::ValidateDrawStateFlags(const CMD_BUFFER_STATE *pCB, const PIPELINE_STATE *pPipe, bool indexed,
446 const char *msg_code) const {
447 bool result = false;
448 if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
449 pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
450 result |=
451 ValidateStatus(pCB, CBSTATUS_LINE_WIDTH_SET, "Dynamic line width state not set for this command buffer", msg_code);
452 }
453 if (pPipe->graphicsPipelineCI.pRasterizationState &&
454 (pPipe->graphicsPipelineCI.pRasterizationState->depthBiasEnable == VK_TRUE)) {
455 result |=
456 ValidateStatus(pCB, CBSTATUS_DEPTH_BIAS_SET, "Dynamic depth bias state not set for this command buffer", msg_code);
457 }
458 if (pPipe->blendConstantsEnabled) {
459 result |= ValidateStatus(pCB, CBSTATUS_BLEND_CONSTANTS_SET, "Dynamic blend constants state not set for this command buffer",
460 msg_code);
461 }
462 if (pPipe->graphicsPipelineCI.pDepthStencilState &&
463 (pPipe->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE)) {
464 result |=
465 ValidateStatus(pCB, CBSTATUS_DEPTH_BOUNDS_SET, "Dynamic depth bounds state not set for this command buffer", msg_code);
466 }
467 if (pPipe->graphicsPipelineCI.pDepthStencilState &&
468 (pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE)) {
469 result |= ValidateStatus(pCB, CBSTATUS_STENCIL_READ_MASK_SET,
470 "Dynamic stencil read mask state not set for this command buffer", msg_code);
471 result |= ValidateStatus(pCB, CBSTATUS_STENCIL_WRITE_MASK_SET,
472 "Dynamic stencil write mask state not set for this command buffer", msg_code);
473 result |= ValidateStatus(pCB, CBSTATUS_STENCIL_REFERENCE_SET,
474 "Dynamic stencil reference state not set for this command buffer", msg_code);
475 }
476 if (indexed) {
477 result |= ValidateStatus(pCB, CBSTATUS_INDEX_BUFFER_BOUND,
478 "Index buffer object not bound to this command buffer when Indexed Draw attempted", msg_code);
479 }
480 if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
481 pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
482 const auto *line_state =
483 lvl_find_in_chain<VkPipelineRasterizationLineStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pRasterizationState->pNext);
484 if (line_state && line_state->stippledLineEnable) {
485 result |= ValidateStatus(pCB, CBSTATUS_LINE_STIPPLE_SET, "Dynamic line stipple state not set for this command buffer",
486 msg_code);
487 }
488 }
489
490 return result;
491 }
492
// Shared formatter for "renderpasses incompatible" attachment errors.
// Always emits the error and returns the LogError result (true => skip).
bool CoreChecks::LogInvalidAttachmentMessage(const char *type1_string, const RENDER_PASS_STATE *rp1_state, const char *type2_string,
                                             const RENDER_PASS_STATE *rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
                                             const char *msg, const char *caller, const char *error_code) const {
    // Report against both render pass handles so either one can be used to locate the issue
    LogObjectList objlist(rp1_state->renderPass);
    objlist.add(rp2_state->renderPass);
    return LogError(objlist, error_code,
                    "%s: RenderPasses incompatible between %s w/ %s and %s w/ %s Attachment %u is not "
                    "compatible with %u: %s.",
                    caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(), type2_string,
                    report_data->FormatHandle(rp2_state->renderPass).c_str(), primary_attach, secondary_attach, msg);
}
504
ValidateAttachmentCompatibility(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,uint32_t primary_attach,uint32_t secondary_attach,const char * caller,const char * error_code) const505 bool CoreChecks::ValidateAttachmentCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
506 const char *type2_string, const RENDER_PASS_STATE *rp2_state,
507 uint32_t primary_attach, uint32_t secondary_attach, const char *caller,
508 const char *error_code) const {
509 bool skip = false;
510 const auto &primaryPassCI = rp1_state->createInfo;
511 const auto &secondaryPassCI = rp2_state->createInfo;
512 if (primaryPassCI.attachmentCount <= primary_attach) {
513 primary_attach = VK_ATTACHMENT_UNUSED;
514 }
515 if (secondaryPassCI.attachmentCount <= secondary_attach) {
516 secondary_attach = VK_ATTACHMENT_UNUSED;
517 }
518 if (primary_attach == VK_ATTACHMENT_UNUSED && secondary_attach == VK_ATTACHMENT_UNUSED) {
519 return skip;
520 }
521 if (primary_attach == VK_ATTACHMENT_UNUSED) {
522 skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
523 "The first is unused while the second is not.", caller, error_code);
524 return skip;
525 }
526 if (secondary_attach == VK_ATTACHMENT_UNUSED) {
527 skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
528 "The second is unused while the first is not.", caller, error_code);
529 return skip;
530 }
531 if (primaryPassCI.pAttachments[primary_attach].format != secondaryPassCI.pAttachments[secondary_attach].format) {
532 skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
533 "They have different formats.", caller, error_code);
534 }
535 if (primaryPassCI.pAttachments[primary_attach].samples != secondaryPassCI.pAttachments[secondary_attach].samples) {
536 skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
537 "They have different samples.", caller, error_code);
538 }
539 if (primaryPassCI.pAttachments[primary_attach].flags != secondaryPassCI.pAttachments[secondary_attach].flags) {
540 skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
541 "They have different flags.", caller, error_code);
542 }
543
544 return skip;
545 }
546
// Compare subpass |subpass| of two render passes attachment-by-attachment (input, color,
// resolve, depth/stencil), then check that both agree on multiview (viewMask) usage.
bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
                                              const char *type2_string, const RENDER_PASS_STATE *rp2_state, const int subpass,
                                              const char *caller, const char *error_code) const {
    bool skip = false;
    const auto &primary_desc = rp1_state->createInfo.pSubpasses[subpass];
    const auto &secondary_desc = rp2_state->createInfo.pSubpasses[subpass];
    // Iterate to the larger count; an index past either array compares as VK_ATTACHMENT_UNUSED
    uint32_t maxInputAttachmentCount = std::max(primary_desc.inputAttachmentCount, secondary_desc.inputAttachmentCount);
    for (uint32_t i = 0; i < maxInputAttachmentCount; ++i) {
        uint32_t primary_input_attach = VK_ATTACHMENT_UNUSED, secondary_input_attach = VK_ATTACHMENT_UNUSED;
        if (i < primary_desc.inputAttachmentCount) {
            primary_input_attach = primary_desc.pInputAttachments[i].attachment;
        }
        if (i < secondary_desc.inputAttachmentCount) {
            secondary_input_attach = secondary_desc.pInputAttachments[i].attachment;
        }
        skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_input_attach,
                                                secondary_input_attach, caller, error_code);
    }
    uint32_t maxColorAttachmentCount = std::max(primary_desc.colorAttachmentCount, secondary_desc.colorAttachmentCount);
    for (uint32_t i = 0; i < maxColorAttachmentCount; ++i) {
        uint32_t primary_color_attach = VK_ATTACHMENT_UNUSED, secondary_color_attach = VK_ATTACHMENT_UNUSED;
        if (i < primary_desc.colorAttachmentCount) {
            primary_color_attach = primary_desc.pColorAttachments[i].attachment;
        }
        if (i < secondary_desc.colorAttachmentCount) {
            secondary_color_attach = secondary_desc.pColorAttachments[i].attachment;
        }
        skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_color_attach,
                                                secondary_color_attach, caller, error_code);
        // NOTE(review): resolve attachments are only compared when rp1 has more than one subpass;
        // confirm this condition is intended (rather than, e.g., a check on pResolveAttachments).
        if (rp1_state->createInfo.subpassCount > 1) {
            uint32_t primary_resolve_attach = VK_ATTACHMENT_UNUSED, secondary_resolve_attach = VK_ATTACHMENT_UNUSED;
            if (i < primary_desc.colorAttachmentCount && primary_desc.pResolveAttachments) {
                primary_resolve_attach = primary_desc.pResolveAttachments[i].attachment;
            }
            if (i < secondary_desc.colorAttachmentCount && secondary_desc.pResolveAttachments) {
                secondary_resolve_attach = secondary_desc.pResolveAttachments[i].attachment;
            }
            skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_resolve_attach,
                                                    secondary_resolve_attach, caller, error_code);
        }
    }
    uint32_t primary_depthstencil_attach = VK_ATTACHMENT_UNUSED, secondary_depthstencil_attach = VK_ATTACHMENT_UNUSED;
    if (primary_desc.pDepthStencilAttachment) {
        primary_depthstencil_attach = primary_desc.pDepthStencilAttachment[0].attachment;
    }
    if (secondary_desc.pDepthStencilAttachment) {
        secondary_depthstencil_attach = secondary_desc.pDepthStencilAttachment[0].attachment;
    }
    skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_depthstencil_attach,
                                            secondary_depthstencil_attach, caller, error_code);

    // Both renderpasses must agree on Multiview usage
    if (primary_desc.viewMask && secondary_desc.viewMask) {
        if (primary_desc.viewMask != secondary_desc.viewMask) {
            std::stringstream ss;
            ss << "For subpass " << subpass << ", they have a different viewMask. The first has view mask " << primary_desc.viewMask
               << " while the second has view mask " << secondary_desc.viewMask << ".";
            skip |= LogInvalidPnextMessage(type1_string, rp1_state, type2_string, rp2_state, ss.str().c_str(), caller, error_code);
        }
    } else if (primary_desc.viewMask) {
        skip |= LogInvalidPnextMessage(type1_string, rp1_state, type2_string, rp2_state,
                                       "The first uses Multiview (has non-zero viewMasks) while the second one does not.", caller,
                                       error_code);
    } else if (secondary_desc.viewMask) {
        skip |= LogInvalidPnextMessage(type1_string, rp1_state, type2_string, rp2_state,
                                       "The second uses Multiview (has non-zero viewMasks) while the first one does not.", caller,
                                       error_code);
    }

    return skip;
}
618
LogInvalidPnextMessage(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,const char * msg,const char * caller,const char * error_code) const619 bool CoreChecks::LogInvalidPnextMessage(const char *type1_string, const RENDER_PASS_STATE *rp1_state, const char *type2_string,
620 const RENDER_PASS_STATE *rp2_state, const char *msg, const char *caller,
621 const char *error_code) const {
622 LogObjectList objlist(rp1_state->renderPass);
623 objlist.add(rp2_state->renderPass);
624 return LogError(objlist, error_code, "%s: RenderPasses incompatible between %s w/ %s and %s w/ %s: %s", caller, type1_string,
625 report_data->FormatHandle(rp1_state->renderPass).c_str(), type2_string,
626 report_data->FormatHandle(rp2_state->renderPass).c_str(), msg);
627 }
628
629 // Verify that given renderPass CreateInfo for primary and secondary command buffers are compatible.
630 // This function deals directly with the CreateInfo, there are overloaded versions below that can take the renderPass handle and
631 // will then feed into this function
ValidateRenderPassCompatibility(const char * type1_string,const RENDER_PASS_STATE * rp1_state,const char * type2_string,const RENDER_PASS_STATE * rp2_state,const char * caller,const char * error_code) const632 bool CoreChecks::ValidateRenderPassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
633 const char *type2_string, const RENDER_PASS_STATE *rp2_state, const char *caller,
634 const char *error_code) const {
635 bool skip = false;
636
637 // createInfo flags must be identical for the renderpasses to be compatible.
638 if (rp1_state->createInfo.flags != rp2_state->createInfo.flags) {
639 LogObjectList objlist(rp1_state->renderPass);
640 objlist.add(rp2_state->renderPass);
641 skip |=
642 LogError(objlist, error_code,
643 "%s: RenderPasses incompatible between %s w/ %s with flags of %u and %s w/ "
644 "%s with a flags of %u.",
645 caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(), rp1_state->createInfo.flags,
646 type2_string, report_data->FormatHandle(rp2_state->renderPass).c_str(), rp2_state->createInfo.flags);
647 }
648
649 if (rp1_state->createInfo.subpassCount != rp2_state->createInfo.subpassCount) {
650 LogObjectList objlist(rp1_state->renderPass);
651 objlist.add(rp2_state->renderPass);
652 skip |= LogError(objlist, error_code,
653 "%s: RenderPasses incompatible between %s w/ %s with a subpassCount of %u and %s w/ "
654 "%s with a subpassCount of %u.",
655 caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(),
656 rp1_state->createInfo.subpassCount, type2_string, report_data->FormatHandle(rp2_state->renderPass).c_str(),
657 rp2_state->createInfo.subpassCount);
658 } else {
659 for (uint32_t i = 0; i < rp1_state->createInfo.subpassCount; ++i) {
660 skip |= ValidateSubpassCompatibility(type1_string, rp1_state, type2_string, rp2_state, i, caller, error_code);
661 }
662 }
663
664 // Find an entry of the Fragment Density Map type in the pNext chain, if it exists
665 const auto fdm1 = lvl_find_in_chain<VkRenderPassFragmentDensityMapCreateInfoEXT>(rp1_state->createInfo.pNext);
666 const auto fdm2 = lvl_find_in_chain<VkRenderPassFragmentDensityMapCreateInfoEXT>(rp2_state->createInfo.pNext);
667
668 // Both renderpasses must agree on usage of a Fragment Density Map type
669 if (fdm1 && fdm2) {
670 uint32_t primary_input_attach = fdm1->fragmentDensityMapAttachment.attachment;
671 uint32_t secondary_input_attach = fdm2->fragmentDensityMapAttachment.attachment;
672 skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_input_attach,
673 secondary_input_attach, caller, error_code);
674 } else if (fdm1) {
675 skip |= LogInvalidPnextMessage(type1_string, rp1_state, type2_string, rp2_state,
676 "The first uses a Fragment Density Map while the second one does not.", caller, error_code);
677 } else if (fdm2) {
678 skip |= LogInvalidPnextMessage(type1_string, rp1_state, type2_string, rp2_state,
679 "The second uses a Fragment Density Map while the first one does not.", caller, error_code);
680 }
681
682 return skip;
683 }
684
685 // For given pipeline, return number of MSAA samples, or one if MSAA disabled
GetNumSamples(PIPELINE_STATE const * pipe)686 static VkSampleCountFlagBits GetNumSamples(PIPELINE_STATE const *pipe) {
687 if (pipe->graphicsPipelineCI.pMultisampleState != NULL &&
688 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO == pipe->graphicsPipelineCI.pMultisampleState->sType) {
689 return pipe->graphicsPipelineCI.pMultisampleState->rasterizationSamples;
690 }
691 return VK_SAMPLE_COUNT_1_BIT;
692 }
693
// Write the positions of the set bits of |bits| to |s| as a comma-separated list,
// lowest bit first (e.g. 0b101 -> "0,2"). Writes nothing when bits == 0.
static void ListBits(std::ostream &s, uint32_t bits) {
    for (int i = 0; i < 32 && bits; i++) {
        // Use an unsigned literal: (1 << 31) on a signed int is undefined behavior.
        const uint32_t mask = 1u << i;
        if (bits & mask) {
            s << i;
            bits &= ~mask;
            if (bits) {
                s << ",";  // more set bits remain, so add a separator
            }
        }
    }
}
705
DynamicStateString(CBStatusFlags input_value)706 std::string DynamicStateString(CBStatusFlags input_value) {
707 std::string ret;
708 int index = 0;
709 while (input_value) {
710 if (input_value & 1) {
711 if (!ret.empty()) ret.append("|");
712 ret.append(string_VkDynamicState(ConvertToDynamicState(static_cast<CBStatusFlagBits>(1 << index))));
713 }
714 ++index;
715 input_value >>= 1;
716 }
717 if (ret.empty()) ret.append(string_VkDynamicState(ConvertToDynamicState(static_cast<CBStatusFlagBits>(0))));
718 return ret;
719 }
720
// Validate draw-time state related to the PSO (bound pipeline state object): dynamic state
// coverage, vertex buffer bindings and attribute alignment, viewport/scissor masks, MSAA
// sample-count agreement with the active render pass, render pass compatibility, and
// dynamic primitive topology. Returns true when a validation error was logged.
bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, const CMD_BUFFER_STATE *pCB, CMD_TYPE cmd_type,
                                               const PIPELINE_STATE *pPipeline, const char *caller) const {
    bool skip = false;
    const auto &current_vtx_bfr_binding_info = pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings;
    const DrawDispatchVuid vuid = GetDrawDispatchVuid(cmd_type);

    // Verify if using dynamic state setting commands that it doesn't set up in pipeline
    CBStatusFlags invalid_status = CBSTATUS_ALL_STATE_SET & ~(pCB->dynamic_status | pCB->static_status);
    if (invalid_status) {
        std::string dynamic_states = DynamicStateString(invalid_status);
        LogObjectList objlist(pCB->commandBuffer);
        objlist.add(pPipeline->pipeline);
        skip |= LogError(objlist, vuid.dynamic_state_setting_commands,
                         "%s: %s doesn't set up %s, but it calls the related dynamic state setting commands", caller,
                         report_data->FormatHandle(state.pipeline_state->pipeline).c_str(), dynamic_states.c_str());
    }

    // Verify vertex binding
    if (pPipeline->vertex_binding_descriptions_.size() > 0) {
        for (size_t i = 0; i < pPipeline->vertex_binding_descriptions_.size(); i++) {
            const auto vertex_binding = pPipeline->vertex_binding_descriptions_[i].binding;
            // The pipeline references a binding index that has never been bound on this command buffer
            if (current_vtx_bfr_binding_info.size() < (vertex_binding + 1)) {
                skip |= LogError(pCB->commandBuffer, vuid.vertex_binding,
                                 "%s: %s expects that this Command Buffer's vertex binding Index %u should be set via "
                                 "vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct at "
                                 "index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
                                 caller, report_data->FormatHandle(state.pipeline_state->pipeline).c_str(), vertex_binding, i,
                                 vertex_binding);
            } else if ((current_vtx_bfr_binding_info[vertex_binding].buffer == VK_NULL_HANDLE) &&
                       !enabled_features.robustness2_features.nullDescriptor) {
                // A VK_NULL_HANDLE buffer is only legal when the nullDescriptor feature is enabled
                skip |= LogError(pCB->commandBuffer, vuid.vertex_binding_null,
                                 "%s: Vertex binding %d must not be VK_NULL_HANDLE %s expects that this Command Buffer's vertex "
                                 "binding Index %u should be set via "
                                 "vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct at "
                                 "index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
                                 caller, vertex_binding, report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
                                 vertex_binding, i, vertex_binding);
            }
        }

        // Verify vertex attribute address alignment
        for (size_t i = 0; i < pPipeline->vertex_attribute_descriptions_.size(); i++) {
            const auto &attribute_description = pPipeline->vertex_attribute_descriptions_[i];
            const auto vertex_binding = attribute_description.binding;
            const auto attribute_offset = attribute_description.offset;

            const auto &vertex_binding_map_it = pPipeline->vertex_binding_to_index_map_.find(vertex_binding);
            if ((vertex_binding_map_it != pPipeline->vertex_binding_to_index_map_.cend()) &&
                (vertex_binding < current_vtx_bfr_binding_info.size()) &&
                (current_vtx_bfr_binding_info[vertex_binding].buffer != VK_NULL_HANDLE)) {
                // Stride comes from the pipeline unless it was overridden dynamically via vkCmdBindVertexBuffers2EXT
                auto vertex_buffer_stride = pPipeline->vertex_binding_descriptions_[vertex_binding_map_it->second].stride;
                if (IsDynamic(pPipeline, VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT)) {
                    vertex_buffer_stride = (uint32_t)current_vtx_bfr_binding_info[vertex_binding].stride;
                    uint32_t attribute_binding_extent =
                        attribute_description.offset + FormatElementSize(attribute_description.format);
                    if (vertex_buffer_stride < attribute_binding_extent) {
                        skip |=
                            LogError(pCB->commandBuffer, "VUID-vkCmdBindVertexBuffers2EXT-pStrides-03363",
                                     "The pStrides[%u] (%u) parameter in the last call to vkCmdBindVertexBuffers2EXT is less than "
                                     "the extent of the binding for attribute %u (%u).",
                                     vertex_binding, vertex_buffer_stride, i, attribute_binding_extent);
                    }
                }
                const auto vertex_buffer_offset = current_vtx_bfr_binding_info[vertex_binding].offset;

                // Use 1 as vertex/instance index to use buffer stride as well
                const auto attrib_address = vertex_buffer_offset + vertex_buffer_stride + attribute_offset;

                VkDeviceSize vtx_attrib_req_alignment = pPipeline->vertex_attribute_alignments_[i];

                // The attribute's address must be a multiple of its format's required alignment
                if (SafeModulo(attrib_address, vtx_attrib_req_alignment) != 0) {
                    LogObjectList objlist(current_vtx_bfr_binding_info[vertex_binding].buffer);
                    objlist.add(state.pipeline_state->pipeline);
                    // NOTE(review): message text ", %s,from of %s and vertex %s." appears garbled — candidate for cleanup
                    skip |= LogError(objlist, vuid.vertex_binding_attribute,
                                     "%s: Invalid attribAddress alignment for vertex attribute " PRINTF_SIZE_T_SPECIFIER
                                     ", %s,from of %s and vertex %s.",
                                     caller, i, string_VkFormat(attribute_description.format),
                                     report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
                                     report_data->FormatHandle(current_vtx_bfr_binding_info[vertex_binding].buffer).c_str());
                }
            } else {
                // Attribute references a binding with no usable buffer bound
                LogObjectList objlist(pCB->commandBuffer);
                objlist.add(state.pipeline_state->pipeline);
                skip |= LogError(objlist, vuid.vertex_binding_attribute,
                                 "%s: binding #%" PRIu32
                                 " in pVertexAttributeDescriptions of %s is invalid in vkCmdBindVertexBuffers of %s.",
                                 caller, vertex_binding, report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
                                 report_data->FormatHandle(pCB->commandBuffer).c_str());
            }
        }
    }

    // If Viewport or scissors are dynamic, verify that dynamic count matches PSO count.
    // Skip check if rasterization is disabled or there is no viewport.
    if ((!pPipeline->graphicsPipelineCI.pRasterizationState ||
         (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) &&
        pPipeline->graphicsPipelineCI.pViewportState) {
        bool dynViewport = IsDynamic(pPipeline, VK_DYNAMIC_STATE_VIEWPORT);
        bool dynScissor = IsDynamic(pPipeline, VK_DYNAMIC_STATE_SCISSOR);

        if (dynViewport) {
            // Bitmask of all viewports the PSO statically declares; each must have been set via vkCmdSetViewport
            const auto requiredViewportsMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->viewportCount) - 1;
            const auto missingViewportMask = ~pCB->viewportMask & requiredViewportsMask;
            if (missingViewportMask) {
                std::stringstream ss;
                ss << caller << ": Dynamic viewport(s) ";
                ListBits(ss, missingViewportMask);
                ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetViewport().";
                skip |= LogError(device, kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
            }
        }

        if (dynScissor) {
            const auto requiredScissorMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->scissorCount) - 1;
            const auto missingScissorMask = ~pCB->scissorMask & requiredScissorMask;
            if (missingScissorMask) {
                std::stringstream ss;
                ss << caller << ": Dynamic scissor(s) ";
                ListBits(ss, missingScissorMask);
                ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetScissor().";
                skip |= LogError(device, kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
            }
        }

        bool dynViewportCount = IsDynamic(pPipeline, VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT);
        bool dynScissorCount = IsDynamic(pPipeline, VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT);

        // VUID {refpage}-viewportCount-03417
        if (dynViewportCount && !dynScissorCount) {
            // Viewport count is dynamic here, so the static scissorCount defines how many viewports are required
            const auto requiredViewportMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->scissorCount) - 1;
            const auto missingViewportMask = ~pCB->viewportWithCountMask & requiredViewportMask;
            if (missingViewportMask) {
                std::stringstream ss;
                ss << caller << ": Dynamic viewport with count ";
                ListBits(ss, missingViewportMask);
                ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetViewportWithCountEXT().";
                skip |= LogError(device, vuid.viewport_count, "%s", ss.str().c_str());
            }
        }

        // VUID {refpage}-scissorCount-03418
        if (dynScissorCount && !dynViewportCount) {
            // Scissor count is dynamic here, so the static viewportCount defines how many scissors are required
            const auto requiredScissorMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->viewportCount) - 1;
            const auto missingScissorMask = ~pCB->scissorWithCountMask & requiredScissorMask;
            if (missingScissorMask) {
                std::stringstream ss;
                ss << caller << ": Dynamic scissor with count ";
                ListBits(ss, missingScissorMask);
                ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetScissorWithCountEXT().";
                skip |= LogError(device, vuid.scissor_count, "%s", ss.str().c_str());
            }
        }

        // VUID {refpage}-viewportCount-03419
        if (dynScissorCount && dynViewportCount) {
            // Both counts are dynamic: the viewport and scissor sets must match exactly
            if (pCB->viewportWithCountMask != pCB->scissorWithCountMask) {
                std::stringstream ss;
                ss << caller << ": Dynamic viewport and scissor with count ";
                ListBits(ss, pCB->viewportWithCountMask ^ pCB->scissorWithCountMask);
                ss << " are used by pipeline state object, but were not provided via matching calls to "
                      "vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT().";
                skip |= LogError(device, vuid.viewport_scissor_count, "%s", ss.str().c_str());
            }
        }
    }

    // Verify that any MSAA request in PSO matches sample# in bound FB
    // Skip the check if rasterization is disabled.
    if (!pPipeline->graphicsPipelineCI.pRasterizationState ||
        (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) {
        VkSampleCountFlagBits pso_num_samples = GetNumSamples(pPipeline);
        if (pCB->activeRenderPass) {
            const auto render_pass_info = pCB->activeRenderPass->createInfo.ptr();
            const VkSubpassDescription2KHR *subpass_desc = &render_pass_info->pSubpasses[pCB->activeSubpass];
            uint32_t i;
            unsigned subpass_num_samples = 0;

            // Union of sample counts over all color attachments used by the active subpass
            for (i = 0; i < subpass_desc->colorAttachmentCount; i++) {
                const auto attachment = subpass_desc->pColorAttachments[i].attachment;
                if (attachment != VK_ATTACHMENT_UNUSED)
                    subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
            }

            if (subpass_desc->pDepthStencilAttachment &&
                subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
                const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
                subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
            }

            // Mixed-samples extensions relax the requirement that PSO samples cover all subpass samples
            if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples) &&
                ((subpass_num_samples & static_cast<unsigned>(pso_num_samples)) != subpass_num_samples)) {
                LogObjectList objlist(pPipeline->pipeline);
                objlist.add(pCB->activeRenderPass->renderPass);
                skip |= LogError(objlist, kVUID_Core_DrawState_NumSamplesMismatch,
                                 "%s: Num samples mismatch! At draw-time in %s with %u samples while current %s w/ "
                                 "%u samples!",
                                 caller, report_data->FormatHandle(pPipeline->pipeline).c_str(), pso_num_samples,
                                 report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), subpass_num_samples);
            }
        } else {
            skip |= LogError(pPipeline->pipeline, kVUID_Core_DrawState_NoActiveRenderpass,
                             "%s: No active render pass found at draw-time in %s!", caller,
                             report_data->FormatHandle(pPipeline->pipeline).c_str());
        }
    }
    // Verify that PSO creation renderPass is compatible with active renderPass
    if (pCB->activeRenderPass) {
        // TODO: AMD extension codes are included here, but actual function entrypoints are not yet intercepted
        if (pCB->activeRenderPass->renderPass != pPipeline->rp_state->renderPass) {
            // renderPass that PSO was created with must be compatible with active renderPass that PSO is being used with
            skip |= ValidateRenderPassCompatibility("active render pass", pCB->activeRenderPass.get(), "pipeline state object",
                                                    pPipeline->rp_state.get(), caller, vuid.render_pass_compatible);
        }
        if (pPipeline->graphicsPipelineCI.subpass != pCB->activeSubpass) {
            skip |=
                LogError(pPipeline->pipeline, vuid.subpass_index, "%s: Pipeline was built for subpass %u but used in subpass %u.",
                         caller, pPipeline->graphicsPipelineCI.subpass, pCB->activeSubpass);
        }
        // Check if depth stencil attachment was created with sample location compatible bit
        if (pPipeline->sample_location_enabled == VK_TRUE) {
            const safe_VkAttachmentReference2 *ds_attachment =
                pCB->activeRenderPass->createInfo.pSubpasses[pCB->activeSubpass].pDepthStencilAttachment;
            const FRAMEBUFFER_STATE *fb_state = pCB->activeFramebuffer.get();
            if ((ds_attachment != nullptr) && (fb_state != nullptr)) {
                const uint32_t attachment = ds_attachment->attachment;
                if (attachment != VK_ATTACHMENT_UNUSED) {
                    // Walk attachment -> image view -> image to inspect the image's creation flags
                    const IMAGE_VIEW_STATE *imageview_state = GetAttachmentImageViewState(pCB, fb_state, attachment);
                    if (imageview_state != nullptr) {
                        const IMAGE_STATE *image_state = GetImageState(imageview_state->create_info.image);
                        if (image_state != nullptr) {
                            if ((image_state->createInfo.flags & VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT) == 0) {
                                skip |= LogError(pPipeline->pipeline, vuid.sample_location,
                                                 "%s: sampleLocationsEnable is true for the pipeline, but the subpass (%u) depth "
                                                 "stencil attachment's VkImage was not created with "
                                                 "VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT.",
                                                 caller, pCB->activeSubpass);
                            }
                        }
                    }
                }
            }
        }
    }

    // VUID {refpage}-primitiveTopology-03420
    skip |= ValidateStatus(pCB, CBSTATUS_PRIMITIVE_TOPOLOGY_SET, "Dynamic primitive topology state not set for this command buffer",
                           vuid.primitive_topology);
    if (IsDynamic(pPipeline, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT)) {
        // The dynamically-set topology must belong to the same topology class as the pipeline's static topology
        bool compatible_topology = false;
        switch (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology) {
            case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
                switch (pCB->primitiveTopology) {
                    case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
                        compatible_topology = true;
                        break;
                    default:
                        break;
                }
                break;
            case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
            case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
            case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
            case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
                switch (pCB->primitiveTopology) {
                    case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
                    case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
                        compatible_topology = true;
                        break;
                    default:
                        break;
                }
                break;
            case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
            case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
            case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
            case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
            case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
                switch (pCB->primitiveTopology) {
                    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
                    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
                    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
                    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
                    case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
                        compatible_topology = true;
                        break;
                    default:
                        break;
                }
                break;
            case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
                switch (pCB->primitiveTopology) {
                    case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
                        compatible_topology = true;
                        break;
                    default:
                        break;
                }
                break;
            default:
                break;
        }
        if (!compatible_topology) {
            skip |= LogError(pPipeline->pipeline, vuid.primitive_topology,
                             "%s: the last primitive topology %s state set by vkCmdSetPrimitiveTopologyEXT is "
                             "not compatible with the pipeline topology %s.",
                             caller, string_VkPrimitiveTopology(pCB->primitiveTopology),
                             string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
        }
    }

    return skip;
}
1034
1035 // For given cvdescriptorset::DescriptorSet, verify that its Set is compatible w/ the setLayout corresponding to
1036 // pipelineLayout[layoutIndex]
VerifySetLayoutCompatibility(const debug_report_data * report_data,const cvdescriptorset::DescriptorSet * descriptor_set,PIPELINE_LAYOUT_STATE const * pipeline_layout,const uint32_t layoutIndex,string & errorMsg)1037 static bool VerifySetLayoutCompatibility(const debug_report_data *report_data, const cvdescriptorset::DescriptorSet *descriptor_set,
1038 PIPELINE_LAYOUT_STATE const *pipeline_layout, const uint32_t layoutIndex,
1039 string &errorMsg) {
1040 auto num_sets = pipeline_layout->set_layouts.size();
1041 if (layoutIndex >= num_sets) {
1042 stringstream errorStr;
1043 errorStr << report_data->FormatHandle(pipeline_layout->layout) << ") only contains " << num_sets
1044 << " setLayouts corresponding to sets 0-" << num_sets - 1 << ", but you're attempting to bind set to index "
1045 << layoutIndex;
1046 errorMsg = errorStr.str();
1047 return false;
1048 }
1049 if (descriptor_set->IsPushDescriptor()) return true;
1050 auto layout_node = pipeline_layout->set_layouts[layoutIndex].get();
1051 return cvdescriptorset::VerifySetLayoutCompatibility(report_data, layout_node, descriptor_set->GetLayout().get(), &errorMsg);
1052 }
1053
// Validate overall state at the time of a draw call: a pipeline must be bound for the given
// bind point, descriptor sets must be bound, compatible, and up to date, pipeline draw-time
// state must be valid, and push constants must have been set. Returns true when any
// validation error was logged.
bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TYPE cmd_type, const bool indexed,
                                         const VkPipelineBindPoint bind_point, const char *function) const {
    const DrawDispatchVuid vuid = GetDrawDispatchVuid(cmd_type);
    const auto last_bound_it = cb_node->lastBound.find(bind_point);
    const PIPELINE_STATE *pPipe = nullptr;
    if (last_bound_it != cb_node->lastBound.cend()) {
        pPipe = last_bound_it->second.pipeline_state;
    }

    // A draw/dispatch/trace without a bound pipeline of the matching bind point is an immediate error
    if (nullptr == pPipe) {
        return LogError(cb_node->commandBuffer, vuid.pipeline_bound,
                        "Must not call %s on this command buffer while there is no %s pipeline bound.", function,
                        bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR
                            ? "RayTracing"
                            : bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS ? "Graphics" : "Compute");
    }

    bool result = false;
    auto const &state = last_bound_it->second;
    // Image views used by the active subpass; passed down so descriptor validation can
    // detect attachment feedback situations
    std::vector<VkImageView> attachment_views;

    if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) {
        // First check flag states
        result |= ValidateDrawStateFlags(cb_node, pPipe, indexed, vuid.dynamic_state);

        if (cb_node->activeRenderPass && cb_node->activeFramebuffer) {
            const auto &subpass = cb_node->activeRenderPass->createInfo.pSubpasses[cb_node->activeSubpass];
            attachment_views = cb_node->activeFramebuffer->GetUsedAttachments(subpass, cb_node->imagelessFramebufferAttachments);
        }
    }
    // Now complete other state checks
    string errorString;
    auto const &pipeline_layout = pPipe->pipeline_layout.get();

    // Check if the current pipeline is compatible for the maximum used set with the bound sets.
    if (pPipe->active_slots.size() > 0 && !CompatForSet(pPipe->max_active_slot, state, pipeline_layout->compat_for_set)) {
        LogObjectList objlist(pPipe->pipeline);
        objlist.add(pipeline_layout->layout);
        objlist.add(state.pipeline_layout);
        result |= LogError(objlist, vuid.compatible_pipeline,
                           "%s(): %s defined with %s is not compatible for maximum set statically used %" PRIu32
                           " with bound descriptor sets, last bound with %s",
                           CommandTypeString(cmd_type), report_data->FormatHandle(pPipe->pipeline).c_str(),
                           report_data->FormatHandle(pipeline_layout->layout).c_str(), pPipe->max_active_slot,
                           report_data->FormatHandle(state.pipeline_layout).c_str());
    }

    for (const auto &set_binding_pair : pPipe->active_slots) {
        uint32_t setIndex = set_binding_pair.first;
        // If valid set is not bound throw an error
        if ((state.per_set.size() <= setIndex) || (!state.per_set[setIndex].bound_descriptor_set)) {
            result |= LogError(cb_node->commandBuffer, kVUID_Core_DrawState_DescriptorSetNotBound,
                               "%s uses set #%u but that set is not bound.", report_data->FormatHandle(pPipe->pipeline).c_str(),
                               setIndex);
        } else if (!VerifySetLayoutCompatibility(report_data, state.per_set[setIndex].bound_descriptor_set, pipeline_layout,
                                                 setIndex, errorString)) {
            // Set is bound but not compatible w/ overlapping pipeline_layout from PSO
            VkDescriptorSet setHandle = state.per_set[setIndex].bound_descriptor_set->GetSet();
            LogObjectList objlist(setHandle);
            objlist.add(pipeline_layout->layout);
            result |= LogError(objlist, kVUID_Core_DrawState_PipelineLayoutsIncompatible,
                               "%s bound as set #%u is not compatible with overlapping %s due to: %s",
                               report_data->FormatHandle(setHandle).c_str(), setIndex,
                               report_data->FormatHandle(pipeline_layout->layout).c_str(), errorString.c_str());
        } else {  // Valid set is bound and layout compatible, validate that it's updated
            // Pull the set node
            const cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            // Validate the draw-time state for this descriptor set
            std::string err_str;
            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor
            // binding validation. Take the requested binding set and prefilter it to eliminate redundant validation checks.
            // Here, the currently bound pipeline determines whether an image validation check is redundant...
            // for images are the "req" portion of the binding_req is indirectly (but tightly) coupled to the pipeline.
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_node, *pPipe);

            // We can skip validating the descriptor set if "nothing" has changed since the last validation.
            // Same set, no image layout changes, and same "pipeline state" (binding_req_map). If there are
            // any dynamic descriptors, always revalidate rather than caching the values. We currently only
            // apply this optimization if IsManyDescriptors is true, to avoid the overhead of copying the
            // binding_req_map which could potentially be expensive.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Revalidate each time if the set has dynamic offsets
                state.per_set[setIndex].dynamicOffsets.size() > 0 ||
                // Revalidate if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_node->image_layout_change_count);
            bool need_validate = descriptor_set_changed ||
                                 // Revalidate if previous bindingReqMap doesn't include new bindingReqMap
                                 !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                binding_req_map.begin(), binding_req_map.end());

            if (need_validate) {
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only validate the bindings that haven't already been validated
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    result |= ValidateDrawState(bind_point, descriptor_set, delta_reqs, state.per_set[setIndex].dynamicOffsets,
                                                cb_node, attachment_views, function, vuid);
                } else {
                    // Full validation: set changed or the cheap caching path doesn't apply
                    result |= ValidateDrawState(bind_point, descriptor_set, binding_req_map, state.per_set[setIndex].dynamicOffsets,
                                                cb_node, attachment_views, function, vuid);
                }
            }
        }
    }

    // Check general pipeline state that needs to be validated at drawtime
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
        result |= ValidatePipelineDrawtimeState(state, cb_node, cmd_type, pPipe, function);

    // Verify if push constants have been set
    if (cb_node->push_constant_data_ranges) {
        // The command buffer's last-pushed constant ranges must match the pipeline layout's ranges
        if (pipeline_layout->push_constant_ranges != cb_node->push_constant_data_ranges) {
            LogObjectList objlist(cb_node->commandBuffer);
            objlist.add(cb_node->push_constant_pipeline_layout_set);
            objlist.add(pipeline_layout->layout);
            objlist.add(pPipe->pipeline);
            result |= LogError(
                objlist, vuid.push_constants_set, "The active push constants of %s isn't compatible with %s of active %s.",
                report_data->FormatHandle(cb_node->push_constant_pipeline_layout_set).c_str(),
                report_data->FormatHandle(pipeline_layout->layout).c_str(), report_data->FormatHandle(pPipe->pipeline).c_str());
        } else {
            // Per shader stage: every push-constant byte the shader reads must have been updated
            for (const auto &stage : pPipe->stage_state) {
                const auto *entrypoint =
                    FindEntrypointStruct(stage.shader_state.get(), stage.entry_point_name.c_str(), stage.stage_flag);
                if (!entrypoint || !entrypoint->push_constant_used_in_shader.IsUsed()) {
                    continue;
                }
                const auto it = cb_node->push_constant_data_update.find(stage.stage_flag);
                if (it == cb_node->push_constant_data_update.end()) {
                    // This error has been printed in ValidatePushConstantUsage.
                    break;
                }

                uint32_t issue_index = 0;
                int ret = ValidatePushConstantSetUpdate(it->second, entrypoint->push_constant_used_in_shader, issue_index);

                // "not set" error has been printed in ValidatePushConstantUsage.
                if (ret == 2) {
                    const auto loc_descr = entrypoint->push_constant_used_in_shader.GetLocationDesc(issue_index);
                    LogObjectList objlist(cb_node->commandBuffer);
                    objlist.add(pipeline_layout->layout);
                    result |= LogError(objlist, vuid.push_constants_set, "Push-constant buffer:%s in %s of %s is not updated.",
                                       loc_descr.c_str(), string_VkShaderStageFlags(stage.stage_flag).c_str(),
                                       report_data->FormatHandle(pipeline_layout->layout).c_str());
                    break;
                }
            }
        }
    }
    return result;
}
1215
ValidatePipelineLocked(std::vector<std::shared_ptr<PIPELINE_STATE>> const & pPipelines,int pipelineIndex) const1216 bool CoreChecks::ValidatePipelineLocked(std::vector<std::shared_ptr<PIPELINE_STATE>> const &pPipelines, int pipelineIndex) const {
1217 bool skip = false;
1218
1219 const PIPELINE_STATE *pPipeline = pPipelines[pipelineIndex].get();
1220
1221 // If create derivative bit is set, check that we've specified a base
1222 // pipeline correctly, and that the base pipeline was created to allow
1223 // derivatives.
1224 if (pPipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
1225 const PIPELINE_STATE *base_pipeline = nullptr;
1226 if (!((pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) ^
1227 (pPipeline->graphicsPipelineCI.basePipelineIndex != -1))) {
1228 // TODO: This check is a superset of VUID-VkGraphicsPipelineCreateInfo-flags-00724 and
1229 // TODO: VUID-VkGraphicsPipelineCreateInfo-flags-00725
1230 skip |= LogError(device, kVUID_Core_DrawState_InvalidPipelineCreateState,
1231 "Invalid Pipeline CreateInfo[%d]: exactly one of base pipeline index and handle must be specified",
1232 pipelineIndex);
1233 } else if (pPipeline->graphicsPipelineCI.basePipelineIndex != -1) {
1234 if (pPipeline->graphicsPipelineCI.basePipelineIndex >= pipelineIndex) {
1235 skip |=
1236 LogError(device, "VUID-vkCreateGraphicsPipelines-flags-00720",
1237 "Invalid Pipeline CreateInfo[%d]: base pipeline must occur earlier in array than derivative pipeline.",
1238 pipelineIndex);
1239 } else {
1240 base_pipeline = pPipelines[pPipeline->graphicsPipelineCI.basePipelineIndex].get();
1241 }
1242 } else if (pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) {
1243 base_pipeline = GetPipelineState(pPipeline->graphicsPipelineCI.basePipelineHandle);
1244 }
1245
1246 if (base_pipeline && !(base_pipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) {
1247 skip |= LogError(device, kVUID_Core_DrawState_InvalidPipelineCreateState,
1248 "Invalid Pipeline CreateInfo[%d]: base pipeline does not allow derivatives.", pipelineIndex);
1249 }
1250 }
1251
1252 return skip;
1253 }
1254
1255 // UNLOCKED pipeline validation. DO NOT lookup objects in the CoreChecks->* maps in this function.
ValidatePipelineUnlocked(const PIPELINE_STATE * pPipeline,uint32_t pipelineIndex) const1256 bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint32_t pipelineIndex) const {
1257 bool skip = false;
1258
1259 // Ensure the subpass index is valid. If not, then ValidateGraphicsPipelineShaderState
1260 // produces nonsense errors that confuse users. Other layers should already
1261 // emit errors for renderpass being invalid.
1262 auto subpass_desc = &pPipeline->rp_state->createInfo.pSubpasses[pPipeline->graphicsPipelineCI.subpass];
1263 if (pPipeline->graphicsPipelineCI.subpass >= pPipeline->rp_state->createInfo.subpassCount) {
1264 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-subpass-00759",
1265 "Invalid Pipeline CreateInfo[%u] State: Subpass index %u is out of range for this renderpass (0..%u).",
1266 pipelineIndex, pPipeline->graphicsPipelineCI.subpass, pPipeline->rp_state->createInfo.subpassCount - 1);
1267 subpass_desc = nullptr;
1268 }
1269
1270 if (pPipeline->graphicsPipelineCI.pColorBlendState != NULL) {
1271 const safe_VkPipelineColorBlendStateCreateInfo *color_blend_state = pPipeline->graphicsPipelineCI.pColorBlendState;
1272 if (subpass_desc && color_blend_state->attachmentCount != subpass_desc->colorAttachmentCount) {
1273 skip |= LogError(
1274 device, "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746",
1275 "vkCreateGraphicsPipelines() pCreateInfo[%u]: %s subpass %u has colorAttachmentCount of %u which doesn't "
1276 "match the pColorBlendState->attachmentCount of %u.",
1277 pipelineIndex, report_data->FormatHandle(pPipeline->rp_state->renderPass).c_str(),
1278 pPipeline->graphicsPipelineCI.subpass, subpass_desc->colorAttachmentCount, color_blend_state->attachmentCount);
1279 }
1280 if (!enabled_features.core.independentBlend) {
1281 if (pPipeline->attachments.size() > 1) {
1282 const VkPipelineColorBlendAttachmentState *const pAttachments = &pPipeline->attachments[0];
1283 for (size_t i = 1; i < pPipeline->attachments.size(); i++) {
1284 // Quoting the spec: "If [the independent blend] feature is not enabled, the VkPipelineColorBlendAttachmentState
1285 // settings for all color attachments must be identical." VkPipelineColorBlendAttachmentState contains
1286 // only attachment state, so memcmp is best suited for the comparison
1287 if (memcmp(static_cast<const void *>(pAttachments), static_cast<const void *>(&pAttachments[i]),
1288 sizeof(pAttachments[0]))) {
1289 skip |=
1290 LogError(device, "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605",
1291 "Invalid Pipeline CreateInfo[%u]: If independent blend feature not enabled, all elements of "
1292 "pAttachments must be identical.",
1293 pipelineIndex);
1294 break;
1295 }
1296 }
1297 }
1298 }
1299 if (!enabled_features.core.logicOp && (pPipeline->graphicsPipelineCI.pColorBlendState->logicOpEnable != VK_FALSE)) {
1300 skip |= LogError(
1301 device, "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606",
1302 "Invalid Pipeline CreateInfo[%u]: If logic operations feature not enabled, logicOpEnable must be VK_FALSE.",
1303 pipelineIndex);
1304 }
1305 for (size_t i = 0; i < pPipeline->attachments.size(); i++) {
1306 if ((pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1307 (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1308 (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1309 (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1310 if (!enabled_features.core.dualSrcBlend) {
1311 skip |= LogError(
1312 device, "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608",
1313 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1314 "].srcColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1315 "enabled.",
1316 pipelineIndex, i, pPipeline->attachments[i].srcColorBlendFactor);
1317 }
1318 }
1319 if ((pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1320 (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1321 (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1322 (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1323 if (!enabled_features.core.dualSrcBlend) {
1324 skip |= LogError(
1325 device, "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609",
1326 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1327 "].dstColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1328 "enabled.",
1329 pipelineIndex, i, pPipeline->attachments[i].dstColorBlendFactor);
1330 }
1331 }
1332 if ((pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1333 (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1334 (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1335 (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1336 if (!enabled_features.core.dualSrcBlend) {
1337 skip |= LogError(
1338 device, "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610",
1339 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1340 "].srcAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1341 "enabled.",
1342 pipelineIndex, i, pPipeline->attachments[i].srcAlphaBlendFactor);
1343 }
1344 }
1345 if ((pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
1346 (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
1347 (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
1348 (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
1349 if (!enabled_features.core.dualSrcBlend) {
1350 skip |= LogError(
1351 device, "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611",
1352 "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
1353 "].dstAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
1354 "enabled.",
1355 pipelineIndex, i, pPipeline->attachments[i].dstAlphaBlendFactor);
1356 }
1357 }
1358 }
1359 }
1360
1361 if (ValidateGraphicsPipelineShaderState(pPipeline)) {
1362 skip = true;
1363 }
1364 // Each shader's stage must be unique
1365 if (pPipeline->duplicate_shaders) {
1366 for (uint32_t stage = VK_SHADER_STAGE_VERTEX_BIT; stage & VK_SHADER_STAGE_ALL_GRAPHICS; stage <<= 1) {
1367 if (pPipeline->duplicate_shaders & stage) {
1368 skip |= LogError(device, kVUID_Core_DrawState_InvalidPipelineCreateState,
1369 "Invalid Pipeline CreateInfo[%u] State: Multiple shaders provided for stage %s", pipelineIndex,
1370 string_VkShaderStageFlagBits(VkShaderStageFlagBits(stage)));
1371 }
1372 }
1373 }
1374 if (device_extensions.vk_nv_mesh_shader) {
1375 // VS or mesh is required
1376 if (!(pPipeline->active_shaders & (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_MESH_BIT_NV))) {
1377 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-stage-02096",
1378 "Invalid Pipeline CreateInfo[%u] State: Vertex Shader or Mesh Shader required.", pipelineIndex);
1379 }
1380 // Can't mix mesh and VTG
1381 if ((pPipeline->active_shaders & (VK_SHADER_STAGE_MESH_BIT_NV | VK_SHADER_STAGE_TASK_BIT_NV)) &&
1382 (pPipeline->active_shaders &
1383 (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
1384 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))) {
1385 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pStages-02095",
1386 "Invalid Pipeline CreateInfo[%u] State: Geometric shader stages must either be all mesh (mesh | task) "
1387 "or all VTG (vertex, tess control, tess eval, geom).",
1388 pipelineIndex);
1389 }
1390 } else {
1391 // VS is required
1392 if (!(pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT)) {
1393 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-stage-00727",
1394 "Invalid Pipeline CreateInfo[%u] State: Vertex Shader required.", pipelineIndex);
1395 }
1396 }
1397
1398 if (!enabled_features.mesh_shader.meshShader && (pPipeline->active_shaders & VK_SHADER_STAGE_MESH_BIT_NV)) {
1399 skip |= LogError(device, "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
1400 "Invalid Pipeline CreateInfo[%u] State: Mesh Shader not supported.", pipelineIndex);
1401 }
1402
1403 if (!enabled_features.mesh_shader.taskShader && (pPipeline->active_shaders & VK_SHADER_STAGE_TASK_BIT_NV)) {
1404 skip |= LogError(device, "VUID-VkPipelineShaderStageCreateInfo-stage-02092",
1405 "Invalid Pipeline CreateInfo[%u] State: Task Shader not supported.", pipelineIndex);
1406 }
1407
1408 // Either both or neither TC/TE shaders should be defined
1409 bool has_control = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0;
1410 bool has_eval = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0;
1411 if (has_control && !has_eval) {
1412 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pStages-00729",
1413 "Invalid Pipeline CreateInfo[%u] State: TE and TC shaders must be included or excluded as a pair.",
1414 pipelineIndex);
1415 }
1416 if (!has_control && has_eval) {
1417 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pStages-00730",
1418 "Invalid Pipeline CreateInfo[%u] State: TE and TC shaders must be included or excluded as a pair.",
1419 pipelineIndex);
1420 }
1421 // Compute shaders should be specified independent of Gfx shaders
1422 if (pPipeline->active_shaders & VK_SHADER_STAGE_COMPUTE_BIT) {
1423 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-stage-00728",
1424 "Invalid Pipeline CreateInfo[%u] State: Do not specify Compute Shader for Gfx Pipeline.", pipelineIndex);
1425 }
1426
1427 if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pInputAssemblyState) {
1428 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
1429 "Invalid Pipeline CreateInfo[%u] State: Missing pInputAssemblyState.", pipelineIndex);
1430 }
1431
1432 // VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid for tessellation pipelines.
1433 // Mismatching primitive topology and tessellation fails graphics pipeline creation.
1434 if (has_control && has_eval &&
1435 (!pPipeline->graphicsPipelineCI.pInputAssemblyState ||
1436 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology != VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
1437 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pStages-00736",
1438 "Invalid Pipeline CreateInfo[%u] State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA topology for "
1439 "tessellation pipelines.",
1440 pipelineIndex);
1441 }
1442 if (pPipeline->graphicsPipelineCI.pInputAssemblyState) {
1443 if (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
1444 if (!has_control || !has_eval) {
1445 skip |= LogError(
1446 device, "VUID-VkGraphicsPipelineCreateInfo-topology-00737",
1447 "Invalid Pipeline CreateInfo[%u] State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid "
1448 "for tessellation pipelines.",
1449 pipelineIndex);
1450 }
1451 }
1452
1453 if ((pPipeline->graphicsPipelineCI.pInputAssemblyState->primitiveRestartEnable == VK_TRUE) &&
1454 (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST ||
1455 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
1456 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST ||
1457 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
1458 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
1459 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
1460 skip |= LogError(
1461 device, "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
1462 "vkCreateGraphicsPipelines() pCreateInfo[%u]: topology is %s and primitiveRestartEnable is VK_TRUE. It is invalid.",
1463 pipelineIndex, string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
1464 }
1465 if ((enabled_features.core.geometryShader == VK_FALSE) &&
1466 (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
1467 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY ||
1468 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
1469 pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY)) {
1470 skip |=
1471 LogError(device, "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429",
1472 "vkCreateGraphicsPipelines() pCreateInfo[%u]: topology is %s and geometry shaders feature is not enabled. "
1473 "It is invalid.",
1474 pipelineIndex, string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
1475 }
1476 if ((enabled_features.core.tessellationShader == VK_FALSE) &&
1477 (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
1478 skip |=
1479 LogError(device, "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
1480 "vkCreateGraphicsPipelines() pCreateInfo[%u]: topology is %s and tessellation shaders feature is not "
1481 "enabled. It is invalid.",
1482 pipelineIndex, string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
1483 }
1484 }
1485
1486 // If a rasterization state is provided...
1487 if (pPipeline->graphicsPipelineCI.pRasterizationState) {
1488 if ((pPipeline->graphicsPipelineCI.pRasterizationState->depthClampEnable == VK_TRUE) &&
1489 (!enabled_features.core.depthClamp)) {
1490 skip |= LogError(device, "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782",
1491 "vkCreateGraphicsPipelines() pCreateInfo[%u]: the depthClamp device feature is disabled: the "
1492 "depthClampEnable member "
1493 "of the VkPipelineRasterizationStateCreateInfo structure must be set to VK_FALSE.",
1494 pipelineIndex);
1495 }
1496
1497 if (!IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_BIAS) &&
1498 (pPipeline->graphicsPipelineCI.pRasterizationState->depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
1499 skip |= LogError(device, kVUID_Core_DrawState_InvalidFeature,
1500 "vkCreateGraphicsPipelines() pCreateInfo[%u]: the depthBiasClamp device feature is disabled: the "
1501 "depthBiasClamp member "
1502 "of the VkPipelineRasterizationStateCreateInfo structure must be set to 0.0 unless the "
1503 "VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state is enabled",
1504 pipelineIndex);
1505 }
1506
1507 // If rasterization is enabled...
1508 if (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) {
1509 if ((pPipeline->graphicsPipelineCI.pMultisampleState->alphaToOneEnable == VK_TRUE) &&
1510 (!enabled_features.core.alphaToOne)) {
1511 skip |= LogError(
1512 device, "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785",
1513 "vkCreateGraphicsPipelines() pCreateInfo[%u]: the alphaToOne device feature is disabled: the alphaToOneEnable "
1514 "member of the VkPipelineMultisampleStateCreateInfo structure must be set to VK_FALSE.",
1515 pipelineIndex);
1516 }
1517
1518 // If subpass uses a depth/stencil attachment, pDepthStencilState must be a pointer to a valid structure
1519 if (subpass_desc && subpass_desc->pDepthStencilAttachment &&
1520 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1521 if (!pPipeline->graphicsPipelineCI.pDepthStencilState) {
1522 skip |=
1523 LogError(device, "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752",
1524 "Invalid Pipeline CreateInfo[%u] State: pDepthStencilState is NULL when rasterization is enabled "
1525 "and subpass uses a depth/stencil attachment.",
1526 pipelineIndex);
1527
1528 } else if ((pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) &&
1529 (!enabled_features.core.depthBounds)) {
1530 skip |= LogError(device, "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598",
1531 "vkCreateGraphicsPipelines() pCreateInfo[%u]: the depthBounds device feature is disabled: the "
1532 "depthBoundsTestEnable member of the VkPipelineDepthStencilStateCreateInfo structure must be "
1533 "set to VK_FALSE.",
1534 pipelineIndex);
1535 }
1536 }
1537
1538 // If subpass uses color attachments, pColorBlendState must be valid pointer
1539 if (subpass_desc) {
1540 uint32_t color_attachment_count = 0;
1541 for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
1542 if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
1543 ++color_attachment_count;
1544 }
1545 }
1546 if (color_attachment_count > 0 && pPipeline->graphicsPipelineCI.pColorBlendState == nullptr) {
1547 skip |= LogError(
1548 device, "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753",
1549 "Invalid Pipeline CreateInfo[%u] State: pColorBlendState is NULL when rasterization is enabled and "
1550 "subpass uses color attachments.",
1551 pipelineIndex);
1552 }
1553 }
1554 }
1555 }
1556
1557 if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pVertexInputState) {
1558 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
1559 "Invalid Pipeline CreateInfo[%u] State: Missing pVertexInputState.", pipelineIndex);
1560 }
1561
1562 auto vi = pPipeline->graphicsPipelineCI.pVertexInputState;
1563 if (vi != NULL) {
1564 for (uint32_t j = 0; j < vi->vertexAttributeDescriptionCount; j++) {
1565 VkFormat format = vi->pVertexAttributeDescriptions[j].format;
1566 // Internal call to get format info. Still goes through layers, could potentially go directly to ICD.
1567 VkFormatProperties properties;
1568 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &properties);
1569 if ((properties.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) == 0) {
1570 skip |=
1571 LogError(device, "VUID-VkVertexInputAttributeDescription-format-00623",
1572 "vkCreateGraphicsPipelines: pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptions[%d].format "
1573 "(%s) is not a supported vertex buffer format.",
1574 pipelineIndex, j, string_VkFormat(format));
1575 }
1576 }
1577 }
1578
1579 if (subpass_desc && pPipeline->graphicsPipelineCI.pMultisampleState) {
1580 const safe_VkPipelineMultisampleStateCreateInfo *multisample_state = pPipeline->graphicsPipelineCI.pMultisampleState;
1581 auto accumColorSamples = [subpass_desc, pPipeline](uint32_t &samples) {
1582 for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; i++) {
1583 const auto attachment = subpass_desc->pColorAttachments[i].attachment;
1584 if (attachment != VK_ATTACHMENT_UNUSED) {
1585 samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1586 }
1587 }
1588 };
1589
1590 if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples)) {
1591 uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
1592 uint32_t subpass_num_samples = 0;
1593
1594 accumColorSamples(subpass_num_samples);
1595
1596 if (subpass_desc->pDepthStencilAttachment &&
1597 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1598 const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
1599 subpass_num_samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1600 }
1601
1602 // subpass_num_samples is 0 when the subpass has no attachments or if all attachments are VK_ATTACHMENT_UNUSED.
1603 // Only validate the value of subpass_num_samples if the subpass has attachments that are not VK_ATTACHMENT_UNUSED.
1604 if (subpass_num_samples && (!IsPowerOfTwo(subpass_num_samples) || (subpass_num_samples != raster_samples))) {
1605 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-subpass-00757",
1606 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
1607 "does not match the number of samples of the RenderPass color and/or depth attachment.",
1608 pipelineIndex, raster_samples);
1609 }
1610 }
1611
1612 if (device_extensions.vk_amd_mixed_attachment_samples) {
1613 VkSampleCountFlagBits max_sample_count = static_cast<VkSampleCountFlagBits>(0);
1614 for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
1615 if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
1616 max_sample_count = std::max(
1617 max_sample_count,
1618 pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pColorAttachments[i].attachment].samples);
1619 }
1620 }
1621 if (subpass_desc->pDepthStencilAttachment &&
1622 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1623 max_sample_count = std::max(
1624 max_sample_count,
1625 pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment].samples);
1626 }
1627 if ((pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) &&
1628 (max_sample_count != static_cast<VkSampleCountFlagBits>(0)) &&
1629 (multisample_state->rasterizationSamples != max_sample_count)) {
1630 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-subpass-01505",
1631 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%s) != max "
1632 "attachment samples (%s) used in subpass %u.",
1633 pipelineIndex, string_VkSampleCountFlagBits(multisample_state->rasterizationSamples),
1634 string_VkSampleCountFlagBits(max_sample_count), pPipeline->graphicsPipelineCI.subpass);
1635 }
1636 }
1637
1638 if (device_extensions.vk_nv_framebuffer_mixed_samples) {
1639 uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
1640 uint32_t subpass_color_samples = 0;
1641
1642 accumColorSamples(subpass_color_samples);
1643
1644 if (subpass_desc->pDepthStencilAttachment &&
1645 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1646 const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
1647 const uint32_t subpass_depth_samples =
1648 static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1649
1650 if (pPipeline->graphicsPipelineCI.pDepthStencilState) {
1651 const bool ds_test_enabled =
1652 (pPipeline->graphicsPipelineCI.pDepthStencilState->depthTestEnable == VK_TRUE) ||
1653 (pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) ||
1654 (pPipeline->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE);
1655
1656 if (ds_test_enabled && (!IsPowerOfTwo(subpass_depth_samples) || (raster_samples != subpass_depth_samples))) {
1657 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-subpass-01411",
1658 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
1659 "does not match the number of samples of the RenderPass depth attachment (%u).",
1660 pipelineIndex, raster_samples, subpass_depth_samples);
1661 }
1662 }
1663 }
1664
1665 if (IsPowerOfTwo(subpass_color_samples)) {
1666 if (raster_samples < subpass_color_samples) {
1667 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-subpass-01412",
1668 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
1669 "is not greater or equal to the number of samples of the RenderPass color attachment (%u).",
1670 pipelineIndex, raster_samples, subpass_color_samples);
1671 }
1672
1673 if (multisample_state) {
1674 if ((raster_samples > subpass_color_samples) && (multisample_state->sampleShadingEnable == VK_TRUE)) {
1675 skip |=
1676 LogError(device, "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
1677 "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->sampleShadingEnable must be "
1678 "VK_FALSE when "
1679 "pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) is greater than the number of "
1680 "samples of the "
1681 "subpass color attachment (%u).",
1682 pipelineIndex, pipelineIndex, raster_samples, subpass_color_samples);
1683 }
1684
1685 const auto *coverage_modulation_state =
1686 lvl_find_in_chain<VkPipelineCoverageModulationStateCreateInfoNV>(multisample_state->pNext);
1687
1688 if (coverage_modulation_state && (coverage_modulation_state->coverageModulationTableEnable == VK_TRUE)) {
1689 if (coverage_modulation_state->coverageModulationTableCount != (raster_samples / subpass_color_samples)) {
1690 skip |= LogError(
1691 device, "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405",
1692 "vkCreateGraphicsPipelines: pCreateInfos[%d] VkPipelineCoverageModulationStateCreateInfoNV "
1693 "coverageModulationTableCount of %u is invalid.",
1694 pipelineIndex, coverage_modulation_state->coverageModulationTableCount);
1695 }
1696 }
1697 }
1698 }
1699 }
1700
1701 if (device_extensions.vk_nv_coverage_reduction_mode) {
1702 uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
1703 uint32_t subpass_color_samples = 0;
1704 uint32_t subpass_depth_samples = 0;
1705
1706 accumColorSamples(subpass_color_samples);
1707
1708 if (subpass_desc->pDepthStencilAttachment &&
1709 subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
1710 const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
1711 subpass_depth_samples = static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
1712 }
1713
1714 if (multisample_state && IsPowerOfTwo(subpass_color_samples) &&
1715 (subpass_depth_samples == 0 || IsPowerOfTwo(subpass_depth_samples))) {
1716 const auto *coverage_reduction_state =
1717 lvl_find_in_chain<VkPipelineCoverageReductionStateCreateInfoNV>(multisample_state->pNext);
1718
1719 if (coverage_reduction_state) {
1720 const VkCoverageReductionModeNV coverage_reduction_mode = coverage_reduction_state->coverageReductionMode;
1721 uint32_t combination_count = 0;
1722 std::vector<VkFramebufferMixedSamplesCombinationNV> combinations;
1723 DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physical_device, &combination_count,
1724 nullptr);
1725 combinations.resize(combination_count);
1726 DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physical_device, &combination_count,
1727 &combinations[0]);
1728
1729 bool combination_found = false;
1730 for (const auto &combination : combinations) {
1731 if (coverage_reduction_mode == combination.coverageReductionMode &&
1732 raster_samples == combination.rasterizationSamples &&
1733 subpass_depth_samples == combination.depthStencilSamples &&
1734 subpass_color_samples == combination.colorSamples) {
1735 combination_found = true;
1736 break;
1737 }
1738 }
1739
1740 if (!combination_found) {
1741 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722",
1742 "vkCreateGraphicsPipelines: pCreateInfos[%d] the specified combination of coverage "
1743 "reduction mode (%s), pMultisampleState->rasterizationSamples (%u), sample counts for "
1744 "the subpass color and depth/stencil attachments is not a valid combination returned by "
1745 "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.",
1746 pipelineIndex, string_VkCoverageReductionModeNV(coverage_reduction_mode));
1747 }
1748 }
1749 }
1750 }
1751
1752 if (device_extensions.vk_nv_fragment_coverage_to_color) {
1753 const auto coverage_to_color_state = lvl_find_in_chain<VkPipelineCoverageToColorStateCreateInfoNV>(multisample_state);
1754
1755 if (coverage_to_color_state && coverage_to_color_state->coverageToColorEnable == VK_TRUE) {
1756 bool attachment_is_valid = false;
1757 std::string error_detail;
1758
1759 if (coverage_to_color_state->coverageToColorLocation < subpass_desc->colorAttachmentCount) {
1760 const auto color_attachment_ref =
1761 subpass_desc->pColorAttachments[coverage_to_color_state->coverageToColorLocation];
1762 if (color_attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
1763 const auto color_attachment = pPipeline->rp_state->createInfo.pAttachments[color_attachment_ref.attachment];
1764
1765 switch (color_attachment.format) {
1766 case VK_FORMAT_R8_UINT:
1767 case VK_FORMAT_R8_SINT:
1768 case VK_FORMAT_R16_UINT:
1769 case VK_FORMAT_R16_SINT:
1770 case VK_FORMAT_R32_UINT:
1771 case VK_FORMAT_R32_SINT:
1772 attachment_is_valid = true;
1773 break;
1774 default:
1775 std::ostringstream str;
1776 str << "references an attachment with an invalid format ("
1777 << string_VkFormat(color_attachment.format) << ").";
1778 error_detail = str.str();
1779 break;
1780 }
1781 } else {
1782 std::ostringstream str;
1783 str << "references an invalid attachment. The subpass pColorAttachments["
1784 << coverage_to_color_state->coverageToColorLocation
1785 << "].attachment has the value VK_ATTACHMENT_UNUSED.";
1786 error_detail = str.str();
1787 }
1788 } else {
1789 std::ostringstream str;
1790 str << "references an non-existing attachment since the subpass colorAttachmentCount is "
1791 << subpass_desc->colorAttachmentCount << ".";
1792 error_detail = str.str();
1793 }
1794
1795 if (!attachment_is_valid) {
1796 skip |= LogError(device, "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
1797 "vkCreateGraphicsPipelines: pCreateInfos[%" PRId32
1798 "].pMultisampleState VkPipelineCoverageToColorStateCreateInfoNV "
1799 "coverageToColorLocation = %" PRIu32 " %s",
1800 pipelineIndex, coverage_to_color_state->coverageToColorLocation, error_detail.c_str());
1801 }
1802 }
1803 }
1804
1805 if (device_extensions.vk_ext_sample_locations) {
1806 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
1807 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(multisample_state->pNext);
1808
1809 if (sample_location_state != nullptr) {
1810 if ((sample_location_state->sampleLocationsEnable == VK_TRUE) &&
1811 (IsDynamic(pPipeline, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT) == false)) {
1812 const VkSampleLocationsInfoEXT sample_location_info = sample_location_state->sampleLocationsInfo;
1813 skip |= ValidateSampleLocationsInfo(&sample_location_info, "vkCreateGraphicsPipelines");
1814 const VkExtent2D grid_size = sample_location_info.sampleLocationGridSize;
1815
1816 VkMultisamplePropertiesEXT multisample_prop;
1817 DispatchGetPhysicalDeviceMultisamplePropertiesEXT(physical_device, multisample_state->rasterizationSamples,
1818 &multisample_prop);
1819 const VkExtent2D max_grid_size = multisample_prop.maxSampleLocationGridSize;
1820
1821 // Note order or "divide" in "sampleLocationsInfo must evenly divide VkMultisamplePropertiesEXT"
1822 if (SafeModulo(max_grid_size.width, grid_size.width) != 0) {
1823 skip |= LogError(
1824 device, "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01521",
1825 "vkCreateGraphicsPipelines() pCreateInfo[%u]: Because there is no dynamic state for Sample Location "
1826 "and sampleLocationEnable is true, the "
1827 "VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsInfo::sampleLocationGridSize.width (%u) "
1828 "must be evenly divided by VkMultisamplePropertiesEXT::sampleLocationGridSize.width (%u).",
1829 pipelineIndex, grid_size.width, max_grid_size.width);
1830 }
1831 if (SafeModulo(max_grid_size.height, grid_size.height) != 0) {
1832 skip |= LogError(
1833 device, "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01522",
1834 "vkCreateGraphicsPipelines() pCreateInfo[%u]: Because there is no dynamic state for Sample Location "
1835 "and sampleLocationEnable is true, the "
1836 "VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsInfo::sampleLocationGridSize.height (%u) "
1837 "must be evenly divided by VkMultisamplePropertiesEXT::sampleLocationGridSize.height (%u).",
1838 pipelineIndex, grid_size.height, max_grid_size.height);
1839 }
1840 if (sample_location_info.sampleLocationsPerPixel != multisample_state->rasterizationSamples) {
1841 skip |= LogError(
1842 device, "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523",
1843 "vkCreateGraphicsPipelines() pCreateInfo[%u]: Because there is no dynamic state for Sample Location "
1844 "and sampleLocationEnable is true, the "
1845 "VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsInfo::sampleLocationsPerPixel (%s) must "
1846 "be the same as the VkPipelineMultisampleStateCreateInfo::rasterizationSamples (%s).",
1847 pipelineIndex, string_VkSampleCountFlagBits(sample_location_info.sampleLocationsPerPixel),
1848 string_VkSampleCountFlagBits(multisample_state->rasterizationSamples));
1849 }
1850 }
1851 }
1852 }
1853 }
1854
1855 skip |= ValidatePipelineCacheControlFlags(pPipeline->graphicsPipelineCI.flags, pipelineIndex, "vkCreateGraphicsPipelines",
1856 "VUID-VkGraphicsPipelineCreateInfo-pipelineCreationCacheControl-02878");
1857
1858 // VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03378
1859 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState &&
1860 (IsDynamic(pPipeline, VK_DYNAMIC_STATE_CULL_MODE_EXT) || IsDynamic(pPipeline, VK_DYNAMIC_STATE_FRONT_FACE_EXT) ||
1861 IsDynamic(pPipeline, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT) ||
1862 IsDynamic(pPipeline, VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT) ||
1863 IsDynamic(pPipeline, VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT) ||
1864 IsDynamic(pPipeline, VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT) ||
1865 IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT) ||
1866 IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT) ||
1867 IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT) ||
1868 IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT) ||
1869 IsDynamic(pPipeline, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT) || IsDynamic(pPipeline, VK_DYNAMIC_STATE_STENCIL_OP_EXT))) {
1870 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03378",
1871 "Extended dynamic state used by the extendedDynamicState feature is not enabled");
1872 }
1873
1874 return skip;
1875 }
1876
1877 // Block of code at start here specifically for managing/tracking DSs
1878
1879 // Validate that given set is valid and that it's not being used by an in-flight CmdBuffer
1880 // func_str is the name of the calling function
1881 // Return false if no errors occur
1882 // Return true if validation error occurs and callback returns true (to skip upcoming API call down the chain)
ValidateIdleDescriptorSet(VkDescriptorSet set,const char * func_str) const1883 bool CoreChecks::ValidateIdleDescriptorSet(VkDescriptorSet set, const char *func_str) const {
1884 if (disabled[idle_descriptor_set]) return false;
1885 bool skip = false;
1886 auto set_node = setMap.find(set);
1887 if (set_node != setMap.end()) {
1888 // TODO : This covers various error cases so should pass error enum into this function and use passed in enum here
1889 if (set_node->second->in_use.load()) {
1890 skip |= LogError(set, "VUID-vkFreeDescriptorSets-pDescriptorSets-00309",
1891 "Cannot call %s() on %s that is in use by a command buffer.", func_str,
1892 report_data->FormatHandle(set).c_str());
1893 }
1894 }
1895 return skip;
1896 }
1897
1898 // If a renderpass is active, verify that the given command type is appropriate for current subpass state
ValidateCmdSubpassState(const CMD_BUFFER_STATE * pCB,const CMD_TYPE cmd_type) const1899 bool CoreChecks::ValidateCmdSubpassState(const CMD_BUFFER_STATE *pCB, const CMD_TYPE cmd_type) const {
1900 if (!pCB->activeRenderPass) return false;
1901 bool skip = false;
1902 if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS &&
1903 (cmd_type != CMD_EXECUTECOMMANDS && cmd_type != CMD_NEXTSUBPASS && cmd_type != CMD_ENDRENDERPASS &&
1904 cmd_type != CMD_NEXTSUBPASS2 && cmd_type != CMD_ENDRENDERPASS2)) {
1905 skip |= LogError(pCB->commandBuffer, kVUID_Core_DrawState_InvalidCommandBuffer,
1906 "Commands cannot be called in a subpass using secondary command buffers.");
1907 } else if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_INLINE && cmd_type == CMD_EXECUTECOMMANDS) {
1908 skip |= LogError(pCB->commandBuffer, kVUID_Core_DrawState_InvalidCommandBuffer,
1909 "vkCmdExecuteCommands() cannot be called in a subpass using inline commands.");
1910 }
1911 return skip;
1912 }
1913
ValidateCmdQueueFlags(const CMD_BUFFER_STATE * cb_node,const char * caller_name,VkQueueFlags required_flags,const char * error_code) const1914 bool CoreChecks::ValidateCmdQueueFlags(const CMD_BUFFER_STATE *cb_node, const char *caller_name, VkQueueFlags required_flags,
1915 const char *error_code) const {
1916 auto pool = cb_node->command_pool.get();
1917 if (pool) {
1918 const uint32_t queue_family_index = pool->queueFamilyIndex;
1919 const VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[queue_family_index].queueFlags;
1920 if (!(required_flags & queue_flags)) {
1921 string required_flags_string;
1922 for (auto flag : {VK_QUEUE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_SPARSE_BINDING_BIT,
1923 VK_QUEUE_PROTECTED_BIT}) {
1924 if (flag & required_flags) {
1925 if (required_flags_string.size()) {
1926 required_flags_string += " or ";
1927 }
1928 required_flags_string += string_VkQueueFlagBits(flag);
1929 }
1930 }
1931 return LogError(cb_node->commandBuffer, error_code,
1932 "%s(): Called in command buffer %s which was allocated from the command pool %s which was created with "
1933 "queueFamilyIndex %u which doesn't contain the required %s capability flags.",
1934 caller_name, report_data->FormatHandle(cb_node->commandBuffer).c_str(),
1935 report_data->FormatHandle(pool->commandPool).c_str(), queue_family_index,
1936 required_flags_string.c_str());
1937 }
1938 }
1939 return false;
1940 }
1941
ValidateSampleLocationsInfo(const VkSampleLocationsInfoEXT * pSampleLocationsInfo,const char * apiName) const1942 bool CoreChecks::ValidateSampleLocationsInfo(const VkSampleLocationsInfoEXT *pSampleLocationsInfo, const char *apiName) const {
1943 bool skip = false;
1944 const VkSampleCountFlagBits sample_count = pSampleLocationsInfo->sampleLocationsPerPixel;
1945 const uint32_t sample_total_size = pSampleLocationsInfo->sampleLocationGridSize.width *
1946 pSampleLocationsInfo->sampleLocationGridSize.height * SampleCountSize(sample_count);
1947 if (pSampleLocationsInfo->sampleLocationsCount != sample_total_size) {
1948 skip |= LogError(device, "VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-01527",
1949 "%s: VkSampleLocationsInfoEXT::sampleLocationsCount (%u) must equal grid width * grid height * pixel "
1950 "sample rate which currently is (%u * %u * %u).",
1951 apiName, pSampleLocationsInfo->sampleLocationsCount, pSampleLocationsInfo->sampleLocationGridSize.width,
1952 pSampleLocationsInfo->sampleLocationGridSize.height, SampleCountSize(sample_count));
1953 }
1954 if ((phys_dev_ext_props.sample_locations_props.sampleLocationSampleCounts & sample_count) == 0) {
1955 skip |= LogError(device, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-01526",
1956 "%s: VkSampleLocationsInfoEXT::sampleLocationsPerPixel of %s is not supported by the device, please check "
1957 "VkPhysicalDeviceSampleLocationsPropertiesEXT::sampleLocationSampleCounts for valid sample counts.",
1958 apiName, string_VkSampleCountFlagBits(sample_count));
1959 }
1960
1961 return skip;
1962 }
1963
GetCauseStr(VulkanTypedHandle obj)1964 static char const *GetCauseStr(VulkanTypedHandle obj) {
1965 if (obj.type == kVulkanObjectTypeDescriptorSet) return "destroyed or updated";
1966 if (obj.type == kVulkanObjectTypeCommandBuffer) return "destroyed or rerecorded";
1967 return "destroyed";
1968 }
1969
ReportInvalidCommandBuffer(const CMD_BUFFER_STATE * cb_state,const char * call_source) const1970 bool CoreChecks::ReportInvalidCommandBuffer(const CMD_BUFFER_STATE *cb_state, const char *call_source) const {
1971 bool skip = false;
1972 for (auto obj : cb_state->broken_bindings) {
1973 const char *cause_str = GetCauseStr(obj);
1974 string VUID;
1975 std::ostringstream str;
1976 str << kVUID_Core_DrawState_InvalidCommandBuffer << "-" << object_string[obj.type];
1977 VUID = str.str();
1978 LogObjectList objlist(cb_state->commandBuffer);
1979 objlist.add(obj);
1980 skip |=
1981 LogError(objlist, VUID.c_str(), "You are adding %s to %s that is invalid because bound %s was %s.", call_source,
1982 report_data->FormatHandle(cb_state->commandBuffer).c_str(), report_data->FormatHandle(obj).c_str(), cause_str);
1983 }
1984 return skip;
1985 }
1986
// 'commandBuffer must be in the recording state' valid usage error code for each command
// Autogenerated as part of the vk_validation_error_message.h codegen
// Indexed by CMD_TYPE; consumed by ValidateCmd() when a command is recorded outside Begin/End.
static const std::array<const char *, CMD_RANGE_SIZE> must_be_recording_list = {{VUID_MUST_BE_RECORDING_LIST}};
1990 // This accounts for the following VUIDs, enumerated here for search and tracking purposes:
1991 // VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording
1992 // VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording
1993 // VUID-vkCmdBeginQuery-commandBuffer-recording
1994 // VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording
1995 // VUID-vkCmdBeginRenderPass-commandBuffer-recording
1996 // VUID-vkCmdBeginRenderPass2-commandBuffer-recording
1997 // VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-recording
1998 // VUID-vkCmdBindDescriptorSets-commandBuffer-recording
1999 // VUID-vkCmdBindIndexBuffer-commandBuffer-recording
2000 // VUID-vkCmdBindPipeline-commandBuffer-recording
2001 // VUID-vkCmdBindPipelineShaderGroupNV-commandBuffer-recording
2002 // VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording
2003 // VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-recording
2004 // VUID-vkCmdBindVertexBuffers-commandBuffer-recording
2005 // VUID-vkCmdBlitImage-commandBuffer-recording
2006 // VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-recording
2007 // VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-recording
2008 // VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording
2009 // VUID-vkCmdClearAttachments-commandBuffer-recording
2010 // VUID-vkCmdClearColorImage-commandBuffer-recording
2011 // VUID-vkCmdClearDepthStencilImage-commandBuffer-recording
2012 // VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-recording
2013 // VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording
2014 // VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-recording
2015 // VUID-vkCmdCopyBuffer-commandBuffer-recording
2016 // VUID-vkCmdCopyBufferToImage-commandBuffer-recording
2017 // VUID-vkCmdCopyImage-commandBuffer-recording
2018 // VUID-vkCmdCopyImageToBuffer-commandBuffer-recording
2019 // VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-recording
2020 // VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording
2021 // VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording
2022 // VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording
2023 // VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording
2024 // VUID-vkCmdDispatch-commandBuffer-recording
2025 // VUID-vkCmdDispatchBase-commandBuffer-recording
2026 // VUID-vkCmdDispatchIndirect-commandBuffer-recording
2027 // VUID-vkCmdDraw-commandBuffer-recording
2028 // VUID-vkCmdDrawIndexed-commandBuffer-recording
2029 // VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording
2030 // VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-recording
2031 // VUID-vkCmdDrawIndirect-commandBuffer-recording
2032 // VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording
2033 // VUID-vkCmdDrawIndirectCount-commandBuffer-recording
2034 // VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording
2035 // VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording
2036 // VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording
2037 // VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording
2038 // VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording
2039 // VUID-vkCmdEndQuery-commandBuffer-recording
2040 // VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording
2041 // VUID-vkCmdEndRenderPass-commandBuffer-recording
2042 // VUID-vkCmdEndRenderPass2-commandBuffer-recording
2043 // VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-recording
2044 // VUID-vkCmdExecuteCommands-commandBuffer-recording
2045 // VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-recording
2046 // VUID-vkCmdFillBuffer-commandBuffer-recording
2047 // VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording
2048 // VUID-vkCmdNextSubpass-commandBuffer-recording
2049 // VUID-vkCmdNextSubpass2-commandBuffer-recording
2050 // VUID-vkCmdPipelineBarrier-commandBuffer-recording
2051 // VUID-vkCmdPreprocessGeneratedCommandsNV-commandBuffer-recording
2052 // VUID-vkCmdPushConstants-commandBuffer-recording
2053 // VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording
2054 // VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording
2055 // VUID-vkCmdResetEvent-commandBuffer-recording
2056 // VUID-vkCmdResetQueryPool-commandBuffer-recording
2057 // VUID-vkCmdResolveImage-commandBuffer-recording
2058 // VUID-vkCmdSetBlendConstants-commandBuffer-recording
2059 // VUID-vkCmdSetCheckpointNV-commandBuffer-recording
2060 // VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-recording
2061 // VUID-vkCmdSetDepthBias-commandBuffer-recording
2062 // VUID-vkCmdSetDepthBounds-commandBuffer-recording
2063 // VUID-vkCmdSetDeviceMask-commandBuffer-recording
2064 // VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording
2065 // VUID-vkCmdSetEvent-commandBuffer-recording
2066 // VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording
2067 // VUID-vkCmdSetLineStippleEXT-commandBuffer-recording
2068 // VUID-vkCmdSetLineWidth-commandBuffer-recording
2069 // VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording
2070 // VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording
2071 // VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording
2072 // VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording
2073 // VUID-vkCmdSetScissor-commandBuffer-recording
2074 // VUID-vkCmdSetStencilCompareMask-commandBuffer-recording
2075 // VUID-vkCmdSetStencilReference-commandBuffer-recording
2076 // VUID-vkCmdSetStencilWriteMask-commandBuffer-recording
2077 // VUID-vkCmdSetViewport-commandBuffer-recording
2078 // VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording
2079 // VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording
2080 // VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-recording
2081 // VUID-vkCmdTraceRaysKHR-commandBuffer-recording
2082 // VUID-vkCmdTraceRaysNV-commandBuffer-recording
2083 // VUID-vkCmdUpdateBuffer-commandBuffer-recording
2084 // VUID-vkCmdWaitEvents-commandBuffer-recording
2085 // VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-recording
2086 // VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording
2087 // VUID-vkCmdWriteTimestamp-commandBuffer-recording
2088 // VUID-vkEndCommandBuffer-commandBuffer-00059
2089
2090 // Validate the given command being added to the specified cmd buffer, flagging errors if CB is not in the recording state or if
2091 // there's an issue with the Cmd ordering
ValidateCmd(const CMD_BUFFER_STATE * cb_state,const CMD_TYPE cmd,const char * caller_name) const2092 bool CoreChecks::ValidateCmd(const CMD_BUFFER_STATE *cb_state, const CMD_TYPE cmd, const char *caller_name) const {
2093 switch (cb_state->state) {
2094 case CB_RECORDING:
2095 return ValidateCmdSubpassState(cb_state, cmd);
2096
2097 case CB_INVALID_COMPLETE:
2098 case CB_INVALID_INCOMPLETE:
2099 return ReportInvalidCommandBuffer(cb_state, caller_name);
2100
2101 default:
2102 assert(cmd != CMD_NONE);
2103 const auto error = must_be_recording_list[cmd];
2104 return LogError(cb_state->commandBuffer, error, "You must call vkBeginCommandBuffer() before this call to %s.",
2105 caller_name);
2106 }
2107 }
2108
ValidateIndirectCmd(VkCommandBuffer command_buffer,VkBuffer buffer,CMD_TYPE cmd_type,const char * caller_name) const2109 bool CoreChecks::ValidateIndirectCmd(VkCommandBuffer command_buffer, VkBuffer buffer, CMD_TYPE cmd_type,
2110 const char *caller_name) const {
2111 bool skip = false;
2112 const DrawDispatchVuid vuid = GetDrawDispatchVuid(cmd_type);
2113 const CMD_BUFFER_STATE *cb_state = GetCBState(command_buffer);
2114 const BUFFER_STATE *buffer_state = GetBufferState(buffer);
2115
2116 if ((cb_state != nullptr) && (buffer_state != nullptr)) {
2117 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, caller_name, vuid.indirect_contiguous_memory);
2118 skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true, vuid.indirect_buffer_bit,
2119 caller_name, "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
2120 if (cb_state->unprotected == false) {
2121 skip |= LogError(cb_state->commandBuffer, vuid.indirect_protected_cb,
2122 "%s: Indirect commands can't be used in protected command buffers.", caller_name);
2123 }
2124 }
2125 return skip;
2126 }
2127
2128 template <typename T1>
ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask,const T1 object,const char * VUID) const2129 bool CoreChecks::ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask, const T1 object, const char *VUID) const {
2130 bool skip = false;
2131 uint32_t count = 1 << physical_device_count;
2132 if (count <= deviceMask) {
2133 skip |= LogError(object, VUID, "deviceMask(0x%" PRIx32 ") is invalid. Physical device count is %" PRIu32 ".", deviceMask,
2134 physical_device_count);
2135 }
2136 return skip;
2137 }
2138
2139 template <typename T1>
ValidateDeviceMaskToZero(uint32_t deviceMask,const T1 object,const char * VUID) const2140 bool CoreChecks::ValidateDeviceMaskToZero(uint32_t deviceMask, const T1 object, const char *VUID) const {
2141 bool skip = false;
2142 if (deviceMask == 0) {
2143 skip |= LogError(object, VUID, "deviceMask(0x%" PRIx32 ") must be non-zero.", deviceMask);
2144 }
2145 return skip;
2146 }
2147
2148 template <typename T1>
ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE * pCB,uint32_t deviceMask,const T1 object,const char * VUID) const2149 bool CoreChecks::ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask, const T1 object,
2150 const char *VUID) const {
2151 bool skip = false;
2152 if ((deviceMask & pCB->initial_device_mask) != deviceMask) {
2153 skip |= LogError(object, VUID, "deviceMask(0x%" PRIx32 ") is not a subset of %s initial device mask(0x%" PRIx32 ").",
2154 deviceMask, report_data->FormatHandle(pCB->commandBuffer).c_str(), pCB->initial_device_mask);
2155 }
2156 return skip;
2157 }
2158
ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE * pCB,uint32_t deviceMask,const char * VUID) const2159 bool CoreChecks::ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask, const char *VUID) const {
2160 bool skip = false;
2161 if ((deviceMask & pCB->active_render_pass_device_mask) != deviceMask) {
2162 skip |= LogError(pCB->commandBuffer, VUID, "deviceMask(0x%" PRIx32 ") is not a subset of %s device mask(0x%" PRIx32 ").",
2163 deviceMask, report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(),
2164 pCB->active_render_pass_device_mask);
2165 }
2166 return skip;
2167 }
2168
2169 // Flags validation error if the associated call is made inside a render pass. The apiName routine should ONLY be called outside a
2170 // render pass.
InsideRenderPass(const CMD_BUFFER_STATE * pCB,const char * apiName,const char * msgCode) const2171 bool CoreChecks::InsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
2172 bool inside = false;
2173 if (pCB->activeRenderPass) {
2174 inside = LogError(pCB->commandBuffer, msgCode, "%s: It is invalid to issue this call inside an active %s.", apiName,
2175 report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str());
2176 }
2177 return inside;
2178 }
2179
2180 // Flags validation error if the associated call is made outside a render pass. The apiName
2181 // routine should ONLY be called inside a render pass.
OutsideRenderPass(const CMD_BUFFER_STATE * pCB,const char * apiName,const char * msgCode) const2182 bool CoreChecks::OutsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
2183 bool outside = false;
2184 if (((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) && (!pCB->activeRenderPass)) ||
2185 ((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) && (!pCB->activeRenderPass) &&
2186 !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT))) {
2187 outside = LogError(pCB->commandBuffer, msgCode, "%s: This call must be issued inside an active render pass.", apiName);
2188 }
2189 return outside;
2190 }
2191
ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE * pd_state,uint32_t requested_queue_family,const char * err_code,const char * cmd_name,const char * queue_family_var_name) const2192 bool CoreChecks::ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t requested_queue_family,
2193 const char *err_code, const char *cmd_name, const char *queue_family_var_name) const {
2194 bool skip = false;
2195
2196 if (requested_queue_family >= pd_state->queue_family_known_count) {
2197 const char *conditional_ext_cmd =
2198 instance_extensions.vk_khr_get_physical_device_properties_2 ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]" : "";
2199
2200 skip |= LogError(pd_state->phys_device, err_code,
2201 "%s: %s (= %" PRIu32
2202 ") is not less than any previously obtained pQueueFamilyPropertyCount from "
2203 "vkGetPhysicalDeviceQueueFamilyProperties%s (i.e. is not less than %s).",
2204 cmd_name, queue_family_var_name, requested_queue_family, conditional_ext_cmd,
2205 std::to_string(pd_state->queue_family_known_count).c_str());
2206 }
2207 return skip;
2208 }
2209
2210 // Verify VkDeviceQueueCreateInfos
ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE * pd_state,uint32_t info_count,const VkDeviceQueueCreateInfo * infos) const2211 bool CoreChecks::ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t info_count,
2212 const VkDeviceQueueCreateInfo *infos) const {
2213 bool skip = false;
2214
2215 std::unordered_set<uint32_t> queue_family_set;
2216
2217 for (uint32_t i = 0; i < info_count; ++i) {
2218 const auto requested_queue_family = infos[i].queueFamilyIndex;
2219
2220 std::string queue_family_var_name = "pCreateInfo->pQueueCreateInfos[" + std::to_string(i) + "].queueFamilyIndex";
2221 skip |= ValidateQueueFamilyIndex(pd_state, requested_queue_family, "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381",
2222 "vkCreateDevice", queue_family_var_name.c_str());
2223
2224 if (queue_family_set.insert(requested_queue_family).second == false) {
2225 skip |= LogError(pd_state->phys_device, "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372",
2226 "CreateDevice(): %s (=%" PRIu32 ") is not unique within pQueueCreateInfos.",
2227 queue_family_var_name.c_str(), requested_queue_family);
2228 }
2229
2230 // Verify that requested queue count of queue family is known to be valid at this point in time
2231 if (requested_queue_family < pd_state->queue_family_known_count) {
2232 const auto requested_queue_count = infos[i].queueCount;
2233 const bool queue_family_has_props = requested_queue_family < pd_state->queue_family_properties.size();
2234 // spec guarantees at least one queue for each queue family
2235 const uint32_t available_queue_count =
2236 queue_family_has_props ? pd_state->queue_family_properties[requested_queue_family].queueCount : 1;
2237 const char *conditional_ext_cmd = instance_extensions.vk_khr_get_physical_device_properties_2
2238 ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]"
2239 : "";
2240
2241 if (requested_queue_count > available_queue_count) {
2242 const std::string count_note =
2243 queue_family_has_props
2244 ? "i.e. is not less than or equal to " +
2245 std::to_string(pd_state->queue_family_properties[requested_queue_family].queueCount)
2246 : "the pQueueFamilyProperties[" + std::to_string(requested_queue_family) + "] was never obtained";
2247
2248 skip |= LogError(
2249 pd_state->phys_device, "VUID-VkDeviceQueueCreateInfo-queueCount-00382",
2250 "vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32 "].queueCount (=%" PRIu32
2251 ") is not less than or equal to available queue count for this pCreateInfo->pQueueCreateInfos[%" PRIu32
2252 "].queueFamilyIndex} (=%" PRIu32 ") obtained previously from vkGetPhysicalDeviceQueueFamilyProperties%s (%s).",
2253 i, requested_queue_count, i, requested_queue_family, conditional_ext_cmd, count_note.c_str());
2254 }
2255 }
2256 }
2257
2258 return skip;
2259 }
2260
PreCallValidateCreateDevice(VkPhysicalDevice gpu,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice) const2261 bool CoreChecks::PreCallValidateCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
2262 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) const {
2263 bool skip = false;
2264 auto pd_state = GetPhysicalDeviceState(gpu);
2265
2266 // TODO: object_tracker should perhaps do this instead
2267 // and it does not seem to currently work anyway -- the loader just crashes before this point
2268 if (!pd_state) {
2269 skip |= LogError(device, kVUID_Core_DevLimit_MustQueryCount,
2270 "Invalid call to vkCreateDevice() w/o first calling vkEnumeratePhysicalDevices().");
2271 } else {
2272 skip |= ValidateDeviceQueueCreateInfos(pd_state, pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos);
2273 }
2274 return skip;
2275 }
2276
PostCallRecordCreateDevice(VkPhysicalDevice gpu,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice,VkResult result)2277 void CoreChecks::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
2278 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
2279 // The state tracker sets up the device state
2280 StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
2281
2282 // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker refactor
2283 // would be messier without.
2284 // TODO: Find a good way to do this hooklessly.
2285 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
2286 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
2287 CoreChecks *core_checks = static_cast<CoreChecks *>(validation_data);
2288 core_checks->SetSetImageViewInitialLayoutCallback(
2289 [core_checks](CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &iv_state, VkImageLayout layout) -> void {
2290 core_checks->SetImageViewInitialLayout(cb_node, iv_state, layout);
2291 });
2292 }
2293
// Release CoreChecks' per-device bookkeeping before device destruction.
void CoreChecks::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;
    // Drop all image-layout tracking owned by this layer before the state tracker tears down device state.
    imageLayoutMap.clear();

    StateTracker::PreCallRecordDestroyDevice(device, pAllocator);
}
2300
2301 // For given stage mask, if Geometry shader stage is on w/o GS being enabled, report geo_error_id
2302 // and if Tessellation Control or Evaluation shader stages are on w/o TS being enabled, report tess_error_id.
2303 // Similarly for mesh and task shaders.
ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask,const char * caller,const char * geo_error_id,const char * tess_error_id,const char * mesh_error_id,const char * task_error_id) const2304 bool CoreChecks::ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask, const char *caller, const char *geo_error_id,
2305 const char *tess_error_id, const char *mesh_error_id,
2306 const char *task_error_id) const {
2307 bool skip = false;
2308 if (!enabled_features.core.geometryShader && (stageMask & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)) {
2309 skip |=
2310 LogError(device, geo_error_id,
2311 "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when device does not have "
2312 "geometryShader feature enabled.",
2313 caller);
2314 }
2315 if (!enabled_features.core.tessellationShader &&
2316 (stageMask & (VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT))) {
2317 skip |= LogError(device, tess_error_id,
2318 "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT and/or "
2319 "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device does not have "
2320 "tessellationShader feature enabled.",
2321 caller);
2322 }
2323 if (!enabled_features.mesh_shader.meshShader && (stageMask & VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV)) {
2324 skip |= LogError(device, mesh_error_id,
2325 "%s call includes a stageMask with VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV bit set when device does not have "
2326 "VkPhysicalDeviceMeshShaderFeaturesNV::meshShader feature enabled.",
2327 caller);
2328 }
2329 if (!enabled_features.mesh_shader.taskShader && (stageMask & VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV)) {
2330 skip |= LogError(device, task_error_id,
2331 "%s call includes a stageMask with VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV bit set when device does not have "
2332 "VkPhysicalDeviceMeshShaderFeaturesNV::taskShader feature enabled.",
2333 caller);
2334 }
2335 return skip;
2336 }
2337
ValidateStageMaskHost(VkPipelineStageFlags stageMask,const char * caller,const char * vuid) const2338 bool CoreChecks::ValidateStageMaskHost(VkPipelineStageFlags stageMask, const char *caller, const char *vuid) const {
2339 bool skip = false;
2340 if ((stageMask & VK_PIPELINE_STAGE_HOST_BIT) != 0) {
2341 skip |= LogError(
2342 device, vuid,
2343 "%s: stage mask must not include VK_PIPELINE_STAGE_HOST_BIT as the stage can't be invoked inside a command buffer.",
2344 caller);
2345 }
2346 return skip;
2347 }
2348
// Note: This function assumes that the global lock is held by the calling thread.
// For the given queue, verify the queue state up to the given seq number.
// Currently the only check is to make sure that if there are events to be waited on prior to
// a QueryReset, make sure that all such events have been signalled.
//
// This is a worklist-based traversal over the graph of cross-queue semaphore waits: validating one
// queue's submissions may require first validating the submissions of other queues it waits on.
bool CoreChecks::VerifyQueueStateToSeq(const QUEUE_STATE *initial_queue, uint64_t initial_seq) const {
    bool skip = false;

    // sequence number we want to validate up to, per queue
    std::unordered_map<const QUEUE_STATE *, uint64_t> target_seqs{{initial_queue, initial_seq}};
    // sequence number we've completed validation for, per queue
    std::unordered_map<const QUEUE_STATE *, uint64_t> done_seqs;
    std::vector<const QUEUE_STATE *> worklist{initial_queue};

    while (worklist.size()) {
        auto queue = worklist.back();
        worklist.pop_back();

        // Resume from whichever is later: what we've already validated for this queue,
        // or the queue's retired point (queue->seq). operator[] default-inserts 0 for
        // queues we haven't seen yet, which std::max then ignores.
        auto target_seq = target_seqs[queue];
        auto seq = std::max(done_seqs[queue], queue->seq);
        auto sub_it = queue->submissions.begin() + int(seq - queue->seq);  // seq >= queue->seq

        for (; seq < target_seq; ++sub_it, ++seq) {
            for (auto &wait : sub_it->waitSemaphores) {
                auto other_queue = GetQueueState(wait.queue);

                if (other_queue == queue) continue;  // semaphores /always/ point backwards, so no point here.

                auto other_target_seq = std::max(target_seqs[other_queue], wait.seq);
                auto other_done_seq = std::max(done_seqs[other_queue], other_queue->seq);

                // if this wait is for another queue, and covers new sequence
                // numbers beyond what we've already validated, mark the new
                // target seq and (possibly-re)add the queue to the worklist.
                if (other_done_seq < other_target_seq) {
                    target_seqs[other_queue] = other_target_seq;
                    worklist.push_back(other_queue);
                }
            }
        }

        // finally mark the point we've now validated this queue to.
        done_seqs[queue] = seq;
    }

    return skip;
}
2395
2396 // When the given fence is retired, verify outstanding queue operations through the point of the fence
VerifyQueueStateToFence(VkFence fence) const2397 bool CoreChecks::VerifyQueueStateToFence(VkFence fence) const {
2398 auto fence_state = GetFenceState(fence);
2399 if (fence_state && fence_state->scope == kSyncScopeInternal && VK_NULL_HANDLE != fence_state->signaler.first) {
2400 return VerifyQueueStateToSeq(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
2401 }
2402 return false;
2403 }
2404
ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE * pCB,int current_submit_count) const2405 bool CoreChecks::ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE *pCB, int current_submit_count) const {
2406 bool skip = false;
2407 if ((pCB->in_use.load() || current_submit_count > 1) &&
2408 !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
2409 skip |= LogError(device, "VUID-vkQueueSubmit-pCommandBuffers-00071",
2410 "%s is already in use and is not marked for simultaneous use.",
2411 report_data->FormatHandle(pCB->commandBuffer).c_str());
2412 }
2413 return skip;
2414 }
2415
ValidateCommandBufferState(const CMD_BUFFER_STATE * cb_state,const char * call_source,int current_submit_count,const char * vu_id) const2416 bool CoreChecks::ValidateCommandBufferState(const CMD_BUFFER_STATE *cb_state, const char *call_source, int current_submit_count,
2417 const char *vu_id) const {
2418 bool skip = false;
2419 if (disabled[command_buffer_state]) return skip;
2420 // Validate ONE_TIME_SUBMIT_BIT CB is not being submitted more than once
2421 if ((cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) &&
2422 (cb_state->submitCount + current_submit_count > 1)) {
2423 skip |= LogError(cb_state->commandBuffer, kVUID_Core_DrawState_CommandBufferSingleSubmitViolation,
2424 "%s was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted 0x%" PRIxLEAST64
2425 "times.",
2426 report_data->FormatHandle(cb_state->commandBuffer).c_str(), cb_state->submitCount + current_submit_count);
2427 }
2428
2429 // Validate that cmd buffers have been updated
2430 switch (cb_state->state) {
2431 case CB_INVALID_INCOMPLETE:
2432 case CB_INVALID_COMPLETE:
2433 skip |= ReportInvalidCommandBuffer(cb_state, call_source);
2434 break;
2435
2436 case CB_NEW:
2437 skip |= LogError(cb_state->commandBuffer, vu_id, "%s used in the call to %s is unrecorded and contains no commands.",
2438 report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
2439 break;
2440
2441 case CB_RECORDING:
2442 skip |= LogError(cb_state->commandBuffer, kVUID_Core_DrawState_NoEndCommandBuffer,
2443 "You must call vkEndCommandBuffer() on %s before this call to %s!",
2444 report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
2445 break;
2446
2447 default: /* recorded */
2448 break;
2449 }
2450 return skip;
2451 }
2452
2453 // Check that the queue family index of 'queue' matches one of the entries in pQueueFamilyIndices
ValidImageBufferQueue(const CMD_BUFFER_STATE * cb_node,const VulkanTypedHandle & object,uint32_t queueFamilyIndex,uint32_t count,const uint32_t * indices) const2454 bool CoreChecks::ValidImageBufferQueue(const CMD_BUFFER_STATE *cb_node, const VulkanTypedHandle &object, uint32_t queueFamilyIndex,
2455 uint32_t count, const uint32_t *indices) const {
2456 bool found = false;
2457 bool skip = false;
2458 for (uint32_t i = 0; i < count; i++) {
2459 if (indices[i] == queueFamilyIndex) {
2460 found = true;
2461 break;
2462 }
2463 }
2464
2465 if (!found) {
2466 LogObjectList objlist(cb_node->commandBuffer);
2467 objlist.add(object);
2468 skip = LogError(objlist, kVUID_Core_DrawState_InvalidQueueFamily,
2469 "vkQueueSubmit: %s contains %s which was not created allowing concurrent access to "
2470 "this queue family %d.",
2471 report_data->FormatHandle(cb_node->commandBuffer).c_str(), report_data->FormatHandle(object).c_str(),
2472 queueFamilyIndex);
2473 }
2474 return skip;
2475 }
2476
2477 // Validate that queueFamilyIndices of primary command buffers match this queue
2478 // Secondary command buffers were previously validated in vkCmdExecuteCommands().
ValidateQueueFamilyIndices(const CMD_BUFFER_STATE * pCB,VkQueue queue) const2479 bool CoreChecks::ValidateQueueFamilyIndices(const CMD_BUFFER_STATE *pCB, VkQueue queue) const {
2480 bool skip = false;
2481 auto pPool = pCB->command_pool.get();
2482 auto queue_state = GetQueueState(queue);
2483
2484 if (pPool && queue_state) {
2485 if (pPool->queueFamilyIndex != queue_state->queueFamilyIndex) {
2486 LogObjectList objlist(pCB->commandBuffer);
2487 objlist.add(queue);
2488 skip |= LogError(objlist, "VUID-vkQueueSubmit-pCommandBuffers-00074",
2489 "vkQueueSubmit: Primary %s created in queue family %d is being submitted on %s "
2490 "from queue family %d.",
2491 report_data->FormatHandle(pCB->commandBuffer).c_str(), pPool->queueFamilyIndex,
2492 report_data->FormatHandle(queue).c_str(), queue_state->queueFamilyIndex);
2493 }
2494
2495 // Ensure that any bound images or buffers created with SHARING_MODE_CONCURRENT have access to the current queue family
2496 for (const auto &object : pCB->object_bindings) {
2497 if (object.type == kVulkanObjectTypeImage) {
2498 auto image_state = object.node ? (IMAGE_STATE *)object.node : GetImageState(object.Cast<VkImage>());
2499 if (image_state && image_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
2500 skip |= ValidImageBufferQueue(pCB, object, queue_state->queueFamilyIndex,
2501 image_state->createInfo.queueFamilyIndexCount,
2502 image_state->createInfo.pQueueFamilyIndices);
2503 }
2504 } else if (object.type == kVulkanObjectTypeBuffer) {
2505 auto buffer_state = object.node ? (BUFFER_STATE *)object.node : GetBufferState(object.Cast<VkBuffer>());
2506 if (buffer_state && buffer_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
2507 skip |= ValidImageBufferQueue(pCB, object, queue_state->queueFamilyIndex,
2508 buffer_state->createInfo.queueFamilyIndexCount,
2509 buffer_state->createInfo.pQueueFamilyIndices);
2510 }
2511 }
2512 }
2513 }
2514
2515 return skip;
2516 }
2517
ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE * pCB,int current_submit_count,QFOTransferCBScoreboards<VkImageMemoryBarrier> * qfo_image_scoreboards,QFOTransferCBScoreboards<VkBufferMemoryBarrier> * qfo_buffer_scoreboards) const2518 bool CoreChecks::ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE *pCB, int current_submit_count,
2519 QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
2520 QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) const {
2521 // Track in-use for resources off of primary and any secondary CBs
2522 bool skip = false;
2523
2524 if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2525 skip |= LogError(pCB->commandBuffer, "VUID-VkSubmitInfo-pCommandBuffers-00075",
2526 "Command buffer %s was included in the pCommandBuffers array of QueueSubmit but was allocated with "
2527 "VK_COMMAND_BUFFER_LEVEL_SECONDARY.",
2528 report_data->FormatHandle(pCB->commandBuffer).c_str());
2529 } else {
2530 for (auto pSubCB : pCB->linkedCommandBuffers) {
2531 skip |= ValidateQueuedQFOTransfers(pSubCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
2532 // TODO: replace with InvalidateCommandBuffers() at recording.
2533 if ((pSubCB->primaryCommandBuffer != pCB->commandBuffer) &&
2534 !(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
2535 LogObjectList objlist(device);
2536 objlist.add(pCB->commandBuffer);
2537 objlist.add(pSubCB->commandBuffer);
2538 objlist.add(pSubCB->primaryCommandBuffer);
2539 skip |= LogError(objlist, "VUID-vkQueueSubmit-pCommandBuffers-00073",
2540 "%s was submitted with secondary %s but that buffer has subsequently been bound to "
2541 "primary %s and it does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
2542 report_data->FormatHandle(pCB->commandBuffer).c_str(),
2543 report_data->FormatHandle(pSubCB->commandBuffer).c_str(),
2544 report_data->FormatHandle(pSubCB->primaryCommandBuffer).c_str());
2545 }
2546 }
2547 }
2548
2549 // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing on device
2550 skip |= ValidateCommandBufferSimultaneousUse(pCB, current_submit_count);
2551
2552 skip |= ValidateQueuedQFOTransfers(pCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
2553
2554 skip |= ValidateCommandBufferState(pCB, "vkQueueSubmit()", current_submit_count, "VUID-vkQueueSubmit-pCommandBuffers-00072");
2555
2556 return skip;
2557 }
2558
ValidateFenceForSubmit(const FENCE_STATE * pFence,const char * inflight_vuid,const char * retired_vuid,const char * func_name) const2559 bool CoreChecks::ValidateFenceForSubmit(const FENCE_STATE *pFence, const char *inflight_vuid, const char *retired_vuid,
2560 const char *func_name) const {
2561 bool skip = false;
2562
2563 if (pFence && pFence->scope == kSyncScopeInternal) {
2564 if (pFence->state == FENCE_INFLIGHT) {
2565 skip |= LogError(pFence->fence, inflight_vuid, "%s: %s is already in use by another submission.", func_name,
2566 report_data->FormatHandle(pFence->fence).c_str());
2567 }
2568
2569 else if (pFence->state == FENCE_RETIRED) {
2570 skip |= LogError(pFence->fence, retired_vuid,
2571 "%s: %s submitted in SIGNALED state. Fences must be reset before being submitted", func_name,
2572 report_data->FormatHandle(pFence->fence).c_str());
2573 }
2574 }
2575
2576 return skip;
2577 }
2578
PostCallRecordQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence,VkResult result)2579 void CoreChecks::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
2580 VkResult result) {
2581 StateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
2582
2583 if (result != VK_SUCCESS) return;
2584 // The triply nested for duplicates that in the StateTracker, but avoids the need for two additional callbacks.
2585 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
2586 const VkSubmitInfo *submit = &pSubmits[submit_idx];
2587 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
2588 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
2589 if (cb_node) {
2590 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
2591 UpdateCmdBufImageLayouts(secondaryCmdBuffer);
2592 RecordQueuedQFOTransfers(secondaryCmdBuffer);
2593 }
2594 UpdateCmdBufImageLayouts(cb_node);
2595 RecordQueuedQFOTransfers(cb_node);
2596 }
2597 }
2598 }
2599 }
2600
SemaphoreWasSignaled(VkSemaphore semaphore) const2601 bool CoreChecks::SemaphoreWasSignaled(VkSemaphore semaphore) const {
2602 for (auto &pair : queueMap) {
2603 const QUEUE_STATE &queueState = pair.second;
2604 for (const auto &submission : queueState.submissions) {
2605 for (const auto &signalSemaphore : submission.signalSemaphores) {
2606 if (signalSemaphore.semaphore == semaphore) {
2607 return true;
2608 }
2609 }
2610 }
2611 }
2612
2613 return false;
2614 }
2615
ValidateSemaphoresForSubmit(VkQueue queue,const VkSubmitInfo * submit,uint32_t submit_index,unordered_set<VkSemaphore> * unsignaled_sema_arg,unordered_set<VkSemaphore> * signaled_sema_arg,unordered_set<VkSemaphore> * internal_sema_arg) const2616 bool CoreChecks::ValidateSemaphoresForSubmit(VkQueue queue, const VkSubmitInfo *submit, uint32_t submit_index,
2617 unordered_set<VkSemaphore> *unsignaled_sema_arg,
2618 unordered_set<VkSemaphore> *signaled_sema_arg,
2619 unordered_set<VkSemaphore> *internal_sema_arg) const {
2620 bool skip = false;
2621 auto &signaled_semaphores = *signaled_sema_arg;
2622 auto &unsignaled_semaphores = *unsignaled_sema_arg;
2623 auto &internal_semaphores = *internal_sema_arg;
2624 auto *timeline_semaphore_submit_info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
2625 const char *vuid_error = device_extensions.vk_khr_timeline_semaphore ? "VUID-vkQueueSubmit-pWaitSemaphores-03238"
2626 : "VUID-vkQueueSubmit-pWaitSemaphores-00069";
2627
2628 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
2629 skip |=
2630 ValidateStageMaskGsTsEnables(submit->pWaitDstStageMask[i], "vkQueueSubmit()",
2631 "VUID-VkSubmitInfo-pWaitDstStageMask-00076", "VUID-VkSubmitInfo-pWaitDstStageMask-00077",
2632 "VUID-VkSubmitInfo-pWaitDstStageMask-02089", "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
2633 skip |= ValidateStageMaskHost(submit->pWaitDstStageMask[i], "vkQueueSubmit()", "VUID-VkSubmitInfo-pWaitDstStageMask-00078");
2634 VkSemaphore semaphore = submit->pWaitSemaphores[i];
2635 const auto *pSemaphore = GetSemaphoreState(semaphore);
2636 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
2637 skip |= LogError(semaphore, "VUID-VkSubmitInfo-pWaitSemaphores-03239",
2638 "VkQueueSubmit: pSubmits[%u].pWaitSemaphores[%u] (%s) is a timeline semaphore, but pSubmits[%u] does "
2639 "not include an instance of VkTimelineSemaphoreSubmitInfoKHR",
2640 submit_index, i, report_data->FormatHandle(semaphore).c_str(), submit_index);
2641 }
2642 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
2643 submit->waitSemaphoreCount != timeline_semaphore_submit_info->waitSemaphoreValueCount) {
2644 skip |= LogError(semaphore, "VUID-VkSubmitInfo-pNext-03240",
2645 "VkQueueSubmit: pSubmits[%u].pWaitSemaphores[%u] (%s) is a timeline semaphore, it contains an "
2646 "instance of VkTimelineSemaphoreSubmitInfoKHR, but waitSemaphoreValueCount (%u) is different than "
2647 "pSubmits[%u].waitSemaphoreCount (%u)",
2648 submit_index, i, report_data->FormatHandle(semaphore).c_str(),
2649 timeline_semaphore_submit_info->waitSemaphoreValueCount, submit_index, submit->waitSemaphoreCount);
2650 }
2651 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
2652 (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
2653 if (unsignaled_semaphores.count(semaphore) ||
2654 (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled) && !SemaphoreWasSignaled(semaphore))) {
2655 LogObjectList objlist(semaphore);
2656 objlist.add(queue);
2657 skip |= LogError(
2658 objlist, pSemaphore->scope == kSyncScopeInternal ? vuid_error : kVUID_Core_DrawState_QueueForwardProgress,
2659 "vkQueueSubmit: Queue %s is waiting on pSubmits[%u].pWaitSemaphores[%u] (%s) that has no way to be signaled.",
2660 report_data->FormatHandle(queue).c_str(), submit_index, i, report_data->FormatHandle(semaphore).c_str());
2661 } else {
2662 signaled_semaphores.erase(semaphore);
2663 unsignaled_semaphores.insert(semaphore);
2664 }
2665 }
2666 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR && pSemaphore->scope == kSyncScopeExternalTemporary) {
2667 internal_semaphores.insert(semaphore);
2668 }
2669 }
2670 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
2671 VkSemaphore semaphore = submit->pSignalSemaphores[i];
2672 const auto *pSemaphore = GetSemaphoreState(semaphore);
2673 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
2674 skip |= LogError(semaphore, "VUID-VkSubmitInfo-pWaitSemaphores-03239",
2675 "VkQueueSubmit: pSubmits[%u].pSignalSemaphores[%u] (%s) is a timeline semaphore, but pSubmits[%u] "
2676 "does not include an instance of VkTimelineSemaphoreSubmitInfoKHR",
2677 submit_index, i, report_data->FormatHandle(semaphore).c_str(), submit_index);
2678 }
2679 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
2680 submit->signalSemaphoreCount != timeline_semaphore_submit_info->signalSemaphoreValueCount) {
2681 skip |= LogError(semaphore, "VUID-VkSubmitInfo-pNext-03241",
2682 "VkQueueSubmit: pSubmits[%u].pSignalSemaphores[%u] (%s) is a timeline semaphore, it contains an "
2683 "instance of VkTimelineSemaphoreSubmitInfoKHR, but signalSemaphoreValueCount (%u) is different than "
2684 "pSubmits[%u].signalSemaphoreCount (%u)",
2685 submit_index, i, report_data->FormatHandle(semaphore).c_str(),
2686 timeline_semaphore_submit_info->signalSemaphoreValueCount, submit_index, submit->signalSemaphoreCount);
2687 }
2688 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
2689 timeline_semaphore_submit_info->pSignalSemaphoreValues[i] <= pSemaphore->payload) {
2690 skip |= LogError(semaphore, "VUID-VkSubmitInfo-pSignalSemaphores-03242",
2691 "VkQueueSubmit: signal value (0x%" PRIx64
2692 ") in %s must be greater than current timeline semaphore %s value (0x%" PRIx64
2693 ") in pSubmits[%u].pSignalSemaphores[%u]",
2694 pSemaphore->payload, report_data->FormatHandle(queue).c_str(),
2695 report_data->FormatHandle(semaphore).c_str(),
2696 timeline_semaphore_submit_info->pSignalSemaphoreValues[i], submit_index, i);
2697 }
2698 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
2699 (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
2700 if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
2701 LogObjectList objlist(semaphore);
2702 objlist.add(queue);
2703 objlist.add(pSemaphore->signaler.first);
2704 skip |= LogError(objlist, kVUID_Core_DrawState_QueueForwardProgress,
2705 "vkQueueSubmit: %s is signaling pSubmits[%u].pSignalSemaphores[%u] (%s) that was previously "
2706 "signaled by %s but has not since been waited on by any queue.",
2707 report_data->FormatHandle(queue).c_str(), submit_index, i,
2708 report_data->FormatHandle(semaphore).c_str(),
2709 report_data->FormatHandle(pSemaphore->signaler.first).c_str());
2710 } else {
2711 unsignaled_semaphores.erase(semaphore);
2712 signaled_semaphores.insert(semaphore);
2713 }
2714 }
2715 }
2716
2717 return skip;
2718 }
2719
ValidateMaxTimelineSemaphoreValueDifference(VkSemaphore semaphore,uint64_t value,const char * func_name,const char * vuid) const2720 bool CoreChecks::ValidateMaxTimelineSemaphoreValueDifference(VkSemaphore semaphore, uint64_t value, const char *func_name,
2721 const char *vuid) const {
2722 bool skip = false;
2723 const auto pSemaphore = GetSemaphoreState(semaphore);
2724
2725 if (pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) return false;
2726
2727 uint64_t diff = value > pSemaphore->payload ? value - pSemaphore->payload : pSemaphore->payload - value;
2728
2729 if (diff > phys_dev_props_core12.maxTimelineSemaphoreValueDifference) {
2730 skip |= LogError(semaphore, vuid, "%s: value exceeds limit regarding current semaphore %s payload", func_name,
2731 report_data->FormatHandle(semaphore).c_str());
2732 }
2733
2734 for (auto &pair : queueMap) {
2735 const QUEUE_STATE &queueState = pair.second;
2736 for (const auto &submission : queueState.submissions) {
2737 for (const auto &signalSemaphore : submission.signalSemaphores) {
2738 if (signalSemaphore.semaphore == semaphore) {
2739 diff = value > signalSemaphore.payload ? value - signalSemaphore.payload : signalSemaphore.payload - value;
2740 if (diff > phys_dev_props_core12.maxTimelineSemaphoreValueDifference) {
2741 skip |= LogError(semaphore, vuid, "%s: value exceeds limit regarding pending semaphore %s signal value",
2742 func_name, report_data->FormatHandle(semaphore).c_str());
2743 }
2744 }
2745 }
2746 for (const auto &waitSemaphore : submission.waitSemaphores) {
2747 if (waitSemaphore.semaphore == semaphore) {
2748 diff = value > waitSemaphore.payload ? value - waitSemaphore.payload : waitSemaphore.payload - value;
2749 if (diff > phys_dev_props_core12.maxTimelineSemaphoreValueDifference) {
2750 skip |= LogError(semaphore, vuid, "%s: value exceeds limit regarding pending semaphore %s wait value",
2751 func_name, report_data->FormatHandle(semaphore).c_str());
2752 }
2753 }
2754 }
2755 }
2756 }
2757
2758 return skip;
2759 }
2760
ValidateCommandBuffersForSubmit(VkQueue queue,const VkSubmitInfo * submit,GlobalImageLayoutMap * overlayImageLayoutMap_arg,QueryMap * local_query_to_state_map,vector<VkCommandBuffer> * current_cmds_arg) const2761 bool CoreChecks::ValidateCommandBuffersForSubmit(VkQueue queue, const VkSubmitInfo *submit,
2762 GlobalImageLayoutMap *overlayImageLayoutMap_arg,
2763 QueryMap *local_query_to_state_map,
2764 vector<VkCommandBuffer> *current_cmds_arg) const {
2765 bool skip = false;
2766 auto queue_state = GetQueueState(queue);
2767
2768 GlobalImageLayoutMap &overlayLayoutMap = *overlayImageLayoutMap_arg;
2769 vector<VkCommandBuffer> ¤t_cmds = *current_cmds_arg;
2770
2771 QFOTransferCBScoreboards<VkImageMemoryBarrier> qfo_image_scoreboards;
2772 QFOTransferCBScoreboards<VkBufferMemoryBarrier> qfo_buffer_scoreboards;
2773 EventToStageMap localEventToStageMap;
2774
2775 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
2776 uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;
2777
2778 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
2779 const auto *cb_node = GetCBState(submit->pCommandBuffers[i]);
2780 if (cb_node) {
2781 skip |= ValidateCmdBufImageLayouts(cb_node, imageLayoutMap, &overlayLayoutMap);
2782 current_cmds.push_back(submit->pCommandBuffers[i]);
2783 skip |= ValidatePrimaryCommandBufferState(
2784 cb_node, (int)std::count(current_cmds.begin(), current_cmds.end(), submit->pCommandBuffers[i]),
2785 &qfo_image_scoreboards, &qfo_buffer_scoreboards);
2786 skip |= ValidateQueueFamilyIndices(cb_node, queue);
2787
2788 for (auto descriptorSet : cb_node->validate_descriptorsets_in_queuesubmit) {
2789 const cvdescriptorset::DescriptorSet *set_node = GetSetNode(descriptorSet.first);
2790 if (set_node) {
2791 for (auto cmd_info : descriptorSet.second) {
2792 std::string function = "vkQueueSubmit(), ";
2793 function += cmd_info.function;
2794 for (auto binding_info : cmd_info.binding_infos) {
2795 std::string error;
2796 std::vector<uint32_t> dynamicOffsets;
2797 // dynamic data isn't allowed in UPDATE_AFTER_BIND, so dynamicOffsets is always empty.
2798 skip |= ValidateDescriptorSetBindingData(cmd_info.bind_point, cb_node, set_node, dynamicOffsets,
2799 binding_info, cmd_info.framebuffer, cmd_info.attachment_views,
2800 function.c_str(), GetDrawDispatchVuid(cmd_info.cmd_type));
2801 }
2802 }
2803 }
2804 }
2805
2806 // Potential early exit here as bad object state may crash in delayed function calls
2807 if (skip) {
2808 return true;
2809 }
2810
2811 // Call submit-time functions to validate or update local mirrors of state (to preserve const-ness at validate time)
2812 for (auto &function : cb_node->queue_submit_functions) {
2813 skip |= function(this, queue_state);
2814 }
2815 for (auto &function : cb_node->eventUpdates) {
2816 skip |= function(this, /*do_validate*/ true, &localEventToStageMap);
2817 }
2818 VkQueryPool first_perf_query_pool = VK_NULL_HANDLE;
2819 for (auto &function : cb_node->queryUpdates) {
2820 skip |= function(this, /*do_validate*/ true, first_perf_query_pool, perf_pass, local_query_to_state_map);
2821 }
2822 }
2823 }
2824 return skip;
2825 }
2826
PreCallValidateQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence) const2827 bool CoreChecks::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
2828 VkFence fence) const {
2829 const auto *pFence = GetFenceState(fence);
2830 bool skip =
2831 ValidateFenceForSubmit(pFence, "VUID-vkQueueSubmit-fence-00064", "VUID-vkQueueSubmit-fence-00063", "vkQueueSubmit()");
2832 if (skip) {
2833 return true;
2834 }
2835
2836 unordered_set<VkSemaphore> signaled_semaphores;
2837 unordered_set<VkSemaphore> unsignaled_semaphores;
2838 unordered_set<VkSemaphore> internal_semaphores;
2839 vector<VkCommandBuffer> current_cmds;
2840 GlobalImageLayoutMap overlayImageLayoutMap;
2841 QueryMap local_query_to_state_map;
2842
2843 // Now verify each individual submit
2844 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
2845 const VkSubmitInfo *submit = &pSubmits[submit_idx];
2846 skip |= ValidateSemaphoresForSubmit(queue, submit, submit_idx, &unsignaled_semaphores, &signaled_semaphores,
2847 &internal_semaphores);
2848 skip |= ValidateCommandBuffersForSubmit(queue, submit, &overlayImageLayoutMap, &local_query_to_state_map, ¤t_cmds);
2849
2850 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupSubmitInfo>(submit->pNext);
2851 if (chained_device_group_struct && chained_device_group_struct->commandBufferCount > 0) {
2852 for (uint32_t i = 0; i < chained_device_group_struct->commandBufferCount; ++i) {
2853 skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_device_group_struct->pCommandBufferDeviceMasks[i], queue,
2854 "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086");
2855 }
2856 }
2857
2858 auto protected_submit_info = lvl_find_in_chain<VkProtectedSubmitInfo>(submit->pNext);
2859 if (protected_submit_info) {
2860 const bool protectedSubmit = protected_submit_info->protectedSubmit == VK_TRUE;
2861 // Only check feature once for submit
2862 if ((protectedSubmit == true) && (enabled_features.core11.protectedMemory == VK_FALSE)) {
2863 skip |= LogError(queue, "VUID-VkProtectedSubmitInfo-protectedSubmit-01816",
2864 "vkQueueSubmit(): The protectedMemory device feature is disabled, can't submit a protected queue "
2865 "to %s pSubmits[%u]",
2866 report_data->FormatHandle(queue).c_str(), submit_idx);
2867 }
2868
2869 // Make sure command buffers are all protected or unprotected
2870 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
2871 const CMD_BUFFER_STATE *cb_state = GetCBState(submit->pCommandBuffers[i]);
2872 if (cb_state != nullptr) {
2873 if ((cb_state->unprotected == true) && (protectedSubmit == true)) {
2874 LogObjectList objlist(cb_state->commandBuffer);
2875 objlist.add(queue);
2876 skip |= LogError(objlist, "VUID-VkSubmitInfo-pNext-04148",
2877 "vkQueueSubmit(): command buffer %s is unprotected while queue %s pSubmits[%u] has "
2878 "VkProtectedSubmitInfo:protectedSubmit set to VK_TRUE",
2879 report_data->FormatHandle(cb_state->commandBuffer).c_str(),
2880 report_data->FormatHandle(queue).c_str(), submit_idx);
2881 }
2882 if ((cb_state->unprotected == false) && (protectedSubmit == false)) {
2883 LogObjectList objlist(cb_state->commandBuffer);
2884 objlist.add(queue);
2885 skip |= LogError(objlist, "VUID-VkSubmitInfo-pNext-04120",
2886 "vkQueueSubmit(): command buffer %s is protected while queue %s pSubmits[%u] has "
2887 "VkProtectedSubmitInfo:protectedSubmit set to VK_FALSE",
2888 report_data->FormatHandle(cb_state->commandBuffer).c_str(),
2889 report_data->FormatHandle(queue).c_str(), submit_idx);
2890 }
2891 }
2892 }
2893 }
2894 }
2895
2896 if (skip) return skip;
2897
2898 // Now verify maxTimelineSemaphoreValueDifference
2899 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
2900 const VkSubmitInfo *submit = &pSubmits[submit_idx];
2901 auto *info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
2902 if (info) {
2903 // If there are any timeline semaphores, this condition gets checked before the early return above
2904 if (info->waitSemaphoreValueCount)
2905 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
2906 VkSemaphore semaphore = submit->pWaitSemaphores[i];
2907 skip |= ValidateMaxTimelineSemaphoreValueDifference(semaphore, info->pWaitSemaphoreValues[i], "VkQueueSubmit",
2908 "VUID-VkSubmitInfo-pWaitSemaphores-03243");
2909 }
2910 // If there are any timeline semaphores, this condition gets checked before the early return above
2911 if (info->signalSemaphoreValueCount)
2912 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
2913 VkSemaphore semaphore = submit->pSignalSemaphores[i];
2914 skip |= ValidateMaxTimelineSemaphoreValueDifference(semaphore, info->pSignalSemaphoreValues[i], "VkQueueSubmit",
2915 "VUID-VkSubmitInfo-pSignalSemaphores-03244");
2916 }
2917 }
2918 }
2919
2920 return skip;
2921 }
2922
2923 #ifdef VK_USE_PLATFORM_ANDROID_KHR
2924 // Android-specific validation that uses types defined only on Android and only for NDK versions
2925 // that support the VK_ANDROID_external_memory_android_hardware_buffer extension.
// This chunk could move into a separate core_validation_android.cpp file... ?
2927
2928 // clang-format off
2929
2930 // Map external format and usage flags to/from equivalent Vulkan flags
2931 // (Tables as of v1.1.92)
2932
2933 // AHardwareBuffer Format Vulkan Format
2934 // ====================== =============
2935 // AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM VK_FORMAT_R8G8B8A8_UNORM
2936 // AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM VK_FORMAT_R8G8B8A8_UNORM
2937 // AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM VK_FORMAT_R8G8B8_UNORM
2938 // AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM VK_FORMAT_R5G6B5_UNORM_PACK16
2939 // AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT VK_FORMAT_R16G16B16A16_SFLOAT
2940 // AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM VK_FORMAT_A2B10G10R10_UNORM_PACK32
2941 // AHARDWAREBUFFER_FORMAT_D16_UNORM VK_FORMAT_D16_UNORM
2942 // AHARDWAREBUFFER_FORMAT_D24_UNORM VK_FORMAT_X8_D24_UNORM_PACK32
2943 // AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT VK_FORMAT_D24_UNORM_S8_UINT
2944 // AHARDWAREBUFFER_FORMAT_D32_FLOAT VK_FORMAT_D32_SFLOAT
2945 // AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT VK_FORMAT_D32_SFLOAT_S8_UINT
2946 // AHARDWAREBUFFER_FORMAT_S8_UINT VK_FORMAT_S8_UINT
2947
// The AHARDWAREBUFFER_FORMAT_* values are an enum in the NDK headers, but get passed in to Vulkan
// as uint32_t. Casting the enums here avoids scattering casts around in the code.
2950 std::map<uint32_t, VkFormat> ahb_format_map_a2v = {
2951 { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_R8G8B8A8_UNORM },
2952 { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM, VK_FORMAT_R8G8B8A8_UNORM },
2953 { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM, VK_FORMAT_R8G8B8_UNORM },
2954 { (uint32_t)AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM, VK_FORMAT_R5G6B5_UNORM_PACK16 },
2955 { (uint32_t)AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT, VK_FORMAT_R16G16B16A16_SFLOAT },
2956 { (uint32_t)AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM, VK_FORMAT_A2B10G10R10_UNORM_PACK32 },
2957 { (uint32_t)AHARDWAREBUFFER_FORMAT_D16_UNORM, VK_FORMAT_D16_UNORM },
2958 { (uint32_t)AHARDWAREBUFFER_FORMAT_D24_UNORM, VK_FORMAT_X8_D24_UNORM_PACK32 },
2959 { (uint32_t)AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT },
2960 { (uint32_t)AHARDWAREBUFFER_FORMAT_D32_FLOAT, VK_FORMAT_D32_SFLOAT },
2961 { (uint32_t)AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT },
2962 { (uint32_t)AHARDWAREBUFFER_FORMAT_S8_UINT, VK_FORMAT_S8_UINT }
2963 };
2964
2965 // AHardwareBuffer Usage Vulkan Usage or Creation Flag (Intermixed - Aargh!)
2966 // ===================== ===================================================
2967 // None VK_IMAGE_USAGE_TRANSFER_SRC_BIT
2968 // None VK_IMAGE_USAGE_TRANSFER_DST_BIT
2969 // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE VK_IMAGE_USAGE_SAMPLED_BIT
2970 // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
2971 // AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
2972 // AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
2973 // AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
2974 // AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE None
2975 // AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT VK_IMAGE_CREATE_PROTECTED_BIT
2976 // None VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
2977 // None VK_IMAGE_CREATE_EXTENDED_USAGE_BIT
2978
2979 // Same casting rationale. De-mixing the table to prevent type confusion and aliasing
2980 std::map<uint64_t, VkImageUsageFlags> ahb_usage_map_a2v = {
2981 { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE, (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) },
2982 { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER, (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) },
2983 { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, 0 }, // No equivalent
2984 };
2985
2986 std::map<uint64_t, VkImageCreateFlags> ahb_create_map_a2v = {
2987 { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP, VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT },
2988 { (uint64_t)AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT, VK_IMAGE_CREATE_PROTECTED_BIT },
2989 { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, 0 }, // No equivalent
2990 };
2991
2992 std::map<VkImageUsageFlags, uint64_t> ahb_usage_map_v2a = {
2993 { VK_IMAGE_USAGE_SAMPLED_BIT, (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE },
2994 { VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE },
2995 { VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, (uint64_t)AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER },
2996 { VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, (uint64_t)AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER },
2997 };
2998
2999 std::map<VkImageCreateFlags, uint64_t> ahb_create_map_v2a = {
3000 { VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, (uint64_t)AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP },
3001 { VK_IMAGE_CREATE_PROTECTED_BIT, (uint64_t)AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT },
3002 };
3003
3004 // clang-format on
3005
3006 //
3007 // AHB-extension new APIs
3008 //
PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device,const struct AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties) const3009 bool CoreChecks::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
3010 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties) const {
3011 bool skip = false;
3012 // buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags.
3013 AHardwareBuffer_Desc ahb_desc;
3014 AHardwareBuffer_describe(buffer, &ahb_desc);
3015 uint32_t required_flags = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
3016 AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
3017 AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
3018 if (0 == (ahb_desc.usage & required_flags)) {
3019 skip |= LogError(device, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884",
3020 "vkGetAndroidHardwareBufferPropertiesANDROID: The AHardwareBuffer's AHardwareBuffer_Desc.usage (0x%" PRIx64
3021 ") does not have any AHARDWAREBUFFER_USAGE_GPU_* flags set.",
3022 ahb_desc.usage);
3023 }
3024 return skip;
3025 }
3026
PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer) const3027 bool CoreChecks::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,
3028 const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
3029 struct AHardwareBuffer **pBuffer) const {
3030 bool skip = false;
3031 const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(pInfo->memory);
3032
3033 // VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in
3034 // VkExportMemoryAllocateInfoKHR::handleTypes when memory was created.
3035 if (!mem_info->is_export ||
3036 (0 == (mem_info->export_handle_type_flags & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID))) {
3037 skip |= LogError(device, "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882",
3038 "vkGetMemoryAndroidHardwareBufferANDROID: %s was not allocated for export, or the "
3039 "export handleTypes (0x%" PRIx32
3040 ") did not contain VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.",
3041 report_data->FormatHandle(pInfo->memory).c_str(), mem_info->export_handle_type_flags);
3042 }
3043
3044 // If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo
3045 // with non-NULL image member, then that image must already be bound to memory.
3046 if (mem_info->is_dedicated && (VK_NULL_HANDLE != mem_info->dedicated_image)) {
3047 const auto image_state = GetImageState(mem_info->dedicated_image);
3048 // count() requires DEVICE_MEMORY_STATE* const & or DEVICE_MEMORY_STATE*, not const DEVICE_MEMORY_STATE*.
3049 // But here is in a const function. It could get const DEVICE_MEMORY_STATE* only, so cast it.
3050 if ((nullptr == image_state) || (0 == (image_state->GetBoundMemory().count((DEVICE_MEMORY_STATE *)mem_info)))) {
3051 LogObjectList objlist(device);
3052 objlist.add(pInfo->memory);
3053 objlist.add(mem_info->dedicated_image);
3054 skip |= LogError(objlist, "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883",
3055 "vkGetMemoryAndroidHardwareBufferANDROID: %s was allocated using a dedicated "
3056 "%s, but that image is not bound to the VkDeviceMemory object.",
3057 report_data->FormatHandle(pInfo->memory).c_str(),
3058 report_data->FormatHandle(mem_info->dedicated_image).c_str());
3059 }
3060 }
3061
3062 return skip;
3063 }
3064
3065 //
3066 // AHB-specific validation within non-AHB APIs
3067 //
ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo * alloc_info) const3068 bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const {
3069 bool skip = false;
3070 auto import_ahb_info = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(alloc_info->pNext);
3071 auto exp_mem_alloc_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(alloc_info->pNext);
3072 auto mem_ded_alloc_info = lvl_find_in_chain<VkMemoryDedicatedAllocateInfo>(alloc_info->pNext);
3073
3074 if ((import_ahb_info) && (NULL != import_ahb_info->buffer)) {
3075 // This is an import with handleType of VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID
3076 AHardwareBuffer_Desc ahb_desc = {};
3077 AHardwareBuffer_describe(import_ahb_info->buffer, &ahb_desc);
3078
3079 // Validate AHardwareBuffer_Desc::usage is a valid usage for imported AHB
3080 //
3081 // BLOB & GPU_DATA_BUFFER combo specifically allowed
3082 if ((AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) || (0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
3083 // Otherwise, must be a combination from the AHardwareBuffer Format and Usage Equivalence tables
3084 // Usage must have at least one bit from the table. It may have additional bits not in the table
3085 uint64_t ahb_equiv_usage_bits = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
3086 AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
3087 AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
3088 if (0 == (ahb_desc.usage & ahb_equiv_usage_bits)) {
3089 skip |=
3090 LogError(device, "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881",
3091 "vkAllocateMemory: The AHardwareBuffer_Desc's usage (0x%" PRIx64 ") is not compatible with Vulkan.",
3092 ahb_desc.usage);
3093 }
3094 }
3095
3096 // Collect external buffer info
3097 VkPhysicalDeviceExternalBufferInfo pdebi = {};
3098 pdebi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
3099 pdebi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
3100 if (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & ahb_desc.usage) {
3101 pdebi.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
3102 }
3103 if (AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER & ahb_desc.usage) {
3104 pdebi.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER];
3105 }
3106 VkExternalBufferProperties ext_buf_props = {};
3107 ext_buf_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
3108
3109 DispatchGetPhysicalDeviceExternalBufferProperties(physical_device, &pdebi, &ext_buf_props);
3110
3111 // If buffer is not NULL, Android hardware buffers must be supported for import, as reported by
3112 // VkExternalImageFormatProperties or VkExternalBufferProperties.
3113 if (0 == (ext_buf_props.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT)) {
3114 // Collect external format info
3115 VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
3116 pdeifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
3117 pdeifi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
3118 VkPhysicalDeviceImageFormatInfo2 pdifi2 = {};
3119 pdifi2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
3120 pdifi2.pNext = &pdeifi;
3121 if (0 < ahb_format_map_a2v.count(ahb_desc.format)) pdifi2.format = ahb_format_map_a2v[ahb_desc.format];
3122 pdifi2.type = VK_IMAGE_TYPE_2D; // Seems likely
3123 pdifi2.tiling = VK_IMAGE_TILING_OPTIMAL; // Ditto
3124 if (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & ahb_desc.usage) {
3125 pdifi2.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
3126 }
3127 if (AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER & ahb_desc.usage) {
3128 pdifi2.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER];
3129 }
3130 if (AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP & ahb_desc.usage) {
3131 pdifi2.flags |= ahb_create_map_a2v[AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP];
3132 }
3133 if (AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT & ahb_desc.usage) {
3134 pdifi2.flags |= ahb_create_map_a2v[AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT];
3135 }
3136
3137 VkExternalImageFormatProperties ext_img_fmt_props = {};
3138 ext_img_fmt_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
3139 VkImageFormatProperties2 ifp2 = {};
3140 ifp2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
3141 ifp2.pNext = &ext_img_fmt_props;
3142
3143 VkResult fmt_lookup_result = DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &pdifi2, &ifp2);
3144
3145 if ((VK_SUCCESS != fmt_lookup_result) || (0 == (ext_img_fmt_props.externalMemoryProperties.externalMemoryFeatures &
3146 VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))) {
3147 skip |= LogError(device, "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880",
3148 "vkAllocateMemory: Neither the VkExternalImageFormatProperties nor the VkExternalBufferProperties "
3149 "structs for the AHardwareBuffer include the VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT flag.");
3150 }
3151 }
3152
3153 // Retrieve buffer and format properties of the provided AHardwareBuffer
3154 VkAndroidHardwareBufferFormatPropertiesANDROID ahb_format_props = {};
3155 ahb_format_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
3156 VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
3157 ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
3158 ahb_props.pNext = &ahb_format_props;
3159 DispatchGetAndroidHardwareBufferPropertiesANDROID(device, import_ahb_info->buffer, &ahb_props);
3160
3161 // allocationSize must be the size returned by vkGetAndroidHardwareBufferPropertiesANDROID for the Android hardware buffer
3162 if (alloc_info->allocationSize != ahb_props.allocationSize) {
3163 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-allocationSize-02383",
3164 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
3165 "struct, allocationSize (%" PRId64
3166 ") does not match the AHardwareBuffer's reported allocationSize (%" PRId64 ").",
3167 alloc_info->allocationSize, ahb_props.allocationSize);
3168 }
3169
3170 // memoryTypeIndex must be one of those returned by vkGetAndroidHardwareBufferPropertiesANDROID for the AHardwareBuffer
3171 // Note: memoryTypeIndex is an index, memoryTypeBits is a bitmask
3172 uint32_t mem_type_bitmask = 1 << alloc_info->memoryTypeIndex;
3173 if (0 == (mem_type_bitmask & ahb_props.memoryTypeBits)) {
3174 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385",
3175 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
3176 "struct, memoryTypeIndex (%" PRId32
3177 ") does not correspond to a bit set in AHardwareBuffer's reported "
3178 "memoryTypeBits bitmask (0x%" PRIx32 ").",
3179 alloc_info->memoryTypeIndex, ahb_props.memoryTypeBits);
3180 }
3181
3182 // Checks for allocations without a dedicated allocation requirement
3183 if ((nullptr == mem_ded_alloc_info) || (VK_NULL_HANDLE == mem_ded_alloc_info->image)) {
3184 // the Android hardware buffer must have a format of AHARDWAREBUFFER_FORMAT_BLOB and a usage that includes
3185 // AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER
3186 if (((uint64_t)AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) ||
3187 (0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
3188 skip |= LogError(
3189 device, "VUID-VkMemoryAllocateInfo-pNext-02384",
3190 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
3191 "struct without a dedicated allocation requirement, while the AHardwareBuffer_Desc's format ( %u ) is not "
3192 "AHARDWAREBUFFER_FORMAT_BLOB or usage (0x%" PRIx64 ") does not include AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER.",
3193 ahb_desc.format, ahb_desc.usage);
3194 }
3195 } else { // Checks specific to import with a dedicated allocation requirement
3196 const VkImageCreateInfo *ici = &(GetImageState(mem_ded_alloc_info->image)->createInfo);
3197
3198 // The Android hardware buffer's usage must include at least one of AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER or
3199 // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
3200 if (0 == (ahb_desc.usage & (AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE))) {
3201 skip |= LogError(
3202 device, "VUID-VkMemoryAllocateInfo-pNext-02386",
3203 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID and a "
3204 "dedicated allocation requirement, while the AHardwareBuffer's usage (0x%" PRIx64
3205 ") contains neither AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER nor AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE.",
3206 ahb_desc.usage);
3207 }
3208
3209 // the format of image must be VK_FORMAT_UNDEFINED or the format returned by
3210 // vkGetAndroidHardwareBufferPropertiesANDROID
3211 if ((ici->format != ahb_format_props.format) && (VK_FORMAT_UNDEFINED != ici->format)) {
3212 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-pNext-02387",
3213 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
3214 "VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
3215 "format (%s) is not VK_FORMAT_UNDEFINED and does not match the AHardwareBuffer's format (%s).",
3216 string_VkFormat(ici->format), string_VkFormat(ahb_format_props.format));
3217 }
3218
3219 // The width, height, and array layer dimensions of image and the Android hardwarebuffer must be identical
3220 if ((ici->extent.width != ahb_desc.width) || (ici->extent.height != ahb_desc.height) ||
3221 (ici->arrayLayers != ahb_desc.layers)) {
3222 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-pNext-02388",
3223 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
3224 "VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
3225 "width, height, and arrayLayers (%" PRId32 " %" PRId32 " %" PRId32
3226 ") do not match those of the AHardwareBuffer (%" PRId32 " %" PRId32 " %" PRId32 ").",
3227 ici->extent.width, ici->extent.height, ici->arrayLayers, ahb_desc.width, ahb_desc.height,
3228 ahb_desc.layers);
3229 }
3230
3231 // If the Android hardware buffer's usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, the image must
3232 // have either a full mipmap chain or exactly 1 mip level.
3233 //
3234 // NOTE! The language of this VUID contradicts the language in the spec (1.1.93), which says "The
3235 // AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE flag does not correspond to a Vulkan image usage or creation flag. Instead,
3236 // its presence indicates that the Android hardware buffer contains a complete mipmap chain, and its absence indicates
3237 // that the Android hardware buffer contains only a single mip level."
3238 //
3239 // TODO: This code implements the VUID's meaning, but it seems likely that the spec text is actually correct.
3240 // Clarification requested.
3241 if ((ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE) && (ici->mipLevels != 1) &&
3242 (ici->mipLevels != FullMipChainLevels(ici->extent))) {
3243 skip |=
3244 LogError(device, "VUID-VkMemoryAllocateInfo-pNext-02389",
3245 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
3246 "usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE but mipLevels (%" PRId32
3247 ") is neither 1 nor full mip "
3248 "chain levels (%" PRId32 ").",
3249 ici->mipLevels, FullMipChainLevels(ici->extent));
3250 }
3251
3252 // each bit set in the usage of image must be listed in AHardwareBuffer Usage Equivalence, and if there is a
3253 // corresponding AHARDWAREBUFFER_USAGE bit listed that bit must be included in the Android hardware buffer's
3254 // AHardwareBuffer_Desc::usage
3255 if (ici->usage & ~(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
3256 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
3257 VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
3258 skip |=
3259 LogError(device, "VUID-VkMemoryAllocateInfo-pNext-02390",
3260 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
3261 "dedicated image usage bits (0x%" PRIx64
3262 ") include an issue not listed in the AHardwareBuffer Usage Equivalence table.",
3263 ici->usage);
3264 }
3265
3266 std::vector<VkImageUsageFlags> usages = {VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
3267 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
3268 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT};
3269 for (VkImageUsageFlags ubit : usages) {
3270 if (ici->usage & ubit) {
3271 uint64_t ahb_usage = ahb_usage_map_v2a[ubit];
3272 if (0 == (ahb_usage & ahb_desc.usage)) {
3273 skip |= LogError(
3274 device, "VUID-VkMemoryAllocateInfo-pNext-02390",
3275 "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
3276 "The dedicated image usage bit %s equivalent is not in AHardwareBuffer_Desc.usage (0x%" PRIx64 ") ",
3277 string_VkImageUsageFlags(ubit).c_str(), ahb_desc.usage);
3278 }
3279 }
3280 }
3281 }
3282 } else { // Not an import
3283 if ((exp_mem_alloc_info) && (mem_ded_alloc_info) &&
3284 (0 != (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID & exp_mem_alloc_info->handleTypes)) &&
3285 (VK_NULL_HANDLE != mem_ded_alloc_info->image)) {
3286 // This is an Android HW Buffer export
3287 if (0 != alloc_info->allocationSize) {
3288 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-pNext-01874",
3289 "vkAllocateMemory: pNext chain indicates a dedicated Android Hardware Buffer export allocation, "
3290 "but allocationSize is non-zero.");
3291 }
3292 } else {
3293 if (0 == alloc_info->allocationSize) {
3294 skip |= LogError(
3295 device, "VUID-VkMemoryAllocateInfo-pNext-01874",
3296 "vkAllocateMemory: pNext chain does not indicate a dedicated export allocation, but allocationSize is 0.");
3297 };
3298 }
3299 }
3300 return skip;
3301 }
3302
ValidateGetImageMemoryRequirementsANDROID(const VkImage image,const char * func_name) const3303 bool CoreChecks::ValidateGetImageMemoryRequirementsANDROID(const VkImage image, const char *func_name) const {
3304 bool skip = false;
3305
3306 const IMAGE_STATE *image_state = GetImageState(image);
3307 if (image_state != nullptr) {
3308 if (image_state->external_ahb && (0 == image_state->GetBoundMemory().size())) {
3309 const char *vuid = strcmp(func_name, "vkGetImageMemoryRequirements()") == 0
3310 ? "VUID-vkGetImageMemoryRequirements-image-04004"
3311 : "VUID-VkImageMemoryRequirementsInfo2-image-01897";
3312 skip |=
3313 LogError(image, vuid,
3314 "%s: Attempt get image memory requirements for an image created with a "
3315 "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID handleType, which has not yet been "
3316 "bound to memory.",
3317 func_name);
3318 }
3319 }
3320 return skip;
3321 }
3322
ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,const VkImageFormatProperties2 * pImageFormatProperties) const3323 bool CoreChecks::ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(
3324 const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, const VkImageFormatProperties2 *pImageFormatProperties) const {
3325 bool skip = false;
3326 const VkAndroidHardwareBufferUsageANDROID *ahb_usage =
3327 lvl_find_in_chain<VkAndroidHardwareBufferUsageANDROID>(pImageFormatProperties->pNext);
3328 if (nullptr != ahb_usage) {
3329 const VkPhysicalDeviceExternalImageFormatInfo *pdeifi =
3330 lvl_find_in_chain<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
3331 if ((nullptr == pdeifi) || (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID != pdeifi->handleType)) {
3332 skip |= LogError(device, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868",
3333 "vkGetPhysicalDeviceImageFormatProperties2: pImageFormatProperties includes a chained "
3334 "VkAndroidHardwareBufferUsageANDROID struct, but pImageFormatInfo does not include a chained "
3335 "VkPhysicalDeviceExternalImageFormatInfo struct with handleType "
3336 "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.");
3337 }
3338 }
3339 return skip;
3340 }
3341
ValidateBufferImportedHandleANDROID(const char * func_name,VkExternalMemoryHandleTypeFlags handleType,VkDeviceMemory memory,VkBuffer buffer) const3342 bool CoreChecks::ValidateBufferImportedHandleANDROID(const char *func_name, VkExternalMemoryHandleTypeFlags handleType,
3343 VkDeviceMemory memory, VkBuffer buffer) const {
3344 bool skip = false;
3345 if ((handleType & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) == 0) {
3346 const char *vuid = (strcmp(func_name, "vkBindBufferMemory()") == 0) ? "VUID-vkBindBufferMemory-memory-02986"
3347 : "VUID-VkBindBufferMemoryInfo-memory-02986";
3348 LogObjectList objlist(buffer);
3349 objlist.add(memory);
3350 skip |= LogError(objlist, vuid,
3351 "%s: The VkDeviceMemory (%s) was created with an AHB import operation which is not set "
3352 "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID in the VkBuffer (%s) "
3353 "VkExternalMemoryBufferreateInfo::handleType (%s)",
3354 func_name, report_data->FormatHandle(memory).c_str(), report_data->FormatHandle(buffer).c_str(),
3355 string_VkExternalMemoryHandleTypeFlags(handleType).c_str());
3356 }
3357 return skip;
3358 }
3359
ValidateImageImportedHandleANDROID(const char * func_name,VkExternalMemoryHandleTypeFlags handleType,VkDeviceMemory memory,VkImage image) const3360 bool CoreChecks::ValidateImageImportedHandleANDROID(const char *func_name, VkExternalMemoryHandleTypeFlags handleType,
3361 VkDeviceMemory memory, VkImage image) const {
3362 bool skip = false;
3363 if ((handleType & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) == 0) {
3364 const char *vuid = (strcmp(func_name, "vkBindImageMemory()") == 0) ? "VUID-vkBindImageMemory-memory-02990"
3365 : "VUID-VkBindImageMemoryInfo-memory-02990";
3366 LogObjectList objlist(image);
3367 objlist.add(memory);
3368 skip |= LogError(objlist, vuid,
3369 "%s: The VkDeviceMemory (%s) was created with an AHB import operation which is not set "
3370 "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID in the VkImage (%s) "
3371 "VkExternalMemoryImageCreateInfo::handleType (%s)",
3372 func_name, report_data->FormatHandle(memory).c_str(), report_data->FormatHandle(image).c_str(),
3373 string_VkExternalMemoryHandleTypeFlags(handleType).c_str());
3374 }
3375 return skip;
3376 }
3377
3378 #else // !VK_USE_PLATFORM_ANDROID_KHR
3379
ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo * alloc_info) const3380 bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const { return false; }
3381
// Non-Android stub: AHB usage-struct chaining validation is a no-op on other platforms.
bool CoreChecks::ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(
    const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, const VkImageFormatProperties2 *pImageFormatProperties) const {
    return false;
}
3386
ValidateGetImageMemoryRequirementsANDROID(const VkImage image,const char * func_name) const3387 bool CoreChecks::ValidateGetImageMemoryRequirementsANDROID(const VkImage image, const char *func_name) const { return false; }
3388
// Non-Android stub: AHB buffer-import handle-type check is a no-op on other platforms.
bool CoreChecks::ValidateBufferImportedHandleANDROID(const char *func_name, VkExternalMemoryHandleTypeFlags handleType,
                                                     VkDeviceMemory memory, VkBuffer buffer) const {
    return false;
}
3393
// Non-Android stub: AHB image-import handle-type check is a no-op on other platforms.
bool CoreChecks::ValidateImageImportedHandleANDROID(const char *func_name, VkExternalMemoryHandleTypeFlags handleType,
                                                    VkDeviceMemory memory, VkImage image) const {
    return false;
}
3398
3399 #endif // VK_USE_PLATFORM_ANDROID_KHR
3400
PreCallValidateAllocateMemory(VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory) const3401 bool CoreChecks::PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
3402 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) const {
3403 bool skip = false;
3404 if (memObjMap.size() >= phys_dev_props.limits.maxMemoryAllocationCount) {
3405 skip |= LogError(device, kVUIDUndefined,
3406 "vkAllocateMemory: Number of currently valid memory objects is not less than the maximum allowed (%u).",
3407 phys_dev_props.limits.maxMemoryAllocationCount);
3408 }
3409
3410 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
3411 skip |= ValidateAllocateMemoryANDROID(pAllocateInfo);
3412 } else {
3413 if (0 == pAllocateInfo->allocationSize) {
3414 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-allocationSize-00638", "vkAllocateMemory: allocationSize is 0.");
3415 };
3416 }
3417
3418 auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
3419 if (chained_flags_struct && chained_flags_struct->flags == VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT) {
3420 skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_flags_struct->deviceMask, device,
3421 "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675");
3422 skip |=
3423 ValidateDeviceMaskToZero(chained_flags_struct->deviceMask, device, "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676");
3424 }
3425
3426 if (pAllocateInfo->memoryTypeIndex >= phys_dev_mem_props.memoryTypeCount) {
3427 skip |= LogError(device, "VUID-vkAllocateMemory-pAllocateInfo-01714",
3428 "vkAllocateMemory: attempting to allocate memory type %u, which is not a valid index. Device only "
3429 "advertises %u memory types.",
3430 pAllocateInfo->memoryTypeIndex, phys_dev_mem_props.memoryTypeCount);
3431 } else {
3432 const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
3433 if (pAllocateInfo->allocationSize > phys_dev_mem_props.memoryHeaps[memory_type.heapIndex].size) {
3434 skip |= LogError(device, "VUID-vkAllocateMemory-pAllocateInfo-01713",
3435 "vkAllocateMemory: attempting to allocate %" PRIu64
3436 " bytes from heap %u,"
3437 "but size of that heap is only %" PRIu64 " bytes.",
3438 pAllocateInfo->allocationSize, memory_type.heapIndex,
3439 phys_dev_mem_props.memoryHeaps[memory_type.heapIndex].size);
3440 }
3441
3442 if (!enabled_features.device_coherent_memory_features.deviceCoherentMemory &&
3443 ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD) != 0)) {
3444 skip |= LogError(device, "VUID-vkAllocateMemory-deviceCoherentMemory-02790",
3445 "vkAllocateMemory: attempting to allocate memory type %u, which includes the "
3446 "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD memory property, but the deviceCoherentMemory feature "
3447 "is not enabled.",
3448 pAllocateInfo->memoryTypeIndex);
3449 }
3450
3451 if ((enabled_features.core11.protectedMemory == VK_FALSE) &&
3452 ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)) {
3453 skip |= LogError(device, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872",
3454 "vkAllocateMemory(): attempting to allocate memory type %u, which includes the "
3455 "VK_MEMORY_PROPERTY_PROTECTED_BIT memory property, but the protectedMemory feature "
3456 "is not enabled.",
3457 pAllocateInfo->memoryTypeIndex);
3458 }
3459 }
3460
3461 bool imported_ahb = false;
3462 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3463 // "memory is not an imported Android Hardware Buffer" refers to VkImportAndroidHardwareBufferInfoANDROID with a non-NULL
3464 // buffer value. Memory imported has another VUID to check size and allocationSize match up
3465 auto imported_ahb_info = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
3466 if (imported_ahb_info != nullptr) {
3467 imported_ahb = imported_ahb_info->buffer != nullptr;
3468 }
3469 #endif
3470 auto dedicated_allocate_info = lvl_find_in_chain<VkMemoryDedicatedAllocateInfo>(pAllocateInfo->pNext);
3471 if (dedicated_allocate_info) {
3472 if ((dedicated_allocate_info->buffer != VK_NULL_HANDLE) && (dedicated_allocate_info->image != VK_NULL_HANDLE)) {
3473 skip |= LogError(device, "VUID-VkMemoryDedicatedAllocateInfo-image-01432",
3474 "vkAllocateMemory: Either buffer or image has to be VK_NULL_HANDLE in VkMemoryDedicatedAllocateInfo");
3475 } else if (dedicated_allocate_info->image != VK_NULL_HANDLE) {
3476 // Dedicated VkImage
3477 const IMAGE_STATE *image_state = GetImageState(dedicated_allocate_info->image);
3478 if (image_state->disjoint == true) {
3479 skip |= LogError(
3480 device, "VUID-VkMemoryDedicatedAllocateInfo-image-01797",
3481 "vkAllocateMemory: VkImage %s can't be used in VkMemoryDedicatedAllocateInfo because it was created with "
3482 "VK_IMAGE_CREATE_DISJOINT_BIT",
3483 report_data->FormatHandle(dedicated_allocate_info->image).c_str());
3484 } else {
3485 if ((pAllocateInfo->allocationSize != image_state->requirements.size) && (imported_ahb == false)) {
3486 const char *vuid = (device_extensions.vk_android_external_memory_android_hardware_buffer)
3487 ? "VUID-VkMemoryDedicatedAllocateInfo-image-02964"
3488 : "VUID-VkMemoryDedicatedAllocateInfo-image-01433";
3489 skip |= LogError(
3490 device, vuid,
3491 "vkAllocateMemory: Allocation Size (%u) needs to be equal to VkImage %s VkMemoryRequirements::size (%u)",
3492 pAllocateInfo->allocationSize, report_data->FormatHandle(dedicated_allocate_info->image).c_str(),
3493 image_state->requirements.size);
3494 }
3495 if ((image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) != 0) {
3496 skip |= LogError(
3497 device, "VUID-VkMemoryDedicatedAllocateInfo-image-01434",
3498 "vkAllocateMemory: VkImage %s can't be used in VkMemoryDedicatedAllocateInfo because it was created with "
3499 "VK_IMAGE_CREATE_SPARSE_BINDING_BIT",
3500 report_data->FormatHandle(dedicated_allocate_info->image).c_str());
3501 }
3502 }
3503 } else if (dedicated_allocate_info->buffer != VK_NULL_HANDLE) {
3504 // Dedicated VkBuffer
3505 const BUFFER_STATE *buffer_state = GetBufferState(dedicated_allocate_info->buffer);
3506 if ((pAllocateInfo->allocationSize != buffer_state->requirements.size) && (imported_ahb == false)) {
3507 const char *vuid = (device_extensions.vk_android_external_memory_android_hardware_buffer)
3508 ? "VUID-VkMemoryDedicatedAllocateInfo-buffer-02965"
3509 : "VUID-VkMemoryDedicatedAllocateInfo-buffer-01435";
3510 skip |= LogError(
3511 device, vuid,
3512 "vkAllocateMemory: Allocation Size (%u) needs to be equal to VkBuffer %s VkMemoryRequirements::size (%u)",
3513 pAllocateInfo->allocationSize, report_data->FormatHandle(dedicated_allocate_info->buffer).c_str(),
3514 buffer_state->requirements.size);
3515 }
3516 if ((buffer_state->createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) != 0) {
3517 skip |= LogError(
3518 device, "VUID-VkMemoryDedicatedAllocateInfo-buffer-01436",
3519 "vkAllocateMemory: VkBuffer %s can't be used in VkMemoryDedicatedAllocateInfo because it was created with "
3520 "VK_BUFFER_CREATE_SPARSE_BINDING_BIT",
3521 report_data->FormatHandle(dedicated_allocate_info->buffer).c_str());
3522 }
3523 }
3524 }
3525
3526 // TODO: VUIDs ending in 00643, 00644, 00646, 00647, 01742, 01743, 01745, 00645, 00648, 01744
3527 return skip;
3528 }
3529
3530 // For given obj node, if it is use, flag a validation error and return callback result, else return false
ValidateObjectNotInUse(const BASE_NODE * obj_node,const VulkanTypedHandle & obj_struct,const char * caller_name,const char * error_code) const3531 bool CoreChecks::ValidateObjectNotInUse(const BASE_NODE *obj_node, const VulkanTypedHandle &obj_struct, const char *caller_name,
3532 const char *error_code) const {
3533 if (disabled[object_in_use]) return false;
3534 bool skip = false;
3535 if (obj_node->in_use.load()) {
3536 skip |= LogError(device, error_code, "Cannot call %s on %s that is currently in use by a command buffer.", caller_name,
3537 report_data->FormatHandle(obj_struct).c_str());
3538 }
3539 return skip;
3540 }
3541
PreCallValidateFreeMemory(VkDevice device,VkDeviceMemory mem,const VkAllocationCallbacks * pAllocator) const3542 bool CoreChecks::PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) const {
3543 const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
3544 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
3545 bool skip = false;
3546 if (mem_info) {
3547 skip |= ValidateObjectNotInUse(mem_info, obj_struct, "vkFreeMemory", "VUID-vkFreeMemory-memory-00677");
3548 }
3549 return skip;
3550 }
3551
3552 // Validate that given Map memory range is valid. This means that the memory should not already be mapped,
3553 // and that the size of the map range should be:
3554 // 1. Not zero
3555 // 2. Within the size of the memory allocation
ValidateMapMemRange(const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize offset,VkDeviceSize size) const3556 bool CoreChecks::ValidateMapMemRange(const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize offset, VkDeviceSize size) const {
3557 bool skip = false;
3558 assert(mem_info);
3559 const auto mem = mem_info->mem;
3560 if (size == 0) {
3561 skip = LogError(mem, "VUID-vkMapMemory-size-00680", "VkMapMemory: Attempting to map memory range of size zero");
3562 }
3563
3564 // It is an application error to call VkMapMemory on an object that is already mapped
3565 if (mem_info->mapped_range.size != 0) {
3566 skip = LogError(mem, "VUID-vkMapMemory-memory-00678", "VkMapMemory: Attempting to map memory on an already-mapped %s.",
3567 report_data->FormatHandle(mem).c_str());
3568 }
3569
3570 // Validate offset is not over allocaiton size
3571 if (offset >= mem_info->alloc_info.allocationSize) {
3572 skip = LogError(mem, "VUID-vkMapMemory-offset-00679",
3573 "VkMapMemory: Attempting to map memory with an offset of 0x%" PRIx64
3574 " which is larger than the total array size 0x%" PRIx64,
3575 offset, mem_info->alloc_info.allocationSize);
3576 }
3577 // Validate that offset + size is within object's allocationSize
3578 if (size != VK_WHOLE_SIZE) {
3579 if ((offset + size) > mem_info->alloc_info.allocationSize) {
3580 skip = LogError(mem, "VUID-vkMapMemory-size-00681",
3581 "VkMapMemory: Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64 " oversteps total array size 0x%" PRIx64
3582 ".",
3583 offset, size + offset, mem_info->alloc_info.allocationSize);
3584 }
3585 }
3586 return skip;
3587 }
3588
PreCallValidateWaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout) const3589 bool CoreChecks::PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
3590 uint64_t timeout) const {
3591 // Verify fence status of submitted fences
3592 bool skip = false;
3593 for (uint32_t i = 0; i < fenceCount; i++) {
3594 skip |= VerifyQueueStateToFence(pFences[i]);
3595 }
3596 return skip;
3597 }
3598
PreCallValidateGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue) const3599 bool CoreChecks::PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
3600 VkQueue *pQueue) const {
3601 bool skip = false;
3602
3603 skip |= ValidateDeviceQueueFamily(queueFamilyIndex, "vkGetDeviceQueue", "queueFamilyIndex",
3604 "VUID-vkGetDeviceQueue-queueFamilyIndex-00384");
3605 const auto &queue_data = queue_family_index_map.find(queueFamilyIndex);
3606 if ((queue_data != queue_family_index_map.end()) && (queue_data->second <= queueIndex)) {
3607 skip |= LogError(device, "VUID-vkGetDeviceQueue-queueIndex-00385",
3608 "vkGetDeviceQueue: queueIndex (=%" PRIu32
3609 ") is not less than the number of queues requested from queueFamilyIndex (=%" PRIu32
3610 ") when the device was created (i.e. is not less than %" PRIu32 ").",
3611 queueIndex, queueFamilyIndex, queue_data->second);
3612 }
3613
3614 const auto &queue_flags = queue_family_create_flags_map.find(queueFamilyIndex);
3615 if ((queue_flags != queue_family_create_flags_map.end()) && (queue_flags->second != 0)) {
3616 skip |= LogError(device, "VUID-vkGetDeviceQueue-flags-01841",
3617 "vkGetDeviceQueue: queueIndex (=%" PRIu32
3618 ") was created with a non-zero VkDeviceQueueCreateFlags. Need to use vkGetDeviceQueue2 instead.",
3619 queueIndex);
3620 }
3621 return skip;
3622 }
3623
PreCallValidateQueueWaitIdle(VkQueue queue) const3624 bool CoreChecks::PreCallValidateQueueWaitIdle(VkQueue queue) const {
3625 const QUEUE_STATE *queue_state = GetQueueState(queue);
3626 return VerifyQueueStateToSeq(queue_state, queue_state->seq + queue_state->submissions.size());
3627 }
3628
PreCallValidateDeviceWaitIdle(VkDevice device) const3629 bool CoreChecks::PreCallValidateDeviceWaitIdle(VkDevice device) const {
3630 bool skip = false;
3631 const auto &const_queue_map = queueMap;
3632 for (auto &queue : const_queue_map) {
3633 skip |= VerifyQueueStateToSeq(&queue.second, queue.second.seq + queue.second.submissions.size());
3634 }
3635 return skip;
3636 }
3637
PreCallValidateCreateSemaphore(VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore) const3638 bool CoreChecks::PreCallValidateCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
3639 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) const {
3640 bool skip = false;
3641 auto *sem_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
3642
3643 if (sem_type_create_info && sem_type_create_info->semaphoreType == VK_SEMAPHORE_TYPE_TIMELINE_KHR &&
3644 !enabled_features.core12.timelineSemaphore && !device_extensions.vk_khr_timeline_semaphore) {
3645 skip |= LogError(device, "VUID-VkSemaphoreTypeCreateInfo-timelineSemaphore-03252",
3646 "VkCreateSemaphore: timelineSemaphore feature is not enabled, can not create timeline semaphores");
3647 }
3648
3649 if (sem_type_create_info && sem_type_create_info->semaphoreType == VK_SEMAPHORE_TYPE_BINARY_KHR &&
3650 sem_type_create_info->initialValue != 0) {
3651 skip |= LogError(device, "VUID-VkSemaphoreTypeCreateInfo-semaphoreType-03279",
3652 "vkCreateSemaphore: if semaphoreType is VK_SEMAPHORE_TYPE_BINARY_KHR, initialValue must be zero");
3653 }
3654
3655 return skip;
3656 }
3657
PreCallValidateWaitSemaphores(VkDevice device,const VkSemaphoreWaitInfoKHR * pWaitInfo,uint64_t timeout) const3658 bool CoreChecks::PreCallValidateWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfoKHR *pWaitInfo, uint64_t timeout) const {
3659 return ValidateWaitSemaphores(device, pWaitInfo, timeout, "VkWaitSemaphores");
3660 }
3661
PreCallValidateWaitSemaphoresKHR(VkDevice device,const VkSemaphoreWaitInfoKHR * pWaitInfo,uint64_t timeout) const3662 bool CoreChecks::PreCallValidateWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR *pWaitInfo,
3663 uint64_t timeout) const {
3664 return ValidateWaitSemaphores(device, pWaitInfo, timeout, "VkWaitSemaphoresKHR");
3665 }
3666
ValidateWaitSemaphores(VkDevice device,const VkSemaphoreWaitInfoKHR * pWaitInfo,uint64_t timeout,const char * apiName) const3667 bool CoreChecks::ValidateWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfoKHR *pWaitInfo, uint64_t timeout,
3668 const char *apiName) const {
3669 bool skip = false;
3670
3671 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
3672 auto *pSemaphore = GetSemaphoreState(pWaitInfo->pSemaphores[i]);
3673 if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
3674 skip |= LogError(pWaitInfo->pSemaphores[i], "VUID-VkSemaphoreWaitInfo-pSemaphores-03256",
3675 "%s(): all semaphores in pWaitInfo must be timeline semaphores, but %s is not", apiName,
3676 report_data->FormatHandle(pWaitInfo->pSemaphores[i]).c_str());
3677 }
3678 }
3679
3680 return skip;
3681 }
3682
PreCallValidateDestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator) const3683 bool CoreChecks::PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) const {
3684 const FENCE_STATE *fence_node = GetFenceState(fence);
3685 bool skip = false;
3686 if (fence_node) {
3687 if (fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
3688 skip |= LogError(fence, "VUID-vkDestroyFence-fence-01120", "%s is in use.", report_data->FormatHandle(fence).c_str());
3689 }
3690 }
3691 return skip;
3692 }
3693
PreCallValidateDestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator) const3694 bool CoreChecks::PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore,
3695 const VkAllocationCallbacks *pAllocator) const {
3696 const SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
3697 const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
3698 bool skip = false;
3699 if (sema_node) {
3700 skip |= ValidateObjectNotInUse(sema_node, obj_struct, "vkDestroySemaphore", "VUID-vkDestroySemaphore-semaphore-01137");
3701 }
3702 return skip;
3703 }
3704
PreCallValidateDestroyEvent(VkDevice device,VkEvent event,const VkAllocationCallbacks * pAllocator) const3705 bool CoreChecks::PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) const {
3706 const EVENT_STATE *event_state = GetEventState(event);
3707 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
3708 bool skip = false;
3709 if (event_state) {
3710 skip |= ValidateObjectNotInUse(event_state, obj_struct, "vkDestroyEvent", "VUID-vkDestroyEvent-event-01145");
3711 }
3712 return skip;
3713 }
3714
PreCallValidateDestroyQueryPool(VkDevice device,VkQueryPool queryPool,const VkAllocationCallbacks * pAllocator) const3715 bool CoreChecks::PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
3716 const VkAllocationCallbacks *pAllocator) const {
3717 if (disabled[query_validation]) return false;
3718 const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
3719 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
3720 bool skip = false;
3721 if (qp_state) {
3722 skip |= ValidateObjectNotInUse(qp_state, obj_struct, "vkDestroyQueryPool", "VUID-vkDestroyQueryPool-queryPool-00793");
3723 }
3724 return skip;
3725 }
3726
ValidatePerformanceQueryResults(const char * cmd_name,const QUERY_POOL_STATE * query_pool_state,uint32_t firstQuery,uint32_t queryCount,VkQueryResultFlags flags) const3727 bool CoreChecks::ValidatePerformanceQueryResults(const char *cmd_name, const QUERY_POOL_STATE *query_pool_state,
3728 uint32_t firstQuery, uint32_t queryCount, VkQueryResultFlags flags) const {
3729 bool skip = false;
3730
3731 if (flags & (VK_QUERY_RESULT_WITH_AVAILABILITY_BIT | VK_QUERY_RESULT_PARTIAL_BIT | VK_QUERY_RESULT_64_BIT)) {
3732 string invalid_flags_string;
3733 for (auto flag : {VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT, VK_QUERY_RESULT_64_BIT}) {
3734 if (flag & flags) {
3735 if (invalid_flags_string.size()) {
3736 invalid_flags_string += " and ";
3737 }
3738 invalid_flags_string += string_VkQueryResultFlagBits(flag);
3739 }
3740 }
3741 skip |= LogError(query_pool_state->pool,
3742 strcmp(cmd_name, "vkGetQueryPoolResults") == 0 ? "VUID-vkGetQueryPoolResults-queryType-03230"
3743 : "VUID-vkCmdCopyQueryPoolResults-queryType-03233",
3744 "%s: QueryPool %s was created with a queryType of"
3745 "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR but flags contains %s.",
3746 cmd_name, report_data->FormatHandle(query_pool_state->pool).c_str(), invalid_flags_string.c_str());
3747 }
3748
3749 for (uint32_t queryIndex = firstQuery; queryIndex < queryCount; queryIndex++) {
3750 uint32_t submitted = 0;
3751 for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
3752 QueryObject obj(QueryObject(query_pool_state->pool, queryIndex), passIndex);
3753 auto query_pass_iter = queryToStateMap.find(obj);
3754 if (query_pass_iter != queryToStateMap.end() && query_pass_iter->second == QUERYSTATE_AVAILABLE) submitted++;
3755 }
3756 if (submitted < query_pool_state->n_performance_passes) {
3757 skip |= LogError(query_pool_state->pool, "VUID-vkGetQueryPoolResults-queryType-03231",
3758 "%s: QueryPool %s has %u performance query passes, but the query has only been "
3759 "submitted for %u of the passes.",
3760 cmd_name, report_data->FormatHandle(query_pool_state->pool).c_str(),
3761 query_pool_state->n_performance_passes, submitted);
3762 }
3763 }
3764
3765 return skip;
3766 }
3767
ValidateGetQueryPoolPerformanceResults(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,void * pData,VkDeviceSize stride,VkQueryResultFlags flags,const char * apiName) const3768 bool CoreChecks::ValidateGetQueryPoolPerformanceResults(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
3769 void *pData, VkDeviceSize stride, VkQueryResultFlags flags,
3770 const char *apiName) const {
3771 bool skip = false;
3772 const auto query_pool_state = GetQueryPoolState(queryPool);
3773
3774 if (!query_pool_state || query_pool_state->createInfo.queryType != VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) return skip;
3775
3776 if (((((uintptr_t)pData) % sizeof(VkPerformanceCounterResultKHR)) != 0 ||
3777 (stride % sizeof(VkPerformanceCounterResultKHR)) != 0)) {
3778 skip |= LogError(queryPool, "VUID-vkGetQueryPoolResults-queryType-03229",
3779 "%s(): QueryPool %s was created with a queryType of "
3780 "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR but pData & stride are not multiple of the "
3781 "size of VkPerformanceCounterResultKHR.",
3782 apiName, report_data->FormatHandle(queryPool).c_str());
3783 }
3784
3785 skip |= ValidatePerformanceQueryResults(apiName, query_pool_state, firstQuery, queryCount, flags);
3786
3787 return skip;
3788 }
3789
PreCallValidateGetQueryPoolResults(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VkDeviceSize stride,VkQueryResultFlags flags) const3790 bool CoreChecks::PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
3791 uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride,
3792 VkQueryResultFlags flags) const {
3793 if (disabled[query_validation]) return false;
3794 bool skip = false;
3795 skip |= ValidateQueryPoolStride("VUID-vkGetQueryPoolResults-flags-02827", "VUID-vkGetQueryPoolResults-flags-00815", stride,
3796 "dataSize", dataSize, flags);
3797 skip |= ValidateQueryPoolIndex(queryPool, firstQuery, queryCount, "vkGetQueryPoolResults()",
3798 "VUID-vkGetQueryPoolResults-firstQuery-00813", "VUID-vkGetQueryPoolResults-firstQuery-00816");
3799 skip |=
3800 ValidateGetQueryPoolPerformanceResults(queryPool, firstQuery, queryCount, pData, stride, flags, "vkGetQueryPoolResults");
3801
3802 const auto query_pool_state = GetQueryPoolState(queryPool);
3803 if (query_pool_state) {
3804 if ((query_pool_state->createInfo.queryType == VK_QUERY_TYPE_TIMESTAMP) && (flags & VK_QUERY_RESULT_PARTIAL_BIT)) {
3805 skip |= LogError(
3806 queryPool, "VUID-vkGetQueryPoolResults-queryType-00818",
3807 "%s was created with a queryType of VK_QUERY_TYPE_TIMESTAMP but flags contains VK_QUERY_RESULT_PARTIAL_BIT.",
3808 report_data->FormatHandle(queryPool).c_str());
3809 }
3810
3811 if (!skip) {
3812 uint32_t query_avail_data = (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) ? 1 : 0;
3813 uint32_t query_size_in_bytes = (flags & VK_QUERY_RESULT_64_BIT) ? sizeof(uint64_t) : sizeof(uint32_t);
3814 uint32_t query_items = 0;
3815 uint32_t query_size = 0;
3816
3817 switch (query_pool_state->createInfo.queryType) {
3818 case VK_QUERY_TYPE_OCCLUSION:
3819 // Occlusion queries write one integer value - the number of samples passed.
3820 query_items = 1;
3821 query_size = query_size_in_bytes * (query_items + query_avail_data);
3822 break;
3823
3824 case VK_QUERY_TYPE_PIPELINE_STATISTICS:
3825 // Pipeline statistics queries write one integer value for each bit that is enabled in the pipelineStatistics
3826 // when the pool is created
3827 {
3828 const int num_bits = sizeof(VkFlags) * CHAR_BIT;
3829 std::bitset<num_bits> pipe_stats_bits(query_pool_state->createInfo.pipelineStatistics);
3830 query_items = static_cast<uint32_t>(pipe_stats_bits.count());
3831 query_size = query_size_in_bytes * (query_items + query_avail_data);
3832 }
3833 break;
3834
3835 case VK_QUERY_TYPE_TIMESTAMP:
3836 // Timestamp queries write one integer
3837 query_items = 1;
3838 query_size = query_size_in_bytes * (query_items + query_avail_data);
3839 break;
3840
3841 case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT:
3842 // Transform feedback queries write two integers
3843 query_items = 2;
3844 query_size = query_size_in_bytes * (query_items + query_avail_data);
3845 break;
3846
3847 case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR:
3848 // Performance queries store results in a tightly packed array of VkPerformanceCounterResultsKHR
3849 query_items = query_pool_state->perf_counter_index_count;
3850 query_size = sizeof(VkPerformanceCounterResultKHR) * query_items;
3851 if (query_size > stride) {
3852 skip |= LogError(queryPool, "VUID-vkGetQueryPoolResults-queryType-04519",
3853 "vkGetQueryPoolResults() on querypool %s specified stride %" PRIu64
3854 " which must be at least counterIndexCount (%d) "
3855 "multiplied by sizeof(VkPerformanceCounterResultKHR) (%d).",
3856 report_data->FormatHandle(queryPool).c_str(), stride, query_items,
3857 sizeof(VkPerformanceCounterResultKHR));
3858 }
3859 break;
3860
3861 // These cases intentionally fall through to the default
3862 case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR: // VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV
3863 case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR:
3864 case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL:
3865 default:
3866 query_size = 0;
3867 break;
3868 }
3869
3870 if (query_size && (((queryCount - 1) * stride + query_size) > dataSize)) {
3871 skip |= LogError(queryPool, "VUID-vkGetQueryPoolResults-dataSize-00817",
3872 "vkGetQueryPoolResults() on querypool %s specified dataSize %zu which is "
3873 "incompatible with the specified query type and options.",
3874 report_data->FormatHandle(queryPool).c_str(), dataSize);
3875 }
3876 }
3877 }
3878
3879 return skip;
3880 }
3881
ValidateInsertMemoryRange(const VulkanTypedHandle & typed_handle,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize memoryOffset,const char * api_name) const3882 bool CoreChecks::ValidateInsertMemoryRange(const VulkanTypedHandle &typed_handle, const DEVICE_MEMORY_STATE *mem_info,
3883 VkDeviceSize memoryOffset, const char *api_name) const {
3884 bool skip = false;
3885
3886 if (memoryOffset >= mem_info->alloc_info.allocationSize) {
3887 const char *error_code = nullptr;
3888 if (typed_handle.type == kVulkanObjectTypeBuffer) {
3889 if (strcmp(api_name, "vkBindBufferMemory()") == 0) {
3890 error_code = "VUID-vkBindBufferMemory-memoryOffset-01031";
3891 } else {
3892 error_code = "VUID-VkBindBufferMemoryInfo-memoryOffset-01031";
3893 }
3894 } else if (typed_handle.type == kVulkanObjectTypeImage) {
3895 if (strcmp(api_name, "vkBindImageMemory()") == 0) {
3896 error_code = "VUID-vkBindImageMemory-memoryOffset-01046";
3897 } else {
3898 error_code = "VUID-VkBindImageMemoryInfo-memoryOffset-01046";
3899 }
3900 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
3901 error_code = "VUID-VkBindAccelerationStructureMemoryInfoKHR-memoryOffset-02451";
3902 } else {
3903 // Unsupported object type
3904 assert(false);
3905 }
3906
3907 LogObjectList objlist(mem_info->mem);
3908 objlist.add(typed_handle);
3909 skip = LogError(objlist, error_code,
3910 "In %s, attempting to bind %s to %s, memoryOffset=0x%" PRIxLEAST64
3911 " must be less than the memory allocation size 0x%" PRIxLEAST64 ".",
3912 api_name, report_data->FormatHandle(mem_info->mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
3913 memoryOffset, mem_info->alloc_info.allocationSize);
3914 }
3915
3916 return skip;
3917 }
3918
ValidateInsertImageMemoryRange(VkImage image,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const char * api_name) const3919 bool CoreChecks::ValidateInsertImageMemoryRange(VkImage image, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
3920 const char *api_name) const {
3921 return ValidateInsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, api_name);
3922 }
3923
ValidateInsertBufferMemoryRange(VkBuffer buffer,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const char * api_name) const3924 bool CoreChecks::ValidateInsertBufferMemoryRange(VkBuffer buffer, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
3925 const char *api_name) const {
3926 return ValidateInsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, api_name);
3927 }
3928
ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as,const DEVICE_MEMORY_STATE * mem_info,VkDeviceSize mem_offset,const char * api_name) const3929 bool CoreChecks::ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, const DEVICE_MEMORY_STATE *mem_info,
3930 VkDeviceSize mem_offset, const char *api_name) const {
3931 return ValidateInsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset,
3932 api_name);
3933 }
3934
ValidateMemoryTypes(const DEVICE_MEMORY_STATE * mem_info,const uint32_t memory_type_bits,const char * funcName,const char * msgCode) const3935 bool CoreChecks::ValidateMemoryTypes(const DEVICE_MEMORY_STATE *mem_info, const uint32_t memory_type_bits, const char *funcName,
3936 const char *msgCode) const {
3937 bool skip = false;
3938 if (((1 << mem_info->alloc_info.memoryTypeIndex) & memory_type_bits) == 0) {
3939 skip = LogError(mem_info->mem, msgCode,
3940 "%s(): MemoryRequirements->memoryTypeBits (0x%X) for this object type are not compatible with the memory "
3941 "type (0x%X) of %s.",
3942 funcName, memory_type_bits, mem_info->alloc_info.memoryTypeIndex,
3943 report_data->FormatHandle(mem_info->mem).c_str());
3944 }
3945 return skip;
3946 }
3947
// Shared by vkBindBufferMemory and vkBindBufferMemory2: validates the buffer/memory pairing
// (binding state, alignment, size, dedicated allocation, device-address flags, external memory
// handle compatibility, and protected-memory matching). api_name selects which VUID variant is
// reported; anything other than "vkBindBufferMemory()" is treated as the *2 path.
bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
                                          const char *api_name) const {
    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
    // true for vkBindBufferMemory2 / vkBindBufferMemory2KHR (selects the VkBindBufferMemoryInfo VUIDs)
    bool bind_buffer_mem_2 = strcmp(api_name, "vkBindBufferMemory()") != 0;

    bool skip = false;
    if (buffer_state) {
        // Track objects tied to memory
        const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
        skip = ValidateSetMemBinding(mem, obj_struct, api_name);

        const auto mem_info = GetDevMemState(mem);

        // Validate memory requirements alignment
        if (SafeModulo(memoryOffset, buffer_state->requirements.alignment) != 0) {
            const char *vuid =
                bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-memoryOffset-01036" : "VUID-vkBindBufferMemory-memoryOffset-01036";
            skip |= LogError(buffer, vuid,
                             "%s: memoryOffset is 0x%" PRIxLEAST64
                             " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
                             ", returned from a call to vkGetBufferMemoryRequirements with buffer.",
                             api_name, memoryOffset, buffer_state->requirements.alignment);
        }

        // The remaining checks all need the memory object's state; skip them for an unknown handle.
        if (mem_info) {
            // Validate bound memory range information
            skip |= ValidateInsertBufferMemoryRange(buffer, mem_info, memoryOffset, api_name);

            const char *mem_type_vuid =
                bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-memory-01035" : "VUID-vkBindBufferMemory-memory-01035";
            skip |= ValidateMemoryTypes(mem_info, buffer_state->requirements.memoryTypeBits, api_name, mem_type_vuid);

            // Validate memory requirements size
            if (buffer_state->requirements.size > (mem_info->alloc_info.allocationSize - memoryOffset)) {
                const char *vuid =
                    bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-size-01037" : "VUID-vkBindBufferMemory-size-01037";
                skip |= LogError(buffer, vuid,
                                 "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
                                 " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
                                 ", returned from a call to vkGetBufferMemoryRequirements with buffer.",
                                 api_name, mem_info->alloc_info.allocationSize - memoryOffset, buffer_state->requirements.size);
            }

            // Validate dedicated allocation: must target exactly this buffer at offset zero
            if (mem_info->is_dedicated && ((mem_info->dedicated_buffer != buffer) || (memoryOffset != 0))) {
                const char *vuid =
                    bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-memory-01508" : "VUID-vkBindBufferMemory-memory-01508";
                LogObjectList objlist(buffer);
                objlist.add(mem);
                objlist.add(mem_info->dedicated_buffer);
                skip |= LogError(objlist, vuid,
                                 "%s: for dedicated %s, VkMemoryDedicatedAllocateInfoKHR::buffer %s must be equal "
                                 "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
                                 api_name, report_data->FormatHandle(mem).c_str(),
                                 report_data->FormatHandle(mem_info->dedicated_buffer).c_str(),
                                 report_data->FormatHandle(buffer).c_str(), memoryOffset);
            }

            // A device-address buffer requires memory allocated with the DEVICE_ADDRESS flag.
            // NOTE(review): unlike the other checks here, the VUID is not switched on
            // bind_buffer_mem_2 — confirm whether a VkBindBufferMemoryInfo variant should be used.
            auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(mem_info->alloc_info.pNext);
            if (enabled_features.core12.bufferDeviceAddress &&
                (buffer_state->createInfo.usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR) &&
                (!chained_flags_struct || !(chained_flags_struct->flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR))) {
                skip |= LogError(buffer, "VUID-vkBindBufferMemory-bufferDeviceAddress-03339",
                                 "%s: If buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR bit set, "
                                 "memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR bit set.",
                                 api_name);
            }

            // Validate export memory handles: the memory's export types must cover the buffer's
            if ((mem_info->export_handle_type_flags != 0) &&
                ((mem_info->export_handle_type_flags & buffer_state->external_memory_handle) == 0)) {
                const char *vuid =
                    bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-memory-02726" : "VUID-vkBindBufferMemory-memory-02726";
                LogObjectList objlist(buffer);
                objlist.add(mem);
                skip |= LogError(objlist, vuid,
                                 "%s: The VkDeviceMemory (%s) has an external handleType of %s which does not include at least one "
                                 "handle from VkBuffer (%s) handleType %s.",
                                 api_name, report_data->FormatHandle(mem).c_str(),
                                 string_VkExternalMemoryHandleTypeFlags(mem_info->export_handle_type_flags).c_str(),
                                 report_data->FormatHandle(buffer).c_str(),
                                 string_VkExternalMemoryHandleTypeFlags(buffer_state->external_memory_handle).c_str());
            }

            // Validate import memory handles (AHB imports have their own dedicated check)
            if (mem_info->is_import_ahb == true) {
                skip |= ValidateBufferImportedHandleANDROID(api_name, buffer_state->external_memory_handle, mem, buffer);
            } else if (mem_info->is_import == true) {
                if ((mem_info->import_handle_type_flags & buffer_state->external_memory_handle) == 0) {
                    // VUID depends on both the API variant and AHB extension availability
                    const char *vuid = nullptr;
                    if ((bind_buffer_mem_2) && (device_extensions.vk_android_external_memory_android_hardware_buffer)) {
                        vuid = "VUID-VkBindBufferMemoryInfo-memory-02985";
                    } else if ((!bind_buffer_mem_2) && (device_extensions.vk_android_external_memory_android_hardware_buffer)) {
                        vuid = "VUID-vkBindBufferMemory-memory-02985";
                    } else if ((bind_buffer_mem_2) && (!device_extensions.vk_android_external_memory_android_hardware_buffer)) {
                        vuid = "VUID-VkBindBufferMemoryInfo-memory-02727";
                    } else if ((!bind_buffer_mem_2) && (!device_extensions.vk_android_external_memory_android_hardware_buffer)) {
                        vuid = "VUID-vkBindBufferMemory-memory-02727";
                    }
                    LogObjectList objlist(buffer);
                    objlist.add(mem);
                    skip |= LogError(objlist, vuid,
                                     "%s: The VkDeviceMemory (%s) was created with an import operation with handleType of %s which "
                                     "is not set in the VkBuffer (%s) VkExternalMemoryBufferCreateInfo::handleType (%s)",
                                     api_name, report_data->FormatHandle(mem).c_str(),
                                     string_VkExternalMemoryHandleTypeFlags(mem_info->import_handle_type_flags).c_str(),
                                     report_data->FormatHandle(buffer).c_str(),
                                     string_VkExternalMemoryHandleTypeFlags(buffer_state->external_memory_handle).c_str());
                }
            }

            // Validate mix of protected buffer and memory: both sides must agree
            if ((buffer_state->unprotected == false) && (mem_info->unprotected == true)) {
                const char *vuid =
                    bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-None-01898" : "VUID-vkBindBufferMemory-None-01898";
                LogObjectList objlist(buffer);
                objlist.add(mem);
                skip |= LogError(objlist, vuid,
                                 "%s: The VkDeviceMemory (%s) was not created with protected memory but the VkBuffer (%s) was set "
                                 "to use protected memory.",
                                 api_name, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(buffer).c_str());
            } else if ((buffer_state->unprotected == true) && (mem_info->unprotected == false)) {
                const char *vuid =
                    bind_buffer_mem_2 ? "VUID-VkBindBufferMemoryInfo-None-01899" : "VUID-vkBindBufferMemory-None-01899";
                LogObjectList objlist(buffer);
                objlist.add(mem);
                skip |= LogError(objlist, vuid,
                                 "%s: The VkDeviceMemory (%s) was created with protected memory but the VkBuffer (%s) was not set "
                                 "to use protected memory.",
                                 api_name, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(buffer).c_str());
            }
        }
    }
    return skip;
}
4083
PreCallValidateBindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory mem,VkDeviceSize memoryOffset) const4084 bool CoreChecks::PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
4085 VkDeviceSize memoryOffset) const {
4086 const char *api_name = "vkBindBufferMemory()";
4087 return ValidateBindBufferMemory(buffer, mem, memoryOffset, api_name);
4088 }
4089
PreCallValidateBindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfoKHR * pBindInfos) const4090 bool CoreChecks::PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
4091 const VkBindBufferMemoryInfoKHR *pBindInfos) const {
4092 char api_name[64];
4093 bool skip = false;
4094
4095 for (uint32_t i = 0; i < bindInfoCount; i++) {
4096 sprintf(api_name, "vkBindBufferMemory2() pBindInfos[%u]", i);
4097 skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
4098 }
4099 return skip;
4100 }
4101
PreCallValidateBindBufferMemory2KHR(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfoKHR * pBindInfos) const4102 bool CoreChecks::PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4103 const VkBindBufferMemoryInfoKHR *pBindInfos) const {
4104 char api_name[64];
4105 bool skip = false;
4106
4107 for (uint32_t i = 0; i < bindInfoCount; i++) {
4108 sprintf(api_name, "vkBindBufferMemory2KHR() pBindInfos[%u]", i);
4109 skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
4110 }
4111 return skip;
4112 }
4113
PreCallValidateGetImageMemoryRequirements(VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements) const4114 bool CoreChecks::PreCallValidateGetImageMemoryRequirements(VkDevice device, VkImage image,
4115 VkMemoryRequirements *pMemoryRequirements) const {
4116 bool skip = false;
4117 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
4118 skip |= ValidateGetImageMemoryRequirementsANDROID(image, "vkGetImageMemoryRequirements()");
4119 }
4120
4121 const IMAGE_STATE *image_state = GetImageState(image);
4122 if (image_state) {
4123 // Checks for no disjoint bit
4124 if (image_state->disjoint == true) {
4125 skip |= LogError(image, "VUID-vkGetImageMemoryRequirements-image-01588",
4126 "vkGetImageMemoryRequirements(): %s must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT "
4127 "(need to use vkGetImageMemoryRequirements2).",
4128 report_data->FormatHandle(image).c_str());
4129 }
4130 }
4131 return skip;
4132 }
4133
ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2 * pInfo,const char * func_name) const4134 bool CoreChecks::ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2 *pInfo, const char *func_name) const {
4135 bool skip = false;
4136 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
4137 skip |= ValidateGetImageMemoryRequirementsANDROID(pInfo->image, func_name);
4138 }
4139
4140 const IMAGE_STATE *image_state = GetImageState(pInfo->image);
4141 const VkFormat image_format = image_state->createInfo.format;
4142 const VkImageTiling image_tiling = image_state->createInfo.tiling;
4143 const VkImagePlaneMemoryRequirementsInfo *image_plane_info =
4144 lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
4145
4146 if ((FormatIsMultiplane(image_format)) && (image_state->disjoint == true) && (image_plane_info == nullptr)) {
4147 skip |= LogError(pInfo->image, "VUID-VkImageMemoryRequirementsInfo2-image-01589",
4148 "%s: %s image was created with a multi-planar format (%s) and "
4149 "VK_IMAGE_CREATE_DISJOINT_BIT, but the current pNext doesn't include a "
4150 "VkImagePlaneMemoryRequirementsInfo struct",
4151 func_name, report_data->FormatHandle(pInfo->image).c_str(), string_VkFormat(image_format));
4152 }
4153
4154 if ((image_state->disjoint == false) && (image_plane_info != nullptr)) {
4155 skip |= LogError(pInfo->image, "VUID-VkImageMemoryRequirementsInfo2-image-01590",
4156 "%s: %s image was not created with VK_IMAGE_CREATE_DISJOINT_BIT,"
4157 "but the current pNext includes a VkImagePlaneMemoryRequirementsInfo struct",
4158 func_name, report_data->FormatHandle(pInfo->image).c_str());
4159 }
4160
4161 if ((FormatIsMultiplane(image_format) == false) && (image_tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) &&
4162 (image_plane_info != nullptr)) {
4163 skip |= LogError(pInfo->image, "VUID-VkImageMemoryRequirementsInfo2-image-02280",
4164 "%s: %s image is a single-plane format (%s) and does not have tiling of "
4165 "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,"
4166 "but the current pNext includes a VkImagePlaneMemoryRequirementsInfo struct",
4167 func_name, report_data->FormatHandle(pInfo->image).c_str(), string_VkFormat(image_format));
4168 }
4169
4170 if (image_plane_info != nullptr) {
4171 if ((image_tiling == VK_IMAGE_TILING_LINEAR) || (image_tiling == VK_IMAGE_TILING_OPTIMAL)) {
4172 // Make sure planeAspect is only a single, valid plane
4173 uint32_t planes = FormatPlaneCount(image_format);
4174 VkImageAspectFlags aspect = image_plane_info->planeAspect;
4175 if ((2 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT)) {
4176 skip |= LogError(
4177 pInfo->image, "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02281",
4178 "%s: Image %s VkImagePlaneMemoryRequirementsInfo::planeAspect is %s but can only be VK_IMAGE_ASPECT_PLANE_0_BIT"
4179 "or VK_IMAGE_ASPECT_PLANE_1_BIT.",
4180 func_name, report_data->FormatHandle(image_state->image).c_str(), string_VkImageAspectFlags(aspect).c_str());
4181 }
4182 if ((3 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT) &&
4183 (aspect != VK_IMAGE_ASPECT_PLANE_2_BIT)) {
4184 skip |= LogError(
4185 pInfo->image, "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02281",
4186 "%s: Image %s VkImagePlaneMemoryRequirementsInfo::planeAspect is %s but can only be VK_IMAGE_ASPECT_PLANE_0_BIT"
4187 "or VK_IMAGE_ASPECT_PLANE_1_BIT or VK_IMAGE_ASPECT_PLANE_2_BIT.",
4188 func_name, report_data->FormatHandle(image_state->image).c_str(), string_VkImageAspectFlags(aspect).c_str());
4189 }
4190 }
4191 }
4192 return skip;
4193 }
4194
PreCallValidateGetImageMemoryRequirements2(VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements) const4195 bool CoreChecks::PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
4196 VkMemoryRequirements2 *pMemoryRequirements) const {
4197 return ValidateGetImageMemoryRequirements2(pInfo, "vkGetImageMemoryRequirements2()");
4198 }
4199
PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements) const4200 bool CoreChecks::PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
4201 VkMemoryRequirements2 *pMemoryRequirements) const {
4202 return ValidateGetImageMemoryRequirements2(pInfo, "vkGetImageMemoryRequirements2KHR()");
4203 }
4204
PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties) const4205 bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
4206 const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
4207 VkImageFormatProperties2 *pImageFormatProperties) const {
4208 // Can't wrap AHB-specific validation in a device extension check here, but no harm
4209 bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(pImageFormatInfo, pImageFormatProperties);
4210 return skip;
4211 }
4212
PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties) const4213 bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
4214 const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
4215 VkImageFormatProperties2 *pImageFormatProperties) const {
4216 // Can't wrap AHB-specific validation in a device extension check here, but no harm
4217 bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(pImageFormatInfo, pImageFormatProperties);
4218 return skip;
4219 }
4220
PreCallValidateDestroyPipeline(VkDevice device,VkPipeline pipeline,const VkAllocationCallbacks * pAllocator) const4221 bool CoreChecks::PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline,
4222 const VkAllocationCallbacks *pAllocator) const {
4223 const PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
4224 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
4225 bool skip = false;
4226 if (pipeline_state) {
4227 skip |= ValidateObjectNotInUse(pipeline_state, obj_struct, "vkDestroyPipeline", "VUID-vkDestroyPipeline-pipeline-00765");
4228 }
4229 return skip;
4230 }
4231
PreCallValidateDestroySampler(VkDevice device,VkSampler sampler,const VkAllocationCallbacks * pAllocator) const4232 bool CoreChecks::PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) const {
4233 const SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
4234 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
4235 bool skip = false;
4236 if (sampler_state) {
4237 skip |= ValidateObjectNotInUse(sampler_state, obj_struct, "vkDestroySampler", "VUID-vkDestroySampler-sampler-01082");
4238 }
4239 return skip;
4240 }
4241
PreCallValidateDestroyDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator) const4242 bool CoreChecks::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
4243 const VkAllocationCallbacks *pAllocator) const {
4244 const DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
4245 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
4246 bool skip = false;
4247 if (desc_pool_state) {
4248 skip |= ValidateObjectNotInUse(desc_pool_state, obj_struct, "vkDestroyDescriptorPool",
4249 "VUID-vkDestroyDescriptorPool-descriptorPool-00303");
4250 }
4251 return skip;
4252 }
4253
4254 // Verify cmdBuffer in given cb_node is not in global in-flight set, and return skip result
4255 // If this is a secondary command buffer, then make sure its primary is also in-flight
4256 // If primary is not in-flight, then remove secondary from global in-flight set
4257 // This function is only valid at a point when cmdBuffer is being reset or freed
CheckCommandBufferInFlight(const CMD_BUFFER_STATE * cb_node,const char * action,const char * error_code) const4258 bool CoreChecks::CheckCommandBufferInFlight(const CMD_BUFFER_STATE *cb_node, const char *action, const char *error_code) const {
4259 bool skip = false;
4260 if (cb_node->in_use.load()) {
4261 skip |= LogError(cb_node->commandBuffer, error_code, "Attempt to %s %s which is in use.", action,
4262 report_data->FormatHandle(cb_node->commandBuffer).c_str());
4263 }
4264 return skip;
4265 }
4266
4267 // Iterate over all cmdBuffers in given commandPool and verify that each is not in use
CheckCommandBuffersInFlight(const COMMAND_POOL_STATE * pPool,const char * action,const char * error_code) const4268 bool CoreChecks::CheckCommandBuffersInFlight(const COMMAND_POOL_STATE *pPool, const char *action, const char *error_code) const {
4269 bool skip = false;
4270 for (auto cmd_buffer : pPool->commandBuffers) {
4271 skip |= CheckCommandBufferInFlight(GetCBState(cmd_buffer), action, error_code);
4272 }
4273 return skip;
4274 }
4275
PreCallValidateFreeCommandBuffers(VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers) const4276 bool CoreChecks::PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
4277 const VkCommandBuffer *pCommandBuffers) const {
4278 bool skip = false;
4279 for (uint32_t i = 0; i < commandBufferCount; i++) {
4280 const auto *cb_node = GetCBState(pCommandBuffers[i]);
4281 // Delete CB information structure, and remove from commandBufferMap
4282 if (cb_node) {
4283 skip |= CheckCommandBufferInFlight(cb_node, "free", "VUID-vkFreeCommandBuffers-pCommandBuffers-00047");
4284 }
4285 }
4286 return skip;
4287 }
4288
PreCallValidateCreateCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool) const4289 bool CoreChecks::PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
4290 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) const {
4291 bool skip = false;
4292 skip |= ValidateDeviceQueueFamily(pCreateInfo->queueFamilyIndex, "vkCreateCommandPool", "pCreateInfo->queueFamilyIndex",
4293 "VUID-vkCreateCommandPool-queueFamilyIndex-01937");
4294 if ((enabled_features.core11.protectedMemory == VK_FALSE) &&
4295 ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) != 0)) {
4296 skip |= LogError(device, "VUID-VkCommandPoolCreateInfo-flags-02860",
4297 "vkCreateCommandPool(): the protectedMemory device feature is disabled: CommandPools cannot be created "
4298 "with the VK_COMMAND_POOL_CREATE_PROTECTED_BIT set.");
4299 }
4300
4301 return skip;
4302 }
4303
PreCallValidateCreateQueryPool(VkDevice device,const VkQueryPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkQueryPool * pQueryPool) const4304 bool CoreChecks::PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
4305 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) const {
4306 if (disabled[query_validation]) return false;
4307 bool skip = false;
4308 if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) {
4309 if (!enabled_features.core.pipelineStatisticsQuery) {
4310 skip |= LogError(device, "VUID-VkQueryPoolCreateInfo-queryType-00791",
4311 "vkCreateQueryPool(): Query pool with type VK_QUERY_TYPE_PIPELINE_STATISTICS created on a device with "
4312 "VkDeviceCreateInfo.pEnabledFeatures.pipelineStatisticsQuery == VK_FALSE.");
4313 }
4314 }
4315 if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
4316 if (!enabled_features.performance_query_features.performanceCounterQueryPools) {
4317 skip |=
4318 LogError(device, "VUID-VkQueryPoolPerformanceCreateInfoKHR-performanceCounterQueryPools-03237",
4319 "vkCreateQueryPool(): Query pool with type VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR created on a device with "
4320 "VkPhysicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools == VK_FALSE.");
4321 }
4322
4323 auto perf_ci = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
4324 if (!perf_ci) {
4325 skip |= LogError(
4326 device, "VUID-VkQueryPoolCreateInfo-queryType-03222",
4327 "vkCreateQueryPool(): Query pool with type VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR created but the pNext chain of "
4328 "pCreateInfo does not contain in instance of VkQueryPoolPerformanceCreateInfoKHR.");
4329 } else {
4330 const auto &perf_counter_iter = physical_device_state->perf_counters.find(perf_ci->queueFamilyIndex);
4331 if (perf_counter_iter == physical_device_state->perf_counters.end()) {
4332 skip |= LogError(
4333 device, "VUID-VkQueryPoolPerformanceCreateInfoKHR-queueFamilyIndex-03236",
4334 "vkCreateQueryPool(): VkQueryPerformanceCreateInfoKHR::queueFamilyIndex is not a valid queue family index.");
4335 } else {
4336 const QUEUE_FAMILY_PERF_COUNTERS *perf_counters = perf_counter_iter->second.get();
4337 for (uint32_t idx = 0; idx < perf_ci->counterIndexCount; idx++) {
4338 if (perf_ci->pCounterIndices[idx] >= perf_counters->counters.size()) {
4339 skip |= LogError(
4340 device, "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321",
4341 "vkCreateQueryPool(): VkQueryPerformanceCreateInfoKHR::pCounterIndices[%u] = %u is not a valid "
4342 "counter index.",
4343 idx, perf_ci->pCounterIndices[idx]);
4344 }
4345 }
4346 }
4347 }
4348 }
4349 return skip;
4350 }
4351
PreCallValidateDestroyCommandPool(VkDevice device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator) const4352 bool CoreChecks::PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
4353 const VkAllocationCallbacks *pAllocator) const {
4354 const COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
4355 bool skip = false;
4356 if (cp_state) {
4357 // Verify that command buffers in pool are complete (not in-flight)
4358 skip |= CheckCommandBuffersInFlight(cp_state, "destroy command pool with", "VUID-vkDestroyCommandPool-commandPool-00041");
4359 }
4360 return skip;
4361 }
4362
PreCallValidateResetCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolResetFlags flags) const4363 bool CoreChecks::PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const {
4364 const auto *command_pool_state = GetCommandPoolState(commandPool);
4365 return CheckCommandBuffersInFlight(command_pool_state, "reset command pool with", "VUID-vkResetCommandPool-commandPool-00040");
4366 }
4367
PreCallValidateResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences) const4368 bool CoreChecks::PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) const {
4369 bool skip = false;
4370 for (uint32_t i = 0; i < fenceCount; ++i) {
4371 const auto pFence = GetFenceState(pFences[i]);
4372 if (pFence && pFence->scope == kSyncScopeInternal && pFence->state == FENCE_INFLIGHT) {
4373 skip |= LogError(pFences[i], "VUID-vkResetFences-pFences-01123", "%s is in use.",
4374 report_data->FormatHandle(pFences[i]).c_str());
4375 }
4376 }
4377 return skip;
4378 }
4379
PreCallValidateDestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator) const4380 bool CoreChecks::PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
4381 const VkAllocationCallbacks *pAllocator) const {
4382 const FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
4383 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
4384 bool skip = false;
4385 if (framebuffer_state) {
4386 skip |= ValidateObjectNotInUse(framebuffer_state, obj_struct, "vkDestroyFramebuffer",
4387 "VUID-vkDestroyFramebuffer-framebuffer-00892");
4388 }
4389 return skip;
4390 }
4391
PreCallValidateDestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator) const4392 bool CoreChecks::PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
4393 const VkAllocationCallbacks *pAllocator) const {
4394 const RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
4395 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
4396 bool skip = false;
4397 if (rp_state) {
4398 skip |= ValidateObjectNotInUse(rp_state, obj_struct, "vkDestroyRenderPass", "VUID-vkDestroyRenderPass-renderPass-00873");
4399 }
4400 return skip;
4401 }
4402
4403 // Access helper functions for external modules
GetPDFormatProperties(const VkFormat format) const4404 VkFormatProperties CoreChecks::GetPDFormatProperties(const VkFormat format) const {
4405 VkFormatProperties format_properties;
4406 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
4407 return format_properties;
4408 }
4409
// Validates the VK_EXT_vertex_attribute_divisor state chained to each graphics pipeline's
// vertex-input state: binding indices must be in range, divisor values must respect the
// device limit and the enabled divisor features, and each divisor's binding must use
// VK_VERTEX_INPUT_RATE_INSTANCE.
// pipe_state_vec: tracked pipeline state, parallel to pipe_cis. count: number of pipelines.
// Returns true when validation failed (skip the call).
bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::shared_ptr<PIPELINE_STATE>> const &pipe_state_vec,
                                                const uint32_t count, const VkGraphicsPipelineCreateInfo *pipe_cis) const {
    bool skip = false;
    const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;

    for (uint32_t i = 0; i < count; i++) {
        // Only pipelines that chain a divisor-state struct need these checks.
        auto pvids_ci = lvl_find_in_chain<VkPipelineVertexInputDivisorStateCreateInfoEXT>(pipe_cis[i].pVertexInputState->pNext);
        if (nullptr == pvids_ci) continue;

        const PIPELINE_STATE *pipe_state = pipe_state_vec[i].get();
        for (uint32_t j = 0; j < pvids_ci->vertexBindingDivisorCount; j++) {
            const VkVertexInputBindingDivisorDescriptionEXT *vibdd = &(pvids_ci->pVertexBindingDivisors[j]);
            // Binding index must be within the device's vertex-input binding limit.
            if (vibdd->binding >= device_limits->maxVertexInputBindings) {
                skip |= LogError(
                    device, "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] binding index of (%1u) exceeds device maxVertexInputBindings (%1u).",
                    i, j, vibdd->binding, device_limits->maxVertexInputBindings);
            }
            // Divisor magnitude is capped by the extension's reported limit.
            if (vibdd->divisor > phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor) {
                skip |= LogError(
                    device, "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] divisor of (%1u) exceeds extension maxVertexAttribDivisor (%1u).",
                    i, j, vibdd->divisor, phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor);
            }
            // Divisor 0 is only legal with the ZeroDivisor feature enabled.
            if ((0 == vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateZeroDivisor) {
                skip |= LogError(
                    device, "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] divisor must not be 0 when vertexAttributeInstanceRateZeroDivisor feature is not "
                    "enabled.",
                    i, j);
            }
            // Any divisor other than 1 requires the base divisor feature.
            if ((1 != vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateDivisor) {
                skip |= LogError(
                    device, "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] divisor (%1u) must be 1 when vertexAttributeInstanceRateDivisor feature is not "
                    "enabled.",
                    i, j, vibdd->divisor);
            }

            // Find the corresponding binding description and validate input rate setting
            bool failed_01871 = true;
            for (size_t k = 0; k < pipe_state->vertex_binding_descriptions_.size(); k++) {
                if ((vibdd->binding == pipe_state->vertex_binding_descriptions_[k].binding) &&
                    (VK_VERTEX_INPUT_RATE_INSTANCE == pipe_state->vertex_binding_descriptions_[k].inputRate)) {
                    failed_01871 = false;
                    break;
                }
            }
            if (failed_01871) {  // Description not found, or has incorrect inputRate value
                skip |= LogError(
                    device, "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871",
                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
                    "pVertexBindingDivisors[%1u] specifies binding index (%1u), but that binding index's "
                    "VkVertexInputBindingDescription.inputRate member is not VK_VERTEX_INPUT_RATE_INSTANCE.",
                    i, j, vibdd->binding);
            }
        }
    }
    return skip;
}
4474
ValidatePipelineCacheControlFlags(VkPipelineCreateFlags flags,uint32_t index,const char * caller_name,const char * vuid) const4475 bool CoreChecks::ValidatePipelineCacheControlFlags(VkPipelineCreateFlags flags, uint32_t index, const char *caller_name,
4476 const char *vuid) const {
4477 bool skip = false;
4478 if (enabled_features.pipeline_creation_cache_control_features.pipelineCreationCacheControl == VK_FALSE) {
4479 const VkPipelineCreateFlags invalid_flags =
4480 VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT | VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT;
4481 if ((flags & invalid_flags) != 0) {
4482 skip |= LogError(device, vuid,
4483 "%s(): pipelineCreationCacheControl is turned off but pipeline[%u] has VkPipelineCreateFlags "
4484 "containing VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or "
4485 "VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT",
4486 caller_name, index);
4487 }
4488 }
4489 return skip;
4490 }
4491
PreCallValidateCreatePipelineCache(VkDevice device,const VkPipelineCacheCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineCache * pPipelineCache) const4492 bool CoreChecks::PreCallValidateCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
4493 const VkAllocationCallbacks *pAllocator,
4494 VkPipelineCache *pPipelineCache) const {
4495 bool skip = false;
4496 if (enabled_features.pipeline_creation_cache_control_features.pipelineCreationCacheControl == VK_FALSE) {
4497 if ((pCreateInfo->flags & VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT) != 0) {
4498 skip |= LogError(device, "VUID-VkPipelineCacheCreateInfo-pipelineCreationCacheControl-02892",
4499 "vkCreatePipelineCache(): pipelineCreationCacheControl is turned off but pCreateInfo::flags contains "
4500 "VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT");
4501 }
4502 }
4503 return skip;
4504 }
4505
PreCallValidateCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * cgpl_state_data) const4506 bool CoreChecks::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
4507 const VkGraphicsPipelineCreateInfo *pCreateInfos,
4508 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
4509 void *cgpl_state_data) const {
4510 bool skip = StateTracker::PreCallValidateCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
4511 pPipelines, cgpl_state_data);
4512 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
4513
4514 for (uint32_t i = 0; i < count; i++) {
4515 skip |= ValidatePipelineLocked(cgpl_state->pipe_state, i);
4516 }
4517
4518 for (uint32_t i = 0; i < count; i++) {
4519 skip |= ValidatePipelineUnlocked(cgpl_state->pipe_state[i].get(), i);
4520 }
4521
4522 if (device_extensions.vk_ext_vertex_attribute_divisor) {
4523 skip |= ValidatePipelineVertexDivisors(cgpl_state->pipe_state, count, pCreateInfos);
4524 }
4525
4526 return skip;
4527 }
4528
PreCallValidateCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * ccpl_state_data) const4529 bool CoreChecks::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
4530 const VkComputePipelineCreateInfo *pCreateInfos,
4531 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
4532 void *ccpl_state_data) const {
4533 bool skip = StateTracker::PreCallValidateCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
4534 pPipelines, ccpl_state_data);
4535
4536 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
4537 for (uint32_t i = 0; i < count; i++) {
4538 // TODO: Add Compute Pipeline Verification
4539 skip |= ValidateComputePipelineShaderState(ccpl_state->pipe_state[i].get());
4540 skip |= ValidatePipelineCacheControlFlags(pCreateInfos->flags, i, "vkCreateComputePipelines",
4541 "VUID-VkComputePipelineCreateInfo-pipelineCreationCacheControl-02875");
4542 }
4543 return skip;
4544 }
4545
PreCallValidateCreateRayTracingPipelinesNV(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoNV * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * crtpl_state_data) const4546 bool CoreChecks::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
4547 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
4548 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
4549 void *crtpl_state_data) const {
4550 bool skip = StateTracker::PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
4551 pPipelines, crtpl_state_data);
4552
4553 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
4554 for (uint32_t i = 0; i < count; i++) {
4555 PIPELINE_STATE *pipeline = crtpl_state->pipe_state[i].get();
4556 if (pipeline->raytracingPipelineCI.flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
4557 const PIPELINE_STATE *base_pipeline = nullptr;
4558 if (pipeline->raytracingPipelineCI.basePipelineIndex != -1) {
4559 base_pipeline = crtpl_state->pipe_state[pipeline->raytracingPipelineCI.basePipelineIndex].get();
4560 } else if (pipeline->raytracingPipelineCI.basePipelineHandle != VK_NULL_HANDLE) {
4561 base_pipeline = GetPipelineState(pipeline->raytracingPipelineCI.basePipelineHandle);
4562 }
4563 if (!base_pipeline || !(base_pipeline->getPipelineCreateFlags() & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) {
4564 skip |= LogError(
4565 device, "VUID-vkCreateRayTracingPipelinesNV-flags-03416",
4566 "vkCreateRayTracingPipelinesNV: If the flags member of any element of pCreateInfos contains the "
4567 "VK_PIPELINE_CREATE_DERIVATIVE_BIT flag,"
4568 "the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set.");
4569 }
4570 }
4571 skip |= ValidateRayTracingPipeline(pipeline, /*isKHR*/ false);
4572 skip |= ValidatePipelineCacheControlFlags(pCreateInfos->flags, i, "vkCreateRayTracingPipelinesNV",
4573 "VUID-VkRayTracingPipelineCreateInfoNV-pipelineCreationCacheControl-02905");
4574 }
4575 return skip;
4576 }
4577
PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device,VkPipelineCache pipelineCache,uint32_t count,const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines,void * crtpl_state_data) const4578 bool CoreChecks::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
4579 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
4580 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
4581 void *crtpl_state_data) const {
4582 bool skip = StateTracker::PreCallValidateCreateRayTracingPipelinesKHR(device, pipelineCache, count, pCreateInfos, pAllocator,
4583 pPipelines, crtpl_state_data);
4584
4585 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
4586 for (uint32_t i = 0; i < count; i++) {
4587 PIPELINE_STATE *pipeline = crtpl_state->pipe_state[i].get();
4588 if (pipeline->raytracingPipelineCI.flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
4589 const PIPELINE_STATE *base_pipeline = nullptr;
4590 if (pipeline->raytracingPipelineCI.basePipelineIndex != -1) {
4591 base_pipeline = crtpl_state->pipe_state[pipeline->raytracingPipelineCI.basePipelineIndex].get();
4592 } else if (pipeline->raytracingPipelineCI.basePipelineHandle != VK_NULL_HANDLE) {
4593 base_pipeline = GetPipelineState(pipeline->raytracingPipelineCI.basePipelineHandle);
4594 }
4595 if (!base_pipeline || !(base_pipeline->getPipelineCreateFlags() & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) {
4596 skip |= LogError(
4597 device, "VUID-vkCreateRayTracingPipelinesKHR-flags-03416",
4598 "vkCreateRayTracingPipelinesKHR: If the flags member of any element of pCreateInfos contains the "
4599 "VK_PIPELINE_CREATE_DERIVATIVE_BIT flag,"
4600 "the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set.");
4601 }
4602 }
4603 skip |= ValidateRayTracingPipeline(pipeline, /*isKHR*/ true);
4604 skip |= ValidatePipelineCacheControlFlags(pCreateInfos->flags, i, "vkCreateRayTracingPipelinesKHR",
4605 "VUID-VkRayTracingPipelineCreateInfoKHR-pipelineCreationCacheControl-02905");
4606 }
4607 return skip;
4608 }
4609
PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device,const VkPipelineInfoKHR * pPipelineInfo,uint32_t * pExecutableCount,VkPipelineExecutablePropertiesKHR * pProperties) const4610 bool CoreChecks::PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR *pPipelineInfo,
4611 uint32_t *pExecutableCount,
4612 VkPipelineExecutablePropertiesKHR *pProperties) const {
4613 bool skip = false;
4614
4615 if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
4616 skip |= LogError(device, "VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableInfo-03270",
4617 "vkGetPipelineExecutablePropertiesKHR called when pipelineExecutableInfo feature is not enabled.");
4618 }
4619
4620 return skip;
4621 }
4622
ValidatePipelineExecutableInfo(VkDevice device,const VkPipelineExecutableInfoKHR * pExecutableInfo) const4623 bool CoreChecks::ValidatePipelineExecutableInfo(VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo) const {
4624 bool skip = false;
4625
4626 if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
4627 skip |= LogError(device, "VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272",
4628 "vkGetPipelineExecutableStatisticsKHR called when pipelineExecutableInfo feature is not enabled.");
4629 }
4630
4631 VkPipelineInfoKHR pi = {};
4632 pi.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
4633 pi.pipeline = pExecutableInfo->pipeline;
4634
4635 // We could probably cache this instead of fetching it every time
4636 uint32_t executableCount = 0;
4637 DispatchGetPipelineExecutablePropertiesKHR(device, &pi, &executableCount, NULL);
4638
4639 if (pExecutableInfo->executableIndex >= executableCount) {
4640 skip |=
4641 LogError(pExecutableInfo->pipeline, "VUID-VkPipelineExecutableInfoKHR-executableIndex-03275",
4642 "VkPipelineExecutableInfo::executableIndex (%1u) must be less than the number of executables associated with "
4643 "the pipeline (%1u) as returned by vkGetPipelineExecutablePropertiessKHR",
4644 pExecutableInfo->executableIndex, executableCount);
4645 }
4646
4647 return skip;
4648 }
4649
PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device,const VkPipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pStatisticCount,VkPipelineExecutableStatisticKHR * pStatistics) const4650 bool CoreChecks::PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device,
4651 const VkPipelineExecutableInfoKHR *pExecutableInfo,
4652 uint32_t *pStatisticCount,
4653 VkPipelineExecutableStatisticKHR *pStatistics) const {
4654 bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
4655
4656 const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
4657 if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR)) {
4658 skip |= LogError(pExecutableInfo->pipeline, "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274",
4659 "vkGetPipelineExecutableStatisticsKHR called on a pipeline created without the "
4660 "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR flag set");
4661 }
4662
4663 return skip;
4664 }
4665
PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(VkDevice device,const VkPipelineExecutableInfoKHR * pExecutableInfo,uint32_t * pInternalRepresentationCount,VkPipelineExecutableInternalRepresentationKHR * pStatistics) const4666 bool CoreChecks::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
4667 VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pInternalRepresentationCount,
4668 VkPipelineExecutableInternalRepresentationKHR *pStatistics) const {
4669 bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
4670
4671 const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
4672 if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR)) {
4673 skip |= LogError(pExecutableInfo->pipeline, "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278",
4674 "vkGetPipelineExecutableInternalRepresentationsKHR called on a pipeline created without the "
4675 "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR flag set");
4676 }
4677
4678 return skip;
4679 }
4680
PreCallValidateCreateDescriptorSetLayout(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout) const4681 bool CoreChecks::PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
4682 const VkAllocationCallbacks *pAllocator,
4683 VkDescriptorSetLayout *pSetLayout) const {
4684 return cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
4685 this, pCreateInfo, IsExtEnabled(device_extensions.vk_khr_push_descriptor), phys_dev_ext_props.max_push_descriptors,
4686 IsExtEnabled(device_extensions.vk_ext_descriptor_indexing), &enabled_features.core12,
4687 &enabled_features.inline_uniform_block, &phys_dev_ext_props.inline_uniform_block_props, &device_extensions);
4688 }
4689
// Used by CreatePipelineLayout and CmdPushConstants.
// Note that the index argument is optional and only used by CreatePipelineLayout.
// Validates a single push-constant range: (offset, size) must fit within
// maxPushConstantsSize, size must be a non-zero multiple of 4, and offset must be a
// multiple of 4. The caller_name string selects which VUID family is reported, since
// VkPushConstantRange and vkCmdPushConstants have distinct VUIDs for the same rules.
bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t size, const char *caller_name,
                                           uint32_t index = 0) const {
    if (disabled[push_constant_range]) return false;
    uint32_t const maxPushConstantsSize = phys_dev_props.limits.maxPushConstantsSize;
    bool skip = false;
    // Check that offset + size don't exceed the max.
    // Prevent arithmetic overflow here by avoiding addition and testing in this order.
    if ((offset >= maxPushConstantsSize) || (size > maxPushConstantsSize - offset)) {
        // This is a pain just to adapt the log message to the caller, but better to sort it out only when there is a problem.
        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
            if (offset >= maxPushConstantsSize) {
                skip |= LogError(
                    device, "VUID-VkPushConstantRange-offset-00294",
                    "%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantSize of %u.",
                    caller_name, index, offset, maxPushConstantsSize);
            }
            if (size > maxPushConstantsSize - offset) {
                skip |= LogError(device, "VUID-VkPushConstantRange-size-00298",
                                 "%s call has push constants index %u with offset %u and size %u that exceeds this device's "
                                 "maxPushConstantSize of %u.",
                                 caller_name, index, offset, size, maxPushConstantsSize);
            }
        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
            if (offset >= maxPushConstantsSize) {
                skip |= LogError(
                    device, "VUID-vkCmdPushConstants-offset-00370",
                    "%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantSize of %u.",
                    caller_name, index, offset, maxPushConstantsSize);
            }
            if (size > maxPushConstantsSize - offset) {
                skip |= LogError(device, "VUID-vkCmdPushConstants-size-00371",
                                 "%s call has push constants index %u with offset %u and size %u that exceeds this device's "
                                 "maxPushConstantSize of %u.",
                                 caller_name, index, offset, size, maxPushConstantsSize);
            }
        } else {
            // Unknown caller_name indicates a layer-internal programming error, not an app error.
            skip |= LogError(device, kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
        }
    }
    // size needs to be non-zero and a multiple of 4.
    if ((size == 0) || ((size & 0x3) != 0)) {
        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
            if (size == 0) {
                skip |= LogError(device, "VUID-VkPushConstantRange-size-00296",
                                 "%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
                                 index, size);
            }
            if (size & 0x3) {
                skip |= LogError(device, "VUID-VkPushConstantRange-size-00297",
                                 "%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
                                 index, size);
            }
        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
            if (size == 0) {
                skip |= LogError(device, "VUID-vkCmdPushConstants-size-arraylength",
                                 "%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
                                 index, size);
            }
            if (size & 0x3) {
                skip |= LogError(device, "VUID-vkCmdPushConstants-size-00369",
                                 "%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
                                 index, size);
            }
        } else {
            skip |= LogError(device, kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
        }
    }
    // offset needs to be a multiple of 4.
    if ((offset & 0x3) != 0) {
        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
            skip |= LogError(device, "VUID-VkPushConstantRange-offset-00295",
                             "%s call has push constants index %u with offset %u. Offset must be a multiple of 4.", caller_name,
                             index, offset);
        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
            skip |= LogError(device, "VUID-vkCmdPushConstants-offset-00368",
                             "%s call has push constants with offset %u. Offset must be a multiple of 4.", caller_name, offset);
        } else {
            skip |= LogError(device, kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
        }
    }
    return skip;
}
4774
// Descriptor "groups" used to aggregate descriptor counts against the per-stage and
// per-set device limits. Several VkDescriptorType values fold into one group (e.g.
// dynamic and non-dynamic buffers; sampled images and uniform texel buffers).
enum DSL_DESCRIPTOR_GROUPS {
    DSL_TYPE_SAMPLERS = 0,
    DSL_TYPE_UNIFORM_BUFFERS,
    DSL_TYPE_STORAGE_BUFFERS,
    DSL_TYPE_SAMPLED_IMAGES,
    DSL_TYPE_STORAGE_IMAGES,
    DSL_TYPE_INPUT_ATTACHMENTS,
    DSL_TYPE_INLINE_UNIFORM_BLOCK,
    DSL_NUM_DESCRIPTOR_GROUPS  // group count; also sizes the valarray accumulators below
};
4785
4786 // Used by PreCallValidateCreatePipelineLayout.
4787 // Returns an array of size DSL_NUM_DESCRIPTOR_GROUPS of the maximum number of descriptors used in any single pipeline stage
GetDescriptorCountMaxPerStage(const DeviceFeatures * enabled_features,const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> & set_layouts,bool skip_update_after_bind)4788 std::valarray<uint32_t> GetDescriptorCountMaxPerStage(
4789 const DeviceFeatures *enabled_features,
4790 const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
4791 // Identify active pipeline stages
4792 std::vector<VkShaderStageFlags> stage_flags = {VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_FRAGMENT_BIT,
4793 VK_SHADER_STAGE_COMPUTE_BIT};
4794 if (enabled_features->core.geometryShader) {
4795 stage_flags.push_back(VK_SHADER_STAGE_GEOMETRY_BIT);
4796 }
4797 if (enabled_features->core.tessellationShader) {
4798 stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
4799 stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT);
4800 }
4801
4802 // Allow iteration over enum values
4803 std::vector<DSL_DESCRIPTOR_GROUPS> dsl_groups = {
4804 DSL_TYPE_SAMPLERS, DSL_TYPE_UNIFORM_BUFFERS, DSL_TYPE_STORAGE_BUFFERS, DSL_TYPE_SAMPLED_IMAGES,
4805 DSL_TYPE_STORAGE_IMAGES, DSL_TYPE_INPUT_ATTACHMENTS, DSL_TYPE_INLINE_UNIFORM_BLOCK};
4806
4807 // Sum by layouts per stage, then pick max of stages per type
4808 std::valarray<uint32_t> max_sum(0U, DSL_NUM_DESCRIPTOR_GROUPS); // max descriptor sum among all pipeline stages
4809 for (auto stage : stage_flags) {
4810 std::valarray<uint32_t> stage_sum(0U, DSL_NUM_DESCRIPTOR_GROUPS); // per-stage sums
4811 for (auto dsl : set_layouts) {
4812 if (skip_update_after_bind &&
4813 (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
4814 continue;
4815 }
4816
4817 for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
4818 const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
4819 // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
4820 if (0 != (stage & binding->stageFlags) && binding->descriptorCount > 0) {
4821 switch (binding->descriptorType) {
4822 case VK_DESCRIPTOR_TYPE_SAMPLER:
4823 stage_sum[DSL_TYPE_SAMPLERS] += binding->descriptorCount;
4824 break;
4825 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
4826 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
4827 stage_sum[DSL_TYPE_UNIFORM_BUFFERS] += binding->descriptorCount;
4828 break;
4829 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
4830 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
4831 stage_sum[DSL_TYPE_STORAGE_BUFFERS] += binding->descriptorCount;
4832 break;
4833 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
4834 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
4835 stage_sum[DSL_TYPE_SAMPLED_IMAGES] += binding->descriptorCount;
4836 break;
4837 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
4838 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
4839 stage_sum[DSL_TYPE_STORAGE_IMAGES] += binding->descriptorCount;
4840 break;
4841 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
4842 stage_sum[DSL_TYPE_SAMPLED_IMAGES] += binding->descriptorCount;
4843 stage_sum[DSL_TYPE_SAMPLERS] += binding->descriptorCount;
4844 break;
4845 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
4846 stage_sum[DSL_TYPE_INPUT_ATTACHMENTS] += binding->descriptorCount;
4847 break;
4848 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
4849 // count one block per binding. descriptorCount is number of bytes
4850 stage_sum[DSL_TYPE_INLINE_UNIFORM_BLOCK]++;
4851 break;
4852 default:
4853 break;
4854 }
4855 }
4856 }
4857 }
4858 for (auto type : dsl_groups) {
4859 max_sum[type] = std::max(stage_sum[type], max_sum[type]);
4860 }
4861 }
4862 return max_sum;
4863 }
4864
4865 // Used by PreCallValidateCreatePipelineLayout.
4866 // Returns a map indexed by VK_DESCRIPTOR_TYPE_* enum of the summed descriptors by type.
4867 // Note: descriptors only count against the limit once even if used by multiple stages.
GetDescriptorSum(const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> & set_layouts,bool skip_update_after_bind)4868 std::map<uint32_t, uint32_t> GetDescriptorSum(
4869 const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
4870 std::map<uint32_t, uint32_t> sum_by_type;
4871 for (auto dsl : set_layouts) {
4872 if (skip_update_after_bind && (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
4873 continue;
4874 }
4875
4876 for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
4877 const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
4878 // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
4879 if (binding->descriptorCount > 0) {
4880 if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
4881 // count one block per binding. descriptorCount is number of bytes
4882 sum_by_type[binding->descriptorType]++;
4883 } else {
4884 sum_by_type[binding->descriptorType] += binding->descriptorCount;
4885 }
4886 }
4887 }
4888 }
4889 return sum_by_type;
4890 }
4891
PreCallValidateCreatePipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout) const4892 bool CoreChecks::PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
4893 const VkAllocationCallbacks *pAllocator,
4894 VkPipelineLayout *pPipelineLayout) const {
4895 bool skip = false;
4896
4897 // Validate layout count against device physical limit
4898 if (pCreateInfo->setLayoutCount > phys_dev_props.limits.maxBoundDescriptorSets) {
4899 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286",
4900 "vkCreatePipelineLayout(): setLayoutCount (%d) exceeds physical device maxBoundDescriptorSets limit (%d).",
4901 pCreateInfo->setLayoutCount, phys_dev_props.limits.maxBoundDescriptorSets);
4902 }
4903
4904 // Validate Push Constant ranges
4905 uint32_t i, j;
4906 for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
4907 skip |= ValidatePushConstantRange(pCreateInfo->pPushConstantRanges[i].offset, pCreateInfo->pPushConstantRanges[i].size,
4908 "vkCreatePipelineLayout()", i);
4909 if (0 == pCreateInfo->pPushConstantRanges[i].stageFlags) {
4910 skip |= LogError(device, "VUID-VkPushConstantRange-stageFlags-requiredbitmask",
4911 "vkCreatePipelineLayout() call has no stageFlags set.");
4912 }
4913 }
4914
4915 // As of 1.0.28, there is a VU that states that a stage flag cannot appear more than once in the list of push constant ranges.
4916 for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
4917 for (j = i + 1; j < pCreateInfo->pushConstantRangeCount; ++j) {
4918 if (0 != (pCreateInfo->pPushConstantRanges[i].stageFlags & pCreateInfo->pPushConstantRanges[j].stageFlags)) {
4919 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292",
4920 "vkCreatePipelineLayout() Duplicate stage flags found in ranges %d and %d.", i, j);
4921 }
4922 }
4923 }
4924
4925 // Early-out
4926 if (skip) return skip;
4927
4928 std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts(pCreateInfo->setLayoutCount, nullptr);
4929 unsigned int push_descriptor_set_count = 0;
4930 {
4931 for (i = 0; i < pCreateInfo->setLayoutCount; ++i) {
4932 set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
4933 if (set_layouts[i]->IsPushDescriptor()) ++push_descriptor_set_count;
4934 }
4935 }
4936
4937 if (push_descriptor_set_count > 1) {
4938 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293",
4939 "vkCreatePipelineLayout() Multiple push descriptor sets found.");
4940 }
4941
4942 // Max descriptors by type, within a single pipeline stage
4943 std::valarray<uint32_t> max_descriptors_per_stage = GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, true);
4944 // Samplers
4945 if (max_descriptors_per_stage[DSL_TYPE_SAMPLERS] > phys_dev_props.limits.maxPerStageDescriptorSamplers) {
4946 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03016"
4947 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287";
4948 skip |= LogError(device, vuid,
4949 "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
4950 "maxPerStageDescriptorSamplers limit (%d).",
4951 max_descriptors_per_stage[DSL_TYPE_SAMPLERS], phys_dev_props.limits.maxPerStageDescriptorSamplers);
4952 }
4953
4954 // Uniform buffers
4955 if (max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorUniformBuffers) {
4956 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03017"
4957 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288";
4958 skip |= LogError(device, vuid,
4959 "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
4960 "maxPerStageDescriptorUniformBuffers limit (%d).",
4961 max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS],
4962 phys_dev_props.limits.maxPerStageDescriptorUniformBuffers);
4963 }
4964
4965 // Storage buffers
4966 if (max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorStorageBuffers) {
4967 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03018"
4968 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289";
4969 skip |= LogError(device, vuid,
4970 "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
4971 "maxPerStageDescriptorStorageBuffers limit (%d).",
4972 max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS],
4973 phys_dev_props.limits.maxPerStageDescriptorStorageBuffers);
4974 }
4975
4976 // Sampled images
4977 if (max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorSampledImages) {
4978 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03019"
4979 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290";
4980 skip |=
4981 LogError(device, vuid,
4982 "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
4983 "maxPerStageDescriptorSampledImages limit (%d).",
4984 max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES], phys_dev_props.limits.maxPerStageDescriptorSampledImages);
4985 }
4986
4987 // Storage images
4988 if (max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorStorageImages) {
4989 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03020"
4990 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291";
4991 skip |=
4992 LogError(device, vuid,
4993 "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
4994 "maxPerStageDescriptorStorageImages limit (%d).",
4995 max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES], phys_dev_props.limits.maxPerStageDescriptorStorageImages);
4996 }
4997
4998 // Input attachments
4999 if (max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS] > phys_dev_props.limits.maxPerStageDescriptorInputAttachments) {
5000 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03021"
5001 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676";
5002 skip |= LogError(device, vuid,
5003 "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
5004 "maxPerStageDescriptorInputAttachments limit (%d).",
5005 max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS],
5006 phys_dev_props.limits.maxPerStageDescriptorInputAttachments);
5007 }
5008
5009 // Inline uniform blocks
5010 if (max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
5011 phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks) {
5012 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214"
5013 : "VUID-VkPipelineLayoutCreateInfo-descriptorType-02212";
5014 skip |= LogError(device, vuid,
5015 "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
5016 "maxPerStageDescriptorInlineUniformBlocks limit (%d).",
5017 max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK],
5018 phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks);
5019 }
5020
5021 // Total descriptors by type
5022 //
5023 std::map<uint32_t, uint32_t> sum_all_stages = GetDescriptorSum(set_layouts, true);
5024 // Samplers
5025 uint32_t sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLER] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
5026 if (sum > phys_dev_props.limits.maxDescriptorSetSamplers) {
5027 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03028"
5028 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677";
5029 skip |= LogError(device, vuid,
5030 "vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
5031 "maxDescriptorSetSamplers limit (%d).",
5032 sum, phys_dev_props.limits.maxDescriptorSetSamplers);
5033 }
5034
5035 // Uniform buffers
5036 if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] > phys_dev_props.limits.maxDescriptorSetUniformBuffers) {
5037 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03029"
5038 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678";
5039 skip |= LogError(device, vuid,
5040 "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
5041 "maxDescriptorSetUniformBuffers limit (%d).",
5042 sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER], phys_dev_props.limits.maxDescriptorSetUniformBuffers);
5043 }
5044
5045 // Dynamic uniform buffers
5046 if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic) {
5047 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03030"
5048 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679";
5049 skip |= LogError(device, vuid,
5050 "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
5051 "maxDescriptorSetUniformBuffersDynamic limit (%d).",
5052 sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
5053 phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic);
5054 }
5055
5056 // Storage buffers
5057 if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] > phys_dev_props.limits.maxDescriptorSetStorageBuffers) {
5058 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03031"
5059 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680";
5060 skip |= LogError(device, vuid,
5061 "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
5062 "maxDescriptorSetStorageBuffers limit (%d).",
5063 sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER], phys_dev_props.limits.maxDescriptorSetStorageBuffers);
5064 }
5065
5066 // Dynamic storage buffers
5067 if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic) {
5068 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03032"
5069 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681";
5070 skip |= LogError(device, vuid,
5071 "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
5072 "maxDescriptorSetStorageBuffersDynamic limit (%d).",
5073 sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
5074 phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic);
5075 }
5076
5077 // Sampled images
5078 sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
5079 sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
5080 if (sum > phys_dev_props.limits.maxDescriptorSetSampledImages) {
5081 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03033"
5082 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682";
5083 skip |= LogError(device, vuid,
5084 "vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
5085 "maxDescriptorSetSampledImages limit (%d).",
5086 sum, phys_dev_props.limits.maxDescriptorSetSampledImages);
5087 }
5088
5089 // Storage images
5090 sum = sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
5091 if (sum > phys_dev_props.limits.maxDescriptorSetStorageImages) {
5092 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03034"
5093 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683";
5094 skip |= LogError(device, vuid,
5095 "vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
5096 "maxDescriptorSetStorageImages limit (%d).",
5097 sum, phys_dev_props.limits.maxDescriptorSetStorageImages);
5098 }
5099
5100 // Input attachments
5101 if (sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] > phys_dev_props.limits.maxDescriptorSetInputAttachments) {
5102 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-03035"
5103 : "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684";
5104 skip |=
5105 LogError(device, vuid,
5106 "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
5107 "maxDescriptorSetInputAttachments limit (%d).",
5108 sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT], phys_dev_props.limits.maxDescriptorSetInputAttachments);
5109 }
5110
5111 // Inline uniform blocks
5112 if (sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
5113 phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks) {
5114 const char *vuid = (device_extensions.vk_ext_descriptor_indexing) ? "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216"
5115 : "VUID-VkPipelineLayoutCreateInfo-descriptorType-02213";
5116 skip |= LogError(device, vuid,
5117 "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
5118 "maxDescriptorSetInlineUniformBlocks limit (%d).",
5119 sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
5120 phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks);
5121 }
5122
5123 if (device_extensions.vk_ext_descriptor_indexing) {
5124 // XXX TODO: replace with correct VU messages
5125
5126 // Max descriptors by type, within a single pipeline stage
5127 std::valarray<uint32_t> max_descriptors_per_stage_update_after_bind =
5128 GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, false);
5129 // Samplers
5130 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS] >
5131 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers) {
5132 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-03022",
5133 "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
5134 "maxPerStageDescriptorUpdateAfterBindSamplers limit (%d).",
5135 max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS],
5136 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers);
5137 }
5138
5139 // Uniform buffers
5140 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS] >
5141 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers) {
5142 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023",
5143 "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
5144 "maxPerStageDescriptorUpdateAfterBindUniformBuffers limit (%d).",
5145 max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS],
5146 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers);
5147 }
5148
5149 // Storage buffers
5150 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS] >
5151 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers) {
5152 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024",
5153 "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
5154 "maxPerStageDescriptorUpdateAfterBindStorageBuffers limit (%d).",
5155 max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS],
5156 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers);
5157 }
5158
5159 // Sampled images
5160 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES] >
5161 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages) {
5162 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025",
5163 "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
5164 "maxPerStageDescriptorUpdateAfterBindSampledImages limit (%d).",
5165 max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES],
5166 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages);
5167 }
5168
5169 // Storage images
5170 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES] >
5171 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages) {
5172 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-03026",
5173 "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
5174 "maxPerStageDescriptorUpdateAfterBindStorageImages limit (%d).",
5175 max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES],
5176 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages);
5177 }
5178
5179 // Input attachments
5180 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS] >
5181 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments) {
5182 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027",
5183 "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
5184 "maxPerStageDescriptorUpdateAfterBindInputAttachments limit (%d).",
5185 max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS],
5186 phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments);
5187 }
5188
5189 // Inline uniform blocks
5190 if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
5191 phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) {
5192 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215",
5193 "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
5194 "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks limit (%d).",
5195 max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK],
5196 phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
5197 }
5198
5199 // Total descriptors by type, summed across all pipeline stages
5200 //
5201 std::map<uint32_t, uint32_t> sum_all_stages_update_after_bind = GetDescriptorSum(set_layouts, false);
5202 // Samplers
5203 sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLER] +
5204 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
5205 if (sum > phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers) {
5206 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036",
5207 "vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
5208 "maxDescriptorSetUpdateAfterBindSamplers limit (%d).",
5209 sum, phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers);
5210 }
5211
5212 // Uniform buffers
5213 if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] >
5214 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers) {
5215 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037",
5216 "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
5217 "maxDescriptorSetUpdateAfterBindUniformBuffers limit (%d).",
5218 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER],
5219 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers);
5220 }
5221
5222 // Dynamic uniform buffers
5223 if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] >
5224 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) {
5225 skip |=
5226 LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038",
5227 "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
5228 "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic limit (%d).",
5229 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
5230 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
5231 }
5232
5233 // Storage buffers
5234 if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] >
5235 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers) {
5236 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039",
5237 "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
5238 "maxDescriptorSetUpdateAfterBindStorageBuffers limit (%d).",
5239 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER],
5240 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers);
5241 }
5242
5243 // Dynamic storage buffers
5244 if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] >
5245 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) {
5246 skip |=
5247 LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040",
5248 "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
5249 "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic limit (%d).",
5250 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
5251 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
5252 }
5253
5254 // Sampled images
5255 sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] +
5256 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
5257 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
5258 if (sum > phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages) {
5259 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041",
5260 "vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
5261 "maxDescriptorSetUpdateAfterBindSampledImages limit (%d).",
5262 sum, phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages);
5263 }
5264
5265 // Storage images
5266 sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] +
5267 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
5268 if (sum > phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages) {
5269 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042",
5270 "vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
5271 "maxDescriptorSetUpdateAfterBindStorageImages limit (%d).",
5272 sum, phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages);
5273 }
5274
5275 // Input attachments
5276 if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] >
5277 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments) {
5278 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043",
5279 "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
5280 "maxDescriptorSetUpdateAfterBindInputAttachments limit (%d).",
5281 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT],
5282 phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments);
5283 }
5284
5285 // Inline uniform blocks
5286 if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
5287 phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks) {
5288 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217",
5289 "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
5290 "maxDescriptorSetUpdateAfterBindInlineUniformBlocks limit (%d).",
5291 sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
5292 phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
5293 }
5294 }
5295
5296 if (device_extensions.vk_ext_fragment_density_map_2) {
5297 uint32_t sum_subsampled_samplers = 0;
5298 for (auto dsl : set_layouts) {
5299 // find the number of subsampled samplers across all stages
            // NOTE: this does not use the GetDescriptorSum pattern because it needs the GetSamplerState method
5301 if ((dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
5302 continue;
5303 }
5304 for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
5305 const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
5306
5307 // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
5308 if (binding->descriptorCount > 0) {
5309 if (((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
5310 (binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)) &&
5311 (binding->pImmutableSamplers != nullptr)) {
5312 for (uint32_t sampler_idx = 0; sampler_idx < binding->descriptorCount; sampler_idx++) {
5313 const SAMPLER_STATE *state = GetSamplerState(binding->pImmutableSamplers[sampler_idx]);
5314 if (state->createInfo.flags & (VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT |
5315 VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT)) {
5316 sum_subsampled_samplers++;
5317 }
5318 }
5319 }
5320 }
5321 }
5322 }
5323 if (sum_subsampled_samplers > phys_dev_ext_props.fragment_density_map2_props.maxDescriptorSetSubsampledSamplers) {
5324 skip |= LogError(device, "VUID-VkPipelineLayoutCreateInfo-pImmutableSamplers-03566",
5325 "vkCreatePipelineLayout(): sum of sampler bindings with flags containing "
5326 "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT or "
5327 "VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT among all stages(% d) "
5328 "exceeds device maxDescriptorSetSubsampledSamplers limit (%d).",
5329 sum_subsampled_samplers,
5330 phys_dev_ext_props.fragment_density_map2_props.maxDescriptorSetSubsampledSamplers);
5331 }
5332 }
5333 return skip;
5334 }
5335
PreCallValidateResetDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags) const5336 bool CoreChecks::PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
5337 VkDescriptorPoolResetFlags flags) const {
5338 // Make sure sets being destroyed are not currently in-use
5339 if (disabled[idle_descriptor_set]) return false;
5340 bool skip = false;
5341 const DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
5342 if (pPool != nullptr) {
5343 for (auto ds : pPool->sets) {
5344 if (ds && ds->in_use.load()) {
5345 skip |= LogError(descriptorPool, "VUID-vkResetDescriptorPool-descriptorPool-00313",
5346 "It is invalid to call vkResetDescriptorPool() with descriptor sets in use by a command buffer.");
5347 if (skip) break;
5348 }
5349 }
5350 }
5351 return skip;
5352 }
5353
// Ensure the pool contains enough descriptors and descriptor sets to satisfy
// an allocation request. Fills ads_state_data with the total number of descriptors of each type required,
// as well as DescriptorSetLayout ptrs used for later update.
PreCallValidateAllocateDescriptorSets(VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets,void * ads_state_data) const5357 bool CoreChecks::PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
5358 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
5359 StateTracker::PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, ads_state_data);
5360
5361 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
5362 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
5363 // All state checks for AllocateDescriptorSets is done in single function
5364 return ValidateAllocateDescriptorSets(pAllocateInfo, ads_state);
5365 }
5366
PreCallValidateFreeDescriptorSets(VkDevice device,VkDescriptorPool descriptorPool,uint32_t count,const VkDescriptorSet * pDescriptorSets) const5367 bool CoreChecks::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
5368 const VkDescriptorSet *pDescriptorSets) const {
5369 // Make sure that no sets being destroyed are in-flight
5370 bool skip = false;
5371 // First make sure sets being destroyed are not currently in-use
5372 for (uint32_t i = 0; i < count; ++i) {
5373 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
5374 skip |= ValidateIdleDescriptorSet(pDescriptorSets[i], "vkFreeDescriptorSets");
5375 }
5376 }
5377 const DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
5378 if (pool_state && !(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT & pool_state->createInfo.flags)) {
5379 // Can't Free from a NON_FREE pool
5380 skip |= LogError(descriptorPool, "VUID-vkFreeDescriptorSets-descriptorPool-00312",
5381 "It is invalid to call vkFreeDescriptorSets() with a pool created without setting "
5382 "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.");
5383 }
5384 return skip;
5385 }
5386
PreCallValidateUpdateDescriptorSets(VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies) const5387 bool CoreChecks::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
5388 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
5389 const VkCopyDescriptorSet *pDescriptorCopies) const {
5390 // First thing to do is perform map look-ups.
5391 // NOTE : UpdateDescriptorSets is somewhat unique in that it's operating on a number of DescriptorSets
5392 // so we can't just do a single map look-up up-front, but do them individually in functions below
5393
5394 // Now make call(s) that validate state, but don't perform state updates in this function
5395 // Note, here DescriptorSets is unique in that we don't yet have an instance. Using a helper function in the
5396 // namespace which will parse params and make calls into specific class instances
5397 return ValidateUpdateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies,
5398 "vkUpdateDescriptorSets()");
5399 }
5400
PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo) const5401 bool CoreChecks::PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer,
5402 const VkCommandBufferBeginInfo *pBeginInfo) const {
5403 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5404 if (!cb_state) return false;
5405 bool skip = false;
5406 if (cb_state->in_use.load()) {
5407 skip |= LogError(commandBuffer, "VUID-vkBeginCommandBuffer-commandBuffer-00049",
5408 "Calling vkBeginCommandBuffer() on active %s before it has completed. You must check "
5409 "command buffer fence before this call.",
5410 report_data->FormatHandle(commandBuffer).c_str());
5411 }
5412 if (cb_state->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
5413 // Primary Command Buffer
5414 const VkCommandBufferUsageFlags invalid_usage =
5415 (VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT);
5416 if ((pBeginInfo->flags & invalid_usage) == invalid_usage) {
5417 skip |= LogError(commandBuffer, "VUID-vkBeginCommandBuffer-commandBuffer-02840",
5418 "vkBeginCommandBuffer(): Primary %s can't have both VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT and "
5419 "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
5420 report_data->FormatHandle(commandBuffer).c_str());
5421 }
5422 } else {
5423 // Secondary Command Buffer
5424 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
5425 if (!pInfo) {
5426 skip |= LogError(commandBuffer, "VUID-vkBeginCommandBuffer-commandBuffer-00051",
5427 "vkBeginCommandBuffer(): Secondary %s must have inheritance info.",
5428 report_data->FormatHandle(commandBuffer).c_str());
5429 } else {
5430 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
5431 assert(pInfo->renderPass);
5432 const auto *framebuffer = GetFramebufferState(pInfo->framebuffer);
5433 if (framebuffer) {
5434 if (framebuffer->createInfo.renderPass != pInfo->renderPass) {
5435 const auto *render_pass = GetRenderPassState(pInfo->renderPass);
5436 // renderPass that framebuffer was created with must be compatible with local renderPass
5437 skip |= ValidateRenderPassCompatibility("framebuffer", framebuffer->rp_state.get(), "command buffer",
5438 render_pass, "vkBeginCommandBuffer()",
5439 "VUID-VkCommandBufferBeginInfo-flags-00055");
5440 }
5441 }
5442 }
5443 if ((pInfo->occlusionQueryEnable == VK_FALSE || enabled_features.core.occlusionQueryPrecise == VK_FALSE) &&
5444 (pInfo->queryFlags & VK_QUERY_CONTROL_PRECISE_BIT)) {
5445 skip |= LogError(commandBuffer, "VUID-vkBeginCommandBuffer-commandBuffer-00052",
5446 "vkBeginCommandBuffer(): Secondary %s must not have VK_QUERY_CONTROL_PRECISE_BIT if "
5447 "occulusionQuery is disabled or the device does not support precise occlusion queries.",
5448 report_data->FormatHandle(commandBuffer).c_str());
5449 }
5450 }
5451 if (pInfo && pInfo->renderPass != VK_NULL_HANDLE) {
5452 const auto *renderPass = GetRenderPassState(pInfo->renderPass);
5453 if (renderPass) {
5454 if (pInfo->subpass >= renderPass->createInfo.subpassCount) {
5455 skip |= LogError(commandBuffer, "VUID-VkCommandBufferBeginInfo-flags-00054",
5456 "vkBeginCommandBuffer(): Secondary %s must have a subpass index (%d) that is "
5457 "less than the number of subpasses (%d).",
5458 report_data->FormatHandle(commandBuffer).c_str(), pInfo->subpass,
5459 renderPass->createInfo.subpassCount);
5460 }
5461 }
5462 }
5463 }
5464 if (CB_RECORDING == cb_state->state) {
5465 skip |= LogError(commandBuffer, "VUID-vkBeginCommandBuffer-commandBuffer-00049",
5466 "vkBeginCommandBuffer(): Cannot call Begin on %s in the RECORDING state. Must first call "
5467 "vkEndCommandBuffer().",
5468 report_data->FormatHandle(commandBuffer).c_str());
5469 } else if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
5470 VkCommandPool cmdPool = cb_state->createInfo.commandPool;
5471 const auto *pPool = cb_state->command_pool.get();
5472 if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
5473 LogObjectList objlist(commandBuffer);
5474 objlist.add(cmdPool);
5475 skip |= LogError(objlist, "VUID-vkBeginCommandBuffer-commandBuffer-00050",
5476 "Call to vkBeginCommandBuffer() on %s attempts to implicitly reset cmdBuffer created from "
5477 "%s that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
5478 report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
5479 }
5480 }
5481 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
5482 if (chained_device_group_struct) {
5483 skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_device_group_struct->deviceMask, commandBuffer,
5484 "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106");
5485 skip |= ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, commandBuffer,
5486 "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107");
5487 }
5488 return skip;
5489 }
5490
PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) const5491 bool CoreChecks::PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) const {
5492 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5493 if (!cb_state) return false;
5494 bool skip = false;
5495 if ((VK_COMMAND_BUFFER_LEVEL_PRIMARY == cb_state->createInfo.level) ||
5496 !(cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
5497 // This needs spec clarification to update valid usage, see comments in PR:
5498 // https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/165
5499 skip |= InsideRenderPass(cb_state, "vkEndCommandBuffer()", "VUID-vkEndCommandBuffer-commandBuffer-00060");
5500 }
5501
5502 skip |= ValidateCmd(cb_state, CMD_ENDCOMMANDBUFFER, "vkEndCommandBuffer()");
5503 for (auto query : cb_state->activeQueries) {
5504 skip |= LogError(commandBuffer, "VUID-vkEndCommandBuffer-commandBuffer-00061",
5505 "vkEndCommandBuffer(): Ending command buffer with in progress query: %s, query %d.",
5506 report_data->FormatHandle(query.pool).c_str(), query.query);
5507 }
5508 return skip;
5509 }
5510
PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags) const5511 bool CoreChecks::PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const {
5512 bool skip = false;
5513 const CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
5514 if (!pCB) return false;
5515 VkCommandPool cmdPool = pCB->createInfo.commandPool;
5516 const auto *pPool = pCB->command_pool.get();
5517
5518 if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
5519 LogObjectList objlist(commandBuffer);
5520 objlist.add(cmdPool);
5521 skip |= LogError(objlist, "VUID-vkResetCommandBuffer-commandBuffer-00046",
5522 "vkResetCommandBuffer(): Attempt to reset %s created from %s that does NOT have the "
5523 "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
5524 report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
5525 }
5526 skip |= CheckCommandBufferInFlight(pCB, "reset", "VUID-vkResetCommandBuffer-commandBuffer-00045");
5527
5528 return skip;
5529 }
5530
GetPipelineTypeName(VkPipelineBindPoint pipelineBindPoint)5531 static const char *GetPipelineTypeName(VkPipelineBindPoint pipelineBindPoint) {
5532 switch (pipelineBindPoint) {
5533 case VK_PIPELINE_BIND_POINT_GRAPHICS:
5534 return "graphics";
5535 case VK_PIPELINE_BIND_POINT_COMPUTE:
5536 return "compute";
5537 case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
5538 return "ray-tracing";
5539 default:
5540 return "unknown";
5541 }
5542 }
5543
ValidateGraphicsPipelineBindPoint(const CMD_BUFFER_STATE * cb_state,const PIPELINE_STATE * pipeline_state) const5544 bool CoreChecks::ValidateGraphicsPipelineBindPoint(const CMD_BUFFER_STATE *cb_state, const PIPELINE_STATE *pipeline_state) const {
5545 bool skip = false;
5546 const FRAMEBUFFER_STATE *fb_state = cb_state->activeFramebuffer.get();
5547
5548 if (fb_state) {
5549 auto subpass_desc = &pipeline_state->rp_state->createInfo.pSubpasses[pipeline_state->graphicsPipelineCI.subpass];
5550
5551 for (size_t i = 0; i < pipeline_state->attachments.size() && i < subpass_desc->colorAttachmentCount; i++) {
5552 const auto attachment = subpass_desc->pColorAttachments[i].attachment;
5553 if (attachment == VK_ATTACHMENT_UNUSED) continue;
5554
5555 const IMAGE_VIEW_STATE *imageview_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
5556 if (!imageview_state) continue;
5557
5558 const IMAGE_STATE *image_state = GetImageState(imageview_state->create_info.image);
5559 if (!image_state) continue;
5560
5561 const VkFormat format = pipeline_state->rp_state->createInfo.pAttachments[attachment].format;
5562
5563 if (pipeline_state->graphicsPipelineCI.pRasterizationState &&
5564 !pipeline_state->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable &&
5565 pipeline_state->attachments[i].blendEnable &&
5566 !(image_state->format_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT)) {
5567 skip |= LogError(device, "VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023",
5568 "vkCreateGraphicsPipelines(): pipeline.pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
5569 "].blendEnable is VK_TRUE but format %s associated with this attached image (%s) does "
5570 "not support VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT.",
5571 i, report_data->FormatHandle(image_state->image).c_str(), string_VkFormat(format));
5572 }
5573 }
5574 }
5575
5576 return skip;
5577 }
5578
PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline) const5579 bool CoreChecks::PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
5580 VkPipeline pipeline) const {
5581 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5582 assert(cb_state);
5583
5584 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindPipeline()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
5585 "VUID-vkCmdBindPipeline-commandBuffer-cmdpool");
5586 skip |= ValidateCmd(cb_state, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
5587 static const std::map<VkPipelineBindPoint, std::string> bindpoint_errors = {
5588 std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdBindPipeline-pipelineBindPoint-00777"),
5589 std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdBindPipeline-pipelineBindPoint-00778"),
5590 std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdBindPipeline-pipelineBindPoint-02391")};
5591
5592 skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, "vkCmdBindPipeline()", bindpoint_errors);
5593
5594 const auto *pipeline_state = GetPipelineState(pipeline);
5595 assert(pipeline_state);
5596
5597 const auto &pipeline_state_bind_point = pipeline_state->getPipelineType();
5598
5599 if (pipelineBindPoint != pipeline_state_bind_point) {
5600 if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS) {
5601 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBindPipeline-pipelineBindPoint-00779",
5602 "Cannot bind a pipeline of type %s to the graphics pipeline bind point",
5603 GetPipelineTypeName(pipeline_state_bind_point));
5604 } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
5605 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBindPipeline-pipelineBindPoint-00780",
5606 "Cannot bind a pipeline of type %s to the compute pipeline bind point",
5607 GetPipelineTypeName(pipeline_state_bind_point));
5608 } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
5609 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBindPipeline-pipelineBindPoint-02392",
5610 "Cannot bind a pipeline of type %s to the ray-tracing pipeline bind point",
5611 GetPipelineTypeName(pipeline_state_bind_point));
5612 }
5613 } else {
5614 if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
5615 skip |= ValidateGraphicsPipelineBindPoint(cb_state, pipeline_state);
5616 }
5617
5618 return skip;
5619 }
5620
PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports) const5621 bool CoreChecks::PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
5622 const VkViewport *pViewports) const {
5623 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5624 assert(cb_state);
5625 bool skip =
5626 ValidateCmdQueueFlags(cb_state, "vkCmdSetViewport()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
5627 skip |= ValidateCmd(cb_state, CMD_SETVIEWPORT, "vkCmdSetViewport()");
5628 return skip;
5629 }
5630
PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer,uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors) const5631 bool CoreChecks::PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
5632 const VkRect2D *pScissors) const {
5633 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5634 assert(cb_state);
5635 bool skip =
5636 ValidateCmdQueueFlags(cb_state, "vkCmdSetScissor()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetScissor-commandBuffer-cmdpool");
5637 skip |= ValidateCmd(cb_state, CMD_SETSCISSOR, "vkCmdSetScissor()");
5638 return skip;
5639 }
5640
PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer,uint32_t firstExclusiveScissor,uint32_t exclusiveScissorCount,const VkRect2D * pExclusiveScissors) const5641 bool CoreChecks::PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
5642 uint32_t exclusiveScissorCount, const VkRect2D *pExclusiveScissors) const {
5643 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5644 assert(cb_state);
5645 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetExclusiveScissorNV()", VK_QUEUE_GRAPHICS_BIT,
5646 "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool");
5647 skip |= ValidateCmd(cb_state, CMD_SETEXCLUSIVESCISSORNV, "vkCmdSetExclusiveScissorNV()");
5648 if (!enabled_features.exclusive_scissor.exclusiveScissor) {
5649 skip |= LogError(commandBuffer, "VUID-vkCmdSetExclusiveScissorNV-None-02031",
5650 "vkCmdSetExclusiveScissorNV: The exclusiveScissor feature is disabled.");
5651 }
5652 return skip;
5653 }
5654
PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer,VkImageView imageView,VkImageLayout imageLayout) const5655 bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
5656 VkImageLayout imageLayout) const {
5657 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5658 assert(cb_state);
5659 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindShadingRateImageNV()", VK_QUEUE_GRAPHICS_BIT,
5660 "VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool");
5661
5662 skip |= ValidateCmd(cb_state, CMD_BINDSHADINGRATEIMAGENV, "vkCmdBindShadingRateImageNV()");
5663
5664 if (!enabled_features.shading_rate_image.shadingRateImage) {
5665 skip |= LogError(commandBuffer, "VUID-vkCmdBindShadingRateImageNV-None-02058",
5666 "vkCmdBindShadingRateImageNV: The shadingRateImage feature is disabled.");
5667 }
5668
5669 if (imageView != VK_NULL_HANDLE) {
5670 const auto view_state = GetImageViewState(imageView);
5671 auto &ivci = view_state->create_info;
5672
5673 if (!view_state || (ivci.viewType != VK_IMAGE_VIEW_TYPE_2D && ivci.viewType != VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
5674 skip |= LogError(imageView, "VUID-vkCmdBindShadingRateImageNV-imageView-02059",
5675 "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must be a valid "
5676 "VkImageView handle of type VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY.");
5677 }
5678
5679 if (view_state && ivci.format != VK_FORMAT_R8_UINT) {
5680 skip |= LogError(
5681 imageView, "VUID-vkCmdBindShadingRateImageNV-imageView-02060",
5682 "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must have a format of VK_FORMAT_R8_UINT.");
5683 }
5684
5685 const VkImageCreateInfo *ici = view_state ? &GetImageState(view_state->create_info.image)->createInfo : nullptr;
5686 if (ici && !(ici->usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV)) {
5687 skip |= LogError(imageView, "VUID-vkCmdBindShadingRateImageNV-imageView-02061",
5688 "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, the image must have been "
5689 "created with VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV set.");
5690 }
5691
5692 if (view_state) {
5693 const auto image_state = GetImageState(view_state->create_info.image);
5694 bool hit_error = false;
5695
5696 // XXX TODO: While the VUID says "each subresource", only the base mip level is
5697 // actually used. Since we don't have an existing convenience function to iterate
5698 // over all mip levels, just don't bother with non-base levels.
5699 const VkImageSubresourceRange &range = view_state->create_info.subresourceRange;
5700 VkImageSubresourceLayers subresource = {range.aspectMask, range.baseMipLevel, range.baseArrayLayer, range.layerCount};
5701
5702 if (image_state) {
5703 skip |= VerifyImageLayout(cb_state, image_state, subresource, imageLayout, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
5704 "vkCmdCopyImage()", "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063",
5705 "VUID-vkCmdBindShadingRateImageNV-imageView-02062", &hit_error);
5706 }
5707 }
5708 }
5709
5710 return skip;
5711 }
5712
PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkShadingRatePaletteNV * pShadingRatePalettes) const5713 bool CoreChecks::PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
5714 uint32_t viewportCount,
5715 const VkShadingRatePaletteNV *pShadingRatePalettes) const {
5716 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5717 assert(cb_state);
5718 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportShadingRatePaletteNV()", VK_QUEUE_GRAPHICS_BIT,
5719 "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool");
5720
5721 skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTSHADINGRATEPALETTENV, "vkCmdSetViewportShadingRatePaletteNV()");
5722
5723 if (!enabled_features.shading_rate_image.shadingRateImage) {
5724 skip |= LogError(commandBuffer, "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064",
5725 "vkCmdSetViewportShadingRatePaletteNV: The shadingRateImage feature is disabled.");
5726 }
5727
5728 for (uint32_t i = 0; i < viewportCount; ++i) {
5729 auto *palette = &pShadingRatePalettes[i];
5730 if (palette->shadingRatePaletteEntryCount == 0 ||
5731 palette->shadingRatePaletteEntryCount > phys_dev_ext_props.shading_rate_image_props.shadingRatePaletteSize) {
5732 skip |= LogError(
5733 commandBuffer, "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071",
5734 "vkCmdSetViewportShadingRatePaletteNV: shadingRatePaletteEntryCount must be between 1 and shadingRatePaletteSize.");
5735 }
5736 }
5737
5738 return skip;
5739 }
5740
ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV & triangles,const char * func_name) const5741 bool CoreChecks::ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles, const char *func_name) const {
5742 bool skip = false;
5743
5744 const BUFFER_STATE *vb_state = GetBufferState(triangles.vertexData);
5745 if (vb_state != nullptr && vb_state->createInfo.size <= triangles.vertexOffset) {
5746 skip |= LogError(device, "VUID-VkGeometryTrianglesNV-vertexOffset-02428", "%s", func_name);
5747 }
5748
5749 const BUFFER_STATE *ib_state = GetBufferState(triangles.indexData);
5750 if (ib_state != nullptr && ib_state->createInfo.size <= triangles.indexOffset) {
5751 skip |= LogError(device, "VUID-VkGeometryTrianglesNV-indexOffset-02431", "%s", func_name);
5752 }
5753
5754 const BUFFER_STATE *td_state = GetBufferState(triangles.transformData);
5755 if (td_state != nullptr && td_state->createInfo.size <= triangles.transformOffset) {
5756 skip |= LogError(device, "VUID-VkGeometryTrianglesNV-transformOffset-02437", "%s", func_name);
5757 }
5758
5759 return skip;
5760 }
5761
ValidateGeometryAABBNV(const VkGeometryAABBNV & aabbs,const char * func_name) const5762 bool CoreChecks::ValidateGeometryAABBNV(const VkGeometryAABBNV &aabbs, const char *func_name) const {
5763 bool skip = false;
5764
5765 const BUFFER_STATE *aabb_state = GetBufferState(aabbs.aabbData);
5766 if (aabb_state != nullptr && aabb_state->createInfo.size > 0 && aabb_state->createInfo.size <= aabbs.offset) {
5767 skip |= LogError(device, "VUID-VkGeometryAABBNV-offset-02439", "%s", func_name);
5768 }
5769
5770 return skip;
5771 }
5772
ValidateGeometryNV(const VkGeometryNV & geometry,const char * func_name) const5773 bool CoreChecks::ValidateGeometryNV(const VkGeometryNV &geometry, const char *func_name) const {
5774 bool skip = false;
5775 if (geometry.geometryType == VK_GEOMETRY_TYPE_TRIANGLES_NV) {
5776 skip = ValidateGeometryTrianglesNV(geometry.geometry.triangles, func_name);
5777 } else if (geometry.geometryType == VK_GEOMETRY_TYPE_AABBS_NV) {
5778 skip = ValidateGeometryAABBNV(geometry.geometry.aabbs, func_name);
5779 }
5780 return skip;
5781 }
5782
PreCallValidateCreateAccelerationStructureNV(VkDevice device,const VkAccelerationStructureCreateInfoNV * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkAccelerationStructureNV * pAccelerationStructure) const5783 bool CoreChecks::PreCallValidateCreateAccelerationStructureNV(VkDevice device,
5784 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
5785 const VkAllocationCallbacks *pAllocator,
5786 VkAccelerationStructureNV *pAccelerationStructure) const {
5787 bool skip = false;
5788 if (pCreateInfo != nullptr && pCreateInfo->info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
5789 for (uint32_t i = 0; i < pCreateInfo->info.geometryCount; i++) {
5790 skip |= ValidateGeometryNV(pCreateInfo->info.pGeometries[i], "vkCreateAccelerationStructureNV():");
5791 }
5792 }
5793 return skip;
5794 }
5795
PreCallValidateCreateAccelerationStructureKHR(VkDevice device,const VkAccelerationStructureCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkAccelerationStructureKHR * pAccelerationStructure) const5796 bool CoreChecks::PreCallValidateCreateAccelerationStructureKHR(VkDevice device,
5797 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
5798 const VkAllocationCallbacks *pAllocator,
5799 VkAccelerationStructureKHR *pAccelerationStructure) const {
5800 bool skip = false;
5801 if (pCreateInfo) {
5802 for (uint32_t i = 0; i < pCreateInfo->maxGeometryCount; ++i) {
5803 if (pCreateInfo->pGeometryInfos[i].geometryType == VK_GEOMETRY_TYPE_TRIANGLES_KHR) {
5804 const VkFormatProperties format_properties = GetPDFormatProperties(pCreateInfo->pGeometryInfos[i].vertexFormat);
5805 if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR)) {
5806 skip |= LogError(
5807 device, "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-03501",
5808 "VkAccelerationStructureCreateGeometryTypeInfoKHR: If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR,"
5809 "pCreateInfo->pGeometryInfos[%u].vertexFormat %s must support the "
5810 "VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR in"
5811 "VkFormatProperties::bufferFeatures as returned by vkGetPhysicalDeviceFormatProperties2.",
5812 i, string_VkFormat(pCreateInfo->pGeometryInfos[i].vertexFormat));
5813 }
5814 }
5815 }
5816 }
5817 return skip;
5818 }
// Shared validation for a single VkBindAccelerationStructureMemoryInfoKHR
// entry, used by both the NV and KHR bind entry points (messages cite the NV
// name). Checks: the structure is not already bound, the memory range and
// memory type are acceptable, and the offset/size satisfy the structure's
// memory requirements.
bool CoreChecks::ValidateBindAccelerationStructureMemory(VkDevice device,
                                                         const VkBindAccelerationStructureMemoryInfoKHR &info) const {
    bool skip = false;

    const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
    if (!as_state) {
        // No tracked state for this handle — nothing further can be checked.
        return skip;
    }
    // An acceleration structure may be bound to memory at most once.
    if (!as_state->GetBoundMemory().empty()) {
        skip |=
            LogError(info.accelerationStructure, "VUID-VkBindAccelerationStructureMemoryInfoKHR-accelerationStructure-02450",
                     "vkBindAccelerationStructureMemoryNV(): accelerationStructure must not already be backed by a memory object.");
    }

    // Validate bound memory range information
    const auto mem_info = GetDevMemState(info.memory);
    if (mem_info) {
        skip |= ValidateInsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
                                                               "vkBindAccelerationStructureMemoryNV()");
        skip |= ValidateMemoryTypes(mem_info, as_state->memory_requirements.memoryRequirements.memoryTypeBits,
                                    "vkBindAccelerationStructureMemoryNV()",
                                    "VUID-VkBindAccelerationStructureMemoryInfoKHR-memory-02593");
    }

    // Validate memory requirements alignment
    if (SafeModulo(info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment) != 0) {
        skip |= LogError(info.accelerationStructure, "VUID-VkBindAccelerationStructureMemoryInfoKHR-memoryOffset-02594",
                         "vkBindAccelerationStructureMemoryNV(): memoryOffset is 0x%" PRIxLEAST64
                         " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
                         ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure"
                         "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
                         info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment);
    }

    if (mem_info) {
        // Validate memory requirements size
        // (the portion of the allocation from memoryOffset onward must cover the structure's size)
        if (as_state->memory_requirements.memoryRequirements.size > (mem_info->alloc_info.allocationSize - info.memoryOffset)) {
            skip |= LogError(info.accelerationStructure, "VUID-VkBindAccelerationStructureMemoryInfoKHR-size-02595",
                             "vkBindAccelerationStructureMemoryNV(): memory size minus memoryOffset is 0x%" PRIxLEAST64
                             " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
                             ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure"
                             "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
                             mem_info->alloc_info.allocationSize - info.memoryOffset,
                             as_state->memory_requirements.memoryRequirements.size);
        }
    }

    return skip;
}
PreCallValidateBindAccelerationStructureMemoryKHR(VkDevice device,uint32_t bindInfoCount,const VkBindAccelerationStructureMemoryInfoKHR * pBindInfos) const5868 bool CoreChecks::PreCallValidateBindAccelerationStructureMemoryKHR(
5869 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos) const {
5870 bool skip = false;
5871 for (uint32_t i = 0; i < bindInfoCount; i++) {
5872 skip |= ValidateBindAccelerationStructureMemory(device, pBindInfos[i]);
5873 }
5874 return skip;
5875 }
5876
// The NV bind-info struct is interchangeable with the KHR one here (the call
// below compiles with the NV pointer), so the NV entry point simply defers to
// the KHR validation path.
bool CoreChecks::PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
                                                                  const VkBindAccelerationStructureMemoryInfoNV *pBindInfos) const {
    return PreCallValidateBindAccelerationStructureMemoryKHR(device, bindInfoCount, pBindInfos);
}
5881
PreCallValidateGetAccelerationStructureHandleNV(VkDevice device,VkAccelerationStructureNV accelerationStructure,size_t dataSize,void * pData) const5882 bool CoreChecks::PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
5883 size_t dataSize, void *pData) const {
5884 bool skip = false;
5885
5886 const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
5887 if (as_state != nullptr) {
5888 // TODO: update the fake VUID below once the real one is generated.
5889 skip = ValidateMemoryIsBoundToAccelerationStructure(
5890 as_state, "vkGetAccelerationStructureHandleNV",
5891 "UNASSIGNED-vkGetAccelerationStructureHandleNV-accelerationStructure-XXXX");
5892 }
5893
5894 return skip;
5895 }
5896
// Validates vkCmdBuildAccelerationStructureKHR: queue capability, recording
// state, render-pass restriction, and — for each build info with
// update == VK_TRUE — that srcAccelerationStructure is non-null and was
// previously built with ALLOW_UPDATE set.
bool CoreChecks::PreCallValidateCmdBuildAccelerationStructureKHR(
    VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
    const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) const {
    bool skip = false;
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    skip = ValidateCmdQueueFlags(cb_state, "vkCmdBuildAccelerationStructureKHR()", VK_QUEUE_COMPUTE_BIT,
                                 "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_BUILDACCELERATIONSTRUCTUREKHR, "vkCmdBuildAccelerationStructureKHR()");
    skip |=
        InsideRenderPass(cb_state, "vkCmdBuildAccelerationStructureKHR()", "VUID-vkCmdBuildAccelerationStructureKHR-renderpass");
    if (pInfos != NULL) {
        for (uint32_t info_index = 0; info_index < infoCount; ++info_index) {
            const ACCELERATION_STRUCTURE_STATE *src_as_state =
                GetAccelerationStructureState(pInfos[info_index].srcAccelerationStructure);
            if (pInfos[info_index].update == VK_TRUE) {
                // An update build requires a valid, previously-built source that
                // allowed updates at build time.
                if (pInfos[info_index].srcAccelerationStructure == VK_NULL_HANDLE) {
                    skip |= LogError(commandBuffer, "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03537",
                                     "vkCmdBuildAccelerationStructureKHR(): If update is VK_TRUE, srcAccelerationStructure must "
                                     "not be VK_NULL_HANDLE");
                } else {
                    if (src_as_state == nullptr || !src_as_state->built ||
                        !(src_as_state->build_info_khr.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR)) {
                        skip |= LogError(
                            commandBuffer, "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03538",
                            "vkCmdBuildAccelerationStructureKHR(): If update is VK_TRUE, srcAccelerationStructure must have "
                            "been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR set "
                            "in VkAccelerationStructureBuildGeometryInfoKHR flags");
                    }
                }
            }
        }
    }
    return skip;
}
5932
// Validates vkCmdBuildAccelerationStructureNV. Checks, in order: queue
// capability / recording state / render-pass restriction; per-geometry buffer
// offsets for bottom-level builds; geometryCount against the device limit;
// the build info against dst's create info; dst memory binding; the
// update==VK_TRUE source requirements and scratch-buffer sizing; and usage
// flags on the instance-data and scratch buffers.
bool CoreChecks::PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
                                                                VkDeviceSize instanceOffset, VkBool32 update,
                                                                VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
                                                                VkBuffer scratch, VkDeviceSize scratchOffset) const {
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBuildAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
                                      "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool");

    skip |= ValidateCmd(cb_state, CMD_BUILDACCELERATIONSTRUCTURENV, "vkCmdBuildAccelerationStructureNV()");
    skip |= InsideRenderPass(cb_state, "vkCmdBuildAccelerationStructureNV()", "VUID-vkCmdBuildAccelerationStructureNV-renderpass");

    // Per-geometry data validation only applies to bottom-level builds.
    if (pInfo != nullptr && pInfo->type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
        for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
            skip |= ValidateGeometryNV(pInfo->pGeometries[i], "vkCmdBuildAccelerationStructureNV():");
        }
    }

    if (pInfo != nullptr && pInfo->geometryCount > phys_dev_ext_props.ray_tracing_propsNV.maxGeometryCount) {
        skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241",
                         "vkCmdBuildAccelerationStructureNV(): geometryCount [%d] must be less than or equal to "
                         "VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.",
                         pInfo->geometryCount);
    }

    const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    const BUFFER_STATE *scratch_buffer_state = GetBufferState(scratch);

    // The build info must be compatible with the info dst was created with
    // (all of these map to VUID ...-dst-02488).
    if (dst_as_state != nullptr && pInfo != nullptr) {
        if (dst_as_state->create_infoNV.info.type != pInfo->type) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                             "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::type"
                             "[%s] must be identical to build info VkAccelerationStructureInfoNV::type [%s].",
                             string_VkAccelerationStructureTypeNV(dst_as_state->create_infoNV.info.type),
                             string_VkAccelerationStructureTypeNV(pInfo->type));
        }
        if (dst_as_state->create_infoNV.info.flags != pInfo->flags) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                             "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::flags"
                             "[0x%X] must be identical to build info VkAccelerationStructureInfoNV::flags [0x%X].",
                             dst_as_state->create_infoNV.info.flags, pInfo->flags);
        }
        if (dst_as_state->create_infoNV.info.instanceCount < pInfo->instanceCount) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                             "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::instanceCount "
                             "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::instanceCount [%d].",
                             dst_as_state->create_infoNV.info.instanceCount, pInfo->instanceCount);
        }
        if (dst_as_state->create_infoNV.info.geometryCount < pInfo->geometryCount) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                             "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::geometryCount"
                             "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::geometryCount [%d].",
                             dst_as_state->create_infoNV.info.geometryCount, pInfo->geometryCount);
        } else {
            // Counts are compatible; compare each geometry's create-time capacity
            // against the build request. Only the first violation per build is
            // reported (the loop breaks after logging).
            for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
                const VkGeometryDataNV &create_geometry_data = dst_as_state->create_infoNV.info.pGeometries[i].geometry;
                const VkGeometryDataNV &build_geometry_data = pInfo->pGeometries[i].geometry;
                if (create_geometry_data.triangles.vertexCount < build_geometry_data.triangles.vertexCount) {
                    skip |= LogError(
                        commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                        "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.vertexCount [%d]"
                        "must be greater than or equal to build info pGeometries[%d].geometry.triangles.vertexCount [%d].",
                        i, create_geometry_data.triangles.vertexCount, i, build_geometry_data.triangles.vertexCount);
                    break;
                }
                if (create_geometry_data.triangles.indexCount < build_geometry_data.triangles.indexCount) {
                    skip |= LogError(
                        commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                        "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.indexCount [%d]"
                        "must be greater than or equal to build info pGeometries[%d].geometry.triangles.indexCount [%d].",
                        i, create_geometry_data.triangles.indexCount, i, build_geometry_data.triangles.indexCount);
                    break;
                }
                if (create_geometry_data.aabbs.numAABBs < build_geometry_data.aabbs.numAABBs) {
                    skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
                                     "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.aabbs.numAABBs [%d]"
                                     "must be greater than or equal to build info pGeometries[%d].geometry.aabbs.numAABBs [%d].",
                                     i, create_geometry_data.aabbs.numAABBs, i, build_geometry_data.aabbs.numAABBs);
                    break;
                }
            }
        }
    }

    // dst must already have memory bound before it can be built into.
    if (dst_as_state != nullptr) {
        skip |= ValidateMemoryIsBoundToAccelerationStructure(
            dst_as_state, "vkCmdBuildAccelerationStructureNV()",
            "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
    }

    if (update == VK_TRUE) {
        // Update path: src must be valid and built with ALLOW_UPDATE, and the
        // scratch buffer must cover the update-scratch requirements.
        if (src == VK_NULL_HANDLE) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
                             "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must not be VK_NULL_HANDLE.");
        } else {
            if (src_as_state == nullptr || !src_as_state->built ||
                !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV)) {
                skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
                                 "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must have been built before "
                                 "with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in "
                                 "VkAccelerationStructureInfoNV::flags.");
            }
        }
        // Warn (not error) if the app never queried update-scratch requirements.
        if (dst_as_state != nullptr && !dst_as_state->update_scratch_memory_requirements_checked) {
            skip |=
                LogWarning(dst, kVUID_Core_CmdBuildAccelNV_NoUpdateMemReqQuery,
                           "vkCmdBuildAccelerationStructureNV(): Updating %s but vkGetAccelerationStructureMemoryRequirementsNV() "
                           "has not been called for update scratch memory.",
                           report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
            // Use requirements fetched at create time
        }
        if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
            dst_as_state->update_scratch_memory_requirements.memoryRequirements.size >
                (scratch_buffer_state->createInfo.size - scratchOffset)) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-update-02492",
                             "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, The size member of the "
                             "VkMemoryRequirements structure returned from a call to "
                             "vkGetAccelerationStructureMemoryRequirementsNV with "
                             "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
                             "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
                             "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than "
                             "or equal to the size of scratch minus scratchOffset");
        }
    } else {
        // Fresh-build path: same scratch-size pattern against build-scratch requirements.
        if (dst_as_state != nullptr && !dst_as_state->build_scratch_memory_requirements_checked) {
            skip |= LogWarning(dst, kVUID_Core_CmdBuildAccelNV_NoScratchMemReqQuery,
                               "vkCmdBuildAccelerationStructureNV(): Assigning scratch buffer to %s but "
                               "vkGetAccelerationStructureMemoryRequirementsNV() has not been called for scratch memory.",
                               report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
            // Use requirements fetched at create time
        }
        if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
            dst_as_state->build_scratch_memory_requirements.memoryRequirements.size >
                (scratch_buffer_state->createInfo.size - scratchOffset)) {
            skip |= LogError(commandBuffer, "VUID-vkCmdBuildAccelerationStructureNV-update-02491",
                             "vkCmdBuildAccelerationStructureNV(): If update is VK_FALSE, The size member of the "
                             "VkMemoryRequirements structure returned from a call to "
                             "vkGetAccelerationStructureMemoryRequirementsNV with "
                             "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
                             "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
                             "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than "
                             "or equal to the size of scratch minus scratchOffset");
        }
    }
    // Both buffers must have been created with the ray-tracing usage bit.
    if (instanceData != VK_NULL_HANDLE) {
        const auto buffer_state = GetBufferState(instanceData);
        if (buffer_state != nullptr) {
            skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, true,
                                             "VUID-VkAccelerationStructureInfoNV-instanceData-02782",
                                             "vkCmdBuildAccelerationStructureNV()", "VK_BUFFER_USAGE_RAY_TRACING_BIT_NV");
        }
    }
    if (scratch_buffer_state != nullptr) {
        skip |= ValidateBufferUsageFlags(scratch_buffer_state, VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, true,
                                         "VUID-VkAccelerationStructureInfoNV-scratch-02781", "vkCmdBuildAccelerationStructureNV()",
                                         "VK_BUFFER_USAGE_RAY_TRACING_BIT_NV");
    }
    return skip;
}
6094
PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,VkAccelerationStructureNV dst,VkAccelerationStructureNV src,VkCopyAccelerationStructureModeNV mode) const6095 bool CoreChecks::PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
6096 VkAccelerationStructureNV src,
6097 VkCopyAccelerationStructureModeNV mode) const {
6098 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6099 assert(cb_state);
6100 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
6101 "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool");
6102
6103 skip |= ValidateCmd(cb_state, CMD_COPYACCELERATIONSTRUCTURENV, "vkCmdCopyAccelerationStructureNV()");
6104 skip |= InsideRenderPass(cb_state, "vkCmdCopyAccelerationStructureNV()", "VUID-vkCmdCopyAccelerationStructureNV-renderpass");
6105 const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
6106 const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
6107
6108 if (dst_as_state != nullptr) {
6109 skip |= ValidateMemoryIsBoundToAccelerationStructure(
6110 dst_as_state, "vkCmdBuildAccelerationStructureNV()",
6111 "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
6112 }
6113
6114 if (mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV) {
6115 if (src_as_state != nullptr &&
6116 (!src_as_state->built || !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV))) {
6117 skip |= LogError(commandBuffer, "VUID-vkCmdCopyAccelerationStructureNV-src-03411",
6118 "vkCmdCopyAccelerationStructureNV(): src must have been built with "
6119 "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is "
6120 "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV.");
6121 }
6122 }
6123 if (!(mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV || mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR)) {
6124 skip |= LogError(commandBuffer, "VUID-vkCmdCopyAccelerationStructureNV-mode-03410",
6125 "vkCmdCopyAccelerationStructureNV():mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR"
6126 "or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR.");
6127 }
6128 return skip;
6129 }
6130
PreCallValidateDestroyAccelerationStructureNV(VkDevice device,VkAccelerationStructureNV accelerationStructure,const VkAllocationCallbacks * pAllocator) const6131 bool CoreChecks::PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
6132 const VkAllocationCallbacks *pAllocator) const {
6133 const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
6134 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
6135 bool skip = false;
6136 if (as_state) {
6137 skip |= ValidateObjectNotInUse(as_state, obj_struct, "vkDestroyAccelerationStructureNV",
6138 "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02442");
6139 }
6140 return skip;
6141 }
6142
PreCallValidateDestroyAccelerationStructureKHR(VkDevice device,VkAccelerationStructureKHR accelerationStructure,const VkAllocationCallbacks * pAllocator) const6143 bool CoreChecks::PreCallValidateDestroyAccelerationStructureKHR(VkDevice device, VkAccelerationStructureKHR accelerationStructure,
6144 const VkAllocationCallbacks *pAllocator) const {
6145 const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
6146 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
6147 bool skip = false;
6148 if (as_state) {
6149 skip |= ValidateObjectNotInUse(as_state, obj_struct, "vkDestroyAccelerationStructureKHR",
6150 "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02442");
6151 }
6152 if (pAllocator && !as_state->allocator) {
6153 skip |= LogError(device, "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02444",
6154 "vkDestroyAccelerationStructureKH:If no VkAllocationCallbacks were provided when accelerationStructure"
6155 "was created, pAllocator must be NULL.");
6156 }
6157 return skip;
6158 }
6159
PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewportWScalingNV * pViewportWScalings) const6160 bool CoreChecks::PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
6161 uint32_t viewportCount,
6162 const VkViewportWScalingNV *pViewportWScalings) const {
6163 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6164 assert(cb_state);
6165 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportWScalingNV()", VK_QUEUE_GRAPHICS_BIT,
6166 "VUID-vkCmdSetViewportWScalingNV-commandBuffer-cmdpool");
6167
6168 skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTWSCALINGNV, "vkCmdSetViewportWScalingNV()");
6169
6170 return skip;
6171 }
6172
PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer,float lineWidth) const6173 bool CoreChecks::PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const {
6174 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6175 assert(cb_state);
6176 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineWidth()", VK_QUEUE_GRAPHICS_BIT,
6177 "VUID-vkCmdSetLineWidth-commandBuffer-cmdpool");
6178 skip |= ValidateCmd(cb_state, CMD_SETLINEWIDTH, "vkCmdSetLineWidth()");
6179 return skip;
6180 }
6181
PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer,uint32_t lineStippleFactor,uint16_t lineStipplePattern) const6182 bool CoreChecks::PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
6183 uint16_t lineStipplePattern) const {
6184 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6185 assert(cb_state);
6186 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineStippleEXT()", VK_QUEUE_GRAPHICS_BIT,
6187 "VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool");
6188 skip |= ValidateCmd(cb_state, CMD_SETLINESTIPPLEEXT, "vkCmdSetLineStippleEXT()");
6189 return skip;
6190 }
6191
PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer,float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor) const6192 bool CoreChecks::PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
6193 float depthBiasSlopeFactor) const {
6194 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6195 assert(cb_state);
6196 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBias()", VK_QUEUE_GRAPHICS_BIT,
6197 "VUID-vkCmdSetDepthBias-commandBuffer-cmdpool");
6198 skip |= ValidateCmd(cb_state, CMD_SETDEPTHBIAS, "vkCmdSetDepthBias()");
6199 if ((depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
6200 skip |= LogError(commandBuffer, "VUID-vkCmdSetDepthBias-depthBiasClamp-00790",
6201 "vkCmdSetDepthBias(): the depthBiasClamp device feature is disabled: the depthBiasClamp parameter must "
6202 "be set to 0.0.");
6203 }
6204 return skip;
6205 }
6206
PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer,const float blendConstants[4]) const6207 bool CoreChecks::PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const {
6208 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6209 assert(cb_state);
6210 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetBlendConstants()", VK_QUEUE_GRAPHICS_BIT,
6211 "VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool");
6212 skip |= ValidateCmd(cb_state, CMD_SETBLENDCONSTANTS, "vkCmdSetBlendConstants()");
6213 return skip;
6214 }
6215
PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer,float minDepthBounds,float maxDepthBounds) const6216 bool CoreChecks::PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const {
6217 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6218 assert(cb_state);
6219 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBounds()", VK_QUEUE_GRAPHICS_BIT,
6220 "VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool");
6221 skip |= ValidateCmd(cb_state, CMD_SETDEPTHBOUNDS, "vkCmdSetDepthBounds()");
6222 return skip;
6223 }
6224
PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t compareMask) const6225 bool CoreChecks::PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6226 uint32_t compareMask) const {
6227 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6228 assert(cb_state);
6229 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilCompareMask()", VK_QUEUE_GRAPHICS_BIT,
6230 "VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool");
6231 skip |= ValidateCmd(cb_state, CMD_SETSTENCILCOMPAREMASK, "vkCmdSetStencilCompareMask()");
6232 return skip;
6233 }
6234
PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t writeMask) const6235 bool CoreChecks::PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6236 uint32_t writeMask) const {
6237 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6238 assert(cb_state);
6239 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilWriteMask()", VK_QUEUE_GRAPHICS_BIT,
6240 "VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool");
6241 skip |= ValidateCmd(cb_state, CMD_SETSTENCILWRITEMASK, "vkCmdSetStencilWriteMask()");
6242 return skip;
6243 }
6244
PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t reference) const6245 bool CoreChecks::PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6246 uint32_t reference) const {
6247 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6248 assert(cb_state);
6249 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilReference()", VK_QUEUE_GRAPHICS_BIT,
6250 "VUID-vkCmdSetStencilReference-commandBuffer-cmdpool");
6251 skip |= ValidateCmd(cb_state, CMD_SETSTENCILREFERENCE, "vkCmdSetStencilReference()");
6252 return skip;
6253 }
6254
ValidateDynamicOffsetAlignment(VkCommandBuffer command_buffer,const VkDescriptorSetLayoutBinding * binding,VkDescriptorType test_type,VkDeviceSize alignment,const uint32_t * pDynamicOffsets,const char * err_msg,const char * limit_name,uint32_t * offset_idx) const6255 bool CoreChecks::ValidateDynamicOffsetAlignment(VkCommandBuffer command_buffer, const VkDescriptorSetLayoutBinding *binding,
6256 VkDescriptorType test_type, VkDeviceSize alignment, const uint32_t *pDynamicOffsets,
6257 const char *err_msg, const char *limit_name, uint32_t *offset_idx) const {
6258 bool skip = false;
6259 if (binding->descriptorType == test_type) {
6260 const auto end_idx = *offset_idx + binding->descriptorCount;
6261 for (uint32_t current_idx = *offset_idx; current_idx < end_idx; current_idx++) {
6262 if (SafeModulo(pDynamicOffsets[current_idx], alignment) != 0) {
6263 skip |= LogError(
6264 command_buffer, err_msg,
6265 "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of device limit %s 0x%" PRIxLEAST64
6266 ".",
6267 current_idx, pDynamicOffsets[current_idx], limit_name, alignment);
6268 }
6269 }
6270 *offset_idx = end_idx;
6271 }
6272 return skip;
6273 }
6274
// Validate vkCmdBindDescriptorSets():
//  - pool queue support and recording state,
//  - each bound set's layout compatibility with the pipeline layout,
//  - that pDynamicOffsets supplies exactly one correctly-aligned offset per dynamic
//    descriptor across all sets being bound,
//  - that firstSet + setCount stays within the pipeline layout's setLayoutCount.
bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                      VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
                                                      const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                      const uint32_t *pDynamicOffsets) const {
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    bool skip = false;
    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdBindDescriptorSets()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                  "VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescriptorSets()");
    // Track total count of dynamic descriptor types to make sure we have an offset for each one
    uint32_t total_dynamic_descriptors = 0;
    string error_string = "";

    // NOTE(review): pipeline_layout is dereferenced below (set_layouts) without a null
    // check — presumably GetPipelineLayout cannot fail for a valid handle; confirm.
    const auto *pipeline_layout = GetPipelineLayout(layout);
    for (uint32_t set_idx = 0; set_idx < setCount; set_idx++) {
        const cvdescriptorset::DescriptorSet *descriptor_set = GetSetNode(pDescriptorSets[set_idx]);
        if (descriptor_set) {
            // Verify that set being bound is compatible with overlapping setLayout of pipelineLayout
            if (!VerifySetLayoutCompatibility(report_data, descriptor_set, pipeline_layout, set_idx + firstSet, error_string)) {
                skip |= LogError(pDescriptorSets[set_idx], "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358",
                                 "vkCmdBindDescriptorSets(): descriptorSet #%u being bound is not compatible with overlapping "
                                 "descriptorSetLayout at index %u of "
                                 "%s due to: %s.",
                                 set_idx, set_idx + firstSet, report_data->FormatHandle(layout).c_str(), error_string.c_str());
            }

            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            if (set_dynamic_descriptor_count) {
                // First make sure we won't overstep bounds of pDynamicOffsets array
                if ((total_dynamic_descriptors + set_dynamic_descriptor_count) > dynamicOffsetCount) {
                    // Test/report this here, such that we don't run past the end of pDynamicOffsets in the else clause
                    skip |=
                        LogError(pDescriptorSets[set_idx], "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
                                 "vkCmdBindDescriptorSets(): descriptorSet #%u (%s) requires %u dynamicOffsets, but only %u "
                                 "dynamicOffsets are left in "
                                 "pDynamicOffsets array. There must be one dynamic offset for each dynamic descriptor being bound.",
                                 set_idx, report_data->FormatHandle(pDescriptorSets[set_idx]).c_str(),
                                 descriptor_set->GetDynamicDescriptorCount(), (dynamicOffsetCount - total_dynamic_descriptors));
                    // Set the number found to the maximum to prevent duplicate messages, or subsequent descriptor sets from
                    // testing against the "short tail" we're skipping below.
                    total_dynamic_descriptors = dynamicOffsetCount;
                } else {  // Validate dynamic offsets and Dynamic Offset Minimums
                    // Walk every binding in the set's layout; the helper advances
                    // cur_dyn_offset past each dynamic binding it matches.
                    uint32_t cur_dyn_offset = total_dynamic_descriptors;
                    const auto dsl = descriptor_set->GetLayout();
                    const auto binding_count = dsl->GetBindingCount();
                    const auto &limits = phys_dev_props.limits;
                    for (uint32_t binding_idx = 0; binding_idx < binding_count; binding_idx++) {
                        const auto *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
                        skip |= ValidateDynamicOffsetAlignment(commandBuffer, binding, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                                                               limits.minUniformBufferOffsetAlignment, pDynamicOffsets,
                                                               "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971",
                                                               "minUniformBufferOffsetAlignment", &cur_dyn_offset);
                        skip |= ValidateDynamicOffsetAlignment(commandBuffer, binding, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
                                                               limits.minStorageBufferOffsetAlignment, pDynamicOffsets,
                                                               "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972",
                                                               "minStorageBufferOffsetAlignment", &cur_dyn_offset);
                    }
                    // Keep running total of dynamic descriptor count to verify at the end
                    total_dynamic_descriptors += set_dynamic_descriptor_count;
                }
            }
        } else {
            skip |= LogError(pDescriptorSets[set_idx], kVUID_Core_DrawState_InvalidSet,
                             "vkCmdBindDescriptorSets(): Attempt to bind %s that doesn't exist!",
                             report_data->FormatHandle(pDescriptorSets[set_idx]).c_str());
        }
    }
    // dynamicOffsetCount must equal the total number of dynamic descriptors in the sets being bound
    if (total_dynamic_descriptors != dynamicOffsetCount) {
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
                         "vkCmdBindDescriptorSets(): Attempting to bind %u descriptorSets with %u dynamic descriptors, but "
                         "dynamicOffsetCount is %u. It should "
                         "exactly match the number of dynamic descriptors.",
                         setCount, total_dynamic_descriptors, dynamicOffsetCount);
    }
    // firstSet and descriptorSetCount sum must be less than setLayoutCount
    if ((firstSet + setCount) > static_cast<uint32_t>(pipeline_layout->set_layouts.size())) {
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBindDescriptorSets-firstSet-00360",
                         "vkCmdBindDescriptorSets(): Sum of firstSet (%u) and descriptorSetCount (%u) is greater than "
                         "VkPipelineLayoutCreateInfo::setLayoutCount "
                         "(%zu) when pipeline layout was created",
                         firstSet, setCount, pipeline_layout->set_layouts.size());
    }
    return skip;
}
6361
6362 // Validates that the supplied bind point is supported for the command buffer (vis. the command pool)
6363 // Takes array of error codes as some of the VUID's (e.g. vkCmdBindPipeline) are written per bindpoint
6364 // TODO add vkCmdBindPipeline bind_point validation using this call.
ValidatePipelineBindPoint(const CMD_BUFFER_STATE * cb_state,VkPipelineBindPoint bind_point,const char * func_name,const std::map<VkPipelineBindPoint,std::string> & bind_errors) const6365 bool CoreChecks::ValidatePipelineBindPoint(const CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point, const char *func_name,
6366 const std::map<VkPipelineBindPoint, std::string> &bind_errors) const {
6367 bool skip = false;
6368 auto pool = cb_state->command_pool.get();
6369 if (pool) { // The loss of a pool in a recording cmd is reported in DestroyCommandPool
6370 static const std::map<VkPipelineBindPoint, VkQueueFlags> flag_mask = {
6371 std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT)),
6372 std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, static_cast<VkQueueFlags>(VK_QUEUE_COMPUTE_BIT)),
6373 std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
6374 static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT)),
6375 };
6376 const auto &qfp = GetPhysicalDeviceState()->queue_family_properties[pool->queueFamilyIndex];
6377 if (0 == (qfp.queueFlags & flag_mask.at(bind_point))) {
6378 const std::string &error = bind_errors.at(bind_point);
6379 LogObjectList objlist(cb_state->commandBuffer);
6380 objlist.add(cb_state->createInfo.commandPool);
6381 skip |= LogError(objlist, error, "%s: %s was allocated from %s that does not support bindpoint %s.", func_name,
6382 report_data->FormatHandle(cb_state->commandBuffer).c_str(),
6383 report_data->FormatHandle(cb_state->createInfo.commandPool).c_str(),
6384 string_VkPipelineBindPoint(bind_point));
6385 }
6386 }
6387 return skip;
6388 }
6389
PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites) const6390 bool CoreChecks::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
6391 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
6392 const VkWriteDescriptorSet *pDescriptorWrites) const {
6393 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6394 assert(cb_state);
6395 const char *func_name = "vkCmdPushDescriptorSetKHR()";
6396 bool skip = false;
6397 skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETKHR, func_name);
6398 skip |= ValidateCmdQueueFlags(cb_state, func_name, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT),
6399 "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
6400
6401 static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
6402 std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
6403 std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
6404 std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363")};
6405
6406 skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, func_name, bind_errors);
6407 const auto layout_data = GetPipelineLayout(layout);
6408
6409 // Validate the set index points to a push descriptor set and is in range
6410 if (layout_data) {
6411 const auto &set_layouts = layout_data->set_layouts;
6412 if (set < set_layouts.size()) {
6413 const auto dsl = set_layouts[set];
6414 if (dsl) {
6415 if (!dsl->IsPushDescriptor()) {
6416 skip = LogError(layout, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
6417 "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name,
6418 set, report_data->FormatHandle(layout).c_str());
6419 } else {
6420 // Create an empty proxy in order to use the existing descriptor set update validation
6421 // TODO move the validation (like this) that doesn't need descriptor set state to the DSL object so we
6422 // don't have to do this.
6423 cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, nullptr, dsl, 0, this);
6424 skip |= ValidatePushDescriptorsUpdate(&proxy_ds, descriptorWriteCount, pDescriptorWrites, func_name);
6425 }
6426 }
6427 } else {
6428 skip = LogError(layout, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
6429 "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
6430 report_data->FormatHandle(layout).c_str(), static_cast<uint32_t>(set_layouts.size()));
6431 }
6432 }
6433
6434 return skip;
6435 }
6436
PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType) const6437 bool CoreChecks::PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
6438 VkIndexType indexType) const {
6439 const auto buffer_state = GetBufferState(buffer);
6440 const auto cb_node = GetCBState(commandBuffer);
6441 assert(buffer_state);
6442 assert(cb_node);
6443
6444 bool skip =
6445 ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, true, "VUID-vkCmdBindIndexBuffer-buffer-00433",
6446 "vkCmdBindIndexBuffer()", "VK_BUFFER_USAGE_INDEX_BUFFER_BIT");
6447 skip |= ValidateCmdQueueFlags(cb_node, "vkCmdBindIndexBuffer()", VK_QUEUE_GRAPHICS_BIT,
6448 "VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool");
6449 skip |= ValidateCmd(cb_node, CMD_BINDINDEXBUFFER, "vkCmdBindIndexBuffer()");
6450 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindIndexBuffer()", "VUID-vkCmdBindIndexBuffer-buffer-00434");
6451 const auto offset_align = GetIndexAlignment(indexType);
6452 if (offset % offset_align) {
6453 skip |= LogError(commandBuffer, "VUID-vkCmdBindIndexBuffer-offset-00432",
6454 "vkCmdBindIndexBuffer() offset (0x%" PRIxLEAST64 ") does not fall on alignment (%s) boundary.", offset,
6455 string_VkIndexType(indexType));
6456 }
6457 if (offset >= buffer_state->requirements.size) {
6458 skip |= LogError(commandBuffer, "VUID-vkCmdBindIndexBuffer-offset-00431",
6459 "vkCmdBindIndexBuffer() offset (0x%" PRIxLEAST64 ") is not less than the size (0x%" PRIxLEAST64
6460 ") of buffer (%s).",
6461 offset, buffer_state->requirements.size, report_data->FormatHandle(buffer_state->buffer).c_str());
6462 }
6463
6464 return skip;
6465 }
6466
PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets) const6467 bool CoreChecks::PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
6468 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) const {
6469 const auto cb_state = GetCBState(commandBuffer);
6470 assert(cb_state);
6471
6472 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindVertexBuffers()", VK_QUEUE_GRAPHICS_BIT,
6473 "VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool");
6474 skip |= ValidateCmd(cb_state, CMD_BINDVERTEXBUFFERS, "vkCmdBindVertexBuffers()");
6475 for (uint32_t i = 0; i < bindingCount; ++i) {
6476 const auto buffer_state = GetBufferState(pBuffers[i]);
6477 if (buffer_state) {
6478 skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, true,
6479 "VUID-vkCmdBindVertexBuffers-pBuffers-00627", "vkCmdBindVertexBuffers()",
6480 "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT");
6481 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindVertexBuffers()",
6482 "VUID-vkCmdBindVertexBuffers-pBuffers-00628");
6483 if (pOffsets[i] >= buffer_state->createInfo.size) {
6484 skip |=
6485 LogError(buffer_state->buffer, "VUID-vkCmdBindVertexBuffers-pOffsets-00626",
6486 "vkCmdBindVertexBuffers() offset (0x%" PRIxLEAST64 ") is beyond the end of the buffer.", pOffsets[i]);
6487 }
6488 }
6489 }
6490 return skip;
6491 }
6492
6493 // Validate that an image's sampleCount matches the requirement for a specific API call
ValidateImageSampleCount(const IMAGE_STATE * image_state,VkSampleCountFlagBits sample_count,const char * location,const std::string & msgCode) const6494 bool CoreChecks::ValidateImageSampleCount(const IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count, const char *location,
6495 const std::string &msgCode) const {
6496 bool skip = false;
6497 if (image_state->createInfo.samples != sample_count) {
6498 skip = LogError(image_state->image, msgCode, "%s for %s was created with a sample count of %s but must be %s.", location,
6499 report_data->FormatHandle(image_state->image).c_str(),
6500 string_VkSampleCountFlagBits(image_state->createInfo.samples), string_VkSampleCountFlagBits(sample_count));
6501 }
6502 return skip;
6503 }
6504
PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize dataSize,const void * pData) const6505 bool CoreChecks::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
6506 VkDeviceSize dataSize, const void *pData) const {
6507 const auto cb_state = GetCBState(commandBuffer);
6508 assert(cb_state);
6509 const auto dst_buffer_state = GetBufferState(dstBuffer);
6510 assert(dst_buffer_state);
6511
6512 bool skip = false;
6513 skip |= ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-dstBuffer-00035");
6514 // Validate that DST buffer has correct usage flags set
6515 skip |=
6516 ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdUpdateBuffer-dstBuffer-00034",
6517 "vkCmdUpdateBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
6518 skip |=
6519 ValidateCmdQueueFlags(cb_state, "vkCmdUpdateBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
6520 "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool");
6521 skip |= ValidateCmd(cb_state, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
6522 skip |= InsideRenderPass(cb_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-renderpass");
6523 skip |=
6524 ValidateProtectedBuffer(cb_state, dst_buffer_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-commandBuffer-01813");
6525 skip |=
6526 ValidateUnprotectedBuffer(cb_state, dst_buffer_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-commandBuffer-01814");
6527 return skip;
6528 }
6529
PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask) const6530 bool CoreChecks::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const {
6531 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6532 assert(cb_state);
6533 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
6534 "VUID-vkCmdSetEvent-commandBuffer-cmdpool");
6535 skip |= ValidateCmd(cb_state, CMD_SETEVENT, "vkCmdSetEvent()");
6536 skip |= InsideRenderPass(cb_state, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-renderpass");
6537 skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-stageMask-04090",
6538 "VUID-vkCmdSetEvent-stageMask-04091", "VUID-vkCmdSetEvent-stageMask-04095",
6539 "VUID-vkCmdSetEvent-stageMask-04096");
6540 skip |= ValidateStageMaskHost(stageMask, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-stageMask-01149");
6541 return skip;
6542 }
6543
PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask) const6544 bool CoreChecks::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const {
6545 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6546 assert(cb_state);
6547
6548 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdResetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
6549 "VUID-vkCmdResetEvent-commandBuffer-cmdpool");
6550 skip |= ValidateCmd(cb_state, CMD_RESETEVENT, "vkCmdResetEvent()");
6551 skip |= InsideRenderPass(cb_state, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-renderpass");
6552 skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-stageMask-04090",
6553 "VUID-vkCmdResetEvent-stageMask-04091", "VUID-vkCmdResetEvent-stageMask-04095",
6554 "VUID-vkCmdResetEvent-stageMask-04096");
6555 skip |= ValidateStageMaskHost(stageMask, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-stageMask-01153");
6556 return skip;
6557 }
6558
6559 // Return input pipeline stage flags, expanded for individual bits if VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT is set
ExpandPipelineStageFlags(const DeviceExtensions & extensions,VkPipelineStageFlags inflags)6560 static VkPipelineStageFlags ExpandPipelineStageFlags(const DeviceExtensions &extensions, VkPipelineStageFlags inflags) {
6561 if (~inflags & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) return inflags;
6562
6563 return (inflags & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) |
6564 (VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
6565 (extensions.vk_nv_mesh_shader ? (VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV | VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV) : 0) |
6566 VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
6567 VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT |
6568 VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
6569 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
6570 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
6571 (extensions.vk_ext_conditional_rendering ? VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT : 0) |
6572 (extensions.vk_ext_transform_feedback ? VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT : 0) |
6573 (extensions.vk_nv_shading_rate_image ? VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV : 0) |
6574 (extensions.vk_ext_fragment_density_map ? VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT : 0) |
6575 (extensions.vk_ext_fragment_density_map_2 ? VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT : 0));
6576 }
6577
HasNonFramebufferStagePipelineStageFlags(VkPipelineStageFlags inflags)6578 static bool HasNonFramebufferStagePipelineStageFlags(VkPipelineStageFlags inflags) {
6579 return (inflags & ~(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
6580 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT)) != 0;
6581 }
6582
GetGraphicsPipelineStageLogicalOrdinal(VkPipelineStageFlagBits flag)6583 static int GetGraphicsPipelineStageLogicalOrdinal(VkPipelineStageFlagBits flag) {
6584 // Note that the list (and lookup) ignore invalid-for-enabled-extension condition. This should be checked elsewhere
6585 // and would greatly complicate this intentionally simple implementation
6586 // clang-format off
6587 const VkPipelineStageFlagBits ordered_array[] = {
6588 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
6589 VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
6590 VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
6591 VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
6592 VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
6593 VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
6594 VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
6595 VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
6596
6597 // Including the task/mesh shaders here is not technically correct, as they are in a
6598 // separate logical pipeline - but it works for the case this is currently used, and
6599 // fixing it would require significant rework and end up with the code being far more
6600 // verbose for no practical gain.
6601 // However, worth paying attention to this if using this function in a new way.
6602 VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
6603 VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
6604
6605 VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
6606 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
6607 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
6608 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
6609 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
6610 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
6611 };
6612 // clang-format on
6613
6614 const int ordered_array_length = sizeof(ordered_array) / sizeof(VkPipelineStageFlagBits);
6615
6616 for (int i = 0; i < ordered_array_length; ++i) {
6617 if (ordered_array[i] == flag) {
6618 return i;
6619 }
6620 }
6621
6622 return -1;
6623 }
6624
6625 // The following two functions technically have O(N^2) complexity, but it's for a value of O that's largely
6626 // stable and also rather tiny - this could definitely be rejigged to work more efficiently, but the impact
6627 // on runtime is currently negligible, so it wouldn't gain very much.
6628 // If we add a lot more graphics pipeline stages, this set of functions should be rewritten to accomodate.
GetLogicallyEarliestGraphicsPipelineStage(VkPipelineStageFlags inflags)6629 static VkPipelineStageFlagBits GetLogicallyEarliestGraphicsPipelineStage(VkPipelineStageFlags inflags) {
6630 VkPipelineStageFlagBits earliest_bit = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
6631 int earliest_bit_order = GetGraphicsPipelineStageLogicalOrdinal(earliest_bit);
6632
6633 for (std::size_t i = 0; i < sizeof(VkPipelineStageFlagBits); ++i) {
6634 VkPipelineStageFlagBits current_flag = (VkPipelineStageFlagBits)((inflags & 0x1u) << i);
6635 if (current_flag) {
6636 int new_order = GetGraphicsPipelineStageLogicalOrdinal(current_flag);
6637 if (new_order != -1 && new_order < earliest_bit_order) {
6638 earliest_bit_order = new_order;
6639 earliest_bit = current_flag;
6640 }
6641 }
6642 inflags = inflags >> 1;
6643 }
6644 return earliest_bit;
6645 }
6646
GetLogicallyLatestGraphicsPipelineStage(VkPipelineStageFlags inflags)6647 static VkPipelineStageFlagBits GetLogicallyLatestGraphicsPipelineStage(VkPipelineStageFlags inflags) {
6648 VkPipelineStageFlagBits latest_bit = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
6649 int latest_bit_order = GetGraphicsPipelineStageLogicalOrdinal(latest_bit);
6650
6651 for (std::size_t i = 0; i < sizeof(VkPipelineStageFlagBits); ++i) {
6652 if (inflags & 0x1u) {
6653 int new_order = GetGraphicsPipelineStageLogicalOrdinal((VkPipelineStageFlagBits)((inflags & 0x1u) << i));
6654 if (new_order != -1 && new_order > latest_bit_order) {
6655 latest_bit_order = new_order;
6656 latest_bit = (VkPipelineStageFlagBits)((inflags & 0x1u) << i);
6657 }
6658 }
6659 inflags = inflags >> 1;
6660 }
6661 return latest_bit;
6662 }
6663
6664 // Verify image barrier image state and that the image is consistent with FB image
// Verify image barrier image state and that the image is consistent with FB image
// Within a render pass instance, an image barrier must target an image that backs a
// framebuffer attachment referenced by the active subpass, and its old/new layouts
// must both equal the layout the subpass declares for that attachment.
bool CoreChecks::ValidateImageBarrierAttachment(const char *funcName, CMD_BUFFER_STATE const *cb_state,
                                                const FRAMEBUFFER_STATE *framebuffer, uint32_t active_subpass,
                                                const safe_VkSubpassDescription2 &sub_desc, const VkRenderPass rp_handle,
                                                uint32_t img_index, const VkImageMemoryBarrier &img_barrier) const {
    bool skip = false;
    const auto *fb_state = framebuffer;
    assert(fb_state);
    const auto img_bar_image = img_barrier.image;
    bool image_match = false;
    bool sub_image_found = false;  // Do we find a corresponding subpass description
    VkImageLayout sub_image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
    uint32_t attach_index = 0;
    // Verify that a framebuffer image matches barrier image
    const auto attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
        if (view_state && (img_bar_image == view_state->create_info.image)) {
            image_match = true;
            attach_index = attachment;
            break;  // first matching attachment wins
        }
    }
    if (image_match) {  // Make sure subpass is referring to matching attachment
        // Check in order: depth/stencil attachment, depth/stencil resolve attachment
        // (KHR_depth_stencil_resolve), then color and color-resolve attachments.
        if (sub_desc.pDepthStencilAttachment && sub_desc.pDepthStencilAttachment->attachment == attach_index) {
            sub_image_layout = sub_desc.pDepthStencilAttachment->layout;
            sub_image_found = true;
        }
        if (!sub_image_found && device_extensions.vk_khr_depth_stencil_resolve) {
            const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(sub_desc.pNext);
            if (resolve && resolve->pDepthStencilResolveAttachment &&
                resolve->pDepthStencilResolveAttachment->attachment == attach_index) {
                sub_image_layout = resolve->pDepthStencilResolveAttachment->layout;
                sub_image_found = true;
            }
        }
        if (!sub_image_found) {
            for (uint32_t j = 0; j < sub_desc.colorAttachmentCount; ++j) {
                if (sub_desc.pColorAttachments && sub_desc.pColorAttachments[j].attachment == attach_index) {
                    sub_image_layout = sub_desc.pColorAttachments[j].layout;
                    sub_image_found = true;
                    break;
                }
                if (!sub_image_found && sub_desc.pResolveAttachments &&
                    sub_desc.pResolveAttachments[j].attachment == attach_index) {
                    sub_image_layout = sub_desc.pResolveAttachments[j].layout;
                    sub_image_found = true;
                    break;
                }
            }
        }
        if (!sub_image_found) {
            // Image is in the framebuffer but not referenced by the active subpass
            skip |= LogError(rp_handle, "VUID-vkCmdPipelineBarrier-image-04073",
                             "%s: Barrier pImageMemoryBarriers[%d].%s is not referenced by the VkSubpassDescription for "
                             "active subpass (%d) of current %s.",
                             funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
                             report_data->FormatHandle(rp_handle).c_str());
        }
    } else {  // !image_match
        skip |=
            LogError(fb_state->framebuffer, "VUID-vkCmdPipelineBarrier-image-04073",
                     "%s: Barrier pImageMemoryBarriers[%d].%s does not match an image from the current %s.", funcName, img_index,
                     report_data->FormatHandle(img_bar_image).c_str(), report_data->FormatHandle(fb_state->framebuffer).c_str());
    }
    // Inside a render pass instance no layout transition may occur: oldLayout must equal
    // newLayout, and when the attachment was found it must also equal the subpass layout.
    if (img_barrier.oldLayout != img_barrier.newLayout) {
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-oldLayout-01181",
                         "%s: As the Image Barrier for %s is being executed within a render pass instance, oldLayout must "
                         "equal newLayout yet they are %s and %s.",
                         funcName, report_data->FormatHandle(img_barrier.image).c_str(),
                         string_VkImageLayout(img_barrier.oldLayout), string_VkImageLayout(img_barrier.newLayout));
    } else {
        if (sub_image_found && sub_image_layout != img_barrier.oldLayout) {
            LogObjectList objlist(rp_handle);
            objlist.add(img_bar_image);
            skip |= LogError(objlist, "VUID-vkCmdPipelineBarrier-oldLayout-01181",
                             "%s: Barrier pImageMemoryBarriers[%d].%s is referenced by the VkSubpassDescription for active "
                             "subpass (%d) of current %s as having layout %s, but image barrier has layout %s.",
                             funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
                             report_data->FormatHandle(rp_handle).c_str(), string_VkImageLayout(sub_image_layout),
                             string_VkImageLayout(img_barrier.oldLayout));
        }
    }
    return skip;
}
6748
6749 // Validate image barriers within a renderPass
ValidateRenderPassImageBarriers(const char * funcName,const CMD_BUFFER_STATE * cb_state,uint32_t active_subpass,const safe_VkSubpassDescription2 & sub_desc,const VkRenderPass rp_handle,const safe_VkSubpassDependency2 * dependencies,const std::vector<uint32_t> & self_dependencies,uint32_t image_mem_barrier_count,const VkImageMemoryBarrier * image_barriers) const6750 bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, const CMD_BUFFER_STATE *cb_state, uint32_t active_subpass,
6751 const safe_VkSubpassDescription2 &sub_desc, const VkRenderPass rp_handle,
6752 const safe_VkSubpassDependency2 *dependencies,
6753 const std::vector<uint32_t> &self_dependencies, uint32_t image_mem_barrier_count,
6754 const VkImageMemoryBarrier *image_barriers) const {
6755 bool skip = false;
6756 for (uint32_t i = 0; i < image_mem_barrier_count; ++i) {
6757 const auto &img_barrier = image_barriers[i];
6758 const auto &img_src_access_mask = img_barrier.srcAccessMask;
6759 const auto &img_dst_access_mask = img_barrier.dstAccessMask;
6760 bool access_mask_match = false;
6761 for (const auto self_dep_index : self_dependencies) {
6762 const auto &sub_dep = dependencies[self_dep_index];
6763 access_mask_match = (img_src_access_mask == (sub_dep.srcAccessMask & img_src_access_mask)) &&
6764 (img_dst_access_mask == (sub_dep.dstAccessMask & img_dst_access_mask));
6765 if (access_mask_match) break;
6766 }
6767 if (!access_mask_match) {
6768 std::stringstream self_dep_ss;
6769 stream_join(self_dep_ss, ", ", self_dependencies);
6770 skip |= LogError(rp_handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6771 "%s: Barrier pImageMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency "
6772 "srcAccessMask of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
6773 funcName, i, img_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
6774 self_dep_ss.str().c_str());
6775 skip |= LogError(rp_handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6776 "%s: Barrier pImageMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency "
6777 "dstAccessMask of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
6778 funcName, i, img_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
6779 self_dep_ss.str().c_str());
6780 }
6781 if (VK_QUEUE_FAMILY_IGNORED != img_barrier.srcQueueFamilyIndex ||
6782 VK_QUEUE_FAMILY_IGNORED != img_barrier.dstQueueFamilyIndex) {
6783 skip |= LogError(rp_handle, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
6784 "%s: Barrier pImageMemoryBarriers[%d].srcQueueFamilyIndex is %d and "
6785 "pImageMemoryBarriers[%d].dstQueueFamilyIndex is %d but both must be VK_QUEUE_FAMILY_IGNORED.",
6786 funcName, i, img_barrier.srcQueueFamilyIndex, i, img_barrier.dstQueueFamilyIndex);
6787 }
6788 // Secondary CBs can have null framebuffer so record will queue up validation in that case 'til FB is known
6789 if (VK_NULL_HANDLE != cb_state->activeFramebuffer) {
6790 skip |= ValidateImageBarrierAttachment(funcName, cb_state, cb_state->activeFramebuffer.get(), active_subpass, sub_desc,
6791 rp_handle, i, img_barrier);
6792 }
6793 }
6794 return skip;
6795 }
6796
6797 // Validate VUs for Pipeline Barriers that are within a renderPass
6798 // Pre: cb_state->activeRenderPass must be a pointer to valid renderPass state
ValidateRenderPassPipelineBarriers(const char * funcName,const CMD_BUFFER_STATE * cb_state,VkPipelineStageFlags src_stage_mask,VkPipelineStageFlags dst_stage_mask,VkDependencyFlags dependency_flags,uint32_t mem_barrier_count,const VkMemoryBarrier * mem_barriers,uint32_t buffer_mem_barrier_count,const VkBufferMemoryBarrier * buffer_mem_barriers,uint32_t image_mem_barrier_count,const VkImageMemoryBarrier * image_barriers) const6799 bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, const CMD_BUFFER_STATE *cb_state,
6800 VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
6801 VkDependencyFlags dependency_flags, uint32_t mem_barrier_count,
6802 const VkMemoryBarrier *mem_barriers, uint32_t buffer_mem_barrier_count,
6803 const VkBufferMemoryBarrier *buffer_mem_barriers,
6804 uint32_t image_mem_barrier_count,
6805 const VkImageMemoryBarrier *image_barriers) const {
6806 bool skip = false;
6807 const auto rp_state = cb_state->activeRenderPass;
6808 const auto active_subpass = cb_state->activeSubpass;
6809 const auto &self_dependencies = rp_state->self_dependencies[active_subpass];
6810 const auto &dependencies = rp_state->createInfo.pDependencies;
6811 if (self_dependencies.size() == 0) {
6812 skip |= LogError(rp_state->renderPass, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6813 "%s: Barriers cannot be set during subpass %d of %s with no self-dependency specified.", funcName,
6814 active_subpass, report_data->FormatHandle(rp_state->renderPass).c_str());
6815 } else {
6816 // Grab ref to current subpassDescription up-front for use below
6817 const auto &sub_desc = rp_state->createInfo.pSubpasses[active_subpass];
6818 // Look for matching mask in any self-dependency
6819 bool stage_mask_match = false;
6820 for (const auto self_dep_index : self_dependencies) {
6821 const auto &sub_dep = dependencies[self_dep_index];
6822 const auto &sub_src_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.srcStageMask);
6823 const auto &sub_dst_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.dstStageMask);
6824 stage_mask_match = ((sub_src_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
6825 (src_stage_mask == (sub_src_stage_mask & src_stage_mask))) &&
6826 ((sub_dst_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
6827 (dst_stage_mask == (sub_dst_stage_mask & dst_stage_mask)));
6828 if (stage_mask_match) break;
6829 }
6830 if (!stage_mask_match) {
6831 std::stringstream self_dep_ss;
6832 stream_join(self_dep_ss, ", ", self_dependencies);
6833 skip |= LogError(rp_state->renderPass, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6834 "%s: Barrier srcStageMask(0x%X) is not a subset of VkSubpassDependency srcStageMask of any "
6835 "self-dependency of subpass %d of %s for which dstStageMask is also a subset. "
6836 "Candidate VkSubpassDependency are pDependencies entries [%s].",
6837 funcName, src_stage_mask, active_subpass, report_data->FormatHandle(rp_state->renderPass).c_str(),
6838 self_dep_ss.str().c_str());
6839 skip |= LogError(rp_state->renderPass, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6840 "%s: Barrier dstStageMask(0x%X) is not a subset of VkSubpassDependency dstStageMask of any "
6841 "self-dependency of subpass %d of %s for which srcStageMask is also a subset. "
6842 "Candidate VkSubpassDependency are pDependencies entries [%s].",
6843 funcName, dst_stage_mask, active_subpass, report_data->FormatHandle(rp_state->renderPass).c_str(),
6844 self_dep_ss.str().c_str());
6845 }
6846
6847 if (0 != buffer_mem_barrier_count) {
6848 skip |= LogError(rp_state->renderPass, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
6849 "%s: bufferMemoryBarrierCount is non-zero (%d) for subpass %d of %s.", funcName,
6850 buffer_mem_barrier_count, active_subpass, report_data->FormatHandle(rp_state->renderPass).c_str());
6851 }
6852 for (uint32_t i = 0; i < mem_barrier_count; ++i) {
6853 const auto &mb_src_access_mask = mem_barriers[i].srcAccessMask;
6854 const auto &mb_dst_access_mask = mem_barriers[i].dstAccessMask;
6855 bool access_mask_match = false;
6856 for (const auto self_dep_index : self_dependencies) {
6857 const auto &sub_dep = dependencies[self_dep_index];
6858 access_mask_match = (mb_src_access_mask == (sub_dep.srcAccessMask & mb_src_access_mask)) &&
6859 (mb_dst_access_mask == (sub_dep.dstAccessMask & mb_dst_access_mask));
6860 if (access_mask_match) break;
6861 }
6862
6863 if (!access_mask_match) {
6864 std::stringstream self_dep_ss;
6865 stream_join(self_dep_ss, ", ", self_dependencies);
6866 skip |= LogError(
6867 rp_state->renderPass, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6868 "%s: Barrier pMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency srcAccessMask "
6869 "for any self-dependency of subpass %d of %s for which dstAccessMask is also a subset. "
6870 "Candidate VkSubpassDependency are pDependencies entries [%s].",
6871 funcName, i, mb_src_access_mask, active_subpass, report_data->FormatHandle(rp_state->renderPass).c_str(),
6872 self_dep_ss.str().c_str());
6873 skip |= LogError(
6874 rp_state->renderPass, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6875 "%s: Barrier pMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency dstAccessMask "
6876 "for any self-dependency of subpass %d of %s for which srcAccessMask is also a subset. "
6877 "Candidate VkSubpassDependency are pDependencies entries [%s].",
6878 funcName, i, mb_dst_access_mask, active_subpass, report_data->FormatHandle(rp_state->renderPass).c_str(),
6879 self_dep_ss.str().c_str());
6880 }
6881 }
6882
6883 skip |= ValidateRenderPassImageBarriers(funcName, cb_state, active_subpass, sub_desc, rp_state->renderPass, dependencies,
6884 self_dependencies, image_mem_barrier_count, image_barriers);
6885
6886 bool flag_match = false;
6887 for (const auto self_dep_index : self_dependencies) {
6888 const auto &sub_dep = dependencies[self_dep_index];
6889 flag_match = sub_dep.dependencyFlags == dependency_flags;
6890 if (flag_match) break;
6891 }
6892 if (!flag_match) {
6893 std::stringstream self_dep_ss;
6894 stream_join(self_dep_ss, ", ", self_dependencies);
6895 skip |= LogError(rp_state->renderPass, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
6896 "%s: dependencyFlags param (0x%X) does not equal VkSubpassDependency dependencyFlags value for any "
6897 "self-dependency of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
6898 funcName, dependency_flags, cb_state->activeSubpass,
6899 report_data->FormatHandle(rp_state->renderPass).c_str(), self_dep_ss.str().c_str());
6900 }
6901 }
6902 return skip;
6903 }
6904
// Array to mask individual accessMask to corresponding stageMask
// accessMask active bit position (0-31) maps to index
// Used by ValidateAccessMaskPipelineStage to check that each access flag in a barrier is
// supported by at least one stage in the (expanded) stage mask.
// NOTE: the VK_ACCESS_MEMORY_READ_BIT / VK_ACCESS_MEMORY_WRITE_BIT entries deliberately use
// VK_ACCESS_FLAG_BITS_MAX_ENUM (all bits set) as an "always match" sentinel, since those
// access types are valid with any pipeline stage.
const static VkPipelineStageFlags AccessMaskToPipeStage[28] = {
    // VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0
    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
    // VK_ACCESS_INDEX_READ_BIT = 1
    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
    // VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 2
    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
    // VK_ACCESS_UNIFORM_READ_BIT = 3
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    // VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 4
    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
    // VK_ACCESS_SHADER_READ_BIT = 5
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    // VK_ACCESS_SHADER_WRITE_BIT = 6
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    // VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 7
    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    // VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 8
    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 9
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
    // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 10
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
    // VK_ACCESS_TRANSFER_READ_BIT = 11
    VK_PIPELINE_STAGE_TRANSFER_BIT,
    // VK_ACCESS_TRANSFER_WRITE_BIT = 12
    VK_PIPELINE_STAGE_TRANSFER_BIT,
    // VK_ACCESS_HOST_READ_BIT = 13
    VK_PIPELINE_STAGE_HOST_BIT,
    // VK_ACCESS_HOST_WRITE_BIT = 14
    VK_PIPELINE_STAGE_HOST_BIT,
    // VK_ACCESS_MEMORY_READ_BIT = 15
    VK_ACCESS_FLAG_BITS_MAX_ENUM,  // Always match
    // VK_ACCESS_MEMORY_WRITE_BIT = 16
    VK_ACCESS_FLAG_BITS_MAX_ENUM,  // Always match
    // VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 17
    VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
    // VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 18
    VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
    // VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 19
    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    // VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 20
    VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
    // VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 21
    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV | VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
    // VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 22
    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
    // VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 23
    VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
    // VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 24
    VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
    // VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 25
    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
    // VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 26
    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
    // VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 27
    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
};
6974
6975 // Verify that all bits of access_mask are supported by the src_stage_mask
ValidateAccessMaskPipelineStage(const DeviceExtensions & extensions,VkAccessFlags access_mask,VkPipelineStageFlags stage_mask)6976 static bool ValidateAccessMaskPipelineStage(const DeviceExtensions &extensions, VkAccessFlags access_mask,
6977 VkPipelineStageFlags stage_mask) {
6978 // Early out if all commands set, or access_mask NULL
6979 if ((stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) || (0 == access_mask)) return true;
6980
6981 stage_mask = ExpandPipelineStageFlags(extensions, stage_mask);
6982 int index = 0;
6983 // for each of the set bits in access_mask, make sure that supporting stage mask bit(s) are set
6984 while (access_mask) {
6985 index = (u_ffs(access_mask) - 1);
6986 assert(index >= 0);
6987 // Must have "!= 0" compare to prevent warning from MSVC
6988 if ((AccessMaskToPipeStage[index] & stage_mask) == 0) return false; // early out
6989 access_mask &= ~(1 << index); // Mask off bit that's been checked
6990 }
6991 return true;
6992 }
6993
6994 namespace barrier_queue_families {
6995 enum VuIndex {
6996 kSrcOrDstMustBeIgnore,
6997 kSpecialOrIgnoreOnly,
6998 kSrcAndDstValidOrSpecial,
6999 kSrcAndDestMustBeIgnore,
7000 kSrcAndDstBothValid,
7001 kSubmitQueueMustMatchSrcOrDst
7002 };
7003 static const char *vu_summary[] = {"Source or destination queue family must be ignored.",
7004 "Source or destination queue family must be special or ignored.",
7005 "Destination queue family must be ignored if source queue family is.",
7006 "Destination queue family must be valid, ignored, or special.",
7007 "Source queue family must be valid, ignored, or special.",
7008 "Source and destination queue family must both be ignored.",
7009 "Source and destination queue family must both be ignore or both valid.",
7010 "Source or destination queue family must match submit queue family, if not ignored."};
7011
// Per-rule VUID strings, indexed by VuIndex.  Image and buffer memory barriers report
// different VUIDs for the same logical rule, so ValidatorState picks one of these tables
// based on the barrier's handle type.  The submit-time rule has no spec-assigned VUID,
// hence the UNASSIGNED code.
static const std::string image_error_codes[] = {
    "VUID-VkImageMemoryBarrier-image-01381",                                              //   kSrcOrDstMustBeIgnore
    "VUID-VkImageMemoryBarrier-image-04071",                                              //   kSpecialOrIgnoreOnly
    "VUID-VkImageMemoryBarrier-image-04072",                                              //   kSrcAndDstValidOrSpecial
    "VUID-VkImageMemoryBarrier-image-01199",                                              //   kSrcAndDestMustBeIgnore
    "VUID-VkImageMemoryBarrier-image-04069",                                              //   kSrcAndDstBothValid
    "UNASSIGNED-CoreValidation-vkImageMemoryBarrier-sharing-mode-exclusive-same-family",  //   kSubmitQueueMustMatchSrcOrDst
};

static const std::string buffer_error_codes[] = {
    "VUID-VkBufferMemoryBarrier-buffer-01191",                                             //  kSrcOrDstMustBeIgnore
    "VUID-VkBufferMemoryBarrier-buffer-04088",                                             //  kSpecialOrIgnoreOnly
    "VUID-VkBufferMemoryBarrier-buffer-04089",                                             //  kSrcAndDstValidOrSpecial
    "VUID-VkBufferMemoryBarrier-buffer-01190",                                             //  kSrcAndDestMustBeIgnore
    "VUID-VkBufferMemoryBarrier-buffer-04086",                                             //  kSrcAndDstBothValid
    "UNASSIGNED-CoreValidation-vkBufferMemoryBarrier-sharing-mode-exclusive-same-family",  //  kSubmitQueueMustMatchSrcOrDst
};
7029
7030 class ValidatorState {
7031 public:
ValidatorState(const ValidationStateTracker * device_data,const char * func_name,const CMD_BUFFER_STATE * cb_state,const VulkanTypedHandle & barrier_handle,const VkSharingMode sharing_mode)7032 ValidatorState(const ValidationStateTracker *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
7033 const VulkanTypedHandle &barrier_handle, const VkSharingMode sharing_mode)
7034 : device_data_(device_data),
7035 func_name_(func_name),
7036 command_buffer_(cb_state->commandBuffer),
7037 barrier_handle_(barrier_handle),
7038 sharing_mode_(sharing_mode),
7039 val_codes_(barrier_handle.type == kVulkanObjectTypeImage ? image_error_codes : buffer_error_codes),
7040 limit_(static_cast<uint32_t>(device_data->physical_device_state->queue_family_properties.size())),
7041 mem_ext_(IsExtEnabled(device_data->device_extensions.vk_khr_external_memory)) {}
7042
7043 // Log the messages using boilerplate from object state, and Vu specific information from the template arg
7044 // One and two family versions, in the single family version, Vu holds the name of the passed parameter
LogMsg(VuIndex vu_index,uint32_t family,const char * param_name) const7045 bool LogMsg(VuIndex vu_index, uint32_t family, const char *param_name) const {
7046 const std::string &val_code = val_codes_[vu_index];
7047 const char *annotation = GetFamilyAnnotation(family);
7048 return device_data_->LogError(command_buffer_, val_code,
7049 "%s: Barrier using %s %s created with sharingMode %s, has %s %u%s. %s", func_name_,
7050 GetTypeString(), device_data_->report_data->FormatHandle(barrier_handle_).c_str(),
7051 GetModeString(), param_name, family, annotation, vu_summary[vu_index]);
7052 }
7053
LogMsg(VuIndex vu_index,uint32_t src_family,uint32_t dst_family) const7054 bool LogMsg(VuIndex vu_index, uint32_t src_family, uint32_t dst_family) const {
7055 const std::string &val_code = val_codes_[vu_index];
7056 const char *src_annotation = GetFamilyAnnotation(src_family);
7057 const char *dst_annotation = GetFamilyAnnotation(dst_family);
7058 return device_data_->LogError(
7059 command_buffer_, val_code,
7060 "%s: Barrier using %s %s created with sharingMode %s, has srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
7061 func_name_, GetTypeString(), device_data_->report_data->FormatHandle(barrier_handle_).c_str(), GetModeString(),
7062 src_family, src_annotation, dst_family, dst_annotation, vu_summary[vu_index]);
7063 }
7064
7065 // This abstract Vu can only be tested at submit time, thus we need a callback from the closure containing the needed
7066 // data. Note that the mem_barrier is copied to the closure as the lambda lifespan exceed the guarantees of validity for
7067 // application input.
ValidateAtQueueSubmit(const QUEUE_STATE * queue_state,const ValidationStateTracker * device_data,uint32_t src_family,uint32_t dst_family,const ValidatorState & val)7068 static bool ValidateAtQueueSubmit(const QUEUE_STATE *queue_state, const ValidationStateTracker *device_data,
7069 uint32_t src_family, uint32_t dst_family, const ValidatorState &val) {
7070 uint32_t queue_family = queue_state->queueFamilyIndex;
7071 if ((src_family != queue_family) && (dst_family != queue_family)) {
7072 const std::string &val_code = val.val_codes_[kSubmitQueueMustMatchSrcOrDst];
7073 const char *src_annotation = val.GetFamilyAnnotation(src_family);
7074 const char *dst_annotation = val.GetFamilyAnnotation(dst_family);
7075 return device_data->LogError(
7076 queue_state->queue, val_code,
7077 "%s: Barrier submitted to queue with family index %u, using %s %s created with sharingMode %s, has "
7078 "srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
7079 "vkQueueSubmit", queue_family, val.GetTypeString(),
7080 device_data->report_data->FormatHandle(val.barrier_handle_).c_str(), val.GetModeString(), src_family,
7081 src_annotation, dst_family, dst_annotation, vu_summary[kSubmitQueueMustMatchSrcOrDst]);
7082 }
7083 return false;
7084 }
7085 // Logical helpers for semantic clarity
KhrExternalMem() const7086 inline bool KhrExternalMem() const { return mem_ext_; }
IsValid(uint32_t queue_family) const7087 inline bool IsValid(uint32_t queue_family) const { return (queue_family < limit_); }
IsValidOrSpecial(uint32_t queue_family) const7088 inline bool IsValidOrSpecial(uint32_t queue_family) const {
7089 return IsValid(queue_family) || (mem_ext_ && QueueFamilyIsExternal(queue_family));
7090 }
7091
7092 // Helpers for LogMsg
GetModeString() const7093 const char *GetModeString() const { return string_VkSharingMode(sharing_mode_); }
7094
7095 // Descriptive text for the various types of queue family index
GetFamilyAnnotation(uint32_t family) const7096 const char *GetFamilyAnnotation(uint32_t family) const {
7097 const char *external = " (VK_QUEUE_FAMILY_EXTERNAL_KHR)";
7098 const char *foreign = " (VK_QUEUE_FAMILY_FOREIGN_EXT)";
7099 const char *ignored = " (VK_QUEUE_FAMILY_IGNORED)";
7100 const char *valid = " (VALID)";
7101 const char *invalid = " (INVALID)";
7102 switch (family) {
7103 case VK_QUEUE_FAMILY_EXTERNAL_KHR:
7104 return external;
7105 case VK_QUEUE_FAMILY_FOREIGN_EXT:
7106 return foreign;
7107 case VK_QUEUE_FAMILY_IGNORED:
7108 return ignored;
7109 default:
7110 if (IsValid(family)) {
7111 return valid;
7112 }
7113 return invalid;
7114 };
7115 }
GetTypeString() const7116 const char *GetTypeString() const { return object_string[barrier_handle_.type]; }
GetSharingMode() const7117 VkSharingMode GetSharingMode() const { return sharing_mode_; }
7118
7119 protected:
7120 const ValidationStateTracker *device_data_;
7121 const char *const func_name_;
7122 const VkCommandBuffer command_buffer_;
7123 const VulkanTypedHandle barrier_handle_;
7124 const VkSharingMode sharing_mode_;
7125 const std::string *val_codes_;
7126 const uint32_t limit_;
7127 const bool mem_ext_;
7128 };
7129
Validate(const CoreChecks * device_data,const char * func_name,const CMD_BUFFER_STATE * cb_state,const ValidatorState & val,const uint32_t src_queue_family,const uint32_t dst_queue_family)7130 bool Validate(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state, const ValidatorState &val,
7131 const uint32_t src_queue_family, const uint32_t dst_queue_family) {
7132 bool skip = false;
7133
7134 const bool mode_concurrent = val.GetSharingMode() == VK_SHARING_MODE_CONCURRENT;
7135 const bool src_ignored = QueueFamilyIsIgnored(src_queue_family);
7136 const bool dst_ignored = QueueFamilyIsIgnored(dst_queue_family);
7137 if (val.KhrExternalMem()) {
7138 if (mode_concurrent) {
7139 if (!(src_ignored || dst_ignored)) {
7140 skip |= val.LogMsg(kSrcOrDstMustBeIgnore, src_queue_family, dst_queue_family);
7141 }
7142 if ((src_ignored && !(dst_ignored || QueueFamilyIsExternal(dst_queue_family))) ||
7143 (dst_ignored && !(src_ignored || QueueFamilyIsExternal(src_queue_family)))) {
7144 skip |= val.LogMsg(kSpecialOrIgnoreOnly, src_queue_family, dst_queue_family);
7145 }
7146 } else {
7147 // VK_SHARING_MODE_EXCLUSIVE
7148 if (src_queue_family != dst_queue_family) {
7149 if (!val.IsValidOrSpecial(dst_queue_family)) {
7150 skip |= val.LogMsg(kSrcAndDstValidOrSpecial, dst_queue_family, "dstQueueFamilyIndex");
7151 }
7152 if (!val.IsValidOrSpecial(src_queue_family)) {
7153 skip |= val.LogMsg(kSrcAndDstValidOrSpecial, src_queue_family, "srcQueueFamilyIndex");
7154 }
7155 }
7156 }
7157 } else {
7158 // No memory extension
7159 if (mode_concurrent) {
7160 if (!src_ignored || !dst_ignored) {
7161 skip |= val.LogMsg(kSrcAndDestMustBeIgnore, src_queue_family, dst_queue_family);
7162 }
7163 } else {
7164 // VK_SHARING_MODE_EXCLUSIVE
7165 if ((src_queue_family != dst_queue_family) && !(val.IsValid(src_queue_family) && val.IsValid(dst_queue_family))) {
7166 skip |= val.LogMsg(kSrcAndDstBothValid, src_queue_family, dst_queue_family);
7167 }
7168 }
7169 }
7170 return skip;
7171 }
7172 } // namespace barrier_queue_families
7173
ValidateConcurrentBarrierAtSubmit(const ValidationStateTracker * state_data,const QUEUE_STATE * queue_state,const char * func_name,const CMD_BUFFER_STATE * cb_state,const VulkanTypedHandle & typed_handle,uint32_t src_queue_family,uint32_t dst_queue_family)7174 bool CoreChecks::ValidateConcurrentBarrierAtSubmit(const ValidationStateTracker *state_data, const QUEUE_STATE *queue_state,
7175 const char *func_name, const CMD_BUFFER_STATE *cb_state,
7176 const VulkanTypedHandle &typed_handle, uint32_t src_queue_family,
7177 uint32_t dst_queue_family) {
7178 using barrier_queue_families::ValidatorState;
7179 ValidatorState val(state_data, func_name, cb_state, typed_handle, VK_SHARING_MODE_CONCURRENT);
7180 return ValidatorState::ValidateAtQueueSubmit(queue_state, state_data, src_queue_family, dst_queue_family, val);
7181 }
7182
7183 // Type specific wrapper for image barriers
ValidateBarrierQueueFamilies(const char * func_name,const CMD_BUFFER_STATE * cb_state,const VkImageMemoryBarrier & barrier,const IMAGE_STATE * state_data) const7184 bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, const CMD_BUFFER_STATE *cb_state,
7185 const VkImageMemoryBarrier &barrier, const IMAGE_STATE *state_data) const {
7186 // State data is required
7187 if (!state_data) {
7188 return false;
7189 }
7190
7191 // Create the validator state from the image state
7192 barrier_queue_families::ValidatorState val(this, func_name, cb_state, VulkanTypedHandle(barrier.image, kVulkanObjectTypeImage),
7193 state_data->createInfo.sharingMode);
7194 const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
7195 const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
7196 return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
7197 }
7198
7199 // Type specific wrapper for buffer barriers
ValidateBarrierQueueFamilies(const char * func_name,const CMD_BUFFER_STATE * cb_state,const VkBufferMemoryBarrier & barrier,const BUFFER_STATE * state_data) const7200 bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, const CMD_BUFFER_STATE *cb_state,
7201 const VkBufferMemoryBarrier &barrier, const BUFFER_STATE *state_data) const {
7202 // State data is required
7203 if (!state_data) {
7204 return false;
7205 }
7206
7207 // Create the validator state from the buffer state
7208 barrier_queue_families::ValidatorState val(
7209 this, func_name, cb_state, VulkanTypedHandle(barrier.buffer, kVulkanObjectTypeBuffer), state_data->createInfo.sharingMode);
7210 const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
7211 const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
7212 return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
7213 }
7214
ValidateBarriers(const char * funcName,const CMD_BUFFER_STATE * cb_state,VkPipelineStageFlags src_stage_mask,VkPipelineStageFlags dst_stage_mask,uint32_t memBarrierCount,const VkMemoryBarrier * pMemBarriers,uint32_t bufferBarrierCount,const VkBufferMemoryBarrier * pBufferMemBarriers,uint32_t imageMemBarrierCount,const VkImageMemoryBarrier * pImageMemBarriers) const7215 bool CoreChecks::ValidateBarriers(const char *funcName, const CMD_BUFFER_STATE *cb_state, VkPipelineStageFlags src_stage_mask,
7216 VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount,
7217 const VkMemoryBarrier *pMemBarriers, uint32_t bufferBarrierCount,
7218 const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
7219 const VkImageMemoryBarrier *pImageMemBarriers) const {
7220 bool skip = false;
7221 for (uint32_t i = 0; i < memBarrierCount; ++i) {
7222 const auto &mem_barrier = pMemBarriers[i];
7223 if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
7224 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
7225 "%s: pMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
7226 mem_barrier.srcAccessMask, src_stage_mask);
7227 }
7228 if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
7229 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
7230 "%s: pMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
7231 mem_barrier.dstAccessMask, dst_stage_mask);
7232 }
7233 }
7234 for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
7235 const auto &mem_barrier = pImageMemBarriers[i];
7236 if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
7237 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
7238 "%s: pImageMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
7239 mem_barrier.srcAccessMask, src_stage_mask);
7240 }
7241 if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
7242 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
7243 "%s: pImageMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
7244 mem_barrier.dstAccessMask, dst_stage_mask);
7245 }
7246
7247 auto image_data = GetImageState(mem_barrier.image);
7248 skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, image_data);
7249
7250 if (mem_barrier.newLayout == VK_IMAGE_LAYOUT_UNDEFINED || mem_barrier.newLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
7251 skip |= LogError(cb_state->commandBuffer, "VUID-VkImageMemoryBarrier-newLayout-01198",
7252 "%s: Image Layout cannot be transitioned to UNDEFINED or PREINITIALIZED.", funcName);
7253 }
7254
7255 if (image_data) {
7256 skip |= ValidateMemoryIsBoundToImage(image_data, funcName, "VUID-VkBufferMemoryBarrier-buffer-01931");
7257
7258 const auto aspect_mask = mem_barrier.subresourceRange.aspectMask;
7259 skip |= ValidateImageAspectMask(image_data->image, image_data->createInfo.format, aspect_mask, funcName);
7260
7261 const std::string param_name = "pImageMemoryBarriers[" + std::to_string(i) + "].subresourceRange";
7262 skip |= ValidateImageBarrierSubresourceRange(image_data, mem_barrier.subresourceRange, funcName, param_name.c_str());
7263 }
7264 }
7265
7266 for (uint32_t i = 0; i < bufferBarrierCount; ++i) {
7267 const auto &mem_barrier = pBufferMemBarriers[i];
7268
7269 if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
7270 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
7271 "%s: pBufferMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName,
7272 i, mem_barrier.srcAccessMask, src_stage_mask);
7273 }
7274 if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
7275 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
7276 "%s: pBufferMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName,
7277 i, mem_barrier.dstAccessMask, dst_stage_mask);
7278 }
7279 // Validate buffer barrier queue family indices
7280 auto buffer_state = GetBufferState(mem_barrier.buffer);
7281 skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, buffer_state);
7282
7283 if (buffer_state) {
7284 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, funcName, "VUID-VkBufferMemoryBarrier-buffer-01931");
7285
7286 auto buffer_size = buffer_state->createInfo.size;
7287 if (mem_barrier.offset >= buffer_size) {
7288 skip |= LogError(cb_state->commandBuffer, "VUID-VkBufferMemoryBarrier-offset-01187",
7289 "%s: Buffer Barrier %s has offset 0x%" PRIx64 " which is not less than total size 0x%" PRIx64 ".",
7290 funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(),
7291 HandleToUint64(mem_barrier.offset), HandleToUint64(buffer_size));
7292 } else if (mem_barrier.size != VK_WHOLE_SIZE && (mem_barrier.offset + mem_barrier.size > buffer_size)) {
7293 skip |= LogError(cb_state->commandBuffer, "VUID-VkBufferMemoryBarrier-size-01189",
7294 "%s: Buffer Barrier %s has offset 0x%" PRIx64 " and size 0x%" PRIx64
7295 " whose sum is greater than total size 0x%" PRIx64 ".",
7296 funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(),
7297 HandleToUint64(mem_barrier.offset), HandleToUint64(mem_barrier.size), HandleToUint64(buffer_size));
7298 }
7299 if (mem_barrier.size == 0) {
7300 skip |= LogError(cb_state->commandBuffer, "VUID-VkBufferMemoryBarrier-size-01188",
7301 "%s: Buffer Barrier %s has a size of 0.", funcName,
7302 report_data->FormatHandle(mem_barrier.buffer).c_str());
7303 }
7304 }
7305 }
7306
7307 skip |= ValidateBarriersQFOTransferUniqueness(funcName, cb_state, bufferBarrierCount, pBufferMemBarriers, imageMemBarrierCount,
7308 pImageMemBarriers);
7309
7310 return skip;
7311 }
7312
ValidateEventStageMask(const ValidationStateTracker * state_data,const CMD_BUFFER_STATE * pCB,size_t eventCount,size_t firstEventIndex,VkPipelineStageFlags sourceStageMask,EventToStageMap * localEventToStageMap)7313 bool CoreChecks::ValidateEventStageMask(const ValidationStateTracker *state_data, const CMD_BUFFER_STATE *pCB, size_t eventCount,
7314 size_t firstEventIndex, VkPipelineStageFlags sourceStageMask,
7315 EventToStageMap *localEventToStageMap) {
7316 bool skip = false;
7317 VkPipelineStageFlags stageMask = 0;
7318 const auto max_event = std::min((firstEventIndex + eventCount), pCB->events.size());
7319 for (size_t event_index = firstEventIndex; event_index < max_event; ++event_index) {
7320 auto event = pCB->events[event_index];
7321 auto event_data = localEventToStageMap->find(event);
7322 if (event_data != localEventToStageMap->end()) {
7323 stageMask |= event_data->second;
7324 } else {
7325 auto global_event_data = state_data->GetEventState(event);
7326 if (!global_event_data) {
7327 skip |= state_data->LogError(event, kVUID_Core_DrawState_InvalidEvent,
7328 "%s cannot be waited on if it has never been set.",
7329 state_data->report_data->FormatHandle(event).c_str());
7330 } else {
7331 stageMask |= global_event_data->stageMask;
7332 }
7333 }
7334 }
7335 // TODO: Need to validate that host_bit is only set if set event is called
7336 // but set event can be called at any time.
7337 if (sourceStageMask != stageMask && sourceStageMask != (stageMask | VK_PIPELINE_STAGE_HOST_BIT)) {
7338 skip |= state_data->LogError(
7339 pCB->commandBuffer, "VUID-vkCmdWaitEvents-srcStageMask-parameter",
7340 "Submitting cmdbuffer with call to VkCmdWaitEvents using srcStageMask 0x%X which must be the bitwise OR of "
7341 "the stageMask parameters used in calls to vkCmdSetEvent and VK_PIPELINE_STAGE_HOST_BIT if used with "
7342 "vkSetEvent but instead is 0x%X.",
7343 sourceStageMask, stageMask);
7344 }
7345 return skip;
7346 }
7347
// Note that we only check bits that HAVE required queueflags -- don't care entries are skipped
// Maps each checked pipeline stage bit to the queue capability flags a queue family must expose
// to use that stage. Consumed by CheckStageMaskQueueCompatibility(), which iterates
// stage_flag_bit_array; a stage bit absent here is treated as unconstrained.
static std::unordered_map<VkPipelineStageFlags, VkQueueFlags> supported_pipeline_stages_table = {
    {VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_QUEUE_GRAPHICS_BIT},
    {VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT},
    {VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
    {VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_QUEUE_GRAPHICS_BIT}};
7365
// Enumeration of the individual stage bits that have queue-capability constraints, so a stage
// mask can be walked bit-by-bit in CheckStageMaskQueueCompatibility(). Must stay in sync with
// the keys of supported_pipeline_stages_table.
static const VkPipelineStageFlags stage_flag_bit_array[] = {VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
                                                            VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
                                                            VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                                            VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                                            VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
                                                            VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                                            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
                                                            VK_PIPELINE_STAGE_TRANSFER_BIT,
                                                            VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
                                                            VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT};
7381
CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer,VkPipelineStageFlags stage_mask,VkQueueFlags queue_flags,const char * function,const char * src_or_dest,const char * error_code) const7382 bool CoreChecks::CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask,
7383 VkQueueFlags queue_flags, const char *function, const char *src_or_dest,
7384 const char *error_code) const {
7385 bool skip = false;
7386 // Lookup each bit in the stagemask and check for overlap between its table bits and queue_flags
7387 for (const auto &item : stage_flag_bit_array) {
7388 if (stage_mask & item) {
7389 if ((supported_pipeline_stages_table[item] & queue_flags) == 0) {
7390 skip |= LogError(command_buffer, error_code,
7391 "%s(): %s flag %s is not compatible with the queue family properties of this command buffer.",
7392 function, src_or_dest, string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(item)));
7393 }
7394 }
7395 }
7396 return skip;
7397 }
7398
7399 // Check if all barriers are of a given operation type.
7400 template <typename Barrier, typename OpCheck>
AllTransferOp(const COMMAND_POOL_STATE * pool,OpCheck & op_check,uint32_t count,const Barrier * barriers)7401 bool AllTransferOp(const COMMAND_POOL_STATE *pool, OpCheck &op_check, uint32_t count, const Barrier *barriers) {
7402 if (!pool) return false;
7403
7404 for (uint32_t b = 0; b < count; b++) {
7405 if (!op_check(pool, barriers + b)) return false;
7406 }
7407 return true;
7408 }
7409
7410 // Look at the barriers to see if we they are all release or all acquire, the result impacts queue properties validation
ComputeBarrierOperationsType(const CMD_BUFFER_STATE * cb_state,uint32_t buffer_barrier_count,const VkBufferMemoryBarrier * buffer_barriers,uint32_t image_barrier_count,const VkImageMemoryBarrier * image_barriers) const7411 BarrierOperationsType CoreChecks::ComputeBarrierOperationsType(const CMD_BUFFER_STATE *cb_state, uint32_t buffer_barrier_count,
7412 const VkBufferMemoryBarrier *buffer_barriers,
7413 uint32_t image_barrier_count,
7414 const VkImageMemoryBarrier *image_barriers) const {
7415 auto pool = cb_state->command_pool.get();
7416 BarrierOperationsType op_type = kGeneral;
7417
7418 // Look at the barrier details only if they exist
7419 // Note: AllTransferOp returns true for count == 0
7420 if ((buffer_barrier_count + image_barrier_count) != 0) {
7421 if (AllTransferOp(pool, TempIsReleaseOp<VkBufferMemoryBarrier>, buffer_barrier_count, buffer_barriers) &&
7422 AllTransferOp(pool, TempIsReleaseOp<VkImageMemoryBarrier>, image_barrier_count, image_barriers)) {
7423 op_type = kAllRelease;
7424 } else if (AllTransferOp(pool, IsAcquireOp<VkBufferMemoryBarrier>, buffer_barrier_count, buffer_barriers) &&
7425 AllTransferOp(pool, IsAcquireOp<VkImageMemoryBarrier>, image_barrier_count, image_barriers)) {
7426 op_type = kAllAcquire;
7427 }
7428 }
7429
7430 return op_type;
7431 }
7432
ValidateStageMasksAgainstQueueCapabilities(const CMD_BUFFER_STATE * cb_state,VkPipelineStageFlags source_stage_mask,VkPipelineStageFlags dest_stage_mask,BarrierOperationsType barrier_op_type,const char * function,const char * error_code) const7433 bool CoreChecks::ValidateStageMasksAgainstQueueCapabilities(const CMD_BUFFER_STATE *cb_state,
7434 VkPipelineStageFlags source_stage_mask,
7435 VkPipelineStageFlags dest_stage_mask,
7436 BarrierOperationsType barrier_op_type, const char *function,
7437 const char *error_code) const {
7438 bool skip = false;
7439 uint32_t queue_family_index = cb_state->command_pool->queueFamilyIndex;
7440 auto physical_device_state = GetPhysicalDeviceState();
7441
7442 // Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family
7443 // specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool
7444 // that commandBuffer was allocated from, as specified in the table of supported pipeline stages.
7445
7446 if (queue_family_index < physical_device_state->queue_family_properties.size()) {
7447 VkQueueFlags specified_queue_flags = physical_device_state->queue_family_properties[queue_family_index].queueFlags;
7448
7449 // Only check the source stage mask if any barriers aren't "acquire ownership"
7450 if ((barrier_op_type != kAllAcquire) && (source_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
7451 skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, source_stage_mask, specified_queue_flags, function,
7452 "srcStageMask", error_code);
7453 }
7454 // Only check the dest stage mask if any barriers aren't "release ownership"
7455 if ((barrier_op_type != kAllRelease) && (dest_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
7456 skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, dest_stage_mask, specified_queue_flags, function,
7457 "dstStageMask", error_code);
7458 }
7459 }
7460 return skip;
7461 }
7462
PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags sourceStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers) const7463 bool CoreChecks::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
7464 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
7465 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
7466 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
7467 uint32_t imageMemoryBarrierCount,
7468 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
7469 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7470 assert(cb_state);
7471
7472 auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
7473 imageMemoryBarrierCount, pImageMemoryBarriers);
7474 bool skip = ValidateStageMasksAgainstQueueCapabilities(cb_state, sourceStageMask, dstStageMask, barrier_op_type,
7475 "vkCmdWaitEvents", "VUID-vkCmdWaitEvents-srcStageMask-4098");
7476 skip |= ValidateStageMaskGsTsEnables(sourceStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-srcStageMask-04090",
7477 "VUID-vkCmdWaitEvents-srcStageMask-04091", "VUID-vkCmdWaitEvents-srcStageMask-04095",
7478 "VUID-vkCmdWaitEvents-srcStageMask-04096");
7479 skip |= ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-dstStageMask-04090",
7480 "VUID-vkCmdWaitEvents-dstStageMask-04091", "VUID-vkCmdWaitEvents-dstStageMask-04095",
7481 "VUID-vkCmdWaitEvents-dstStageMask-04096");
7482 skip |= ValidateCmdQueueFlags(cb_state, "vkCmdWaitEvents()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
7483 "VUID-vkCmdWaitEvents-commandBuffer-cmdpool");
7484 skip |= ValidateCmd(cb_state, CMD_WAITEVENTS, "vkCmdWaitEvents()");
7485 skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdWaitEvents()");
7486 skip |= ValidateBarriers("vkCmdWaitEvents()", cb_state, sourceStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
7487 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
7488 return skip;
7489 }
7490
void CoreChecks::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                            VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                            uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                            uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
    // Record-time hook for vkCmdWaitEvents: queue a submit-time check that sourceStageMask matches
    // the stage masks the waited events were set with, then apply image layout transitions.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // The StateTracker call below appends this command's events to cb_state->events; capture the
    // current size first so we know which entries it added.
    auto first_event_index = cb_state->events.size();
    StateTracker::PreCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, sourceStageMask, dstStageMask, memoryBarrierCount,
                                             pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
                                             imageMemoryBarrierCount, pImageMemoryBarriers);
    auto event_added_count = cb_state->events.size() - first_event_index;

    // Defer the stage-mask validation to queue submit time, since events can be set/reset by other
    // command buffers before this one executes. All captures are by value; cb_state_const is a raw
    // pointer — assumed valid when the lambda runs because the update list lives on the command
    // buffer itself (TODO confirm lifetime guarantee).
    const CMD_BUFFER_STATE *cb_state_const = cb_state;
    cb_state->eventUpdates.emplace_back(
        [cb_state_const, event_added_count, first_event_index, sourceStageMask](
            const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            if (!do_validate) return false;
            return ValidateEventStageMask(device_data, cb_state_const, event_added_count, first_event_index, sourceStageMask,
                                          localEventToStageMap);
        });
    TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
}
7514
PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags sourceStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)7515 void CoreChecks::PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
7516 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
7517 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
7518 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
7519 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
7520 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7521 RecordBarrierValidationInfo("vkCmdWaitEvents", cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
7522 imageMemoryBarrierCount, pImageMemoryBarriers);
7523 }
7524
bool CoreChecks::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                                   VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                                   uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                   uint32_t bufferMemoryBarrierCount,
                                                   const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                   uint32_t imageMemoryBarrierCount,
                                                   const VkImageMemoryBarrier *pImageMemoryBarriers) const {
    // Record-time validation for vkCmdPipelineBarrier: queue-capability compatibility of the stage
    // masks, command/queue-flag checks, feature-gated stage bits, render-pass self-dependency rules,
    // and per-barrier contents.
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    bool skip = false;
    // Only buffer/image barriers can carry queue family ownership transfers, which relax the
    // queue-capability checks on one side; skip classification entirely when there are none.
    if (bufferMemoryBarrierCount || imageMemoryBarrierCount) {
        auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
                                                            imageMemoryBarrierCount, pImageMemoryBarriers);
        skip |= ValidateStageMasksAgainstQueueCapabilities(cb_state, srcStageMask, dstStageMask, barrier_op_type,
                                                           "vkCmdPipelineBarrier", "VUID-vkCmdPipelineBarrier-srcStageMask-4098");
    }
    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPipelineBarrier()",
                                  VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                  "VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_PIPELINEBARRIER, "vkCmdPipelineBarrier()");
    // Geometry/tessellation/mesh stage bits require the corresponding device features.
    skip |=
        ValidateStageMaskGsTsEnables(srcStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-srcStageMask-04090",
                                     "VUID-vkCmdPipelineBarrier-srcStageMask-04091", "VUID-vkCmdPipelineBarrier-srcStageMask-04095",
                                     "VUID-vkCmdPipelineBarrier-srcStageMask-04096");
    skip |=
        ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-dstStageMask-04090",
                                     "VUID-vkCmdPipelineBarrier-dstStageMask-04091", "VUID-vkCmdPipelineBarrier-dstStageMask-04095",
                                     "VUID-vkCmdPipelineBarrier-dstStageMask-04096");
    if (cb_state->activeRenderPass) {
        // Inside a render pass, the barrier must match a subpass self-dependency.
        skip |= ValidateRenderPassPipelineBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, dependencyFlags,
                                                   memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                                   pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
        if (skip) return true;  // Early return to avoid redundant errors from below calls
    }
    skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdPipelineBarrier()");
    skip |= ValidateBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
                             bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
    return skip;
}
7565
void CoreChecks::EnqueueSubmitTimeValidateImageBarrierAttachment(const char *func_name, CMD_BUFFER_STATE *cb_state,
                                                                 uint32_t imageMemBarrierCount,
                                                                 const VkImageMemoryBarrier *pImageMemBarriers) {
    // For secondary command buffers recorded inside a render pass without a framebuffer, the
    // image-barrier-vs-attachment checks cannot run yet; enqueue them to run at
    // vkCmdExecuteCommands time when the primary's framebuffer is known.
    // Secondary CBs can have null framebuffer so queue up validation in that case 'til FB is known
    if ((cb_state->activeRenderPass) && (VK_NULL_HANDLE == cb_state->activeFramebuffer) &&
        (VK_COMMAND_BUFFER_LEVEL_SECONDARY == cb_state->createInfo.level)) {
        const auto active_subpass = cb_state->activeSubpass;
        const auto rp_state = cb_state->activeRenderPass;
        const auto &sub_desc = rp_state->createInfo.pSubpasses[active_subpass];
        for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
            const auto &img_barrier = pImageMemBarriers[i];
            // Secondary CB case w/o FB specified delay validation
            // [=] copies img_barrier and sub_desc (the referenced objects) into the closure; the
            // raw func_name and cb_state pointers are assumed to remain valid until execute time —
            // callers pass string literals for func_name (TODO confirm).
            cb_state->cmd_execute_commands_functions.emplace_back(
                [=](const CMD_BUFFER_STATE *primary_cb, const FRAMEBUFFER_STATE *fb) {
                    return ValidateImageBarrierAttachment(func_name, cb_state, fb, active_subpass, sub_desc, rp_state->renderPass,
                                                          i, img_barrier);
                });
        }
    }
}
7586
PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)7587 void CoreChecks::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
7588 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
7589 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
7590 uint32_t bufferMemoryBarrierCount,
7591 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
7592 uint32_t imageMemoryBarrierCount,
7593 const VkImageMemoryBarrier *pImageMemoryBarriers) {
7594 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7595 const char *func_name = "vkCmdPipelineBarrier";
7596
7597 RecordBarrierValidationInfo(func_name, cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
7598 pImageMemoryBarriers);
7599
7600 EnqueueSubmitTimeValidateImageBarrierAttachment(func_name, cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
7601 TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
7602 }
7603
bool CoreChecks::ValidateBeginQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, VkFlags flags, CMD_TYPE cmd,
                                    const char *cmd_name, const ValidateBeginQueryVuids *vuids) const {
    // Shared validation for query-begin commands (vkCmdBeginQuery / vkCmdBeginQueryIndexedEXT);
    // the caller supplies the command-specific VUID strings via 'vuids'.
    bool skip = false;
    const auto *query_pool_state = GetQueryPoolState(query_obj.pool);
    const auto &query_pool_ci = query_pool_state->createInfo;

    // Timestamp queries are written with vkCmdWriteTimestamp, never begun.
    if (query_pool_ci.queryType == VK_QUERY_TYPE_TIMESTAMP) {
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBeginQuery-queryType-02804",
                         "%s: The querypool's query type must not be VK_QUERY_TYPE_TIMESTAMP.", cmd_name);
    }

    // Check for nested queries of the same type within this command buffer
    if (cb_state->activeQueries.size()) {
        for (auto a_query : cb_state->activeQueries) {
            auto active_query_pool_state = GetQueryPoolState(a_query.pool);
            if (active_query_pool_state->createInfo.queryType == query_pool_ci.queryType) {
                LogObjectList obj_list(cb_state->commandBuffer);
                obj_list.add(query_obj.pool);
                obj_list.add(a_query.pool);
                skip |= LogError(obj_list, vuids->vuid_dup_query_type,
                                 "%s: Within the same command buffer %s, query %d from pool %s has same queryType as active query "
                                 "%d from pool %s.",
                                 cmd_name, report_data->FormatHandle(cb_state->commandBuffer).c_str(), query_obj.index,
                                 report_data->FormatHandle(query_obj.pool).c_str(), a_query.index,
                                 report_data->FormatHandle(a_query.pool).c_str());
            }
        }
    }

    // There are tighter queue constraints to test for certain query pools
    if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
        skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuids->vuid_queue_feedback);
    }
    if (query_pool_ci.queryType == VK_QUERY_TYPE_OCCLUSION) {
        skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuids->vuid_queue_occlusion);
    }
    // Performance queries additionally require the profiling lock and restrict counter scopes.
    if (query_pool_ci.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        if (!cb_state->performance_lock_acquired) {
            skip |= LogError(cb_state->commandBuffer, vuids->vuid_profile_lock,
                             "%s: profiling lock must be held before vkBeginCommandBuffer is called on "
                             "a command buffer where performance queries are recorded.",
                             cmd_name);
        }

        // COMMAND_BUFFER-scope counters must be the first command recorded.
        if (query_pool_state->has_perf_scope_command_buffer && cb_state->commandCount > 0) {
            skip |= LogError(cb_state->commandBuffer, vuids->vuid_scope_not_first,
                             "%s: Query pool %s was created with a counter of scope "
                             "VK_QUERY_SCOPE_COMMAND_BUFFER_KHR but %s is not the first recorded "
                             "command in the command buffer.",
                             cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), cmd_name);
        }

        // RENDER_PASS-scope counters may not begin inside a render pass.
        if (query_pool_state->has_perf_scope_render_pass && cb_state->activeRenderPass) {
            skip |= LogError(cb_state->commandBuffer, vuids->vuid_scope_in_rp,
                             "%s: Query pool %s was created with a counter of scope "
                             "VK_QUERY_SCOPE_RENDER_PASS_KHR but %s is inside a render pass.",
                             cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), cmd_name);
        }
    }

    skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuids->vuid_queue_flags);

    // PRECISE is only meaningful for occlusion queries and needs the occlusionQueryPrecise feature.
    if (flags & VK_QUERY_CONTROL_PRECISE_BIT) {
        if (!enabled_features.core.occlusionQueryPrecise) {
            skip |= LogError(cb_state->commandBuffer, vuids->vuid_precise,
                             "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but precise occlusion queries not enabled on the device.",
                             cmd_name);
        }

        if (query_pool_ci.queryType != VK_QUERY_TYPE_OCCLUSION) {
            skip |=
                LogError(cb_state->commandBuffer, vuids->vuid_precise,
                         "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but pool query type is not VK_QUERY_TYPE_OCCLUSION", cmd_name);
        }
    }

    if (query_obj.query >= query_pool_ci.queryCount) {
        skip |= LogError(cb_state->commandBuffer, vuids->vuid_query_count,
                         "%s: Query index %" PRIu32 " must be less than query count %" PRIu32 " of %s.", cmd_name, query_obj.query,
                         query_pool_ci.queryCount, report_data->FormatHandle(query_obj.pool).c_str());
    }

    if (cb_state->unprotected == false) {
        skip |= LogError(cb_state->commandBuffer, vuids->vuid_protected_cb,
                         "%s: command can't be used in protected command buffers.", cmd_name);
    }

    skip |= ValidateCmd(cb_state, cmd, cmd_name);
    return skip;
}
7694
PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot,VkFlags flags) const7695 bool CoreChecks::PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
7696 VkFlags flags) const {
7697 if (disabled[query_validation]) return false;
7698 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7699 assert(cb_state);
7700 QueryObject query_obj(queryPool, slot);
7701 ValidateBeginQueryVuids vuids = {"VUID-vkCmdBeginQuery-commandBuffer-cmdpool", "VUID-vkCmdBeginQuery-queryType-02327",
7702 "VUID-vkCmdBeginQuery-queryType-00803", "VUID-vkCmdBeginQuery-queryType-00800",
7703 "VUID-vkCmdBeginQuery-query-00802", "VUID-vkCmdBeginQuery-queryPool-03223",
7704 "VUID-vkCmdBeginQuery-queryPool-03224", "VUID-vkCmdBeginQuery-queryPool-03225",
7705 "VUID-vkCmdBeginQuery-queryPool-01922", "VUID-vkCmdBeginQuery-commandBuffer-01885"};
7706 return ValidateBeginQuery(cb_state, query_obj, flags, CMD_BEGINQUERY, "vkCmdBeginQuery()", &vuids);
7707 }
7708
VerifyQueryIsReset(const ValidationStateTracker * state_data,VkCommandBuffer commandBuffer,QueryObject query_obj,const char * func_name,VkQueryPool & firstPerfQueryPool,uint32_t perfPass,QueryMap * localQueryToStateMap)7709 bool CoreChecks::VerifyQueryIsReset(const ValidationStateTracker *state_data, VkCommandBuffer commandBuffer, QueryObject query_obj,
7710 const char *func_name, VkQueryPool &firstPerfQueryPool, uint32_t perfPass,
7711 QueryMap *localQueryToStateMap) {
7712 bool skip = false;
7713
7714 const auto *query_pool_state = state_data->GetQueryPoolState(query_obj.pool);
7715 const auto &query_pool_ci = query_pool_state->createInfo;
7716
7717 QueryState state = state_data->GetQueryState(localQueryToStateMap, query_obj.pool, query_obj.query, perfPass);
7718 // If reset was in another command buffer, check the global map
7719 if (state == QUERYSTATE_UNKNOWN)
7720 state = state_data->GetQueryState(&state_data->queryToStateMap, query_obj.pool, query_obj.query, perfPass);
7721 // Performance queries have limitation upon when they can be
7722 // reset.
7723 if (query_pool_ci.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR && state == QUERYSTATE_UNKNOWN &&
7724 perfPass >= query_pool_state->n_performance_passes) {
7725 // If the pass is invalid, assume RESET state, another error
7726 // will be raised in ValidatePerformanceQuery().
7727 state = QUERYSTATE_RESET;
7728 }
7729
7730 if (state != QUERYSTATE_RESET) {
7731 skip |= state_data->LogError(commandBuffer, kVUID_Core_DrawState_QueryNotReset,
7732 "%s: %s and query %" PRIu32
7733 ": query not reset. "
7734 "After query pool creation, each query must be reset before it is used. "
7735 "Queries must also be reset between uses.",
7736 func_name, state_data->report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
7737 }
7738
7739 return skip;
7740 }
7741
ValidatePerformanceQuery(const ValidationStateTracker * state_data,VkCommandBuffer commandBuffer,QueryObject query_obj,const char * func_name,VkQueryPool & firstPerfQueryPool,uint32_t perfPass,QueryMap * localQueryToStateMap)7742 bool CoreChecks::ValidatePerformanceQuery(const ValidationStateTracker *state_data, VkCommandBuffer commandBuffer,
7743 QueryObject query_obj, const char *func_name, VkQueryPool &firstPerfQueryPool,
7744 uint32_t perfPass, QueryMap *localQueryToStateMap) {
7745 const auto *query_pool_state = state_data->GetQueryPoolState(query_obj.pool);
7746 const auto &query_pool_ci = query_pool_state->createInfo;
7747
7748 if (query_pool_ci.queryType != VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) return false;
7749
7750 const CMD_BUFFER_STATE *cb_state = state_data->GetCBState(commandBuffer);
7751 bool skip = false;
7752
7753 if (perfPass >= query_pool_state->n_performance_passes) {
7754 skip |= state_data->LogError(commandBuffer, "VUID-VkPerformanceQuerySubmitInfoKHR-counterPassIndex-03221",
7755 "Invalid counterPassIndex (%u, maximum allowed %u) value for query pool %s.", perfPass,
7756 query_pool_state->n_performance_passes,
7757 state_data->report_data->FormatHandle(query_obj.pool).c_str());
7758 }
7759
7760 if (!cb_state->performance_lock_acquired || cb_state->performance_lock_released) {
7761 skip |= state_data->LogError(commandBuffer, "VUID-vkQueueSubmit-pCommandBuffers-03220",
7762 "Commandbuffer %s was submitted and contains a performance query but the"
7763 "profiling lock was not held continuously throughout the recording of commands.",
7764 state_data->report_data->FormatHandle(commandBuffer).c_str());
7765 }
7766
7767 QueryState command_buffer_state = state_data->GetQueryState(localQueryToStateMap, query_obj.pool, query_obj.query, perfPass);
7768 if (command_buffer_state == QUERYSTATE_RESET) {
7769 skip |= state_data->LogError(
7770 commandBuffer, query_obj.indexed ? "VUID-vkCmdBeginQueryIndexedEXT-None-02863" : "VUID-vkCmdBeginQuery-None-02863",
7771 "VkQuery begin command recorded in a command buffer that, either directly or "
7772 "through secondary command buffers, also contains a vkCmdResetQueryPool command "
7773 "affecting the same query.");
7774 }
7775
7776 if (firstPerfQueryPool != VK_NULL_HANDLE) {
7777 if (firstPerfQueryPool != query_obj.pool &&
7778 !state_data->enabled_features.performance_query_features.performanceCounterMultipleQueryPools) {
7779 skip |= state_data->LogError(
7780 commandBuffer,
7781 query_obj.indexed ? "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226" : "VUID-vkCmdBeginQuery-queryPool-03226",
7782 "Commandbuffer %s contains more than one performance query pool but "
7783 "performanceCounterMultipleQueryPools is not enabled.",
7784 state_data->report_data->FormatHandle(commandBuffer).c_str());
7785 }
7786 } else {
7787 firstPerfQueryPool = query_obj.pool;
7788 }
7789
7790 return skip;
7791 }
7792
EnqueueVerifyBeginQuery(VkCommandBuffer command_buffer,const QueryObject & query_obj,const char * func_name)7793 void CoreChecks::EnqueueVerifyBeginQuery(VkCommandBuffer command_buffer, const QueryObject &query_obj, const char *func_name) {
7794 CMD_BUFFER_STATE *cb_state = GetCBState(command_buffer);
7795
7796 // Enqueue the submit time validation here, ahead of the submit time state update in the StateTracker's PostCallRecord
7797 cb_state->queryUpdates.emplace_back([command_buffer, query_obj, func_name](const ValidationStateTracker *device_data,
7798 bool do_validate, VkQueryPool &firstPerfQueryPool,
7799 uint32_t perfPass, QueryMap *localQueryToStateMap) {
7800 if (!do_validate) return false;
7801 bool skip = false;
7802 skip |= ValidatePerformanceQuery(device_data, command_buffer, query_obj, func_name, firstPerfQueryPool, perfPass,
7803 localQueryToStateMap);
7804 skip |= VerifyQueryIsReset(device_data, command_buffer, query_obj, func_name, firstPerfQueryPool, perfPass,
7805 localQueryToStateMap);
7806 return skip;
7807 });
7808 }
7809
PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot,VkFlags flags)7810 void CoreChecks::PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
7811 if (disabled[query_validation]) return;
7812 QueryObject query_obj = {queryPool, slot};
7813 EnqueueVerifyBeginQuery(commandBuffer, query_obj, "vkCmdBeginQuery()");
7814 }
7815
EnqueueVerifyEndQuery(VkCommandBuffer command_buffer,const QueryObject & query_obj)7816 void CoreChecks::EnqueueVerifyEndQuery(VkCommandBuffer command_buffer, const QueryObject &query_obj) {
7817 CMD_BUFFER_STATE *cb_state = GetCBState(command_buffer);
7818
7819 // Enqueue the submit time validation here, ahead of the submit time state update in the StateTracker's PostCallRecord
7820 cb_state->queryUpdates.emplace_back([command_buffer, query_obj](const ValidationStateTracker *device_data, bool do_validate,
7821 VkQueryPool &firstPerfQueryPool, uint32_t perfPass,
7822 QueryMap *localQueryToStateMap) {
7823 if (!do_validate) return false;
7824 bool skip = false;
7825 const CMD_BUFFER_STATE *cb_state = device_data->GetCBState(command_buffer);
7826 const auto *query_pool_state = device_data->GetQueryPoolState(query_obj.pool);
7827 if (query_pool_state->has_perf_scope_command_buffer && (cb_state->commandCount - 1) != query_obj.endCommandIndex) {
7828 skip |= device_data->LogError(command_buffer, "VUID-vkCmdEndQuery-queryPool-03227",
7829 "vkCmdEndQuery: Query pool %s was created with a counter of scope"
7830 "VK_QUERY_SCOPE_COMMAND_BUFFER_KHR but the end of the query is not the last "
7831 "command in the command buffer %s.",
7832 device_data->report_data->FormatHandle(query_obj.pool).c_str(),
7833 device_data->report_data->FormatHandle(command_buffer).c_str());
7834 }
7835 return skip;
7836 });
7837 }
7838
// Shared record-time validation for ending a query (vkCmdEndQuery and
// friends). Verifies the query is currently active, enforces the
// render-pass-scope restriction for performance counters, checks queue
// capabilities / command-buffer state, and rejects protected command buffers.
bool CoreChecks::ValidateCmdEndQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, CMD_TYPE cmd,
                                     const char *cmd_name, const ValidateEndQueryVuids *vuids) const {
    bool skip = false;
    // A query may only be ended if a matching begin placed it in activeQueries.
    if (!cb_state->activeQueries.count(query_obj)) {
        skip |=
            LogError(cb_state->commandBuffer, vuids->vuid_active_queries, "%s: Ending a query before it was started: %s, index %d.",
                     cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
    }
    const auto *query_pool_state = GetQueryPoolState(query_obj.pool);
    const auto &query_pool_ci = query_pool_state->createInfo;
    if (query_pool_ci.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        // Counters created with VK_QUERY_SCOPE_RENDER_PASS_KHR scope must not
        // be ended while a render pass is active.
        if (query_pool_state->has_perf_scope_render_pass && cb_state->activeRenderPass) {
            skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdEndQuery-queryPool-03228",
                             "%s: Query pool %s was created with a counter of scope "
                             "VK_QUERY_SCOPE_RENDER_PASS_KHR but %s is inside a render pass.",
                             cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), cmd_name);
        }
    }
    // Generic checks: supported queue family and valid recording state.
    skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuids->vuid_queue_flags);
    skip |= ValidateCmd(cb_state, cmd, cmd_name);

    // End-query commands are not allowed in protected command buffers.
    if (cb_state->unprotected == false) {
        skip |= LogError(cb_state->commandBuffer, vuids->vuid_protected_cb,
                         "%s: command can't be used in protected command buffers.", cmd_name);
    }
    return skip;
}
7866
PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot) const7867 bool CoreChecks::PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) const {
7868 if (disabled[query_validation]) return false;
7869 bool skip = false;
7870 QueryObject query_obj = {queryPool, slot};
7871 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7872 assert(cb_state);
7873
7874 ValidateEndQueryVuids vuids = {"VUID-vkCmdEndQuery-commandBuffer-cmdpool", "VUID-vkCmdEndQuery-None-01923",
7875 "VUID-vkCmdEndQuery-commandBuffer-01886"};
7876
7877 const QUERY_POOL_STATE *query_pool_state = GetQueryPoolState(queryPool);
7878 if (query_pool_state) {
7879 const uint32_t available_query_count = query_pool_state->createInfo.queryCount;
7880 // Only continue validating if the slot is even within range
7881 if (slot >= available_query_count) {
7882 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdEndQuery-query-00810",
7883 "vkCmdEndQuery(): query index (%u) is greater or equal to the queryPool size (%u).", slot,
7884 available_query_count);
7885 } else {
7886 skip |= ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERY, "vkCmdEndQuery()", &vuids);
7887 }
7888 }
7889 return skip;
7890 }
7891
PreCallRecordCmdEndQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t slot)7892 void CoreChecks::PreCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
7893 if (disabled[query_validation]) return;
7894 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7895 QueryObject query_obj = {queryPool, slot};
7896 query_obj.endCommandIndex = cb_state->commandCount - 1;
7897 EnqueueVerifyEndQuery(commandBuffer, query_obj);
7898 }
7899
ValidateQueryPoolIndex(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,const char * func_name,const char * first_vuid,const char * sum_vuid) const7900 bool CoreChecks::ValidateQueryPoolIndex(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, const char *func_name,
7901 const char *first_vuid, const char *sum_vuid) const {
7902 bool skip = false;
7903 const QUERY_POOL_STATE *query_pool_state = GetQueryPoolState(queryPool);
7904 if (query_pool_state) {
7905 const uint32_t available_query_count = query_pool_state->createInfo.queryCount;
7906 if (firstQuery >= available_query_count) {
7907 skip |= LogError(queryPool, first_vuid,
7908 "%s: In Query %s the firstQuery (%u) is greater or equal to the queryPool size (%u).", func_name,
7909 report_data->FormatHandle(queryPool).c_str(), firstQuery, available_query_count);
7910 }
7911 if ((firstQuery + queryCount) > available_query_count) {
7912 skip |=
7913 LogError(queryPool, sum_vuid,
7914 "%s: In Query %s the sum of firstQuery (%u) + queryCount (%u) is greater than the queryPool size (%u).",
7915 func_name, report_data->FormatHandle(queryPool).c_str(), firstQuery, queryCount, available_query_count);
7916 }
7917 }
7918 return skip;
7919 }
7920
PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount) const7921 bool CoreChecks::PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
7922 uint32_t queryCount) const {
7923 if (disabled[query_validation]) return false;
7924 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
7925 assert(cb_state);
7926
7927 bool skip = InsideRenderPass(cb_state, "vkCmdResetQueryPool()", "VUID-vkCmdResetQueryPool-renderpass");
7928 skip |= ValidateCmd(cb_state, CMD_RESETQUERYPOOL, "VkCmdResetQueryPool()");
7929 skip |= ValidateCmdQueueFlags(cb_state, "VkCmdResetQueryPool()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
7930 "VUID-vkCmdResetQueryPool-commandBuffer-cmdpool");
7931 skip |= ValidateQueryPoolIndex(queryPool, firstQuery, queryCount, "VkCmdResetQueryPool()",
7932 "VUID-vkCmdResetQueryPool-firstQuery-00796", "VUID-vkCmdResetQueryPool-firstQuery-00797");
7933
7934 return skip;
7935 }
7936
GetQueryResultType(QueryState state,VkQueryResultFlags flags)7937 static QueryResultType GetQueryResultType(QueryState state, VkQueryResultFlags flags) {
7938 switch (state) {
7939 case QUERYSTATE_UNKNOWN:
7940 return QUERYRESULT_UNKNOWN;
7941 case QUERYSTATE_RESET:
7942 case QUERYSTATE_RUNNING:
7943 if (flags & VK_QUERY_RESULT_WAIT_BIT) {
7944 return ((state == QUERYSTATE_RESET) ? QUERYRESULT_WAIT_ON_RESET : QUERYRESULT_WAIT_ON_RUNNING);
7945 } else if ((flags & VK_QUERY_RESULT_PARTIAL_BIT) || (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
7946 return QUERYRESULT_SOME_DATA;
7947 } else {
7948 return QUERYRESULT_NO_DATA;
7949 }
7950 case QUERYSTATE_ENDED:
7951 if ((flags & VK_QUERY_RESULT_WAIT_BIT) || (flags & VK_QUERY_RESULT_PARTIAL_BIT) ||
7952 (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
7953 return QUERYRESULT_SOME_DATA;
7954 } else {
7955 return QUERYRESULT_UNKNOWN;
7956 }
7957 case QUERYSTATE_AVAILABLE:
7958 return QUERYRESULT_SOME_DATA;
7959 }
7960 assert(false);
7961 return QUERYRESULT_UNKNOWN;
7962 }
7963
ValidateCopyQueryPoolResults(const ValidationStateTracker * state_data,VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,uint32_t perfPass,VkQueryResultFlags flags,QueryMap * localQueryToStateMap)7964 bool CoreChecks::ValidateCopyQueryPoolResults(const ValidationStateTracker *state_data, VkCommandBuffer commandBuffer,
7965 VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
7966 VkQueryResultFlags flags, QueryMap *localQueryToStateMap) {
7967 bool skip = false;
7968 for (uint32_t i = 0; i < queryCount; i++) {
7969 QueryState state = state_data->GetQueryState(localQueryToStateMap, queryPool, firstQuery + i, perfPass);
7970 QueryResultType result_type = GetQueryResultType(state, flags);
7971 if (result_type != QUERYRESULT_SOME_DATA && result_type != QUERYRESULT_UNKNOWN) {
7972 skip |= state_data->LogError(
7973 commandBuffer, kVUID_Core_DrawState_InvalidQuery,
7974 "vkCmdCopyQueryPoolResults(): Requesting a copy from query to buffer on %s query %" PRIu32 ": %s",
7975 state_data->report_data->FormatHandle(queryPool).c_str(), firstQuery + i, string_QueryResultType(result_type));
7976 }
7977 }
7978 return skip;
7979 }
7980
// Validate-time hook for vkCmdCopyQueryPoolResults: checks the destination
// buffer (bound memory, stride/offset alignment, TRANSFER_DST usage, size),
// the recording state and queue capabilities, the query range, and the
// query-type-specific restrictions (performance, timestamp, Intel perf).
bool CoreChecks::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                                                        uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize stride, VkQueryResultFlags flags) const {
    if (disabled[query_validation]) return false;
    const auto cb_state = GetCBState(commandBuffer);
    const auto dst_buff_state = GetBufferState(dstBuffer);
    assert(cb_state);
    assert(dst_buff_state);
    bool skip = ValidateMemoryIsBoundToBuffer(dst_buff_state, "vkCmdCopyQueryPoolResults()",
                                              "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826");
    skip |= ValidateQueryPoolStride("VUID-vkCmdCopyQueryPoolResults-flags-00822", "VUID-vkCmdCopyQueryPoolResults-flags-00823",
                                    stride, "dstOffset", dstOffset, flags);
    // Validate that DST buffer has correct usage flags set
    skip |= ValidateBufferUsageFlags(dst_buff_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
                                     "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825", "vkCmdCopyQueryPoolResults()",
                                     "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdCopyQueryPoolResults()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                  "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_COPYQUERYPOOLRESULTS, "vkCmdCopyQueryPoolResults()");
    skip |= InsideRenderPass(cb_state, "vkCmdCopyQueryPoolResults()", "VUID-vkCmdCopyQueryPoolResults-renderpass");
    skip |= ValidateQueryPoolIndex(queryPool, firstQuery, queryCount, "vkCmdCopyQueryPoolResults()",
                                   "VUID-vkCmdCopyQueryPoolResults-firstQuery-00820",
                                   "VUID-vkCmdCopyQueryPoolResults-firstQuery-00821");

    // The copied range must fit inside the destination buffer. NOTE(review):
    // these checks compare against requirements.size (memory requirements),
    // not createInfo.size — confirm that is intended.
    if (dstOffset >= dst_buff_state->requirements.size) {
        skip |= LogError(commandBuffer, "VUID-vkCmdCopyQueryPoolResults-dstOffset-00819",
                         "vkCmdCopyQueryPoolResults() dstOffset (0x%" PRIxLEAST64 ") is not less than the size (0x%" PRIxLEAST64
                         ") of buffer (%s).",
                         dstOffset, dst_buff_state->requirements.size, report_data->FormatHandle(dst_buff_state->buffer).c_str());
    } else if (dstOffset + (queryCount * stride) > dst_buff_state->requirements.size) {
        skip |=
            LogError(commandBuffer, "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00824",
                     "vkCmdCopyQueryPoolResults() storage required (0x%" PRIxLEAST64
                     ") equal to dstOffset + (queryCount * stride) is greater than the size (0x%" PRIxLEAST64 ") of buffer (%s).",
                     dstOffset + (queryCount * stride), dst_buff_state->requirements.size,
                     report_data->FormatHandle(dst_buff_state->buffer).c_str());
    }

    auto query_pool_state_iter = queryPoolMap.find(queryPool);
    if (query_pool_state_iter != queryPoolMap.end()) {
        auto query_pool_state = query_pool_state_iter->second.get();
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            skip |= ValidatePerformanceQueryResults("vkCmdCopyQueryPoolResults", query_pool_state, firstQuery, queryCount, flags);
            // Copying performance query results inside a command buffer
            // requires explicit device support.
            if (!phys_dev_ext_props.performance_query_props.allowCommandBufferQueryCopies) {
                skip |= LogError(commandBuffer, "VUID-vkCmdCopyQueryPoolResults-queryType-03232",
                                 "vkCmdCopyQueryPoolResults called with query pool %s but "
                                 "VkPhysicalDevicePerformanceQueryPropertiesKHR::allowCommandBufferQueryCopies "
                                 "is not set.",
                                 report_data->FormatHandle(queryPool).c_str());
            }
        }
        // Timestamp queries never produce partial results.
        if ((query_pool_state->createInfo.queryType == VK_QUERY_TYPE_TIMESTAMP) && ((flags & VK_QUERY_RESULT_PARTIAL_BIT) != 0)) {
            skip |= LogError(commandBuffer, "VUID-vkCmdCopyQueryPoolResults-queryType-00827",
                             "vkCmdCopyQueryPoolResults() query pool %s was created with VK_QUERY_TYPE_TIMESTAMP so flags must not "
                             "contain VK_QUERY_RESULT_PARTIAL_BIT.",
                             report_data->FormatHandle(queryPool).c_str());
        }
        // Intel performance query results cannot be copied with this command.
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL) {
            skip |= LogError(queryPool, "VUID-vkCmdCopyQueryPoolResults-queryType-02734",
                             "vkCmdCopyQueryPoolResults() called but QueryPool %s was created with queryType "
                             "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL.",
                             report_data->FormatHandle(queryPool).c_str());
        }
    }

    return skip;
}
8048
PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)8049 void CoreChecks::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
8050 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
8051 VkDeviceSize stride, VkQueryResultFlags flags) {
8052 if (disabled[query_validation]) return;
8053 auto cb_state = GetCBState(commandBuffer);
8054 cb_state->queryUpdates.emplace_back([commandBuffer, queryPool, firstQuery, queryCount, flags](
8055 const ValidationStateTracker *device_data, bool do_validate,
8056 VkQueryPool &firstPerfQueryPool, uint32_t perfPass, QueryMap *localQueryToStateMap) {
8057 if (!do_validate) return false;
8058 return ValidateCopyQueryPoolResults(device_data, commandBuffer, queryPool, firstQuery, queryCount, perfPass, flags,
8059 localQueryToStateMap);
8060 });
8061 }
8062
// Validate-time hook for vkCmdPushConstants: checks queue capabilities,
// command state, the (offset, size) range, non-empty stageFlags, and that the
// pipeline layout's push-constant ranges cover every requested stage (and
// that every overlapping range's stages appear in stageFlags).
bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                 const void *pValues) const {
    bool skip = false;
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPushConstants()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                  "VUID-vkCmdPushConstants-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_PUSHCONSTANTS, "vkCmdPushConstants()");
    skip |= ValidatePushConstantRange(offset, size, "vkCmdPushConstants()");
    if (0 == stageFlags) {
        skip |= LogError(commandBuffer, "VUID-vkCmdPushConstants-stageFlags-requiredbitmask",
                         "vkCmdPushConstants() call has no stageFlags set.");
    }

    // Check if pipeline_layout VkPushConstantRange(s) overlapping offset, size have stageFlags set for each stage in the command
    // stageFlags argument, *and* that the command stageFlags argument has bits set for the stageFlags in each overlapping range.
    if (!skip) {
        const auto &ranges = *GetPipelineLayout(layout)->push_constant_ranges;
        VkShaderStageFlags found_stages = 0;
        for (const auto &range : ranges) {
            // Only ranges fully containing [offset, offset + size) are considered.
            if ((offset >= range.offset) && (offset + size <= range.offset + range.size)) {
                VkShaderStageFlags matching_stages = range.stageFlags & stageFlags;
                // Every stage declared by an overlapping range must be present
                // in the command's stageFlags.
                if (matching_stages != range.stageFlags) {
                    skip |=
                        LogError(commandBuffer, "VUID-vkCmdPushConstants-offset-01796",
                                 "vkCmdPushConstants(): stageFlags (%s, offset (%" PRIu32 "), and size (%" PRIu32
                                 "), must contain all stages in overlapping VkPushConstantRange stageFlags (%s), offset (%" PRIu32
                                 "), and size (%" PRIu32 ") in %s.",
                                 string_VkShaderStageFlags(stageFlags).c_str(), offset, size,
                                 string_VkShaderStageFlags(range.stageFlags).c_str(), range.offset, range.size,
                                 report_data->FormatHandle(layout).c_str());
                }

                // Accumulate all stages we've found
                found_stages = matching_stages | found_stages;
            }
        }
        // Conversely, every stage in the command's stageFlags must be covered
        // by some overlapping range.
        if (found_stages != stageFlags) {
            uint32_t missing_stages = ~found_stages & stageFlags;
            skip |= LogError(
                commandBuffer, "VUID-vkCmdPushConstants-offset-01795",
                "vkCmdPushConstants(): %s, VkPushConstantRange in %s overlapping offset = %d and size = %d, do not contain %s.",
                string_VkShaderStageFlags(stageFlags).c_str(), report_data->FormatHandle(layout).c_str(), offset, size,
                string_VkShaderStageFlags(missing_stages).c_str());
        }
    }
    return skip;
}
8112
// Validate-time hook for vkCmdWriteTimestamp: checks queue capabilities and
// command state, that the pool is a timestamp pool, and that the command
// buffer's queue family supports timestamps (timestampValidBits != 0).
bool CoreChecks::PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                  VkQueryPool queryPool, uint32_t slot) const {
    if (disabled[query_validation]) return false;
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    // Timestamps may be written on graphics, compute, or transfer queues.
    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdWriteTimestamp()",
                                      VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
                                      "VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool");
    skip |= ValidateCmd(cb_state, CMD_WRITETIMESTAMP, "vkCmdWriteTimestamp()");

    const QUERY_POOL_STATE *query_pool_state = GetQueryPoolState(queryPool);
    if ((query_pool_state != nullptr) && (query_pool_state->createInfo.queryType != VK_QUERY_TYPE_TIMESTAMP)) {
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdWriteTimestamp-queryPool-01416",
                         "vkCmdWriteTimestamp(): Query Pool %s was not created with VK_QUERY_TYPE_TIMESTAMP.",
                         report_data->FormatHandle(queryPool).c_str());
    }

    // timestampValidBits is a property of the command pool's queue family, not
    // of the query pool. NOTE(review): the message below attributes the zero
    // value to the query pool, which is misleading — consider rewording.
    const uint32_t timestampValidBits =
        GetPhysicalDeviceState()->queue_family_properties[cb_state->command_pool->queueFamilyIndex].timestampValidBits;
    if (timestampValidBits == 0) {
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdWriteTimestamp-timestampValidBits-00829",
                         "vkCmdWriteTimestamp(): Query Pool %s has a timestampValidBits value of zero.",
                         report_data->FormatHandle(queryPool).c_str());
    }

    return skip;
}
8140
PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer,VkPipelineStageFlagBits pipelineStage,VkQueryPool queryPool,uint32_t slot)8141 void CoreChecks::PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
8142 VkQueryPool queryPool, uint32_t slot) {
8143 if (disabled[query_validation]) return;
8144 // Enqueue the submit time validation check here, before the submit time state update in StateTracker::PostCall...
8145 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
8146 QueryObject query = {queryPool, slot};
8147 const char *func_name = "vkCmdWriteTimestamp()";
8148 cb_state->queryUpdates.emplace_back([commandBuffer, query, func_name](const ValidationStateTracker *device_data,
8149 bool do_validate, VkQueryPool &firstPerfQueryPool,
8150 uint32_t perfPass, QueryMap *localQueryToStateMap) {
8151 if (!do_validate) return false;
8152 return VerifyQueryIsReset(device_data, commandBuffer, query, func_name, firstPerfQueryPool, perfPass, localQueryToStateMap);
8153 });
8154 }
8155
// Framebuffer-creation helper: for each used attachment reference, verifies
// that the referenced image (or, for imageless framebuffers, the declared
// attachment image info) was created with the required usage flag. Reports
// error_code on any mismatch; returns true if any check failed.
bool CoreChecks::MatchUsage(uint32_t count, const VkAttachmentReference2KHR *attachments, const VkFramebufferCreateInfo *fbci,
                            VkImageUsageFlagBits usage_flag, const char *error_code) const {
    bool skip = false;

    if (attachments) {
        for (uint32_t attach = 0; attach < count; attach++) {
            // VK_ATTACHMENT_UNUSED entries carry no image and are skipped.
            if (attachments[attach].attachment != VK_ATTACHMENT_UNUSED) {
                // Attachment counts are verified elsewhere, but prevent an invalid access
                if (attachments[attach].attachment < fbci->attachmentCount) {
                    if ((fbci->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
                        // Conventional framebuffer: look up the bound image
                        // view and check the image's creation usage.
                        const VkImageView *image_view = &fbci->pAttachments[attachments[attach].attachment];
                        auto view_state = GetImageViewState(*image_view);
                        if (view_state) {
                            const VkImageCreateInfo *ici = &GetImageState(view_state->create_info.image)->createInfo;
                            if (ici != nullptr) {
                                auto creation_usage = ici->usage;
                                // Stencil aspects may carry their own usage via
                                // VkImageStencilUsageCreateInfo; fold it in.
                                const auto stencil_usage_info = lvl_find_in_chain<VkImageStencilUsageCreateInfo>(ici->pNext);
                                if (stencil_usage_info) {
                                    creation_usage |= stencil_usage_info->stencilUsage;
                                }
                                if ((creation_usage & usage_flag) == 0) {
                                    skip |= LogError(device, error_code,
                                                     "vkCreateFramebuffer: Framebuffer Attachment (%d) conflicts with the image's "
                                                     "IMAGE_USAGE flags (%s).",
                                                     attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
                                }
                            }
                        }
                    } else {
                        // Imageless framebuffer: the usage comes from the
                        // VkFramebufferAttachmentsCreateInfoKHR pNext chain.
                        const VkFramebufferAttachmentsCreateInfoKHR *fbaci =
                            lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(fbci->pNext);
                        if (fbaci != nullptr && fbaci->pAttachmentImageInfos != nullptr &&
                            fbaci->attachmentImageInfoCount > attachments[attach].attachment) {
                            uint32_t image_usage = fbaci->pAttachmentImageInfos[attachments[attach].attachment].usage;
                            if ((image_usage & usage_flag) == 0) {
                                skip |=
                                    LogError(device, error_code,
                                             "vkCreateFramebuffer: Framebuffer attachment info (%d) conflicts with the image's "
                                             "IMAGE_USAGE flags (%s).",
                                             attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
                            }
                        }
                    }
                }
            }
        }
    }
    return skip;
}
8205
ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo * pCreateInfo) const8206 bool CoreChecks::ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo *pCreateInfo) const {
8207 bool skip = false;
8208
8209 const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
8210 lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pCreateInfo->pNext);
8211 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) != 0) {
8212 if (!enabled_features.core12.imagelessFramebuffer) {
8213 skip |=
8214 LogError(device, "VUID-VkFramebufferCreateInfo-flags-03189",
8215 "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
8216 "but the imagelessFramebuffer feature is not enabled.");
8217 }
8218
8219 if (pFramebufferAttachmentsCreateInfo == nullptr) {
8220 skip |=
8221 LogError(device, "VUID-VkFramebufferCreateInfo-flags-03190",
8222 "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
8223 "but no instance of VkFramebufferAttachmentsCreateInfoKHR is present in the pNext chain.");
8224 } else {
8225 if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != 0 &&
8226 pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pCreateInfo->attachmentCount) {
8227 skip |= LogError(device, "VUID-VkFramebufferCreateInfo-flags-03191",
8228 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount is %u, but "
8229 "VkFramebufferAttachmentsCreateInfoKHR attachmentImageInfoCount is %u.",
8230 pCreateInfo->attachmentCount, pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
8231 }
8232 }
8233 }
8234
8235 auto rp_state = GetRenderPassState(pCreateInfo->renderPass);
8236 if (rp_state) {
8237 const VkRenderPassCreateInfo2 *rpci = rp_state->createInfo.ptr();
8238 if (rpci->attachmentCount != pCreateInfo->attachmentCount) {
8239 skip |= LogError(pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-attachmentCount-00876",
8240 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount of %u does not match attachmentCount "
8241 "of %u of %s being used to create Framebuffer.",
8242 pCreateInfo->attachmentCount, rpci->attachmentCount,
8243 report_data->FormatHandle(pCreateInfo->renderPass).c_str());
8244 } else {
8245 // attachmentCounts match, so make sure corresponding attachment details line up
8246 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
8247 const VkImageView *image_views = pCreateInfo->pAttachments;
8248 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
8249 auto view_state = GetImageViewState(image_views[i]);
8250 if (view_state == nullptr) {
8251 skip |= LogError(
8252 image_views[i], "VUID-VkFramebufferCreateInfo-flags-02778",
8253 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u is not a valid VkImageView.", i);
8254 } else {
8255 auto &ivci = view_state->create_info;
8256 if (ivci.format != rpci->pAttachments[i].format) {
8257 skip |= LogError(
8258 pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-pAttachments-00880",
8259 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has format of %s that does not "
8260 "match the format of %s used by the corresponding attachment for %s.",
8261 i, string_VkFormat(ivci.format), string_VkFormat(rpci->pAttachments[i].format),
8262 report_data->FormatHandle(pCreateInfo->renderPass).c_str());
8263 }
8264 const VkImageCreateInfo *ici = &GetImageState(ivci.image)->createInfo;
8265 if (ici->samples != rpci->pAttachments[i].samples) {
8266 skip |=
8267 LogError(pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-pAttachments-00881",
8268 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has %s samples that do not "
8269 "match the %s "
8270 "samples used by the corresponding attachment for %s.",
8271 i, string_VkSampleCountFlagBits(ici->samples),
8272 string_VkSampleCountFlagBits(rpci->pAttachments[i].samples),
8273 report_data->FormatHandle(pCreateInfo->renderPass).c_str());
8274 }
8275
8276 // Verify that image memory is valid
8277 auto image_data = GetImageState(ivci.image);
8278 skip |= ValidateMemoryIsBoundToImage(image_data, "vkCreateFramebuffer()",
8279 "UNASSIGNED-CoreValidation-BoundResourceFreedMemoryAccess");
8280
8281 // Verify that view only has a single mip level
8282 if (ivci.subresourceRange.levelCount != 1) {
8283 skip |= LogError(
8284 device, "VUID-VkFramebufferCreateInfo-pAttachments-00883",
8285 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has mip levelCount of %u but "
8286 "only a single mip level (levelCount == 1) is allowed when creating a Framebuffer.",
8287 i, ivci.subresourceRange.levelCount);
8288 }
8289 const uint32_t mip_level = ivci.subresourceRange.baseMipLevel;
8290 uint32_t mip_width = max(1u, ici->extent.width >> mip_level);
8291 uint32_t mip_height = max(1u, ici->extent.height >> mip_level);
8292 if (!(rpci->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
8293 rpci->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT)) {
8294 if ((ivci.subresourceRange.layerCount < pCreateInfo->layers) || (mip_width < pCreateInfo->width) ||
8295 (mip_height < pCreateInfo->height)) {
8296 skip |= LogError(
8297 device, "VUID-VkFramebufferCreateInfo-pAttachments-00882",
8298 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has dimensions "
8299 "smaller than the corresponding framebuffer dimensions. Here are the respective dimensions for "
8300 "attachment #%u, framebuffer:\n"
8301 "width: %u, %u\n"
8302 "height: %u, %u\n"
8303 "layerCount: %u, %u\n",
8304 i, ivci.subresourceRange.baseMipLevel, i, mip_width, pCreateInfo->width, mip_height,
8305 pCreateInfo->height, ivci.subresourceRange.layerCount, pCreateInfo->layers);
8306 }
8307 } else {
8308 if (device_extensions.vk_ext_fragment_density_map || device_extensions.vk_ext_fragment_density_map_2) {
8309 uint32_t ceiling_width = (uint32_t)ceil(
8310 (float)pCreateInfo->width /
8311 std::max((float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.width,
8312 1.0f));
8313 if (mip_width < ceiling_width) {
8314 skip |= LogError(
8315 device, "VUID-VkFramebufferCreateInfo-pAttachments-02555",
8316 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has width "
8317 "smaller than the corresponding the ceiling of framebuffer width / "
8318 "maxFragmentDensityTexelSize.width "
8319 "Here are the respective dimensions for attachment #%u, the ceiling value:\n "
8320 "attachment #%u, framebuffer:\n"
8321 "width: %u, the ceiling value: %u\n",
8322 i, ivci.subresourceRange.baseMipLevel, i, i, mip_width, ceiling_width);
8323 }
8324 uint32_t ceiling_height = (uint32_t)ceil(
8325 (float)pCreateInfo->height /
8326 std::max(
8327 (float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.height,
8328 1.0f));
8329 if (mip_height < ceiling_height) {
8330 skip |= LogError(
8331 device, "VUID-VkFramebufferCreateInfo-pAttachments-02556",
8332 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has height "
8333 "smaller than the corresponding the ceiling of framebuffer height / "
8334 "maxFragmentDensityTexelSize.height "
8335 "Here are the respective dimensions for attachment #%u, the ceiling value:\n "
8336 "attachment #%u, framebuffer:\n"
8337 "height: %u, the ceiling value: %u\n",
8338 i, ivci.subresourceRange.baseMipLevel, i, i, mip_height, ceiling_height);
8339 }
8340 }
8341 }
8342 if (IsIdentitySwizzle(ivci.components) == false) {
8343 skip |= LogError(
8344 device, "VUID-VkFramebufferCreateInfo-pAttachments-00884",
8345 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has non-identy swizzle. All "
8346 "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
8347 "swizzle values:\n"
8348 "r swizzle = %s\n"
8349 "g swizzle = %s\n"
8350 "b swizzle = %s\n"
8351 "a swizzle = %s\n",
8352 i, string_VkComponentSwizzle(ivci.components.r), string_VkComponentSwizzle(ivci.components.g),
8353 string_VkComponentSwizzle(ivci.components.b), string_VkComponentSwizzle(ivci.components.a));
8354 }
8355 if ((ivci.viewType == VK_IMAGE_VIEW_TYPE_2D) || (ivci.viewType == VK_IMAGE_VIEW_TYPE_2D)) {
8356 const auto image_state = GetImageState(ivci.image);
8357 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
8358 if (FormatIsDepthOrStencil(ivci.format)) {
8359 LogObjectList objlist(device);
8360 objlist.add(ivci.image);
8361 skip |= LogError(
8362 objlist, "VUID-VkFramebufferCreateInfo-pAttachments-00891",
8363 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has an image view type of "
8364 "%s "
8365 "which was taken from image %s of type VK_IMAGE_TYPE_3D, but the image view format is a "
8366 "depth/stencil format %s",
8367 i, string_VkImageViewType(ivci.viewType), report_data->FormatHandle(ivci.image).c_str(),
8368 string_VkFormat(ivci.format));
8369 }
8370 }
8371 }
8372 }
8373 }
8374 } else if (pFramebufferAttachmentsCreateInfo) {
8375 // VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR is set
8376 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
8377 auto &aii = pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
8378 bool formatFound = false;
8379 for (uint32_t j = 0; j < aii.viewFormatCount; ++j) {
8380 if (aii.pViewFormats[j] == rpci->pAttachments[i].format) {
8381 formatFound = true;
8382 }
8383 }
8384 if (!formatFound) {
8385 skip |= LogError(pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-flags-03205",
8386 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u does not include "
8387 "format %s used "
8388 "by the corresponding attachment for renderPass (%s).",
8389 i, string_VkFormat(rpci->pAttachments[i].format),
8390 report_data->FormatHandle(pCreateInfo->renderPass).c_str());
8391 }
8392
8393 const char *mismatchedLayersNoMultiviewVuid = device_extensions.vk_khr_multiview
8394 ? "VUID-VkFramebufferCreateInfo-renderPass-03199"
8395 : "VUID-VkFramebufferCreateInfo-flags-03200";
8396 if ((rpci->subpassCount == 0) || (rpci->pSubpasses[0].viewMask == 0)) {
8397 if (aii.layerCount < pCreateInfo->layers) {
8398 skip |=
8399 LogError(device, mismatchedLayersNoMultiviewVuid,
8400 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has only #%u layers, "
8401 "but framebuffer has #%u layers.",
8402 i, aii.layerCount, pCreateInfo->layers);
8403 }
8404 }
8405
8406 if (!device_extensions.vk_ext_fragment_density_map && !device_extensions.vk_ext_fragment_density_map_2) {
8407 if (aii.width < pCreateInfo->width) {
8408 skip |= LogError(
8409 device, "VUID-VkFramebufferCreateInfo-flags-03192",
8410 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a width of only #%u, "
8411 "but framebuffer has a width of #%u.",
8412 i, aii.width, pCreateInfo->width);
8413 }
8414
8415 if (aii.height < pCreateInfo->height) {
8416 skip |= LogError(
8417 device, "VUID-VkFramebufferCreateInfo-flags-03193",
8418 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a height of only #%u, "
8419 "but framebuffer has a height of #%u.",
8420 i, aii.height, pCreateInfo->height);
8421 }
8422 }
8423 }
8424
8425 // Validate image usage
8426 uint32_t attachment_index = VK_ATTACHMENT_UNUSED;
8427 for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
8428 skip |= MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pColorAttachments, pCreateInfo,
8429 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
8430 skip |=
8431 MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pResolveAttachments, pCreateInfo,
8432 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
8433 skip |= MatchUsage(1, rpci->pSubpasses[i].pDepthStencilAttachment, pCreateInfo,
8434 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03202");
8435 skip |= MatchUsage(rpci->pSubpasses[i].inputAttachmentCount, rpci->pSubpasses[i].pInputAttachments, pCreateInfo,
8436 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03204");
8437
8438 const VkSubpassDescriptionDepthStencilResolve *pDepthStencilResolve =
8439 lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(rpci->pSubpasses[i].pNext);
8440 if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr) {
8441 skip |= MatchUsage(1, pDepthStencilResolve->pDepthStencilResolveAttachment, pCreateInfo,
8442 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03203");
8443 }
8444 }
8445
8446 if (device_extensions.vk_khr_multiview) {
8447 if ((rpci->subpassCount > 0) && (rpci->pSubpasses[0].viewMask != 0)) {
8448 for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
8449 const VkSubpassDescriptionDepthStencilResolve *pDepthStencilResolve =
8450 lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(rpci->pSubpasses[i].pNext);
8451 uint32_t view_bits = rpci->pSubpasses[i].viewMask;
8452 uint32_t highest_view_bit = 0;
8453
8454 for (int j = 0; j < 32; ++j) {
8455 if (((view_bits >> j) & 1) != 0) {
8456 highest_view_bit = j;
8457 }
8458 }
8459
8460 for (uint32_t j = 0; j < rpci->pSubpasses[i].colorAttachmentCount; ++j) {
8461 attachment_index = rpci->pSubpasses[i].pColorAttachments[j].attachment;
8462 if (attachment_index != VK_ATTACHMENT_UNUSED) {
8463 uint32_t layer_count =
8464 pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
8465 if (layer_count <= highest_view_bit) {
8466 skip |= LogError(
8467 pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-renderPass-03198",
8468 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
8469 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
8470 "includes layer %u, with that attachment specified as a color attachment %u.",
8471 attachment_index, layer_count, i,
8472 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
8473 }
8474 }
8475 if (rpci->pSubpasses[i].pResolveAttachments) {
8476 attachment_index = rpci->pSubpasses[i].pResolveAttachments[j].attachment;
8477 if (attachment_index != VK_ATTACHMENT_UNUSED) {
8478 uint32_t layer_count =
8479 pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
8480 if (layer_count <= highest_view_bit) {
8481 skip |= LogError(
8482 pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-renderPass-03198",
8483 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
8484 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
8485 "includes layer %u, with that attachment specified as a resolve attachment %u.",
8486 attachment_index, layer_count, i,
8487 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
8488 }
8489 }
8490 }
8491 }
8492
8493 for (uint32_t j = 0; j < rpci->pSubpasses[i].inputAttachmentCount; ++j) {
8494 attachment_index = rpci->pSubpasses[i].pInputAttachments[j].attachment;
8495 if (attachment_index != VK_ATTACHMENT_UNUSED) {
8496 uint32_t layer_count =
8497 pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
8498 if (layer_count <= highest_view_bit) {
8499 skip |= LogError(
8500 pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-renderPass-03198",
8501 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
8502 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
8503 "includes layer %u, with that attachment specified as an input attachment %u.",
8504 attachment_index, layer_count, i,
8505 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
8506 }
8507 }
8508 }
8509
8510 if (rpci->pSubpasses[i].pDepthStencilAttachment != nullptr) {
8511 attachment_index = rpci->pSubpasses[i].pDepthStencilAttachment->attachment;
8512 if (attachment_index != VK_ATTACHMENT_UNUSED) {
8513 uint32_t layer_count =
8514 pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
8515 if (layer_count <= highest_view_bit) {
8516 skip |= LogError(
8517 pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-renderPass-03198",
8518 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
8519 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
8520 "includes layer %u, with that attachment specified as a depth/stencil attachment.",
8521 attachment_index, layer_count, i,
8522 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
8523 }
8524 }
8525
8526 if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr &&
8527 pDepthStencilResolve->pDepthStencilResolveAttachment != nullptr) {
8528 attachment_index = pDepthStencilResolve->pDepthStencilResolveAttachment->attachment;
8529 if (attachment_index != VK_ATTACHMENT_UNUSED) {
8530 uint32_t layer_count =
8531 pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
8532 if (layer_count <= highest_view_bit) {
8533 skip |= LogError(
8534 pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-renderPass-03198",
8535 "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
8536 "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
8537 "includes layer %u, with that attachment specified as a depth/stencil resolve "
8538 "attachment.",
8539 attachment_index, layer_count, i,
8540 report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
8541 }
8542 }
8543 }
8544 }
8545 }
8546 }
8547 }
8548 }
8549
8550 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
8551 // Verify correct attachment usage flags
8552 for (uint32_t subpass = 0; subpass < rpci->subpassCount; subpass++) {
8553 const VkSubpassDescription2 &subpass_description = rpci->pSubpasses[subpass];
8554 // Verify input attachments:
8555 skip |= MatchUsage(subpass_description.inputAttachmentCount, subpass_description.pInputAttachments, pCreateInfo,
8556 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-00879");
8557 // Verify color attachments:
8558 skip |= MatchUsage(subpass_description.colorAttachmentCount, subpass_description.pColorAttachments, pCreateInfo,
8559 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-00877");
8560 // Verify depth/stencil attachments:
8561 skip |=
8562 MatchUsage(1, subpass_description.pDepthStencilAttachment, pCreateInfo,
8563 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
8564 // Verify depth/stecnil resolve
8565 if (device_extensions.vk_khr_depth_stencil_resolve) {
8566 const VkSubpassDescriptionDepthStencilResolve *ds_resolve =
8567 lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(subpass_description.pNext);
8568 if (ds_resolve) {
8569 skip |= MatchUsage(1, ds_resolve->pDepthStencilResolveAttachment, pCreateInfo,
8570 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
8571 "VUID-VkFramebufferCreateInfo-pAttachments-02634");
8572 }
8573 }
8574 }
8575 }
8576
8577 bool bHasNonZeroViewMasks = false;
8578 for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
8579 if (rpci->pSubpasses[i].viewMask != 0) {
8580 bHasNonZeroViewMasks = true;
8581 break;
8582 }
8583 }
8584
8585 if (bHasNonZeroViewMasks && pCreateInfo->layers != 1) {
8586 skip |= LogError(pCreateInfo->renderPass, "VUID-VkFramebufferCreateInfo-renderPass-02531",
8587 "vkCreateFramebuffer(): VkFramebufferCreateInfo has #%u layers but "
8588 "renderPass (%s) was specified with non-zero view masks\n",
8589 pCreateInfo->layers, report_data->FormatHandle(pCreateInfo->renderPass).c_str());
8590 }
8591 }
8592 }
8593 // Verify FB dimensions are within physical device limits
8594 if (pCreateInfo->width > phys_dev_props.limits.maxFramebufferWidth) {
8595 skip |= LogError(device, "VUID-VkFramebufferCreateInfo-width-00886",
8596 "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width exceeds physical device limits. Requested "
8597 "width: %u, device max: %u\n",
8598 pCreateInfo->width, phys_dev_props.limits.maxFramebufferWidth);
8599 }
8600 if (pCreateInfo->height > phys_dev_props.limits.maxFramebufferHeight) {
8601 skip |=
8602 LogError(device, "VUID-VkFramebufferCreateInfo-height-00888",
8603 "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height exceeds physical device limits. Requested "
8604 "height: %u, device max: %u\n",
8605 pCreateInfo->height, phys_dev_props.limits.maxFramebufferHeight);
8606 }
8607 if (pCreateInfo->layers > phys_dev_props.limits.maxFramebufferLayers) {
8608 skip |=
8609 LogError(device, "VUID-VkFramebufferCreateInfo-layers-00890",
8610 "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers exceeds physical device limits. Requested "
8611 "layers: %u, device max: %u\n",
8612 pCreateInfo->layers, phys_dev_props.limits.maxFramebufferLayers);
8613 }
8614 // Verify FB dimensions are greater than zero
8615 if (pCreateInfo->width <= 0) {
8616 skip |= LogError(device, "VUID-VkFramebufferCreateInfo-width-00885",
8617 "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width must be greater than zero.");
8618 }
8619 if (pCreateInfo->height <= 0) {
8620 skip |= LogError(device, "VUID-VkFramebufferCreateInfo-height-00887",
8621 "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height must be greater than zero.");
8622 }
8623 if (pCreateInfo->layers <= 0) {
8624 skip |= LogError(device, "VUID-VkFramebufferCreateInfo-layers-00889",
8625 "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers must be greater than zero.");
8626 }
8627 return skip;
8628 }
8629
PreCallValidateCreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer) const8630 bool CoreChecks::PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
8631 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) const {
8632 // TODO : Verify that renderPass FB is created with is compatible with FB
8633 bool skip = false;
8634 skip |= ValidateFramebufferCreateInfo(pCreateInfo);
8635 return skip;
8636 }
8637
FindDependency(const uint32_t index,const uint32_t dependent,const std::vector<DAGNode> & subpass_to_node,std::unordered_set<uint32_t> & processed_nodes)8638 static bool FindDependency(const uint32_t index, const uint32_t dependent, const std::vector<DAGNode> &subpass_to_node,
8639 std::unordered_set<uint32_t> &processed_nodes) {
8640 // If we have already checked this node we have not found a dependency path so return false.
8641 if (processed_nodes.count(index)) return false;
8642 processed_nodes.insert(index);
8643 const DAGNode &node = subpass_to_node[index];
8644 // Look for a dependency path. If one exists return true else recurse on the previous nodes.
8645 if (std::find(node.prev.begin(), node.prev.end(), dependent) == node.prev.end()) {
8646 for (auto elem : node.prev) {
8647 if (FindDependency(elem, dependent, subpass_to_node, processed_nodes)) return true;
8648 }
8649 } else {
8650 return true;
8651 }
8652 return false;
8653 }
8654
IsImageLayoutReadOnly(VkImageLayout layout) const8655 bool CoreChecks::IsImageLayoutReadOnly(VkImageLayout layout) const {
8656 if ((layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) || (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) ||
8657 (layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) ||
8658 (layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL)) {
8659 return true;
8660 }
8661 return false;
8662 }
8663
CheckDependencyExists(const VkRenderPass renderpass,const uint32_t subpass,const VkImageLayout layout,const std::vector<SubpassLayout> & dependent_subpasses,const std::vector<DAGNode> & subpass_to_node,bool & skip) const8664 bool CoreChecks::CheckDependencyExists(const VkRenderPass renderpass, const uint32_t subpass, const VkImageLayout layout,
8665 const std::vector<SubpassLayout> &dependent_subpasses,
8666 const std::vector<DAGNode> &subpass_to_node, bool &skip) const {
8667 bool result = true;
8668 bool bImageLayoutReadOnly = IsImageLayoutReadOnly(layout);
8669 // Loop through all subpasses that share the same attachment and make sure a dependency exists
8670 for (uint32_t k = 0; k < dependent_subpasses.size(); ++k) {
8671 const SubpassLayout &sp = dependent_subpasses[k];
8672 if (subpass == sp.index) continue;
8673 if (bImageLayoutReadOnly && IsImageLayoutReadOnly(sp.layout)) continue;
8674
8675 const DAGNode &node = subpass_to_node[subpass];
8676 // Check for a specified dependency between the two nodes. If one exists we are done.
8677 auto prev_elem = std::find(node.prev.begin(), node.prev.end(), sp.index);
8678 auto next_elem = std::find(node.next.begin(), node.next.end(), sp.index);
8679 if (prev_elem == node.prev.end() && next_elem == node.next.end()) {
8680 // If no dependency exits an implicit dependency still might. If not, throw an error.
8681 std::unordered_set<uint32_t> processed_nodes;
8682 if (!(FindDependency(subpass, sp.index, subpass_to_node, processed_nodes) ||
8683 FindDependency(sp.index, subpass, subpass_to_node, processed_nodes))) {
8684 skip |=
8685 LogError(renderpass, kVUID_Core_DrawState_InvalidRenderpass,
8686 "A dependency between subpasses %d and %d must exist but one is not specified.", subpass, sp.index);
8687 result = false;
8688 }
8689 }
8690 }
8691 return result;
8692 }
8693
CheckPreserved(const VkRenderPass renderpass,const VkRenderPassCreateInfo2 * pCreateInfo,const int index,const uint32_t attachment,const std::vector<DAGNode> & subpass_to_node,int depth,bool & skip) const8694 bool CoreChecks::CheckPreserved(const VkRenderPass renderpass, const VkRenderPassCreateInfo2 *pCreateInfo, const int index,
8695 const uint32_t attachment, const std::vector<DAGNode> &subpass_to_node, int depth,
8696 bool &skip) const {
8697 const DAGNode &node = subpass_to_node[index];
8698 // If this node writes to the attachment return true as next nodes need to preserve the attachment.
8699 const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[index];
8700 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
8701 if (attachment == subpass.pColorAttachments[j].attachment) return true;
8702 }
8703 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
8704 if (attachment == subpass.pInputAttachments[j].attachment) return true;
8705 }
8706 if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
8707 if (attachment == subpass.pDepthStencilAttachment->attachment) return true;
8708 }
8709 bool result = false;
8710 // Loop through previous nodes and see if any of them write to the attachment.
8711 for (auto elem : node.prev) {
8712 result |= CheckPreserved(renderpass, pCreateInfo, elem, attachment, subpass_to_node, depth + 1, skip);
8713 }
8714 // If the attachment was written to by a previous node than this node needs to preserve it.
8715 if (result && depth > 0) {
8716 bool has_preserved = false;
8717 for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
8718 if (subpass.pPreserveAttachments[j] == attachment) {
8719 has_preserved = true;
8720 break;
8721 }
8722 }
8723 if (!has_preserved) {
8724 skip |= LogError(renderpass, kVUID_Core_DrawState_InvalidRenderpass,
8725 "Attachment %d is used by a later subpass and must be preserved in subpass %d.", attachment, index);
8726 }
8727 }
8728 return result;
8729 }
8730
// Returns true when the half-open ranges [offset1, offset1 + size1) and
// [offset2, offset2 + size2) share at least one element.
//
// The previous formulation only detected partial overlaps where one range's
// start or end fell strictly inside the other; identical ranges and full
// containment (one range enclosing the other) were incorrectly reported as
// non-overlapping, so aliasing subresource/memory ranges could go undetected.
// NOTE(review): callers pass mip/layer counts and memory offsets/sizes; as
// before, sentinel counts like VK_REMAINING_MIP_LEVELS would wrap the
// addition — presumably ranges are normalized upstream; confirm.
template <class T>
bool IsRangeOverlapping(T offset1, T size1, T offset2, T size2) {
    // An empty range cannot overlap anything.
    if (size1 == 0 || size2 == 0) return false;
    // Standard interval-intersection test: each range starts before the other ends.
    return (offset1 < (offset2 + size2)) && (offset2 < (offset1 + size1));
}
8736
IsRegionOverlapping(VkImageSubresourceRange range1,VkImageSubresourceRange range2)8737 bool IsRegionOverlapping(VkImageSubresourceRange range1, VkImageSubresourceRange range2) {
8738 return (IsRangeOverlapping(range1.baseMipLevel, range1.levelCount, range2.baseMipLevel, range2.levelCount) &&
8739 IsRangeOverlapping(range1.baseArrayLayer, range1.layerCount, range2.baseArrayLayer, range2.layerCount));
8740 }
8741
// Validates that the render pass declares a subpass dependency for every pair of
// subpasses that use the same framebuffer attachment (or attachments that alias
// the same image subresources / device memory), and that attachments written in
// one subpass and read in a later one are preserved by intervening subpasses.
// Returns true (skip) if any required dependency or preserve entry is missing.
bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, RENDER_PASS_STATE const *renderPass) const {
    bool skip = false;
    auto const pFramebufferInfo = framebuffer->createInfo.ptr();
    auto const pCreateInfo = renderPass->createInfo.ptr();
    auto const &subpass_to_node = renderPass->subpassToNode;

    // Per-attachment record: which subpasses write it (outputs), read it (inputs),
    // and which other attachment indices alias the same storage (overlapping).
    struct Attachment {
        std::vector<SubpassLayout> outputs;
        std::vector<SubpassLayout> inputs;
        std::vector<uint32_t> overlapping;
    };

    std::vector<Attachment> attachments(pCreateInfo->attachmentCount);

    // Overlap detection needs concrete image views; an imageless framebuffer has
    // none at create time, so the aliasing scan is skipped for it.
    if (!(pFramebufferInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT)) {
        // Find overlapping attachments
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            for (uint32_t j = i + 1; j < pCreateInfo->attachmentCount; ++j) {
                VkImageView viewi = pFramebufferInfo->pAttachments[i];
                VkImageView viewj = pFramebufferInfo->pAttachments[j];
                // Same view handle used twice: trivially aliasing.
                if (viewi == viewj) {
                    attachments[i].overlapping.emplace_back(j);
                    attachments[j].overlapping.emplace_back(i);
                    continue;
                }
                auto view_state_i = GetImageViewState(viewi);
                auto view_state_j = GetImageViewState(viewj);
                if (!view_state_i || !view_state_j) {
                    continue;
                }
                auto view_ci_i = view_state_i->create_info;
                auto view_ci_j = view_state_j->create_info;
                // Different views of the same image with intersecting mip/layer ranges.
                if (view_ci_i.image == view_ci_j.image &&
                    IsRegionOverlapping(view_ci_i.subresourceRange, view_ci_j.subresourceRange)) {
                    attachments[i].overlapping.emplace_back(j);
                    attachments[j].overlapping.emplace_back(i);
                    continue;
                }
                auto image_data_i = GetImageState(view_ci_i.image);
                auto image_data_j = GetImageState(view_ci_j.image);
                if (!image_data_i || !image_data_j) {
                    continue;
                }
                // Distinct images bound to intersecting ranges of the same device memory.
                // NOTE(review): if neither image is bound, both mem_state values compare
                // equal (null) — presumably unbound images are rejected earlier; confirm.
                if (image_data_i->binding.mem_state == image_data_j->binding.mem_state &&
                    IsRangeOverlapping(image_data_i->binding.offset, image_data_i->binding.size, image_data_j->binding.offset,
                                       image_data_j->binding.size)) {
                    attachments[i].overlapping.emplace_back(j);
                    attachments[j].overlapping.emplace_back(i);
                }
            }
        }
    }
    // Find for each attachment the subpasses that use them.
    unordered_set<uint32_t> attachmentIndices;
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        attachmentIndices.clear();
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            uint32_t attachment = subpass.pInputAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            SubpassLayout sp = {i, subpass.pInputAttachments[j].layout};
            attachments[attachment].inputs.emplace_back(sp);
            // A read of this attachment is also a read of anything aliasing it.
            for (auto overlapping_attachment : attachments[attachment].overlapping) {
                attachments[overlapping_attachment].inputs.emplace_back(sp);
            }
        }
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            uint32_t attachment = subpass.pColorAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            SubpassLayout sp = {i, subpass.pColorAttachments[j].layout};
            attachments[attachment].outputs.emplace_back(sp);
            // A write to this attachment is also a write to anything aliasing it.
            for (auto overlapping_attachment : attachments[attachment].overlapping) {
                attachments[overlapping_attachment].outputs.emplace_back(sp);
            }
            // Remember this subpass's color attachments so a depth/stencil reuse
            // of the same index below can be flagged.
            attachmentIndices.insert(attachment);
        }
        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
            uint32_t attachment = subpass.pDepthStencilAttachment->attachment;
            SubpassLayout sp = {i, subpass.pDepthStencilAttachment->layout};
            attachments[attachment].outputs.emplace_back(sp);
            for (auto overlapping_attachment : attachments[attachment].overlapping) {
                attachments[overlapping_attachment].outputs.emplace_back(sp);
            }

            // Same attachment index used both as color and depth/stencil in one subpass.
            if (attachmentIndices.count(attachment)) {
                skip |=
                    LogError(renderPass->renderPass, kVUID_Core_DrawState_InvalidRenderpass,
                             "Cannot use same attachment (%u) as both color and depth output in same subpass (%u).", attachment, i);
            }
        }
    }
    // If there is a dependency needed make sure one exists
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        // If the attachment is an input then all subpasses that output must have a dependency relationship
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            uint32_t attachment = subpass.pInputAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            CheckDependencyExists(renderPass->renderPass, i, subpass.pInputAttachments[j].layout, attachments[attachment].outputs,
                                  subpass_to_node, skip);
        }
        // If the attachment is an output then all subpasses that use the attachment must have a dependency relationship
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            uint32_t attachment = subpass.pColorAttachments[j].attachment;
            if (attachment == VK_ATTACHMENT_UNUSED) continue;
            CheckDependencyExists(renderPass->renderPass, i, subpass.pColorAttachments[j].layout, attachments[attachment].outputs,
                                  subpass_to_node, skip);
            CheckDependencyExists(renderPass->renderPass, i, subpass.pColorAttachments[j].layout, attachments[attachment].inputs,
                                  subpass_to_node, skip);
        }
        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
            const uint32_t &attachment = subpass.pDepthStencilAttachment->attachment;
            CheckDependencyExists(renderPass->renderPass, i, subpass.pDepthStencilAttachment->layout,
                                  attachments[attachment].outputs, subpass_to_node, skip);
            CheckDependencyExists(renderPass->renderPass, i, subpass.pDepthStencilAttachment->layout,
                                  attachments[attachment].inputs, subpass_to_node, skip);
        }
    }
    // Loop through implicit dependencies, if this pass reads make sure the attachment is preserved for all passes after it was
    // written.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            CheckPreserved(renderPass->renderPass, pCreateInfo, i, subpass.pInputAttachments[j].attachment, subpass_to_node, 0,
                           skip);
        }
    }
    return skip;
}
8871
ValidateRenderPassDAG(RenderPassCreateVersion rp_version,const VkRenderPassCreateInfo2 * pCreateInfo) const8872 bool CoreChecks::ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2 *pCreateInfo) const {
8873 bool skip = false;
8874 const char *vuid;
8875 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
8876
8877 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
8878 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
8879 VkPipelineStageFlagBits latest_src_stage = GetLogicallyLatestGraphicsPipelineStage(dependency.srcStageMask);
8880 VkPipelineStageFlagBits earliest_dst_stage = GetLogicallyEarliestGraphicsPipelineStage(dependency.dstStageMask);
8881
8882 // The first subpass here serves as a good proxy for "is multiview enabled" - since all view masks need to be non-zero if
8883 // any are, which enables multiview.
8884 if (use_rp2 && (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && (pCreateInfo->pSubpasses[0].viewMask == 0)) {
8885 skip |= LogError(
8886 device, "VUID-VkRenderPassCreateInfo2-viewMask-03059",
8887 "Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but multiview is not enabled for this render pass.", i);
8888 } else if (use_rp2 && !(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && dependency.viewOffset != 0) {
8889 skip |= LogError(device, "VUID-VkSubpassDependency2-dependencyFlags-03092",
8890 "Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but also specifies a view offset of %u.", i,
8891 dependency.viewOffset);
8892 } else if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL || dependency.dstSubpass == VK_SUBPASS_EXTERNAL) {
8893 if (dependency.srcSubpass == dependency.dstSubpass) {
8894 vuid = use_rp2 ? "VUID-VkSubpassDependency2-srcSubpass-03085" : "VUID-VkSubpassDependency-srcSubpass-00865";
8895 skip |= LogError(device, vuid, "The src and dst subpasses in dependency %u are both external.", i);
8896 } else if (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) {
8897 if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
8898 vuid = "VUID-VkSubpassDependency-dependencyFlags-02520";
8899 } else { // dependency.dstSubpass == VK_SUBPASS_EXTERNAL
8900 vuid = "VUID-VkSubpassDependency-dependencyFlags-02521";
8901 }
8902 if (use_rp2) {
8903 // Create render pass 2 distinguishes between source and destination external dependencies.
8904 if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
8905 vuid = "VUID-VkSubpassDependency2-dependencyFlags-03090";
8906 } else {
8907 vuid = "VUID-VkSubpassDependency2-dependencyFlags-03091";
8908 }
8909 }
8910 skip |=
8911 LogError(device, vuid,
8912 "Dependency %u specifies an external dependency but also specifies VK_DEPENDENCY_VIEW_LOCAL_BIT.", i);
8913 }
8914 } else if (dependency.srcSubpass > dependency.dstSubpass) {
8915 vuid = use_rp2 ? "VUID-VkSubpassDependency2-srcSubpass-03084" : "VUID-VkSubpassDependency-srcSubpass-00864";
8916 skip |= LogError(device, vuid,
8917 "Dependency %u specifies a dependency from a later subpass (%u) to an earlier subpass (%u), which is "
8918 "disallowed to prevent cyclic dependencies.",
8919 i, dependency.srcSubpass, dependency.dstSubpass);
8920 } else if (dependency.srcSubpass == dependency.dstSubpass) {
8921 if (dependency.viewOffset != 0) {
8922 vuid = use_rp2 ? "VUID-VkSubpassDependency2-viewOffset-02530" : "VUID-VkRenderPassCreateInfo-pNext-01930";
8923 skip |= LogError(device, vuid, "Dependency %u specifies a self-dependency but has a non-zero view offset of %u", i,
8924 dependency.viewOffset);
8925 } else if ((dependency.dependencyFlags | VK_DEPENDENCY_VIEW_LOCAL_BIT) != dependency.dependencyFlags &&
8926 pCreateInfo->pSubpasses[dependency.srcSubpass].viewMask > 1) {
8927 vuid = use_rp2 ? "VUID-VkRenderPassCreateInfo2-pDependencies-03060" : "VUID-VkSubpassDependency-srcSubpass-00872";
8928 skip |= LogError(device, vuid,
8929 "Dependency %u specifies a self-dependency for subpass %u with a non-zero view mask, but does not "
8930 "specify VK_DEPENDENCY_VIEW_LOCAL_BIT.",
8931 i, dependency.srcSubpass);
8932 } else if ((HasNonFramebufferStagePipelineStageFlags(dependency.srcStageMask) ||
8933 HasNonFramebufferStagePipelineStageFlags(dependency.dstStageMask)) &&
8934 (GetGraphicsPipelineStageLogicalOrdinal(latest_src_stage) >
8935 GetGraphicsPipelineStageLogicalOrdinal(earliest_dst_stage))) {
8936 vuid = use_rp2 ? "VUID-VkSubpassDependency2-srcSubpass-03087" : "VUID-VkSubpassDependency-srcSubpass-00867";
8937 skip |= LogError(
8938 device, vuid,
8939 "Dependency %u specifies a self-dependency from logically-later stage (%s) to a logically-earlier stage (%s).",
8940 i, string_VkPipelineStageFlagBits(latest_src_stage), string_VkPipelineStageFlagBits(earliest_dst_stage));
8941 } else if ((HasNonFramebufferStagePipelineStageFlags(dependency.srcStageMask) == false) &&
8942 (HasNonFramebufferStagePipelineStageFlags(dependency.dstStageMask) == false) &&
8943 ((dependency.dependencyFlags & VK_DEPENDENCY_BY_REGION_BIT) == 0)) {
8944 vuid = use_rp2 ? "VUID-VkSubpassDependency2-srcSubpass-02245" : "VUID-VkSubpassDependency-srcSubpass-02243";
8945 skip |= LogError(device, vuid,
8946 "Dependency %u specifies a self-dependency for subpass %u with both stages including a "
8947 "framebuffer-space stage, but does not specify VK_DEPENDENCY_BY_REGION_BIT in dependencyFlags.",
8948 i, dependency.srcSubpass);
8949 }
8950 }
8951 }
8952 return skip;
8953 }
8954
ValidateAttachmentIndex(RenderPassCreateVersion rp_version,uint32_t attachment,uint32_t attachment_count,const char * error_type,const char * function_name) const8955 bool CoreChecks::ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uint32_t attachment, uint32_t attachment_count,
8956 const char *error_type, const char *function_name) const {
8957 bool skip = false;
8958 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
8959 assert(attachment != VK_ATTACHMENT_UNUSED);
8960 if (attachment >= attachment_count) {
8961 const char *vuid =
8962 use_rp2 ? "VUID-VkRenderPassCreateInfo2-attachment-03051" : "VUID-VkRenderPassCreateInfo-attachment-00834";
8963 skip |= LogError(device, vuid, "%s: %s attachment %d must be less than the total number of attachments %d.", function_name,
8964 error_type, attachment, attachment_count);
8965 }
8966 return skip;
8967 }
8968
// Bit flags describing how a subpass uses an attachment; combinable in a
// uint8_t per-attachment use mask.
enum AttachmentType {
    ATTACHMENT_COLOR = 1,
    ATTACHMENT_DEPTH = 2,
    ATTACHMENT_INPUT = 4,
    ATTACHMENT_PRESERVE = 8,
    ATTACHMENT_RESOLVE = 16,
};

// Maps a single AttachmentType bit to a human-readable name for error
// messages; any other value (zero or several bits set) yields "(multiple)".
char const *StringAttachmentType(uint8_t type) {
    if (type == ATTACHMENT_COLOR) return "color";
    if (type == ATTACHMENT_DEPTH) return "depth";
    if (type == ATTACHMENT_INPUT) return "input";
    if (type == ATTACHMENT_PRESERVE) return "preserve";
    if (type == ATTACHMENT_RESOLVE) return "resolve";
    return "(multiple)";
}
8993
AddAttachmentUse(RenderPassCreateVersion rp_version,uint32_t subpass,std::vector<uint8_t> & attachment_uses,std::vector<VkImageLayout> & attachment_layouts,uint32_t attachment,uint8_t new_use,VkImageLayout new_layout) const8994 bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t subpass, std::vector<uint8_t> &attachment_uses,
8995 std::vector<VkImageLayout> &attachment_layouts, uint32_t attachment, uint8_t new_use,
8996 VkImageLayout new_layout) const {
8997 if (attachment >= attachment_uses.size()) return false; /* out of range, but already reported */
8998
8999 bool skip = false;
9000 auto &uses = attachment_uses[attachment];
9001 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
9002 const char *vuid;
9003 const char *const function_name = use_rp2 ? "vkCreateRenderPass2()" : "vkCreateRenderPass()";
9004
9005 if (uses & new_use) {
9006 if (attachment_layouts[attachment] != new_layout) {
9007 vuid = use_rp2 ? "VUID-VkSubpassDescription2-layout-02528" : "VUID-VkSubpassDescription-layout-02519";
9008 skip |= LogError(device, vuid, "%s: subpass %u already uses attachment %u with a different image layout (%s vs %s).",
9009 function_name, subpass, attachment, string_VkImageLayout(attachment_layouts[attachment]),
9010 string_VkImageLayout(new_layout));
9011 }
9012 } else if (uses & ~ATTACHMENT_INPUT || (uses && (new_use == ATTACHMENT_RESOLVE || new_use == ATTACHMENT_PRESERVE))) {
9013 /* Note: input attachments are assumed to be done first. */
9014 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pPreserveAttachments-03074"
9015 : "VUID-VkSubpassDescription-pPreserveAttachments-00854";
9016 skip |= LogError(device, vuid, "%s: subpass %u uses attachment %u as both %s and %s attachment.", function_name, subpass,
9017 attachment, StringAttachmentType(uses), StringAttachmentType(new_use));
9018 } else {
9019 attachment_layouts[attachment] = new_layout;
9020 uses |= new_use;
9021 }
9022
9023 return skip;
9024 }
9025
9026 // Handles attachment references regardless of type (input, color, depth, etc)
9027 // Input attachments have extra VUs associated with them
ValidateAttachmentReference(RenderPassCreateVersion rp_version,VkAttachmentReference2 reference,bool input,const char * error_type,const char * function_name) const9028 bool CoreChecks::ValidateAttachmentReference(RenderPassCreateVersion rp_version, VkAttachmentReference2 reference, bool input,
9029 const char *error_type, const char *function_name) const {
9030 bool skip = false;
9031
9032 // Currently all VUs require attachment to not be UNUSED
9033 assert(reference.attachment != VK_ATTACHMENT_UNUSED);
9034
9035 // currently VkAttachmentReference and VkAttachmentReference2 have no overlapping VUs
9036 if (rp_version == RENDER_PASS_VERSION_1) {
9037 switch (reference.layout) {
9038 case VK_IMAGE_LAYOUT_UNDEFINED:
9039 case VK_IMAGE_LAYOUT_PREINITIALIZED:
9040 case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
9041 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
9042 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
9043 case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
9044 case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
9045 skip |= LogError(device, "VUID-VkAttachmentReference-layout-00857",
9046 "%s: Layout for %s is %s but must not be "
9047 "VK_IMAGE_LAYOUT_[UNDEFINED|PREINITIALIZED|PRESENT_SRC_KHR|DEPTH_ATTACHMENT_OPTIMAL|DEPTH_READ_"
9048 "ONLY_OPTIMAL|STENCIL_ATTACHMENT_OPTIMAL|STENCIL_READ_ONLY_OPTIMAL].",
9049 function_name, error_type, string_VkImageLayout(reference.layout));
9050 break;
9051 default:
9052 break;
9053 }
9054 } else {
9055 const auto *attachment_reference_stencil_layout = lvl_find_in_chain<VkAttachmentReferenceStencilLayoutKHR>(reference.pNext);
9056 switch (reference.layout) {
9057 case VK_IMAGE_LAYOUT_UNDEFINED:
9058 case VK_IMAGE_LAYOUT_PREINITIALIZED:
9059 case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
9060 skip |=
9061 LogError(device, "VUID-VkAttachmentReference2-layout-03077",
9062 "%s: Layout for %s is %s but must not be VK_IMAGE_LAYOUT_[UNDEFINED|PREINITIALIZED|PRESENT_SRC_KHR].",
9063 function_name, error_type, string_VkImageLayout(reference.layout));
9064 break;
9065
9066 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
9067 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
9068 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
9069 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
9070 if ((input == true) && ((reference.aspectMask & (VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT)) == 0)) {
9071 skip |= LogError(device, "VUID-VkAttachmentReference2-attachment-03311",
9072 "%s: Layout for %s can't be %s because the current aspectMask (%x) does not include "
9073 "VK_IMAGE_ASPECT_STENCIL_BIT or VK_IMAGE_ASPECT_DEPTH_BIT.",
9074 function_name, error_type, string_VkImageLayout(reference.layout), reference.aspectMask);
9075 }
9076 break;
9077
9078 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
9079 if ((input == true) && ((reference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) == 0)) {
9080 skip |= LogError(device, "VUID-VkAttachmentReference2-attachment-03312",
9081 "%s: Layout for %s can't be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL because the current "
9082 "aspectMask (%x) does not include VK_IMAGE_ASPECT_COLOR_BIT.",
9083 function_name, error_type, reference.aspectMask);
9084 }
9085 break;
9086
9087 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
9088 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
9089 // This check doesn't rely on the aspect mask value
9090 if (attachment_reference_stencil_layout) {
9091 const VkImageLayout stencilLayout = attachment_reference_stencil_layout->stencilLayout;
9092 // clang-format off
9093 if (stencilLayout == VK_IMAGE_LAYOUT_UNDEFINED ||
9094 stencilLayout == VK_IMAGE_LAYOUT_PREINITIALIZED ||
9095 stencilLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ||
9096 stencilLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL ||
9097 stencilLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL ||
9098 stencilLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
9099 stencilLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
9100 stencilLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
9101 stencilLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
9102 stencilLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
9103 skip |= LogError(device, "VUID-VkAttachmentReferenceStencilLayout-stencilLayout-03318",
9104 "%s: In %s with pNext chain instance VkAttachmentReferenceStencilLayoutKHR, "
9105 "the stencilLayout (%s) must not be "
9106 "VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, "
9107 "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
9108 "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, "
9109 "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, "
9110 "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
9111 "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
9112 "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, "
9113 "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, or "
9114 "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.",
9115 function_name, error_type, string_VkImageLayout(stencilLayout));
9116 }
9117 }
9118 // clang-format on
9119
9120 if (input == true) {
9121 if ((!attachment_reference_stencil_layout) &&
9122 ((reference.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) ==
9123 (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
9124 skip |= LogError(device, "VUID-VkAttachmentReference2-attachment-03315",
9125 "%s: The layout for %s is %s but the pNext chain does not include a valid "
9126 "VkAttachmentReferenceStencilLayout instance.",
9127 function_name, error_type, string_VkImageLayout(reference.layout));
9128 } else if ((reference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != 0) {
9129 skip |=
9130 LogError(device, "VUID-VkAttachmentReference2-attachment-03317",
9131 "%s: The aspectMask for %s is only VK_IMAGE_ASPECT_STENCIL_BIT so the layout (%s) must not be "
9132 "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL.",
9133 function_name, error_type, string_VkImageLayout(reference.layout));
9134 }
9135 }
9136 break;
9137
9138 case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
9139 case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
9140 if ((input == true) && ((reference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != 0)) {
9141 skip |= LogError(device, "VUID-VkAttachmentReference2-attachment-03316",
9142 "%s: The aspectMask for %s is only VK_IMAGE_ASPECT_DEPTH_BIT so the layout (%s) must not be "
9143 "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL.",
9144 function_name, error_type, string_VkImageLayout(reference.layout));
9145 }
9146 break;
9147
9148 default:
9149 break;
9150 }
9151
9152 // Extra case to check for all 4 seperate depth/stencil layout
9153 // This makes the above switch case much easier to read
9154 switch (reference.layout) {
9155 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
9156 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
9157 case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
9158 case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
9159 if (!enabled_features.core12.separateDepthStencilLayouts) {
9160 skip |= LogError(device, "VUID-VkAttachmentReference2-separateDepthStencilLayouts-03313",
9161 "%s: Layout for %s is %s but without separateDepthStencilLayouts enabled the layout must not "
9162 "be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, "
9163 "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL.",
9164 function_name, error_type, string_VkImageLayout(reference.layout));
9165 }
9166 if ((input == true) && ((reference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) != 0)) {
9167 skip |= LogError(device, "VUID-VkAttachmentReference2-attachment-03314",
9168 "%s: Layout for %s aspectMask include VK_IMAGE_ASPECT_COLOR_BIT but the layout is %s.",
9169 function_name, error_type, string_VkImageLayout(reference.layout));
9170 }
9171 default:
9172 break;
9173 }
9174 }
9175
9176 return skip;
9177 }
9178
ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version,const VkRenderPassCreateInfo2 * pCreateInfo,const char * function_name) const9179 bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2 *pCreateInfo,
9180 const char *function_name) const {
9181 bool skip = false;
9182 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
9183 const char *vuid;
9184
9185 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
9186 VkFormat format = pCreateInfo->pAttachments[i].format;
9187 if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
9188 if ((FormatIsColor(format) || FormatHasDepth(format)) &&
9189 pCreateInfo->pAttachments[i].loadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
9190 skip |= LogWarning(device, kVUID_Core_DrawState_InvalidRenderpass,
9191 "%s: Render pass pAttachment[%u] has loadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout == "
9192 "VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you intended. Consider using "
9193 "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truely is undefined at the start of the "
9194 "render pass.",
9195 function_name, i);
9196 }
9197 if (FormatHasStencil(format) && pCreateInfo->pAttachments[i].stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
9198 skip |=
9199 LogWarning(device, kVUID_Core_DrawState_InvalidRenderpass,
9200 "%s: Render pass pAttachment[%u] has stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout "
9201 "== VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you intended. Consider using "
9202 "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truely is undefined at the start of the "
9203 "render pass.",
9204 function_name, i);
9205 }
9206 }
9207 }
9208
9209 // Track when we're observing the first use of an attachment
9210 std::vector<bool> attach_first_use(pCreateInfo->attachmentCount, true);
9211 // Track if attachments are used as input as well as another type
9212 std::unordered_set<uint32_t> input_attachments;
9213
9214 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
9215 const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
9216 std::vector<uint8_t> attachment_uses(pCreateInfo->attachmentCount);
9217 std::vector<VkImageLayout> attachment_layouts(pCreateInfo->attachmentCount);
9218
9219 if (subpass.pipelineBindPoint != VK_PIPELINE_BIND_POINT_GRAPHICS) {
9220 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pipelineBindPoint-03062"
9221 : "VUID-VkSubpassDescription-pipelineBindPoint-00844";
9222 skip |= LogError(device, vuid, "%s: Pipeline bind point for pSubpasses[%d] must be VK_PIPELINE_BIND_POINT_GRAPHICS.",
9223 function_name, i);
9224 }
9225
9226 // Check input attachments first
9227 // - so we can detect first-use-as-input for VU #00349
9228 // - if other color or depth/stencil is also input, it limits valid layouts
9229 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
9230 auto const &attachment_ref = subpass.pInputAttachments[j];
9231 const uint32_t attachment_index = attachment_ref.attachment;
9232 const VkImageAspectFlags aspect_mask = attachment_ref.aspectMask;
9233 if (attachment_index != VK_ATTACHMENT_UNUSED) {
9234 input_attachments.insert(attachment_index);
9235 std::string error_type = "pSubpasses[" + std::to_string(i) + "].pInputAttachments[" + std::to_string(j) + "]";
9236 skip |= ValidateAttachmentReference(rp_version, attachment_ref, true, error_type.c_str(), function_name);
9237 skip |= ValidateAttachmentIndex(rp_version, attachment_index, pCreateInfo->attachmentCount, error_type.c_str(),
9238 function_name);
9239
9240 if (aspect_mask & VK_IMAGE_ASPECT_METADATA_BIT) {
9241 vuid = use_rp2 ? "VUID-VkSubpassDescription2-attachment-02801"
9242 : "VUID-VkInputAttachmentAspectReference-aspectMask-01964";
9243 skip |= LogError(
9244 device, vuid,
9245 "%s: Aspect mask for input attachment reference %d in subpass %d includes VK_IMAGE_ASPECT_METADATA_BIT.",
9246 function_name, j, i);
9247 } else if (aspect_mask & (VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT | VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT |
9248 VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT | VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT)) {
9249 // TODO - Add VUID when new headers are added
9250 vuid = use_rp2 ? "UNASSIGNED-VkSubpassDescription2-attachment"
9251 : "VUID-VkInputAttachmentAspectReference-aspectMask-02250";
9252 skip |= LogError(device, vuid,
9253 "%s: Aspect mask for input attachment reference %d in subpass %d includes "
9254 "VK_IMAGE_ASPECT_MEMORY_PLANE_*_BIT_EXT bit.",
9255 function_name, j, i);
9256 }
9257
9258 if (attachment_index < pCreateInfo->attachmentCount) {
9259 skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_index, ATTACHMENT_INPUT,
9260 attachment_ref.layout);
9261
9262 vuid = use_rp2 ? "VUID-VkRenderPassCreateInfo2-attachment-02525" : "VUID-VkRenderPassCreateInfo-pNext-01963";
9263 skip |= ValidateImageAspectMask(VK_NULL_HANDLE, pCreateInfo->pAttachments[attachment_index].format, aspect_mask,
9264 function_name, vuid);
9265
9266 if (attach_first_use[attachment_index]) {
9267 skip |=
9268 ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pInputAttachments[j].layout,
9269 attachment_index, pCreateInfo->pAttachments[attachment_index]);
9270
9271 bool used_as_depth = (subpass.pDepthStencilAttachment != NULL &&
9272 subpass.pDepthStencilAttachment->attachment == attachment_index);
9273 bool used_as_color = false;
9274 for (uint32_t k = 0; !used_as_depth && !used_as_color && k < subpass.colorAttachmentCount; ++k) {
9275 used_as_color = (subpass.pColorAttachments[k].attachment == attachment_index);
9276 }
9277 if (!used_as_depth && !used_as_color &&
9278 pCreateInfo->pAttachments[attachment_index].loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
9279 vuid = use_rp2 ? "VUID-VkSubpassDescription2-loadOp-03064" : "VUID-VkSubpassDescription-loadOp-00846";
9280 skip |= LogError(device, vuid,
9281 "%s: attachment %u is first used as an input attachment in %s with loadOp set to "
9282 "VK_ATTACHMENT_LOAD_OP_CLEAR.",
9283 function_name, attachment_index, error_type.c_str());
9284 }
9285 }
9286 attach_first_use[attachment_index] = false;
9287 }
9288
9289 if (rp_version == RENDER_PASS_VERSION_2) {
9290 // These are validated automatically as part of parameter validation for create renderpass 1
9291 // as they are in a struct that only applies to input attachments - not so for v2.
9292
9293 // Check for 0
9294 if (aspect_mask == 0) {
9295 skip |= LogError(device, "VUID-VkSubpassDescription2-attachment-02800",
9296 "%s: Input attachment %s aspect mask must not be 0.", function_name, error_type.c_str());
9297 } else {
9298 const VkImageAspectFlags valid_bits =
9299 (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT |
9300 VK_IMAGE_ASPECT_METADATA_BIT | VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT |
9301 VK_IMAGE_ASPECT_PLANE_2_BIT | VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT |
9302 VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT | VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT |
9303 VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT);
9304
9305 // Check for valid aspect mask bits
9306 if (aspect_mask & ~valid_bits) {
9307 skip |= LogError(device, "VUID-VkSubpassDescription2-attachment-02799",
9308 "%s: Input attachment %s aspect mask (0x%" PRIx32 ")is invalid.", function_name,
9309 error_type.c_str(), aspect_mask);
9310 }
9311 }
9312 }
9313
9314 const VkFormatFeatureFlags valid_flags =
9315 VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
9316 const VkFormat attachment_format = pCreateInfo->pAttachments[attachment_index].format;
9317 const VkFormatFeatureFlags format_features = GetPotentialFormatFeatures(attachment_format);
9318 if ((format_features & valid_flags) == 0) {
9319 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pInputAttachments-02897"
9320 : "VUID-VkSubpassDescription-pInputAttachments-02647";
9321 skip |= LogError(device, vuid,
9322 "%s: Input attachment %s format (%s) does not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT "
9323 "| VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT.",
9324 function_name, error_type.c_str(), string_VkFormat(attachment_format));
9325 }
9326
9327 // Validate layout
9328 vuid = use_rp2 ? "VUID-VkSubpassDescription2-None-04439" : "VUID-VkSubpassDescription-None-04437";
9329 switch (attachment_ref.layout) {
9330 case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
9331 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
9332 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
9333 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
9334 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
9335 case VK_IMAGE_LAYOUT_GENERAL:
9336 break; // valid layouts
9337 default:
9338 skip |= LogError(device, vuid,
9339 "%s: %s layout is %s but input attachments must be "
9340 "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, "
9341 "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, "
9342 "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, "
9343 "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
9344 "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_GENERAL.",
9345 function_name, error_type.c_str(), string_VkImageLayout(attachment_ref.layout));
9346 break;
9347 }
9348 }
9349 }
9350
9351 for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
9352 std::string error_type = "pSubpasses[" + std::to_string(i) + "].pPreserveAttachments[" + std::to_string(j) + "]";
9353 uint32_t attachment = subpass.pPreserveAttachments[j];
9354 if (attachment == VK_ATTACHMENT_UNUSED) {
9355 vuid = use_rp2 ? "VUID-VkSubpassDescription2-attachment-03073" : "VUID-VkSubpassDescription-attachment-00853";
9356 skip |= LogError(device, vuid, "%s: Preserve attachment (%d) must not be VK_ATTACHMENT_UNUSED.", function_name, j);
9357 } else {
9358 skip |= ValidateAttachmentIndex(rp_version, attachment, pCreateInfo->attachmentCount, error_type.c_str(),
9359 function_name);
9360 if (attachment < pCreateInfo->attachmentCount) {
9361 skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment, ATTACHMENT_PRESERVE,
9362 VkImageLayout(0) /* preserve doesn't have any layout */);
9363 }
9364 }
9365 }
9366
9367 bool subpass_performs_resolve = false;
9368
9369 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
9370 if (subpass.pResolveAttachments) {
9371 std::string error_type = "pSubpasses[" + std::to_string(i) + "].pResolveAttachments[" + std::to_string(j) + "]";
9372 auto const &attachment_ref = subpass.pResolveAttachments[j];
9373 if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
9374 skip |= ValidateAttachmentReference(rp_version, attachment_ref, false, error_type.c_str(), function_name);
9375 skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount,
9376 error_type.c_str(), function_name);
9377
9378 if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
9379 skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
9380 ATTACHMENT_RESOLVE, attachment_ref.layout);
9381
9382 subpass_performs_resolve = true;
9383
9384 if (pCreateInfo->pAttachments[attachment_ref.attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
9385 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pResolveAttachments-03067"
9386 : "VUID-VkSubpassDescription-pResolveAttachments-00849";
9387 skip |= LogError(
9388 device, vuid,
9389 "%s: Subpass %u requests multisample resolve into attachment %u, which must "
9390 "have VK_SAMPLE_COUNT_1_BIT but has %s.",
9391 function_name, i, attachment_ref.attachment,
9392 string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples));
9393 }
9394 }
9395
9396 const VkFormat attachment_format = pCreateInfo->pAttachments[attachment_ref.attachment].format;
9397 const VkFormatFeatureFlags format_features = GetPotentialFormatFeatures(attachment_format);
9398 if ((format_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) == 0) {
9399 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pResolveAttachments-02899"
9400 : "VUID-VkSubpassDescription-pResolveAttachments-02649";
9401 skip |= LogError(device, vuid,
9402 "%s: Resolve attachment %s format (%s) does not contain "
9403 "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT.",
9404 function_name, error_type.c_str(), string_VkFormat(attachment_format));
9405 }
9406 }
9407 }
9408 }
9409
9410 if (subpass.pDepthStencilAttachment) {
9411 std::string error_type = "pSubpasses[" + std::to_string(i) + "].pDepthStencilAttachment";
9412 const uint32_t attachment = subpass.pDepthStencilAttachment->attachment;
9413 const VkImageLayout imageLayout = subpass.pDepthStencilAttachment->layout;
9414 if (attachment != VK_ATTACHMENT_UNUSED) {
9415 skip |= ValidateAttachmentReference(rp_version, *subpass.pDepthStencilAttachment, false, error_type.c_str(),
9416 function_name);
9417 skip |= ValidateAttachmentIndex(rp_version, attachment, pCreateInfo->attachmentCount, error_type.c_str(),
9418 function_name);
9419 if (attachment < pCreateInfo->attachmentCount) {
9420 skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment, ATTACHMENT_DEPTH,
9421 imageLayout);
9422
9423 if (attach_first_use[attachment]) {
9424 skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, imageLayout, attachment,
9425 pCreateInfo->pAttachments[attachment]);
9426 }
9427 attach_first_use[attachment] = false;
9428 }
9429
9430 const VkFormat attachment_format = pCreateInfo->pAttachments[attachment].format;
9431 const VkFormatFeatureFlags format_features = GetPotentialFormatFeatures(attachment_format);
9432 if ((format_features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) {
9433 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pDepthStencilAttachment-02900"
9434 : "VUID-VkSubpassDescription-pDepthStencilAttachment-02650";
9435 skip |= LogError(device, vuid,
9436 "%s: Depth Stencil %s format (%s) does not contain "
9437 "VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT.",
9438 function_name, error_type.c_str(), string_VkFormat(attachment_format));
9439 }
9440
9441 // Check for valid imageLayout
9442 vuid = use_rp2 ? "VUID-VkSubpassDescription2-None-04439" : "VUID-VkSubpassDescription-None-04437";
9443 switch (imageLayout) {
9444 case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
9445 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
9446 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
9447 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
9448 case VK_IMAGE_LAYOUT_GENERAL:
9449 break; // valid layouts
9450 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
9451 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
9452 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
9453 case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
9454 case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
9455 if (input_attachments.find(attachment) != input_attachments.end()) {
9456 skip |= LogError(
9457 device, vuid,
9458 "%s: %s is also an input attachment so the layout (%s) must not be "
9459 "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, "
9460 "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, or "
9461 "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL.",
9462 function_name, error_type.c_str(), string_VkImageLayout(imageLayout));
9463 }
9464 break;
9465 default:
9466 skip |= LogError(device, vuid,
9467 "%s: %s layout is %s but depth/stencil attachments must be "
9468 "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
9469 "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, "
9470 "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, "
9471 "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, "
9472 "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, "
9473 "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, "
9474 "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, "
9475 "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, "
9476 "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, or "
9477 "VK_IMAGE_LAYOUT_GENERAL.",
9478 function_name, error_type.c_str(), string_VkImageLayout(imageLayout));
9479 break;
9480 }
9481 }
9482 }
9483
9484 uint32_t last_sample_count_attachment = VK_ATTACHMENT_UNUSED;
9485 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
9486 std::string error_type = "pSubpasses[" + std::to_string(i) + "].pColorAttachments[" + std::to_string(j) + "]";
9487 auto const &attachment_ref = subpass.pColorAttachments[j];
9488 const uint32_t attachment_index = attachment_ref.attachment;
9489 if (attachment_index != VK_ATTACHMENT_UNUSED) {
9490 skip |= ValidateAttachmentReference(rp_version, attachment_ref, false, error_type.c_str(), function_name);
9491 skip |= ValidateAttachmentIndex(rp_version, attachment_index, pCreateInfo->attachmentCount, error_type.c_str(),
9492 function_name);
9493
9494 if (attachment_index < pCreateInfo->attachmentCount) {
9495 skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_index, ATTACHMENT_COLOR,
9496 attachment_ref.layout);
9497
9498 VkSampleCountFlagBits current_sample_count = pCreateInfo->pAttachments[attachment_index].samples;
9499 if (last_sample_count_attachment != VK_ATTACHMENT_UNUSED) {
9500 VkSampleCountFlagBits last_sample_count =
9501 pCreateInfo->pAttachments[subpass.pColorAttachments[last_sample_count_attachment].attachment].samples;
9502 if (current_sample_count != last_sample_count) {
9503 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pColorAttachments-03069"
9504 : "VUID-VkSubpassDescription-pColorAttachments-01417";
9505 skip |= LogError(
9506 device, vuid,
9507 "%s: Subpass %u attempts to render to color attachments with inconsistent sample counts."
9508 "Color attachment ref %u has sample count %s, whereas previous color attachment ref %u has "
9509 "sample count %s.",
9510 function_name, i, j, string_VkSampleCountFlagBits(current_sample_count),
9511 last_sample_count_attachment, string_VkSampleCountFlagBits(last_sample_count));
9512 }
9513 }
9514 last_sample_count_attachment = j;
9515
9516 if (subpass_performs_resolve && current_sample_count == VK_SAMPLE_COUNT_1_BIT) {
9517 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pResolveAttachments-03066"
9518 : "VUID-VkSubpassDescription-pResolveAttachments-00848";
9519 skip |= LogError(device, vuid,
9520 "%s: ubpass %u requests multisample resolve from attachment %u which has "
9521 "VK_SAMPLE_COUNT_1_BIT.",
9522 function_name, i, attachment_index);
9523 }
9524
9525 if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED &&
9526 subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount) {
9527 const auto depth_stencil_sample_count =
9528 pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples;
9529
9530 if (device_extensions.vk_amd_mixed_attachment_samples) {
9531 if (pCreateInfo->pAttachments[attachment_index].samples > depth_stencil_sample_count) {
9532 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pColorAttachments-03070"
9533 : "VUID-VkSubpassDescription-pColorAttachments-01506";
9534 skip |= LogError(device, vuid, "%s: %s has %s which is larger than depth/stencil attachment %s.",
9535 function_name, error_type.c_str(),
9536 string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_index].samples),
9537 string_VkSampleCountFlagBits(depth_stencil_sample_count));
9538 break;
9539 }
9540 }
9541
9542 if (!device_extensions.vk_amd_mixed_attachment_samples &&
9543 !device_extensions.vk_nv_framebuffer_mixed_samples &&
9544 current_sample_count != depth_stencil_sample_count) {
9545 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pDepthStencilAttachment-03071"
9546 : "VUID-VkSubpassDescription-pDepthStencilAttachment-01418";
9547 skip |= LogError(device, vuid,
9548 "%s: Subpass %u attempts to render to use a depth/stencil attachment with sample "
9549 "count that differs "
9550 "from color attachment %u."
9551 "The depth attachment ref has sample count %s, whereas color attachment ref %u has "
9552 "sample count %s.",
9553 function_name, i, j, string_VkSampleCountFlagBits(depth_stencil_sample_count), j,
9554 string_VkSampleCountFlagBits(current_sample_count));
9555 break;
9556 }
9557 }
9558
9559 const VkFormat attachment_format = pCreateInfo->pAttachments[attachment_index].format;
9560 const VkFormatFeatureFlags format_features = GetPotentialFormatFeatures(attachment_format);
9561 if ((format_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) == 0) {
9562 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pColorAttachments-02898"
9563 : "VUID-VkSubpassDescription-pColorAttachments-02648";
9564 skip |= LogError(device, vuid,
9565 "%s: Color attachment %s format (%s) does not contain "
9566 "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT.",
9567 function_name, error_type.c_str(), string_VkFormat(attachment_format));
9568 }
9569
9570 if (attach_first_use[attachment_index]) {
9571 skip |=
9572 ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pColorAttachments[j].layout,
9573 attachment_index, pCreateInfo->pAttachments[attachment_index]);
9574 }
9575 attach_first_use[attachment_index] = false;
9576 }
9577
9578 // Check for valid imageLayout
9579 vuid = use_rp2 ? "VUID-VkSubpassDescription2-None-04439" : "VUID-VkSubpassDescription-None-04437";
9580 switch (attachment_ref.layout) {
9581 case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
9582 case VK_IMAGE_LAYOUT_GENERAL:
9583 break; // valid layouts
9584 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
9585 if (input_attachments.find(attachment_index) != input_attachments.end()) {
9586 skip |= LogError(device, vuid,
9587 "%s: %s is also an input attachment so the layout (%s) must not be "
9588 "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL.",
9589 function_name, error_type.c_str(), string_VkImageLayout(attachment_ref.layout));
9590 }
9591 break;
9592 default:
9593 skip |= LogError(device, vuid,
9594 "%s: %s layout is %s but color attachments must be "
9595 "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
9596 "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, or "
9597 "VK_IMAGE_LAYOUT_GENERAL.",
9598 function_name, error_type.c_str(), string_VkImageLayout(attachment_ref.layout));
9599 break;
9600 }
9601 }
9602
9603 if (subpass_performs_resolve && subpass.pResolveAttachments[j].attachment != VK_ATTACHMENT_UNUSED &&
9604 subpass.pResolveAttachments[j].attachment < pCreateInfo->attachmentCount) {
9605 if (attachment_index == VK_ATTACHMENT_UNUSED) {
9606 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pResolveAttachments-03065"
9607 : "VUID-VkSubpassDescription-pResolveAttachments-00847";
9608 skip |= LogError(device, vuid,
9609 "%s: Subpass %u requests multisample resolve from attachment %u which has "
9610 "attachment=VK_ATTACHMENT_UNUSED.",
9611 function_name, i, attachment_index);
9612 } else {
9613 const auto &color_desc = pCreateInfo->pAttachments[attachment_index];
9614 const auto &resolve_desc = pCreateInfo->pAttachments[subpass.pResolveAttachments[j].attachment];
9615 if (color_desc.format != resolve_desc.format) {
9616 vuid = use_rp2 ? "VUID-VkSubpassDescription2-pResolveAttachments-03068"
9617 : "VUID-VkSubpassDescription-pResolveAttachments-00850";
9618 skip |= LogError(device, vuid,
9619 "%s: %s resolves to an attachment with a "
9620 "different format. color format: %u, resolve format: %u.",
9621 function_name, error_type.c_str(), color_desc.format, resolve_desc.format);
9622 }
9623 }
9624 }
9625 }
9626 }
9627 return skip;
9628 }
9629
ValidateCreateRenderPass(VkDevice device,RenderPassCreateVersion rp_version,const VkRenderPassCreateInfo2 * pCreateInfo,const char * function_name) const9630 bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersion rp_version,
9631 const VkRenderPassCreateInfo2 *pCreateInfo, const char *function_name) const {
9632 bool skip = false;
9633 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
9634 const char *vuid;
9635
9636 skip |= ValidateRenderpassAttachmentUsage(rp_version, pCreateInfo, function_name);
9637
9638 skip |= ValidateRenderPassDAG(rp_version, pCreateInfo);
9639
9640 // Validate multiview correlation and view masks
9641 bool viewMaskZero = false;
9642 bool viewMaskNonZero = false;
9643
9644 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
9645 const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
9646 if (subpass.viewMask != 0) {
9647 viewMaskNonZero = true;
9648 } else {
9649 viewMaskZero = true;
9650 }
9651
9652 if ((subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX) != 0 &&
9653 (subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX) == 0) {
9654 vuid = use_rp2 ? "VUID-VkSubpassDescription2-flags-03076" : "VUID-VkSubpassDescription-flags-00856";
9655 skip |= LogError(device, vuid,
9656 "%s: The flags parameter of subpass description %u includes "
9657 "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX but does not also include "
9658 "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX.",
9659 function_name, i);
9660 }
9661 }
9662
9663 if (rp_version == RENDER_PASS_VERSION_2) {
9664 if (viewMaskNonZero && viewMaskZero) {
9665 skip |= LogError(device, "VUID-VkRenderPassCreateInfo2-viewMask-03058",
9666 "%s: Some view masks are non-zero whilst others are zero.", function_name);
9667 }
9668
9669 if (viewMaskZero && pCreateInfo->correlatedViewMaskCount != 0) {
9670 skip |= LogError(device, "VUID-VkRenderPassCreateInfo2-viewMask-03057",
9671 "%s: Multiview is not enabled but correlation masks are still provided", function_name);
9672 }
9673 }
9674 uint32_t aggregated_cvms = 0;
9675 for (uint32_t i = 0; i < pCreateInfo->correlatedViewMaskCount; ++i) {
9676 if (aggregated_cvms & pCreateInfo->pCorrelatedViewMasks[i]) {
9677 vuid = use_rp2 ? "VUID-VkRenderPassCreateInfo2-pCorrelatedViewMasks-03056"
9678 : "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841";
9679 skip |=
9680 LogError(device, vuid, "%s: pCorrelatedViewMasks[%u] contains a previously appearing view bit.", function_name, i);
9681 }
9682 aggregated_cvms |= pCreateInfo->pCorrelatedViewMasks[i];
9683 }
9684
9685 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
9686 auto const &dependency = pCreateInfo->pDependencies[i];
9687 if (rp_version == RENDER_PASS_VERSION_2) {
9688 skip |= ValidateStageMaskGsTsEnables(
9689 dependency.srcStageMask, function_name, "VUID-VkSubpassDependency2-srcStageMask-03080",
9690 "VUID-VkSubpassDependency2-srcStageMask-03082", "VUID-VkSubpassDependency2-srcStageMask-02103",
9691 "VUID-VkSubpassDependency2-srcStageMask-02104");
9692 skip |= ValidateStageMaskGsTsEnables(
9693 dependency.dstStageMask, function_name, "VUID-VkSubpassDependency2-dstStageMask-03081",
9694 "VUID-VkSubpassDependency2-dstStageMask-03083", "VUID-VkSubpassDependency2-dstStageMask-02105",
9695 "VUID-VkSubpassDependency2-dstStageMask-02106");
9696 } else {
9697 skip |= ValidateStageMaskGsTsEnables(
9698 dependency.srcStageMask, function_name, "VUID-VkSubpassDependency-srcStageMask-00860",
9699 "VUID-VkSubpassDependency-srcStageMask-00862", "VUID-VkSubpassDependency-srcStageMask-02099",
9700 "VUID-VkSubpassDependency-srcStageMask-02100");
9701 skip |= ValidateStageMaskGsTsEnables(
9702 dependency.dstStageMask, function_name, "VUID-VkSubpassDependency-dstStageMask-00861",
9703 "VUID-VkSubpassDependency-dstStageMask-00863", "VUID-VkSubpassDependency-dstStageMask-02101",
9704 "VUID-VkSubpassDependency-dstStageMask-02102");
9705 }
9706
9707 if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.srcAccessMask, dependency.srcStageMask)) {
9708 vuid = use_rp2 ? "VUID-VkSubpassDependency2-srcAccessMask-03088" : "VUID-VkSubpassDependency-srcAccessMask-00868";
9709 skip |=
9710 LogError(device, vuid,
9711 "%s: pDependencies[%u].srcAccessMask (0x%" PRIx32 ") is not supported by srcStageMask (0x%" PRIx32 ").",
9712 function_name, i, dependency.srcAccessMask, dependency.srcStageMask);
9713 }
9714
9715 if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.dstAccessMask, dependency.dstStageMask)) {
9716 vuid = use_rp2 ? "VUID-VkSubpassDependency2-dstAccessMask-03089" : "VUID-VkSubpassDependency-dstAccessMask-00869";
9717 skip |=
9718 LogError(device, vuid,
9719 "%s: pDependencies[%u].dstAccessMask (0x%" PRIx32 ") is not supported by dstStageMask (0x%" PRIx32 ").",
9720 function_name, i, dependency.dstAccessMask, dependency.dstStageMask);
9721 }
9722 }
9723 return skip;
9724 }
9725
PreCallValidateCreateRenderPass(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass) const9726 bool CoreChecks::PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
9727 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const {
9728 bool skip = false;
9729 // Handle extension structs from KHR_multiview and KHR_maintenance2 that can only be validated for RP1 (indices out of bounds)
9730 const VkRenderPassMultiviewCreateInfo *pMultiviewInfo = lvl_find_in_chain<VkRenderPassMultiviewCreateInfo>(pCreateInfo->pNext);
9731 if (pMultiviewInfo) {
9732 if (pMultiviewInfo->subpassCount && pMultiviewInfo->subpassCount != pCreateInfo->subpassCount) {
9733 skip |= LogError(device, "VUID-VkRenderPassCreateInfo-pNext-01928",
9734 "vkCreateRenderPass(): Subpass count is %u but multiview info has a subpass count of %u.",
9735 pCreateInfo->subpassCount, pMultiviewInfo->subpassCount);
9736 } else if (pMultiviewInfo->dependencyCount && pMultiviewInfo->dependencyCount != pCreateInfo->dependencyCount) {
9737 skip |= LogError(device, "VUID-VkRenderPassCreateInfo-pNext-01929",
9738 "vkCreateRenderPass(): Dependency count is %u but multiview info has a dependency count of %u.",
9739 pCreateInfo->dependencyCount, pMultiviewInfo->dependencyCount);
9740 }
9741 }
9742 const VkRenderPassInputAttachmentAspectCreateInfo *pInputAttachmentAspectInfo =
9743 lvl_find_in_chain<VkRenderPassInputAttachmentAspectCreateInfo>(pCreateInfo->pNext);
9744 if (pInputAttachmentAspectInfo) {
9745 for (uint32_t i = 0; i < pInputAttachmentAspectInfo->aspectReferenceCount; ++i) {
9746 uint32_t subpass = pInputAttachmentAspectInfo->pAspectReferences[i].subpass;
9747 uint32_t attachment = pInputAttachmentAspectInfo->pAspectReferences[i].inputAttachmentIndex;
9748 if (subpass >= pCreateInfo->subpassCount) {
9749 skip |= LogError(device, "VUID-VkRenderPassCreateInfo-pNext-01926",
9750 "vkCreateRenderPass(): Subpass index %u specified by input attachment aspect info %u is greater "
9751 "than the subpass "
9752 "count of %u for this render pass.",
9753 subpass, i, pCreateInfo->subpassCount);
9754 } else if (pCreateInfo->pSubpasses && attachment >= pCreateInfo->pSubpasses[subpass].inputAttachmentCount) {
9755 skip |= LogError(device, "VUID-VkRenderPassCreateInfo-pNext-01927",
9756 "vkCreateRenderPass(): Input attachment index %u specified by input attachment aspect info %u is "
9757 "greater than the "
9758 "input attachment count of %u for this subpass.",
9759 attachment, i, pCreateInfo->pSubpasses[subpass].inputAttachmentCount);
9760 }
9761 }
9762 }
9763 const VkRenderPassFragmentDensityMapCreateInfoEXT *pFragmentDensityMapInfo =
9764 lvl_find_in_chain<VkRenderPassFragmentDensityMapCreateInfoEXT>(pCreateInfo->pNext);
9765 if (pFragmentDensityMapInfo) {
9766 if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment != VK_ATTACHMENT_UNUSED) {
9767 if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment >= pCreateInfo->attachmentCount) {
9768 skip |= LogError(device, "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547",
9769 "vkCreateRenderPass(): fragmentDensityMapAttachment %u must be less than attachmentCount %u of "
9770 "for this render pass.",
9771 pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment, pCreateInfo->attachmentCount);
9772 } else {
9773 if (!(pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout ==
9774 VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
9775 pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout == VK_IMAGE_LAYOUT_GENERAL)) {
9776 skip |= LogError(device, "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549",
9777 "vkCreateRenderPass(): Layout of fragmentDensityMapAttachment %u' must be equal to "
9778 "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, or VK_IMAGE_LAYOUT_GENERAL.",
9779 pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
9780 }
9781 if (!(pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
9782 VK_ATTACHMENT_LOAD_OP_LOAD ||
9783 pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
9784 VK_ATTACHMENT_LOAD_OP_DONT_CARE)) {
9785 skip |= LogError(
9786 device, "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550",
9787 "vkCreateRenderPass(): FragmentDensityMapAttachment %u' must reference an attachment with a loadOp "
9788 "equal to VK_ATTACHMENT_LOAD_OP_LOAD or VK_ATTACHMENT_LOAD_OP_DONT_CARE.",
9789 pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
9790 }
9791 if (pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].storeOp !=
9792 VK_ATTACHMENT_STORE_OP_DONT_CARE) {
9793 skip |= LogError(
9794 device, "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551",
9795 "vkCreateRenderPass(): FragmentDensityMapAttachment %u' must reference an attachment with a storeOp "
9796 "equal to VK_ATTACHMENT_STORE_OP_DONT_CARE.",
9797 pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
9798 }
9799 }
9800 }
9801 }
9802
9803 if (!skip) {
9804 safe_VkRenderPassCreateInfo2 create_info_2;
9805 ConvertVkRenderPassCreateInfoToV2KHR(*pCreateInfo, &create_info_2);
9806 skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_1, create_info_2.ptr(), "vkCreateRenderPass()");
9807 }
9808
9809 return skip;
9810 }
9811
// Validates VkSubpassDescriptionDepthStencilResolve structures (VK_KHR_depth_stencil_resolve /
// Vulkan 1.2) chained to each subpass of a render pass 2 create info.
// core12_props supplies the device's supported depth/stencil resolve modes and the
// independentResolve / independentResolveNone capabilities.
// Returns true if any validation error was logged.
bool CoreChecks::ValidateDepthStencilResolve(const VkPhysicalDeviceVulkan12Properties &core12_props,
                                             const VkRenderPassCreateInfo2 *pCreateInfo, const char *function_name) const {
    bool skip = false;

    // If the pNext list of VkSubpassDescription2 includes a VkSubpassDescriptionDepthStencilResolve structure,
    // then that structure describes depth/stencil resolve operations for the subpass.
    for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
        const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolve>(subpass.pNext);

        // Subpasses without the resolve struct have nothing to check.
        if (resolve == nullptr) {
            continue;
        }

        // Precompute validity of both the resolve attachment reference and the subpass's
        // depth/stencil attachment reference; the checks below are gated on these so that
        // pAttachments is never indexed out of bounds.
        const bool resolve_attachment_not_unused = (resolve->pDepthStencilResolveAttachment != nullptr &&
                                                    resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED);
        const bool valid_resolve_attachment_index =
            (resolve_attachment_not_unused && resolve->pDepthStencilResolveAttachment->attachment < pCreateInfo->attachmentCount);

        const bool ds_attachment_not_unused =
            (subpass.pDepthStencilAttachment != nullptr && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED);
        const bool valid_ds_attachment_index =
            (ds_attachment_not_unused && subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount);

        // 03177: a used resolve attachment requires a used depth/stencil attachment.
        if (resolve_attachment_not_unused && subpass.pDepthStencilAttachment != nullptr &&
            subpass.pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03177",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure with resolve attachment %u, but pDepthStencilAttachment=VK_ATTACHMENT_UNUSED.",
                             function_name, i, resolve->pDepthStencilResolveAttachment->attachment);
        }

        // 03178: a used resolve attachment requires at least one non-NONE resolve mode.
        if (resolve_attachment_not_unused && resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR &&
            resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03178",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure with resolve attachment %u, but both depth and stencil resolve modes are "
                             "VK_RESOLVE_MODE_NONE_KHR.",
                             function_name, i, resolve->pDepthStencilResolveAttachment->attachment);
        }

        // 03179: the source depth/stencil attachment must be multisampled.
        if (resolve_attachment_not_unused && valid_ds_attachment_index &&
            pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples == VK_SAMPLE_COUNT_1_BIT) {
            skip |= LogError(
                device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03179",
                "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                "structure with resolve attachment %u. However pDepthStencilAttachment has sample count=VK_SAMPLE_COUNT_1_BIT.",
                function_name, i, resolve->pDepthStencilResolveAttachment->attachment);
        }

        // 03180: the resolve destination attachment must be single-sampled.
        if (valid_resolve_attachment_index &&
            pCreateInfo->pAttachments[resolve->pDepthStencilResolveAttachment->attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03180",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure with resolve attachment %u which has sample count=VK_SAMPLE_COUNT_1_BIT.",
                             function_name, i, resolve->pDepthStencilResolveAttachment->attachment);
        }

        // Formats of the source and destination attachments (UNDEFINED when the
        // corresponding reference is unused/out-of-range, which skips the checks below).
        VkFormat pDepthStencilAttachmentFormat =
            (valid_ds_attachment_index ? pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].format
                                       : VK_FORMAT_UNDEFINED);
        VkFormat pDepthStencilResolveAttachmentFormat =
            (valid_resolve_attachment_index ? pCreateInfo->pAttachments[resolve->pDepthStencilResolveAttachment->attachment].format
                                            : VK_FORMAT_UNDEFINED);

        if (valid_ds_attachment_index && valid_resolve_attachment_index) {
            const auto resolve_depth_size = FormatDepthSize(pDepthStencilResolveAttachmentFormat);
            const auto resolve_stencil_size = FormatStencilSize(pDepthStencilResolveAttachmentFormat);

            // 03181: depth components must match in bit size and numerical type.
            if (resolve_depth_size > 0 && ((FormatDepthSize(pDepthStencilAttachmentFormat) != resolve_depth_size) ||
                                           (FormatDepthNumericalType(pDepthStencilAttachmentFormat) !=
                                            FormatDepthNumericalType(pDepthStencilResolveAttachmentFormat)))) {
                skip |= LogError(
                    device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03181",
                    "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                    "structure with resolve attachment %u which has a depth component (size %u). The depth component "
                    "of pDepthStencilAttachment must have the same number of bits (currently %u) and the same numerical type.",
                    function_name, i, resolve->pDepthStencilResolveAttachment->attachment, resolve_depth_size,
                    FormatDepthSize(pDepthStencilAttachmentFormat));
            }

            // 03182: stencil components must match in bit size and numerical type.
            if (resolve_stencil_size > 0 && ((FormatStencilSize(pDepthStencilAttachmentFormat) != resolve_stencil_size) ||
                                             (FormatStencilNumericalType(pDepthStencilAttachmentFormat) !=
                                              FormatStencilNumericalType(pDepthStencilResolveAttachmentFormat)))) {
                skip |= LogError(
                    device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03182",
                    "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                    "structure with resolve attachment %u which has a stencil component (size %u). The stencil component "
                    "of pDepthStencilAttachment must have the same number of bits (currently %u) and the same numerical type.",
                    function_name, i, resolve->pDepthStencilResolveAttachment->attachment, resolve_stencil_size,
                    FormatStencilSize(pDepthStencilAttachmentFormat));
            }
        }

        // 03183/03184: each resolve mode must be NONE or one the device supports.
        if (!(resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
              resolve->depthResolveMode & core12_props.supportedDepthResolveModes)) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-depthResolveMode-03183",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure with invalid depthResolveMode=%u.",
                             function_name, i, resolve->depthResolveMode);
        }

        if (!(resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
              resolve->stencilResolveMode & core12_props.supportedStencilResolveModes)) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-stencilResolveMode-03184",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure with invalid stencilResolveMode=%u.",
                             function_name, i, resolve->stencilResolveMode);
        }

        // 03185: without independentResolve or independentResolveNone, combined depth/stencil
        // formats require identical depth and stencil resolve modes.
        if (valid_resolve_attachment_index && FormatIsDepthAndStencil(pDepthStencilResolveAttachmentFormat) &&
            core12_props.independentResolve == VK_FALSE && core12_props.independentResolveNone == VK_FALSE &&
            !(resolve->depthResolveMode == resolve->stencilResolveMode)) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03185",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure. The values of depthResolveMode (%u) and stencilResolveMode (%u) must be identical.",
                             function_name, i, resolve->depthResolveMode, resolve->stencilResolveMode);
        }

        // 03186: with independentResolveNone only, the modes must be identical or one must be NONE.
        if (valid_resolve_attachment_index && FormatIsDepthAndStencil(pDepthStencilResolveAttachmentFormat) &&
            core12_props.independentResolve == VK_FALSE && core12_props.independentResolveNone == VK_TRUE &&
            !(resolve->depthResolveMode == resolve->stencilResolveMode || resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
              resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR)) {
            skip |= LogError(device, "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03186",
                             "%s: Subpass %u includes a VkSubpassDescriptionDepthStencilResolve "
                             "structure. The values of depthResolveMode (%u) and stencilResolveMode (%u) must be identical, or "
                             "one of them must be %u.",
                             function_name, i, resolve->depthResolveMode, resolve->stencilResolveMode, VK_RESOLVE_MODE_NONE_KHR);
        }
    }

    return skip;
}
9945
ValidateCreateRenderPass2(VkDevice device,const VkRenderPassCreateInfo2 * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass,const char * function_name) const9946 bool CoreChecks::ValidateCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
9947 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
9948 const char *function_name) const {
9949 bool skip = false;
9950
9951 if (device_extensions.vk_khr_depth_stencil_resolve) {
9952 skip |= ValidateDepthStencilResolve(phys_dev_props_core12, pCreateInfo, function_name);
9953 }
9954
9955 safe_VkRenderPassCreateInfo2 create_info_2(pCreateInfo);
9956 skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_2, create_info_2.ptr(), function_name);
9957
9958 return skip;
9959 }
9960
PreCallValidateCreateRenderPass2KHR(VkDevice device,const VkRenderPassCreateInfo2KHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass) const9961 bool CoreChecks::PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
9962 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const {
9963 return ValidateCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, "vkCreateRenderPass2KHR()");
9964 }
9965
PreCallValidateCreateRenderPass2(VkDevice device,const VkRenderPassCreateInfo2 * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass) const9966 bool CoreChecks::PreCallValidateCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2 *pCreateInfo,
9967 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const {
9968 return ValidateCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, "vkCreateRenderPass2()");
9969 }
9970
ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE * pCB,char const * cmd_name,const char * error_code) const9971 bool CoreChecks::ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE *pCB, char const *cmd_name, const char *error_code) const {
9972 bool skip = false;
9973 if (pCB->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
9974 skip |= LogError(pCB->commandBuffer, error_code, "Cannot execute command %s on a secondary command buffer.", cmd_name);
9975 }
9976 return skip;
9977 }
9978
VerifyRenderAreaBounds(const VkRenderPassBeginInfo * pRenderPassBegin) const9979 bool CoreChecks::VerifyRenderAreaBounds(const VkRenderPassBeginInfo *pRenderPassBegin) const {
9980 bool skip = false;
9981 const safe_VkFramebufferCreateInfo *pFramebufferInfo = &GetFramebufferState(pRenderPassBegin->framebuffer)->createInfo;
9982 if (pRenderPassBegin->renderArea.offset.x < 0 ||
9983 (pRenderPassBegin->renderArea.offset.x + pRenderPassBegin->renderArea.extent.width) > pFramebufferInfo->width ||
9984 pRenderPassBegin->renderArea.offset.y < 0 ||
9985 (pRenderPassBegin->renderArea.offset.y + pRenderPassBegin->renderArea.extent.height) > pFramebufferInfo->height) {
9986 skip |= static_cast<bool>(LogError(
9987 pRenderPassBegin->renderPass, kVUID_Core_DrawState_InvalidRenderArea,
9988 "Cannot execute a render pass with renderArea not within the bound of the framebuffer. RenderArea: x %d, y %d, width "
9989 "%d, height %d. Framebuffer: width %d, height %d.",
9990 pRenderPassBegin->renderArea.offset.x, pRenderPassBegin->renderArea.offset.y, pRenderPassBegin->renderArea.extent.width,
9991 pRenderPassBegin->renderArea.extent.height, pFramebufferInfo->width, pFramebufferInfo->height));
9992 }
9993 return skip;
9994 }
9995
VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo * pRenderPassBeginInfo) const9996 bool CoreChecks::VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo *pRenderPassBeginInfo) const {
9997 bool skip = false;
9998 const VkRenderPassAttachmentBeginInfoKHR *pRenderPassAttachmentBeginInfo =
9999 lvl_find_in_chain<VkRenderPassAttachmentBeginInfoKHR>(pRenderPassBeginInfo->pNext);
10000
10001 if (pRenderPassAttachmentBeginInfo && pRenderPassAttachmentBeginInfo->attachmentCount != 0) {
10002 const safe_VkFramebufferCreateInfo *pFramebufferCreateInfo =
10003 &GetFramebufferState(pRenderPassBeginInfo->framebuffer)->createInfo;
10004 const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
10005 lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pFramebufferCreateInfo->pNext);
10006 if ((pFramebufferCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
10007 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03207",
10008 "VkRenderPassBeginInfo: Image views specified at render pass begin, but framebuffer not created with "
10009 "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR");
10010 } else if (pFramebufferAttachmentsCreateInfo) {
10011 if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pRenderPassAttachmentBeginInfo->attachmentCount) {
10012 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03208",
10013 "VkRenderPassBeginInfo: %u image views specified at render pass begin, but framebuffer "
10014 "created expecting %u attachments",
10015 pRenderPassAttachmentBeginInfo->attachmentCount,
10016 pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
10017 } else {
10018 const safe_VkRenderPassCreateInfo2 *pRenderPassCreateInfo =
10019 &GetRenderPassState(pRenderPassBeginInfo->renderPass)->createInfo;
10020 for (uint32_t i = 0; i < pRenderPassAttachmentBeginInfo->attachmentCount; ++i) {
10021 const VkImageViewCreateInfo *pImageViewCreateInfo =
10022 &GetImageViewState(pRenderPassAttachmentBeginInfo->pAttachments[i])->create_info;
10023 const VkFramebufferAttachmentImageInfoKHR *pFramebufferAttachmentImageInfo =
10024 &pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
10025 const VkImageCreateInfo *pImageCreateInfo = &GetImageState(pImageViewCreateInfo->image)->createInfo;
10026
10027 if (pFramebufferAttachmentImageInfo->flags != pImageCreateInfo->flags) {
10028 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03209",
10029 "VkRenderPassBeginInfo: Image view #%u created from an image with flags set as 0x%X, "
10030 "but image info #%u used to create the framebuffer had flags set as 0x%X",
10031 i, pImageCreateInfo->flags, i, pFramebufferAttachmentImageInfo->flags);
10032 }
10033
10034 if (pFramebufferAttachmentImageInfo->usage != pImageCreateInfo->usage) {
10035 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03210",
10036 "VkRenderPassBeginInfo: Image view #%u created from an image with usage set as 0x%X, "
10037 "but image info #%u used to create the framebuffer had usage set as 0x%X",
10038 i, pImageCreateInfo->usage, i, pFramebufferAttachmentImageInfo->usage);
10039 }
10040
10041 if (pFramebufferAttachmentImageInfo->width != pImageCreateInfo->extent.width) {
10042 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03211",
10043 "VkRenderPassBeginInfo: Image view #%u created from an image with width set as %u, "
10044 "but image info #%u used to create the framebuffer had width set as %u",
10045 i, pImageCreateInfo->extent.width, i, pFramebufferAttachmentImageInfo->width);
10046 }
10047
10048 if (pFramebufferAttachmentImageInfo->height != pImageCreateInfo->extent.height) {
10049 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03212",
10050 "VkRenderPassBeginInfo: Image view #%u created from an image with height set as %u, "
10051 "but image info #%u used to create the framebuffer had height set as %u",
10052 i, pImageCreateInfo->extent.height, i, pFramebufferAttachmentImageInfo->height);
10053 }
10054
10055 if (pFramebufferAttachmentImageInfo->layerCount != pImageViewCreateInfo->subresourceRange.layerCount) {
10056 skip |= LogError(
10057 pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03213",
10058 "VkRenderPassBeginInfo: Image view #%u created with a subresource range with a layerCount of %u, "
10059 "but image info #%u used to create the framebuffer had layerCount set as %u",
10060 i, pImageViewCreateInfo->subresourceRange.layerCount, i, pFramebufferAttachmentImageInfo->layerCount);
10061 }
10062
10063 const VkImageFormatListCreateInfoKHR *pImageFormatListCreateInfo =
10064 lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pImageCreateInfo->pNext);
10065 if (pImageFormatListCreateInfo) {
10066 if (pImageFormatListCreateInfo->viewFormatCount != pFramebufferAttachmentImageInfo->viewFormatCount) {
10067 skip |= LogError(
10068 pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03214",
10069 "VkRenderPassBeginInfo: Image view #%u created with an image with a viewFormatCount of %u, "
10070 "but image info #%u used to create the framebuffer had viewFormatCount set as %u",
10071 i, pImageFormatListCreateInfo->viewFormatCount, i,
10072 pFramebufferAttachmentImageInfo->viewFormatCount);
10073 }
10074
10075 for (uint32_t j = 0; j < pImageFormatListCreateInfo->viewFormatCount; ++j) {
10076 bool formatFound = false;
10077 for (uint32_t k = 0; k < pFramebufferAttachmentImageInfo->viewFormatCount; ++k) {
10078 if (pImageFormatListCreateInfo->pViewFormats[j] ==
10079 pFramebufferAttachmentImageInfo->pViewFormats[k]) {
10080 formatFound = true;
10081 }
10082 }
10083 if (!formatFound) {
10084 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03215",
10085 "VkRenderPassBeginInfo: Image view #%u created with an image including the format "
10086 "%s in its view format list, "
10087 "but image info #%u used to create the framebuffer does not include this format",
10088 i, string_VkFormat(pImageFormatListCreateInfo->pViewFormats[j]), i);
10089 }
10090 }
10091 }
10092
10093 if (pRenderPassCreateInfo->pAttachments[i].format != pImageViewCreateInfo->format) {
10094 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03216",
10095 "VkRenderPassBeginInfo: Image view #%u created with a format of %s, "
10096 "but render pass attachment description #%u created with a format of %s",
10097 i, string_VkFormat(pImageViewCreateInfo->format), i,
10098 string_VkFormat(pRenderPassCreateInfo->pAttachments[i].format));
10099 }
10100
10101 if (pRenderPassCreateInfo->pAttachments[i].samples != pImageCreateInfo->samples) {
10102 skip |= LogError(pRenderPassBeginInfo->renderPass, "VUID-VkRenderPassBeginInfo-framebuffer-03217",
10103 "VkRenderPassBeginInfo: Image view #%u created with an image with %s samples, "
10104 "but render pass attachment description #%u created with %s samples",
10105 i, string_VkSampleCountFlagBits(pImageCreateInfo->samples), i,
10106 string_VkSampleCountFlagBits(pRenderPassCreateInfo->pAttachments[i].samples));
10107 }
10108
10109 if (pImageViewCreateInfo->subresourceRange.levelCount != 1) {
10110 skip |= LogError(pRenderPassAttachmentBeginInfo->pAttachments[i],
10111 "VUID-VkRenderPassAttachmentBeginInfo-pAttachments-03218",
10112 "VkRenderPassAttachmentBeginInfo: Image view #%u created with multiple (%u) mip levels.",
10113 i, pImageViewCreateInfo->subresourceRange.levelCount);
10114 }
10115
10116 if (IsIdentitySwizzle(pImageViewCreateInfo->components) == false) {
10117 skip |= LogError(
10118 pRenderPassAttachmentBeginInfo->pAttachments[i],
10119 "VUID-VkRenderPassAttachmentBeginInfo-pAttachments-03219",
10120 "VkRenderPassAttachmentBeginInfo: Image view #%u created with non-identity swizzle. All "
10121 "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
10122 "swizzle values:\n"
10123 "r swizzle = %s\n"
10124 "g swizzle = %s\n"
10125 "b swizzle = %s\n"
10126 "a swizzle = %s\n",
10127 i, string_VkComponentSwizzle(pImageViewCreateInfo->components.r),
10128 string_VkComponentSwizzle(pImageViewCreateInfo->components.g),
10129 string_VkComponentSwizzle(pImageViewCreateInfo->components.b),
10130 string_VkComponentSwizzle(pImageViewCreateInfo->components.a));
10131 }
10132 }
10133 }
10134 }
10135 }
10136
10137 return skip;
10138 }
10139
10140 // If this is a stencil format, make sure the stencil[Load|Store]Op flag is checked, while if it is a depth/color attachment the
10141 // [load|store]Op flag must be checked
10142 // TODO: The memory valid flag in DEVICE_MEMORY_STATE should probably be split to track the validity of stencil memory separately.
10143 template <typename T>
FormatSpecificLoadAndStoreOpSettings(VkFormat format,T color_depth_op,T stencil_op,T op)10144 static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_op, T stencil_op, T op) {
10145 if (color_depth_op != op && stencil_op != op) {
10146 return false;
10147 }
10148 bool check_color_depth_load_op = !FormatIsStencilOnly(format);
10149 bool check_stencil_load_op = FormatIsDepthAndStencil(format) || !check_color_depth_load_op;
10150
10151 return ((check_color_depth_load_op && (color_depth_op == op)) || (check_stencil_load_op && (stencil_op == op)));
10152 }
10153
ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer,RenderPassCreateVersion rp_version,const VkRenderPassBeginInfo * pRenderPassBegin) const10154 bool CoreChecks::ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
10155 const VkRenderPassBeginInfo *pRenderPassBegin) const {
10156 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10157 assert(cb_state);
10158 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
10159 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
10160
10161 bool skip = false;
10162 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10163 const char *vuid;
10164 const char *const function_name = use_rp2 ? "vkCmdBeginRenderPass2()" : "vkCmdBeginRenderPass()";
10165
10166 if (render_pass_state) {
10167 uint32_t clear_op_size = 0; // Make sure pClearValues is at least as large as last LOAD_OP_CLEAR
10168
10169 // Handle extension struct from EXT_sample_locations
10170 const VkRenderPassSampleLocationsBeginInfoEXT *pSampleLocationsBeginInfo =
10171 lvl_find_in_chain<VkRenderPassSampleLocationsBeginInfoEXT>(pRenderPassBegin->pNext);
10172 if (pSampleLocationsBeginInfo) {
10173 for (uint32_t i = 0; i < pSampleLocationsBeginInfo->attachmentInitialSampleLocationsCount; ++i) {
10174 const VkAttachmentSampleLocationsEXT &sample_location =
10175 pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i];
10176 skip |= ValidateSampleLocationsInfo(&sample_location.sampleLocationsInfo, function_name);
10177 if (sample_location.attachmentIndex >= render_pass_state->createInfo.attachmentCount) {
10178 skip |=
10179 LogError(device, "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531",
10180 "%s: Attachment index %u specified by attachment sample locations %u is greater than the "
10181 "attachment count of %u for the render pass being begun.",
10182 function_name, sample_location.attachmentIndex, i, render_pass_state->createInfo.attachmentCount);
10183 }
10184 }
10185
10186 for (uint32_t i = 0; i < pSampleLocationsBeginInfo->postSubpassSampleLocationsCount; ++i) {
10187 const VkSubpassSampleLocationsEXT &sample_location = pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i];
10188 skip |= ValidateSampleLocationsInfo(&sample_location.sampleLocationsInfo, function_name);
10189 if (sample_location.subpassIndex >= render_pass_state->createInfo.subpassCount) {
10190 skip |=
10191 LogError(device, "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532",
10192 "%s: Subpass index %u specified by subpass sample locations %u is greater than the subpass count "
10193 "of %u for the render pass being begun.",
10194 function_name, sample_location.subpassIndex, i, render_pass_state->createInfo.subpassCount);
10195 }
10196 }
10197 }
10198
10199 for (uint32_t i = 0; i < render_pass_state->createInfo.attachmentCount; ++i) {
10200 auto pAttachment = &render_pass_state->createInfo.pAttachments[i];
10201 if (FormatSpecificLoadAndStoreOpSettings(pAttachment->format, pAttachment->loadOp, pAttachment->stencilLoadOp,
10202 VK_ATTACHMENT_LOAD_OP_CLEAR)) {
10203 clear_op_size = static_cast<uint32_t>(i) + 1;
10204 }
10205 }
10206
10207 if (clear_op_size > pRenderPassBegin->clearValueCount) {
10208 skip |= LogError(render_pass_state->renderPass, "VUID-VkRenderPassBeginInfo-clearValueCount-00902",
10209 "In %s the VkRenderPassBeginInfo struct has a clearValueCount of %u but there "
10210 "must be at least %u entries in pClearValues array to account for the highest index attachment in "
10211 "%s that uses VK_ATTACHMENT_LOAD_OP_CLEAR is %u. Note that the pClearValues array is indexed by "
10212 "attachment number so even if some pClearValues entries between 0 and %u correspond to attachments "
10213 "that aren't cleared they will be ignored.",
10214 function_name, pRenderPassBegin->clearValueCount, clear_op_size,
10215 report_data->FormatHandle(render_pass_state->renderPass).c_str(), clear_op_size, clear_op_size - 1);
10216 }
10217 skip |= VerifyFramebufferAndRenderPassImageViews(pRenderPassBegin);
10218 skip |= VerifyRenderAreaBounds(pRenderPassBegin);
10219 skip |= VerifyFramebufferAndRenderPassLayouts(rp_version, cb_state, pRenderPassBegin,
10220 GetFramebufferState(pRenderPassBegin->framebuffer));
10221 if (framebuffer->rp_state->renderPass != render_pass_state->renderPass) {
10222 skip |= ValidateRenderPassCompatibility("render pass", render_pass_state, "framebuffer", framebuffer->rp_state.get(),
10223 function_name, "VUID-VkRenderPassBeginInfo-renderPass-00904");
10224 }
10225
10226 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2-renderpass" : "VUID-vkCmdBeginRenderPass-renderpass";
10227 skip |= InsideRenderPass(cb_state, function_name, vuid);
10228 skip |= ValidateDependencies(framebuffer, render_pass_state);
10229
10230 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2-bufferlevel" : "VUID-vkCmdBeginRenderPass-bufferlevel";
10231 skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
10232
10233 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2-commandBuffer-cmdpool" : "VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool";
10234 skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
10235
10236 const CMD_TYPE cmd_type = use_rp2 ? CMD_BEGINRENDERPASS2 : CMD_BEGINRENDERPASS;
10237 skip |= ValidateCmd(cb_state, cmd_type, function_name);
10238 }
10239
10240 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
10241 if (chained_device_group_struct) {
10242 skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_device_group_struct->deviceMask, pRenderPassBegin->renderPass,
10243 "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905");
10244 skip |= ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, pRenderPassBegin->renderPass,
10245 "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906");
10246 skip |= ValidateDeviceMaskToCommandBuffer(cb_state, chained_device_group_struct->deviceMask, pRenderPassBegin->renderPass,
10247 "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907");
10248
10249 if (chained_device_group_struct->deviceRenderAreaCount != 0 &&
10250 chained_device_group_struct->deviceRenderAreaCount != physical_device_count) {
10251 skip |= LogError(pRenderPassBegin->renderPass, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908",
10252 "%s: deviceRenderAreaCount[%" PRIu32 "] is invaild. Physical device count is %" PRIu32 ".",
10253 function_name, chained_device_group_struct->deviceRenderAreaCount, physical_device_count);
10254 }
10255 }
10256 return skip;
10257 }
10258
PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents) const10259 bool CoreChecks::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
10260 VkSubpassContents contents) const {
10261 bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_1, pRenderPassBegin);
10262 return skip;
10263 }
10264
PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfoKHR * pSubpassBeginInfo) const10265 bool CoreChecks::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
10266 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
10267 bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_2, pRenderPassBegin);
10268 return skip;
10269 }
10270
PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfoKHR * pSubpassBeginInfo) const10271 bool CoreChecks::PreCallValidateCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
10272 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
10273 bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_2, pRenderPassBegin);
10274 return skip;
10275 }
10276
RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassContents contents)10277 void CoreChecks::RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
10278 const VkSubpassContents contents) {
10279 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10280 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
10281 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
10282 if (render_pass_state) {
10283 // transition attachments to the correct layouts for beginning of renderPass and first subpass
10284 TransitionBeginRenderPassLayouts(cb_state, render_pass_state, framebuffer);
10285 }
10286 }
10287
// Record hook for vkCmdBeginRenderPass: run the state tracker first, then record the begin-render-pass
// layout transitions. NOTE(review): RecordCmdBeginRenderPassLayouts reads render pass/framebuffer state,
// so it presumably relies on the tracker having updated that state first — keep this call order.
void CoreChecks::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                 VkSubpassContents contents) {
    StateTracker::PreCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
    RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, contents);
}
10293
// Record hook for vkCmdBeginRenderPass2KHR: state tracker first, then layout transitions. The subpass
// contents come from the chained VkSubpassBeginInfoKHR rather than a direct parameter.
void CoreChecks::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                     const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    StateTracker::PreCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
    RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
10299
// Record hook for vkCmdBeginRenderPass2 (core 1.2): state tracker first, then layout transitions,
// mirroring the KHR variant above.
void CoreChecks::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                  const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    StateTracker::PreCallRecordCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
    RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
10305
ValidateCmdNextSubpass(RenderPassCreateVersion rp_version,VkCommandBuffer commandBuffer) const10306 bool CoreChecks::ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
10307 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10308 assert(cb_state);
10309 bool skip = false;
10310 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10311 const char *vuid;
10312 const char *const function_name = use_rp2 ? "vkCmdNextSubpass2()" : "vkCmdNextSubpass()";
10313
10314 vuid = use_rp2 ? "VUID-vkCmdNextSubpass2-bufferlevel" : "VUID-vkCmdNextSubpass-bufferlevel";
10315 skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
10316
10317 vuid = use_rp2 ? "VUID-vkCmdNextSubpass2-commandBuffer-cmdpool" : "VUID-vkCmdNextSubpass-commandBuffer-cmdpool";
10318 skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
10319 const CMD_TYPE cmd_type = use_rp2 ? CMD_NEXTSUBPASS2 : CMD_NEXTSUBPASS;
10320 skip |= ValidateCmd(cb_state, cmd_type, function_name);
10321
10322 vuid = use_rp2 ? "VUID-vkCmdNextSubpass2-renderpass" : "VUID-vkCmdNextSubpass-renderpass";
10323 skip |= OutsideRenderPass(cb_state, function_name, vuid);
10324
10325 auto subpassCount = cb_state->activeRenderPass->createInfo.subpassCount;
10326 if (cb_state->activeSubpass == subpassCount - 1) {
10327 vuid = use_rp2 ? "VUID-vkCmdNextSubpass2-None-03102" : "VUID-vkCmdNextSubpass-None-00909";
10328 skip |= LogError(commandBuffer, vuid, "%s: Attempted to advance beyond final subpass.", function_name);
10329 }
10330 return skip;
10331 }
10332
PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents) const10333 bool CoreChecks::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const {
10334 return ValidateCmdNextSubpass(RENDER_PASS_VERSION_1, commandBuffer);
10335 }
10336
PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo) const10337 bool CoreChecks::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
10338 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
10339 return ValidateCmdNextSubpass(RENDER_PASS_VERSION_2, commandBuffer);
10340 }
10341
PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo) const10342 bool CoreChecks::PreCallValidateCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
10343 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
10344 return ValidateCmdNextSubpass(RENDER_PASS_VERSION_2, commandBuffer);
10345 }
10346
RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer,VkSubpassContents contents)10347 void CoreChecks::RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
10348 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10349 TransitionSubpassLayouts(cb_state, cb_state->activeRenderPass.get(), cb_state->activeSubpass,
10350 Get<FRAMEBUFFER_STATE>(cb_state->activeRenderPassBeginInfo.framebuffer));
10351 }
10352
// Post-record hook for vkCmdNextSubpass: state tracker first, then subpass layout transitions.
// NOTE(review): RecordCmdNextSubpassLayouts reads cb_state->activeSubpass, so this order is
// load-bearing — presumably the tracker advances the active subpass; confirm before reordering.
void CoreChecks::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
    RecordCmdNextSubpassLayouts(commandBuffer, contents);
}
10357
// Post-record hook for vkCmdNextSubpass2KHR: state tracker first, then subpass layout transitions
// (contents taken from the VkSubpassBeginInfoKHR struct). Keep the call order — see the non-KHR variant.
void CoreChecks::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                  const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
    RecordCmdNextSubpassLayouts(commandBuffer, pSubpassBeginInfo->contents);
}
10363
// Post-record hook for vkCmdNextSubpass2 (core 1.2): state tracker first, then subpass layout
// transitions, mirroring the KHR variant above.
void CoreChecks::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                               const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    StateTracker::PostCallRecordCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
    RecordCmdNextSubpassLayouts(commandBuffer, pSubpassBeginInfo->contents);
}
10369
ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version,VkCommandBuffer commandBuffer) const10370 bool CoreChecks::ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
10371 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10372 assert(cb_state);
10373 bool skip = false;
10374 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
10375 const char *vuid;
10376 const char *const function_name = use_rp2 ? "vkCmdEndRenderPass2KHR()" : "vkCmdEndRenderPass()";
10377
10378 RENDER_PASS_STATE *rp_state = cb_state->activeRenderPass.get();
10379 if (rp_state) {
10380 if (cb_state->activeSubpass != rp_state->createInfo.subpassCount - 1) {
10381 vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2-None-03103" : "VUID-vkCmdEndRenderPass-None-00910";
10382 skip |= LogError(commandBuffer, vuid, "%s: Called before reaching final subpass.", function_name);
10383 }
10384 }
10385
10386 vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2-renderpass" : "VUID-vkCmdEndRenderPass-renderpass";
10387 skip |= OutsideRenderPass(cb_state, function_name, vuid);
10388
10389 vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2-bufferlevel" : "VUID-vkCmdEndRenderPass-bufferlevel";
10390 skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
10391
10392 vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2-commandBuffer-cmdpool" : "VUID-vkCmdEndRenderPass-commandBuffer-cmdpool";
10393 skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
10394
10395 const CMD_TYPE cmd_type = use_rp2 ? CMD_ENDRENDERPASS2 : CMD_ENDRENDERPASS;
10396 skip |= ValidateCmd(cb_state, cmd_type, function_name);
10397 return skip;
10398 }
10399
PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const10400 bool CoreChecks::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const {
10401 bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_1, commandBuffer);
10402 return skip;
10403 }
10404
PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo) const10405 bool CoreChecks::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
10406 const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
10407 bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_2, commandBuffer);
10408 return skip;
10409 }
10410
PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo) const10411 bool CoreChecks::PreCallValidateCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
10412 bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_2, commandBuffer);
10413 return skip;
10414 }
10415
RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer)10416 void CoreChecks::RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer) {
10417 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10418 TransitionFinalSubpassLayouts(cb_state, cb_state->activeRenderPassBeginInfo.ptr(), cb_state->activeFramebuffer.get());
10419 }
10420
// Post-record hook for vkCmdEndRenderPass: final-layout transitions are recorded BEFORE the state
// tracker runs, because the tracker's cleanup clears the active render pass/framebuffer state the
// layout recording reads.
void CoreChecks::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
    RecordCmdEndRenderPassLayouts(commandBuffer);
    StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
}
10426
// Post-record hook for vkCmdEndRenderPass2KHR: same ordering constraint as the non-KHR variant —
// layouts first, tracker cleanup second.
void CoreChecks::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
    RecordCmdEndRenderPassLayouts(commandBuffer);
    StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
}
10432
// Post-record hook for vkCmdEndRenderPass2 (core 1.2).
void CoreChecks::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
    RecordCmdEndRenderPassLayouts(commandBuffer);
    StateTracker::PostCallRecordCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
}
10437
ValidateFramebuffer(VkCommandBuffer primaryBuffer,const CMD_BUFFER_STATE * pCB,VkCommandBuffer secondaryBuffer,const CMD_BUFFER_STATE * pSubCB,const char * caller) const10438 bool CoreChecks::ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BUFFER_STATE *pCB, VkCommandBuffer secondaryBuffer,
10439 const CMD_BUFFER_STATE *pSubCB, const char *caller) const {
10440 bool skip = false;
10441 if (!pSubCB->beginInfo.pInheritanceInfo) {
10442 return skip;
10443 }
10444 VkFramebuffer primary_fb = pCB->activeFramebuffer ? pCB->activeFramebuffer->framebuffer : VK_NULL_HANDLE;
10445 VkFramebuffer secondary_fb = pSubCB->beginInfo.pInheritanceInfo->framebuffer;
10446 if (secondary_fb != VK_NULL_HANDLE) {
10447 if (primary_fb != secondary_fb) {
10448 LogObjectList objlist(primaryBuffer);
10449 objlist.add(secondaryBuffer);
10450 objlist.add(secondary_fb);
10451 objlist.add(primary_fb);
10452 skip |= LogError(objlist, "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
10453 "vkCmdExecuteCommands() called w/ invalid secondary %s which has a %s"
10454 " that is not the same as the primary command buffer's current active %s.",
10455 report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str(),
10456 report_data->FormatHandle(primary_fb).c_str());
10457 }
10458 auto fb = GetFramebufferState(secondary_fb);
10459 if (!fb) {
10460 LogObjectList objlist(primaryBuffer);
10461 objlist.add(secondaryBuffer);
10462 objlist.add(secondary_fb);
10463 skip |= LogError(objlist, kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
10464 "vkCmdExecuteCommands() called w/ invalid %s which has invalid %s.",
10465 report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str());
10466 return skip;
10467 }
10468 }
10469 return skip;
10470 }
10471
// Validate a secondary command buffer against the primary that is about to execute it:
//  - any pipeline-statistics query active on the primary must be covered by the secondary's inherited
//    pipelineStatistics flags,
//  - the secondary must not have started a query whose type is already active on the primary,
//  - both command buffers must come from pools with the same queue family.
bool CoreChecks::ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE *pCB, const CMD_BUFFER_STATE *pSubCB) const {
    bool skip = false;
    unordered_set<int> activeTypes;  // query types currently active on the primary
    if (!disabled[query_validation]) {
        for (auto queryObject : pCB->activeQueries) {
            auto query_pool_state = GetQueryPoolState(queryObject.pool);
            if (query_pool_state) {
                if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS &&
                    pSubCB->beginInfo.pInheritanceInfo) {
                    VkQueryPipelineStatisticFlags cmdBufStatistics = pSubCB->beginInfo.pInheritanceInfo->pipelineStatistics;
                    // Error if the secondary inherits any statistics bit the pool was not created with
                    // (i.e. inherited flags must be a subset of the pool's pipelineStatistics).
                    if ((cmdBufStatistics & query_pool_state->createInfo.pipelineStatistics) != cmdBufStatistics) {
                        LogObjectList objlist(pCB->commandBuffer);
                        objlist.add(queryObject.pool);
                        skip |= LogError(
                            objlist, "VUID-vkCmdExecuteCommands-commandBuffer-00104",
                            "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
                            ". Pipeline statistics is being queried so the command buffer must have all bits set on the queryPool.",
                            report_data->FormatHandle(pCB->commandBuffer).c_str(),
                            report_data->FormatHandle(queryObject.pool).c_str());
                    }
                }
                activeTypes.insert(query_pool_state->createInfo.queryType);
            }
        }
        // A query started in the secondary must not collide with a query of the same type active on the primary.
        for (auto queryObject : pSubCB->startedQueries) {
            auto query_pool_state = GetQueryPoolState(queryObject.pool);
            if (query_pool_state && activeTypes.count(query_pool_state->createInfo.queryType)) {
                LogObjectList objlist(pCB->commandBuffer);
                objlist.add(queryObject.pool);
                skip |= LogError(objlist, kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
                                 "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
                                 " of type %d but a query of that type has been started on secondary %s.",
                                 report_data->FormatHandle(pCB->commandBuffer).c_str(),
                                 report_data->FormatHandle(queryObject.pool).c_str(), query_pool_state->createInfo.queryType,
                                 report_data->FormatHandle(pSubCB->commandBuffer).c_str());
            }
        }
    }
    // Primary and secondary must share a queue family.
    auto primary_pool = pCB->command_pool.get();
    auto secondary_pool = pSubCB->command_pool.get();
    if (primary_pool && secondary_pool && (primary_pool->queueFamilyIndex != secondary_pool->queueFamilyIndex)) {
        LogObjectList objlist(pSubCB->commandBuffer);
        objlist.add(pCB->commandBuffer);
        skip |= LogError(objlist, kVUID_Core_DrawState_InvalidQueueFamily,
                         "vkCmdExecuteCommands(): Primary %s created in queue family %d has secondary "
                         "%s created in queue family %d.",
                         report_data->FormatHandle(pCB->commandBuffer).c_str(), primary_pool->queueFamilyIndex,
                         report_data->FormatHandle(pSubCB->commandBuffer).c_str(), secondary_pool->queueFamilyIndex);
    }

    return skip;
}
10524
PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBuffersCount,const VkCommandBuffer * pCommandBuffers) const10525 bool CoreChecks::PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
10526 const VkCommandBuffer *pCommandBuffers) const {
10527 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
10528 assert(cb_state);
10529 bool skip = false;
10530 const CMD_BUFFER_STATE *sub_cb_state = NULL;
10531 std::unordered_set<const CMD_BUFFER_STATE *> linked_command_buffers;
10532
10533 for (uint32_t i = 0; i < commandBuffersCount; i++) {
10534 sub_cb_state = GetCBState(pCommandBuffers[i]);
10535 assert(sub_cb_state);
10536 if (VK_COMMAND_BUFFER_LEVEL_PRIMARY == sub_cb_state->createInfo.level) {
10537 skip |= LogError(pCommandBuffers[i], "VUID-vkCmdExecuteCommands-pCommandBuffers-00088",
10538 "vkCmdExecuteCommands() called w/ Primary %s in element %u of pCommandBuffers array. All "
10539 "cmd buffers in pCommandBuffers array must be secondary.",
10540 report_data->FormatHandle(pCommandBuffers[i]).c_str(), i);
10541 } else if (VK_COMMAND_BUFFER_LEVEL_SECONDARY == sub_cb_state->createInfo.level) {
10542 if (sub_cb_state->beginInfo.pInheritanceInfo != nullptr) {
10543 const auto secondary_rp_state = GetRenderPassState(sub_cb_state->beginInfo.pInheritanceInfo->renderPass);
10544 if (cb_state->activeRenderPass &&
10545 !(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
10546 LogObjectList objlist(pCommandBuffers[i]);
10547 objlist.add(cb_state->activeRenderPass->renderPass);
10548 skip |= LogError(objlist, "VUID-vkCmdExecuteCommands-pCommandBuffers-00096",
10549 "vkCmdExecuteCommands(): Secondary %s is executed within a %s "
10550 "instance scope, but the Secondary Command Buffer does not have the "
10551 "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
10552 "the vkBeginCommandBuffer() was called.",
10553 report_data->FormatHandle(pCommandBuffers[i]).c_str(),
10554 report_data->FormatHandle(cb_state->activeRenderPass->renderPass).c_str());
10555 } else if (!cb_state->activeRenderPass &&
10556 (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
10557 skip |= LogError(pCommandBuffers[i], "VUID-vkCmdExecuteCommands-pCommandBuffers-00100",
10558 "vkCmdExecuteCommands(): Secondary %s is executed outside a render pass "
10559 "instance scope, but the Secondary Command Buffer does have the "
10560 "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
10561 "the vkBeginCommandBuffer() was called.",
10562 report_data->FormatHandle(pCommandBuffers[i]).c_str());
10563 } else if (cb_state->activeRenderPass &&
10564 (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
10565 // Make sure render pass is compatible with parent command buffer pass if has continue
10566 if (cb_state->activeRenderPass->renderPass != secondary_rp_state->renderPass) {
10567 skip |= ValidateRenderPassCompatibility(
10568 "primary command buffer", cb_state->activeRenderPass.get(), "secondary command buffer",
10569 secondary_rp_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098");
10570 }
10571 // If framebuffer for secondary CB is not NULL, then it must match active FB from primaryCB
10572 skip |=
10573 ValidateFramebuffer(commandBuffer, cb_state, pCommandBuffers[i], sub_cb_state, "vkCmdExecuteCommands()");
10574 if (!sub_cb_state->cmd_execute_commands_functions.empty()) {
10575 // Inherit primary's activeFramebuffer and while running validate functions
10576 for (auto &function : sub_cb_state->cmd_execute_commands_functions) {
10577 skip |= function(cb_state, cb_state->activeFramebuffer.get());
10578 }
10579 }
10580 }
10581 }
10582 }
10583
10584 // TODO(mlentine): Move more logic into this method
10585 skip |= ValidateSecondaryCommandBufferState(cb_state, sub_cb_state);
10586 skip |= ValidateCommandBufferState(sub_cb_state, "vkCmdExecuteCommands()", 0,
10587 "VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
10588 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
10589 if (sub_cb_state->in_use.load()) {
10590 skip |= LogError(
10591 cb_state->commandBuffer, "VUID-vkCmdExecuteCommands-pCommandBuffers-00091",
10592 "vkCmdExecuteCommands(): Cannot execute pending %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
10593 report_data->FormatHandle(sub_cb_state->commandBuffer).c_str());
10594 }
10595 // We use an const_cast, because one cannot query a container keyed on a non-const pointer using a const pointer
10596 if (cb_state->linkedCommandBuffers.count(const_cast<CMD_BUFFER_STATE *>(sub_cb_state))) {
10597 LogObjectList objlist(cb_state->commandBuffer);
10598 objlist.add(sub_cb_state->commandBuffer);
10599 skip |= LogError(objlist, "VUID-vkCmdExecuteCommands-pCommandBuffers-00092",
10600 "vkCmdExecuteCommands(): Cannot execute %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT "
10601 "set if previously executed in %s",
10602 report_data->FormatHandle(sub_cb_state->commandBuffer).c_str(),
10603 report_data->FormatHandle(cb_state->commandBuffer).c_str());
10604 }
10605
10606 const auto insert_pair = linked_command_buffers.insert(sub_cb_state);
10607 if (!insert_pair.second) {
10608 skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdExecuteCommands-pCommandBuffers-00093",
10609 "vkCmdExecuteCommands(): Cannot duplicate %s in pCommandBuffers without "
10610 "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
10611 report_data->FormatHandle(cb_state->commandBuffer).c_str());
10612 }
10613
10614 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
10615 // Warn that non-simultaneous secondary cmd buffer renders primary non-simultaneous
10616 LogObjectList objlist(pCommandBuffers[i]);
10617 objlist.add(cb_state->commandBuffer);
10618 skip |= LogWarning(objlist, kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse,
10619 "vkCmdExecuteCommands(): Secondary %s does not have "
10620 "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and will cause primary "
10621 "%s to be treated as if it does not have "
10622 "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, even though it does.",
10623 report_data->FormatHandle(pCommandBuffers[i]).c_str(),
10624 report_data->FormatHandle(cb_state->commandBuffer).c_str());
10625 }
10626 }
10627 if (!cb_state->activeQueries.empty() && !enabled_features.core.inheritedQueries) {
10628 skip |= LogError(pCommandBuffers[i], "VUID-vkCmdExecuteCommands-commandBuffer-00101",
10629 "vkCmdExecuteCommands(): Secondary %s cannot be submitted with a query in flight and "
10630 "inherited queries not supported on this device.",
10631 report_data->FormatHandle(pCommandBuffers[i]).c_str());
10632 }
10633 // Validate initial layout uses vs. the primary cmd buffer state
10634 // Novel Valid usage: "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001"
10635 // initial layout usage of secondary command buffers resources must match parent command buffer
10636 const auto *const_cb_state = static_cast<const CMD_BUFFER_STATE *>(cb_state);
10637 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
10638 const auto image = sub_layout_map_entry.first;
10639 const auto *image_state = GetImageState(image);
10640 if (!image_state) continue; // Can't set layouts of a dead image
10641
10642 const auto *cb_subres_map = GetImageSubresourceLayoutMap(const_cb_state, image);
10643 // Const getter can be null in which case we have nothing to check against for this image...
10644 if (!cb_subres_map) continue;
10645
10646 const auto &sub_cb_subres_map = sub_layout_map_entry.second;
10647 // Validate the initial_uses, that they match the current state of the primary cb, or absent a current state,
10648 // that the match any initial_layout.
10649 for (const auto &subres_layout : *sub_cb_subres_map) {
10650 const auto &sub_layout = subres_layout.initial_layout;
10651 const auto &subresource = subres_layout.subresource;
10652 if (VK_IMAGE_LAYOUT_UNDEFINED == sub_layout) continue; // secondary doesn't care about current or initial
10653
10654 // Look up the layout to compared to the intial layout of the sub command buffer (current else initial)
10655 auto cb_layouts = cb_subres_map->GetSubresourceLayouts(subresource);
10656 auto cb_layout = cb_layouts.current_layout;
10657 const char *layout_type = "current";
10658 if (cb_layouts.current_layout == kInvalidLayout) {
10659 cb_layout = cb_layouts.initial_layout;
10660 layout_type = "initial";
10661 }
10662 if ((cb_layout != kInvalidLayout) && (cb_layout != sub_layout)) {
10663 skip |= LogError(pCommandBuffers[i], "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001",
10664 "%s: Executed secondary command buffer using %s (subresource: aspectMask 0x%X array layer %u, "
10665 "mip level %u) which expects layout %s--instead, image %s layout is %s.",
10666 "vkCmdExecuteCommands():", report_data->FormatHandle(image).c_str(), subresource.aspectMask,
10667 subresource.arrayLayer, subresource.mipLevel, string_VkImageLayout(sub_layout), layout_type,
10668 string_VkImageLayout(cb_layout));
10669 }
10670 }
10671 }
10672
10673 // All commands buffers involved must be protected or unprotected
10674 if ((cb_state->unprotected == false) && (sub_cb_state->unprotected == true)) {
10675 LogObjectList objlist(cb_state->commandBuffer);
10676 objlist.add(sub_cb_state->commandBuffer);
10677 skip |= LogError(
10678 objlist, "VUID-vkCmdExecuteCommands-commandBuffer-01820",
10679 "vkCmdExecuteCommands(): command buffer %s is protected while secondary command buffer %s is a unprotected",
10680 report_data->FormatHandle(cb_state->commandBuffer).c_str(),
10681 report_data->FormatHandle(sub_cb_state->commandBuffer).c_str());
10682 } else if ((cb_state->unprotected == true) && (sub_cb_state->unprotected == false)) {
10683 LogObjectList objlist(cb_state->commandBuffer);
10684 objlist.add(sub_cb_state->commandBuffer);
10685 skip |= LogError(
10686 objlist, "VUID-vkCmdExecuteCommands-commandBuffer-01821",
10687 "vkCmdExecuteCommands(): command buffer %s is unprotected while secondary command buffer %s is a protected",
10688 report_data->FormatHandle(cb_state->commandBuffer).c_str(),
10689 report_data->FormatHandle(sub_cb_state->commandBuffer).c_str());
10690 }
10691 }
10692
10693 skip |= ValidatePrimaryCommandBuffer(cb_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-bufferlevel");
10694 skip |= ValidateCmdQueueFlags(cb_state, "vkCmdExecuteCommands()",
10695 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
10696 "VUID-vkCmdExecuteCommands-commandBuffer-cmdpool");
10697 skip |= ValidateCmd(cb_state, CMD_EXECUTECOMMANDS, "vkCmdExecuteCommands()");
10698 return skip;
10699 }
10700
PreCallValidateMapMemory(VkDevice device,VkDeviceMemory mem,VkDeviceSize offset,VkDeviceSize size,VkFlags flags,void ** ppData) const10701 bool CoreChecks::PreCallValidateMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
10702 VkFlags flags, void **ppData) const {
10703 bool skip = false;
10704 const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
10705 if (mem_info) {
10706 if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
10707 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) {
10708 skip = LogError(mem, "VUID-vkMapMemory-memory-00682",
10709 "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT set: %s.",
10710 report_data->FormatHandle(mem).c_str());
10711 }
10712
10713 if (mem_info->multi_instance) {
10714 skip = LogError(mem, "VUID-vkMapMemory-memory-00683",
10715 "Memory (%s) must not have been allocated with multiple instances -- either by supplying a deviceMask "
10716 "with more than one bit set, or by allocation from a heap with the MULTI_INSTANCE heap flag set.",
10717 report_data->FormatHandle(mem).c_str());
10718 }
10719
10720 skip |= ValidateMapMemRange(mem_info, offset, size);
10721 }
10722 return skip;
10723 }
10724
PreCallValidateUnmapMemory(VkDevice device,VkDeviceMemory mem) const10725 bool CoreChecks::PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory mem) const {
10726 bool skip = false;
10727 const auto mem_info = GetDevMemState(mem);
10728 if (mem_info && !mem_info->mapped_range.size) {
10729 // Valid Usage: memory must currently be mapped
10730 skip |= LogError(mem, "VUID-vkUnmapMemory-memory-00689", "Unmapping Memory without memory being mapped: %s.",
10731 report_data->FormatHandle(mem).c_str());
10732 }
10733 return skip;
10734 }
10735
ValidateMemoryIsMapped(const char * funcName,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges) const10736 bool CoreChecks::ValidateMemoryIsMapped(const char *funcName, uint32_t memRangeCount, const VkMappedMemoryRange *pMemRanges) const {
10737 bool skip = false;
10738 for (uint32_t i = 0; i < memRangeCount; ++i) {
10739 auto mem_info = GetDevMemState(pMemRanges[i].memory);
10740 if (mem_info) {
10741 // Makes sure the memory is already mapped
10742 if (mem_info->mapped_range.size == 0) {
10743 skip = LogError(pMemRanges[i].memory, "VUID-VkMappedMemoryRange-memory-00684",
10744 "%s: Attempting to use memory (%s) that is not currently host mapped.", funcName,
10745 report_data->FormatHandle(pMemRanges[i].memory).c_str());
10746 }
10747
10748 if (pMemRanges[i].size == VK_WHOLE_SIZE) {
10749 if (mem_info->mapped_range.offset > pMemRanges[i].offset) {
10750 skip |= LogError(pMemRanges[i].memory, "VUID-VkMappedMemoryRange-size-00686",
10751 "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER
10752 ") is less than Memory Object's offset (" PRINTF_SIZE_T_SPECIFIER ").",
10753 funcName, static_cast<size_t>(pMemRanges[i].offset),
10754 static_cast<size_t>(mem_info->mapped_range.offset));
10755 }
10756 } else {
10757 const uint64_t data_end = (mem_info->mapped_range.size == VK_WHOLE_SIZE)
10758 ? mem_info->alloc_info.allocationSize
10759 : (mem_info->mapped_range.offset + mem_info->mapped_range.size);
10760 if ((mem_info->mapped_range.offset > pMemRanges[i].offset) ||
10761 (data_end < (pMemRanges[i].offset + pMemRanges[i].size))) {
10762 skip |= LogError(pMemRanges[i].memory, "VUID-VkMappedMemoryRange-size-00685",
10763 "%s: Flush/Invalidate size or offset (" PRINTF_SIZE_T_SPECIFIER ", " PRINTF_SIZE_T_SPECIFIER
10764 ") exceed the Memory Object's upper-bound (" PRINTF_SIZE_T_SPECIFIER ").",
10765 funcName, static_cast<size_t>(pMemRanges[i].offset + pMemRanges[i].size),
10766 static_cast<size_t>(pMemRanges[i].offset), static_cast<size_t>(data_end));
10767 }
10768 }
10769 }
10770 }
10771 return skip;
10772 }
10773
ValidateMappedMemoryRangeDeviceLimits(const char * func_name,uint32_t mem_range_count,const VkMappedMemoryRange * mem_ranges) const10774 bool CoreChecks::ValidateMappedMemoryRangeDeviceLimits(const char *func_name, uint32_t mem_range_count,
10775 const VkMappedMemoryRange *mem_ranges) const {
10776 bool skip = false;
10777 for (uint32_t i = 0; i < mem_range_count; ++i) {
10778 uint64_t atom_size = phys_dev_props.limits.nonCoherentAtomSize;
10779 if (SafeModulo(mem_ranges[i].offset, atom_size) != 0) {
10780 skip |= LogError(mem_ranges->memory, "VUID-VkMappedMemoryRange-offset-00687",
10781 "%s: Offset in pMemRanges[%d] is 0x%" PRIxLEAST64
10782 ", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
10783 func_name, i, mem_ranges[i].offset, atom_size);
10784 }
10785 auto mem_info = GetDevMemState(mem_ranges[i].memory);
10786 if (mem_info) {
10787 if ((mem_ranges[i].size != VK_WHOLE_SIZE) &&
10788 (mem_ranges[i].size + mem_ranges[i].offset != mem_info->alloc_info.allocationSize) &&
10789 (SafeModulo(mem_ranges[i].size, atom_size) != 0)) {
10790 skip |= LogError(mem_ranges->memory, "VUID-VkMappedMemoryRange-size-01390",
10791 "%s: Size in pMemRanges[%d] is 0x%" PRIxLEAST64
10792 ", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
10793 func_name, i, mem_ranges[i].size, atom_size);
10794 }
10795 }
10796 }
10797 return skip;
10798 }
10799
PreCallValidateFlushMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges) const10800 bool CoreChecks::PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
10801 const VkMappedMemoryRange *pMemRanges) const {
10802 bool skip = false;
10803 skip |= ValidateMappedMemoryRangeDeviceLimits("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
10804 skip |= ValidateMemoryIsMapped("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
10805 return skip;
10806 }
10807
PreCallValidateInvalidateMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges) const10808 bool CoreChecks::PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
10809 const VkMappedMemoryRange *pMemRanges) const {
10810 bool skip = false;
10811 skip |= ValidateMappedMemoryRangeDeviceLimits("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
10812 skip |= ValidateMemoryIsMapped("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
10813 return skip;
10814 }
10815
PreCallValidateGetDeviceMemoryCommitment(VkDevice device,VkDeviceMemory mem,VkDeviceSize * pCommittedMem) const10816 bool CoreChecks::PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory mem, VkDeviceSize *pCommittedMem) const {
10817 bool skip = false;
10818 const auto mem_info = GetDevMemState(mem);
10819
10820 if (mem_info) {
10821 if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
10822 VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) == 0) {
10823 skip = LogError(mem, "VUID-vkGetDeviceMemoryCommitment-memory-00690",
10824 "vkGetDeviceMemoryCommitment(): Querying commitment for memory without "
10825 "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT set: %s.",
10826 report_data->FormatHandle(mem).c_str());
10827 }
10828 }
10829 return skip;
10830 }
10831
ValidateBindImageMemory(uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos,const char * api_name) const10832 bool CoreChecks::ValidateBindImageMemory(uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos,
10833 const char *api_name) const {
10834 bool skip = false;
10835
10836 bool bind_image_mem_2 = strcmp(api_name, "vkBindImageMemory()") != 0;
10837 char error_prefix[128];
10838 strcpy(error_prefix, api_name);
10839
10840 // Track all image sub resources if they are bound for bind_image_mem_2
10841 // uint32_t[3] is which index in pBindInfos for max 3 planes
10842 // Non disjoint images act as a single plane
10843 std::unordered_map<VkImage, std::array<uint32_t, 3>> resources_bound;
10844
10845 for (uint32_t i = 0; i < bindInfoCount; i++) {
10846 if (bind_image_mem_2 == true) {
10847 sprintf(error_prefix, "%s pBindInfos[%u]", api_name, i);
10848 }
10849
10850 const VkBindImageMemoryInfo &bindInfo = pBindInfos[i];
10851 const IMAGE_STATE *image_state = GetImageState(bindInfo.image);
10852 if (image_state) {
10853 // Track objects tied to memory
10854 skip |= ValidateSetMemBinding(bindInfo.memory, VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage), error_prefix);
10855
10856 const auto plane_info = lvl_find_in_chain<VkBindImagePlaneMemoryInfo>(bindInfo.pNext);
10857 const auto mem_info = GetDevMemState(bindInfo.memory);
10858
10859 // Need extra check for disjoint flag incase called without bindImage2 and don't want false postive errors
10860 // no 'else' case as if that happens another VUID is already being triggered for it being invalid
10861 if ((plane_info == nullptr) && (image_state->disjoint == false)) {
10862 // Check non-disjoint images VkMemoryRequirements
10863
10864 // All validation using the image_state->requirements for external AHB is check in android only section
10865 if (image_state->external_ahb == false) {
10866 const VkMemoryRequirements mem_req = image_state->requirements;
10867
10868 // Validate memory requirements alignment
10869 if (SafeModulo(bindInfo.memoryOffset, mem_req.alignment) != 0) {
10870 const char *validation_error;
10871 if (bind_image_mem_2 == false) {
10872 validation_error = "VUID-vkBindImageMemory-memoryOffset-01048";
10873 } else if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
10874 validation_error = "VUID-VkBindImageMemoryInfo-pNext-01616";
10875 } else {
10876 validation_error = "VUID-VkBindImageMemoryInfo-memoryOffset-01613";
10877 }
10878 skip |=
10879 LogError(bindInfo.image, validation_error,
10880 "%s: memoryOffset is 0x%" PRIxLEAST64
10881 " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
10882 ", returned from a call to vkGetImageMemoryRequirements with image.",
10883 error_prefix, bindInfo.memoryOffset, mem_req.alignment);
10884 }
10885
10886 if (mem_info) {
10887 safe_VkMemoryAllocateInfo alloc_info = mem_info->alloc_info;
10888 // Validate memory requirements size
10889 if (mem_req.size > alloc_info.allocationSize - bindInfo.memoryOffset) {
10890 const char *validation_error;
10891 if (bind_image_mem_2 == false) {
10892 validation_error = "VUID-vkBindImageMemory-size-01049";
10893 } else if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
10894 validation_error = "VUID-VkBindImageMemoryInfo-pNext-01617";
10895 } else {
10896 validation_error = "VUID-VkBindImageMemoryInfo-memory-01614";
10897 }
10898 skip |= LogError(bindInfo.image, validation_error,
10899 "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
10900 " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
10901 ", returned from a call to vkGetImageMemoryRequirements with image.",
10902 error_prefix, alloc_info.allocationSize - bindInfo.memoryOffset, mem_req.size);
10903 }
10904
10905 // Validate memory type used
10906 {
10907 const char *validation_error;
10908 if (bind_image_mem_2 == false) {
10909 validation_error = "VUID-vkBindImageMemory-memory-01047";
10910 } else if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
10911 validation_error = "VUID-VkBindImageMemoryInfo-pNext-01615";
10912 } else {
10913 validation_error = "VUID-VkBindImageMemoryInfo-memory-01612";
10914 }
10915 skip |= ValidateMemoryTypes(mem_info, mem_req.memoryTypeBits, error_prefix, validation_error);
10916 }
10917 }
10918 }
10919
10920 if (bind_image_mem_2 == true) {
10921 // since its a non-disjoint image, finding VkImage in map is a duplicate
10922 auto it = resources_bound.find(image_state->image);
10923 if (it == resources_bound.end()) {
10924 std::array<uint32_t, 3> bound_index = {i, UINT32_MAX, UINT32_MAX};
10925 resources_bound.emplace(image_state->image, bound_index);
10926 } else {
10927 skip |= LogError(
10928 bindInfo.image, "VUID-vkBindImageMemory2-pBindInfos-04006",
10929 "%s: The same non-disjoint image resource is being bound twice at pBindInfos[%d] and pBindInfos[%d]",
10930 error_prefix, it->second[0], i);
10931 }
10932 }
10933 } else if ((plane_info != nullptr) && (image_state->disjoint == true)) {
10934 // Check disjoint images VkMemoryRequirements for given plane
10935 int plane = 0;
10936
10937 // All validation using the image_state->plane*_requirements for external AHB is check in android only section
10938 if (image_state->external_ahb == false) {
10939 VkMemoryRequirements disjoint_mem_req = {};
10940 const VkImageAspectFlagBits aspect = plane_info->planeAspect;
10941 switch (aspect) {
10942 case VK_IMAGE_ASPECT_PLANE_0_BIT:
10943 plane = 0;
10944 disjoint_mem_req = image_state->plane0_requirements;
10945 break;
10946 case VK_IMAGE_ASPECT_PLANE_1_BIT:
10947 plane = 1;
10948 disjoint_mem_req = image_state->plane1_requirements;
10949 break;
10950 case VK_IMAGE_ASPECT_PLANE_2_BIT:
10951 plane = 2;
10952 disjoint_mem_req = image_state->plane2_requirements;
10953 break;
10954 default:
10955 assert(false); // parameter validation should have caught this
10956 break;
10957 }
10958
10959 // Validate memory requirements alignment
10960 if (SafeModulo(bindInfo.memoryOffset, disjoint_mem_req.alignment) != 0) {
10961 skip |= LogError(
10962 bindInfo.image, "VUID-VkBindImageMemoryInfo-pNext-01620",
10963 "%s: memoryOffset is 0x%" PRIxLEAST64
10964 " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
10965 ", returned from a call to vkGetImageMemoryRequirements2 with disjoint image for aspect plane %s.",
10966 error_prefix, bindInfo.memoryOffset, disjoint_mem_req.alignment, string_VkImageAspectFlagBits(aspect));
10967 }
10968
10969 if (mem_info) {
10970 safe_VkMemoryAllocateInfo alloc_info = mem_info->alloc_info;
10971
10972 // Validate memory requirements size
10973 if (disjoint_mem_req.size > alloc_info.allocationSize - bindInfo.memoryOffset) {
10974 skip |= LogError(
10975 bindInfo.image, "VUID-VkBindImageMemoryInfo-pNext-01621",
10976 "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
10977 " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
10978 ", returned from a call to vkGetImageMemoryRequirements with disjoint image for aspect plane %s.",
10979 error_prefix, alloc_info.allocationSize - bindInfo.memoryOffset, disjoint_mem_req.size,
10980 string_VkImageAspectFlagBits(aspect));
10981 }
10982
10983 // Validate memory type used
10984 {
10985 skip |= ValidateMemoryTypes(mem_info, disjoint_mem_req.memoryTypeBits, error_prefix,
10986 "VUID-VkBindImageMemoryInfo-pNext-01619");
10987 }
10988 }
10989 }
10990
10991 auto it = resources_bound.find(image_state->image);
10992 if (it == resources_bound.end()) {
10993 std::array<uint32_t, 3> bound_index = {UINT32_MAX, UINT32_MAX, UINT32_MAX};
10994 bound_index[plane] = i;
10995 resources_bound.emplace(image_state->image, bound_index);
10996 } else {
10997 if (it->second[plane] == UINT32_MAX) {
10998 it->second[plane] = i;
10999 } else {
11000 skip |= LogError(bindInfo.image, "VUID-vkBindImageMemory2-pBindInfos-04006",
11001 "%s: The same disjoint image sub-resource for plane %d is being bound twice at "
11002 "pBindInfos[%d] and pBindInfos[%d]",
11003 error_prefix, plane, it->second[plane], i);
11004 }
11005 }
11006 }
11007
11008 if (mem_info) {
11009 // Validate bound memory range information
11010 // if memory is exported to an AHB then the mem_info->allocationSize must be zero and this check is not needed
11011 if ((mem_info->is_export == false) || ((mem_info->export_handle_type_flags &
11012 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) == 0)) {
11013 skip |= ValidateInsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, error_prefix);
11014 }
11015
11016 // Validate dedicated allocation
11017 if (mem_info->is_dedicated) {
11018 if (enabled_features.dedicated_allocation_image_aliasing_features.dedicatedAllocationImageAliasing) {
11019 const auto orig_image_state = GetImageState(mem_info->dedicated_image);
11020 const auto current_image_state = GetImageState(bindInfo.image);
11021 if ((bindInfo.memoryOffset != 0) || !orig_image_state || !current_image_state ||
11022 !current_image_state->IsCreateInfoDedicatedAllocationImageAliasingCompatible(
11023 orig_image_state->createInfo)) {
11024 const char *validation_error;
11025 if (bind_image_mem_2 == false) {
11026 validation_error = "VUID-vkBindImageMemory-memory-02629";
11027 } else {
11028 validation_error = "VUID-VkBindImageMemoryInfo-memory-02629";
11029 }
11030 LogObjectList objlist(bindInfo.image);
11031 objlist.add(bindInfo.memory);
11032 objlist.add(mem_info->dedicated_image);
11033 skip |= LogError(
11034 objlist, validation_error,
11035 "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR:: %s must compatible "
11036 "with %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
11037 error_prefix, report_data->FormatHandle(bindInfo.memory).c_str(),
11038 report_data->FormatHandle(mem_info->dedicated_image).c_str(),
11039 report_data->FormatHandle(bindInfo.image).c_str(), bindInfo.memoryOffset);
11040 }
11041 } else {
11042 if ((bindInfo.memoryOffset != 0) || (mem_info->dedicated_image != bindInfo.image)) {
11043 const char *validation_error;
11044 if (bind_image_mem_2 == false) {
11045 validation_error = "VUID-vkBindImageMemory-memory-01509";
11046 } else {
11047 validation_error = "VUID-VkBindImageMemoryInfo-memory-01509";
11048 }
11049 LogObjectList objlist(bindInfo.image);
11050 objlist.add(bindInfo.memory);
11051 objlist.add(mem_info->dedicated_image);
11052 skip |= LogError(
11053 objlist, validation_error,
11054 "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR:: %s must be equal "
11055 "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
11056 error_prefix, report_data->FormatHandle(bindInfo.memory).c_str(),
11057 report_data->FormatHandle(mem_info->dedicated_image).c_str(),
11058 report_data->FormatHandle(bindInfo.image).c_str(), bindInfo.memoryOffset);
11059 }
11060 }
11061 }
11062
11063 // Validate export memory handles
11064 if ((mem_info->export_handle_type_flags != 0) &&
11065 ((mem_info->export_handle_type_flags & image_state->external_memory_handle) == 0)) {
11066 const char *vuid =
11067 bind_image_mem_2 ? "VUID-VkBindImageMemoryInfo-memory-02728" : "VUID-vkBindImageMemory-memory-02728";
11068 LogObjectList objlist(bindInfo.image);
11069 objlist.add(bindInfo.memory);
11070 skip |= LogError(objlist, vuid,
11071 "%s: The VkDeviceMemory (%s) has an external handleType of %s which does not include at least "
11072 "one handle from VkImage (%s) handleType %s.",
11073 error_prefix, report_data->FormatHandle(bindInfo.memory).c_str(),
11074 string_VkExternalMemoryHandleTypeFlags(mem_info->export_handle_type_flags).c_str(),
11075 report_data->FormatHandle(bindInfo.image).c_str(),
11076 string_VkExternalMemoryHandleTypeFlags(image_state->external_memory_handle).c_str());
11077 }
11078
11079 // Validate import memory handles
11080 if (mem_info->is_import_ahb == true) {
11081 skip |= ValidateImageImportedHandleANDROID(api_name, image_state->external_memory_handle, bindInfo.memory,
11082 bindInfo.image);
11083 } else if (mem_info->is_import == true) {
11084 if ((mem_info->import_handle_type_flags & image_state->external_memory_handle) == 0) {
11085 const char *vuid = nullptr;
11086 if ((bind_image_mem_2) && (device_extensions.vk_android_external_memory_android_hardware_buffer)) {
11087 vuid = "VUID-VkBindImageMemoryInfo-memory-02989";
11088 } else if ((!bind_image_mem_2) && (device_extensions.vk_android_external_memory_android_hardware_buffer)) {
11089 vuid = "VUID-vkBindImageMemory-memory-02989";
11090 } else if ((bind_image_mem_2) && (!device_extensions.vk_android_external_memory_android_hardware_buffer)) {
11091 vuid = "VUID-VkBindImageMemoryInfo-memory-02729";
11092 } else if ((!bind_image_mem_2) && (!device_extensions.vk_android_external_memory_android_hardware_buffer)) {
11093 vuid = "VUID-vkBindImageMemory-memory-02729";
11094 }
11095 LogObjectList objlist(bindInfo.image);
11096 objlist.add(bindInfo.memory);
11097 skip |= LogError(objlist, vuid,
11098 "%s: The VkDeviceMemory (%s) was created with an import operation with handleType of %s "
11099 "which is not set in the VkImage (%s) VkExternalMemoryImageCreateInfo::handleType (%s)",
11100 api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
11101 string_VkExternalMemoryHandleTypeFlags(mem_info->import_handle_type_flags).c_str(),
11102 report_data->FormatHandle(bindInfo.image).c_str(),
11103 string_VkExternalMemoryHandleTypeFlags(image_state->external_memory_handle).c_str());
11104 }
11105 }
11106
11107 // Validate mix of protected buffer and memory
11108 if ((image_state->unprotected == false) && (mem_info->unprotected == true)) {
11109 const char *vuid =
11110 bind_image_mem_2 ? "VUID-VkBindImageMemoryInfo-None-01901" : "VUID-vkBindImageMemory-None-01901";
11111 LogObjectList objlist(bindInfo.image);
11112 objlist.add(bindInfo.memory);
11113 skip |= LogError(objlist, vuid,
11114 "%s: The VkDeviceMemory (%s) was not created with protected memory but the VkImage (%s) was "
11115 "set to use protected memory.",
11116 api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
11117 report_data->FormatHandle(bindInfo.image).c_str());
11118 } else if ((image_state->unprotected == true) && (mem_info->unprotected == false)) {
11119 const char *vuid =
11120 bind_image_mem_2 ? "VUID-VkBindImageMemoryInfo-None-01902" : "VUID-vkBindImageMemory-None-01902";
11121 LogObjectList objlist(bindInfo.image);
11122 objlist.add(bindInfo.memory);
11123 skip |= LogError(objlist, vuid,
11124 "%s: The VkDeviceMemory (%s) was created with protected memory but the VkImage (%s) was not "
11125 "set to use protected memory.",
11126 api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
11127 report_data->FormatHandle(bindInfo.image).c_str());
11128 }
11129 }
11130
11131 const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
11132 if (swapchain_info) {
11133 if (bindInfo.memory != VK_NULL_HANDLE) {
11134 skip |= LogError(bindInfo.image, "VUID-VkBindImageMemoryInfo-pNext-01631", "%s: %s is not VK_NULL_HANDLE.",
11135 error_prefix, report_data->FormatHandle(bindInfo.memory).c_str());
11136 }
11137 if (image_state->create_from_swapchain != swapchain_info->swapchain) {
11138 LogObjectList objlist(image_state->image);
11139 objlist.add(image_state->create_from_swapchain);
11140 objlist.add(swapchain_info->swapchain);
11141 skip |= LogError(
11142 objlist, kVUID_Core_BindImageMemory_Swapchain,
11143 "%s: %s is created by %s, but the image is bound by %s. The image should be created and bound by the same "
11144 "swapchain",
11145 error_prefix, report_data->FormatHandle(image_state->image).c_str(),
11146 report_data->FormatHandle(image_state->create_from_swapchain).c_str(),
11147 report_data->FormatHandle(swapchain_info->swapchain).c_str());
11148 }
11149 const auto swapchain_state = GetSwapchainState(swapchain_info->swapchain);
11150 if (swapchain_state && swapchain_state->images.size() <= swapchain_info->imageIndex) {
11151 skip |= LogError(bindInfo.image, "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644",
11152 "%s: imageIndex (%i) is out of bounds of %s images (size: %i)", error_prefix,
11153 swapchain_info->imageIndex, report_data->FormatHandle(swapchain_info->swapchain).c_str(),
11154 (int)swapchain_state->images.size());
11155 }
11156 } else {
11157 if (image_state->create_from_swapchain) {
11158 skip |= LogError(bindInfo.image, "VUID-VkBindImageMemoryInfo-image-01630",
11159 "%s: pNext of VkBindImageMemoryInfo doesn't include VkBindImageMemorySwapchainInfoKHR.",
11160 error_prefix);
11161 }
11162 if (!mem_info) {
11163 skip |= LogError(bindInfo.image, "VUID-VkBindImageMemoryInfo-pNext-01632", "%s: %s is invalid.", error_prefix,
11164 report_data->FormatHandle(bindInfo.memory).c_str());
11165 }
11166 }
11167
11168 if (plane_info) {
11169 // Checks for disjoint bit in image
11170 if (image_state->disjoint == false) {
11171 skip |= LogError(
11172 bindInfo.image, "VUID-VkBindImageMemoryInfo-pNext-01618",
11173 "%s: pNext of VkBindImageMemoryInfo contains VkBindImagePlaneMemoryInfo and %s is not created with "
11174 "VK_IMAGE_CREATE_DISJOINT_BIT.",
11175 error_prefix, report_data->FormatHandle(image_state->image).c_str());
11176 }
11177
11178 // Make sure planeAspect is only a single, valid plane
11179 uint32_t planes = FormatPlaneCount(image_state->createInfo.format);
11180 VkImageAspectFlags aspect = plane_info->planeAspect;
11181 if ((2 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT)) {
11182 skip |= LogError(
11183 bindInfo.image, "VUID-VkBindImagePlaneMemoryInfo-planeAspect-02283",
11184 "%s: Image %s VkBindImagePlaneMemoryInfo::planeAspect is %s but can only be VK_IMAGE_ASPECT_PLANE_0_BIT"
11185 "or VK_IMAGE_ASPECT_PLANE_1_BIT.",
11186 error_prefix, report_data->FormatHandle(image_state->image).c_str(),
11187 string_VkImageAspectFlags(aspect).c_str());
11188 }
11189 if ((3 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT) &&
11190 (aspect != VK_IMAGE_ASPECT_PLANE_2_BIT)) {
11191 skip |= LogError(
11192 bindInfo.image, "VUID-VkBindImagePlaneMemoryInfo-planeAspect-02283",
11193 "%s: Image %s VkBindImagePlaneMemoryInfo::planeAspect is %s but can only be VK_IMAGE_ASPECT_PLANE_0_BIT"
11194 "or VK_IMAGE_ASPECT_PLANE_1_BIT or VK_IMAGE_ASPECT_PLANE_2_BIT.",
11195 error_prefix, report_data->FormatHandle(image_state->image).c_str(),
11196 string_VkImageAspectFlags(aspect).c_str());
11197 }
11198 }
11199 }
11200 }
11201
11202 // Check to make sure all disjoint planes were bound
11203 for (std::pair<const VkImage, std::array<uint32_t, 3>> &resource : resources_bound) {
11204 const IMAGE_STATE *image_state = GetImageState(resource.first);
11205 if (image_state->disjoint == true) {
11206 uint32_t total_planes = FormatPlaneCount(image_state->createInfo.format);
11207 for (uint32_t i = 0; i < total_planes; i++) {
11208 if (resource.second[i] == UINT32_MAX) {
11209 skip |= LogError(resource.first, "VUID-vkBindImageMemory2-pBindInfos-02858",
11210 "%s: Plane %u of the disjoint image was not bound. All %d planes need to bound individually "
11211 "in separate pBindInfos in a single call.",
11212 api_name, i, total_planes);
11213 }
11214 }
11215 }
11216 }
11217
11218 return skip;
11219 }
11220
PreCallValidateBindImageMemory(VkDevice device,VkImage image,VkDeviceMemory mem,VkDeviceSize memoryOffset) const11221 bool CoreChecks::PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
11222 VkDeviceSize memoryOffset) const {
11223 bool skip = false;
11224 const IMAGE_STATE *image_state = GetImageState(image);
11225 if (image_state) {
11226 // Checks for no disjoint bit
11227 if (image_state->disjoint == true) {
11228 skip |=
11229 LogError(image, "VUID-vkBindImageMemory-image-01608",
11230 "%s must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT (need to use vkBindImageMemory2).",
11231 report_data->FormatHandle(image).c_str());
11232 }
11233 }
11234
11235 VkBindImageMemoryInfo bindInfo = {};
11236 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
11237 bindInfo.pNext = nullptr;
11238 bindInfo.image = image;
11239 bindInfo.memory = mem;
11240 bindInfo.memoryOffset = memoryOffset;
11241 skip |= ValidateBindImageMemory(1, &bindInfo, "vkBindImageMemory()");
11242 return skip;
11243 }
11244
PreCallValidateBindImageMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfoKHR * pBindInfos) const11245 bool CoreChecks::PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
11246 const VkBindImageMemoryInfoKHR *pBindInfos) const {
11247 return ValidateBindImageMemory(bindInfoCount, pBindInfos, "vkBindImageMemory2()");
11248 }
11249
PreCallValidateBindImageMemory2KHR(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfoKHR * pBindInfos) const11250 bool CoreChecks::PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
11251 const VkBindImageMemoryInfoKHR *pBindInfos) const {
11252 return ValidateBindImageMemory(bindInfoCount, pBindInfos, "vkBindImageMemory2KHR()");
11253 }
11254
PreCallValidateSetEvent(VkDevice device,VkEvent event) const11255 bool CoreChecks::PreCallValidateSetEvent(VkDevice device, VkEvent event) const {
11256 bool skip = false;
11257 const auto event_state = GetEventState(event);
11258 if (event_state) {
11259 if (event_state->write_in_use) {
11260 skip |=
11261 LogError(event, kVUID_Core_DrawState_QueueForwardProgress,
11262 "vkSetEvent(): %s that is already in use by a command buffer.", report_data->FormatHandle(event).c_str());
11263 }
11264 }
11265 return skip;
11266 }
11267
PreCallValidateQueueBindSparse(VkQueue queue,uint32_t bindInfoCount,const VkBindSparseInfo * pBindInfo,VkFence fence) const11268 bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
11269 VkFence fence) const {
11270 const auto queue_data = GetQueueState(queue);
11271 const auto pFence = GetFenceState(fence);
11272 bool skip = ValidateFenceForSubmit(pFence, "VUID-vkQueueBindSparse-fence-01114", "VUID-vkQueueBindSparse-fence-01113",
11273 "VkQueueBindSparse()");
11274 if (skip) {
11275 return true;
11276 }
11277
11278 const auto queueFlags = GetPhysicalDeviceState()->queue_family_properties[queue_data->queueFamilyIndex].queueFlags;
11279 if (!(queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
11280 skip |= LogError(queue, "VUID-vkQueueBindSparse-queuetype",
11281 "vkQueueBindSparse(): a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set.");
11282 }
11283
11284 unordered_set<VkSemaphore> signaled_semaphores;
11285 unordered_set<VkSemaphore> unsignaled_semaphores;
11286 unordered_set<VkSemaphore> internal_semaphores;
11287 auto *vuid_error = device_extensions.vk_khr_timeline_semaphore ? "VUID-vkQueueBindSparse-pWaitSemaphores-03245"
11288 : kVUID_Core_DrawState_QueueForwardProgress;
11289 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
11290 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
11291
11292 auto timeline_semaphore_submit_info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo->pNext);
11293 std::vector<SEMAPHORE_WAIT> semaphore_waits;
11294 std::vector<VkSemaphore> semaphore_signals;
11295 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
11296 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
11297 const auto pSemaphore = GetSemaphoreState(semaphore);
11298 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
11299 skip |= LogError(semaphore, "VUID-VkBindSparseInfo-pWaitSemaphores-03246",
11300 "VkQueueBindSparse: pBindInfo[%u].pWaitSemaphores[%u] (%s) is a timeline semaphore, but "
11301 "pBindInfo[%u] does not include an instance of VkTimelineSemaphoreSubmitInfoKHR",
11302 bindIdx, i, report_data->FormatHandle(semaphore).c_str(), bindIdx);
11303 }
11304 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
11305 bindInfo.waitSemaphoreCount != timeline_semaphore_submit_info->waitSemaphoreValueCount) {
11306 skip |= LogError(semaphore, "VUID-VkBindSparseInfo-pNext-03247",
11307 "VkQueueBindSparse: pBindInfo[%u].pWaitSemaphores[%u] (%s) is a timeline semaphore, it contains "
11308 "an instance of VkTimelineSemaphoreSubmitInfoKHR, but waitSemaphoreValueCount (%u) is different "
11309 "than pBindInfo[%u].waitSemaphoreCount (%u)",
11310 bindIdx, i, report_data->FormatHandle(semaphore).c_str(),
11311 timeline_semaphore_submit_info->waitSemaphoreValueCount, bindIdx, bindInfo.waitSemaphoreCount);
11312 }
11313 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
11314 (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
11315 if (unsignaled_semaphores.count(semaphore) ||
11316 (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled) && !SemaphoreWasSignaled(semaphore))) {
11317 LogObjectList objlist(semaphore);
11318 objlist.add(queue);
11319 skip |= LogError(
11320 objlist, pSemaphore->scope == kSyncScopeInternal ? vuid_error : kVUID_Core_DrawState_QueueForwardProgress,
11321 "vkQueueBindSparse(): Queue %s is waiting on pBindInfo[%u].pWaitSemaphores[%u] (%s) that has no way to be "
11322 "signaled.",
11323 report_data->FormatHandle(queue).c_str(), bindIdx, i, report_data->FormatHandle(semaphore).c_str());
11324 } else {
11325 signaled_semaphores.erase(semaphore);
11326 unsignaled_semaphores.insert(semaphore);
11327 }
11328 }
11329 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
11330 pSemaphore->scope == kSyncScopeExternalTemporary) {
11331 internal_semaphores.insert(semaphore);
11332 }
11333 }
11334
11335 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
11336 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
11337 const auto pSemaphore = GetSemaphoreState(semaphore);
11338 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
11339 skip |= LogError(semaphore, "VUID-VkBindSparseInfo-pWaitSemaphores-03246",
11340 "VkQueueBindSparse: pBindInfo[%u].pSignalSemaphores[%u] (%s) is a timeline semaphore, but "
11341 "pBindInfo[%u] does not include an instance of VkTimelineSemaphoreSubmitInfoKHR",
11342 bindIdx, i, report_data->FormatHandle(semaphore).c_str(), bindIdx);
11343 }
11344 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
11345 timeline_semaphore_submit_info->pSignalSemaphoreValues[i] <= pSemaphore->payload) {
11346 LogObjectList objlist(semaphore);
11347 objlist.add(queue);
11348 skip |= LogError(objlist, "VUID-VkBindSparseInfo-pSignalSemaphores-03249",
11349 "VkQueueBindSparse: signal value (0x%" PRIx64
11350 ") in %s must be greater than current timeline semaphore %s value (0x%" PRIx64
11351 ") in pBindInfo[%u].pSignalSemaphores[%u]",
11352 pSemaphore->payload, report_data->FormatHandle(queue).c_str(),
11353 report_data->FormatHandle(semaphore).c_str(),
11354 timeline_semaphore_submit_info->pSignalSemaphoreValues[i], bindIdx, i);
11355 }
11356 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
11357 bindInfo.signalSemaphoreCount != timeline_semaphore_submit_info->signalSemaphoreValueCount) {
11358 skip |= LogError(semaphore, "VUID-VkBindSparseInfo-pNext-03248",
11359 "VkQueueBindSparse: pBindInfo[%u].pSignalSemaphores[%u] (%s) is a timeline semaphore, it contains "
11360 "an instance of VkTimelineSemaphoreSubmitInfoKHR, but signalSemaphoreValueCount (%u) is different "
11361 "than pBindInfo[%u].signalSemaphoreCount (%u)",
11362 bindIdx, i, report_data->FormatHandle(semaphore).c_str(),
11363 timeline_semaphore_submit_info->signalSemaphoreValueCount, bindIdx, bindInfo.signalSemaphoreCount);
11364 }
11365 if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR && pSemaphore->scope == kSyncScopeInternal) {
11366 if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
11367 LogObjectList objlist(semaphore);
11368 objlist.add(queue);
11369 objlist.add(pSemaphore->signaler.first);
11370 skip |=
11371 LogError(objlist, kVUID_Core_DrawState_QueueForwardProgress,
11372 "vkQueueBindSparse(): %s is signaling pBindInfo[%u].pSignalSemaphores[%u] (%s) that was "
11373 "previously signaled by %s but has not since been waited on by any queue.",
11374 report_data->FormatHandle(queue).c_str(), bindIdx, i, report_data->FormatHandle(semaphore).c_str(),
11375 report_data->FormatHandle(pSemaphore->signaler.first).c_str());
11376 } else {
11377 unsignaled_semaphores.erase(semaphore);
11378 signaled_semaphores.insert(semaphore);
11379 }
11380 }
11381 }
11382
11383 for (uint32_t image_idx = 0; image_idx < bindInfo.imageBindCount; ++image_idx) {
11384 const VkSparseImageMemoryBindInfo &image_bind = bindInfo.pImageBinds[image_idx];
11385 const auto image_state = GetImageState(image_bind.image);
11386
11387 if (image_state && !(image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT)) {
11388 skip |= LogError(image_bind.image, "VUID-VkSparseImageMemoryBindInfo-image-02901",
11389 "vkQueueBindSparse(): pBindInfo[%u].pImageBinds[%u]: image must have been created with "
11390 "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT set",
11391 bindIdx, image_idx);
11392 }
11393 }
11394 }
11395
11396 if (skip) return skip;
11397
11398 // Now verify maxTimelineSemaphoreValueDifference
11399 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
11400 const VkBindSparseInfo *bindInfo = &pBindInfo[bindIdx];
11401 auto *info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(bindInfo->pNext);
11402 if (info) {
11403 // If there are any timeline semaphores, this condition gets checked before the early return above
11404 if (info->waitSemaphoreValueCount)
11405 for (uint32_t i = 0; i < bindInfo->waitSemaphoreCount; ++i) {
11406 VkSemaphore semaphore = bindInfo->pWaitSemaphores[i];
11407 skip |=
11408 ValidateMaxTimelineSemaphoreValueDifference(semaphore, info->pWaitSemaphoreValues[i], "VkQueueBindSparse",
11409 "VUID-VkBindSparseInfo-pWaitSemaphores-03250");
11410 }
11411 // If there are any timeline semaphores, this condition gets checked before the early return above
11412 if (info->signalSemaphoreValueCount)
11413 for (uint32_t i = 0; i < bindInfo->signalSemaphoreCount; ++i) {
11414 VkSemaphore semaphore = bindInfo->pSignalSemaphores[i];
11415 skip |=
11416 ValidateMaxTimelineSemaphoreValueDifference(semaphore, info->pSignalSemaphoreValues[i], "VkQueueBindSparse",
11417 "VUID-VkBindSparseInfo-pSignalSemaphores-03251");
11418 }
11419 }
11420 }
11421
11422 return skip;
11423 }
11424
ValidateSignalSemaphore(VkDevice device,const VkSemaphoreSignalInfoKHR * pSignalInfo,const char * api_name) const11425 bool CoreChecks::ValidateSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo, const char *api_name) const {
11426 bool skip = false;
11427 const auto pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
11428 if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
11429 skip |= LogError(pSignalInfo->semaphore, "VUID-VkSemaphoreSignalInfo-semaphore-03257",
11430 "%s(): semaphore %s must be of VK_SEMAPHORE_TYPE_TIMELINE_KHR type", api_name,
11431 report_data->FormatHandle(pSignalInfo->semaphore).c_str());
11432 return skip;
11433 }
11434 if (pSemaphore && pSemaphore->payload >= pSignalInfo->value) {
11435 skip |= LogError(pSignalInfo->semaphore, "VUID-VkSemaphoreSignalInfo-value-03258",
11436 "%s(): value must be greater than current semaphore %s value", api_name,
11437 report_data->FormatHandle(pSignalInfo->semaphore).c_str());
11438 }
11439 for (auto &pair : queueMap) {
11440 const QUEUE_STATE &queueState = pair.second;
11441 for (const auto &submission : queueState.submissions) {
11442 for (const auto &signalSemaphore : submission.signalSemaphores) {
11443 if (signalSemaphore.semaphore == pSignalInfo->semaphore && pSignalInfo->value >= signalSemaphore.payload) {
11444 skip |= LogError(pSignalInfo->semaphore, "VUID-VkSemaphoreSignalInfo-value-03259",
11445 "%s(): value must be greater than value of pending signal operation "
11446 "for semaphore %s",
11447 api_name, report_data->FormatHandle(pSignalInfo->semaphore).c_str());
11448 }
11449 }
11450 }
11451 }
11452
11453 if (!skip) {
11454 skip |= ValidateMaxTimelineSemaphoreValueDifference(pSignalInfo->semaphore, pSignalInfo->value, "VkSignalSemaphoreKHR",
11455 "VUID-VkSemaphoreSignalInfo-value-03260");
11456 }
11457
11458 return skip;
11459 }
11460
PreCallValidateSignalSemaphore(VkDevice device,const VkSemaphoreSignalInfo * pSignalInfo) const11461 bool CoreChecks::PreCallValidateSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo) const {
11462 return ValidateSignalSemaphore(device, pSignalInfo, "vkSignalSemaphore");
11463 }
11464
PreCallValidateSignalSemaphoreKHR(VkDevice device,const VkSemaphoreSignalInfoKHR * pSignalInfo) const11465 bool CoreChecks::PreCallValidateSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo) const {
11466 return ValidateSignalSemaphore(device, pSignalInfo, "vkSignalSemaphoreKHR");
11467 }
11468
ValidateImportSemaphore(VkSemaphore semaphore,const char * caller_name) const11469 bool CoreChecks::ValidateImportSemaphore(VkSemaphore semaphore, const char *caller_name) const {
11470 bool skip = false;
11471 const SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
11472 if (sema_node) {
11473 const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
11474 skip |= ValidateObjectNotInUse(sema_node, obj_struct, caller_name, kVUIDUndefined);
11475 }
11476 return skip;
11477 }
11478
11479 #ifdef VK_USE_PLATFORM_WIN32_KHR
PreCallValidateImportSemaphoreWin32HandleKHR(VkDevice device,const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo) const11480 bool CoreChecks::PreCallValidateImportSemaphoreWin32HandleKHR(
11481 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo) const {
11482 return ValidateImportSemaphore(pImportSemaphoreWin32HandleInfo->semaphore, "vkImportSemaphoreWin32HandleKHR");
11483 }
11484
11485 #endif // VK_USE_PLATFORM_WIN32_KHR
11486
PreCallValidateImportSemaphoreFdKHR(VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo) const11487 bool CoreChecks::PreCallValidateImportSemaphoreFdKHR(VkDevice device,
11488 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo) const {
11489 return ValidateImportSemaphore(pImportSemaphoreFdInfo->semaphore, "vkImportSemaphoreFdKHR");
11490 }
11491
ValidateImportFence(VkFence fence,const char * vuid,const char * caller_name) const11492 bool CoreChecks::ValidateImportFence(VkFence fence, const char *vuid, const char *caller_name) const {
11493 const FENCE_STATE *fence_node = GetFenceState(fence);
11494 bool skip = false;
11495 if (fence_node && fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
11496 skip |=
11497 LogError(fence, vuid, "%s: Fence %s that is currently in use.", caller_name, report_data->FormatHandle(fence).c_str());
11498 }
11499 return skip;
11500 }
11501
11502 #ifdef VK_USE_PLATFORM_WIN32_KHR
PreCallValidateImportFenceWin32HandleKHR(VkDevice device,const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo) const11503 bool CoreChecks::PreCallValidateImportFenceWin32HandleKHR(
11504 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo) const {
11505 return ValidateImportFence(pImportFenceWin32HandleInfo->fence, "VUID-vkImportFenceWin32HandleKHR-fence-04448",
11506 "vkImportFenceWin32HandleKHR()");
11507 }
11508 #endif // VK_USE_PLATFORM_WIN32_KHR
11509
PreCallValidateImportFenceFdKHR(VkDevice device,const VkImportFenceFdInfoKHR * pImportFenceFdInfo) const11510 bool CoreChecks::PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo) const {
11511 return ValidateImportFence(pImportFenceFdInfo->fence, "VUID-vkImportFenceFdKHR-fence-01463", "vkImportFenceFdKHR()");
11512 }
11513
GetSwapchainImpliedImageCreateInfo(VkSwapchainCreateInfoKHR const * pCreateInfo)11514 static VkImageCreateInfo GetSwapchainImpliedImageCreateInfo(VkSwapchainCreateInfoKHR const *pCreateInfo) {
11515 VkImageCreateInfo result = {};
11516 result.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
11517 result.pNext = nullptr;
11518
11519 if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
11520 result.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
11521 if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) result.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
11522 if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
11523 result.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
11524
11525 result.imageType = VK_IMAGE_TYPE_2D;
11526 result.format = pCreateInfo->imageFormat;
11527 result.extent.width = pCreateInfo->imageExtent.width;
11528 result.extent.height = pCreateInfo->imageExtent.height;
11529 result.extent.depth = 1;
11530 result.mipLevels = 1;
11531 result.arrayLayers = pCreateInfo->imageArrayLayers;
11532 result.samples = VK_SAMPLE_COUNT_1_BIT;
11533 result.tiling = VK_IMAGE_TILING_OPTIMAL;
11534 result.usage = pCreateInfo->imageUsage;
11535 result.sharingMode = pCreateInfo->imageSharingMode;
11536 result.queueFamilyIndexCount = pCreateInfo->queueFamilyIndexCount;
11537 result.pQueueFamilyIndices = pCreateInfo->pQueueFamilyIndices;
11538 result.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
11539
11540 return result;
11541 }
11542
ValidateCreateSwapchain(const char * func_name,VkSwapchainCreateInfoKHR const * pCreateInfo,const SURFACE_STATE * surface_state,const SWAPCHAIN_NODE * old_swapchain_state) const11543 bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreateInfoKHR const *pCreateInfo,
11544 const SURFACE_STATE *surface_state, const SWAPCHAIN_NODE *old_swapchain_state) const {
11545 // All physical devices and queue families are required to be able to present to any native window on Android; require the
11546 // application to have established support on any other platform.
11547 if (!instance_extensions.vk_khr_android_surface) {
11548 auto support_predicate = [this](decltype(surface_state->gpu_queue_support)::value_type qs) -> bool {
11549 // TODO: should restrict search only to queue families of VkDeviceQueueCreateInfos, not whole phys. device
11550 return (qs.first.gpu == physical_device) && qs.second;
11551 };
11552 const auto &support = surface_state->gpu_queue_support;
11553 bool is_supported = std::any_of(support.begin(), support.end(), support_predicate);
11554
11555 if (!is_supported) {
11556 if (LogError(
11557 device, "VUID-VkSwapchainCreateInfoKHR-surface-01270",
11558 "%s: pCreateInfo->surface is not known at this time to be supported for presentation by this device. The "
11559 "vkGetPhysicalDeviceSurfaceSupportKHR() must be called beforehand, and it must return VK_TRUE support with "
11560 "this surface for at least one queue family of this device.",
11561 func_name))
11562 return true;
11563 }
11564 }
11565
11566 if (old_swapchain_state) {
11567 if (old_swapchain_state->createInfo.surface != pCreateInfo->surface) {
11568 if (LogError(pCreateInfo->oldSwapchain, "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
11569 "%s: pCreateInfo->oldSwapchain's surface is not pCreateInfo->surface", func_name))
11570 return true;
11571 }
11572 if (old_swapchain_state->retired) {
11573 if (LogError(pCreateInfo->oldSwapchain, "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
11574 "%s: pCreateInfo->oldSwapchain is retired", func_name))
11575 return true;
11576 }
11577 }
11578
11579 if ((pCreateInfo->imageExtent.width == 0) || (pCreateInfo->imageExtent.height == 0)) {
11580 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689",
11581 "%s: pCreateInfo->imageExtent = (%d, %d) which is illegal.", func_name, pCreateInfo->imageExtent.width,
11582 pCreateInfo->imageExtent.height))
11583 return true;
11584 }
11585
11586 auto physical_device_state = GetPhysicalDeviceState();
11587 bool skip = false;
11588 VkSurfaceTransformFlagBitsKHR currentTransform = physical_device_state->surfaceCapabilities.currentTransform;
11589 if ((pCreateInfo->preTransform & currentTransform) != pCreateInfo->preTransform) {
11590 skip |= LogPerformanceWarning(physical_device, kVUID_Core_Swapchain_PreTransform,
11591 "%s: pCreateInfo->preTransform (%s) doesn't match the currentTransform (%s) returned by "
11592 "vkGetPhysicalDeviceSurfaceCapabilitiesKHR, the presentation engine will transform the image "
11593 "content as part of the presentation operation.",
11594 func_name, string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform),
11595 string_VkSurfaceTransformFlagBitsKHR(currentTransform));
11596 }
11597
11598 VkSurfaceCapabilitiesKHR capabilities{};
11599 DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_state->phys_device, pCreateInfo->surface, &capabilities);
11600 // Validate pCreateInfo->minImageCount against VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
11601 if (pCreateInfo->minImageCount < capabilities.minImageCount) {
11602 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271",
11603 "%s called with minImageCount = %d, which is outside the bounds returned by "
11604 "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
11605 func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
11606 return true;
11607 }
11608
11609 if ((capabilities.maxImageCount > 0) && (pCreateInfo->minImageCount > capabilities.maxImageCount)) {
11610 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272",
11611 "%s called with minImageCount = %d, which is outside the bounds returned by "
11612 "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
11613 func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
11614 return true;
11615 }
11616
11617 // Validate pCreateInfo->imageExtent against VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
11618 if ((pCreateInfo->imageExtent.width < capabilities.minImageExtent.width) ||
11619 (pCreateInfo->imageExtent.width > capabilities.maxImageExtent.width) ||
11620 (pCreateInfo->imageExtent.height < capabilities.minImageExtent.height) ||
11621 (pCreateInfo->imageExtent.height > capabilities.maxImageExtent.height)) {
11622 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274",
11623 "%s called with imageExtent = (%d,%d), which is outside the bounds returned by "
11624 "vkGetPhysicalDeviceSurfaceCapabilitiesKHR(): currentExtent = (%d,%d), minImageExtent = (%d,%d), "
11625 "maxImageExtent = (%d,%d).",
11626 func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height, capabilities.currentExtent.width,
11627 capabilities.currentExtent.height, capabilities.minImageExtent.width, capabilities.minImageExtent.height,
11628 capabilities.maxImageExtent.width, capabilities.maxImageExtent.height))
11629 return true;
11630 }
11631 // pCreateInfo->preTransform should have exactly one bit set, and that bit must also be set in
11632 // VkSurfaceCapabilitiesKHR::supportedTransforms.
11633 if (!pCreateInfo->preTransform || (pCreateInfo->preTransform & (pCreateInfo->preTransform - 1)) ||
11634 !(pCreateInfo->preTransform & capabilities.supportedTransforms)) {
11635 // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message. Build
11636 // it up a little at a time, and then log it:
11637 std::string errorString = "";
11638 char str[1024];
11639 // Here's the first part of the message:
11640 sprintf(str, "%s called with a non-supported pCreateInfo->preTransform (i.e. %s). Supported values are:\n", func_name,
11641 string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform));
11642 errorString += str;
11643 for (int i = 0; i < 32; i++) {
11644 // Build up the rest of the message:
11645 if ((1 << i) & capabilities.supportedTransforms) {
11646 const char *newStr = string_VkSurfaceTransformFlagBitsKHR((VkSurfaceTransformFlagBitsKHR)(1 << i));
11647 sprintf(str, " %s\n", newStr);
11648 errorString += str;
11649 }
11650 }
11651 // Log the message that we've built up:
11652 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "%s.", errorString.c_str())) return true;
11653 }
11654
11655 // pCreateInfo->compositeAlpha should have exactly one bit set, and that bit must also be set in
11656 // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha
11657 if (!pCreateInfo->compositeAlpha || (pCreateInfo->compositeAlpha & (pCreateInfo->compositeAlpha - 1)) ||
11658 !((pCreateInfo->compositeAlpha) & capabilities.supportedCompositeAlpha)) {
11659 // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message. Build
11660 // it up a little at a time, and then log it:
11661 std::string errorString = "";
11662 char str[1024];
11663 // Here's the first part of the message:
11664 sprintf(str, "%s called with a non-supported pCreateInfo->compositeAlpha (i.e. %s). Supported values are:\n", func_name,
11665 string_VkCompositeAlphaFlagBitsKHR(pCreateInfo->compositeAlpha));
11666 errorString += str;
11667 for (int i = 0; i < 32; i++) {
11668 // Build up the rest of the message:
11669 if ((1 << i) & capabilities.supportedCompositeAlpha) {
11670 const char *newStr = string_VkCompositeAlphaFlagBitsKHR((VkCompositeAlphaFlagBitsKHR)(1 << i));
11671 sprintf(str, " %s\n", newStr);
11672 errorString += str;
11673 }
11674 }
11675 // Log the message that we've built up:
11676 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "%s.", errorString.c_str())) return true;
11677 }
11678 // Validate pCreateInfo->imageArrayLayers against VkSurfaceCapabilitiesKHR::maxImageArrayLayers:
11679 if (pCreateInfo->imageArrayLayers > capabilities.maxImageArrayLayers) {
11680 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275",
11681 "%s called with a non-supported imageArrayLayers (i.e. %d). Maximum value is %d.", func_name,
11682 pCreateInfo->imageArrayLayers, capabilities.maxImageArrayLayers))
11683 return true;
11684 }
11685 // Validate pCreateInfo->imageUsage against VkSurfaceCapabilitiesKHR::supportedUsageFlags:
11686 if (pCreateInfo->imageUsage != (pCreateInfo->imageUsage & capabilities.supportedUsageFlags)) {
11687 const char *validation_error = "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276";
11688 if ((IsExtEnabled(device_extensions.vk_khr_shared_presentable_image) == true) &&
11689 ((pCreateInfo->presentMode == VK_PRESENT_MODE_IMMEDIATE_KHR) ||
11690 (pCreateInfo->presentMode == VK_PRESENT_MODE_MAILBOX_KHR) || (pCreateInfo->presentMode == VK_PRESENT_MODE_FIFO_KHR) ||
11691 (pCreateInfo->presentMode == VK_PRESENT_MODE_FIFO_RELAXED_KHR))) {
11692 validation_error = "VUID-VkSwapchainCreateInfoKHR-presentMode-01427";
11693 }
11694 if (LogError(device, validation_error,
11695 "%s called with a non-supported pCreateInfo->imageUsage (i.e. 0x%08x). Supported flag bits are 0x%08x.",
11696 func_name, pCreateInfo->imageUsage, capabilities.supportedUsageFlags))
11697 return true;
11698 }
11699
11700 if (device_extensions.vk_khr_surface_protected_capabilities && (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)) {
11701 VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR};
11702 surfaceInfo.surface = pCreateInfo->surface;
11703 VkSurfaceProtectedCapabilitiesKHR surfaceProtectedCapabilities = {VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR};
11704 VkSurfaceCapabilities2KHR surfaceCapabilities = {VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR};
11705 surfaceCapabilities.pNext = &surfaceProtectedCapabilities;
11706 DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(physical_device_state->phys_device, &surfaceInfo, &surfaceCapabilities);
11707
11708 if (!surfaceProtectedCapabilities.supportsProtected) {
11709 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-flags-03187",
11710 "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR but the surface "
11711 "capabilities does not have VkSurfaceProtectedCapabilitiesKHR.supportsProtected set to VK_TRUE.",
11712 func_name))
11713 return true;
11714 }
11715 }
11716
11717 std::vector<VkSurfaceFormatKHR> surface_formats;
11718 const auto *surface_formats_ref = &surface_formats;
11719
11720 // Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfaceFormatsKHR():
11721 if (physical_device_state->surface_formats.empty()) {
11722 uint32_t surface_format_count = 0;
11723 DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count, nullptr);
11724 surface_formats.resize(surface_format_count);
11725 DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count,
11726 &surface_formats[0]);
11727 } else {
11728 surface_formats_ref = &physical_device_state->surface_formats;
11729 }
11730
11731 {
11732 // Validate pCreateInfo->imageFormat against VkSurfaceFormatKHR::format:
11733 bool foundFormat = false;
11734 bool foundColorSpace = false;
11735 bool foundMatch = false;
11736 for (auto const &format : *surface_formats_ref) {
11737 if (pCreateInfo->imageFormat == format.format) {
11738 // Validate pCreateInfo->imageColorSpace against VkSurfaceFormatKHR::colorSpace:
11739 foundFormat = true;
11740 if (pCreateInfo->imageColorSpace == format.colorSpace) {
11741 foundMatch = true;
11742 break;
11743 }
11744 } else {
11745 if (pCreateInfo->imageColorSpace == format.colorSpace) {
11746 foundColorSpace = true;
11747 }
11748 }
11749 }
11750 if (!foundMatch) {
11751 if (!foundFormat) {
11752 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
11753 "%s called with a non-supported pCreateInfo->imageFormat (%s).", func_name,
11754 string_VkFormat(pCreateInfo->imageFormat)))
11755 return true;
11756 }
11757 if (!foundColorSpace) {
11758 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
11759 "%s called with a non-supported pCreateInfo->imageColorSpace (%s).", func_name,
11760 string_VkColorSpaceKHR(pCreateInfo->imageColorSpace)))
11761 return true;
11762 }
11763 }
11764 }
11765
11766 std::vector<VkPresentModeKHR> present_modes;
11767 const auto *present_modes_ref = &present_modes;
11768
11769 // Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfacePresentModesKHR():
11770 if (physical_device_state->present_modes.empty()) {
11771 uint32_t present_mode_count = 0;
11772 DispatchGetPhysicalDeviceSurfacePresentModesKHR(physical_device_state->phys_device, pCreateInfo->surface,
11773 &present_mode_count, nullptr);
11774 present_modes.resize(present_mode_count);
11775 DispatchGetPhysicalDeviceSurfacePresentModesKHR(physical_device_state->phys_device, pCreateInfo->surface,
11776 &present_mode_count, &present_modes[0]);
11777 } else {
11778 present_modes_ref = &physical_device_state->present_modes;
11779 }
11780
11781 // Validate pCreateInfo->presentMode against vkGetPhysicalDeviceSurfacePresentModesKHR():
11782 bool foundMatch =
11783 std::find(present_modes_ref->begin(), present_modes_ref->end(), pCreateInfo->presentMode) != present_modes_ref->end();
11784 if (!foundMatch) {
11785 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-presentMode-01281",
11786 "%s called with a non-supported presentMode (i.e. %s).", func_name,
11787 string_VkPresentModeKHR(pCreateInfo->presentMode)))
11788 return true;
11789 }
11790
11791 // Validate state for shared presentable case
11792 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
11793 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
11794 if (!device_extensions.vk_khr_shared_presentable_image) {
11795 if (LogError(
11796 device, kVUID_Core_DrawState_ExtensionNotEnabled,
11797 "%s called with presentMode %s which requires the VK_KHR_shared_presentable_image extension, which has not "
11798 "been enabled.",
11799 func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
11800 return true;
11801 } else if (pCreateInfo->minImageCount != 1) {
11802 if (LogError(
11803 device, "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383",
11804 "%s called with presentMode %s, but minImageCount value is %d. For shared presentable image, minImageCount "
11805 "must be 1.",
11806 func_name, string_VkPresentModeKHR(pCreateInfo->presentMode), pCreateInfo->minImageCount))
11807 return true;
11808 }
11809 }
11810
11811 if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
11812 if (!device_extensions.vk_khr_swapchain_mutable_format) {
11813 if (LogError(device, kVUID_Core_DrawState_ExtensionNotEnabled,
11814 "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR which requires the "
11815 "VK_KHR_swapchain_mutable_format extension, which has not been enabled.",
11816 func_name))
11817 return true;
11818 } else {
11819 const auto *image_format_list = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pCreateInfo->pNext);
11820 if (image_format_list == nullptr) {
11821 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-flags-03168",
11822 "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the pNext chain of "
11823 "pCreateInfo does not contain an instance of VkImageFormatListCreateInfoKHR.",
11824 func_name))
11825 return true;
11826 } else if (image_format_list->viewFormatCount == 0) {
11827 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-flags-03168",
11828 "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the viewFormatCount "
11829 "member of VkImageFormatListCreateInfoKHR in the pNext chain is zero.",
11830 func_name))
11831 return true;
11832 } else {
11833 bool found_base_format = false;
11834 for (uint32_t i = 0; i < image_format_list->viewFormatCount; ++i) {
11835 if (image_format_list->pViewFormats[i] == pCreateInfo->imageFormat) {
11836 found_base_format = true;
11837 break;
11838 }
11839 }
11840 if (!found_base_format) {
11841 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-flags-03168",
11842 "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but none of the "
11843 "elements of the pViewFormats member of VkImageFormatListCreateInfoKHR match "
11844 "pCreateInfo->imageFormat.",
11845 func_name))
11846 return true;
11847 }
11848 }
11849 }
11850 }
11851
11852 if ((pCreateInfo->imageSharingMode == VK_SHARING_MODE_CONCURRENT) && pCreateInfo->pQueueFamilyIndices) {
11853 bool skip1 = ValidatePhysicalDeviceQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices,
11854 "vkCreateBuffer", "pCreateInfo->pQueueFamilyIndices",
11855 "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428");
11856 if (skip1) return true;
11857 }
11858
11859 // Validate pCreateInfo->imageUsage against GetPhysicalDeviceFormatProperties
11860 const VkFormatProperties format_properties = GetPDFormatProperties(pCreateInfo->imageFormat);
11861 const VkFormatFeatureFlags tiling_features = format_properties.optimalTilingFeatures;
11862
11863 if (tiling_features == 0) {
11864 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11865 "%s: pCreateInfo->imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL has no supported format features on this "
11866 "physical device.",
11867 func_name, string_VkFormat(pCreateInfo->imageFormat)))
11868 return true;
11869 } else if ((pCreateInfo->imageUsage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(tiling_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
11870 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11871 "%s: pCreateInfo->imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes "
11872 "VK_IMAGE_USAGE_SAMPLED_BIT.",
11873 func_name, string_VkFormat(pCreateInfo->imageFormat)))
11874 return true;
11875 } else if ((pCreateInfo->imageUsage & VK_IMAGE_USAGE_STORAGE_BIT) && !(tiling_features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
11876 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11877 "%s: pCreateInfo->imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes "
11878 "VK_IMAGE_USAGE_STORAGE_BIT.",
11879 func_name, string_VkFormat(pCreateInfo->imageFormat)))
11880 return true;
11881 } else if ((pCreateInfo->imageUsage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) &&
11882 !(tiling_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
11883 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11884 "%s: pCreateInfo->imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes "
11885 "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT.",
11886 func_name, string_VkFormat(pCreateInfo->imageFormat)))
11887 return true;
11888 } else if ((pCreateInfo->imageUsage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) &&
11889 !(tiling_features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
11890 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11891 "%s: pCreateInfo->imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes "
11892 "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT.",
11893 func_name, string_VkFormat(pCreateInfo->imageFormat)))
11894 return true;
11895 } else if ((pCreateInfo->imageUsage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) &&
11896 !(tiling_features & (VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))) {
11897 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11898 "%s: pCreateInfo->imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes "
11899 "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT or VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT.",
11900 func_name, string_VkFormat(pCreateInfo->imageFormat)))
11901 return true;
11902 }
11903
11904 const VkImageCreateInfo image_create_info = GetSwapchainImpliedImageCreateInfo(pCreateInfo);
11905 VkImageFormatProperties image_properties = {};
11906 const VkResult image_properties_result = DispatchGetPhysicalDeviceImageFormatProperties(
11907 physical_device, image_create_info.format, image_create_info.imageType, image_create_info.tiling, image_create_info.usage,
11908 image_create_info.flags, &image_properties);
11909
11910 if (image_properties_result != VK_SUCCESS) {
11911 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11912 "vkGetPhysicalDeviceImageFormatProperties() unexpectedly failed, "
11913 "when called for %s validation with following params: "
11914 "format: %s, imageType: %s, "
11915 "tiling: %s, usage: %s, "
11916 "flags: %s.",
11917 func_name, string_VkFormat(image_create_info.format), string_VkImageType(image_create_info.imageType),
11918 string_VkImageTiling(image_create_info.tiling), string_VkImageUsageFlags(image_create_info.usage).c_str(),
11919 string_VkImageCreateFlags(image_create_info.flags).c_str()))
11920 return true;
11921 }
11922
11923 // Validate pCreateInfo->imageArrayLayers against VkImageFormatProperties::maxArrayLayers
11924 if (pCreateInfo->imageArrayLayers > image_properties.maxArrayLayers) {
11925 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11926 "%s called with a non-supported imageArrayLayers (i.e. %d). "
11927 "Maximum value returned by vkGetPhysicalDeviceImageFormatProperties() is %d "
11928 "for imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL",
11929 func_name, pCreateInfo->imageArrayLayers, image_properties.maxArrayLayers,
11930 string_VkFormat(pCreateInfo->imageFormat)))
11931 return true;
11932 }
11933
11934 // Validate pCreateInfo->imageExtent against VkImageFormatProperties::maxExtent
11935 if ((pCreateInfo->imageExtent.width > image_properties.maxExtent.width) ||
11936 (pCreateInfo->imageExtent.height > image_properties.maxExtent.height)) {
11937 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
11938 "%s called with imageExtent = (%d,%d), which is bigger than max extent (%d,%d)"
11939 "returned by vkGetPhysicalDeviceImageFormatProperties(): "
11940 "for imageFormat %s with tiling VK_IMAGE_TILING_OPTIMAL",
11941 func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height, image_properties.maxExtent.width,
11942 image_properties.maxExtent.height, string_VkFormat(pCreateInfo->imageFormat)))
11943 return true;
11944 }
11945
11946 if ((pCreateInfo->flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR) &&
11947 device_group_create_info.physicalDeviceCount == 1) {
11948 if (LogError(device, "VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429",
11949 "%s called with flags containing VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR"
11950 "but logical device was created with VkDeviceGroupDeviceCreateInfo::physicalDeviceCount equal to 1",
11951 func_name))
11952 return true;
11953 }
11954 return skip;
11955 }
11956
PreCallValidateCreateSwapchainKHR(VkDevice device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchain) const11957 bool CoreChecks::PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
11958 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) const {
11959 const auto surface_state = GetSurfaceState(pCreateInfo->surface);
11960 const auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
11961 return ValidateCreateSwapchain("vkCreateSwapchainKHR()", pCreateInfo, surface_state, old_swapchain_state);
11962 }
11963
PreCallRecordDestroySwapchainKHR(VkDevice device,VkSwapchainKHR swapchain,const VkAllocationCallbacks * pAllocator)11964 void CoreChecks::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
11965 const VkAllocationCallbacks *pAllocator) {
11966 if (swapchain) {
11967 auto swapchain_data = GetSwapchainState(swapchain);
11968 if (swapchain_data) {
11969 for (const auto &swapchain_image : swapchain_data->images) {
11970 imageLayoutMap.erase(swapchain_image.image);
11971 EraseQFOImageRelaseBarriers(swapchain_image.image);
11972 }
11973 }
11974 }
11975 StateTracker::PreCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
11976 }
11977
PreCallValidateGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages) const11978 bool CoreChecks::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
11979 VkImage *pSwapchainImages) const {
11980 auto swapchain_state = GetSwapchainState(swapchain);
11981 bool skip = false;
11982 if (swapchain_state && pSwapchainImages) {
11983 if (*pSwapchainImageCount > swapchain_state->get_swapchain_image_count) {
11984 skip |=
11985 LogError(device, kVUID_Core_Swapchain_InvalidCount,
11986 "vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImages, and with pSwapchainImageCount set to a "
11987 "value (%d) that is greater than the value (%d) that was returned when pSwapchainImages was NULL.",
11988 *pSwapchainImageCount, swapchain_state->get_swapchain_image_count);
11989 }
11990 }
11991 return skip;
11992 }
11993
PostCallRecordGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages,VkResult result)11994 void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
11995 VkImage *pSwapchainImages, VkResult result) {
11996 // This function will run twice. The first is to get pSwapchainImageCount. The second is to get pSwapchainImages.
11997 // The first time in StateTracker::PostCallRecordGetSwapchainImagesKHR only generates the container's size.
11998 // The second time in StateTracker::PostCallRecordGetSwapchainImagesKHR will create VKImage and IMAGE_STATE.
11999
12000 // So GlobalImageLayoutMap saving new IMAGE_STATEs has to run in the second time.
12001 // pSwapchainImages is not nullptr and it needs to wait until StateTracker::PostCallRecordGetSwapchainImagesKHR.
12002
12003 uint32_t new_swapchain_image_index = 0;
12004 if (((result == VK_SUCCESS) || (result == VK_INCOMPLETE)) && pSwapchainImages) {
12005 auto swapchain_state = GetSwapchainState(swapchain);
12006 const auto image_vector_size = swapchain_state->images.size();
12007
12008 for (; new_swapchain_image_index < *pSwapchainImageCount; ++new_swapchain_image_index) {
12009 if ((new_swapchain_image_index >= image_vector_size) ||
12010 (swapchain_state->images[new_swapchain_image_index].image == VK_NULL_HANDLE))
12011 break;
12012 ;
12013 }
12014 }
12015 StateTracker::PostCallRecordGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages, result);
12016
12017 if (((result == VK_SUCCESS) || (result == VK_INCOMPLETE)) && pSwapchainImages) {
12018 for (; new_swapchain_image_index < *pSwapchainImageCount; ++new_swapchain_image_index) {
12019 auto image_state = Get<IMAGE_STATE>(pSwapchainImages[new_swapchain_image_index]);
12020 AddInitialLayoutintoImageLayoutMap(*image_state, imageLayoutMap);
12021 }
12022 }
12023 }
12024
// Validate vkQueuePresentKHR: wait-semaphore type/signal state, each presented image's
// index/acquisition/layout, surface presentation support for this queue family, and the
// VkPresentRegionsKHR / VkPresentTimesInfoGOOGLE pNext extension structs.
bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) const {
    bool skip = false;
    const auto queue_state = GetQueueState(queue);

    // Wait semaphores must be binary, and each must have a signal operation pending or possible.
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        const auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_BINARY_KHR) {
            skip |= LogError(pPresentInfo->pWaitSemaphores[i], "VUID-vkQueuePresentKHR-pWaitSemaphores-03267",
                             "vkQueuePresentKHR: pWaitSemaphores[%u] (%s) is not a VK_SEMAPHORE_TYPE_BINARY_KHR", i,
                             report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
        }
        if (pSemaphore && !pSemaphore->signaled && !SemaphoreWasSignaled(pPresentInfo->pWaitSemaphores[i])) {
            LogObjectList objlist(queue);
            objlist.add(pPresentInfo->pWaitSemaphores[i]);
            skip |= LogError(objlist, "VUID-vkQueuePresentKHR-pWaitSemaphores-03268",
                             "vkQueuePresentKHR: Queue %s is waiting on pWaitSemaphores[%u] (%s) that has no way to be signaled.",
                             report_data->FormatHandle(queue).c_str(), i,
                             report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
        }
    }

    // Per-swapchain checks: index in range, image acquired, and in a presentable layout.
    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        const auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data) {
            if (pPresentInfo->pImageIndices[i] >= swapchain_data->images.size()) {
                skip |= LogError(
                    pPresentInfo->pSwapchains[i], kVUID_Core_DrawState_SwapchainInvalidImage,
                    "vkQueuePresentKHR: pSwapchains[%u] image index is too large (%u). There are only %u images in this swapchain.",
                    i, pPresentInfo->pImageIndices[i], (uint32_t)swapchain_data->images.size());
            } else {
                auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
                // NOTE(review): image_state is dereferenced below without a null check —
                // presumably guaranteed non-null for a tracked swapchain image; confirm.
                const auto image_state = GetImageState(image);

                if (!image_state->acquired) {
                    skip |= LogError(pPresentInfo->pSwapchains[i], kVUID_Core_DrawState_SwapchainImageNotAcquired,
                                     "vkQueuePresentKHR: pSwapchains[%u] image index %u has not been acquired.", i,
                                     pPresentInfo->pImageIndices[i]);
                }

                // Every known layout of the image must be PRESENT_SRC (or SHARED_PRESENT when the
                // shared-presentable-image extension is enabled).
                vector<VkImageLayout> layouts;
                if (FindLayouts(image, layouts)) {
                    for (auto layout : layouts) {
                        if ((layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) && (!device_extensions.vk_khr_shared_presentable_image ||
                                                                            (layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR))) {
                            // The VUID differs depending on whether SHARED_PRESENT was a legal option.
                            const char *validation_error = (device_extensions.vk_khr_shared_presentable_image)
                                                               ? "VUID-VkPresentInfoKHR-pImageIndices-01430"
                                                               : "VUID-VkPresentInfoKHR-pImageIndices-01296";
                            skip |= LogError(queue, validation_error,
                                             "vkQueuePresentKHR(): pSwapchains[%u] images passed to present must be in layout "
                                             "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or "
                                             "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR but is in %s.",
                                             i, string_VkImageLayout(layout));
                        }
                    }
                }
            }

            // All physical devices and queue families are required to be able to present to any native window on Android; require
            // the application to have established support on any other platform.
            if (!instance_extensions.vk_khr_android_surface) {
                const auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
                auto support_it = surface_state->gpu_queue_support.find({physical_device, queue_state->queueFamilyIndex});

                if (support_it == surface_state->gpu_queue_support.end()) {
                    skip |= LogError(
                        pPresentInfo->pSwapchains[i], kVUID_Core_DrawState_SwapchainUnsupportedQueue,
                        "vkQueuePresentKHR: Presenting pSwapchains[%u] image without calling vkGetPhysicalDeviceSurfaceSupportKHR",
                        i);
                } else if (!support_it->second) {
                    skip |= LogError(
                        pPresentInfo->pSwapchains[i], "VUID-vkQueuePresentKHR-pSwapchains-01292",
                        "vkQueuePresentKHR: Presenting pSwapchains[%u] image on queue that cannot present to this surface.", i);
                }
            }
        }
    }
    if (pPresentInfo->pNext) {
        // Verify ext struct
        // VkPresentRegionsKHR: each rectangle must fit inside the swapchain's imageExtent and layer range.
        const auto *present_regions = lvl_find_in_chain<VkPresentRegionsKHR>(pPresentInfo->pNext);
        if (present_regions) {
            for (uint32_t i = 0; i < present_regions->swapchainCount; ++i) {
                const auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
                assert(swapchain_data);
                VkPresentRegionKHR region = present_regions->pRegions[i];
                for (uint32_t j = 0; j < region.rectangleCount; ++j) {
                    VkRectLayerKHR rect = region.pRectangles[j];
                    if ((rect.offset.x + rect.extent.width) > swapchain_data->createInfo.imageExtent.width) {
                        skip |= LogError(pPresentInfo->pSwapchains[i], "VUID-VkRectLayerKHR-offset-01261",
                                         "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
                                         "pRegion[%i].pRectangles[%i], the sum of offset.x (%i) and extent.width (%i) is greater "
                                         "than the corresponding swapchain's imageExtent.width (%i).",
                                         i, j, rect.offset.x, rect.extent.width, swapchain_data->createInfo.imageExtent.width);
                    }
                    if ((rect.offset.y + rect.extent.height) > swapchain_data->createInfo.imageExtent.height) {
                        skip |= LogError(pPresentInfo->pSwapchains[i], "VUID-VkRectLayerKHR-offset-01261",
                                         "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
                                         "pRegion[%i].pRectangles[%i], the sum of offset.y (%i) and extent.height (%i) is greater "
                                         "than the corresponding swapchain's imageExtent.height (%i).",
                                         i, j, rect.offset.y, rect.extent.height, swapchain_data->createInfo.imageExtent.height);
                    }
                    if (rect.layer > swapchain_data->createInfo.imageArrayLayers) {
                        skip |= LogError(
                            pPresentInfo->pSwapchains[i], "VUID-VkRectLayerKHR-layer-01262",
                            "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, pRegion[%i].pRectangles[%i], the layer "
                            "(%i) is greater than the corresponding swapchain's imageArrayLayers (%i).",
                            i, j, rect.layer, swapchain_data->createInfo.imageArrayLayers);
                    }
                }
            }
        }

        // VkPresentTimesInfoGOOGLE: its swapchainCount must mirror VkPresentInfoKHR's.
        const auto *present_times_info = lvl_find_in_chain<VkPresentTimesInfoGOOGLE>(pPresentInfo->pNext);
        if (present_times_info) {
            if (pPresentInfo->swapchainCount != present_times_info->swapchainCount) {
                skip |=
                    LogError(pPresentInfo->pSwapchains[0], "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247",
                             "vkQueuePresentKHR(): VkPresentTimesInfoGOOGLE.swapchainCount is %i but pPresentInfo->swapchainCount "
                             "is %i. For VkPresentTimesInfoGOOGLE down pNext chain of VkPresentInfoKHR, "
                             "VkPresentTimesInfoGOOGLE.swapchainCount must equal VkPresentInfoKHR.swapchainCount.",
                             present_times_info->swapchainCount, pPresentInfo->swapchainCount);
            }
        }
    }

    return skip;
}
12151
PreCallValidateCreateSharedSwapchainsKHR(VkDevice device,uint32_t swapchainCount,const VkSwapchainCreateInfoKHR * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchains) const12152 bool CoreChecks::PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
12153 const VkSwapchainCreateInfoKHR *pCreateInfos,
12154 const VkAllocationCallbacks *pAllocator,
12155 VkSwapchainKHR *pSwapchains) const {
12156 bool skip = false;
12157 if (pCreateInfos) {
12158 for (uint32_t i = 0; i < swapchainCount; i++) {
12159 const auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
12160 const auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
12161 std::stringstream func_name;
12162 func_name << "vkCreateSharedSwapchainsKHR[" << swapchainCount << "]()";
12163 skip |= ValidateCreateSwapchain(func_name.str().c_str(), &pCreateInfos[i], surface_state, old_swapchain_state);
12164 }
12165 }
12166 return skip;
12167 }
12168
ValidateAcquireNextImage(VkDevice device,const CommandVersion cmd_version,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex,const char * func_name,const char * semaphore_type_vuid) const12169 bool CoreChecks::ValidateAcquireNextImage(VkDevice device, const CommandVersion cmd_version, VkSwapchainKHR swapchain,
12170 uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
12171 const char *func_name, const char *semaphore_type_vuid) const {
12172 bool skip = false;
12173
12174 auto pSemaphore = GetSemaphoreState(semaphore);
12175 if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_BINARY_KHR) {
12176 skip |= LogError(semaphore, semaphore_type_vuid, "%s: %s is not a VK_SEMAPHORE_TYPE_BINARY_KHR", func_name,
12177 report_data->FormatHandle(semaphore).c_str());
12178 }
12179 if (pSemaphore && pSemaphore->scope == kSyncScopeInternal && pSemaphore->signaled) {
12180 skip |= LogError(semaphore, "VUID-vkAcquireNextImageKHR-semaphore-01286",
12181 "%s: Semaphore must not be currently signaled or in a wait state.", func_name);
12182 }
12183
12184 auto pFence = GetFenceState(fence);
12185 if (pFence) {
12186 skip |= ValidateFenceForSubmit(pFence, "VUID-vkAcquireNextImageKHR-fence-01287", "VUID-vkAcquireNextImageKHR-fence-01287",
12187 "vkAcquireNextImageKHR()");
12188 }
12189
12190 const auto swapchain_data = GetSwapchainState(swapchain);
12191 if (swapchain_data) {
12192 if (swapchain_data->retired) {
12193 skip |= LogError(swapchain, "VUID-vkAcquireNextImageKHR-swapchain-01285",
12194 "%s: This swapchain has been retired. The application can still present any images it "
12195 "has acquired, but cannot acquire any more.",
12196 func_name);
12197 }
12198
12199 auto physical_device_state = GetPhysicalDeviceState();
12200 // TODO: this is technically wrong on many levels, but requires massive cleanup
12201 if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called) {
12202 const uint32_t acquired_images = static_cast<uint32_t>(
12203 std::count_if(swapchain_data->images.begin(), swapchain_data->images.end(), [=](SWAPCHAIN_IMAGE image) {
12204 auto const state = GetImageState(image.image);
12205 return (state && state->acquired);
12206 }));
12207
12208 const uint32_t swapchain_image_count = static_cast<uint32_t>(swapchain_data->images.size());
12209 const auto min_image_count = physical_device_state->surfaceCapabilities.minImageCount;
12210 const bool too_many_already_acquired = acquired_images > swapchain_image_count - min_image_count;
12211 if (timeout == UINT64_MAX && too_many_already_acquired) {
12212 const char *vuid = "INVALID-vuid";
12213 if (cmd_version == CMD_VERSION_1)
12214 vuid = "VUID-vkAcquireNextImageKHR-swapchain-01802";
12215 else if (cmd_version == CMD_VERSION_2)
12216 vuid = "VUID-vkAcquireNextImage2KHR-swapchain-01803";
12217 else
12218 assert(false);
12219
12220 const uint32_t acquirable = swapchain_image_count - min_image_count + 1;
12221 skip |= LogError(swapchain, vuid,
12222 "%s: Application has already previously acquired %" PRIu32 " image%s from swapchain. Only %" PRIu32
12223 " %s available to be acquired using a timeout of UINT64_MAX (given the swapchain has %" PRIu32
12224 ", and VkSurfaceCapabilitiesKHR::minImageCount is %" PRIu32 ").",
12225 func_name, acquired_images, acquired_images > 1 ? "s" : "", acquirable,
12226 acquirable > 1 ? "are" : "is", swapchain_image_count, min_image_count);
12227 }
12228 }
12229 }
12230 return skip;
12231 }
12232
PreCallValidateAcquireNextImageKHR(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex) const12233 bool CoreChecks::PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
12234 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) const {
12235 return ValidateAcquireNextImage(device, CMD_VERSION_1, swapchain, timeout, semaphore, fence, pImageIndex,
12236 "vkAcquireNextImageKHR", "VUID-vkAcquireNextImageKHR-semaphore-03265");
12237 }
12238
PreCallValidateAcquireNextImage2KHR(VkDevice device,const VkAcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex) const12239 bool CoreChecks::PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
12240 uint32_t *pImageIndex) const {
12241 bool skip = false;
12242 skip |= ValidateDeviceMaskToPhysicalDeviceCount(pAcquireInfo->deviceMask, pAcquireInfo->swapchain,
12243 "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290");
12244 skip |= ValidateDeviceMaskToZero(pAcquireInfo->deviceMask, pAcquireInfo->swapchain,
12245 "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291");
12246 skip |= ValidateAcquireNextImage(device, CMD_VERSION_2, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
12247 pAcquireInfo->fence, pImageIndex, "vkAcquireNextImage2KHR",
12248 "VUID-VkAcquireNextImageInfoKHR-semaphore-03266");
12249 return skip;
12250 }
12251
PreCallValidateDestroySurfaceKHR(VkInstance instance,VkSurfaceKHR surface,const VkAllocationCallbacks * pAllocator) const12252 bool CoreChecks::PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
12253 const VkAllocationCallbacks *pAllocator) const {
12254 const auto surface_state = GetSurfaceState(surface);
12255 bool skip = false;
12256 if ((surface_state) && (surface_state->swapchain)) {
12257 skip |= LogError(instance, "VUID-vkDestroySurfaceKHR-surface-01266",
12258 "vkDestroySurfaceKHR() called before its associated VkSwapchainKHR was destroyed.");
12259 }
12260 return skip;
12261 }
12262
12263 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,struct wl_display * display) const12264 bool CoreChecks::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
12265 uint32_t queueFamilyIndex,
12266 struct wl_display *display) const {
12267 const auto pd_state = GetPhysicalDeviceState(physicalDevice);
12268 return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
12269 "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306",
12270 "vkGetPhysicalDeviceWaylandPresentationSupportKHR", "queueFamilyIndex");
12271 }
12272 #endif // VK_USE_PLATFORM_WAYLAND_KHR
12273
12274 #ifdef VK_USE_PLATFORM_WIN32_KHR
PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex) const12275 bool CoreChecks::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
12276 uint32_t queueFamilyIndex) const {
12277 const auto pd_state = GetPhysicalDeviceState(physicalDevice);
12278 return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
12279 "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309",
12280 "vkGetPhysicalDeviceWin32PresentationSupportKHR", "queueFamilyIndex");
12281 }
12282 #endif // VK_USE_PLATFORM_WIN32_KHR
12283
12284 #ifdef VK_USE_PLATFORM_XCB_KHR
PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,xcb_connection_t * connection,xcb_visualid_t visual_id) const12285 bool CoreChecks::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
12286 uint32_t queueFamilyIndex, xcb_connection_t *connection,
12287 xcb_visualid_t visual_id) const {
12288 const auto pd_state = GetPhysicalDeviceState(physicalDevice);
12289 return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
12290 "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312",
12291 "vkGetPhysicalDeviceXcbPresentationSupportKHR", "queueFamilyIndex");
12292 }
12293 #endif // VK_USE_PLATFORM_XCB_KHR
12294
12295 #ifdef VK_USE_PLATFORM_XLIB_KHR
PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,Display * dpy,VisualID visualID) const12296 bool CoreChecks::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
12297 uint32_t queueFamilyIndex, Display *dpy,
12298 VisualID visualID) const {
12299 const auto pd_state = GetPhysicalDeviceState(physicalDevice);
12300 return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
12301 "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315",
12302 "vkGetPhysicalDeviceXlibPresentationSupportKHR", "queueFamilyIndex");
12303 }
12304 #endif // VK_USE_PLATFORM_XLIB_KHR
12305
PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,VkSurfaceKHR surface,VkBool32 * pSupported) const12306 bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
12307 VkSurfaceKHR surface, VkBool32 *pSupported) const {
12308 const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
12309 return ValidateQueueFamilyIndex(physical_device_state, queueFamilyIndex,
12310 "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269",
12311 "vkGetPhysicalDeviceSurfaceSupportKHR", "queueFamilyIndex");
12312 }
12313
ValidateDescriptorUpdateTemplate(const char * func_name,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo) const12314 bool CoreChecks::ValidateDescriptorUpdateTemplate(const char *func_name,
12315 const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo) const {
12316 bool skip = false;
12317 const auto layout = GetDescriptorSetLayoutShared(pCreateInfo->descriptorSetLayout);
12318 if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET == pCreateInfo->templateType && !layout) {
12319 skip |= LogError(pCreateInfo->descriptorSetLayout, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350",
12320 "%s: Invalid pCreateInfo->descriptorSetLayout (%s)", func_name,
12321 report_data->FormatHandle(pCreateInfo->descriptorSetLayout).c_str());
12322 } else if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR == pCreateInfo->templateType) {
12323 auto bind_point = pCreateInfo->pipelineBindPoint;
12324 bool valid_bp = (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) || (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) ||
12325 (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
12326 if (!valid_bp) {
12327 skip |=
12328 LogError(device, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351",
12329 "%s: Invalid pCreateInfo->pipelineBindPoint (%" PRIu32 ").", func_name, static_cast<uint32_t>(bind_point));
12330 }
12331 const auto pipeline_layout = GetPipelineLayout(pCreateInfo->pipelineLayout);
12332 if (!pipeline_layout) {
12333 skip |= LogError(pCreateInfo->pipelineLayout, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352",
12334 "%s: Invalid pCreateInfo->pipelineLayout (%s)", func_name,
12335 report_data->FormatHandle(pCreateInfo->pipelineLayout).c_str());
12336 } else {
12337 const uint32_t pd_set = pCreateInfo->set;
12338 if ((pd_set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[pd_set] ||
12339 !pipeline_layout->set_layouts[pd_set]->IsPushDescriptor()) {
12340 skip |= LogError(pCreateInfo->pipelineLayout, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353",
12341 "%s: pCreateInfo->set (%" PRIu32
12342 ") does not refer to the push descriptor set layout for pCreateInfo->pipelineLayout (%s).",
12343 func_name, pd_set, report_data->FormatHandle(pCreateInfo->pipelineLayout).c_str());
12344 }
12345 }
12346 }
12347 return skip;
12348 }
12349
PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate) const12350 bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device,
12351 const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
12352 const VkAllocationCallbacks *pAllocator,
12353 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) const {
12354 bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplate()", pCreateInfo);
12355 return skip;
12356 }
12357
PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,const VkDescriptorUpdateTemplateCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplateKHR * pDescriptorUpdateTemplate) const12358 bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,
12359 const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
12360 const VkAllocationCallbacks *pAllocator,
12361 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) const {
12362 bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplateKHR()", pCreateInfo);
12363 return skip;
12364 }
12365
ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,const void * pData) const12366 bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,
12367 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
12368 const void *pData) const {
12369 bool skip = false;
12370 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
12371 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
12372 // Object tracker will report errors for invalid descriptorUpdateTemplate values, avoiding a crash in release builds
12373 // but retaining the assert as template support is new enough to want to investigate these in debug builds.
12374 assert(0);
12375 } else {
12376 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
12377 // TODO: Validate template push descriptor updates
12378 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
12379 skip = ValidateUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
12380 }
12381 }
12382 return skip;
12383 }
12384
PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData) const12385 bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
12386 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
12387 const void *pData) const {
12388 return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
12389 }
12390
PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,const void * pData) const12391 bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
12392 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
12393 const void *pData) const {
12394 return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
12395 }
12396
PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,VkPipelineLayout layout,uint32_t set,const void * pData) const12397 bool CoreChecks::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
12398 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
12399 VkPipelineLayout layout, uint32_t set,
12400 const void *pData) const {
12401 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
12402 assert(cb_state);
12403 const char *const func_name = "vkPushDescriptorSetWithTemplateKHR()";
12404 bool skip = false;
12405 skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR, func_name);
12406
12407 const auto layout_data = GetPipelineLayout(layout);
12408 const auto dsl = GetDslFromPipelineLayout(layout_data, set);
12409
12410 // Validate the set index points to a push descriptor set and is in range
12411 if (dsl) {
12412 if (!dsl->IsPushDescriptor()) {
12413 skip = LogError(layout, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
12414 "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name, set,
12415 report_data->FormatHandle(layout).c_str());
12416 }
12417 } else if (layout_data && (set >= layout_data->set_layouts.size())) {
12418 skip = LogError(layout, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
12419 "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
12420 report_data->FormatHandle(layout).c_str(), static_cast<uint32_t>(layout_data->set_layouts.size()));
12421 }
12422
12423 const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
12424 if (template_state) {
12425 const auto &template_ci = template_state->create_info;
12426 static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
12427 std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
12428 std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
12429 std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
12430 "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366")};
12431 skip |= ValidatePipelineBindPoint(cb_state, template_ci.pipelineBindPoint, func_name, bind_errors);
12432
12433 if (template_ci.templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
12434 skip |= LogError(cb_state->commandBuffer, kVUID_Core_PushDescriptorUpdate_TemplateType,
12435 "%s: descriptorUpdateTemplate %s was not created with flag "
12436 "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR.",
12437 func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str());
12438 }
12439 if (template_ci.set != set) {
12440 skip |= LogError(cb_state->commandBuffer, kVUID_Core_PushDescriptorUpdate_Template_SetMismatched,
12441 "%s: descriptorUpdateTemplate %s created with set %" PRIu32
12442 " does not match command parameter set %" PRIu32 ".",
12443 func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(), template_ci.set, set);
12444 }
12445 if (!CompatForSet(set, layout_data, GetPipelineLayout(template_ci.pipelineLayout))) {
12446 LogObjectList objlist(cb_state->commandBuffer);
12447 objlist.add(descriptorUpdateTemplate);
12448 objlist.add(template_ci.pipelineLayout);
12449 objlist.add(layout);
12450 skip |= LogError(objlist, kVUID_Core_PushDescriptorUpdate_Template_LayoutMismatched,
12451 "%s: descriptorUpdateTemplate %s created with %s is incompatible with command parameter "
12452 "%s for set %" PRIu32,
12453 func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(),
12454 report_data->FormatHandle(template_ci.pipelineLayout).c_str(),
12455 report_data->FormatHandle(layout).c_str(), set);
12456 }
12457 }
12458
12459 if (dsl && template_state) {
12460 // Create an empty proxy in order to use the existing descriptor set update validation
12461 cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, nullptr, dsl, 0, this);
12462 // Decode the template into a set of write updates
12463 cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
12464 dsl->GetDescriptorSetLayout());
12465 // Validate the decoded update against the proxy_ds
12466 skip |= ValidatePushDescriptorsUpdate(&proxy_ds, static_cast<uint32_t>(decoded_template.desc_writes.size()),
12467 decoded_template.desc_writes.data(), func_name);
12468 }
12469
12470 return skip;
12471 }
12472
ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice,uint32_t planeIndex,const char * api_name) const12473 bool CoreChecks::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
12474 const char *api_name) const {
12475 bool skip = false;
12476 const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
12477 if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called) {
12478 if (planeIndex >= physical_device_state->display_plane_property_count) {
12479 skip |= LogError(physicalDevice, "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249",
12480 "%s(): planeIndex must be in the range [0, %d] that was returned by "
12481 "vkGetPhysicalDeviceDisplayPlanePropertiesKHR "
12482 "or vkGetPhysicalDeviceDisplayPlaneProperties2KHR. Do you have the plane index hardcoded?",
12483 api_name, physical_device_state->display_plane_property_count - 1);
12484 }
12485 }
12486
12487 return skip;
12488 }
12489
PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice,uint32_t planeIndex,uint32_t * pDisplayCount,VkDisplayKHR * pDisplays) const12490 bool CoreChecks::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
12491 uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) const {
12492 bool skip = false;
12493 skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex,
12494 "vkGetDisplayPlaneSupportedDisplaysKHR");
12495 return skip;
12496 }
12497
PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice,VkDisplayModeKHR mode,uint32_t planeIndex,VkDisplayPlaneCapabilitiesKHR * pCapabilities) const12498 bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
12499 uint32_t planeIndex,
12500 VkDisplayPlaneCapabilitiesKHR *pCapabilities) const {
12501 bool skip = false;
12502 skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex, "vkGetDisplayPlaneCapabilitiesKHR");
12503 return skip;
12504 }
12505
PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo,VkDisplayPlaneCapabilities2KHR * pCapabilities) const12506 bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
12507 const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
12508 VkDisplayPlaneCapabilities2KHR *pCapabilities) const {
12509 bool skip = false;
12510 skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, pDisplayPlaneInfo->planeIndex,
12511 "vkGetDisplayPlaneCapabilities2KHR");
12512 return skip;
12513 }
12514
PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,const VkDebugMarkerMarkerInfoEXT * pMarkerInfo) const12515 bool CoreChecks::PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,
12516 const VkDebugMarkerMarkerInfoEXT *pMarkerInfo) const {
12517 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
12518 assert(cb_state);
12519 return ValidateCmd(cb_state, CMD_DEBUGMARKERBEGINEXT, "vkCmdDebugMarkerBeginEXT()");
12520 }
12521
PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const12522 bool CoreChecks::PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const {
12523 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
12524 assert(cb_state);
12525 return ValidateCmd(cb_state, CMD_DEBUGMARKERENDEXT, "vkCmdDebugMarkerEndEXT()");
12526 }
12527
bool CoreChecks::PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
                                                        VkQueryControlFlags flags, uint32_t index) const {
    // Validates vkCmdBeginQueryIndexedEXT: shared begin-query rules plus the
    // VK_EXT_transform_feedback specific constraints on 'index'.
    if (disabled[query_validation]) return false;
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);
    QueryObject query_obj(queryPool, query, index);
    const char *cmd_name = "vkCmdBeginQueryIndexedEXT()";
    // NOTE: this aggregate is positional — the VUID strings must stay in the order
    // of the ValidateBeginQueryVuids members.
    ValidateBeginQueryVuids vuids = {
        "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool", "VUID-vkCmdBeginQueryIndexedEXT-queryType-02338",
        "VUID-vkCmdBeginQueryIndexedEXT-queryType-00803", "VUID-vkCmdBeginQueryIndexedEXT-queryType-00800",
        "VUID-vkCmdBeginQueryIndexedEXT-query-00802", "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223",
        "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224", "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225",
        "VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922", "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885"};

    bool skip = ValidateBeginQuery(cb_state, query_obj, flags, CMD_BEGINQUERYINDEXEDEXT, cmd_name, &vuids);

    // Extension specific VU's
    const auto &query_pool_ci = GetQueryPoolState(query_obj.pool)->createInfo;
    if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
        // Transform-feedback queries: 'index' selects a vertex stream and must be below the device limit
        if (device_extensions.vk_ext_transform_feedback &&
            (index >= phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams)) {
            skip |= LogError(
                cb_state->commandBuffer, "VUID-vkCmdBeginQueryIndexedEXT-queryType-02339",
                "%s: index %" PRIu32
                " must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams %" PRIu32 ".",
                cmd_name, index, phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams);
        }
    } else if (index != 0) {
        // For every other query type, 'index' must be zero
        skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdBeginQueryIndexedEXT-queryType-02340",
                         "%s: index %" PRIu32
                         " must be zero if %s was not created with type VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT.",
                         cmd_name, index, report_data->FormatHandle(queryPool).c_str());
    }
    return skip;
}
12563
PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags,uint32_t index)12564 void CoreChecks::PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
12565 VkQueryControlFlags flags, uint32_t index) {
12566 if (disabled[query_validation]) return;
12567 QueryObject query_obj = {queryPool, query, index};
12568 EnqueueVerifyBeginQuery(commandBuffer, query_obj, "vkCmdBeginQueryIndexedEXT()");
12569 }
12570
PreCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,uint32_t index)12571 void CoreChecks::PreCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
12572 uint32_t index) {
12573 if (disabled[query_validation]) return;
12574 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
12575 QueryObject query_obj = {queryPool, query, index};
12576 query_obj.endCommandIndex = cb_state->commandCount - 1;
12577 EnqueueVerifyEndQuery(commandBuffer, query_obj);
12578 }
12579
PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,uint32_t index) const12580 bool CoreChecks::PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
12581 uint32_t index) const {
12582 if (disabled[query_validation]) return false;
12583 QueryObject query_obj = {queryPool, query, index};
12584 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
12585 assert(cb_state);
12586 ValidateEndQueryVuids vuids = {"VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool", "VUID-vkCmdEndQueryIndexedEXT-None-02342",
12587 "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-02344"};
12588
12589 return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERYINDEXEDEXT, "vkCmdEndQueryIndexedEXT()", &vuids);
12590 }
12591
PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer,uint32_t firstDiscardRectangle,uint32_t discardRectangleCount,const VkRect2D * pDiscardRectangles) const12592 bool CoreChecks::PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
12593 uint32_t discardRectangleCount,
12594 const VkRect2D *pDiscardRectangles) const {
12595 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
12596 // Minimal validation for command buffer state
12597 return ValidateCmd(cb_state, CMD_SETDISCARDRECTANGLEEXT, "vkCmdSetDiscardRectangleEXT()");
12598 }
12599
bool CoreChecks::PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
                                                         const VkSampleLocationsInfoEXT *pSampleLocationsInfo) const {
    // Validates vkCmdSetSampleLocationsEXT: command buffer state, the sample
    // locations info itself, and that sampleLocationsPerPixel matches the
    // rasterizationSamples of the currently bound graphics pipeline (VUID 01529).
    bool skip = false;
    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // Minimal validation for command buffer state
    skip |= ValidateCmd(cb_state, CMD_SETSAMPLELOCATIONSEXT, "vkCmdSetSampleLocationsEXT()");
    skip |= ValidateSampleLocationsInfo(pSampleLocationsInfo, "vkCmdSetSampleLocationsEXT");
    const auto last_bound_it = cb_state->lastBound.find(VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (last_bound_it != cb_state->lastBound.cend()) {
        const PIPELINE_STATE *pPipe = last_bound_it->second.pipeline_state;
        if (pPipe != nullptr) {
            // Check same error with different log messages
            const safe_VkPipelineMultisampleStateCreateInfo *multisample_state = pPipe->graphicsPipelineCI.pMultisampleState;
            if (multisample_state == nullptr) {
                // Pipeline has no multisample state at all — still the same VUID
                skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdSetSampleLocationsEXT-sampleLocationsPerPixel-01529",
                                 "vkCmdSetSampleLocationsEXT(): pSampleLocationsInfo->sampleLocationsPerPixel must be equal to "
                                 "rasterizationSamples, but the bound graphics pipeline was created without a multisample state");
            } else if (multisample_state->rasterizationSamples != pSampleLocationsInfo->sampleLocationsPerPixel) {
                // Multisample state present but the sample counts disagree
                skip |= LogError(cb_state->commandBuffer, "VUID-vkCmdSetSampleLocationsEXT-sampleLocationsPerPixel-01529",
                                 "vkCmdSetSampleLocationsEXT(): pSampleLocationsInfo->sampleLocationsPerPixel (%s) is not equal to "
                                 "the last bound pipeline's rasterizationSamples (%s)",
                                 string_VkSampleCountFlagBits(pSampleLocationsInfo->sampleLocationsPerPixel),
                                 string_VkSampleCountFlagBits(multisample_state->rasterizationSamples));
            }
        }
    }

    return skip;
}
12629
bool CoreChecks::ValidateCreateSamplerYcbcrConversion(const char *func_name,
                                                      const VkSamplerYcbcrConversionCreateInfo *create_info) const {
    // Shared validation for vkCreateSamplerYcbcrConversion{,KHR}. Checks the
    // conversion format (or Android external format) against the format features
    // required for the requested chroma offsets, chroma filter, and explicit
    // reconstruction settings.
    bool skip = false;
    const VkFormat conversion_format = create_info->format;

    // Need to check for external format conversion first as it allows for non-UNORM format
    bool external_format = false;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if ((nullptr != ext_format_android) && (0 != ext_format_android->externalFormat)) {
        external_format = true;
        if (VK_FORMAT_UNDEFINED != create_info->format) {
            // Early return: with an external format, no further format checks apply
            return LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
                            "%s: CreateInfo format is not VK_FORMAT_UNDEFINED while "
                            "there is a chained VkExternalFormatANDROID struct with a non-zero externalFormat.",
                            func_name);
        }
    }
#endif

    if ((external_format == false) && (FormatIsUNorm(conversion_format) == false)) {
        // The VUID differs depending on whether the AHB extension is enabled on the device
        const char *vuid = (device_extensions.vk_android_external_memory_android_hardware_buffer)
                               ? "VUID-VkSamplerYcbcrConversionCreateInfo-format-04061"
                               : "VUID-VkSamplerYcbcrConversionCreateInfo-format-04060";
        skip |=
            LogError(device, vuid,
                     "%s: CreateInfo format (%s) is not an UNORM format and there is no external format conversion being created.",
                     func_name, string_VkFormat(conversion_format));
    }

    // Gets VkFormatFeatureFlags according to Sampler Ycbcr Conversion Format Features
    // (vkspec.html#potential-format-features)
    VkFormatFeatureFlags format_features = VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM;
    if (conversion_format == VK_FORMAT_UNDEFINED) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        // only check for external format inside VK_FORMAT_UNDEFINED check to prevent unnecessary extra errors from no format
        // features being supported
        if (external_format == true) {
            auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
            if (it != ahb_ext_formats_map.end()) {
                format_features = it->second;
            }
        }
#endif
    } else {
        format_features = GetPotentialFormatFeatures(conversion_format);
    }

    // Check all VUID that are based off of VkFormatFeatureFlags
    // These can't be in StatelessValidation due to needing possible External AHB state for feature support
    if (((format_features & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT) == 0) &&
        ((format_features & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT) == 0)) {
        // At least one of the chroma-sample feature bits must be supported
        skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01650",
                         "%s: Format %s does not support either VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or "
                         "VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT",
                         func_name, string_VkFormat(conversion_format));
    }
    if ((format_features & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT) == 0) {
        // COSITED_EVEN chroma offsets require the cosited feature bit on subsampled axes
        if (FormatIsXChromaSubsampled(conversion_format) && create_info->xChromaOffset == VK_CHROMA_LOCATION_COSITED_EVEN) {
            skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651",
                             "%s: Format %s does not support VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT so xChromaOffset can't "
                             "be VK_CHROMA_LOCATION_COSITED_EVEN",
                             func_name, string_VkFormat(conversion_format));
        }
        if (FormatIsYChromaSubsampled(conversion_format) && create_info->yChromaOffset == VK_CHROMA_LOCATION_COSITED_EVEN) {
            skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651",
                             "%s: Format %s does not support VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT so yChromaOffset can't "
                             "be VK_CHROMA_LOCATION_COSITED_EVEN",
                             func_name, string_VkFormat(conversion_format));
        }
    }
    if ((format_features & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT) == 0) {
        // MIDPOINT chroma offsets require the midpoint feature bit on subsampled axes
        if (FormatIsXChromaSubsampled(conversion_format) && create_info->xChromaOffset == VK_CHROMA_LOCATION_MIDPOINT) {
            skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652",
                             "%s: Format %s does not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT so xChromaOffset can't "
                             "be VK_CHROMA_LOCATION_MIDPOINT",
                             func_name, string_VkFormat(conversion_format));
        }
        if (FormatIsYChromaSubsampled(conversion_format) && create_info->yChromaOffset == VK_CHROMA_LOCATION_MIDPOINT) {
            skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652",
                             "%s: Format %s does not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT so yChromaOffset can't "
                             "be VK_CHROMA_LOCATION_MIDPOINT",
                             func_name, string_VkFormat(conversion_format));
        }
    }
    if (((format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT) == 0) &&
        (create_info->forceExplicitReconstruction == VK_TRUE)) {
        skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-forceExplicitReconstruction-01656",
                         "%s: Format %s does not support "
                         "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT so "
                         "forceExplicitReconstruction must be VK_FALSE",
                         func_name, string_VkFormat(conversion_format));
    }
    if (((format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) == 0) &&
        (create_info->chromaFilter == VK_FILTER_LINEAR)) {
        skip |= LogError(device, "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-01657",
                         "%s: Format %s does not support VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT so "
                         "chromaFilter must not be VK_FILTER_LINEAR",
                         func_name, string_VkFormat(conversion_format));
    }

    return skip;
}
12733
PreCallValidateCreateSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion) const12734 bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
12735 const VkAllocationCallbacks *pAllocator,
12736 VkSamplerYcbcrConversion *pYcbcrConversion) const {
12737 return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversion()", pCreateInfo);
12738 }
12739
PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion) const12740 bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device,
12741 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
12742 const VkAllocationCallbacks *pAllocator,
12743 VkSamplerYcbcrConversion *pYcbcrConversion) const {
12744 return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversionKHR()", pCreateInfo);
12745 }
12746
PreCallValidateCreateSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler) const12747 bool CoreChecks::PreCallValidateCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
12748 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) const {
12749 bool skip = false;
12750
12751 if (samplerMap.size() >= phys_dev_props.limits.maxSamplerAllocationCount) {
12752 skip |= LogError(
12753 device, "VUID-vkCreateSampler-maxSamplerAllocationCount-04110",
12754 "vkCreateSampler(): Number of currently valid sampler objects (%zu) is not less than the maximum allowed (%u).",
12755 samplerMap.size(), phys_dev_props.limits.maxSamplerAllocationCount);
12756 }
12757
12758 if (enabled_features.core11.samplerYcbcrConversion == VK_TRUE) {
12759 const VkSamplerYcbcrConversionInfo *conversion_info = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(pCreateInfo->pNext);
12760 if (conversion_info != nullptr) {
12761 const VkSamplerYcbcrConversion sampler_ycbcr_conversion = conversion_info->conversion;
12762 const SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state = GetSamplerYcbcrConversionState(sampler_ycbcr_conversion);
12763 if ((ycbcr_state->format_features &
12764 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT) == 0) {
12765 const VkFilter chroma_filter = ycbcr_state->chromaFilter;
12766 if (pCreateInfo->minFilter != chroma_filter) {
12767 skip |= LogError(
12768 device, "VUID-VkSamplerCreateInfo-minFilter-01645",
12769 "VkCreateSampler: VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT is "
12770 "not supported for SamplerYcbcrConversion's (%u) format %s so minFilter (%s) needs to be equal to "
12771 "chromaFilter (%s)",
12772 report_data->FormatHandle(sampler_ycbcr_conversion).c_str(), string_VkFormat(ycbcr_state->format),
12773 string_VkFilter(pCreateInfo->minFilter), string_VkFilter(chroma_filter));
12774 }
12775 if (pCreateInfo->magFilter != chroma_filter) {
12776 skip |= LogError(
12777 device, "VUID-VkSamplerCreateInfo-minFilter-01645",
12778 "VkCreateSampler: VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT is "
12779 "not supported for SamplerYcbcrConversion's (%u) format %s so minFilter (%s) needs to be equal to "
12780 "chromaFilter (%s)",
12781 report_data->FormatHandle(sampler_ycbcr_conversion).c_str(), string_VkFormat(ycbcr_state->format),
12782 string_VkFilter(pCreateInfo->minFilter), string_VkFilter(chroma_filter));
12783 }
12784 }
12785 // At this point there is a known sampler YCbCr conversion enabled
12786 const auto *sampler_reduction = lvl_find_in_chain<VkSamplerReductionModeCreateInfo>(pCreateInfo->pNext);
12787 if (sampler_reduction != nullptr) {
12788 if (sampler_reduction->reductionMode != VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE) {
12789 skip |= LogError(device, "VUID-VkSamplerCreateInfo-None-01647",
12790 "A sampler YCbCr Conversion is being used creating this sampler so the sampler reduction mode "
12791 "must be VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE.");
12792 }
12793 }
12794 }
12795 }
12796
12797 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
12798 pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
12799 if (!enabled_features.custom_border_color_features.customBorderColors) {
12800 skip |=
12801 LogError(device, "VUID-VkSamplerCreateInfo-customBorderColors-04085",
12802 "vkCreateSampler(): A custom border color was specified without enabling the custom border color feature");
12803 }
12804 auto custom_create_info = lvl_find_in_chain<VkSamplerCustomBorderColorCreateInfoEXT>(pCreateInfo->pNext);
12805 if (custom_create_info) {
12806 if (custom_create_info->format == VK_FORMAT_UNDEFINED &&
12807 !enabled_features.custom_border_color_features.customBorderColorWithoutFormat) {
12808 skip |= LogError(device, "VUID-VkSamplerCustomBorderColorCreateInfoEXT-format-04014",
12809 "vkCreateSampler(): A custom border color was specified as VK_FORMAT_UNDEFINED without the "
12810 "customBorderColorWithoutFormat feature being enabled");
12811 }
12812 }
12813 if (custom_border_color_sampler_count >= phys_dev_ext_props.custom_border_color_props.maxCustomBorderColorSamplers) {
12814 skip |= LogError(device, "VUID-VkSamplerCreateInfo-None-04012",
12815 "vkCreateSampler(): Creating a sampler with a custom border color will exceed the "
12816 "maxCustomBorderColorSamplers limit of %d",
12817 phys_dev_ext_props.custom_border_color_props.maxCustomBorderColorSamplers);
12818 }
12819 }
12820
12821 return skip;
12822 }
12823
ValidateGetBufferDeviceAddress(VkDevice device,const VkBufferDeviceAddressInfoKHR * pInfo,const char * apiName) const12824 bool CoreChecks::ValidateGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfoKHR *pInfo,
12825 const char *apiName) const {
12826 bool skip = false;
12827
12828 if (!enabled_features.core12.bufferDeviceAddress && !enabled_features.buffer_device_address_ext.bufferDeviceAddress) {
12829 skip |= LogError(pInfo->buffer, "VUID-vkGetBufferDeviceAddress-bufferDeviceAddress-03324",
12830 "%s: The bufferDeviceAddress feature must: be enabled.", apiName);
12831 }
12832
12833 if (physical_device_count > 1 && !enabled_features.core12.bufferDeviceAddressMultiDevice &&
12834 !enabled_features.buffer_device_address_ext.bufferDeviceAddressMultiDevice) {
12835 skip |= LogError(pInfo->buffer, "VUID-vkGetBufferDeviceAddress-device-03325",
12836 "%s: If device was created with multiple physical devices, then the "
12837 "bufferDeviceAddressMultiDevice feature must: be enabled.",
12838 apiName);
12839 }
12840
12841 const auto buffer_state = GetBufferState(pInfo->buffer);
12842 if (buffer_state) {
12843 if (!(buffer_state->createInfo.flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT)) {
12844 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, apiName, "VUID-VkBufferDeviceAddressInfo-buffer-02600");
12845 }
12846
12847 skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, true,
12848 "VUID-VkBufferDeviceAddressInfo-buffer-02601", apiName,
12849 "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT");
12850 }
12851
12852 return skip;
12853 }
12854
PreCallValidateGetBufferDeviceAddressEXT(VkDevice device,const VkBufferDeviceAddressInfoEXT * pInfo) const12855 bool CoreChecks::PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT *pInfo) const {
12856 return ValidateGetBufferDeviceAddress(device, (const VkBufferDeviceAddressInfoKHR *)pInfo, "vkGetBufferDeviceAddressEXT");
12857 }
12858
PreCallValidateGetBufferDeviceAddressKHR(VkDevice device,const VkBufferDeviceAddressInfo * pInfo) const12859 bool CoreChecks::PreCallValidateGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo *pInfo) const {
12860 return ValidateGetBufferDeviceAddress(device, (const VkBufferDeviceAddressInfoKHR *)pInfo, "vkGetBufferDeviceAddressKHR");
12861 }
12862
PreCallValidateGetBufferDeviceAddress(VkDevice device,const VkBufferDeviceAddressInfo * pInfo) const12863 bool CoreChecks::PreCallValidateGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo) const {
12864 return ValidateGetBufferDeviceAddress(device, (const VkBufferDeviceAddressInfoKHR *)pInfo, "vkGetBufferDeviceAddress");
12865 }
12866
ValidateGetBufferOpaqueCaptureAddress(VkDevice device,const VkBufferDeviceAddressInfoKHR * pInfo,const char * apiName) const12867 bool CoreChecks::ValidateGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfoKHR *pInfo,
12868 const char *apiName) const {
12869 bool skip = false;
12870
12871 if (!enabled_features.core12.bufferDeviceAddress) {
12872 skip |= LogError(pInfo->buffer, "VUID-vkGetBufferOpaqueCaptureAddress-None-03326",
12873 "%s(): The bufferDeviceAddress feature must: be enabled.", apiName);
12874 }
12875
12876 if (physical_device_count > 1 && !enabled_features.core12.bufferDeviceAddressMultiDevice) {
12877 skip |= LogError(pInfo->buffer, "VUID-vkGetBufferOpaqueCaptureAddress-device-03327",
12878 "%s(): If device was created with multiple physical devices, then the "
12879 "bufferDeviceAddressMultiDevice feature must: be enabled.",
12880 apiName);
12881 }
12882 return skip;
12883 }
12884
PreCallValidateGetBufferOpaqueCaptureAddressKHR(VkDevice device,const VkBufferDeviceAddressInfoKHR * pInfo) const12885 bool CoreChecks::PreCallValidateGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR *pInfo) const {
12886 return ValidateGetBufferOpaqueCaptureAddress(device, static_cast<const VkBufferDeviceAddressInfoKHR *>(pInfo),
12887 "vkGetBufferOpaqueCaptureAddressKHR");
12888 }
12889
PreCallValidateGetBufferOpaqueCaptureAddress(VkDevice device,const VkBufferDeviceAddressInfo * pInfo) const12890 bool CoreChecks::PreCallValidateGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo) const {
12891 return ValidateGetBufferOpaqueCaptureAddress(device, static_cast<const VkBufferDeviceAddressInfoKHR *>(pInfo),
12892 "vkGetBufferOpaqueCaptureAddress");
12893 }
12894
ValidateGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,const VkDeviceMemoryOpaqueCaptureAddressInfoKHR * pInfo,const char * apiName) const12895 bool CoreChecks::ValidateGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,
12896 const VkDeviceMemoryOpaqueCaptureAddressInfoKHR *pInfo,
12897 const char *apiName) const {
12898 bool skip = false;
12899
12900 if (!enabled_features.core12.bufferDeviceAddress) {
12901 skip |= LogError(pInfo->memory, "VUID-vkGetDeviceMemoryOpaqueCaptureAddress-None-03334",
12902 "%s(): The bufferDeviceAddress feature must: be enabled.", apiName);
12903 }
12904
12905 if (physical_device_count > 1 && !enabled_features.core12.bufferDeviceAddressMultiDevice) {
12906 skip |= LogError(pInfo->memory, "VUID-vkGetDeviceMemoryOpaqueCaptureAddress-device-03335",
12907 "%s(): If device was created with multiple physical devices, then the "
12908 "bufferDeviceAddressMultiDevice feature must: be enabled.",
12909 apiName);
12910 }
12911
12912 const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(pInfo->memory);
12913 if (mem_info) {
12914 auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(mem_info->alloc_info.pNext);
12915 if (!chained_flags_struct || !(chained_flags_struct->flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR)) {
12916 skip |= LogError(pInfo->memory, "VUID-VkDeviceMemoryOpaqueCaptureAddressInfo-memory-03336",
12917 "%s(): memory must have been allocated with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT.", apiName);
12918 }
12919 }
12920
12921 return skip;
12922 }
12923
PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device,const VkDeviceMemoryOpaqueCaptureAddressInfoKHR * pInfo) const12924 bool CoreChecks::PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(
12925 VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR *pInfo) const {
12926 return ValidateGetDeviceMemoryOpaqueCaptureAddress(
12927 device, static_cast<const VkDeviceMemoryOpaqueCaptureAddressInfoKHR *>(pInfo), "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
12928 }
12929
PreCallValidateGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo) const12930 bool CoreChecks::PreCallValidateGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,
12931 const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo) const {
12932 return ValidateGetDeviceMemoryOpaqueCaptureAddress(
12933 device, static_cast<const VkDeviceMemoryOpaqueCaptureAddressInfoKHR *>(pInfo), "vkGetDeviceMemoryOpaqueCaptureAddress");
12934 }
12935
ValidateQueryRange(VkDevice device,VkQueryPool queryPool,uint32_t totalCount,uint32_t firstQuery,uint32_t queryCount,const char * vuid_badfirst,const char * vuid_badrange,const char * apiName) const12936 bool CoreChecks::ValidateQueryRange(VkDevice device, VkQueryPool queryPool, uint32_t totalCount, uint32_t firstQuery,
12937 uint32_t queryCount, const char *vuid_badfirst, const char *vuid_badrange,
12938 const char *apiName) const {
12939 bool skip = false;
12940
12941 if (firstQuery >= totalCount) {
12942 skip |= LogError(device, vuid_badfirst,
12943 "%s(): firstQuery (%" PRIu32 ") greater than or equal to query pool count (%" PRIu32 ") for %s", apiName,
12944 firstQuery, totalCount, report_data->FormatHandle(queryPool).c_str());
12945 }
12946
12947 if ((firstQuery + queryCount) > totalCount) {
12948 skip |= LogError(device, vuid_badrange,
12949 "%s(): Query range [%" PRIu32 ", %" PRIu32 ") goes beyond query pool count (%" PRIu32 ") for %s", apiName,
12950 firstQuery, firstQuery + queryCount, totalCount, report_data->FormatHandle(queryPool).c_str());
12951 }
12952
12953 return skip;
12954 }
12955
ValidateResetQueryPool(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,const char * apiName) const12956 bool CoreChecks::ValidateResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
12957 const char *apiName) const {
12958 if (disabled[query_validation]) return false;
12959
12960 bool skip = false;
12961
12962 if (!enabled_features.core12.hostQueryReset) {
12963 skip |= LogError(device, "VUID-vkResetQueryPool-None-02665", "%s(): Host query reset not enabled for device", apiName);
12964 }
12965
12966 const auto query_pool_state = GetQueryPoolState(queryPool);
12967 if (query_pool_state) {
12968 skip |= ValidateQueryRange(device, queryPool, query_pool_state->createInfo.queryCount, firstQuery, queryCount,
12969 "VUID-vkResetQueryPool-firstQuery-02666", "VUID-vkResetQueryPool-firstQuery-02667", apiName);
12970 }
12971
12972 return skip;
12973 }
12974
PreCallValidateResetQueryPoolEXT(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount) const12975 bool CoreChecks::PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
12976 uint32_t queryCount) const {
12977 return ValidateResetQueryPool(device, queryPool, firstQuery, queryCount, "vkResetQueryPoolEXT");
12978 }
12979
PreCallValidateResetQueryPool(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount) const12980 bool CoreChecks::PreCallValidateResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
12981 uint32_t queryCount) const {
12982 return ValidateResetQueryPool(device, queryPool, firstQuery, queryCount, "vkResetQueryPool");
12983 }
12984
CoreLayerCreateValidationCacheEXT(VkDevice device,const VkValidationCacheCreateInfoEXT * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkValidationCacheEXT * pValidationCache)12985 VkResult CoreChecks::CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT *pCreateInfo,
12986 const VkAllocationCallbacks *pAllocator,
12987 VkValidationCacheEXT *pValidationCache) {
12988 *pValidationCache = ValidationCache::Create(pCreateInfo);
12989 return *pValidationCache ? VK_SUCCESS : VK_ERROR_INITIALIZATION_FAILED;
12990 }
12991
CoreLayerDestroyValidationCacheEXT(VkDevice device,VkValidationCacheEXT validationCache,const VkAllocationCallbacks * pAllocator)12992 void CoreChecks::CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache,
12993 const VkAllocationCallbacks *pAllocator) {
12994 delete CastFromHandle<ValidationCache *>(validationCache);
12995 }
12996
CoreLayerGetValidationCacheDataEXT(VkDevice device,VkValidationCacheEXT validationCache,size_t * pDataSize,void * pData)12997 VkResult CoreChecks::CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t *pDataSize,
12998 void *pData) {
12999 size_t inSize = *pDataSize;
13000 CastFromHandle<ValidationCache *>(validationCache)->Write(pDataSize, pData);
13001 return (pData && *pDataSize != inSize) ? VK_INCOMPLETE : VK_SUCCESS;
13002 }
13003
CoreLayerMergeValidationCachesEXT(VkDevice device,VkValidationCacheEXT dstCache,uint32_t srcCacheCount,const VkValidationCacheEXT * pSrcCaches)13004 VkResult CoreChecks::CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount,
13005 const VkValidationCacheEXT *pSrcCaches) {
13006 bool skip = false;
13007 auto dst = CastFromHandle<ValidationCache *>(dstCache);
13008 VkResult result = VK_SUCCESS;
13009 for (uint32_t i = 0; i < srcCacheCount; i++) {
13010 auto src = CastFromHandle<const ValidationCache *>(pSrcCaches[i]);
13011 if (src == dst) {
13012 skip |= LogError(device, "VUID-vkMergeValidationCachesEXT-dstCache-01536",
13013 "vkMergeValidationCachesEXT: dstCache (0x%" PRIx64 ") must not appear in pSrcCaches array.",
13014 HandleToUint64(dstCache));
13015 result = VK_ERROR_VALIDATION_FAILED_EXT;
13016 }
13017 if (!skip) {
13018 dst->Merge(src);
13019 }
13020 }
13021
13022 return result;
13023 }
13024
ValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer,uint32_t deviceMask,const char * func_name) const13025 bool CoreChecks::ValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask, const char *func_name) const {
13026 bool skip = false;
13027 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13028 skip |= ValidateCmd(cb_state, CMD_SETDEVICEMASK, func_name);
13029 skip |= ValidateDeviceMaskToPhysicalDeviceCount(deviceMask, commandBuffer, "VUID-vkCmdSetDeviceMask-deviceMask-00108");
13030 skip |= ValidateDeviceMaskToZero(deviceMask, commandBuffer, "VUID-vkCmdSetDeviceMask-deviceMask-00109");
13031 skip |= ValidateDeviceMaskToCommandBuffer(cb_state, deviceMask, commandBuffer, "VUID-vkCmdSetDeviceMask-deviceMask-00110");
13032 if (cb_state->activeRenderPass) {
13033 skip |= ValidateDeviceMaskToRenderPass(cb_state, deviceMask, "VUID-vkCmdSetDeviceMask-deviceMask-00111");
13034 }
13035 return skip;
13036 }
13037
PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer,uint32_t deviceMask) const13038 bool CoreChecks::PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const {
13039 return ValidateCmdSetDeviceMask(commandBuffer, deviceMask, "vkSetDeviceMask()");
13040 }
13041
PreCallValidateCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer,uint32_t deviceMask) const13042 bool CoreChecks::PreCallValidateCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const {
13043 return ValidateCmdSetDeviceMask(commandBuffer, deviceMask, "vkSetDeviceMaskKHR()");
13044 }
13045
ValidateGetSemaphoreCounterValue(VkDevice device,VkSemaphore semaphore,uint64_t * pValue,const char * apiName) const13046 bool CoreChecks::ValidateGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
13047 const char *apiName) const {
13048 bool skip = false;
13049 const auto *pSemaphore = GetSemaphoreState(semaphore);
13050 if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
13051 skip |= LogError(semaphore, "VUID-vkGetSemaphoreCounterValue-semaphore-03255",
13052 "%s(): semaphore %s must be of VK_SEMAPHORE_TYPE_TIMELINE type", apiName,
13053 report_data->FormatHandle(semaphore).c_str());
13054 }
13055 return skip;
13056 }
13057
PreCallValidateGetSemaphoreCounterValueKHR(VkDevice device,VkSemaphore semaphore,uint64_t * pValue) const13058 bool CoreChecks::PreCallValidateGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue) const {
13059 return ValidateGetSemaphoreCounterValue(device, semaphore, pValue, "vkGetSemaphoreCounterValueKHR");
13060 }
PreCallValidateGetSemaphoreCounterValue(VkDevice device,VkSemaphore semaphore,uint64_t * pValue) const13061 bool CoreChecks::PreCallValidateGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue) const {
13062 return ValidateGetSemaphoreCounterValue(device, semaphore, pValue, "vkGetSemaphoreCounterValue");
13063 }
ValidateQueryPoolStride(const std::string & vuid_not_64,const std::string & vuid_64,const VkDeviceSize stride,const char * parameter_name,const uint64_t parameter_value,const VkQueryResultFlags flags) const13064 bool CoreChecks::ValidateQueryPoolStride(const std::string &vuid_not_64, const std::string &vuid_64, const VkDeviceSize stride,
13065 const char *parameter_name, const uint64_t parameter_value,
13066 const VkQueryResultFlags flags) const {
13067 bool skip = false;
13068 if (flags & VK_QUERY_RESULT_64_BIT) {
13069 static const int condition_multiples = 0b0111;
13070 if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
13071 skip |= LogError(device, vuid_64, "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name,
13072 parameter_value);
13073 }
13074 } else {
13075 static const int condition_multiples = 0b0011;
13076 if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
13077 skip |= LogError(device, vuid_not_64, "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name,
13078 parameter_value);
13079 }
13080 }
13081 return skip;
13082 }
13083
ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer,const std::string & vuid,const uint32_t stride,const char * struct_name,const uint32_t struct_size) const13084 bool CoreChecks::ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
13085 const char *struct_name, const uint32_t struct_size) const {
13086 bool skip = false;
13087 static const int condition_multiples = 0b0011;
13088 if ((stride & condition_multiples) || (stride < struct_size)) {
13089 skip |= LogError(commandBuffer, vuid, "stride %d is invalid or less than sizeof(%s) %d.", stride, struct_name, struct_size);
13090 }
13091 return skip;
13092 }
13093
ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer,const std::string & vuid,const uint32_t stride,const char * struct_name,const uint32_t struct_size,const uint32_t drawCount,const VkDeviceSize offset,const BUFFER_STATE * buffer_state) const13094 bool CoreChecks::ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
13095 const char *struct_name, const uint32_t struct_size, const uint32_t drawCount,
13096 const VkDeviceSize offset, const BUFFER_STATE *buffer_state) const {
13097 bool skip = false;
13098 uint64_t validation_value = stride * (drawCount - 1) + offset + struct_size;
13099 if (validation_value > buffer_state->createInfo.size) {
13100 skip |= LogError(commandBuffer, vuid,
13101 "stride[%d] * (drawCount[%d] - 1) + offset[%" PRIx64 "] + sizeof(%s)[%d] = %" PRIx64
13102 " is greater than the size[%" PRIx64 "] of %s.",
13103 stride, drawCount, offset, struct_name, struct_size, validation_value, buffer_state->createInfo.size,
13104 report_data->FormatHandle(buffer_state->buffer).c_str());
13105 }
13106 return skip;
13107 }
13108
PreCallValidateReleaseProfilingLockKHR(VkDevice device) const13109 bool CoreChecks::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
13110 bool skip = false;
13111
13112 if (!performance_lock_acquired) {
13113 skip |= LogError(device, "VUID-vkReleaseProfilingLockKHR-device-03235",
13114 "vkReleaseProfilingLockKHR(): The profiling lock of device must have been held via a previous successful "
13115 "call to vkAcquireProfilingLockKHR.");
13116 }
13117
13118 return skip;
13119 }
13120
PreCallValidateCmdSetCheckpointNV(VkCommandBuffer commandBuffer,const void * pCheckpointMarker) const13121 bool CoreChecks::PreCallValidateCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void *pCheckpointMarker) const {
13122 {
13123 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13124 assert(cb_state);
13125 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetCheckpointNV()",
13126 VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
13127 "VUID-vkCmdSetCheckpointNV-commandBuffer-cmdpool");
13128 skip |= ValidateCmd(cb_state, CMD_SETCHECKPOINTNV, "vkCmdSetCheckpointNV()");
13129 return skip;
13130 }
13131 }
13132
PreCallValidateWriteAccelerationStructuresPropertiesKHR(VkDevice device,uint32_t accelerationStructureCount,const VkAccelerationStructureKHR * pAccelerationStructures,VkQueryType queryType,size_t dataSize,void * pData,size_t stride) const13133 bool CoreChecks::PreCallValidateWriteAccelerationStructuresPropertiesKHR(VkDevice device, uint32_t accelerationStructureCount,
13134 const VkAccelerationStructureKHR *pAccelerationStructures,
13135 VkQueryType queryType, size_t dataSize, void *pData,
13136 size_t stride) const {
13137 bool skip = false;
13138 for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
13139 const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pAccelerationStructures[i]);
13140 const auto &as_info = as_state->create_infoKHR;
13141 if (queryType == VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR) {
13142 if (!(as_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR)) {
13143 skip |= LogError(device, "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431",
13144 "vkWriteAccelerationStructuresPropertiesKHR: All acceleration structures (%s) in "
13145 "accelerationStructures must have been built with"
13146 "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is "
13147 "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR.",
13148 report_data->FormatHandle(as_state->acceleration_structure).c_str());
13149 }
13150 }
13151 }
13152 return skip;
13153 }
13154
PreCallValidateCmdWriteAccelerationStructuresPropertiesKHR(VkCommandBuffer commandBuffer,uint32_t accelerationStructureCount,const VkAccelerationStructureKHR * pAccelerationStructures,VkQueryType queryType,VkQueryPool queryPool,uint32_t firstQuery) const13155 bool CoreChecks::PreCallValidateCmdWriteAccelerationStructuresPropertiesKHR(
13156 VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR *pAccelerationStructures,
13157 VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const {
13158 bool skip = false;
13159 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13160 skip |= ValidateCmdQueueFlags(cb_state, "vkCmdWriteAccelerationStructuresPropertiesKHR()", VK_QUEUE_COMPUTE_BIT,
13161 "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-cmdpool");
13162 skip |= ValidateCmd(cb_state, CMD_WRITEACCELERATIONSTRUCTURESPROPERTIESKHR, "vkCmdWriteAccelerationStructuresPropertiesKHR()");
13163 // This command must only be called outside of a render pass instance
13164 skip |= InsideRenderPass(cb_state, "vkCmdWriteAccelerationStructuresPropertiesKHR()",
13165 "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-renderpass");
13166 const auto *query_pool_state = GetQueryPoolState(queryPool);
13167 const auto &query_pool_ci = query_pool_state->createInfo;
13168 if (query_pool_ci.queryType != queryType) {
13169 skip |= LogError(
13170 device, "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02493",
13171 "vkCmdWriteAccelerationStructuresPropertiesKHR: queryPool must have been created with a queryType matching queryType.");
13172 }
13173 for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
13174 if (queryType == VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR) {
13175 const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pAccelerationStructures[i]);
13176 if (!(as_state->create_infoKHR.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR)) {
13177 skip |=
13178 LogError(device, "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431",
13179 "vkCmdWriteAccelerationStructuresPropertiesKHR: All acceleration structures in accelerationStructures "
13180 "must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is "
13181 "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR.");
13182 }
13183 }
13184 }
13185 return skip;
13186 }
13187
PreCallValidateGetRayTracingShaderGroupHandlesKHR(VkDevice device,VkPipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData) const13188 bool CoreChecks::PreCallValidateGetRayTracingShaderGroupHandlesKHR(VkDevice device, VkPipeline pipeline, uint32_t firstGroup,
13189 uint32_t groupCount, size_t dataSize, void *pData) const {
13190 bool skip = false;
13191 const PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
13192 if (pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_LIBRARY_BIT_KHR) {
13193 skip |= LogError(
13194 device, "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-03482",
13195 "vkGetRayTracingShaderGroupHandlesKHR: pipeline must have not been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR.");
13196 }
13197 if (dataSize < phys_dev_ext_props.ray_tracing_propsKHR.shaderGroupHandleSize) {
13198 skip |= LogError(device, "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-02420",
13199 "vkGetRayTracingShaderGroupHandlesKHR: dataSize (%zu) must be at least "
13200 "VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize.",
13201 dataSize);
13202 }
13203 if (firstGroup >= pipeline_state->raytracingPipelineCI.groupCount) {
13204 skip |=
13205 LogError(device, "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-04050",
13206 "vkGetRayTracingShaderGroupHandlesKHR: firstGroup must be less than the number of shader groups in pipeline.");
13207 }
13208 if ((firstGroup + groupCount) > pipeline_state->raytracingPipelineCI.groupCount) {
13209 skip |= LogError(
13210 device, "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-02419",
13211 "vkGetRayTracingShaderGroupHandlesKHR: The sum of firstGroup and groupCount must be less than or equal the number "
13212 "of shader groups in pipeline.");
13213 }
13214 return skip;
13215 }
PreCallValidateGetRayTracingCaptureReplayShaderGroupHandlesKHR(VkDevice device,VkPipeline pipeline,uint32_t firstGroup,uint32_t groupCount,size_t dataSize,void * pData) const13216 bool CoreChecks::PreCallValidateGetRayTracingCaptureReplayShaderGroupHandlesKHR(VkDevice device, VkPipeline pipeline,
13217 uint32_t firstGroup, uint32_t groupCount,
13218 size_t dataSize, void *pData) const {
13219 bool skip = false;
13220 if (dataSize < phys_dev_ext_props.ray_tracing_propsKHR.shaderGroupHandleCaptureReplaySize) {
13221 skip |= LogError(device, "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-03484",
13222 "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR: dataSize (%zu) must be at least "
13223 "VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleCaptureReplaySize.",
13224 dataSize);
13225 }
13226 const PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
13227 if (firstGroup >= pipeline_state->raytracingPipelineCI.groupCount) {
13228 skip |= LogError(device, "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-04051",
13229 "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR: firstGroup must be less than the number of shader "
13230 "groups in pipeline.");
13231 }
13232 if ((firstGroup + groupCount) > pipeline_state->raytracingPipelineCI.groupCount) {
13233 skip |= LogError(device, "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-03483",
13234 "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR: The sum of firstGroup and groupCount must be less "
13235 "than or equal to the number of shader groups in pipeline.");
13236 }
13237 return skip;
13238 }
13239
PreCallValidateCmdBuildAccelerationStructureIndirectKHR(VkCommandBuffer commandBuffer,const VkAccelerationStructureBuildGeometryInfoKHR * pInfo,VkBuffer indirectBuffer,VkDeviceSize indirectOffset,uint32_t indirectStride) const13240 bool CoreChecks::PreCallValidateCmdBuildAccelerationStructureIndirectKHR(VkCommandBuffer commandBuffer,
13241 const VkAccelerationStructureBuildGeometryInfoKHR *pInfo,
13242 VkBuffer indirectBuffer, VkDeviceSize indirectOffset,
13243 uint32_t indirectStride) const {
13244 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13245 assert(cb_state);
13246 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBuildAccelerationStructureIndirectKHR()", VK_QUEUE_COMPUTE_BIT,
13247 "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-cmdpool");
13248 skip |= ValidateCmd(cb_state, CMD_BUILDACCELERATIONSTRUCTUREINDIRECTKHR, "vkCmdBuildAccelerationStructureIndirectKHR()");
13249 skip |= InsideRenderPass(cb_state, "vkCmdBuildAccelerationStructureIndirectKHR()",
13250 "VUID-vkCmdBuildAccelerationStructureIndirectKHR-renderpass");
13251 return skip;
13252 }
13253
ValidateCopyAccelerationStructureInfoKHR(const VkCopyAccelerationStructureInfoKHR * pInfo,const char * api_name) const13254 bool CoreChecks::ValidateCopyAccelerationStructureInfoKHR(const VkCopyAccelerationStructureInfoKHR *pInfo,
13255 const char *api_name) const {
13256 bool skip = false;
13257 if (pInfo->mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR) {
13258 const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
13259 if (!(src_as_state->create_infoKHR.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR)) {
13260 skip |= LogError(device, "VUID-VkCopyAccelerationStructureInfoKHR-src-03411",
13261 "(%s): src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR"
13262 "if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR.",
13263 api_name);
13264 }
13265 }
13266 return skip;
13267 }
PreCallValidateCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,const VkCopyAccelerationStructureInfoKHR * pInfo) const13268 bool CoreChecks::PreCallValidateCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
13269 const VkCopyAccelerationStructureInfoKHR *pInfo) const {
13270 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13271 assert(cb_state);
13272 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyAccelerationStructureKHR()", VK_QUEUE_COMPUTE_BIT,
13273 "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-cmdpool");
13274 skip |= ValidateCmd(cb_state, CMD_COPYACCELERATIONSTRUCTUREKHR, "vkCmdCopyAccelerationStructureKHR()");
13275 skip |= InsideRenderPass(cb_state, "vkCmdCopyAccelerationStructureKHR()", "VUID-vkCmdCopyAccelerationStructureKHR-renderpass");
13276 skip |= ValidateCopyAccelerationStructureInfoKHR(pInfo, "vkCmdCopyAccelerationStructureKHR");
13277 return false;
13278 }
13279
PreCallValidateCopyAccelerationStructureKHR(VkDevice device,const VkCopyAccelerationStructureInfoKHR * pInfo) const13280 bool CoreChecks::PreCallValidateCopyAccelerationStructureKHR(VkDevice device,
13281 const VkCopyAccelerationStructureInfoKHR *pInfo) const {
13282 bool skip = false;
13283 skip |= ValidateCopyAccelerationStructureInfoKHR(pInfo, "vkCopyAccelerationStructureKHR");
13284 return skip;
13285 }
PreCallValidateCmdCopyAccelerationStructureToMemoryKHR(VkCommandBuffer commandBuffer,const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo) const13286 bool CoreChecks::PreCallValidateCmdCopyAccelerationStructureToMemoryKHR(
13287 VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR *pInfo) const {
13288 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13289 assert(cb_state);
13290 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyAccelerationStructureToMemoryKHR()", VK_QUEUE_COMPUTE_BIT,
13291 "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-cmdpool");
13292 skip |= ValidateCmd(cb_state, CMD_COPYACCELERATIONSTRUCTURETOMEMORYKHR, "vkCmdCopyAccelerationStructureToMemoryKHR()");
13293 skip |= InsideRenderPass(cb_state, "vkCmdCopyAccelerationStructureToMemoryKHR()",
13294 "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-renderpass");
13295 return skip;
13296 }
13297
PreCallValidateCmdCopyMemoryToAccelerationStructureKHR(VkCommandBuffer commandBuffer,const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo) const13298 bool CoreChecks::PreCallValidateCmdCopyMemoryToAccelerationStructureKHR(
13299 VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR *pInfo) const {
13300 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13301 assert(cb_state);
13302 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyMemoryToAccelerationStructureKHR()", VK_QUEUE_COMPUTE_BIT,
13303 "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-cmdpool");
13304 skip |= ValidateCmd(cb_state, CMD_COPYMEMORYTOACCELERATIONSTRUCTUREKHR, "vkCmdCopyMemoryToAccelerationStructureKHR()");
13305 // This command must only be called outside of a render pass instance
13306 skip |= InsideRenderPass(cb_state, "vkCmdCopyMemoryToAccelerationStructureKHR()",
13307 "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-renderpass");
13308 return skip;
13309 }
13310
PreCallValidateCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets,const VkDeviceSize * pSizes) const13311 bool CoreChecks::PreCallValidateCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
13312 uint32_t bindingCount, const VkBuffer *pBuffers,
13313 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes) const {
13314 bool skip = false;
13315 char const *const cmd_name = "CmdBindTransformFeedbackBuffersEXT";
13316 if (!enabled_features.transform_feedback_features.transformFeedback) {
13317 skip |= LogError(commandBuffer, "VUID-vkCmdBindTransformFeedbackBuffersEXT-transformFeedback-02355",
13318 "%s: transformFeedback feature is not enabled.", cmd_name);
13319 }
13320
13321 {
13322 auto const cb_state = GetCBState(commandBuffer);
13323 if (cb_state->transform_feedback_active) {
13324 skip |= LogError(commandBuffer, "VUID-vkCmdBindTransformFeedbackBuffersEXT-None-02365",
13325 "%s: transform feedback is active.", cmd_name);
13326 }
13327 }
13328
13329 for (uint32_t i = 0; i < bindingCount; ++i) {
13330 auto const buffer_state = GetBufferState(pBuffers[i]);
13331 assert(buffer_state != nullptr);
13332
13333 if (pOffsets[i] >= buffer_state->createInfo.size) {
13334 skip |= LogError(buffer_state->buffer, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02358",
13335 "%s: pOffset[%" PRIu32 "](0x%" PRIxLEAST64
13336 ") is greater than or equal to the size of pBuffers[%" PRIu32 "](0x%" PRIxLEAST64 ").",
13337 cmd_name, i, pOffsets[i], i, buffer_state->createInfo.size);
13338 }
13339
13340 if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT) == 0) {
13341 skip |= LogError(buffer_state->buffer, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02360",
13342 "%s: pBuffers[%" PRIu32 "] (0x%" PRIxLEAST64
13343 ") was not created with the VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT flag.",
13344 cmd_name, i, pBuffers[i]);
13345 }
13346
13347 // pSizes is optional and may be nullptr. Also might be VK_WHOLE_SIZE which VU don't apply
13348 if ((pSizes != nullptr) && (pSizes[i] != VK_WHOLE_SIZE)) {
13349 // only report one to prevent redundant error if the size is larger since adding offset will be as well
13350 if (pSizes[i] > buffer_state->createInfo.size) {
13351 skip |= LogError(buffer_state->buffer, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-02362",
13352 "%s: pSizes[%" PRIu32 "](0x%" PRIxLEAST64 ") is greater than the size of pBuffers[%" PRIu32
13353 "](0x%" PRIxLEAST64 ").",
13354 cmd_name, i, pSizes[i], i, buffer_state->createInfo.size);
13355 } else if (pOffsets[i] + pSizes[i] > buffer_state->createInfo.size) {
13356 skip |= LogError(buffer_state->buffer, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02363",
13357 "%s: The sum of pOffsets[%" PRIu32 "](Ox%" PRIxLEAST64 ") and pSizes[%" PRIu32 "](0x%" PRIxLEAST64
13358 ") is greater than the size of pBuffers[%" PRIu32 "](0x%" PRIxLEAST64 ").",
13359 cmd_name, i, pOffsets[i], i, pSizes[i], i, buffer_state->createInfo.size);
13360 }
13361 }
13362
13363 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, cmd_name, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02364");
13364 }
13365
13366 return skip;
13367 }
13368
PreCallValidateCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer,uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VkBuffer * pCounterBuffers,const VkDeviceSize * pCounterBufferOffsets) const13369 bool CoreChecks::PreCallValidateCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
13370 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
13371 const VkDeviceSize *pCounterBufferOffsets) const {
13372 bool skip = false;
13373 char const *const cmd_name = "CmdBeginTransformFeedbackEXT";
13374 if (!enabled_features.transform_feedback_features.transformFeedback) {
13375 skip |= LogError(commandBuffer, "VUID-vkCmdBeginTransformFeedbackEXT-transformFeedback-02366",
13376 "%s: transformFeedback feature is not enabled.", cmd_name);
13377 }
13378
13379 {
13380 auto const cb_state = GetCBState(commandBuffer);
13381 if (cb_state->transform_feedback_active) {
13382 skip |= LogError(commandBuffer, "VUID-vkCmdBeginTransformFeedbackEXT-None-02367", "%s: transform feedback is active.",
13383 cmd_name);
13384 }
13385 }
13386
13387 // pCounterBuffers and pCounterBufferOffsets are optional and may be nullptr. Additionaly, pCounterBufferOffsets must be nullptr
13388 // if pCounterBuffers is nullptr.
13389 if (pCounterBuffers == nullptr) {
13390 if (pCounterBufferOffsets != nullptr) {
13391 skip |= LogError(commandBuffer, "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffer-02371",
13392 "%s: pCounterBuffers is NULL and pCounterBufferOffsets is not NULL.", cmd_name);
13393 }
13394 } else {
13395 for (uint32_t i = 0; i < counterBufferCount; ++i) {
13396 if (pCounterBuffers[i] != VK_NULL_HANDLE) {
13397 auto const buffer_state = GetBufferState(pCounterBuffers[i]);
13398 assert(buffer_state != nullptr);
13399
13400 if (pCounterBufferOffsets != nullptr && pCounterBufferOffsets[i] + 4 > buffer_state->createInfo.size) {
13401 skip |=
13402 LogError(buffer_state->buffer, "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-02370",
13403 "%s: pCounterBuffers[%" PRIu32 "](0x%" PRIxLEAST64
13404 ") is not large enough to hold 4 bytes at pCounterBufferOffsets[%" PRIu32 "](0x%" PRIxLEAST64 ").",
13405 cmd_name, i, pCounterBuffers[i], i, pCounterBufferOffsets[i]);
13406 }
13407
13408 if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT) == 0) {
13409 skip |= LogError(buffer_state->buffer, "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffers-02372",
13410 "%s: pCounterBuffers[%" PRIu32 "] (0x%" PRIxLEAST64
13411 ") was not created with the VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT flag.",
13412 cmd_name, i, pCounterBuffers[i]);
13413 }
13414 }
13415 }
13416 }
13417
13418 return skip;
13419 }
13420
PreCallValidateCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer,uint32_t firstCounterBuffer,uint32_t counterBufferCount,const VkBuffer * pCounterBuffers,const VkDeviceSize * pCounterBufferOffsets) const13421 bool CoreChecks::PreCallValidateCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
13422 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
13423 const VkDeviceSize *pCounterBufferOffsets) const {
13424 bool skip = false;
13425 char const *const cmd_name = "CmdEndTransformFeedbackEXT";
13426 if (!enabled_features.transform_feedback_features.transformFeedback) {
13427 skip |= LogError(commandBuffer, "VUID-vkCmdEndTransformFeedbackEXT-transformFeedback-02374",
13428 "%s: transformFeedback feature is not enabled.", cmd_name);
13429 }
13430
13431 {
13432 auto const cb_state = GetCBState(commandBuffer);
13433 if (!cb_state->transform_feedback_active) {
13434 skip |= LogError(commandBuffer, "VUID-vkCmdEndTransformFeedbackEXT-None-02375", "%s: transform feedback is not active.",
13435 cmd_name);
13436 }
13437 }
13438
13439 // pCounterBuffers and pCounterBufferOffsets are optional and may be nullptr. Additionaly, pCounterBufferOffsets must be nullptr
13440 // if pCounterBuffers is nullptr.
13441 if (pCounterBuffers == nullptr) {
13442 if (pCounterBufferOffsets != nullptr) {
13443 skip |= LogError(commandBuffer, "VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffer-02379",
13444 "%s: pCounterBuffers is NULL and pCounterBufferOffsets is not NULL.", cmd_name);
13445 }
13446 } else {
13447 for (uint32_t i = 0; i < counterBufferCount; ++i) {
13448 if (pCounterBuffers[i] != VK_NULL_HANDLE) {
13449 auto const buffer_state = GetBufferState(pCounterBuffers[i]);
13450 assert(buffer_state != nullptr);
13451
13452 if (pCounterBufferOffsets != nullptr && pCounterBufferOffsets[i] + 4 > buffer_state->createInfo.size) {
13453 skip |=
13454 LogError(buffer_state->buffer, "VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-02378",
13455 "%s: pCounterBuffers[%" PRIu32 "](0x%" PRIxLEAST64
13456 ") is not large enough to hold 4 bytes at pCounterBufferOffsets[%" PRIu32 "](0x%" PRIxLEAST64 ").",
13457 cmd_name, i, pCounterBuffers[i], i, pCounterBufferOffsets[i]);
13458 }
13459
13460 if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT) == 0) {
13461 skip |= LogError(buffer_state->buffer, "VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffers-02380",
13462 "%s: pCounterBuffers[%" PRIu32 "] (0x%" PRIxLEAST64
13463 ") was not created with the VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT flag.",
13464 cmd_name, i, pCounterBuffers[i]);
13465 }
13466 }
13467 }
13468 }
13469
13470 return skip;
13471 }
13472
PreCallValidateCmdSetCullModeEXT(VkCommandBuffer commandBuffer,VkCullModeFlags cullMode) const13473 bool CoreChecks::PreCallValidateCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const {
13474 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13475 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetCullModeEXT()", VK_QUEUE_GRAPHICS_BIT,
13476 "VUID-vkCmdSetCullModeEXT-commandBuffer-cmdpool");
13477 skip |= ValidateCmd(cb_state, CMD_SETCULLMODEEXT, "vkCmdSetCullModeEXT()");
13478
13479 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13480 skip |= LogError(commandBuffer, "VUID-vkCmdSetCullModeEXT-None-03384",
13481 "vkCmdSetCullModeEXT: extendedDynamicState feature is not enabled.");
13482 }
13483
13484 return skip;
13485 }
13486
PreCallValidateCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer,VkFrontFace frontFace) const13487 bool CoreChecks::PreCallValidateCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const {
13488 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13489 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetFrontFaceEXT()", VK_QUEUE_GRAPHICS_BIT,
13490 "VUID-vkCmdSetFrontFaceEXT-commandBuffer-cmdpool");
13491 skip |= ValidateCmd(cb_state, CMD_SETFRONTFACEEXT, "vkCmdSetFrontFaceEXT()");
13492
13493 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13494 skip |= LogError(commandBuffer, "VUID-vkCmdSetFrontFaceEXT-None-03383",
13495 "vkCmdSetFrontFaceEXT: extendedDynamicState feature is not enabled.");
13496 }
13497
13498 return skip;
13499 }
13500
PreCallValidateCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,VkPrimitiveTopology primitiveTopology) const13501 bool CoreChecks::PreCallValidateCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
13502 VkPrimitiveTopology primitiveTopology) const {
13503 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13504 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetPrimitiveTopologyEXT()", VK_QUEUE_GRAPHICS_BIT,
13505 "VUID-vkCmdSetPrimitiveTopologyEXT-commandBuffer-cmdpool");
13506 skip |= ValidateCmd(cb_state, CMD_SETPRIMITIVETOPOLOGYEXT, "vkCmdSetPrimitiveTopologyEXT()");
13507
13508 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13509 skip |= LogError(commandBuffer, "VUID-vkCmdSetPrimitiveTopologyEXT-None-03347",
13510 "vkCmdSetPrimitiveTopologyEXT: extendedDynamicState feature is not enabled.");
13511 }
13512
13513 return skip;
13514 }
13515
PreCallValidateCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer,uint32_t viewportCount,const VkViewport * pViewports) const13516 bool CoreChecks::PreCallValidateCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
13517 const VkViewport *pViewports) const
13518
13519 {
13520 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13521 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportWithCountEXT()", VK_QUEUE_GRAPHICS_BIT,
13522 "VUID-vkCmdSetViewportWithCountEXT-commandBuffer-cmdpool");
13523 skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTWITHCOUNTEXT, "vkCmdSetViewportWithCountEXT()");
13524
13525 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13526 skip |= LogError(commandBuffer, "VUID-vkCmdSetViewportWithCountEXT-None-03393",
13527 "vkCmdSetViewportWithCountEXT: extendedDynamicState feature is not enabled.");
13528 }
13529
13530 return skip;
13531 }
13532
PreCallValidateCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer,uint32_t scissorCount,const VkRect2D * pScissors) const13533 bool CoreChecks::PreCallValidateCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
13534 const VkRect2D *pScissors) const {
13535 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13536 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetScissorWithCountEXT()", VK_QUEUE_GRAPHICS_BIT,
13537 "VUID-vkCmdSetScissorWithCountEXT-commandBuffer-cmdpool");
13538 skip |= ValidateCmd(cb_state, CMD_SETSCISSORWITHCOUNTEXT, "vkCmdSetScissorWithCountEXT()");
13539
13540 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13541 skip |= LogError(commandBuffer, "VUID-vkCmdSetScissorWithCountEXT-None-03396",
13542 "vkCmdSetScissorWithCountEXT: extendedDynamicState feature is not enabled.");
13543 }
13544
13545 return skip;
13546 }
13547
PreCallValidateCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets,const VkDeviceSize * pSizes,const VkDeviceSize * pStrides) const13548 bool CoreChecks::PreCallValidateCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
13549 uint32_t bindingCount, const VkBuffer *pBuffers,
13550 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
13551 const VkDeviceSize *pStrides) const {
13552 const auto cb_state = GetCBState(commandBuffer);
13553 assert(cb_state);
13554
13555 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindVertexBuffers2EXT()", VK_QUEUE_GRAPHICS_BIT,
13556 "VUID-vkCmdBindVertexBuffers2EXT-commandBuffer-cmdpool");
13557 skip |= ValidateCmd(cb_state, CMD_BINDVERTEXBUFFERS2EXT, "vkCmdBindVertexBuffers2EXT()");
13558 for (uint32_t i = 0; i < bindingCount; ++i) {
13559 const auto buffer_state = GetBufferState(pBuffers[i]);
13560 if (buffer_state) {
13561 skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, true,
13562 "VUID-vkCmdBindVertexBuffers2EXT-pBuffers-03359", "vkCmdBindVertexBuffers2EXT()",
13563 "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT");
13564 skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindVertexBuffers2EXT()",
13565 "VUID-vkCmdBindVertexBuffers2EXT-pBuffers-03360");
13566 if (pOffsets[i] >= buffer_state->createInfo.size) {
13567 skip |= LogError(buffer_state->buffer, "VUID-vkCmdBindVertexBuffers2EXT-pOffsets-03357",
13568 "vkCmdBindVertexBuffers2EXT() offset (0x%" PRIxLEAST64 ") is beyond the end of the buffer.",
13569 pOffsets[i]);
13570 }
13571 if (pSizes && pOffsets[i] + pSizes[i] > buffer_state->createInfo.size) {
13572 skip |=
13573 LogError(buffer_state->buffer, "VUID-vkCmdBindVertexBuffers2EXT-pSizes-03358",
13574 "vkCmdBindVertexBuffers2EXT() size (0x%" PRIxLEAST64 ") is beyond the end of the buffer.", pSizes[i]);
13575 }
13576 }
13577 }
13578
13579 return skip;
13580 }
13581
PreCallValidateCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer,VkBool32 depthTestEnable) const13582 bool CoreChecks::PreCallValidateCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const {
13583 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13584 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthTestEnableEXT()", VK_QUEUE_GRAPHICS_BIT,
13585 "VUID-vkCmdSetDepthTestEnableEXT-commandBuffer-cmdpool");
13586 skip |= ValidateCmd(cb_state, CMD_SETDEPTHTESTENABLEEXT, "vkCmdSetDepthTestEnableEXT()");
13587
13588 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13589 skip |= LogError(commandBuffer, "VUID-vkCmdSetDepthTestEnableEXT-None-03352",
13590 "vkCmdSetDepthTestEnableEXT: extendedDynamicState feature is not enabled.");
13591 }
13592
13593 return skip;
13594 }
13595
PreCallValidateCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer,VkBool32 depthWriteEnable) const13596 bool CoreChecks::PreCallValidateCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const {
13597 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13598 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthWriteEnableEXT()", VK_QUEUE_GRAPHICS_BIT,
13599 "VUID-vkCmdSetDepthWriteEnableEXT-commandBuffer-cmdpool");
13600 skip |= ValidateCmd(cb_state, CMD_SETDEPTHWRITEENABLEEXT, "vkCmdSetDepthWriteEnableEXT()");
13601
13602 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13603 skip |= LogError(commandBuffer, "VUID-vkCmdSetDepthWriteEnableEXT-None-03354",
13604 "vkCmdSetDepthWriteEnableEXT: extendedDynamicState feature is not enabled.");
13605 }
13606
13607 return skip;
13608 }
13609
PreCallValidateCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer,VkCompareOp depthCompareOp) const13610 bool CoreChecks::PreCallValidateCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) const {
13611 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13612 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthCompareOpEXT()", VK_QUEUE_GRAPHICS_BIT,
13613 "VUID-vkCmdSetDepthCompareOpEXT-commandBuffer-cmdpool");
13614 skip |= ValidateCmd(cb_state, CMD_SETDEPTHCOMPAREOPEXT, "vkCmdSetDepthCompareOpEXT()");
13615
13616 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13617 skip |= LogError(commandBuffer, "VUID-vkCmdSetDepthCompareOpEXT-None-03353",
13618 "vkCmdSetDepthCompareOpEXT: extendedDynamicState feature is not enabled.");
13619 }
13620
13621 return skip;
13622 }
13623
PreCallValidateCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,VkBool32 depthBoundsTestEnable) const13624 bool CoreChecks::PreCallValidateCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
13625 VkBool32 depthBoundsTestEnable) const {
13626 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13627 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBoundsTestEnableEXT()", VK_QUEUE_GRAPHICS_BIT,
13628 "VUID-vkCmdSetDepthBoundsTestEnableEXT-commandBuffer-cmdpool");
13629 skip |= ValidateCmd(cb_state, CMD_SETDEPTHBOUNDSTESTENABLEEXT, "vkCmdSetDepthBoundsTestEnableEXT()");
13630
13631 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13632 skip |= LogError(commandBuffer, "VUID-vkCmdSetDepthBoundsTestEnableEXT-None-03349",
13633 "vkCmdSetDepthBoundsTestEnableEXT: extendedDynamicState feature is not enabled.");
13634 }
13635
13636 return skip;
13637 }
13638
PreCallValidateCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer,VkBool32 stencilTestEnable) const13639 bool CoreChecks::PreCallValidateCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const {
13640 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13641 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilTestEnableEXT()", VK_QUEUE_GRAPHICS_BIT,
13642 "VUID-vkCmdSetStencilTestEnableEXT-commandBuffer-cmdpool");
13643 skip |= ValidateCmd(cb_state, CMD_SETSTENCILTESTENABLEEXT, "vkCmdSetStencilTestEnableEXT()");
13644
13645 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13646 skip |= LogError(commandBuffer, "VUID-vkCmdSetStencilTestEnableEXT-None-03350",
13647 "vkCmdSetStencilTestEnableEXT: extendedDynamicState feature is not enabled.");
13648 }
13649
13650 return skip;
13651 }
13652
PreCallValidateCmdSetStencilOpEXT(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,VkStencilOp failOp,VkStencilOp passOp,VkStencilOp depthFailOp,VkCompareOp compareOp) const13653 bool CoreChecks::PreCallValidateCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp,
13654 VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp) const {
13655 const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
13656 bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilOpEXT()", VK_QUEUE_GRAPHICS_BIT,
13657 "VUID-vkCmdSetStencilOpEXT-commandBuffer-cmdpool");
13658 skip |= ValidateCmd(cb_state, CMD_SETSTENCILOPEXT, "vkCmdSetStencilOpEXT()");
13659
13660 if (!enabled_features.extended_dynamic_state_features.extendedDynamicState) {
13661 skip |= LogError(commandBuffer, "VUID-vkCmdSetStencilOpEXT-None-03351",
13662 "vkCmdSetStencilOpEXT: extendedDynamicState feature is not enabled.");
13663 }
13664
13665 return skip;
13666 }
13667
// Record the validation-layer state for a newly created graphics pipeline.
// Captures, from pCreateInfo and the owning render pass state:
//  * whether the target subpass uses any color or depth/stencil attachment
//    (forwarded to graphicsPipelineCI.initialize),
//  * the input-assembly topology in effect at rasterization,
//  * per-stage shader state (also tracking duplicated stage bits),
//  * vertex-input binding/attribute descriptions, a binding->index map, and
//    per-attribute required alignments,
//  * color blend attachment states,
//  * and a shared reference to the render pass state (rp_state).
void PIPELINE_STATE::initGraphicsPipeline(const ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
                                          std::shared_ptr<const RENDER_PASS_STATE> &&rpstate) {
    reset();
    bool uses_color_attachment = false;
    bool uses_depthstencil_attachment = false;
    // An out-of-range subpass index (reported by other validation) simply leaves both flags false.
    if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
        const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];

        for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
            if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
                uses_color_attachment = true;
                break;
            }
        }

        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
            uses_depthstencil_attachment = true;
        }
    }
    graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
    if (graphicsPipelineCI.pInputAssemblyState) {
        topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology;
    }

    stage_state.resize(pCreateInfo->stageCount);
    for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
        const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
        // A stage bit seen more than once marks the pipeline as having duplicate shaders.
        this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
        this->active_shaders |= pPSSCI->stage;
        state_data->RecordPipelineShaderStage(pPSSCI, this, &stage_state[i]);
    }

    if (graphicsPipelineCI.pVertexInputState) {
        const auto pVICI = graphicsPipelineCI.pVertexInputState;
        if (pVICI->vertexBindingDescriptionCount) {
            this->vertex_binding_descriptions_ = std::vector<VkVertexInputBindingDescription>(
                pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);

            this->vertex_binding_to_index_map_.reserve(pVICI->vertexBindingDescriptionCount);
            for (uint32_t i = 0; i < pVICI->vertexBindingDescriptionCount; ++i) {
                this->vertex_binding_to_index_map_[pVICI->pVertexBindingDescriptions[i].binding] = i;
            }
        }
        if (pVICI->vertexAttributeDescriptionCount) {
            this->vertex_attribute_descriptions_ = std::vector<VkVertexInputAttributeDescription>(
                pVICI->pVertexAttributeDescriptions, pVICI->pVertexAttributeDescriptions + pVICI->vertexAttributeDescriptionCount);
            for (uint32_t i = 0; i < pVICI->vertexAttributeDescriptionCount; ++i) {
                const auto attribute_format = pVICI->pVertexAttributeDescriptions[i].format;
                VkDeviceSize vtx_attrib_req_alignment = FormatElementSize(attribute_format);
                // NOTE(review): appears to reduce the alignment to a single channel for
                // texel-element formats — confirm against FormatElementIsTexel semantics.
                if (FormatElementIsTexel(attribute_format)) {
                    vtx_attrib_req_alignment = SafeDivision(vtx_attrib_req_alignment, FormatChannelCount(attribute_format));
                }
                this->vertex_attribute_alignments_.push_back(vtx_attrib_req_alignment);
            }
        }
    }
    if (graphicsPipelineCI.pColorBlendState) {
        const auto pCBCI = graphicsPipelineCI.pColorBlendState;
        if (pCBCI->attachmentCount) {
            this->attachments =
                std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments, pCBCI->pAttachments + pCBCI->attachmentCount);
        }
    }
    rp_state = rpstate;
}
13733
initComputePipeline(const ValidationStateTracker * state_data,const VkComputePipelineCreateInfo * pCreateInfo)13734 void PIPELINE_STATE::initComputePipeline(const ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo) {
13735 reset();
13736 computePipelineCI.initialize(pCreateInfo);
13737 switch (computePipelineCI.stage.stage) {
13738 case VK_SHADER_STAGE_COMPUTE_BIT:
13739 this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
13740 stage_state.resize(1);
13741 state_data->RecordPipelineShaderStage(&pCreateInfo->stage, this, &stage_state[0]);
13742 break;
13743 default:
13744 // TODO : Flag error
13745 break;
13746 }
13747 }
13748
13749 template <typename CreateInfo>
initRayTracingPipeline(const ValidationStateTracker * state_data,const CreateInfo * pCreateInfo)13750 void PIPELINE_STATE::initRayTracingPipeline(const ValidationStateTracker *state_data, const CreateInfo *pCreateInfo) {
13751 reset();
13752 raytracingPipelineCI.initialize(pCreateInfo);
13753
13754 stage_state.resize(pCreateInfo->stageCount);
13755 for (uint32_t stage_index = 0; stage_index < pCreateInfo->stageCount; stage_index++) {
13756 const auto &shader_stage = pCreateInfo->pStages[stage_index];
13757 switch (shader_stage.stage) {
13758 case VK_SHADER_STAGE_RAYGEN_BIT_NV:
13759 this->active_shaders |= VK_SHADER_STAGE_RAYGEN_BIT_NV;
13760 break;
13761 case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
13762 this->active_shaders |= VK_SHADER_STAGE_ANY_HIT_BIT_NV;
13763 break;
13764 case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
13765 this->active_shaders |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
13766 break;
13767 case VK_SHADER_STAGE_MISS_BIT_NV:
13768 this->active_shaders |= VK_SHADER_STAGE_MISS_BIT_NV;
13769 break;
13770 case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
13771 this->active_shaders |= VK_SHADER_STAGE_INTERSECTION_BIT_NV;
13772 break;
13773 case VK_SHADER_STAGE_CALLABLE_BIT_NV:
13774 this->active_shaders |= VK_SHADER_STAGE_CALLABLE_BIT_NV;
13775 break;
13776 default:
13777 // TODO : Flag error
13778 break;
13779 }
13780 state_data->RecordPipelineShaderStage(&shader_stage, this, &stage_state[stage_index]);
13781 }
13782 }
13783
// Explicit instantiations of initRayTracingPipeline for the two ray tracing
// create-info flavors (NV and KHR) used elsewhere in the layer.
template void PIPELINE_STATE::initRayTracingPipeline(const ValidationStateTracker *state_data,
                                                     const VkRayTracingPipelineCreateInfoNV *pCreateInfo);
template void PIPELINE_STATE::initRayTracingPipeline(const ValidationStateTracker *state_data,
                                                     const VkRayTracingPipelineCreateInfoKHR *pCreateInfo);
13788