1 /*
2 * Copyright (c) 2015-2020 The Khronos Group Inc.
3 * Copyright (c) 2015-2020 Valve Corporation
4 * Copyright (c) 2015-2020 LunarG, Inc.
5 * Copyright (c) 2015-2020 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Author: Chia-I Wu <olvaffe@gmail.com>
14 * Author: Chris Forbes <chrisf@ijw.co.nz>
15 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
16 * Author: Mark Lobodzinski <mark@lunarg.com>
17 * Author: Mike Stroyan <mike@LunarG.com>
18 * Author: Tobin Ehlis <tobine@google.com>
19 * Author: Tony Barbour <tony@LunarG.com>
20 * Author: Cody Northrop <cnorthrop@google.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 * Author: Jeremy Kniager <jeremyk@lunarg.com>
23 * Author: Shannon McPherson <shannon@lunarg.com>
24 * Author: John Zulauf <jzulauf@lunarg.com>
25 */
26 #include "cast_utils.h"
27 #include "layer_validation_tests.h"
28
// Global list of sType,size identifiers
// NOTE(review): presumably registers custom (test-defined) structure types so pNext-chain
// walkers can skip over them by size — confirm against the consumers of custom_stype_info.
std::vector<std::pair<uint32_t, uint32_t>> custom_stype_info{};
31
FindSupportedDepthOnlyFormat(VkPhysicalDevice phy)32 VkFormat FindSupportedDepthOnlyFormat(VkPhysicalDevice phy) {
33 const VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D32_SFLOAT};
34 for (uint32_t i = 0; i < size(ds_formats); ++i) {
35 VkFormatProperties format_props;
36 vk::GetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
37
38 if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
39 return ds_formats[i];
40 }
41 }
42 return VK_FORMAT_UNDEFINED;
43 }
44
FindSupportedStencilOnlyFormat(VkPhysicalDevice phy)45 VkFormat FindSupportedStencilOnlyFormat(VkPhysicalDevice phy) {
46 const VkFormat ds_formats[] = {VK_FORMAT_S8_UINT};
47 for (uint32_t i = 0; i < size(ds_formats); ++i) {
48 VkFormatProperties format_props;
49 vk::GetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
50
51 if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
52 return ds_formats[i];
53 }
54 }
55 return VK_FORMAT_UNDEFINED;
56 }
57
FindSupportedDepthStencilFormat(VkPhysicalDevice phy)58 VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy) {
59 const VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT};
60 for (uint32_t i = 0; i < size(ds_formats); ++i) {
61 VkFormatProperties format_props;
62 vk::GetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
63
64 if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
65 return ds_formats[i];
66 }
67 }
68 return VK_FORMAT_UNDEFINED;
69 }
70
ImageFormatIsSupported(VkPhysicalDevice phy,VkFormat format,VkImageTiling tiling,VkFormatFeatureFlags features)71 bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
72 VkFormatProperties format_props;
73 vk::GetPhysicalDeviceFormatProperties(phy, format, &format_props);
74 VkFormatFeatureFlags phy_features =
75 (VK_IMAGE_TILING_OPTIMAL == tiling ? format_props.optimalTilingFeatures : format_props.linearTilingFeatures);
76 return (0 != (phy_features & features));
77 }
78
ImageFormatAndFeaturesSupported(VkPhysicalDevice phy,VkFormat format,VkImageTiling tiling,VkFormatFeatureFlags features)79 bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
80 VkFormatProperties format_props;
81 vk::GetPhysicalDeviceFormatProperties(phy, format, &format_props);
82 VkFormatFeatureFlags phy_features =
83 (VK_IMAGE_TILING_OPTIMAL == tiling ? format_props.optimalTilingFeatures : format_props.linearTilingFeatures);
84 return (features == (phy_features & features));
85 }
86
// Returns true when |phy| supports all |features| for info.format/info.tiling
// AND vkGetPhysicalDeviceImageFormatProperties accepts the full image
// description (type/usage/flags). |inst| is unused while the version-2 query
// below remains compiled out.
bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevice phy, const VkImageCreateInfo info,
                                     const VkFormatFeatureFlags features) {
    // Verify physical device support of format features
    if (!ImageFormatAndFeaturesSupported(phy, info.format, info.tiling, features)) {
        return false;
    }

    // Verify that PhysDevImageFormatProp() also claims support for the specific usage
    VkImageFormatProperties props;
    VkResult err =
        vk::GetPhysicalDeviceImageFormatProperties(phy, info.format, info.imageType, info.tiling, info.usage, info.flags, &props);
    if (VK_SUCCESS != err) {
        return false;
    }

#if 0  // Convinced this chunk doesn't currently add any additional info, but leaving in place because it may be
       // necessary with future extensions

    // Verify again using version 2, if supported, which *can* return more property data than the original...
    // (It's not clear that this is any more definitive than using the original version - but no harm)
    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR p_GetPDIFP2KHR =
        (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)vk::GetInstanceProcAddr(inst,
                                                                                  "vkGetPhysicalDeviceImageFormatProperties2KHR");
    if (NULL != p_GetPDIFP2KHR) {
        VkPhysicalDeviceImageFormatInfo2KHR fmt_info{};
        fmt_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR;
        fmt_info.pNext = nullptr;
        fmt_info.format = info.format;
        fmt_info.type = info.imageType;
        fmt_info.tiling = info.tiling;
        fmt_info.usage = info.usage;
        fmt_info.flags = info.flags;

        VkImageFormatProperties2KHR fmt_props = {};
        fmt_props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
        err = p_GetPDIFP2KHR(phy, &fmt_info, &fmt_props);
        if (VK_SUCCESS != err) {
            return false;
        }
    }
#endif

    return true;
}
131
BufferFormatAndFeaturesSupported(VkPhysicalDevice phy,VkFormat format,VkFormatFeatureFlags features)132 bool BufferFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkFormatFeatureFlags features) {
133 VkFormatProperties format_props;
134 vk::GetPhysicalDeviceFormatProperties(phy, format, &format_props);
135 VkFormatFeatureFlags phy_features = format_props.bufferFeatures;
136 return (features == (phy_features & features));
137 }
138
GetPushDescriptorProperties(VkInstance instance,VkPhysicalDevice gpu)139 VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstance instance, VkPhysicalDevice gpu) {
140 // Find address of extension call and make the call -- assumes needed extensions are enabled.
141 PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
142 (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties2KHR");
143 assert(vkGetPhysicalDeviceProperties2KHR != nullptr);
144
145 // Get the push descriptor limits
146 auto push_descriptor_prop = lvl_init_struct<VkPhysicalDevicePushDescriptorPropertiesKHR>();
147 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&push_descriptor_prop);
148 vkGetPhysicalDeviceProperties2KHR(gpu, &prop2);
149 return push_descriptor_prop;
150 }
151
GetSubgroupProperties(VkInstance instance,VkPhysicalDevice gpu)152 VkPhysicalDeviceSubgroupProperties GetSubgroupProperties(VkInstance instance, VkPhysicalDevice gpu) {
153 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
154
155 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&subgroup_prop);
156 vk::GetPhysicalDeviceProperties2(gpu, &prop2);
157 return subgroup_prop;
158 }
159
GetDescriptorIndexingProperties(VkInstance instance,VkPhysicalDevice gpu)160 VkPhysicalDeviceDescriptorIndexingProperties GetDescriptorIndexingProperties(VkInstance instance, VkPhysicalDevice gpu) {
161 auto descriptor_indexing_prop = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingProperties>();
162
163 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&descriptor_indexing_prop);
164 vk::GetPhysicalDeviceProperties2(gpu, &prop2);
165 return descriptor_indexing_prop;
166 }
167
operator ==(const VkDebugUtilsLabelEXT & rhs,const VkDebugUtilsLabelEXT & lhs)168 bool operator==(const VkDebugUtilsLabelEXT &rhs, const VkDebugUtilsLabelEXT &lhs) {
169 bool is_equal = (rhs.color[0] == lhs.color[0]) && (rhs.color[1] == lhs.color[1]) && (rhs.color[2] == lhs.color[2]) &&
170 (rhs.color[3] == lhs.color[3]);
171 if (is_equal) {
172 if (rhs.pLabelName && lhs.pLabelName) {
173 is_equal = (0 == strcmp(rhs.pLabelName, lhs.pLabelName));
174 } else {
175 is_equal = (rhs.pLabelName == nullptr) && (lhs.pLabelName == nullptr);
176 }
177 }
178 return is_equal;
179 }
180
// Debug-utils messenger trampoline: forwards every message to the test-supplied
// callback stored in the DebugUtilsLabelCheckData passed as pUserData.
// Always returns VK_FALSE so the triggering Vulkan call is not aborted.
VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                  VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                                                  const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData) {
    auto *data = reinterpret_cast<DebugUtilsLabelCheckData *>(pUserData);
    data->callback(pCallbackData, data);
    return VK_FALSE;
}
188
189 #if GTEST_IS_THREADSAFE
AddToCommandBuffer(void * arg)190 extern "C" void *AddToCommandBuffer(void *arg) {
191 struct thread_data_struct *data = (struct thread_data_struct *)arg;
192
193 for (int i = 0; i < 80000; i++) {
194 vk::CmdSetEvent(data->commandBuffer, data->event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
195 if (*data->bailout) {
196 break;
197 }
198 }
199 return NULL;
200 }
201
UpdateDescriptor(void * arg)202 extern "C" void *UpdateDescriptor(void *arg) {
203 struct thread_data_struct *data = (struct thread_data_struct *)arg;
204
205 VkDescriptorBufferInfo buffer_info = {};
206 buffer_info.buffer = data->buffer;
207 buffer_info.offset = 0;
208 buffer_info.range = 1;
209
210 VkWriteDescriptorSet descriptor_write;
211 memset(&descriptor_write, 0, sizeof(descriptor_write));
212 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
213 descriptor_write.dstSet = data->descriptorSet;
214 descriptor_write.dstBinding = data->binding;
215 descriptor_write.descriptorCount = 1;
216 descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
217 descriptor_write.pBufferInfo = &buffer_info;
218
219 for (int i = 0; i < 80000; i++) {
220 vk::UpdateDescriptorSets(data->device, 1, &descriptor_write, 0, NULL);
221 if (*data->bailout) {
222 break;
223 }
224 }
225 return NULL;
226 }
227
228 #endif // GTEST_IS_THREADSAFE
229
ReleaseNullFence(void * arg)230 extern "C" void *ReleaseNullFence(void *arg) {
231 struct thread_data_struct *data = (struct thread_data_struct *)arg;
232
233 for (int i = 0; i < 40000; i++) {
234 vk::DestroyFence(data->device, VK_NULL_HANDLE, NULL);
235 if (*data->bailout) {
236 break;
237 }
238 }
239 return NULL;
240 }
241
// Creates a render pass expected to FAIL validation, exercising up to three
// entry points: vkCreateRenderPass (expects rp1_vuid), vkCreateRenderPass2KHR
// and — when resolvable — core vkCreateRenderPass2 (both expect rp2_vuid).
// Pass nullptr for rp1_vuid/rp2_vuid to skip the corresponding path; pass
// rp2_supported=false to skip both v2 paths. Any successfully created render
// pass is destroyed immediately so the test leaks nothing.
void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
                          bool rp2_supported, const char *rp1_vuid, const char *rp2_vuid) {
    VkRenderPass render_pass = VK_NULL_HANDLE;
    VkResult err;

    if (rp1_vuid) {
        // Some tests mismatch attachment type with layout
        error_monitor->SetUnexpectedError("VUID-VkSubpassDescription-None-04437");

        error_monitor->SetDesiredFailureMsg(kErrorBit, rp1_vuid);
        err = vk::CreateRenderPass(device, create_info, nullptr, &render_pass);
        if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
        error_monitor->VerifyFound();
    }

    if (rp2_supported && rp2_vuid) {
        PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
            (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
        // Convert the v1 create info so the same test data drives the v2 entry points.
        safe_VkRenderPassCreateInfo2 create_info2;
        ConvertVkRenderPassCreateInfoToV2KHR(*create_info, &create_info2);

        // aspectMasks might never get set in ConvertVkRenderPassCreateInfoToV2KHR
        error_monitor->SetUnexpectedError("VUID-VkAttachmentReference2-attachment-03311");
        error_monitor->SetUnexpectedError("VUID-VkAttachmentReference2-attachment-03312");
        // Some tests mismatch attachment type with layout
        error_monitor->SetUnexpectedError("VUID-VkSubpassDescription2-None-04439");

        error_monitor->SetDesiredFailureMsg(kErrorBit, rp2_vuid);
        err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
        if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
        error_monitor->VerifyFound();

        // For api version >= 1.2, try core entrypoint
        PFN_vkCreateRenderPass2 vkCreateRenderPass2 = (PFN_vkCreateRenderPass2)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2");
        if (vkCreateRenderPass2) {
            // aspectMasks might never get set in ConvertVkRenderPassCreateInfoToV2KHR
            error_monitor->SetUnexpectedError("VUID-VkAttachmentReference2-attachment-03311");
            error_monitor->SetUnexpectedError("VUID-VkAttachmentReference2-attachment-03312");
            // Some tests mismatch attachment type with layout
            error_monitor->SetUnexpectedError("VUID-VkSubpassDescription2-None-04439");

            error_monitor->SetDesiredFailureMsg(kErrorBit, rp2_vuid);
            err = vkCreateRenderPass2(device, create_info2.ptr(), nullptr, &render_pass);
            if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
            error_monitor->VerifyFound();
        }
    }
}
290
// Creates a render pass expected to PASS validation via vkCreateRenderPass and,
// when rp2_supported, via vkCreateRenderPass2KHR using the converted v2 create
// info. Each successfully created render pass is destroyed immediately.
void PositiveTestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
                                  bool rp2_supported) {
    VkRenderPass render_pass = VK_NULL_HANDLE;
    VkResult err;

    error_monitor->ExpectSuccess();
    err = vk::CreateRenderPass(device, create_info, nullptr, &render_pass);
    if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
    error_monitor->VerifyNotFound();

    if (rp2_supported) {
        PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
            (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
        // Convert the v1 create info so the same test data drives the v2 entry point.
        safe_VkRenderPassCreateInfo2 create_info2;
        ConvertVkRenderPassCreateInfoToV2KHR(*create_info, &create_info2);

        error_monitor->ExpectSuccess();
        err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
        if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
        error_monitor->VerifyNotFound();
    }
}
313
PositiveTestRenderPass2KHRCreate(ErrorMonitor * error_monitor,const VkDevice device,const VkRenderPassCreateInfo2KHR * create_info)314 void PositiveTestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device,
315 const VkRenderPassCreateInfo2KHR *create_info) {
316 VkRenderPass render_pass = VK_NULL_HANDLE;
317 VkResult err;
318 PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
319 (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
320
321 error_monitor->ExpectSuccess();
322 err = vkCreateRenderPass2KHR(device, create_info, nullptr, &render_pass);
323 if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
324 error_monitor->VerifyNotFound();
325 }
326
TestRenderPass2KHRCreate(ErrorMonitor * error_monitor,const VkDevice device,const VkRenderPassCreateInfo2KHR * create_info,const char * rp2_vuid)327 void TestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo2KHR *create_info,
328 const char *rp2_vuid) {
329 VkRenderPass render_pass = VK_NULL_HANDLE;
330 VkResult err;
331 PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
332 (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
333
334 error_monitor->SetDesiredFailureMsg(kErrorBit, rp2_vuid);
335 err = vkCreateRenderPass2KHR(device, create_info, nullptr, &render_pass);
336 if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
337 error_monitor->VerifyFound();
338 }
339
// Begins a render pass expected to FAIL validation, exercising up to three
// entry points: vkCmdBeginRenderPass (expects rp1_vuid), vkCmdBeginRenderPass2KHR
// and — when resolvable — core vkCmdBeginRenderPass2 (both expect rp2_vuid).
// The command buffer is begun before each attempt and reset afterwards so the
// attempts are independent. Null vuids / rp2Supported=false skip paths.
void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
                         const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid) {
    VkCommandBufferBeginInfo cmd_begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
                                               VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};

    if (rp1_vuid) {
        vk::BeginCommandBuffer(command_buffer, &cmd_begin_info);
        error_monitor->SetDesiredFailureMsg(kErrorBit, rp1_vuid);
        vk::CmdBeginRenderPass(command_buffer, begin_info, VK_SUBPASS_CONTENTS_INLINE);
        error_monitor->VerifyFound();
        vk::ResetCommandBuffer(command_buffer, 0);
    }
    if (rp2Supported && rp2_vuid) {
        PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR =
            (PFN_vkCmdBeginRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCmdBeginRenderPass2KHR");
        VkSubpassBeginInfoKHR subpass_begin_info = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
        vk::BeginCommandBuffer(command_buffer, &cmd_begin_info);
        error_monitor->SetDesiredFailureMsg(kErrorBit, rp2_vuid);
        vkCmdBeginRenderPass2KHR(command_buffer, begin_info, &subpass_begin_info);
        error_monitor->VerifyFound();
        vk::ResetCommandBuffer(command_buffer, 0);

        // For api version >= 1.2, try core entrypoint
        PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2 =
            (PFN_vkCmdBeginRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCmdBeginRenderPass2");
        if (vkCmdBeginRenderPass2) {
            vk::BeginCommandBuffer(command_buffer, &cmd_begin_info);
            error_monitor->SetDesiredFailureMsg(kErrorBit, rp2_vuid);
            vkCmdBeginRenderPass2(command_buffer, begin_info, &subpass_begin_info);
            error_monitor->VerifyFound();
            vk::ResetCommandBuffer(command_buffer, 0);
        }
    }
}
374
ValidOwnershipTransferOp(ErrorMonitor * monitor,VkCommandBufferObj * cb,VkPipelineStageFlags src_stages,VkPipelineStageFlags dst_stages,const VkBufferMemoryBarrier * buf_barrier,const VkImageMemoryBarrier * img_barrier)375 void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
376 VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
377 const VkImageMemoryBarrier *img_barrier) {
378 monitor->ExpectSuccess();
379 cb->begin();
380 uint32_t num_buf_barrier = (buf_barrier) ? 1 : 0;
381 uint32_t num_img_barrier = (img_barrier) ? 1 : 0;
382 cb->PipelineBarrier(src_stages, dst_stages, 0, 0, nullptr, num_buf_barrier, buf_barrier, num_img_barrier, img_barrier);
383 cb->end();
384 cb->QueueCommandBuffer(); // Implicitly waits
385 monitor->VerifyNotFound();
386 }
387
// Performs both halves of a queue-family ownership transfer with identical
// barriers: release on cb_from, then acquire on cb_to. Each half is submitted
// and waited on by ValidOwnershipTransferOp, so they execute sequentially.
void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
                            VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
                            const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier) {
    ValidOwnershipTransferOp(monitor, cb_from, src_stages, dst_stages, buf_barrier, img_barrier);
    ValidOwnershipTransferOp(monitor, cb_to, src_stages, dst_stages, buf_barrier, img_barrier);
}
394
GPDIFPHelper(VkPhysicalDevice dev,const VkImageCreateInfo * ci,VkImageFormatProperties * limits)395 VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits) {
396 VkImageFormatProperties tmp_limits;
397 limits = limits ? limits : &tmp_limits;
398 return vk::GetPhysicalDeviceImageFormatProperties(dev, ci->format, ci->imageType, ci->tiling, ci->usage, ci->flags, limits);
399 }
400
FindFormatLinearWithoutMips(VkPhysicalDevice gpu,VkImageCreateInfo image_ci)401 VkFormat FindFormatLinearWithoutMips(VkPhysicalDevice gpu, VkImageCreateInfo image_ci) {
402 image_ci.tiling = VK_IMAGE_TILING_LINEAR;
403
404 const VkFormat first_vk_format = static_cast<VkFormat>(1);
405 const VkFormat last_vk_format = static_cast<VkFormat>(130); // avoid compressed/feature protected, otherwise 184
406
407 for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
408 image_ci.format = format;
409
410 // WORKAROUND for dev_sim and mock_icd not containing valid format limits yet
411 VkFormatProperties format_props;
412 vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
413 const VkFormatFeatureFlags core_filter = 0x1FFF;
414 const auto features = (image_ci.tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
415 : format_props.optimalTilingFeatures & core_filter;
416 if (!(features & core_filter)) continue;
417
418 VkImageFormatProperties img_limits;
419 if (VK_SUCCESS == GPDIFPHelper(gpu, &image_ci, &img_limits) && img_limits.maxMipLevels == 1) return format;
420 }
421
422 return VK_FORMAT_UNDEFINED;
423 }
424
// Searches core formats 1..130 for a (format, sample count) combination that the
// device's format features support but whose VkImageFormatProperties.sampleCounts
// does NOT include. On success returns true with image_ci.format and
// image_ci.samples left set to the unsupported combination (image_ci is mutated
// in place for the caller); returns false if no such combination exists.
bool FindFormatWithoutSamples(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
    const VkFormat first_vk_format = static_cast<VkFormat>(1);
    const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184

    for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
        image_ci.format = format;

        // WORKAROUND for dev_sim and mock_icd not containing valid format limits yet
        VkFormatProperties format_props;
        vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
        const VkFormatFeatureFlags core_filter = 0x1FFF;
        const auto features = (image_ci.tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
                                                                          : format_props.optimalTilingFeatures & core_filter;
        if (!(features & core_filter)) continue;

        // Walk sample counts from 64 down to 1 (each VK_SAMPLE_COUNT_*_BIT is a
        // single bit, so shifting right steps to the next smaller count).
        for (VkSampleCountFlagBits samples = VK_SAMPLE_COUNT_64_BIT; samples > 0;
             samples = static_cast<VkSampleCountFlagBits>(samples >> 1)) {
            image_ci.samples = samples;
            VkImageFormatProperties img_limits;
            if (VK_SUCCESS == GPDIFPHelper(gpu, &image_ci, &img_limits) && !(img_limits.sampleCounts & samples)) return true;
        }
    }

    return false;
}
450
// Searches tilings x core formats 1..130 for an image description whose format
// features ARE advertised but which vkGetPhysicalDeviceImageFormatProperties
// rejects with VK_ERROR_FORMAT_NOT_SUPPORTED. On success returns true with
// image_ci's tiling/format/usage left set to the rejected combination (image_ci
// is mutated in place for the caller); returns false if none is found.
bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
    const VkFormat first_vk_format = static_cast<VkFormat>(1);
    const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184

    const std::vector<VkImageTiling> tilings = {VK_IMAGE_TILING_LINEAR, VK_IMAGE_TILING_OPTIMAL};
    for (const auto tiling : tilings) {
        image_ci.tiling = tiling;

        for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
            image_ci.format = format;

            VkFormatProperties format_props;
            vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);

            const VkFormatFeatureFlags core_filter = 0x1FFF;
            const auto features = (tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
                                                                     : format_props.optimalTilingFeatures & core_filter;
            // We want formats supported by format features, but not by ImageFormatProperties
            if (!(features & core_filter)) continue;

            // get as many usage flags as possible
            image_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
            if (features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) image_ci.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
            if (features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) image_ci.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
            if (features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) image_ci.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
            if (features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
                image_ci.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;

            VkImageFormatProperties img_limits;
            if (VK_ERROR_FORMAT_NOT_SUPPORTED == GPDIFPHelper(gpu, &image_ci, &img_limits)) {
                return true;
            }
        }
    }

    return false;
}
487
FindFormatWithoutFeatures(VkPhysicalDevice gpu,VkImageTiling tiling,VkFormatFeatureFlags undesired_features)488 VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling, VkFormatFeatureFlags undesired_features) {
489 const VkFormat first_vk_format = static_cast<VkFormat>(1);
490 const VkFormat last_vk_format = static_cast<VkFormat>(130); // avoid compressed/feature protected, otherwise 184
491
492 for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
493 VkFormatProperties format_props;
494 vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
495
496 const VkFormatFeatureFlags core_filter = 0x1FFF;
497 const auto features = (tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
498 : format_props.optimalTilingFeatures & core_filter;
499
500 const auto valid_features = features & core_filter;
501 if (undesired_features == UINT32_MAX) {
502 if (!valid_features) return format;
503 } else {
504 if (valid_features && !(valid_features & undesired_features)) return format;
505 }
506 }
507
508 return VK_FORMAT_UNDEFINED;
509 }
510
// Allocates device memory sized for a single plane of a disjoint multi-planar
// image: queries per-plane requirements through the provided
// vkGetImageMemoryRequirements2KHR function pointer (with a
// VkImagePlaneMemoryRequirementsInfo chained in), then allocates from the first
// matching memory type. Asserts (test macros) on failure; result in *mp_image_mem.
void AllocateDisjointMemory(VkDeviceObj *device, PFN_vkGetImageMemoryRequirements2KHR fp, VkImage mp_image,
                            VkDeviceMemory *mp_image_mem, VkImageAspectFlagBits plane) {
    // Select which plane of the disjoint image to query.
    VkImagePlaneMemoryRequirementsInfo image_plane_req = {};
    image_plane_req.sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
    image_plane_req.pNext = nullptr;
    image_plane_req.planeAspect = plane;

    // Chain the plane selector onto the memory-requirements query.
    VkImageMemoryRequirementsInfo2 mem_req_info2 = {};
    mem_req_info2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
    mem_req_info2.pNext = (void *)&image_plane_req;
    mem_req_info2.image = mp_image;

    VkMemoryRequirements2 mp_image_mem_reqs2 = {};
    mp_image_mem_reqs2.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
    mp_image_mem_reqs2.pNext = nullptr;

    fp(device->device(), &mem_req_info2, &mp_image_mem_reqs2);

    VkMemoryAllocateInfo mp_image_alloc_info;
    mp_image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    mp_image_alloc_info.pNext = nullptr;
    mp_image_alloc_info.allocationSize = mp_image_mem_reqs2.memoryRequirements.size;
    // set_memory_type picks a memoryTypeIndex compatible with the plane's memoryTypeBits.
    ASSERT_TRUE(device->phy().set_memory_type(mp_image_mem_reqs2.memoryRequirements.memoryTypeBits, &mp_image_alloc_info, 0));
    ASSERT_VK_SUCCESS(vk::AllocateMemory(device->device(), &mp_image_alloc_info, NULL, mp_image_mem));
}
536
// Drives vkCmdSetViewport with out-of-range / negative-height viewports and
// verifies each produces its expected VUID. Bounds are derived from the
// device's maxViewportDimensions and viewportBoundsRange limits; NearestSmaller/
// NearestGreater step just past a limit without relying on cast rounding.
void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_commandBuffer, ErrorMonitor *m_errorMonitor) {
    const auto &limits = m_device->props.limits;

    m_commandBuffer->begin();

    using std::vector;
    struct TestCase {
        VkViewport vp;                   // the viewport to submit
        vector<std::string> vuids;       // every VUID the submission must trigger
    };

    // not necessarily boundary values (unspecified cast rounding), but guaranteed to be over limit
    const auto one_before_min_h = NearestSmaller(-static_cast<float>(limits.maxViewportDimensions[1]));
    const auto one_past_max_h = NearestGreater(static_cast<float>(limits.maxViewportDimensions[1]));

    const auto min_bound = limits.viewportBoundsRange[0];
    const auto max_bound = limits.viewportBoundsRange[1];
    const auto one_before_min_bound = NearestSmaller(min_bound);
    const auto one_past_max_bound = NearestGreater(max_bound);

    // VkViewport fields: {x, y, width, height, minDepth, maxDepth}
    const vector<TestCase> test_cases = {{{0.0, 0.0, 64.0, one_before_min_h, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
                                         {{0.0, 0.0, 64.0, one_past_max_h, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
                                         {{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
                                         {{0.0, one_before_min_bound, 64.0, 1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01775"}},
                                         {{0.0, one_past_max_bound, 64.0, -1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01776"}},
                                         {{0.0, min_bound, 64.0, -1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01777"}},
                                         {{0.0, max_bound, 64.0, 1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01233"}}};

    for (const auto &test_case : test_cases) {
        for (const auto &vuid : test_case.vuids) {
            // "VUID-Undefined" marks cases where no VUID exists yet; match message text instead.
            if (vuid == "VUID-Undefined")
                m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "is less than VkPhysicalDeviceLimits::viewportBoundsRange[0]");
            else
                m_errorMonitor->SetDesiredFailureMsg(kErrorBit, vuid);
        }
        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &test_case.vp);
        m_errorMonitor->VerifyFound();
    }
}
576
CreateSamplerTest(VkLayerTest & test,const VkSamplerCreateInfo * pCreateInfo,std::string code)577 void CreateSamplerTest(VkLayerTest &test, const VkSamplerCreateInfo *pCreateInfo, std::string code) {
578 VkResult err;
579 VkSampler sampler = VK_NULL_HANDLE;
580 if (code.length())
581 test.Monitor().SetDesiredFailureMsg(kErrorBit | kWarningBit, code);
582 else
583 test.Monitor().ExpectSuccess();
584
585 err = vk::CreateSampler(test.device(), pCreateInfo, NULL, &sampler);
586 if (code.length())
587 test.Monitor().VerifyFound();
588 else
589 test.Monitor().VerifyNotFound();
590
591 if (VK_SUCCESS == err) {
592 vk::DestroySampler(test.device(), sampler, NULL);
593 }
594 }
595
CreateBufferTest(VkLayerTest & test,const VkBufferCreateInfo * pCreateInfo,std::string code)596 void CreateBufferTest(VkLayerTest &test, const VkBufferCreateInfo *pCreateInfo, std::string code) {
597 VkResult err;
598 VkBuffer buffer = VK_NULL_HANDLE;
599 if (code.length())
600 test.Monitor().SetDesiredFailureMsg(kErrorBit, code);
601 else
602 test.Monitor().ExpectSuccess();
603
604 err = vk::CreateBuffer(test.device(), pCreateInfo, NULL, &buffer);
605 if (code.length())
606 test.Monitor().VerifyFound();
607 else
608 test.Monitor().VerifyNotFound();
609
610 if (VK_SUCCESS == err) {
611 vk::DestroyBuffer(test.device(), buffer, NULL);
612 }
613 }
614
CreateImageTest(VkLayerTest & test,const VkImageCreateInfo * pCreateInfo,std::string code)615 void CreateImageTest(VkLayerTest &test, const VkImageCreateInfo *pCreateInfo, std::string code) {
616 VkResult err;
617 VkImage image = VK_NULL_HANDLE;
618 if (code.length()) {
619 test.Monitor().SetDesiredFailureMsg(kErrorBit, code);
620 // Very possible a test didn't check for VK_ERROR_FORMAT_NOT_SUPPORTED
621 test.Monitor().SetUnexpectedError("UNASSIGNED-CoreValidation-Image-FormatNotSupported");
622 } else {
623 test.Monitor().ExpectSuccess();
624 }
625
626 err = vk::CreateImage(test.device(), pCreateInfo, NULL, &image);
627 if (code.length())
628 test.Monitor().VerifyFound();
629 else
630 test.Monitor().VerifyNotFound();
631
632 if (VK_SUCCESS == err) {
633 vk::DestroyImage(test.device(), image, NULL);
634 }
635 }
636
CreateBufferViewTest(VkLayerTest & test,const VkBufferViewCreateInfo * pCreateInfo,const std::vector<std::string> & codes)637 void CreateBufferViewTest(VkLayerTest &test, const VkBufferViewCreateInfo *pCreateInfo, const std::vector<std::string> &codes) {
638 VkResult err;
639 VkBufferView view = VK_NULL_HANDLE;
640 if (codes.size())
641 std::for_each(codes.begin(), codes.end(), [&](const std::string &s) { test.Monitor().SetDesiredFailureMsg(kErrorBit, s); });
642 else
643 test.Monitor().ExpectSuccess();
644
645 err = vk::CreateBufferView(test.device(), pCreateInfo, NULL, &view);
646 if (codes.size())
647 test.Monitor().VerifyFound();
648 else
649 test.Monitor().VerifyNotFound();
650
651 if (VK_SUCCESS == err) {
652 vk::DestroyBufferView(test.device(), view, NULL);
653 }
654 }
655
CreateImageViewTest(VkLayerTest & test,const VkImageViewCreateInfo * pCreateInfo,std::string code)656 void CreateImageViewTest(VkLayerTest &test, const VkImageViewCreateInfo *pCreateInfo, std::string code) {
657 VkResult err;
658 VkImageView view = VK_NULL_HANDLE;
659 if (code.length())
660 test.Monitor().SetDesiredFailureMsg(kErrorBit, code);
661 else
662 test.Monitor().ExpectSuccess();
663
664 err = vk::CreateImageView(test.device(), pCreateInfo, NULL, &view);
665 if (code.length())
666 test.Monitor().VerifyFound();
667 else
668 test.Monitor().VerifyNotFound();
669
670 if (VK_SUCCESS == err) {
671 vk::DestroyImageView(test.device(), view, NULL);
672 }
673 }
674
SafeSaneSamplerCreateInfo()675 VkSamplerCreateInfo SafeSaneSamplerCreateInfo() {
676 VkSamplerCreateInfo sampler_create_info = {};
677 sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
678 sampler_create_info.pNext = nullptr;
679 sampler_create_info.magFilter = VK_FILTER_NEAREST;
680 sampler_create_info.minFilter = VK_FILTER_NEAREST;
681 sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
682 sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
683 sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
684 sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
685 sampler_create_info.mipLodBias = 0.0;
686 sampler_create_info.anisotropyEnable = VK_FALSE;
687 sampler_create_info.maxAnisotropy = 1.0;
688 sampler_create_info.compareEnable = VK_FALSE;
689 sampler_create_info.compareOp = VK_COMPARE_OP_NEVER;
690 sampler_create_info.minLod = 0.0;
691 sampler_create_info.maxLod = 16.0;
692 sampler_create_info.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
693 sampler_create_info.unnormalizedCoordinates = VK_FALSE;
694
695 return sampler_create_info;
696 }
697
SafeSaneImageViewCreateInfo(VkImage image,VkFormat format,VkImageAspectFlags aspect_mask)698 VkImageViewCreateInfo SafeSaneImageViewCreateInfo(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask) {
699 VkImageViewCreateInfo image_view_create_info = {};
700 image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
701 image_view_create_info.image = image;
702 image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
703 image_view_create_info.format = format;
704 image_view_create_info.subresourceRange.layerCount = 1;
705 image_view_create_info.subresourceRange.baseMipLevel = 0;
706 image_view_create_info.subresourceRange.levelCount = 1;
707 image_view_create_info.subresourceRange.aspectMask = aspect_mask;
708
709 return image_view_create_info;
710 }
711
// Convenience overload: unwraps the framework image object and forwards to the
// raw-handle variant above.
VkImageViewCreateInfo SafeSaneImageViewCreateInfo(const VkImageObj &image, VkFormat format, VkImageAspectFlags aspect_mask) {
    return SafeSaneImageViewCreateInfo(image.handle(), format, aspect_mask);
}
715
CheckCreateRenderPass2Support(VkRenderFramework * renderFramework,std::vector<const char * > & device_extension_names)716 bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names) {
717 if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
718 device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
719 device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
720 device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
721 return true;
722 }
723 return false;
724 }
725
CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework * renderFramework,std::vector<const char * > & instance_extension_names,std::vector<const char * > & device_extension_names,VkValidationFeaturesEXT * features,void * userData)726 bool CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework *renderFramework,
727 std::vector<const char *> &instance_extension_names,
728 std::vector<const char *> &device_extension_names,
729 VkValidationFeaturesEXT *features, void *userData) {
730 bool descriptor_indexing = renderFramework->InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
731 if (descriptor_indexing) {
732 instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
733 }
734 renderFramework->InitFramework(userData, features);
735 descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr,
736 VK_KHR_MAINTENANCE3_EXTENSION_NAME);
737 descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(
738 renderFramework->gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
739 if (descriptor_indexing) {
740 device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
741 device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
742 return true;
743 }
744 return false;
745 }
746
CheckTimelineSemaphoreSupportAndInitState(VkRenderFramework * renderFramework)747 bool CheckTimelineSemaphoreSupportAndInitState(VkRenderFramework *renderFramework) {
748 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
749 (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(renderFramework->instance(),
750 "vkGetPhysicalDeviceFeatures2KHR");
751 auto timeline_semaphore_features = lvl_init_struct<VkPhysicalDeviceTimelineSemaphoreFeatures>();
752 auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&timeline_semaphore_features);
753 vkGetPhysicalDeviceFeatures2KHR(renderFramework->gpu(), &features2);
754 if (!timeline_semaphore_features.timelineSemaphore) {
755 return false;
756 }
757 renderFramework->InitState(nullptr, &features2);
758 return true;
759 }
760
// Records and submits a single-triangle draw, deliberately configured to trigger
// (or avoid) a specific validation failure selected by |failCase|. The switch
// below sets up pipeline/dynamic state for the chosen case; the draw section then
// issues the matching (possibly invalid) command. Callers set up the error
// monitor expectations before invoking this.
void VkLayerTest::VKTriangleTest(BsoFailSelect failCase) {
    ASSERT_TRUE(m_device && m_device->initialized());  // VKTriangleTest assumes Init() has finished

    ASSERT_NO_FATAL_FAILURE(InitViewport());

    // Minimal vertex/fragment shader pair for a fixed triangle.
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipelineobj(m_device);
    pipelineobj.AddDefaultColorAttachment();
    pipelineobj.AddShader(&vs);
    pipelineobj.AddShader(&ps);

    bool failcase_needs_depth = false;  // to mark cases that need depth attachment

    VkBufferObj index_buffer;

    // Per-case pipeline configuration. Each dynamic-state case marks the state
    // dynamic WITHOUT ever setting it on the command buffer, which is the error
    // under test.
    switch (failCase) {
        case BsoFailLineWidth: {
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_WIDTH);
            VkPipelineInputAssemblyStateCreateInfo ia_state = {};
            ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
            ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
            pipelineobj.SetInputAssembly(&ia_state);
            break;
        }
        case BsoFailLineStipple: {
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_STIPPLE_EXT);
            VkPipelineInputAssemblyStateCreateInfo ia_state = {};
            ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
            ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
            pipelineobj.SetInputAssembly(&ia_state);

            VkPipelineRasterizationLineStateCreateInfoEXT line_state = {};
            line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
            line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
            line_state.stippledLineEnable = VK_TRUE;
            line_state.lineStippleFactor = 0;
            line_state.lineStipplePattern = 0;
            pipelineobj.SetLineState(&line_state);
            break;
        }
        case BsoFailDepthBias: {
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BIAS);
            VkPipelineRasterizationStateCreateInfo rs_state = {};
            rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
            rs_state.depthBiasEnable = VK_TRUE;
            rs_state.lineWidth = 1.0f;
            pipelineobj.SetRasterization(&rs_state);
            break;
        }
        case BsoFailViewport: {
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
            break;
        }
        case BsoFailScissor: {
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
            break;
        }
        case BsoFailBlend: {
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
            VkPipelineColorBlendAttachmentState att_state = {};
            att_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
            att_state.blendEnable = VK_TRUE;
            pipelineobj.AddColorAttachment(0, att_state);
            break;
        }
        case BsoFailDepthBounds: {
            failcase_needs_depth = true;
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
            break;
        }
        case BsoFailStencilReadMask: {
            failcase_needs_depth = true;
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
            break;
        }
        case BsoFailStencilWriteMask: {
            failcase_needs_depth = true;
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
            break;
        }
        case BsoFailStencilReference: {
            failcase_needs_depth = true;
            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
            break;
        }

        case BsoFailIndexBuffer:
            break;
        case BsoFailIndexBufferBadSize:
        case BsoFailIndexBufferBadOffset:
        case BsoFailIndexBufferBadMapSize:
        case BsoFailIndexBufferBadMapOffset: {
            // Create an index buffer for these tests.
            // There is no need to populate it because we should bail before trying to draw.
            uint32_t const indices[] = {0};
            VkBufferCreateInfo buffer_info = {};
            buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            buffer_info.size = 1024;
            buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
            buffer_info.queueFamilyIndexCount = 1;
            buffer_info.pQueueFamilyIndices = indices;
            index_buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
        } break;
        case BsoFailCmdClearAttachments:
            break;
        case BsoFailNone:
            break;
        default:
            break;
    }

    VkDescriptorSetObj descriptorSet(m_device);

    // Cases that exercise depth/stencil dynamic state need an actual D/S attachment.
    VkImageView *depth_attachment = nullptr;
    if (failcase_needs_depth) {
        m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
        ASSERT_TRUE(m_depth_stencil_fmt != VK_FORMAT_UNDEFINED);

        m_depthStencil->Init(m_device, static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height), m_depth_stencil_fmt,
                             VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
        depth_attachment = m_depthStencil->BindInfo();
    }

    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(1, depth_attachment));
    m_commandBuffer->begin();

    GenericDrawPreparation(m_commandBuffer, pipelineobj, descriptorSet, failCase);

    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    // render triangle
    if (failCase == BsoFailIndexBuffer) {
        // Use DrawIndexed w/o an index buffer bound
        m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
    } else if (failCase == BsoFailIndexBufferBadSize) {
        // Bind the index buffer and draw one too many indices
        m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
        m_commandBuffer->DrawIndexed(513, 1, 0, 0, 0);
    } else if (failCase == BsoFailIndexBufferBadOffset) {
        // Bind the index buffer and draw one past the end of the buffer using the offset
        m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
        m_commandBuffer->DrawIndexed(512, 1, 1, 0, 0);
    } else if (failCase == BsoFailIndexBufferBadMapSize) {
        // Bind the index buffer at the middle point and draw one too many indices
        m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
        m_commandBuffer->DrawIndexed(257, 1, 0, 0, 0);
    } else if (failCase == BsoFailIndexBufferBadMapOffset) {
        // Bind the index buffer at the middle point and draw one past the end of the buffer
        m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
        m_commandBuffer->DrawIndexed(256, 1, 1, 0, 0);
    } else {
        m_commandBuffer->Draw(3, 1, 0, 0);
    }

    if (failCase == BsoFailCmdClearAttachments) {
        VkClearAttachment color_attachment = {};
        color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        color_attachment.colorAttachment = 2000000000;  // Someone who knew what they were doing would use 0 for the index;
        VkClearRect clear_rect = {{{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}}, 0, 1};

        vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
    }

    // finalize recording of the command buffer
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer(true);
    DestroyRenderTarget();
}
932
// Shared setup for the triangle tests: clears/prepares the render targets,
// builds a depth/stencil state that lets depth- and stencil-failure cases
// actually exercise their paths, creates the pipeline and descriptor set, and
// binds both on |commandBuffer|.
void VkLayerTest::GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj,
                                         VkDescriptorSetObj &descriptorSet, BsoFailSelect failCase) {
    commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);

    commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
    // Make sure depthWriteEnable is set so that Depth fail test will work
    // correctly
    // Make sure stencilTestEnable is set so that Stencil fail test will work
    // correctly
    VkStencilOpState stencil = {};
    stencil.failOp = VK_STENCIL_OP_KEEP;
    stencil.passOp = VK_STENCIL_OP_KEEP;
    stencil.depthFailOp = VK_STENCIL_OP_KEEP;
    stencil.compareOp = VK_COMPARE_OP_NEVER;

    VkPipelineDepthStencilStateCreateInfo ds_ci = {};
    ds_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds_ci.pNext = NULL;
    ds_ci.depthTestEnable = VK_FALSE;
    ds_ci.depthWriteEnable = VK_TRUE;
    ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
    ds_ci.depthBoundsTestEnable = VK_FALSE;
    // Depth-bounds case: enable the test so the (unset) dynamic bounds are consumed.
    if (failCase == BsoFailDepthBounds) {
        ds_ci.depthBoundsTestEnable = VK_TRUE;
        ds_ci.maxDepthBounds = 0.0f;
        ds_ci.minDepthBounds = 0.0f;
    }
    ds_ci.stencilTestEnable = VK_TRUE;
    ds_ci.front = stencil;
    ds_ci.back = stencil;

    pipelineobj.SetDepthStencil(&ds_ci);
    pipelineobj.SetViewport(m_viewports);
    pipelineobj.SetScissor(m_scissors);
    descriptorSet.CreateVKDescriptorSet(commandBuffer);
    VkResult err = pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
    ASSERT_VK_SUCCESS(err);
    vk::CmdBindPipeline(commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineobj.handle());
    commandBuffer->BindDescriptorSet(descriptorSet);
}
973
// One-shot convenience: initialize the framework (instance/debug setup) and then
// device state with the given feature structs and command-pool flags.
void VkLayerTest::Init(VkPhysicalDeviceFeatures *features, VkPhysicalDeviceFeatures2 *features2,
                       const VkCommandPoolCreateFlags flags, void *instance_pnext) {
    InitFramework(m_errorMonitor, instance_pnext);
    InitState(features, features2, flags);
}
979
// Accessor for the test's default command buffer (owned by the framework).
VkCommandBufferObj *VkLayerTest::CommandBuffer() { return m_commandBuffer; }
981
// Configures default instance layers/extensions and application info for every
// layer test, and records the instance API version so tests can clamp their
// target version against what the loader supports.
VkLayerTest::VkLayerTest() {
    m_enableWSI = false;

    // TODO: not quite sure why most of this is here instead of in super

    // Add default instance extensions to the list
    instance_extensions_.push_back(debug_reporter_.debug_extension_name);

    instance_layers_.push_back(kValidationLayerName);

    // devsim and device_profile_api are mutually exclusive here: devsim replaces
    // the device entirely, device_profile_api only overrides format properties.
    if (VkTestFramework::m_devsim_layer) {
        if (InstanceLayerSupported("VK_LAYER_LUNARG_device_simulation")) {
            instance_layers_.push_back("VK_LAYER_LUNARG_device_simulation");
        } else {
            VkTestFramework::m_devsim_layer = false;
            printf(" Did not find VK_LAYER_LUNARG_device_simulation layer so it will not be enabled.\n");
        }
    } else {
        if (InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api"))
            instance_layers_.push_back("VK_LAYER_LUNARG_device_profile_api");
    }

    app_info_.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    app_info_.pNext = NULL;
    app_info_.pApplicationName = "layer_tests";
    app_info_.applicationVersion = 1;
    app_info_.pEngineName = "unittest";
    app_info_.engineVersion = 1;
    app_info_.apiVersion = VK_API_VERSION_1_0;

    // Find out what version the instance supports and record the default target instance
    // (vkEnumerateInstanceVersion only exists on 1.1+ loaders, hence the null check).
    auto enumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)vk::GetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
    if (enumerateInstanceVersion) {
        enumerateInstanceVersion(&m_instance_api_version);
    } else {
        m_instance_api_version = VK_API_VERSION_1_0;
    }
    m_target_api_version = app_info_.apiVersion;
}
1021
// Enables WSI and adds VK_KHR_surface plus at least one platform surface
// extension (Win32/Android/Xlib/XCB, depending on build platform and runtime
// availability). Returns false — after printing a skip message — when no
// platform surface extension can be enabled.
bool VkLayerTest::AddSurfaceInstanceExtension() {
    m_enableWSI = true;
    if (!InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_SURFACE_EXTENSION_NAME);
        return false;
    }
    instance_extensions_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);

    bool bSupport = false;
#if defined(VK_USE_PLATFORM_WIN32_KHR)
    if (!InstanceExtensionSupported(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
        return false;
    }
    instance_extensions_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
    bSupport = true;
#endif

#if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
    if (!InstanceExtensionSupported(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
        return false;
    }
    instance_extensions_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
    bSupport = true;
#endif

#if defined(VK_USE_PLATFORM_XLIB_KHR)
    if (!InstanceExtensionSupported(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
        return false;
    }
    // Only enable Xlib surfaces when an X display is actually reachable.
    if (XOpenDisplay(NULL)) {
        instance_extensions_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
        bSupport = true;
    }
#endif

#if defined(VK_USE_PLATFORM_XCB_KHR)
    if (!InstanceExtensionSupported(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_XCB_SURFACE_EXTENSION_NAME);
        return false;
    }
    // XCB is only used as a fallback when Xlib (above) did not provide support.
    if (!bSupport && xcb_connect(NULL, NULL)) {
        instance_extensions_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
        bSupport = true;
    }
#endif

    if (bSupport) return true;
    printf("%s No platform's surface extension supported\n", kSkipPrefix);
    return false;
}
1075
AddSwapchainDeviceExtension()1076 bool VkLayerTest::AddSwapchainDeviceExtension() {
1077 if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
1078 printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1079 return false;
1080 }
1081 m_device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1082 return true;
1083 }
1084
SetTargetApiVersion(uint32_t target_api_version)1085 uint32_t VkLayerTest::SetTargetApiVersion(uint32_t target_api_version) {
1086 if (target_api_version == 0) target_api_version = VK_API_VERSION_1_0;
1087 if (target_api_version <= m_instance_api_version) {
1088 m_target_api_version = target_api_version;
1089 app_info_.apiVersion = m_target_api_version;
1090 }
1091 return m_target_api_version;
1092 }
1093
uint32_t VkLayerTest::DeviceValidationVersion() {
    // The validation layers assume the version we are validating to is the apiVersion unless the device apiVersion is lower
    return std::min(m_target_api_version, physDevProps().apiVersion);
}
1098
LoadDeviceProfileLayer(PFN_vkSetPhysicalDeviceFormatPropertiesEXT & fpvkSetPhysicalDeviceFormatPropertiesEXT,PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT & fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)1099 bool VkLayerTest::LoadDeviceProfileLayer(
1100 PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
1101 PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT) {
1102 // Load required functions
1103 fpvkSetPhysicalDeviceFormatPropertiesEXT =
1104 (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vk::GetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
1105 fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vk::GetInstanceProcAddr(
1106 instance(), "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
1107
1108 if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
1109 printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
1110 return 0;
1111 }
1112
1113 return 1;
1114 }
1115
LoadDeviceProfileLayer(PFN_vkSetPhysicalDeviceFormatProperties2EXT & fpvkSetPhysicalDeviceFormatProperties2EXT,PFN_vkGetOriginalPhysicalDeviceFormatProperties2EXT & fpvkGetOriginalPhysicalDeviceFormatProperties2EXT)1116 bool VkLayerTest::LoadDeviceProfileLayer(
1117 PFN_vkSetPhysicalDeviceFormatProperties2EXT &fpvkSetPhysicalDeviceFormatProperties2EXT,
1118 PFN_vkGetOriginalPhysicalDeviceFormatProperties2EXT &fpvkGetOriginalPhysicalDeviceFormatProperties2EXT) {
1119 // Load required functions
1120 fpvkSetPhysicalDeviceFormatProperties2EXT =
1121 (PFN_vkSetPhysicalDeviceFormatProperties2EXT)vk::GetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatProperties2EXT");
1122 fpvkGetOriginalPhysicalDeviceFormatProperties2EXT =
1123 (PFN_vkGetOriginalPhysicalDeviceFormatProperties2EXT)vk::GetInstanceProcAddr(
1124 instance(), "vkGetOriginalPhysicalDeviceFormatProperties2EXT");
1125
1126 if (!(fpvkSetPhysicalDeviceFormatProperties2EXT) || !(fpvkGetOriginalPhysicalDeviceFormatProperties2EXT)) {
1127 printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
1128 return false;
1129 }
1130
1131 return true;
1132 }
1133
GetTestConditionValid(VkDeviceObj * aVulkanDevice,eTestEnFlags aTestFlag,VkBufferUsageFlags aBufferUsage)1134 bool VkBufferTest::GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage) {
1135 if (eInvalidDeviceOffset != aTestFlag && eInvalidMemoryOffset != aTestFlag) {
1136 return true;
1137 }
1138 VkDeviceSize offset_limit = 0;
1139 if (eInvalidMemoryOffset == aTestFlag) {
1140 VkBuffer vulkanBuffer;
1141 VkBufferCreateInfo buffer_create_info = {};
1142 buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1143 buffer_create_info.size = 32;
1144 buffer_create_info.usage = aBufferUsage;
1145
1146 vk::CreateBuffer(aVulkanDevice->device(), &buffer_create_info, nullptr, &vulkanBuffer);
1147 VkMemoryRequirements memory_reqs = {};
1148
1149 vk::GetBufferMemoryRequirements(aVulkanDevice->device(), vulkanBuffer, &memory_reqs);
1150 vk::DestroyBuffer(aVulkanDevice->device(), vulkanBuffer, nullptr);
1151 offset_limit = memory_reqs.alignment;
1152 } else if ((VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) & aBufferUsage) {
1153 offset_limit = aVulkanDevice->props.limits.minTexelBufferOffsetAlignment;
1154 } else if (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT & aBufferUsage) {
1155 offset_limit = aVulkanDevice->props.limits.minUniformBufferOffsetAlignment;
1156 } else if (VK_BUFFER_USAGE_STORAGE_BUFFER_BIT & aBufferUsage) {
1157 offset_limit = aVulkanDevice->props.limits.minStorageBufferOffsetAlignment;
1158 }
1159 return eOffsetAlignment < offset_limit;
1160 }
1161
// Builds a buffer/memory pair configured to trigger the error selected by
// |aTestFlag|: a bind against a null/fake buffer handle, a deliberately
// misaligned bind offset, or setup for an invalid-handle free in the destructor.
VkBufferTest::VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag)
    : AllocateCurrent(true),
      BoundCurrent(false),
      CreateCurrent(false),
      InvalidDeleteEn(false),
      VulkanDevice(aVulkanDevice->device()) {
    if (eBindNullBuffer == aTestFlag || eBindFakeBuffer == aTestFlag) {
        // Bad-handle cases: allocate some memory, then bind it to a buffer
        // handle that was never created (null or a garbage value).
        VkMemoryAllocateInfo memory_allocate_info = {};
        memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        memory_allocate_info.allocationSize = 1;   // fake size -- shouldn't matter for the test
        memory_allocate_info.memoryTypeIndex = 0;  // fake type -- shouldn't matter for the test
        vk::AllocateMemory(VulkanDevice, &memory_allocate_info, nullptr, &VulkanMemory);

        VulkanBuffer = (aTestFlag == eBindNullBuffer) ? VK_NULL_HANDLE : (VkBuffer)0xCDCDCDCDCDCDCDCD;

        vk::BindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, 0);
    } else {
        // Normal path: create a real buffer, allocate compatible host-visible
        // memory, and bind it (misaligned when an invalid-offset case is selected).
        VkBufferCreateInfo buffer_create_info = {};
        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buffer_create_info.size = 32;
        buffer_create_info.usage = aBufferUsage;

        vk::CreateBuffer(VulkanDevice, &buffer_create_info, nullptr, &VulkanBuffer);

        CreateCurrent = true;

        VkMemoryRequirements memory_requirements;
        vk::GetBufferMemoryRequirements(VulkanDevice, VulkanBuffer, &memory_requirements);

        VkMemoryAllocateInfo memory_allocate_info = {};
        memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        // Over-allocate so the offset bind below stays within the allocation.
        memory_allocate_info.allocationSize = memory_requirements.size + eOffsetAlignment;
        bool pass = aVulkanDevice->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info,
                                                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
        if (!pass) {
            // No compatible memory type: undo buffer creation and leave the
            // object in a "not current" state (GetBufferCurrent() returns false).
            CreateCurrent = false;
            vk::DestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
            return;
        }

        vk::AllocateMemory(VulkanDevice, &memory_allocate_info, NULL, &VulkanMemory);
        // NB: 1 is intentionally an invalid offset value
        const bool offset_en = eInvalidDeviceOffset == aTestFlag || eInvalidMemoryOffset == aTestFlag;
        vk::BindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, offset_en ? eOffsetAlignment : 0);
        BoundCurrent = true;

        InvalidDeleteEn = (eFreeInvalidHandle == aTestFlag);
    }
}
1211
// Releases the buffer and its memory. When InvalidDeleteEn is set, first frees a
// deliberately corrupted memory handle (original handle + 1) to trigger the
// invalid-handle validation error the test expects.
VkBufferTest::~VkBufferTest() {
    if (CreateCurrent) {
        vk::DestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
    }
    if (AllocateCurrent) {
        if (InvalidDeleteEn) {
            auto bad_memory = CastFromUint64<VkDeviceMemory>(CastToUint64(VulkanMemory) + 1);
            vk::FreeMemory(VulkanDevice, bad_memory, nullptr);
        }
        vk::FreeMemory(VulkanDevice, VulkanMemory, nullptr);
    }
}
1224
// True only when the buffer was created, memory allocated, AND bound — i.e. the object is fully usable.
bool VkBufferTest::GetBufferCurrent() { return AllocateCurrent && BoundCurrent && CreateCurrent; }
1226
// Accessor for the underlying VkBuffer handle (may be null/fake for the bad-handle test modes).
const VkBuffer &VkBufferTest::GetBuffer() { return VulkanBuffer; }
1228
// Deliberately destroys the buffer while leaving CreateCurrent set, so the
// destructor destroys it a second time — used to provoke the double-destroy
// validation error.
void VkBufferTest::TestDoubleDestroy() {
    // Destroy the buffer but leave the flag set, which will cause
    // the buffer to be destroyed again in the destructor.
    vk::DestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
}
1234
// Monotonic counter handing out a unique vertex-binding slot per VkVerticesObj instance.
uint32_t VkVerticesObj::BindIdGenerator;
1236
VkVerticesObj(VkDeviceObj * aVulkanDevice,unsigned aAttributeCount,unsigned aBindingCount,unsigned aByteStride,VkDeviceSize aVertexCount,const float * aVerticies)1237 VkVerticesObj::VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
1238 VkDeviceSize aVertexCount, const float *aVerticies)
1239 : BoundCurrent(false),
1240 AttributeCount(aAttributeCount),
1241 BindingCount(aBindingCount),
1242 BindId(BindIdGenerator),
1243 PipelineVertexInputStateCreateInfo(),
1244 VulkanMemoryBuffer(aVulkanDevice, static_cast<int>(aByteStride * aVertexCount), reinterpret_cast<const void *>(aVerticies),
1245 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) {
1246 BindIdGenerator++; // NB: This can wrap w/misuse
1247
1248 VertexInputAttributeDescription = new VkVertexInputAttributeDescription[AttributeCount];
1249 VertexInputBindingDescription = new VkVertexInputBindingDescription[BindingCount];
1250
1251 PipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions = VertexInputAttributeDescription;
1252 PipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount = AttributeCount;
1253 PipelineVertexInputStateCreateInfo.pVertexBindingDescriptions = VertexInputBindingDescription;
1254 PipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount = BindingCount;
1255 PipelineVertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
1256
1257 unsigned i = 0;
1258 do {
1259 VertexInputAttributeDescription[i].binding = BindId;
1260 VertexInputAttributeDescription[i].location = i;
1261 VertexInputAttributeDescription[i].format = VK_FORMAT_R32G32B32_SFLOAT;
1262 VertexInputAttributeDescription[i].offset = sizeof(float) * aByteStride;
1263 i++;
1264 } while (AttributeCount < i);
1265
1266 i = 0;
1267 do {
1268 VertexInputBindingDescription[i].binding = BindId;
1269 VertexInputBindingDescription[i].stride = aByteStride;
1270 VertexInputBindingDescription[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
1271 i++;
1272 } while (BindingCount < i);
1273 }
1274
~VkVerticesObj()1275 VkVerticesObj::~VkVerticesObj() {
1276 if (VertexInputAttributeDescription) {
1277 delete[] VertexInputAttributeDescription;
1278 }
1279 if (VertexInputBindingDescription) {
1280 delete[] VertexInputBindingDescription;
1281 }
1282 }
1283
// Registers this object's attribute and binding descriptions on |aPipelineObj|.
// Always returns true.
bool VkVerticesObj::AddVertexInputToPipe(VkPipelineObj &aPipelineObj) {
    aPipelineObj.AddVertexInputAttribs(VertexInputAttributeDescription, AttributeCount);
    aPipelineObj.AddVertexInputBindings(VertexInputBindingDescription, BindingCount);
    return true;
}
1289
// Wires this object's vertex-input descriptions directly into a
// CreatePipelineHelper's vi_ci_ struct. Always returns true.
// NOTE: the name's "Helpr" typo is part of the public interface; renaming would
// break existing callers.
bool VkVerticesObj::AddVertexInputToPipeHelpr(CreatePipelineHelper *pipelineHelper) {
    pipelineHelper->vi_ci_.pVertexBindingDescriptions = VertexInputBindingDescription;
    pipelineHelper->vi_ci_.vertexBindingDescriptionCount = BindingCount;
    pipelineHelper->vi_ci_.pVertexAttributeDescriptions = VertexInputAttributeDescription;
    pipelineHelper->vi_ci_.vertexAttributeDescriptionCount = AttributeCount;
    return true;
}
1297
BindVertexBuffers(VkCommandBuffer aCommandBuffer,unsigned aOffsetCount,VkDeviceSize * aOffsetList)1298 void VkVerticesObj::BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount, VkDeviceSize *aOffsetList) {
1299 VkDeviceSize *offsetList;
1300 unsigned offsetCount;
1301
1302 if (aOffsetCount) {
1303 offsetList = aOffsetList;
1304 offsetCount = aOffsetCount;
1305 } else {
1306 offsetList = new VkDeviceSize[1]();
1307 offsetCount = 1;
1308 }
1309
1310 vk::CmdBindVertexBuffers(aCommandBuffer, BindId, offsetCount, &VulkanMemoryBuffer.handle(), offsetList);
1311 BoundCurrent = true;
1312
1313 if (!aOffsetCount) {
1314 delete[] offsetList;
1315 }
1316 }
1317
OneOffDescriptorSet(VkDeviceObj * device,const Bindings & bindings,VkDescriptorSetLayoutCreateFlags layout_flags,void * layout_pnext,VkDescriptorPoolCreateFlags poolFlags,void * allocate_pnext,int buffer_info_size,int image_info_size,int buffer_view_size)1318 OneOffDescriptorSet::OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings,
1319 VkDescriptorSetLayoutCreateFlags layout_flags, void *layout_pnext,
1320 VkDescriptorPoolCreateFlags poolFlags, void *allocate_pnext, int buffer_info_size,
1321 int image_info_size, int buffer_view_size)
1322 : device_{device}, pool_{}, layout_(device, bindings, layout_flags, layout_pnext), set_{} {
1323 VkResult err;
1324 buffer_infos.reserve(buffer_info_size);
1325 image_infos.reserve(image_info_size);
1326 buffer_views.reserve(buffer_view_size);
1327 std::vector<VkDescriptorPoolSize> sizes;
1328 for (const auto &b : bindings) sizes.push_back({b.descriptorType, std::max(1u, b.descriptorCount)});
1329
1330 VkDescriptorPoolCreateInfo dspci = {
1331 VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, poolFlags, 1, uint32_t(sizes.size()), sizes.data()};
1332 err = vk::CreateDescriptorPool(device_->handle(), &dspci, nullptr, &pool_);
1333 if (err != VK_SUCCESS) return;
1334
1335 if ((layout_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) == 0) {
1336 VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, allocate_pnext, pool_, 1,
1337 &layout_.handle()};
1338 err = vk::AllocateDescriptorSets(device_->handle(), &alloc_info, &set_);
1339 }
1340 }
1341
// Destroys the pool; the descriptor set allocated from it is freed implicitly.
OneOffDescriptorSet::~OneOffDescriptorSet() {
    // No need to destroy set-- it's going away with the pool.
    vk::DestroyDescriptorPool(device_->handle(), pool_, nullptr);
}
1346
// True only when pool, layout, and set were all created successfully.
bool OneOffDescriptorSet::Initialized() { return pool_ != VK_NULL_HANDLE && layout_.initialized() && set_ != VK_NULL_HANDLE; }
1348
// Drops all queued descriptor data and pending writes so the set can be
// repopulated from scratch.
void OneOffDescriptorSet::Clear() {
    buffer_infos.clear();
    image_infos.clear();
    buffer_views.clear();
    descriptor_writes.clear();
}
1355
WriteDescriptorBufferInfo(int blinding,VkBuffer buffer,VkDeviceSize size,VkDescriptorType descriptorType,uint32_t count)1356 void OneOffDescriptorSet::WriteDescriptorBufferInfo(int blinding, VkBuffer buffer, VkDeviceSize size,
1357 VkDescriptorType descriptorType, uint32_t count) {
1358 const auto index = buffer_infos.size();
1359
1360 VkDescriptorBufferInfo buffer_info = {};
1361 buffer_info.buffer = buffer;
1362 buffer_info.offset = 0;
1363 buffer_info.range = size;
1364
1365 for (uint32_t i = 0; i < count; ++i) {
1366 buffer_infos.emplace_back(buffer_info);
1367 }
1368
1369 VkWriteDescriptorSet descriptor_write;
1370 memset(&descriptor_write, 0, sizeof(descriptor_write));
1371 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1372 descriptor_write.dstSet = set_;
1373 descriptor_write.dstBinding = blinding;
1374 descriptor_write.descriptorCount = count;
1375 descriptor_write.descriptorType = descriptorType;
1376 descriptor_write.pBufferInfo = &buffer_infos[index];
1377 descriptor_write.pImageInfo = nullptr;
1378 descriptor_write.pTexelBufferView = nullptr;
1379
1380 descriptor_writes.emplace_back(descriptor_write);
1381 }
1382
WriteDescriptorBufferView(int blinding,VkBufferView & buffer_view,VkDescriptorType descriptorType,uint32_t count)1383 void OneOffDescriptorSet::WriteDescriptorBufferView(int blinding, VkBufferView &buffer_view, VkDescriptorType descriptorType,
1384 uint32_t count) {
1385 const auto index = buffer_views.size();
1386
1387 for (uint32_t i = 0; i < count; ++i) {
1388 buffer_views.emplace_back(buffer_view);
1389 }
1390
1391 VkWriteDescriptorSet descriptor_write;
1392 memset(&descriptor_write, 0, sizeof(descriptor_write));
1393 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1394 descriptor_write.dstSet = set_;
1395 descriptor_write.dstBinding = blinding;
1396 descriptor_write.descriptorCount = count;
1397 descriptor_write.descriptorType = descriptorType;
1398 descriptor_write.pTexelBufferView = &buffer_views[index];
1399 descriptor_write.pImageInfo = nullptr;
1400 descriptor_write.pBufferInfo = nullptr;
1401
1402 descriptor_writes.emplace_back(descriptor_write);
1403 }
1404
WriteDescriptorImageInfo(int blinding,VkImageView image_view,VkSampler sampler,VkDescriptorType descriptorType,VkImageLayout imageLayout,uint32_t count)1405 void OneOffDescriptorSet::WriteDescriptorImageInfo(int blinding, VkImageView image_view, VkSampler sampler,
1406 VkDescriptorType descriptorType, VkImageLayout imageLayout, uint32_t count) {
1407 const auto index = image_infos.size();
1408
1409 VkDescriptorImageInfo image_info = {};
1410 image_info.imageView = image_view;
1411 image_info.sampler = sampler;
1412 image_info.imageLayout = imageLayout;
1413
1414 for (uint32_t i = 0; i < count; ++i) {
1415 image_infos.emplace_back(image_info);
1416 }
1417
1418 VkWriteDescriptorSet descriptor_write;
1419 memset(&descriptor_write, 0, sizeof(descriptor_write));
1420 descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1421 descriptor_write.dstSet = set_;
1422 descriptor_write.dstBinding = blinding;
1423 descriptor_write.descriptorCount = count;
1424 descriptor_write.descriptorType = descriptorType;
1425 descriptor_write.pImageInfo = &image_infos[index];
1426 descriptor_write.pBufferInfo = nullptr;
1427 descriptor_write.pTexelBufferView = nullptr;
1428
1429 descriptor_writes.emplace_back(descriptor_write);
1430 }
1431
UpdateDescriptorSets()1432 void OneOffDescriptorSet::UpdateDescriptorSets() {
1433 vk::UpdateDescriptorSets(device_->handle(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
1434 }
1435
// Binds the helper to the owning test; all Vulkan objects are created later
// via InitInfo()/InitState().
CreatePipelineHelper::CreatePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
1437
// Destroys the pipeline cache and pipeline; both calls are no-ops on
// VK_NULL_HANDLE if creation never happened.
CreatePipelineHelper::~CreatePipelineHelper() {
    VkDevice device = layer_test_.device();
    vk::DestroyPipelineCache(device, pipeline_cache_, nullptr);
    vk::DestroyPipeline(device, pipeline_, nullptr);
}
1443
// Default layout: a single uniform buffer visible to all shader stages.
void CreatePipelineHelper::InitDescriptorSetInfo() {
    dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
}
1447
// Default vertex input: no bindings/attributes; triangle-strip assembly.
void CreatePipelineHelper::InitInputAndVertexInfo() {
    vi_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;

    ia_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
}
1454
// Default multisample state: single-sampled, sample shading disabled.
void CreatePipelineHelper::InitMultisampleInfo() {
    pipe_ms_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    pipe_ms_state_ci_.pNext = nullptr;
    pipe_ms_state_ci_.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    pipe_ms_state_ci_.sampleShadingEnable = VK_FALSE;
    pipe_ms_state_ci_.minSampleShading = 1.0;  // ignored while sampleShadingEnable is VK_FALSE
    pipe_ms_state_ci_.pSampleMask = NULL;
}
1463
// Default pipeline layout info: exactly one set layout, bound later by
// InitState() once the descriptor set layout exists.
void CreatePipelineHelper::InitPipelineLayoutInfo() {
    pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
    pipeline_layout_ci_.pSetLayouts = nullptr;  // must bound after it is created
}
1469
// Default viewport state: a single 64x64 viewport and matching scissor.
void CreatePipelineHelper::InitViewportInfo() {
    viewport_ = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
    scissor_ = {{0, 0}, {64, 64}};

    vp_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    vp_state_ci_.pNext = nullptr;
    vp_state_ci_.viewportCount = 1;
    vp_state_ci_.pViewports = &viewport_;  // ignored if dynamic
    vp_state_ci_.scissorCount = 1;
    vp_state_ci_.pScissors = &scissor_;  // ignored if dynamic
}
1481
// Intentionally empty: dyn_state_ci_ stays zero-initialized so that
// LateBindPipelineInfo() can use IsValidVkStruct() to detect whether a test
// filled it in.
void CreatePipelineHelper::InitDynamicStateInfo() {
    // Use a "validity" check on the {} initialized structure to detect initialization
    // during late bind
}
1486
// Builds the default bind-state vertex and fragment shaders and populates the
// stage list from them.
void CreatePipelineHelper::InitShaderInfo() {
    vs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, &layer_test_));
    fs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, &layer_test_));
    // We shouldn't need a fragment shader but add it to be able to run on more devices
    shader_stages_ = {vs_->GetStageCreateInfo(), fs_->GetStageCreateInfo()};
}
1493
// Default rasterization state: filled back-face-culled CCW triangles.
// line_state_ci_ is pre-chained on pNext so line-rasterization tests only
// need to fill it in.
void CreatePipelineHelper::InitRasterizationInfo() {
    rs_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs_state_ci_.pNext = &line_state_ci_;  // chained; see InitLineRasterizationInfo()
    rs_state_ci_.flags = 0;
    rs_state_ci_.depthClampEnable = VK_FALSE;
    rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
    rs_state_ci_.polygonMode = VK_POLYGON_MODE_FILL;
    rs_state_ci_.cullMode = VK_CULL_MODE_BACK_BIT;
    rs_state_ci_.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
    rs_state_ci_.depthBiasEnable = VK_FALSE;
    rs_state_ci_.lineWidth = 1.0F;
}
1506
// Default EXT line rasterization state: default mode, stippling disabled
// (so the zero stipple factor/pattern are unused).
void CreatePipelineHelper::InitLineRasterizationInfo() {
    line_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
    line_state_ci_.pNext = nullptr;
    line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
    line_state_ci_.stippledLineEnable = VK_FALSE;
    line_state_ci_.lineStippleFactor = 0;
    line_state_ci_.lineStipplePattern = 0;
}
1515
InitBlendStateInfo()1516 void CreatePipelineHelper::InitBlendStateInfo() {
1517 cb_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
1518 cb_ci_.logicOpEnable = VK_FALSE;
1519 cb_ci_.logicOp = VK_LOGIC_OP_COPY; // ignored if enable is VK_FALSE above
1520 cb_ci_.attachmentCount = layer_test_.RenderPassInfo().subpassCount;
1521 ASSERT_TRUE(IsValidVkStruct(layer_test_.RenderPassInfo()));
1522 cb_ci_.pAttachments = &cb_attachments_;
1523 for (int i = 0; i < 4; i++) {
1524 cb_ci_.blendConstants[0] = 1.0F;
1525 }
1526 }
1527
// Assembles the graphics pipeline create info from the previously initialized
// fixed-function sub-structures. Layout and shader stages are filled in later
// by LateBindPipelineInfo().
void CreatePipelineHelper::InitGraphicsPipelineInfo() {
    // Color-only rendering in a subpass with no depth/stencil attachment
    // Active Pipeline Shader Stages
    //    Vertex Shader
    //    Fragment Shader
    // Required: Fixed-Function Pipeline Stages
    //    VkPipelineVertexInputStateCreateInfo
    //    VkPipelineInputAssemblyStateCreateInfo
    //    VkPipelineViewportStateCreateInfo
    //    VkPipelineRasterizationStateCreateInfo
    //    VkPipelineMultisampleStateCreateInfo
    //    VkPipelineColorBlendStateCreateInfo
    gp_ci_.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    gp_ci_.pNext = nullptr;
    gp_ci_.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
    gp_ci_.pVertexInputState = &vi_ci_;
    gp_ci_.pInputAssemblyState = &ia_ci_;
    gp_ci_.pTessellationState = nullptr;  // may be late-bound from tess_ci_
    gp_ci_.pViewportState = &vp_state_ci_;
    gp_ci_.pRasterizationState = &rs_state_ci_;
    gp_ci_.pMultisampleState = &pipe_ms_state_ci_;
    gp_ci_.pDepthStencilState = nullptr;
    gp_ci_.pColorBlendState = &cb_ci_;
    gp_ci_.pDynamicState = nullptr;  // may be late-bound from dyn_state_ci_
    gp_ci_.renderPass = layer_test_.renderPass();
}
1554
// Default (empty) pipeline cache create info.
void CreatePipelineHelper::InitPipelineCacheInfo() {
    pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pc_ci_.pNext = nullptr;
    pc_ci_.flags = 0;
    pc_ci_.initialDataSize = 0;
    pc_ci_.pInitialData = nullptr;
}
1562
// Intentionally empty: tess_ci_ stays zero-initialized so LateBindPipelineInfo()
// can detect whether a test filled it in. (Name keeps the original spelling for
// source compatibility.)
void CreatePipelineHelper::InitTesselationState() {
    // TBD -- add shaders and create_info
}
1566
// Populates every create-info structure with defaults. Tests typically call
// this, tweak individual structures, then call InitState() and
// CreateGraphicsPipeline().
void CreatePipelineHelper::InitInfo() {
    InitDescriptorSetInfo();
    InitInputAndVertexInfo();
    InitMultisampleInfo();
    InitPipelineLayoutInfo();
    InitViewportInfo();
    InitDynamicStateInfo();
    InitShaderInfo();
    InitRasterizationInfo();
    InitLineRasterizationInfo();
    InitBlendStateInfo();
    InitGraphicsPipelineInfo();
    InitPipelineCacheInfo();
}
1581
// Creates the Vulkan objects the pipeline depends on: the descriptor set, a
// pipeline layout built from it (preserving any push-constant ranges the test
// placed in pipeline_layout_ci_), and the pipeline cache.
void CreatePipelineHelper::InitState() {
    VkResult err;
    descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
    ASSERT_TRUE(descriptor_set_->Initialized());

    // Copy out any push-constant ranges the test configured; an empty range
    // set (null pointer, zero count) yields an empty vector.
    const std::vector<VkPushConstantRange> push_ranges(
        pipeline_layout_ci_.pPushConstantRanges,
        pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
    pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);

    err = vk::CreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
    ASSERT_VK_SUCCESS(err);
}
1595
// Binds values that only exist after InitState()/test setup: the pipeline
// layout handle, the shader stage array, and the optional tessellation/dynamic
// state structs (attached only if their sType shows the test initialized them
// and the test did not already set the pointer).
void CreatePipelineHelper::LateBindPipelineInfo() {
    // By value or dynamically located items must be late bound
    gp_ci_.layout = pipeline_layout_.handle();
    gp_ci_.stageCount = shader_stages_.size();
    gp_ci_.pStages = shader_stages_.data();
    if ((gp_ci_.pTessellationState == nullptr) && IsValidVkStruct(tess_ci_)) {
        gp_ci_.pTessellationState = &tess_ci_;
    }
    if ((gp_ci_.pDynamicState == nullptr) && IsValidVkStruct(dyn_state_ci_)) {
        gp_ci_.pDynamicState = &dyn_state_ci_;
    }
}
1608
CreateGraphicsPipeline(bool implicit_destroy,bool do_late_bind)1609 VkResult CreatePipelineHelper::CreateGraphicsPipeline(bool implicit_destroy, bool do_late_bind) {
1610 VkResult err;
1611 if (do_late_bind) {
1612 LateBindPipelineInfo();
1613 }
1614 if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1615 vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1616 pipeline_ = VK_NULL_HANDLE;
1617 }
1618 err = vk::CreateGraphicsPipelines(layer_test_.device(), pipeline_cache_, 1, &gp_ci_, NULL, &pipeline_);
1619 return err;
1620 }
1621
// Binds the helper to the owning test; Vulkan objects are created later.
CreateComputePipelineHelper::CreateComputePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
1623
// Destroys the pipeline cache and pipeline; both are no-ops on VK_NULL_HANDLE.
CreateComputePipelineHelper::~CreateComputePipelineHelper() {
    VkDevice device = layer_test_.device();
    vk::DestroyPipelineCache(device, pipeline_cache_, nullptr);
    vk::DestroyPipeline(device, pipeline_, nullptr);
}
1629
// Default layout: a single uniform buffer visible to all shader stages.
void CreateComputePipelineHelper::InitDescriptorSetInfo() {
    dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
}
1633
// Default pipeline layout info: one set layout, bound later by InitState().
void CreateComputePipelineHelper::InitPipelineLayoutInfo() {
    pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
    pipeline_layout_ci_.pSetLayouts = nullptr;  // must bound after it is created
}
1639
// Builds the default minimal compute shader; compute pipelines have exactly
// one stage, so nothing else is needed here.
void CreateComputePipelineHelper::InitShaderInfo() {
    cs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, &layer_test_));
    // (Original comment about a fragment shader was stale copy-paste from the
    // graphics helper and has been removed.)
}
1644
// Default compute pipeline create info; layout and stage are late-bound.
void CreateComputePipelineHelper::InitComputePipelineInfo() {
    cp_ci_.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
    cp_ci_.pNext = nullptr;
    cp_ci_.flags = 0;
}
1650
// Default (empty) pipeline cache create info.
void CreateComputePipelineHelper::InitPipelineCacheInfo() {
    pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pc_ci_.pNext = nullptr;
    pc_ci_.flags = 0;
    pc_ci_.initialDataSize = 0;
    pc_ci_.pInitialData = nullptr;
}
1658
// Populates every create-info structure with defaults; tests tweak them and
// then call InitState()/CreateComputePipeline().
void CreateComputePipelineHelper::InitInfo() {
    InitDescriptorSetInfo();
    InitPipelineLayoutInfo();
    InitShaderInfo();
    InitComputePipelineInfo();
    InitPipelineCacheInfo();
}
1666
// Creates the descriptor set, a pipeline layout built from it (preserving any
// push-constant ranges the test configured), and the pipeline cache.
void CreateComputePipelineHelper::InitState() {
    VkResult err;
    descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
    ASSERT_TRUE(descriptor_set_->Initialized());

    // Copy out any push-constant ranges; null/zero yields an empty vector.
    const std::vector<VkPushConstantRange> push_ranges(
        pipeline_layout_ci_.pPushConstantRanges,
        pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
    pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);

    err = vk::CreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
    ASSERT_VK_SUCCESS(err);
}
1680
LateBindPipelineInfo()1681 void CreateComputePipelineHelper::LateBindPipelineInfo() {
1682 // By value or dynamically located items must be late bound
1683 cp_ci_.layout = pipeline_layout_.handle();
1684 cp_ci_.stage = cs_.get()->GetStageCreateInfo();
1685 }
1686
CreateComputePipeline(bool implicit_destroy,bool do_late_bind)1687 VkResult CreateComputePipelineHelper::CreateComputePipeline(bool implicit_destroy, bool do_late_bind) {
1688 VkResult err;
1689 if (do_late_bind) {
1690 LateBindPipelineInfo();
1691 }
1692 if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1693 vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1694 pipeline_ = VK_NULL_HANDLE;
1695 }
1696 err = vk::CreateComputePipelines(layer_test_.device(), pipeline_cache_, 1, &cp_ci_, NULL, &pipeline_);
1697 return err;
1698 }
1699
// Binds the helper to the owning test; Vulkan objects are created later.
CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipelineHelper(VkLayerTest &test) : layer_test_(test) {}
// Destroys the pipeline cache and pipeline; both are no-ops on VK_NULL_HANDLE.
CreateNVRayTracingPipelineHelper::~CreateNVRayTracingPipelineHelper() {
    VkDevice device = layer_test_.device();
    vk::DestroyPipelineCache(device, pipeline_cache_, nullptr);
    vk::DestroyPipeline(device, pipeline_, nullptr);
}
1706
InitInstanceExtensions(VkLayerTest & test,std::vector<const char * > & instance_extension_names)1707 bool CreateNVRayTracingPipelineHelper::InitInstanceExtensions(VkLayerTest &test,
1708 std::vector<const char *> &instance_extension_names) {
1709 if (test.InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1710 instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1711 } else {
1712 printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
1713 VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1714 return false;
1715 }
1716 return true;
1717 }
1718
InitDeviceExtensions(VkLayerTest & test,std::vector<const char * > & device_extension_names)1719 bool CreateNVRayTracingPipelineHelper::InitDeviceExtensions(VkLayerTest &test, std::vector<const char *> &device_extension_names) {
1720 std::array<const char *, 2> required_device_extensions = {
1721 {VK_NV_RAY_TRACING_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}};
1722 for (auto device_extension : required_device_extensions) {
1723 if (test.DeviceExtensionSupported(test.gpu(), nullptr, device_extension)) {
1724 device_extension_names.push_back(device_extension);
1725 } else {
1726 printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
1727 return false;
1728 }
1729 }
1730 return true;
1731 }
1732
InitShaderGroups()1733 void CreateNVRayTracingPipelineHelper::InitShaderGroups() {
1734 {
1735 VkRayTracingShaderGroupCreateInfoNV group = {};
1736 group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
1737 group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
1738 group.generalShader = 0;
1739 group.closestHitShader = VK_SHADER_UNUSED_NV;
1740 group.anyHitShader = VK_SHADER_UNUSED_NV;
1741 group.intersectionShader = VK_SHADER_UNUSED_NV;
1742 groups_.push_back(group);
1743 }
1744 {
1745 VkRayTracingShaderGroupCreateInfoNV group = {};
1746 group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
1747 group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
1748 group.generalShader = VK_SHADER_UNUSED_NV;
1749 group.closestHitShader = 1;
1750 group.anyHitShader = VK_SHADER_UNUSED_NV;
1751 group.intersectionShader = VK_SHADER_UNUSED_NV;
1752 groups_.push_back(group);
1753 }
1754 {
1755 VkRayTracingShaderGroupCreateInfoNV group = {};
1756 group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
1757 group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
1758 group.generalShader = 2;
1759 group.closestHitShader = VK_SHADER_UNUSED_NV;
1760 group.anyHitShader = VK_SHADER_UNUSED_NV;
1761 group.intersectionShader = VK_SHADER_UNUSED_NV;
1762 groups_.push_back(group);
1763 }
1764 }
1765
InitShaderGroupsKHR()1766 void CreateNVRayTracingPipelineHelper::InitShaderGroupsKHR() {
1767 {
1768 VkRayTracingShaderGroupCreateInfoKHR group = {};
1769 group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR;
1770 group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR;
1771 group.generalShader = 0;
1772 group.closestHitShader = VK_SHADER_UNUSED_KHR;
1773 group.anyHitShader = VK_SHADER_UNUSED_KHR;
1774 group.intersectionShader = VK_SHADER_UNUSED_KHR;
1775 groups_KHR_.push_back(group);
1776 }
1777 {
1778 VkRayTracingShaderGroupCreateInfoKHR group = {};
1779 group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR;
1780 group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR;
1781 group.generalShader = VK_SHADER_UNUSED_KHR;
1782 group.closestHitShader = 1;
1783 group.anyHitShader = VK_SHADER_UNUSED_KHR;
1784 group.intersectionShader = VK_SHADER_UNUSED_KHR;
1785 groups_KHR_.push_back(group);
1786 }
1787 {
1788 VkRayTracingShaderGroupCreateInfoKHR group = {};
1789 group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR;
1790 group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR;
1791 group.generalShader = 2;
1792 group.closestHitShader = VK_SHADER_UNUSED_KHR;
1793 group.anyHitShader = VK_SHADER_UNUSED_KHR;
1794 group.intersectionShader = VK_SHADER_UNUSED_KHR;
1795 groups_KHR_.push_back(group);
1796 }
1797 }
// Default ray tracing layout: a storage image (output) and an acceleration
// structure, both visible to the ray-gen stage.
void CreateNVRayTracingPipelineHelper::InitDescriptorSetInfo() {
    dsl_bindings_ = {
        {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
        {1, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
    };
}
1804
// Default pipeline layout info: one set layout, bound later by InitState().
void CreateNVRayTracingPipelineHelper::InitPipelineLayoutInfo() {
    pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
    pipeline_layout_ci_.pSetLayouts = nullptr;  // must bound after it is created
}
1810
// Compiles the default GLSL ray-gen, closest-hit, and miss shaders and
// populates the stage list from them (stage order matches the indices used by
// InitShaderGroups()/InitShaderGroupsKHR(): 0=ray-gen, 1=closest-hit, 2=miss).
void CreateNVRayTracingPipelineHelper::InitShaderInfo() {
    static const char rayGenShaderText[] =
        "#version 460 core                                                \n"
        "#extension GL_NV_ray_tracing : require                           \n"
        "layout(set = 0, binding = 0, rgba8) uniform image2D image;       \n"
        "layout(set = 0, binding = 1) uniform accelerationStructureNV as; \n"
        "                                                                 \n"
        "layout(location = 0) rayPayloadNV float payload;                 \n"
        "                                                                 \n"
        "void main()                                                      \n"
        "{                                                                \n"
        "   vec4 col = vec4(0, 0, 0, 1);                                  \n"
        "                                                                 \n"
        "   vec3 origin = vec3(float(gl_LaunchIDNV.x)/float(gl_LaunchSizeNV.x), "
        "float(gl_LaunchIDNV.y)/float(gl_LaunchSizeNV.y), "
        "1.0);                                                            \n"
        "   vec3 dir = vec3(0.0, 0.0, -1.0);                              \n"
        "                                                                 \n"
        "   payload = 0.5;                                                \n"
        "   traceNV(as, gl_RayFlagsCullBackFacingTrianglesNV, 0xff, 0, 1, 0, origin, 0.0, dir, 1000.0, 0); \n"
        "                                                                 \n"
        "   col.y = payload;                                              \n"
        "                                                                 \n"
        "   imageStore(image, ivec2(gl_LaunchIDNV.xy), col);              \n"
        "}\n";

    static char const closestHitShaderText[] =
        "#version 460 core                              \n"
        "#extension GL_NV_ray_tracing : require         \n"
        "layout(location = 0) rayPayloadInNV float hitValue;             \n"
        "                                               \n"
        "void main() {                                  \n"
        "    hitValue = 1.0;                            \n"
        "}                                              \n";

    static char const missShaderText[] =
        "#version 460 core                              \n"
        "#extension GL_NV_ray_tracing : require         \n"
        "layout(location = 0) rayPayloadInNV float hitValue; \n"
        "                                               \n"
        "void main() {                                  \n"
        "    hitValue = 0.0;                            \n"
        "}                                              \n";

    rgs_.reset(new VkShaderObj(layer_test_.DeviceObj(), rayGenShaderText, VK_SHADER_STAGE_RAYGEN_BIT_NV, &layer_test_));
    chs_.reset(new VkShaderObj(layer_test_.DeviceObj(), closestHitShaderText, VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, &layer_test_));
    mis_.reset(new VkShaderObj(layer_test_.DeviceObj(), missShaderText, VK_SHADER_STAGE_MISS_BIT_NV, &layer_test_));

    shader_stages_ = {rgs_->GetStageCreateInfo(), chs_->GetStageCreateInfo(), mis_->GetStageCreateInfo()};
}
1861
// Fills the NV ray tracing pipeline create info from the prepared stages and
// groups; layout is late-bound in LateBindPipelineInfo().
void CreateNVRayTracingPipelineHelper::InitNVRayTracingPipelineInfo() {
    rp_ci_.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
    rp_ci_.maxRecursionDepth = 0;
    rp_ci_.stageCount = shader_stages_.size();
    rp_ci_.pStages = shader_stages_.data();
    rp_ci_.groupCount = groups_.size();
    rp_ci_.pGroups = groups_.data();
}
1870
InitKHRRayTracingPipelineInfo()1871 void CreateNVRayTracingPipelineHelper::InitKHRRayTracingPipelineInfo() {
1872 rp_ci_KHR_.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
1873 rp_ci_KHR_.maxRecursionDepth = 0;
1874 rp_ci_KHR_.stageCount = shader_stages_.size();
1875 rp_ci_KHR_.pStages = shader_stages_.data();
1876 rp_ci_KHR_.groupCount = groups_KHR_.size();
1877 rp_ci_KHR_.pGroups = groups_KHR_.data();
1878 }
1879
// Default (empty) pipeline cache create info.
void CreateNVRayTracingPipelineHelper::InitPipelineCacheInfo() {
    pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pc_ci_.pNext = nullptr;
    pc_ci_.flags = 0;
    pc_ci_.initialDataSize = 0;
    pc_ci_.pInitialData = nullptr;
}
1887
InitInfo(bool isKHR)1888 void CreateNVRayTracingPipelineHelper::InitInfo(bool isKHR) {
1889 isKHR ? InitShaderGroupsKHR() : InitShaderGroups();
1890 InitDescriptorSetInfo();
1891 InitPipelineLayoutInfo();
1892 InitShaderInfo();
1893 isKHR ? InitKHRRayTracingPipelineInfo() : InitNVRayTracingPipelineInfo();
1894 InitPipelineCacheInfo();
1895 }
1896
// Creates the descriptor set, a pipeline layout built from it, and the
// pipeline cache.
void CreateNVRayTracingPipelineHelper::InitState() {
    VkResult err;
    descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
    ASSERT_TRUE(descriptor_set_->Initialized());

    pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_});

    err = vk::CreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
    ASSERT_VK_SUCCESS(err);
}
1907
// Binds the pipeline layout handle and shader stage array into the NV or KHR
// create info (selected by isKHR); these only exist after InitState().
void CreateNVRayTracingPipelineHelper::LateBindPipelineInfo(bool isKHR) {
    // By value or dynamically located items must be late bound
    if (isKHR) {
        rp_ci_KHR_.layout = pipeline_layout_.handle();
        rp_ci_KHR_.stageCount = shader_stages_.size();
        rp_ci_KHR_.pStages = shader_stages_.data();
    } else {
        rp_ci_.layout = pipeline_layout_.handle();
        rp_ci_.stageCount = shader_stages_.size();
        rp_ci_.pStages = shader_stages_.data();
    }
}
1920
CreateNVRayTracingPipeline(bool implicit_destroy,bool do_late_bind)1921 VkResult CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipeline(bool implicit_destroy, bool do_late_bind) {
1922 VkResult err;
1923 if (do_late_bind) {
1924 LateBindPipelineInfo();
1925 }
1926 if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1927 vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1928 pipeline_ = VK_NULL_HANDLE;
1929 }
1930
1931 PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV =
1932 (PFN_vkCreateRayTracingPipelinesNV)vk::GetInstanceProcAddr(layer_test_.instance(), "vkCreateRayTracingPipelinesNV");
1933 err = vkCreateRayTracingPipelinesNV(layer_test_.device(), pipeline_cache_, 1, &rp_ci_, nullptr, &pipeline_);
1934 return err;
1935 }
1936
CreateKHRRayTracingPipeline(bool implicit_destroy,bool do_late_bind)1937 VkResult CreateNVRayTracingPipelineHelper::CreateKHRRayTracingPipeline(bool implicit_destroy, bool do_late_bind) {
1938 VkResult err;
1939 if (do_late_bind) {
1940 LateBindPipelineInfo(true /*isKHR*/);
1941 }
1942 if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
1943 vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
1944 pipeline_ = VK_NULL_HANDLE;
1945 }
1946 PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR =
1947 (PFN_vkCreateRayTracingPipelinesKHR)vk::GetInstanceProcAddr(layer_test_.instance(), "vkCreateRayTracingPipelinesKHR");
1948 err = vkCreateRayTracingPipelinesKHR(layer_test_.device(), pipeline_cache_, 1, &rp_ci_KHR_, nullptr, &pipeline_);
1949 return err;
1950 }
1951
namespace chain_util {
// Returns the front of the accumulated pNext chain (nullptr if nothing was
// chained).
const void *ExtensionChain::Head() const { return head_; }
}  // namespace chain_util
1955
// Tears down in reverse order of creation: command buffers before their pool,
// then the queue wrapper.
BarrierQueueFamilyTestHelper::QueueFamilyObjs::~QueueFamilyObjs() {
    delete command_buffer2;
    delete command_buffer;
    delete command_pool;
    delete queue;
}
1962
Init(VkDeviceObj * device,uint32_t qf_index,VkQueue qf_queue,VkCommandPoolCreateFlags cp_flags)1963 void BarrierQueueFamilyTestHelper::QueueFamilyObjs::Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue,
1964 VkCommandPoolCreateFlags cp_flags) {
1965 index = qf_index;
1966 queue = new VkQueueObj(qf_queue, qf_index);
1967 command_pool = new VkCommandPoolObj(device, qf_index, cp_flags);
1968 command_buffer = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
1969 command_buffer2 = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
1970 };
1971
// Builds a QueueFamilyObjs entry (queue + pool + two command buffers) for each
// requested family index; the first index becomes the fallback used by
// GetQueueFamilyInfo(). An empty index list leaves the context unpopulated.
BarrierQueueFamilyTestHelper::Context::Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices)
    : layer_test(test) {
    if (0 == queue_family_indices.size()) {
        return;  // This is invalid
    }
    VkDeviceObj *device_obj = layer_test->DeviceObj();
    queue_families.reserve(queue_family_indices.size());
    default_index = queue_family_indices[0];
    for (auto qfi : queue_family_indices) {
        VkQueue queue = device_obj->queue_family_queues(qfi)[0]->handle();
        // Default-construct in the map first, then Init() in place.
        queue_families.emplace(std::make_pair(qfi, QueueFamilyObjs()));
        queue_families[qfi].Init(device_obj, qfi, queue, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
    }
    Reset();
}
1987
// Waits for the device to go idle, then resets every per-family command pool
// so each test case starts with fresh command buffers.
void BarrierQueueFamilyTestHelper::Context::Reset() {
    layer_test->DeviceObj()->wait();
    for (auto &qf : queue_families) {
        vk::ResetCommandPool(layer_test->device(), qf.second.command_pool->handle(), 0);
    }
}
1994
// Binds the helper to a shared Context and creates the test image object on
// the context's device (image storage is set up later in Init()).
BarrierQueueFamilyTestHelper::BarrierQueueFamilyTestHelper(Context *context)
    : context_(context), image_(context->layer_test->DeviceObj()) {}
1997
// Creates the image and buffer whose barriers the tests submit, and builds
// template image/buffer barriers (same layout/access on both sides; tests
// overwrite the queue family indices per case).
// NOTE(review): `families` is forwarded to the image/buffer init helpers —
// presumably it selects concurrent sharing across those families; confirm in
// the helper implementations.
void BarrierQueueFamilyTestHelper::Init(std::vector<uint32_t> *families, bool image_memory, bool buffer_memory) {
    VkDeviceObj *device_obj = context_->layer_test->DeviceObj();

    image_.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0, families,
                image_memory);

    ASSERT_TRUE(image_.initialized());

    image_barrier_ = image_.image_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, image_.Layout(),
                                                 image_.Layout(), image_.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1));

    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    buffer_.init_as_src_and_dst(*device_obj, 256, mem_prop, families, buffer_memory);
    ASSERT_TRUE(buffer_.initialized());
    buffer_barrier_ = buffer_.buffer_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, 0, VK_WHOLE_SIZE);
}
2014
GetQueueFamilyInfo(Context * context,uint32_t qfi)2015 BarrierQueueFamilyTestHelper::QueueFamilyObjs *BarrierQueueFamilyTestHelper::GetQueueFamilyInfo(Context *context, uint32_t qfi) {
2016 QueueFamilyObjs *qf;
2017
2018 auto qf_it = context->queue_families.find(qfi);
2019 if (qf_it != context->queue_families.end()) {
2020 qf = &(qf_it->second);
2021 } else {
2022 qf = &(context->queue_families[context->default_index]);
2023 }
2024 return qf;
2025 }
2026
operator ()(std::string img_err,std::string buf_err,uint32_t src,uint32_t dst,bool positive,uint32_t queue_family_index,Modifier mod)2027 void BarrierQueueFamilyTestHelper::operator()(std::string img_err, std::string buf_err, uint32_t src, uint32_t dst, bool positive,
2028 uint32_t queue_family_index, Modifier mod) {
2029 auto &monitor = context_->layer_test->Monitor();
2030 if (img_err.length()) monitor.SetDesiredFailureMsg(kErrorBit | kWarningBit, img_err);
2031 if (buf_err.length()) monitor.SetDesiredFailureMsg(kErrorBit | kWarningBit, buf_err);
2032
2033 image_barrier_.srcQueueFamilyIndex = src;
2034 image_barrier_.dstQueueFamilyIndex = dst;
2035 buffer_barrier_.srcQueueFamilyIndex = src;
2036 buffer_barrier_.dstQueueFamilyIndex = dst;
2037
2038 QueueFamilyObjs *qf = GetQueueFamilyInfo(context_, queue_family_index);
2039
2040 VkCommandBufferObj *command_buffer = qf->command_buffer;
2041 for (int cb_repeat = 0; cb_repeat < (mod == Modifier::DOUBLE_COMMAND_BUFFER ? 2 : 1); cb_repeat++) {
2042 command_buffer->begin();
2043 for (int repeat = 0; repeat < (mod == Modifier::DOUBLE_RECORD ? 2 : 1); repeat++) {
2044 vk::CmdPipelineBarrier(command_buffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
2045 VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buffer_barrier_, 1, &image_barrier_);
2046 }
2047 command_buffer->end();
2048 command_buffer = qf->command_buffer2; // Second pass (if any) goes to the secondary command_buffer.
2049 }
2050
2051 if (queue_family_index != kInvalidQueueFamily) {
2052 if (mod == Modifier::DOUBLE_COMMAND_BUFFER) {
2053 // the Fence resolves to VK_NULL_HANLE... i.e. no fence
2054 qf->queue->submit({{qf->command_buffer, qf->command_buffer2}}, vk_testing::Fence(), positive);
2055 } else {
2056 qf->command_buffer->QueueCommandBuffer(positive); // Check for success on positive tests only
2057 }
2058 }
2059
2060 if (positive) {
2061 monitor.VerifyNotFound();
2062 } else {
2063 monitor.VerifyFound();
2064 }
2065 context_->Reset();
2066 };
2067
InitFrameworkForRayTracingTest(VkRenderFramework * renderFramework,bool isKHR,std::vector<const char * > & instance_extension_names,std::vector<const char * > & device_extension_names,void * user_data,bool need_gpu_validation,bool need_push_descriptors,bool deferred_state_init)2068 bool InitFrameworkForRayTracingTest(VkRenderFramework *renderFramework, bool isKHR,
2069 std::vector<const char *> &instance_extension_names,
2070 std::vector<const char *> &device_extension_names, void *user_data, bool need_gpu_validation,
2071 bool need_push_descriptors, bool deferred_state_init) {
2072 const std::array<const char *, 1> required_instance_extensions = {{VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}};
2073 for (const char *required_instance_extension : required_instance_extensions) {
2074 if (renderFramework->InstanceExtensionSupported(required_instance_extension)) {
2075 instance_extension_names.push_back(required_instance_extension);
2076 } else {
2077 printf("%s %s instance extension not supported, skipping test\n", kSkipPrefix, required_instance_extension);
2078 return false;
2079 }
2080 }
2081
2082 VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
2083 VkValidationFeatureDisableEXT disables[] = {
2084 VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
2085 VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT};
2086 VkValidationFeaturesEXT features = {};
2087 features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
2088 features.enabledValidationFeatureCount = 1;
2089 features.pEnabledValidationFeatures = enables;
2090 features.disabledValidationFeatureCount = 4;
2091 features.pDisabledValidationFeatures = disables;
2092
2093 VkValidationFeaturesEXT *enabled_features = need_gpu_validation ? &features : nullptr;
2094
2095 renderFramework->InitFramework(user_data, enabled_features);
2096
2097 if (renderFramework->IsPlatform(kMockICD) || renderFramework->DeviceSimulation()) {
2098 printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
2099 return false;
2100 }
2101
2102 std::vector<const char *> required_device_extensions;
2103 required_device_extensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
2104 if (isKHR) {
2105 required_device_extensions.push_back(VK_KHR_RAY_TRACING_EXTENSION_NAME);
2106 required_device_extensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
2107 required_device_extensions.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
2108 required_device_extensions.push_back(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
2109 required_device_extensions.push_back(VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME);
2110 required_device_extensions.push_back(VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME);
2111 } else {
2112 required_device_extensions.push_back(VK_NV_RAY_TRACING_EXTENSION_NAME);
2113 }
2114 if (need_push_descriptors) {
2115 required_device_extensions.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
2116 }
2117
2118 for (const char *required_device_extension : required_device_extensions) {
2119 if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, required_device_extension)) {
2120 device_extension_names.push_back(required_device_extension);
2121 } else {
2122 printf("%s %s device extension not supported, skipping test\n", kSkipPrefix, required_device_extension);
2123 return false;
2124 }
2125 }
2126 if (!deferred_state_init) renderFramework->InitState();
2127 return true;
2128 }
2129
GetSimpleGeometryForAccelerationStructureTests(const VkDeviceObj & device,VkBufferObj * vbo,VkBufferObj * ibo,VkGeometryNV * geometry)2130 void GetSimpleGeometryForAccelerationStructureTests(const VkDeviceObj &device, VkBufferObj *vbo, VkBufferObj *ibo,
2131 VkGeometryNV *geometry) {
2132 vbo->init(device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
2133 VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
2134 ibo->init(device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
2135 VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
2136
2137 const std::vector<float> vertices = {1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, -1.0f, 0.0f, 0.0f};
2138 const std::vector<uint32_t> indicies = {0, 1, 2};
2139
2140 uint8_t *mapped_vbo_buffer_data = (uint8_t *)vbo->memory().map();
2141 std::memcpy(mapped_vbo_buffer_data, (uint8_t *)vertices.data(), sizeof(float) * vertices.size());
2142 vbo->memory().unmap();
2143
2144 uint8_t *mapped_ibo_buffer_data = (uint8_t *)ibo->memory().map();
2145 std::memcpy(mapped_ibo_buffer_data, (uint8_t *)indicies.data(), sizeof(uint32_t) * indicies.size());
2146 ibo->memory().unmap();
2147
2148 *geometry = {};
2149 geometry->sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
2150 geometry->geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
2151 geometry->geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
2152 geometry->geometry.triangles.vertexData = vbo->handle();
2153 geometry->geometry.triangles.vertexOffset = 0;
2154 geometry->geometry.triangles.vertexCount = 3;
2155 geometry->geometry.triangles.vertexStride = 12;
2156 geometry->geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
2157 geometry->geometry.triangles.indexData = ibo->handle();
2158 geometry->geometry.triangles.indexOffset = 0;
2159 geometry->geometry.triangles.indexCount = 3;
2160 geometry->geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
2161 geometry->geometry.triangles.transformData = VK_NULL_HANDLE;
2162 geometry->geometry.triangles.transformOffset = 0;
2163 geometry->geometry.aabbs = {};
2164 geometry->geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
2165 }
2166
OOBRayTracingShadersTestBody(bool gpu_assisted)2167 void VkLayerTest::OOBRayTracingShadersTestBody(bool gpu_assisted) {
2168 std::array<const char *, 1> required_instance_extensions = {{VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}};
2169 for (auto instance_extension : required_instance_extensions) {
2170 if (InstanceExtensionSupported(instance_extension)) {
2171 m_instance_extension_names.push_back(instance_extension);
2172 } else {
2173 printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix, instance_extension);
2174 return;
2175 }
2176 }
2177
2178 VkValidationFeatureEnableEXT validation_feature_enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
2179 VkValidationFeatureDisableEXT validation_feature_disables[] = {
2180 VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
2181 VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT};
2182 VkValidationFeaturesEXT validation_features = {};
2183 validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
2184 validation_features.enabledValidationFeatureCount = 1;
2185 validation_features.pEnabledValidationFeatures = validation_feature_enables;
2186 validation_features.disabledValidationFeatureCount = 4;
2187 validation_features.pDisabledValidationFeatures = validation_feature_disables;
2188 bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(
2189 this, m_instance_extension_names, m_device_extension_names, gpu_assisted ? &validation_features : nullptr, m_errorMonitor);
2190
2191 if (IsPlatform(kMockICD) || DeviceSimulation()) {
2192 printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
2193 return;
2194 }
2195
2196 std::array<const char *, 2> required_device_extensions = {
2197 {VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_NV_RAY_TRACING_EXTENSION_NAME}};
2198 for (auto device_extension : required_device_extensions) {
2199 if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
2200 m_device_extension_names.push_back(device_extension);
2201 } else {
2202 printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
2203 return;
2204 }
2205 }
2206
2207 VkPhysicalDeviceFeatures2KHR features2 = {};
2208 auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
2209 if (descriptor_indexing) {
2210 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
2211 (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
2212 ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
2213
2214 features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
2215 vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
2216
2217 if (!indexing_features.runtimeDescriptorArray || !indexing_features.descriptorBindingPartiallyBound ||
2218 !indexing_features.descriptorBindingSampledImageUpdateAfterBind ||
2219 !indexing_features.descriptorBindingVariableDescriptorCount) {
2220 printf("Not all descriptor indexing features supported, skipping descriptor indexing tests\n");
2221 descriptor_indexing = false;
2222 }
2223 }
2224 VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
2225 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
2226
2227 PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
2228 (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
2229 ASSERT_TRUE(vkGetPhysicalDeviceProperties2KHR != nullptr);
2230
2231 auto ray_tracing_properties = lvl_init_struct<VkPhysicalDeviceRayTracingPropertiesNV>();
2232 auto properties2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&ray_tracing_properties);
2233 vkGetPhysicalDeviceProperties2KHR(gpu(), &properties2);
2234 if (ray_tracing_properties.maxTriangleCount == 0) {
2235 printf("%s Did not find required ray tracing properties; skipped.\n", kSkipPrefix);
2236 return;
2237 }
2238
2239 VkQueue ray_tracing_queue = m_device->m_queue;
2240 uint32_t ray_tracing_queue_family_index = 0;
2241
2242 // If supported, run on the compute only queue.
2243 uint32_t compute_only_queue_family_index = m_device->QueueFamilyMatching(VK_QUEUE_COMPUTE_BIT, VK_QUEUE_GRAPHICS_BIT);
2244 if (compute_only_queue_family_index != UINT32_MAX) {
2245 const auto &compute_only_queues = m_device->queue_family_queues(compute_only_queue_family_index);
2246 if (!compute_only_queues.empty()) {
2247 ray_tracing_queue = compute_only_queues[0]->handle();
2248 ray_tracing_queue_family_index = compute_only_queue_family_index;
2249 }
2250 }
2251
2252 VkCommandPoolObj ray_tracing_command_pool(m_device, ray_tracing_queue_family_index,
2253 VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
2254 VkCommandBufferObj ray_tracing_command_buffer(m_device, &ray_tracing_command_pool);
2255
2256 struct AABB {
2257 float min_x;
2258 float min_y;
2259 float min_z;
2260 float max_x;
2261 float max_y;
2262 float max_z;
2263 };
2264
2265 const std::vector<AABB> aabbs = {{-1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f}};
2266
2267 struct VkGeometryInstanceNV {
2268 float transform[12];
2269 uint32_t instanceCustomIndex : 24;
2270 uint32_t mask : 8;
2271 uint32_t instanceOffset : 24;
2272 uint32_t flags : 8;
2273 uint64_t accelerationStructureHandle;
2274 };
2275
2276 VkDeviceSize aabb_buffer_size = sizeof(AABB) * aabbs.size();
2277 VkBufferObj aabb_buffer;
2278 aabb_buffer.init(*m_device, aabb_buffer_size, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
2279 VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
2280
2281 uint8_t *mapped_aabb_buffer_data = (uint8_t *)aabb_buffer.memory().map();
2282 std::memcpy(mapped_aabb_buffer_data, (uint8_t *)aabbs.data(), static_cast<std::size_t>(aabb_buffer_size));
2283 aabb_buffer.memory().unmap();
2284
2285 VkGeometryNV geometry = {};
2286 geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
2287 geometry.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
2288 geometry.geometry.triangles = {};
2289 geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
2290 geometry.geometry.aabbs = {};
2291 geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
2292 geometry.geometry.aabbs.aabbData = aabb_buffer.handle();
2293 geometry.geometry.aabbs.numAABBs = static_cast<uint32_t>(aabbs.size());
2294 geometry.geometry.aabbs.offset = 0;
2295 geometry.geometry.aabbs.stride = static_cast<VkDeviceSize>(sizeof(AABB));
2296 geometry.flags = 0;
2297
2298 VkAccelerationStructureInfoNV bot_level_as_info = {};
2299 bot_level_as_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
2300 bot_level_as_info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
2301 bot_level_as_info.instanceCount = 0;
2302 bot_level_as_info.geometryCount = 1;
2303 bot_level_as_info.pGeometries = &geometry;
2304
2305 VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
2306 bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
2307 bot_level_as_create_info.info = bot_level_as_info;
2308
2309 VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
2310
2311 const std::vector<VkGeometryInstanceNV> instances = {
2312 VkGeometryInstanceNV{
2313 {
2314 // clang-format off
2315 1.0f, 0.0f, 0.0f, 0.0f,
2316 0.0f, 1.0f, 0.0f, 0.0f,
2317 0.0f, 0.0f, 1.0f, 0.0f,
2318 // clang-format on
2319 },
2320 0,
2321 0xFF,
2322 0,
2323 VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
2324 bot_level_as.opaque_handle(),
2325 },
2326 };
2327
2328 VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV) * instances.size();
2329 VkBufferObj instance_buffer;
2330 instance_buffer.init(*m_device, instance_buffer_size,
2331 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
2332 VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
2333
2334 uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
2335 std::memcpy(mapped_instance_buffer_data, (uint8_t *)instances.data(), static_cast<std::size_t>(instance_buffer_size));
2336 instance_buffer.memory().unmap();
2337
2338 VkAccelerationStructureInfoNV top_level_as_info = {};
2339 top_level_as_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
2340 top_level_as_info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
2341 top_level_as_info.instanceCount = 1;
2342 top_level_as_info.geometryCount = 0;
2343
2344 VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
2345 top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
2346 top_level_as_create_info.info = top_level_as_info;
2347
2348 VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
2349
2350 VkDeviceSize scratch_buffer_size = std::max(bot_level_as.build_scratch_memory_requirements().memoryRequirements.size,
2351 top_level_as.build_scratch_memory_requirements().memoryRequirements.size);
2352 VkBufferObj scratch_buffer;
2353 scratch_buffer.init(*m_device, scratch_buffer_size, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
2354
2355 ray_tracing_command_buffer.begin();
2356
2357 // Build bot level acceleration structure
2358 ray_tracing_command_buffer.BuildAccelerationStructure(&bot_level_as, scratch_buffer.handle());
2359
2360 // Barrier to prevent using scratch buffer for top level build before bottom level build finishes
2361 VkMemoryBarrier memory_barrier = {};
2362 memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
2363 memory_barrier.srcAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV | VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV;
2364 memory_barrier.dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV | VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV;
2365 ray_tracing_command_buffer.PipelineBarrier(VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
2366 VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, 0, 1, &memory_barrier, 0,
2367 nullptr, 0, nullptr);
2368
2369 // Build top level acceleration structure
2370 ray_tracing_command_buffer.BuildAccelerationStructure(&top_level_as, scratch_buffer.handle(), instance_buffer.handle());
2371
2372 ray_tracing_command_buffer.end();
2373
2374 VkSubmitInfo submit_info = {};
2375 submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
2376 submit_info.commandBufferCount = 1;
2377 submit_info.pCommandBuffers = &ray_tracing_command_buffer.handle();
2378 vk::QueueSubmit(ray_tracing_queue, 1, &submit_info, VK_NULL_HANDLE);
2379 vk::QueueWaitIdle(ray_tracing_queue);
2380 m_errorMonitor->VerifyNotFound();
2381
2382 VkTextureObj texture(m_device, nullptr);
2383 VkSamplerObj sampler(m_device);
2384
2385 VkDeviceSize storage_buffer_size = 1024;
2386 VkBufferObj storage_buffer;
2387 storage_buffer.init(*m_device, storage_buffer_size, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
2388 VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, {ray_tracing_queue_family_index});
2389
2390 VkDeviceSize shader_binding_table_buffer_size = ray_tracing_properties.shaderGroupBaseAlignment * 4ull;
2391 VkBufferObj shader_binding_table_buffer;
2392 shader_binding_table_buffer.init(*m_device, shader_binding_table_buffer_size,
2393 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
2394 VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
2395
2396 // Setup descriptors!
2397 const VkShaderStageFlags kAllRayTracingStages = VK_SHADER_STAGE_RAYGEN_BIT_NV | VK_SHADER_STAGE_ANY_HIT_BIT_NV |
2398 VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV | VK_SHADER_STAGE_MISS_BIT_NV |
2399 VK_SHADER_STAGE_INTERSECTION_BIT_NV | VK_SHADER_STAGE_CALLABLE_BIT_NV;
2400
2401 void *layout_pnext = nullptr;
2402 void *allocate_pnext = nullptr;
2403 VkDescriptorPoolCreateFlags pool_create_flags = 0;
2404 VkDescriptorSetLayoutCreateFlags layout_create_flags = 0;
2405 VkDescriptorBindingFlagsEXT ds_binding_flags[3] = {};
2406 VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
2407 if (descriptor_indexing) {
2408 ds_binding_flags[0] = 0;
2409 ds_binding_flags[1] = 0;
2410 ds_binding_flags[2] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;
2411
2412 layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
2413 layout_createinfo_binding_flags[0].pNext = NULL;
2414 layout_createinfo_binding_flags[0].bindingCount = 3;
2415 layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
2416 layout_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
2417 pool_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
2418 layout_pnext = layout_createinfo_binding_flags;
2419 }
2420
2421 // Prepare descriptors
2422 OneOffDescriptorSet ds(m_device,
2423 {
2424 {0, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, kAllRayTracingStages, nullptr},
2425 {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, kAllRayTracingStages, nullptr},
2426 {2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6, kAllRayTracingStages, nullptr},
2427 },
2428 layout_create_flags, layout_pnext, pool_create_flags);
2429
2430 VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_count = {};
2431 uint32_t desc_counts;
2432 if (descriptor_indexing) {
2433 layout_create_flags = 0;
2434 pool_create_flags = 0;
2435 ds_binding_flags[2] =
2436 VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
2437 desc_counts = 6; // We'll reserve 8 spaces in the layout, but the descriptor will only use 6
2438 variable_count.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
2439 variable_count.descriptorSetCount = 1;
2440 variable_count.pDescriptorCounts = &desc_counts;
2441 allocate_pnext = &variable_count;
2442 }
2443
2444 OneOffDescriptorSet ds_variable(m_device,
2445 {
2446 {0, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, kAllRayTracingStages, nullptr},
2447 {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, kAllRayTracingStages, nullptr},
2448 {2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 8, kAllRayTracingStages, nullptr},
2449 },
2450 layout_create_flags, layout_pnext, pool_create_flags, allocate_pnext);
2451
2452 VkAccelerationStructureNV top_level_as_handle = top_level_as.handle();
2453 VkWriteDescriptorSetAccelerationStructureNV write_descript_set_as = {};
2454 write_descript_set_as.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
2455 write_descript_set_as.accelerationStructureCount = 1;
2456 write_descript_set_as.pAccelerationStructures = &top_level_as_handle;
2457
2458 VkDescriptorBufferInfo descriptor_buffer_info = {};
2459 descriptor_buffer_info.buffer = storage_buffer.handle();
2460 descriptor_buffer_info.offset = 0;
2461 descriptor_buffer_info.range = storage_buffer_size;
2462
2463 VkDescriptorImageInfo descriptor_image_infos[6] = {};
2464 for (int i = 0; i < 6; i++) {
2465 descriptor_image_infos[i] = texture.DescriptorImageInfo();
2466 descriptor_image_infos[i].sampler = sampler.handle();
2467 descriptor_image_infos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2468 }
2469
2470 VkWriteDescriptorSet descriptor_writes[3] = {};
2471 descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
2472 descriptor_writes[0].dstSet = ds.set_;
2473 descriptor_writes[0].dstBinding = 0;
2474 descriptor_writes[0].descriptorCount = 1;
2475 descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV;
2476 descriptor_writes[0].pNext = &write_descript_set_as;
2477
2478 descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
2479 descriptor_writes[1].dstSet = ds.set_;
2480 descriptor_writes[1].dstBinding = 1;
2481 descriptor_writes[1].descriptorCount = 1;
2482 descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
2483 descriptor_writes[1].pBufferInfo = &descriptor_buffer_info;
2484
2485 descriptor_writes[2].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
2486 descriptor_writes[2].dstSet = ds.set_;
2487 descriptor_writes[2].dstBinding = 2;
2488 if (descriptor_indexing) {
2489 descriptor_writes[2].descriptorCount = 5; // Intentionally don't write index 5
2490 } else {
2491 descriptor_writes[2].descriptorCount = 6;
2492 }
2493 descriptor_writes[2].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
2494 descriptor_writes[2].pImageInfo = descriptor_image_infos;
2495 vk::UpdateDescriptorSets(m_device->device(), 3, descriptor_writes, 0, NULL);
2496 if (descriptor_indexing) {
2497 descriptor_writes[0].dstSet = ds_variable.set_;
2498 descriptor_writes[1].dstSet = ds_variable.set_;
2499 descriptor_writes[2].dstSet = ds_variable.set_;
2500 vk::UpdateDescriptorSets(m_device->device(), 3, descriptor_writes, 0, NULL);
2501 }
2502
2503 const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
2504 const VkPipelineLayoutObj pipeline_layout_variable(m_device, {&ds_variable.layout_});
2505
2506 const auto SetImagesArrayLength = [](const std::string &shader_template, const std::string &length_str) {
2507 const std::string to_replace = "IMAGES_ARRAY_LENGTH";
2508
2509 std::string result = shader_template;
2510 auto position = result.find(to_replace);
2511 assert(position != std::string::npos);
2512 result.replace(position, to_replace.length(), length_str);
2513 return result;
2514 };
2515
2516 const std::string rgen_source_template = R"(#version 460
2517 #extension GL_EXT_nonuniform_qualifier : require
2518 #extension GL_EXT_samplerless_texture_functions : require
2519 #extension GL_NV_ray_tracing : require
2520
2521 layout(set = 0, binding = 0) uniform accelerationStructureNV topLevelAS;
2522 layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
2523 uint rgen_index;
2524 uint ahit_index;
2525 uint chit_index;
2526 uint miss_index;
2527 uint intr_index;
2528 uint call_index;
2529
2530 uint rgen_ran;
2531 uint ahit_ran;
2532 uint chit_ran;
2533 uint miss_ran;
2534 uint intr_ran;
2535 uint call_ran;
2536
2537 float result1;
2538 float result2;
2539 float result3;
2540 } sbo;
2541 layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
2542
2543 layout(location = 0) rayPayloadNV vec3 payload;
2544 layout(location = 3) callableDataNV vec3 callableData;
2545
2546 void main() {
2547 sbo.rgen_ran = 1;
2548
2549 executeCallableNV(0, 3);
2550 sbo.result1 = callableData.x;
2551
2552 vec3 origin = vec3(0.0f, 0.0f, -2.0f);
2553 vec3 direction = vec3(0.0f, 0.0f, 1.0f);
2554
2555 traceNV(topLevelAS, gl_RayFlagsNoneNV, 0xFF, 0, 1, 0, origin, 0.001, direction, 10000.0, 0);
2556 sbo.result2 = payload.x;
2557
2558 traceNV(topLevelAS, gl_RayFlagsNoneNV, 0xFF, 0, 1, 0, origin, 0.001, -direction, 10000.0, 0);
2559 sbo.result3 = payload.x;
2560
2561 if (sbo.rgen_index > 0) {
2562 // OOB here:
2563 sbo.result3 = texelFetch(textures[sbo.rgen_index], ivec2(0, 0), 0).x;
2564 }
2565 }
2566 )";
2567
2568 const std::string rgen_source = SetImagesArrayLength(rgen_source_template, "6");
2569 const std::string rgen_source_runtime = SetImagesArrayLength(rgen_source_template, "");
2570
2571 const std::string ahit_source_template = R"(#version 460
2572 #extension GL_EXT_nonuniform_qualifier : require
2573 #extension GL_EXT_samplerless_texture_functions : require
2574 #extension GL_NV_ray_tracing : require
2575
2576 layout(set = 0, binding = 1, std430) buffer StorageBuffer {
2577 uint rgen_index;
2578 uint ahit_index;
2579 uint chit_index;
2580 uint miss_index;
2581 uint intr_index;
2582 uint call_index;
2583
2584 uint rgen_ran;
2585 uint ahit_ran;
2586 uint chit_ran;
2587 uint miss_ran;
2588 uint intr_ran;
2589 uint call_ran;
2590
2591 float result1;
2592 float result2;
2593 float result3;
2594 } sbo;
2595 layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
2596
2597 hitAttributeNV vec3 hitValue;
2598
2599 layout(location = 0) rayPayloadInNV vec3 payload;
2600
2601 void main() {
2602 sbo.ahit_ran = 2;
2603
2604 payload = vec3(0.1234f);
2605
2606 if (sbo.ahit_index > 0) {
2607 // OOB here:
2608 payload.x = texelFetch(textures[sbo.ahit_index], ivec2(0, 0), 0).x;
2609 }
2610 }
2611 )";
2612 const std::string ahit_source = SetImagesArrayLength(ahit_source_template, "6");
2613 const std::string ahit_source_runtime = SetImagesArrayLength(ahit_source_template, "");
2614
2615 const std::string chit_source_template = R"(#version 460
2616 #extension GL_EXT_nonuniform_qualifier : require
2617 #extension GL_EXT_samplerless_texture_functions : require
2618 #extension GL_NV_ray_tracing : require
2619
2620 layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
2621 uint rgen_index;
2622 uint ahit_index;
2623 uint chit_index;
2624 uint miss_index;
2625 uint intr_index;
2626 uint call_index;
2627
2628 uint rgen_ran;
2629 uint ahit_ran;
2630 uint chit_ran;
2631 uint miss_ran;
2632 uint intr_ran;
2633 uint call_ran;
2634
2635 float result1;
2636 float result2;
2637 float result3;
2638 } sbo;
2639 layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
2640
2641 layout(location = 0) rayPayloadInNV vec3 payload;
2642
2643 hitAttributeNV vec3 attribs;
2644
2645 void main() {
2646 sbo.chit_ran = 3;
2647
2648 payload = attribs;
2649 if (sbo.chit_index > 0) {
2650 // OOB here:
2651 payload.x = texelFetch(textures[sbo.chit_index], ivec2(0, 0), 0).x;
2652 }
2653 }
2654 )";
2655 const std::string chit_source = SetImagesArrayLength(chit_source_template, "6");
2656 const std::string chit_source_runtime = SetImagesArrayLength(chit_source_template, "");
2657
2658 const std::string miss_source_template = R"(#version 460
2659 #extension GL_EXT_nonuniform_qualifier : enable
2660 #extension GL_EXT_samplerless_texture_functions : require
2661 #extension GL_NV_ray_tracing : require
2662
2663 layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
2664 uint rgen_index;
2665 uint ahit_index;
2666 uint chit_index;
2667 uint miss_index;
2668 uint intr_index;
2669 uint call_index;
2670
2671 uint rgen_ran;
2672 uint ahit_ran;
2673 uint chit_ran;
2674 uint miss_ran;
2675 uint intr_ran;
2676 uint call_ran;
2677
2678 float result1;
2679 float result2;
2680 float result3;
2681 } sbo;
2682 layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
2683
2684 layout(location = 0) rayPayloadInNV vec3 payload;
2685
2686 void main() {
2687 sbo.miss_ran = 4;
2688
2689 payload = vec3(1.0, 0.0, 0.0);
2690
2691 if (sbo.miss_index > 0) {
2692 // OOB here:
2693 payload.x = texelFetch(textures[sbo.miss_index], ivec2(0, 0), 0).x;
2694 }
2695 }
2696 )";
2697 const std::string miss_source = SetImagesArrayLength(miss_source_template, "6");
2698 const std::string miss_source_runtime = SetImagesArrayLength(miss_source_template, "");
2699
2700 const std::string intr_source_template = R"(#version 460
2701 #extension GL_EXT_nonuniform_qualifier : require
2702 #extension GL_EXT_samplerless_texture_functions : require
2703 #extension GL_NV_ray_tracing : require
2704
2705 layout(set = 0, binding = 1, std430) buffer StorageBuffer {
2706 uint rgen_index;
2707 uint ahit_index;
2708 uint chit_index;
2709 uint miss_index;
2710 uint intr_index;
2711 uint call_index;
2712
2713 uint rgen_ran;
2714 uint ahit_ran;
2715 uint chit_ran;
2716 uint miss_ran;
2717 uint intr_ran;
2718 uint call_ran;
2719
2720 float result1;
2721 float result2;
2722 float result3;
2723 } sbo;
2724 layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
2725
2726 hitAttributeNV vec3 hitValue;
2727
2728 void main() {
2729 sbo.intr_ran = 5;
2730
2731 hitValue = vec3(0.0f, 0.5f, 0.0f);
2732
2733 reportIntersectionNV(1.0f, 0);
2734
2735 if (sbo.intr_index > 0) {
2736 // OOB here:
2737 hitValue.x = texelFetch(textures[sbo.intr_index], ivec2(0, 0), 0).x;
2738 }
2739 }
2740 )";
2741 const std::string intr_source = SetImagesArrayLength(intr_source_template, "6");
2742 const std::string intr_source_runtime = SetImagesArrayLength(intr_source_template, "");
2743
2744 const std::string call_source_template = R"(#version 460
2745 #extension GL_EXT_nonuniform_qualifier : require
2746 #extension GL_EXT_samplerless_texture_functions : require
2747 #extension GL_NV_ray_tracing : require
2748
2749 layout(set = 0, binding = 1, std430) buffer StorageBuffer {
2750 uint rgen_index;
2751 uint ahit_index;
2752 uint chit_index;
2753 uint miss_index;
2754 uint intr_index;
2755 uint call_index;
2756
2757 uint rgen_ran;
2758 uint ahit_ran;
2759 uint chit_ran;
2760 uint miss_ran;
2761 uint intr_ran;
2762 uint call_ran;
2763
2764 float result1;
2765 float result2;
2766 float result3;
2767 } sbo;
2768 layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
2769
2770 layout(location = 3) callableDataInNV vec3 callableData;
2771
2772 void main() {
2773 sbo.call_ran = 6;
2774
2775 callableData = vec3(0.1234f);
2776
2777 if (sbo.call_index > 0) {
2778 // OOB here:
2779 callableData.x = texelFetch(textures[sbo.call_index], ivec2(0, 0), 0).x;
2780 }
2781 }
2782 )";
2783 const std::string call_source = SetImagesArrayLength(call_source_template, "6");
2784 const std::string call_source_runtime = SetImagesArrayLength(call_source_template, "");
2785
2786 struct TestCase {
2787 const std::string &rgen_shader_source;
2788 const std::string &ahit_shader_source;
2789 const std::string &chit_shader_source;
2790 const std::string &miss_shader_source;
2791 const std::string &intr_shader_source;
2792 const std::string &call_shader_source;
2793 bool variable_length;
2794 uint32_t rgen_index;
2795 uint32_t ahit_index;
2796 uint32_t chit_index;
2797 uint32_t miss_index;
2798 uint32_t intr_index;
2799 uint32_t call_index;
2800 const char *expected_error;
2801 };
2802
2803 std::vector<TestCase> tests;
2804 tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 25, 0, 0, 0, 0, 0,
2805 "Index of 25 used to index descriptor array of length 6."});
2806 tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 25, 0, 0, 0, 0,
2807 "Index of 25 used to index descriptor array of length 6."});
2808 tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 25, 0, 0, 0,
2809 "Index of 25 used to index descriptor array of length 6."});
2810 tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 25, 0, 0,
2811 "Index of 25 used to index descriptor array of length 6."});
2812 tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 0, 25, 0,
2813 "Index of 25 used to index descriptor array of length 6."});
2814 tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 0, 0, 25,
2815 "Index of 25 used to index descriptor array of length 6."});
2816
2817 if (descriptor_indexing) {
2818 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2819 call_source_runtime, true, 25, 0, 0, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
2820 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2821 call_source_runtime, true, 0, 25, 0, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
2822 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2823 call_source_runtime, true, 0, 0, 25, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
2824 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2825 call_source_runtime, true, 0, 0, 0, 25, 0, 0, "Index of 25 used to index descriptor array of length 6."});
2826 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2827 call_source_runtime, true, 0, 0, 0, 0, 25, 0, "Index of 25 used to index descriptor array of length 6."});
2828 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2829 call_source_runtime, true, 0, 0, 0, 0, 0, 25, "Index of 25 used to index descriptor array of length 6."});
2830
2831 // For this group, 6 is less than max specified (max specified is 8) but more than actual specified (actual specified is 5)
2832 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2833 call_source_runtime, true, 6, 0, 0, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
2834 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2835 call_source_runtime, true, 0, 6, 0, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
2836 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2837 call_source_runtime, true, 0, 0, 6, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
2838 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2839 call_source_runtime, true, 0, 0, 0, 6, 0, 0, "Index of 6 used to index descriptor array of length 6."});
2840 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2841 call_source_runtime, true, 0, 0, 0, 0, 6, 0, "Index of 6 used to index descriptor array of length 6."});
2842 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2843 call_source_runtime, true, 0, 0, 0, 0, 0, 6, "Index of 6 used to index descriptor array of length 6."});
2844
2845 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2846 call_source_runtime, true, 5, 0, 0, 0, 0, 0, "Descriptor index 5 is uninitialized."});
2847 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2848 call_source_runtime, true, 0, 5, 0, 0, 0, 0, "Descriptor index 5 is uninitialized."});
2849 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2850 call_source_runtime, true, 0, 0, 5, 0, 0, 0, "Descriptor index 5 is uninitialized."});
2851 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2852 call_source_runtime, true, 0, 0, 0, 5, 0, 0, "Descriptor index 5 is uninitialized."});
2853 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2854 call_source_runtime, true, 0, 0, 0, 0, 5, 0, "Descriptor index 5 is uninitialized."});
2855 tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
2856 call_source_runtime, true, 0, 0, 0, 0, 0, 5, "Descriptor index 5 is uninitialized."});
2857 }
2858
2859 PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = reinterpret_cast<PFN_vkCreateRayTracingPipelinesNV>(
2860 vk::GetDeviceProcAddr(m_device->handle(), "vkCreateRayTracingPipelinesNV"));
2861 ASSERT_TRUE(vkCreateRayTracingPipelinesNV != nullptr);
2862
2863 PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV =
2864 reinterpret_cast<PFN_vkGetRayTracingShaderGroupHandlesNV>(
2865 vk::GetDeviceProcAddr(m_device->handle(), "vkGetRayTracingShaderGroupHandlesNV"));
2866 ASSERT_TRUE(vkGetRayTracingShaderGroupHandlesNV != nullptr);
2867
2868 PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV =
2869 reinterpret_cast<PFN_vkCmdTraceRaysNV>(vk::GetDeviceProcAddr(m_device->handle(), "vkCmdTraceRaysNV"));
2870 ASSERT_TRUE(vkCmdTraceRaysNV != nullptr);
2871
2872 // Iteration 0 tests with no descriptor set bound (to sanity test "draw" validation). Iteration 1
2873 // tests what's in the test case vector.
2874 for (const auto &test : tests) {
2875 if (gpu_assisted) {
2876 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, test.expected_error);
2877 }
2878
2879 VkShaderObj rgen_shader(m_device, test.rgen_shader_source.c_str(), VK_SHADER_STAGE_RAYGEN_BIT_NV, this, "main");
2880 VkShaderObj ahit_shader(m_device, test.ahit_shader_source.c_str(), VK_SHADER_STAGE_ANY_HIT_BIT_NV, this, "main");
2881 VkShaderObj chit_shader(m_device, test.chit_shader_source.c_str(), VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, this, "main");
2882 VkShaderObj miss_shader(m_device, test.miss_shader_source.c_str(), VK_SHADER_STAGE_MISS_BIT_NV, this, "main");
2883 VkShaderObj intr_shader(m_device, test.intr_shader_source.c_str(), VK_SHADER_STAGE_INTERSECTION_BIT_NV, this, "main");
2884 VkShaderObj call_shader(m_device, test.call_shader_source.c_str(), VK_SHADER_STAGE_CALLABLE_BIT_NV, this, "main");
2885
2886 VkPipelineShaderStageCreateInfo stage_create_infos[6] = {};
2887 stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
2888 stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
2889 stage_create_infos[0].module = rgen_shader.handle();
2890 stage_create_infos[0].pName = "main";
2891
2892 stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
2893 stage_create_infos[1].stage = VK_SHADER_STAGE_ANY_HIT_BIT_NV;
2894 stage_create_infos[1].module = ahit_shader.handle();
2895 stage_create_infos[1].pName = "main";
2896
2897 stage_create_infos[2].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
2898 stage_create_infos[2].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
2899 stage_create_infos[2].module = chit_shader.handle();
2900 stage_create_infos[2].pName = "main";
2901
2902 stage_create_infos[3].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
2903 stage_create_infos[3].stage = VK_SHADER_STAGE_MISS_BIT_NV;
2904 stage_create_infos[3].module = miss_shader.handle();
2905 stage_create_infos[3].pName = "main";
2906
2907 stage_create_infos[4].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
2908 stage_create_infos[4].stage = VK_SHADER_STAGE_INTERSECTION_BIT_NV;
2909 stage_create_infos[4].module = intr_shader.handle();
2910 stage_create_infos[4].pName = "main";
2911
2912 stage_create_infos[5].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
2913 stage_create_infos[5].stage = VK_SHADER_STAGE_CALLABLE_BIT_NV;
2914 stage_create_infos[5].module = call_shader.handle();
2915 stage_create_infos[5].pName = "main";
2916
2917 VkRayTracingShaderGroupCreateInfoNV group_create_infos[4] = {};
2918 group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
2919 group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
2920 group_create_infos[0].generalShader = 0; // rgen
2921 group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
2922 group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
2923 group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
2924
2925 group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
2926 group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
2927 group_create_infos[1].generalShader = 3; // miss
2928 group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
2929 group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
2930 group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
2931
2932 group_create_infos[2].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
2933 group_create_infos[2].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV;
2934 group_create_infos[2].generalShader = VK_SHADER_UNUSED_NV;
2935 group_create_infos[2].closestHitShader = 2;
2936 group_create_infos[2].anyHitShader = 1;
2937 group_create_infos[2].intersectionShader = 4;
2938
2939 group_create_infos[3].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
2940 group_create_infos[3].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
2941 group_create_infos[3].generalShader = 5; // call
2942 group_create_infos[3].closestHitShader = VK_SHADER_UNUSED_NV;
2943 group_create_infos[3].anyHitShader = VK_SHADER_UNUSED_NV;
2944 group_create_infos[3].intersectionShader = VK_SHADER_UNUSED_NV;
2945
2946 VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
2947 pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
2948 pipeline_ci.stageCount = 6;
2949 pipeline_ci.pStages = stage_create_infos;
2950 pipeline_ci.groupCount = 4;
2951 pipeline_ci.pGroups = group_create_infos;
2952 pipeline_ci.maxRecursionDepth = 2;
2953 pipeline_ci.layout = test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle();
2954
2955 VkPipeline pipeline = VK_NULL_HANDLE;
2956 ASSERT_VK_SUCCESS(vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline));
2957
2958 std::vector<uint8_t> shader_binding_table_data;
2959 shader_binding_table_data.resize(static_cast<std::size_t>(shader_binding_table_buffer_size), 0);
2960 ASSERT_VK_SUCCESS(vkGetRayTracingShaderGroupHandlesNV(m_device->handle(), pipeline, 0, 4,
2961 static_cast<std::size_t>(shader_binding_table_buffer_size),
2962 shader_binding_table_data.data()));
2963
2964 uint8_t *mapped_shader_binding_table_data = (uint8_t *)shader_binding_table_buffer.memory().map();
2965 std::memcpy(mapped_shader_binding_table_data, shader_binding_table_data.data(), shader_binding_table_data.size());
2966 shader_binding_table_buffer.memory().unmap();
2967
2968 ray_tracing_command_buffer.begin();
2969
2970 vk::CmdBindPipeline(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, pipeline);
2971
2972 if (gpu_assisted) {
2973 vk::CmdBindDescriptorSets(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
2974 test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle(), 0, 1,
2975 test.variable_length ? &ds_variable.set_ : &ds.set_, 0, nullptr);
2976 } else {
2977 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-None-02697");
2978 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotBound");
2979 }
2980
2981 if (gpu_assisted) {
2982 // Need these values to pass mapped storage buffer checks
2983 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
2984 ray_tracing_properties.shaderGroupHandleSize * 0ull, shader_binding_table_buffer.handle(),
2985 ray_tracing_properties.shaderGroupHandleSize * 1ull, ray_tracing_properties.shaderGroupHandleSize,
2986 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupHandleSize * 2ull,
2987 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
2988 ray_tracing_properties.shaderGroupHandleSize * 3ull, ray_tracing_properties.shaderGroupHandleSize,
2989 /*width=*/1, /*height=*/1, /*depth=*/1);
2990 } else {
2991 // offset shall be multiple of shaderGroupBaseAlignment and stride of shaderGroupHandleSize
2992 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
2993 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
2994 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
2995 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
2996 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
2997 ray_tracing_properties.shaderGroupBaseAlignment * 3ull, ray_tracing_properties.shaderGroupHandleSize,
2998 /*width=*/1, /*height=*/1, /*depth=*/1);
2999 }
3000
3001 ray_tracing_command_buffer.end();
3002 // Update the index of the texture that the shaders should read
3003 uint32_t *mapped_storage_buffer_data = (uint32_t *)storage_buffer.memory().map();
3004 mapped_storage_buffer_data[0] = test.rgen_index;
3005 mapped_storage_buffer_data[1] = test.ahit_index;
3006 mapped_storage_buffer_data[2] = test.chit_index;
3007 mapped_storage_buffer_data[3] = test.miss_index;
3008 mapped_storage_buffer_data[4] = test.intr_index;
3009 mapped_storage_buffer_data[5] = test.call_index;
3010 mapped_storage_buffer_data[6] = 0;
3011 mapped_storage_buffer_data[7] = 0;
3012 mapped_storage_buffer_data[8] = 0;
3013 mapped_storage_buffer_data[9] = 0;
3014 mapped_storage_buffer_data[10] = 0;
3015 mapped_storage_buffer_data[11] = 0;
3016 storage_buffer.memory().unmap();
3017
3018 vk::QueueSubmit(ray_tracing_queue, 1, &submit_info, VK_NULL_HANDLE);
3019 vk::QueueWaitIdle(ray_tracing_queue);
3020 m_errorMonitor->VerifyFound();
3021
3022 if (gpu_assisted) {
3023 mapped_storage_buffer_data = (uint32_t *)storage_buffer.memory().map();
3024 ASSERT_TRUE(mapped_storage_buffer_data[6] == 1);
3025 ASSERT_TRUE(mapped_storage_buffer_data[7] == 2);
3026 ASSERT_TRUE(mapped_storage_buffer_data[8] == 3);
3027 ASSERT_TRUE(mapped_storage_buffer_data[9] == 4);
3028 ASSERT_TRUE(mapped_storage_buffer_data[10] == 5);
3029 ASSERT_TRUE(mapped_storage_buffer_data[11] == 6);
3030 storage_buffer.memory().unmap();
3031 } else {
3032 ray_tracing_command_buffer.begin();
3033 vk::CmdBindPipeline(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, pipeline);
3034 vk::CmdBindDescriptorSets(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
3035 test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle(), 0, 1,
3036 test.variable_length ? &ds_variable.set_ : &ds.set_, 0, nullptr);
3037
3038 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462");
3039 VkDeviceSize stride_align = ray_tracing_properties.shaderGroupHandleSize;
3040 VkDeviceSize invalid_max_stride = ray_tracing_properties.maxShaderGroupStride +
3041 (stride_align - (ray_tracing_properties.maxShaderGroupStride %
3042 stride_align)); // should be less than maxShaderGroupStride
3043 VkDeviceSize invalid_stride =
3044 ray_tracing_properties.shaderGroupHandleSize >> 1; // should be multiple of shaderGroupHandleSize
3045 VkDeviceSize invalid_offset =
3046 ray_tracing_properties.shaderGroupBaseAlignment >> 1; // should be multiple of shaderGroupBaseAlignment
3047
3048 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3049 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3050 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3051 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3052 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(), invalid_offset,
3053 ray_tracing_properties.shaderGroupHandleSize,
3054 /*width=*/1, /*height=*/1, /*depth=*/1);
3055 m_errorMonitor->VerifyFound();
3056
3057 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465");
3058 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3059 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3060 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3061 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3062 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3063 ray_tracing_properties.shaderGroupBaseAlignment, invalid_stride,
3064 /*width=*/1, /*height=*/1, /*depth=*/1);
3065 m_errorMonitor->VerifyFound();
3066
3067 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468");
3068 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3069 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3070 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3071 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3072 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3073 ray_tracing_properties.shaderGroupBaseAlignment, invalid_max_stride,
3074 /*width=*/1, /*height=*/1, /*depth=*/1);
3075 m_errorMonitor->VerifyFound();
3076
3077 // hit shader
3078 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460");
3079 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3080 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3081 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3082 shader_binding_table_buffer.handle(), invalid_offset, ray_tracing_properties.shaderGroupHandleSize,
3083 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment,
3084 ray_tracing_properties.shaderGroupHandleSize,
3085 /*width=*/1, /*height=*/1, /*depth=*/1);
3086 m_errorMonitor->VerifyFound();
3087
3088 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02464");
3089 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3090 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3091 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3092 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3093 invalid_stride, shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment,
3094 ray_tracing_properties.shaderGroupHandleSize,
3095 /*width=*/1, /*height=*/1, /*depth=*/1);
3096 m_errorMonitor->VerifyFound();
3097
3098 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02467");
3099 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3100 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3101 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3102 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3103 invalid_max_stride, shader_binding_table_buffer.handle(),
3104 ray_tracing_properties.shaderGroupBaseAlignment, ray_tracing_properties.shaderGroupHandleSize,
3105 /*width=*/1, /*height=*/1, /*depth=*/1);
3106 m_errorMonitor->VerifyFound();
3107
3108 // miss shader
3109 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02458");
3110 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3111 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3112 invalid_offset, ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3113 ray_tracing_properties.shaderGroupBaseAlignment * 2ull, ray_tracing_properties.shaderGroupHandleSize,
3114 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment,
3115 ray_tracing_properties.shaderGroupHandleSize,
3116 /*width=*/1, /*height=*/1, /*depth=*/1);
3117 m_errorMonitor->VerifyFound();
3118
3119 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-missShaderBindingStride-02463");
3120 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3121 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3122 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, invalid_stride,
3123 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3124 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3125 ray_tracing_properties.shaderGroupBaseAlignment, ray_tracing_properties.shaderGroupHandleSize,
3126 /*width=*/1, /*height=*/1, /*depth=*/1);
3127 m_errorMonitor->VerifyFound();
3128
3129 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-missShaderBindingStride-02466");
3130 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3131 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3132 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, invalid_max_stride,
3133 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3134 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3135 ray_tracing_properties.shaderGroupBaseAlignment, ray_tracing_properties.shaderGroupHandleSize,
3136 /*width=*/1, /*height=*/1, /*depth=*/1);
3137 m_errorMonitor->VerifyFound();
3138
3139 // raygenshader
3140 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456");
3141 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(), invalid_offset,
3142 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 1ull,
3143 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3144 ray_tracing_properties.shaderGroupBaseAlignment * 2ull, ray_tracing_properties.shaderGroupHandleSize,
3145 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment,
3146 ray_tracing_properties.shaderGroupHandleSize,
3147 /*width=*/1, /*height=*/1, /*depth=*/1);
3148
3149 m_errorMonitor->VerifyFound();
3150 const auto &limits = m_device->props.limits;
3151
3152 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-width-02469");
3153 uint32_t invalid_width = limits.maxComputeWorkGroupCount[0] + 1;
3154 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3155 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3156 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3157 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3158 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3159 ray_tracing_properties.shaderGroupBaseAlignment, ray_tracing_properties.shaderGroupHandleSize,
3160 /*width=*/invalid_width, /*height=*/1, /*depth=*/1);
3161 m_errorMonitor->VerifyFound();
3162
3163 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-height-02470");
3164 uint32_t invalid_height = limits.maxComputeWorkGroupCount[1] + 1;
3165 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3166 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3167 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3168 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3169 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3170 ray_tracing_properties.shaderGroupBaseAlignment, ray_tracing_properties.shaderGroupHandleSize,
3171 /*width=*/1, /*height=*/invalid_height, /*depth=*/1);
3172 m_errorMonitor->VerifyFound();
3173
3174 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdTraceRaysNV-depth-02471");
3175 uint32_t invalid_depth = limits.maxComputeWorkGroupCount[2] + 1;
3176 vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
3177 ray_tracing_properties.shaderGroupBaseAlignment * 0ull, shader_binding_table_buffer.handle(),
3178 ray_tracing_properties.shaderGroupBaseAlignment * 1ull, ray_tracing_properties.shaderGroupHandleSize,
3179 shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupBaseAlignment * 2ull,
3180 ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
3181 ray_tracing_properties.shaderGroupBaseAlignment, ray_tracing_properties.shaderGroupHandleSize,
3182 /*width=*/1, /*height=*/1, /*depth=*/invalid_depth);
3183 m_errorMonitor->VerifyFound();
3184
3185 ray_tracing_command_buffer.end();
3186 }
3187 vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
3188 }
3189 }
3190
InitSyncValFramework()3191 void VkSyncValTest::InitSyncValFramework() {
3192 // Enable synchronization validation
3193 InitFramework(m_errorMonitor, &features_);
3194 }
3195
print_android(const char * c)3196 void print_android(const char *c) {
3197 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3198 __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "%s", c);
3199 #endif // VK_USE_PLATFORM_ANDROID_KHR
3200 }
3201
#if defined(ANDROID) && defined(VALIDATION_APK)
// Logcat tag used by the Android test harness below.
const char *appTag = "VulkanLayerValidationTests";
// Set once the native window exists (APP_CMD_INIT_WINDOW in processCommand).
static bool initialized = false;
// Set when the app gains focus (APP_CMD_GAINED_FOCUS in processCommand).
static bool active = false;
3206
3207 // Convert Intents to argv
3208 // Ported from Hologram sample, only difference is flexible key
get_args(android_app & app,const char * intent_extra_data_key)3209 std::vector<std::string> get_args(android_app &app, const char *intent_extra_data_key) {
3210 std::vector<std::string> args;
3211 JavaVM &vm = *app.activity->vm;
3212 JNIEnv *p_env;
3213 if (vm.AttachCurrentThread(&p_env, nullptr) != JNI_OK) return args;
3214
3215 JNIEnv &env = *p_env;
3216 jobject activity = app.activity->clazz;
3217 jmethodID get_intent_method = env.GetMethodID(env.GetObjectClass(activity), "getIntent", "()Landroid/content/Intent;");
3218 jobject intent = env.CallObjectMethod(activity, get_intent_method);
3219 jmethodID get_string_extra_method =
3220 env.GetMethodID(env.GetObjectClass(intent), "getStringExtra", "(Ljava/lang/String;)Ljava/lang/String;");
3221 jvalue get_string_extra_args;
3222 get_string_extra_args.l = env.NewStringUTF(intent_extra_data_key);
3223 jstring extra_str = static_cast<jstring>(env.CallObjectMethodA(intent, get_string_extra_method, &get_string_extra_args));
3224
3225 std::string args_str;
3226 if (extra_str) {
3227 const char *extra_utf = env.GetStringUTFChars(extra_str, nullptr);
3228 args_str = extra_utf;
3229 env.ReleaseStringUTFChars(extra_str, extra_utf);
3230 env.DeleteLocalRef(extra_str);
3231 }
3232
3233 env.DeleteLocalRef(get_string_extra_args.l);
3234 env.DeleteLocalRef(intent);
3235 vm.DetachCurrentThread();
3236
3237 // split args_str
3238 std::stringstream ss(args_str);
3239 std::string arg;
3240 while (std::getline(ss, arg, ' ')) {
3241 if (!arg.empty()) args.push_back(arg);
3242 }
3243
3244 return args;
3245 }
3246
addFullTestCommentIfPresent(const::testing::TestInfo & test_info,std::string & error_message)3247 void addFullTestCommentIfPresent(const ::testing::TestInfo &test_info, std::string &error_message) {
3248 const char *const type_param = test_info.type_param();
3249 const char *const value_param = test_info.value_param();
3250
3251 if (type_param != NULL || value_param != NULL) {
3252 error_message.append(", where ");
3253 if (type_param != NULL) {
3254 error_message.append("TypeParam = ").append(type_param);
3255 if (value_param != NULL) error_message.append(" and ");
3256 }
3257 if (value_param != NULL) {
3258 error_message.append("GetParam() = ").append(value_param);
3259 }
3260 }
3261 }
3262
3263 // Inspired by https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
3264 class LogcatPrinter : public ::testing::EmptyTestEventListener {
3265 // Called before a test starts.
OnTestStart(const::testing::TestInfo & test_info)3266 virtual void OnTestStart(const ::testing::TestInfo &test_info) {
3267 __android_log_print(ANDROID_LOG_INFO, appTag, "[ RUN ] %s.%s", test_info.test_case_name(), test_info.name());
3268 }
3269
3270 // Called after a failed assertion or a SUCCEED() invocation.
OnTestPartResult(const::testing::TestPartResult & result)3271 virtual void OnTestPartResult(const ::testing::TestPartResult &result) {
3272 // If the test part succeeded, we don't need to do anything.
3273 if (result.type() == ::testing::TestPartResult::kSuccess) return;
3274
3275 __android_log_print(ANDROID_LOG_INFO, appTag, "%s in %s:%d %s", result.failed() ? "*** Failure" : "Success",
3276 result.file_name(), result.line_number(), result.summary());
3277 }
3278
3279 // Called after a test ends.
OnTestEnd(const::testing::TestInfo & info)3280 virtual void OnTestEnd(const ::testing::TestInfo &info) {
3281 std::string result;
3282 if (info.result()->Passed()) {
3283 result.append("[ OK ]");
3284 } else {
3285 result.append("[ FAILED ]");
3286 }
3287 result.append(info.test_case_name()).append(".").append(info.name());
3288 if (info.result()->Failed()) addFullTestCommentIfPresent(info, result);
3289
3290 if (::testing::GTEST_FLAG(print_time)) {
3291 std::ostringstream os;
3292 os << info.result()->elapsed_time();
3293 result.append(" (").append(os.str()).append(" ms)");
3294 }
3295
3296 __android_log_print(ANDROID_LOG_INFO, appTag, "%s", result.c_str());
3297 };
3298 };
3299
processInput(struct android_app * app,AInputEvent * event)3300 static int32_t processInput(struct android_app *app, AInputEvent *event) { return 0; }
3301
processCommand(struct android_app * app,int32_t cmd)3302 static void processCommand(struct android_app *app, int32_t cmd) {
3303 switch (cmd) {
3304 case APP_CMD_INIT_WINDOW: {
3305 if (app->window) {
3306 initialized = true;
3307 VkTestFramework::window = app->window;
3308 }
3309 break;
3310 }
3311 case APP_CMD_GAINED_FOCUS: {
3312 active = true;
3313 break;
3314 }
3315 case APP_CMD_LOST_FOCUS: {
3316 active = false;
3317 break;
3318 }
3319 }
3320 }
3321
// Android entry point: pumps the NativeActivity event loop until the window is
// initialized and focused, then runs the whole gtest suite once and asks the
// activity to finish.
void android_main(struct android_app *app) {
    app->onAppCmd = processCommand;
    app->onInputEvent = processInput;

    while (1) {
        int events;
        struct android_poll_source *source;
        // Block indefinitely (-1) while inactive; once active, poll without
        // blocking (0) so we can fall through to the test run below.
        while (ALooper_pollAll(active ? 0 : -1, NULL, &events, (void **)&source) >= 0) {
            if (source) {
                source->process(app, source);
            }

            // The system asked us to shut down before tests could run.
            if (app->destroyRequested != 0) {
                VkTestFramework::Finish();
                return;
            }
        }

        if (initialized && active) {
            // Use the following key to send arguments to gtest, i.e.
            // --es args "--gtest_filter=-VkLayerTest.foo"
            const char key[] = "args";
            std::vector<std::string> args = get_args(*app, key);

            std::string filter = "";
            if (args.size() > 0) {
                // Only the first intent arg is forwarded to gtest.
                __android_log_print(ANDROID_LOG_INFO, appTag, "Intent args = %s", args[0].c_str());
                filter += args[0];
            } else {
                __android_log_print(ANDROID_LOG_INFO, appTag, "No Intent args detected");
            }

            // Build a fake argv: argv[0] is a dummy program name, argv[1] the
            // gtest filter string. filter outlives argv, so c_str() is safe.
            int argc = 2;
            char *argv[] = {(char *)"foo", (char *)filter.c_str()};
            __android_log_print(ANDROID_LOG_DEBUG, appTag, "filter = %s", argv[1]);

            // Route output to files until we can override the gtest output
            freopen("/sdcard/Android/data/com.example.VulkanLayerValidationTests/files/out.txt", "w", stdout);
            freopen("/sdcard/Android/data/com.example.VulkanLayerValidationTests/files/err.txt", "w", stderr);

            ::testing::InitGoogleTest(&argc, argv);

            // Mirror test progress into logcat as well as the redirected stdout.
            ::testing::TestEventListeners &listeners = ::testing::UnitTest::GetInstance()->listeners();
            listeners.Append(new LogcatPrinter);

            VkTestFramework::InitArgs(&argc, argv);
            ::testing::AddGlobalTestEnvironment(new TestEnvironment);

            int result = RUN_ALL_TESTS();

            if (result != 0) {
                __android_log_print(ANDROID_LOG_INFO, appTag, "==== Tests FAILED ====");
            } else {
                __android_log_print(ANDROID_LOG_INFO, appTag, "==== Tests PASSED ====");
            }

            VkTestFramework::Finish();

            // Close the redirected streams so the result files are flushed.
            fclose(stdout);
            fclose(stderr);

            // Request activity teardown; the destroyRequested branch above
            // handles the final exit on the next loop iteration.
            ANativeActivity_finish(app->activity);
            return;
        }
    }
}
3388 #endif
3389
3390 #if defined(_WIN32) && !defined(NDEBUG)
3391 #include <crtdbg.h>
3392 #endif
3393
main(int argc,char ** argv)3394 int main(int argc, char **argv) {
3395 int result;
3396
3397 #if defined(_WIN32)
3398 #if !defined(NDEBUG)
3399 _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
3400 _CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);
3401 #endif
3402 // Avoid "Abort, Retry, Ignore" dialog boxes
3403 _set_abort_behavior(0, _WRITE_ABORT_MSG | _CALL_REPORTFAULT);
3404 SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
3405 _CrtSetReportMode(_CRT_ASSERT, _CRTDBG_MODE_FILE);
3406 _CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);
3407 #endif
3408
3409 ::testing::InitGoogleTest(&argc, argv);
3410 VkTestFramework::InitArgs(&argc, argv);
3411
3412 ::testing::AddGlobalTestEnvironment(new TestEnvironment);
3413
3414 result = RUN_ALL_TESTS();
3415
3416 VkTestFramework::Finish();
3417 return result;
3418 }
3419