1 /*
2 * Copyright (c) 2015-2021 The Khronos Group Inc.
3 * Copyright (c) 2015-2021 Valve Corporation
4 * Copyright (c) 2015-2021 LunarG, Inc.
5 * Copyright (c) 2015-2021 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Author: Chia-I Wu <olvaffe@gmail.com>
14 * Author: Chris Forbes <chrisf@ijw.co.nz>
15 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
16 * Author: Mark Lobodzinski <mark@lunarg.com>
17 * Author: Mike Stroyan <mike@LunarG.com>
18 * Author: Tobin Ehlis <tobine@google.com>
19 * Author: Tony Barbour <tony@LunarG.com>
20 * Author: Cody Northrop <cnorthrop@google.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 * Author: Jeremy Kniager <jeremyk@lunarg.com>
23 * Author: Shannon McPherson <shannon@lunarg.com>
24 * Author: John Zulauf <jzulauf@lunarg.com>
25 */
26
27 #include "../layer_validation_tests.h"
28 #include "vk_extension_helper.h"
29
30 #include <algorithm>
31 #include <array>
32 #include <chrono>
33 #include <memory>
34 #include <mutex>
35 #include <thread>
36
37 #include "cast_utils.h"
38
39 //
40 // POSITIVE VALIDATION TESTS
41 //
// These tests do not expect to encounter ANY validation errors, and they pass only if that is true
43
TEST_F(VkPositiveLayerTest, CopyNonupdatedDescriptors) {
    TEST_DESCRIPTION("Copy non-updated descriptors");

    ASSERT_NO_FATAL_FAILURE(Init());
    // Source layout has three bindings; only the first two are copied below.
    OneOffDescriptorSet src_descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });
    OneOffDescriptorSet dst_descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

    m_errorMonitor->ExpectSuccess();

    // Build one copy operation per destination binding. The source descriptors
    // were never written, which must still be a legal copy source.
    const unsigned int copy_size = 2;
    VkCopyDescriptorSet copy_ds_update[copy_size] = {};
    for (unsigned int binding = 0; binding < copy_size; ++binding) {
        VkCopyDescriptorSet &copy = copy_ds_update[binding];
        copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
        copy.srcSet = src_descriptor_set.set_;
        copy.srcBinding = binding;
        copy.dstSet = dst_descriptor_set.set_;
        copy.dstBinding = binding;
        copy.descriptorCount = 1;
    }
    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, copy_size, copy_ds_update);

    m_errorMonitor->VerifyNotFound();
}
76
TEST_F(VkPositiveLayerTest, DeleteDescriptorSetLayoutsBeforeDescriptorSets) {
    TEST_DESCRIPTION("Create DSLayouts and DescriptorSets and then delete the DSLayouts before the DescriptorSets.");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    VkResult err;

    m_errorMonitor->ExpectSuccess();

    // Pool that can hold one sampler descriptor; FREE_DESCRIPTOR_SET_BIT is
    // required so the set can be individually freed below.
    VkDescriptorPoolSize ds_type_count = {};
    ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
    ds_type_count.descriptorCount = 1;

    VkDescriptorPoolCreateInfo ds_pool_ci = {};
    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    ds_pool_ci.pNext = NULL;
    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
    ds_pool_ci.maxSets = 1;
    ds_pool_ci.poolSizeCount = 1;
    ds_pool_ci.pPoolSizes = &ds_type_count;

    VkDescriptorPool ds_pool_one;
    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool_one);
    ASSERT_VK_SUCCESS(err);

    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 0;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
    dsl_binding.pImmutableSamplers = NULL;

    VkDescriptorSet descriptorSet;
    {
        // The layout only needs to live long enough for the allocation call;
        // destroying it afterwards must not invalidate the descriptor set.
        const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});

        VkDescriptorSetAllocateInfo alloc_info = {};
        alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        alloc_info.descriptorSetCount = 1;
        alloc_info.descriptorPool = ds_pool_one;
        alloc_info.pSetLayouts = &ds_layout.handle();
        err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
        ASSERT_VK_SUCCESS(err);
    }  // ds_layout destroyed
    err = vk::FreeDescriptorSets(m_device->device(), ds_pool_one, 1, &descriptorSet);
    // Was previously unchecked; every other VkResult in this test is asserted.
    ASSERT_VK_SUCCESS(err);

    vk::DestroyDescriptorPool(m_device->device(), ds_pool_one, NULL);
    m_errorMonitor->VerifyNotFound();
}
125
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, IgnoreUnrelatedDescriptor) {
    TEST_DESCRIPTION(
        "Ensure that the vkUpdateDescriptorSets validation code is ignoring VkWriteDescriptorSet members that are not related to "
        "the descriptor type specified by VkWriteDescriptorSet::descriptorType. Correct validation behavior will result in the "
        "test running to completion without validation errors.");

    // Deliberately bogus pointer value: if a layer dereferences a field it is
    // supposed to ignore, the test will most likely crash rather than pass.
    const uintptr_t invalid_ptr = 0xcdcdcdcd;

    ASSERT_NO_FATAL_FAILURE(Init());

    // Verify VK_FORMAT_R8_UNORM supports VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
    // (needed by the texel-buffer case at the bottom; skip everything if absent).
    const VkFormat format_texel_case = VK_FORMAT_R8_UNORM;
    const char *format_texel_case_string = "VK_FORMAT_R8_UNORM";
    VkFormatProperties format_properties;
    vk::GetPhysicalDeviceFormatProperties(gpu(), format_texel_case, &format_properties);
    if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
        printf("%s Test requires %s to support VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n", kSkipPrefix, format_texel_case_string);
        return;
    }

    // Image Case: only pImageInfo is relevant for SAMPLED_IMAGE.
    {
        m_errorMonitor->ExpectSuccess();

        VkImageObj image(m_device);
        image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);

        VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);

        OneOffDescriptorSet descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

        VkDescriptorImageInfo image_info = {};
        image_info.imageView = view;
        image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

        VkWriteDescriptorSet descriptor_write;
        memset(&descriptor_write, 0, sizeof(descriptor_write));
        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        descriptor_write.dstSet = descriptor_set.set_;
        descriptor_write.dstBinding = 0;
        descriptor_write.descriptorCount = 1;
        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        descriptor_write.pImageInfo = &image_info;

        // Set pBufferInfo and pTexelBufferView to invalid values, which should
        // be ignored for descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE.
        // This will most likely produce a crash if the parameter_validation
        // layer does not correctly ignore pBufferInfo.
        descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
        descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);

        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

        m_errorMonitor->VerifyNotFound();
    }

    // Buffer Case: only pBufferInfo is relevant for UNIFORM_BUFFER.
    {
        m_errorMonitor->ExpectSuccess();

        uint32_t queue_family_index = 0;
        VkBufferCreateInfo buffer_create_info = {};
        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buffer_create_info.size = 1024;
        buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
        buffer_create_info.queueFamilyIndexCount = 1;
        buffer_create_info.pQueueFamilyIndices = &queue_family_index;

        VkBufferObj buffer;
        buffer.init(*m_device, buffer_create_info);

        OneOffDescriptorSet descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

        VkDescriptorBufferInfo buffer_info = {};
        buffer_info.buffer = buffer.handle();
        buffer_info.offset = 0;
        buffer_info.range = 1024;

        VkWriteDescriptorSet descriptor_write;
        memset(&descriptor_write, 0, sizeof(descriptor_write));
        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        descriptor_write.dstSet = descriptor_set.set_;
        descriptor_write.dstBinding = 0;
        descriptor_write.descriptorCount = 1;
        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        descriptor_write.pBufferInfo = &buffer_info;

        // Set pImageInfo and pTexelBufferView to invalid values, which should
        // be ignored for descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER.
        // This will most likely produce a crash if the parameter_validation
        // layer does not correctly ignore pImageInfo.
        descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
        descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);

        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

        m_errorMonitor->VerifyNotFound();
    }

    // Texel Buffer Case: only pTexelBufferView is relevant for STORAGE_TEXEL_BUFFER.
    {
        m_errorMonitor->ExpectSuccess();

        uint32_t queue_family_index = 0;
        VkBufferCreateInfo buffer_create_info = {};
        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buffer_create_info.size = 1024;
        buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
        buffer_create_info.queueFamilyIndexCount = 1;
        buffer_create_info.pQueueFamilyIndices = &queue_family_index;

        VkBufferObj buffer;
        buffer.init(*m_device, buffer_create_info);

        VkBufferViewCreateInfo buff_view_ci = {};
        buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
        buff_view_ci.buffer = buffer.handle();
        buff_view_ci.format = format_texel_case;  // support was verified above
        buff_view_ci.range = VK_WHOLE_SIZE;
        VkBufferView buffer_view;
        VkResult err = vk::CreateBufferView(m_device->device(), &buff_view_ci, NULL, &buffer_view);
        ASSERT_VK_SUCCESS(err);
        OneOffDescriptorSet descriptor_set(m_device,
                                           {
                                               {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                           });

        VkWriteDescriptorSet descriptor_write;
        memset(&descriptor_write, 0, sizeof(descriptor_write));
        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        descriptor_write.dstSet = descriptor_set.set_;
        descriptor_write.dstBinding = 0;
        descriptor_write.descriptorCount = 1;
        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
        descriptor_write.pTexelBufferView = &buffer_view;

        // Set pImageInfo and pBufferInfo to invalid values, which should be
        // ignored for descriptorType ==
        // VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER.
        // This will most likely produce a crash if the parameter_validation
        // layer does not correctly ignore pImageInfo and pBufferInfo.
        descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
        descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);

        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

        m_errorMonitor->VerifyNotFound();

        vk::DestroyBufferView(m_device->device(), buffer_view, NULL);
    }
}
287
TEST_F(VkPositiveLayerTest, ImmutableSamplerOnlyDescriptor) {
    TEST_DESCRIPTION("Bind a DescriptorSet with only an immutable sampler and make sure that we don't warn for no update.");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // NOTE(review): pImmutableSamplers is nullptr here even though the test
    // description refers to an immutable sampler — confirm this is intentional.
    OneOffDescriptorSet descriptor_set(m_device, {
                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                                 });

    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    VkSampler sampler;
    VkResult err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
    ASSERT_VK_SUCCESS(err);

    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});

    // Binding the never-updated set must not trigger any warning/error.
    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                              &descriptor_set.set_, 0, nullptr);
    m_errorMonitor->VerifyNotFound();

    // The sampler is never referenced by recorded commands, so destroying it
    // before ending the render pass is safe.
    vk::DestroySampler(m_device->device(), sampler, NULL);

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
318
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, EmptyDescriptorUpdateTest) {
    TEST_DESCRIPTION("Update last descriptor in a set that includes an empty binding");
    VkResult err;

    ASSERT_NO_FATAL_FAILURE(Init());
    if (IsPlatform(kNexusPlayer)) {
        printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
        return;
    }
    m_errorMonitor->ExpectSuccess();

    // Create layout with two uniform buffer descriptors w/ empty binding between them
    // (binding 1 has descriptorCount == 0, i.e. it consumes no descriptors).
    OneOffDescriptorSet ds(m_device, {
                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                         {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0 /*!*/, 0, nullptr},
                                         {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                     });

    // Create a buffer to be used for update
    VkBufferCreateInfo buff_ci = {};
    buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buff_ci.size = 256;
    buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    VkBuffer buffer;
    err = vk::CreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
    ASSERT_VK_SUCCESS(err);
    // Have to bind memory to buffer before descriptor update
    VkMemoryAllocateInfo mem_alloc = {};
    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    mem_alloc.pNext = NULL;
    mem_alloc.allocationSize = 512;  // one allocation for both buffers
    mem_alloc.memoryTypeIndex = 0;

    VkMemoryRequirements mem_reqs;
    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
    if (!pass) {
        // No compatible memory type: skip, but don't leak the buffer.
        printf("%s Failed to allocate memory.\n", kSkipPrefix);
        vk::DestroyBuffer(m_device->device(), buffer, NULL);
        return;
    }
    // Make sure allocation is sufficiently large to accommodate buffer requirements
    if (mem_reqs.size > mem_alloc.allocationSize) {
        mem_alloc.allocationSize = mem_reqs.size;
    }

    VkDeviceMemory mem;
    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
    ASSERT_VK_SUCCESS(err);
    err = vk::BindBufferMemory(m_device->device(), buffer, mem, 0);
    ASSERT_VK_SUCCESS(err);

    // Only update the descriptor at binding 2
    VkDescriptorBufferInfo buff_info = {};
    buff_info.buffer = buffer;
    buff_info.offset = 0;
    buff_info.range = VK_WHOLE_SIZE;
    VkWriteDescriptorSet descriptor_write = {};
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstBinding = 2;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = &buff_info;
    descriptor_write.pImageInfo = nullptr;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_write.dstSet = ds.set_;

    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

    m_errorMonitor->VerifyNotFound();
    // Cleanup
    vk::FreeMemory(m_device->device(), mem, NULL);
    vk::DestroyBuffer(m_device->device(), buffer, NULL);
}
395
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, PushDescriptorNullDstSetTest) {
    TEST_DESCRIPTION("Use null dstSet in CmdPushDescriptorSetKHR");

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    m_errorMonitor->ExpectSuccess();

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // One uniform buffer at binding 2, used for both the regular and the
    // push-descriptor set layouts below.
    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
    // Create push descriptor set layout
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    // Use helper to create graphics pipeline
    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.InitState();
    helper.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &ds_layout});
    helper.CreateGraphicsPipeline();

    const float vbo_data[3] = {1.f, 0.f, 1.f};
    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

    VkDescriptorBufferInfo buff_info;
    buff_info.buffer = vbo.handle();
    buff_info.offset = 0;
    buff_info.range = sizeof(vbo_data);
    VkWriteDescriptorSet descriptor_write = {};
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstBinding = 2;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = &buff_info;
    descriptor_write.pImageInfo = nullptr;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_write.dstSet = 0;  // Should not cause a validation error

    // Find address of extension call and make the call
    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    assert(vkCmdPushDescriptorSetKHR != nullptr);

    m_commandBuffer->begin();

    // In Intel GPU, it needs to bind pipeline before push descriptor set.
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_layout_.handle(), 0, 1,
                              &descriptor_write);

    // NOTE(review): the command buffer is begun but never ended or submitted
    // here — presumably intentional since only the push call is under test;
    // confirm.
    m_errorMonitor->VerifyNotFound();
}
475
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, PushDescriptorUnboundSetTest) {
    TEST_DESCRIPTION("Ensure that no validation errors are produced for not bound push descriptor sets");
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Create descriptor set layout
    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    OneOffDescriptorSet descriptor_set(m_device, {dsl_binding}, 0, nullptr, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
                                       nullptr);

    // Create push descriptor set layout
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    // Create PSO: the fragment shader consumes both the push set (set=0) and
    // the regular set (set=1), so both must be satisfied at draw time.
    char const fsSource[] = R"glsl(
        #version 450
        layout(location=0) out vec4 x;
        layout(set=0) layout(binding=2) uniform foo1 { float x; } bar1;
        layout(set=1) layout(binding=2) uniform foo2 { float y; } bar2;
        void main(){
           x = vec4(bar1.x) + vec4(bar2.y);
        }
    )glsl";
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    // Now use the descriptor layouts to create a pipeline layout
    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &descriptor_set.layout_});
    pipe.CreateGraphicsPipeline();

    const float bo_data[1] = {1.f};
    VkConstantBufferObj buffer(m_device, sizeof(bo_data), (const void *)&bo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

    // Update descriptor set
    descriptor_set.WriteDescriptorBufferInfo(2, buffer.handle(), 0, sizeof(bo_data));
    descriptor_set.UpdateDescriptorSets();

    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    assert(vkCmdPushDescriptorSetKHR != nullptr);

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);

    // Push descriptors and bind descriptor set
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                              descriptor_set.descriptor_writes.data());
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 1, 1,
                              &descriptor_set.set_, 0, NULL);

    // No errors should be generated.
    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);

    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
568
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, BindingPartiallyBound) {
    TEST_DESCRIPTION("Ensure that no validation errors for invalid descriptors if binding is PARTIALLY_BOUND");
    SetTargetApiVersion(VK_API_VERSION_1_1);
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }

    InitFramework(m_errorMonitor);

    // Descriptor indexing needs both maintenance3 and the EXT extension.
    bool descriptor_indexing = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
    descriptor_indexing =
        descriptor_indexing && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    if (descriptor_indexing) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    } else {
        printf("%s %s and/or %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_MAINTENANCE_3_EXTENSION_NAME,
               VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
        return;
    }
    VkPhysicalDeviceFeatures2KHR features2 = {};
    auto indexing_features = LvlInitStruct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();

    // Query descriptorBindingPartiallyBound support before creating the device.
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
    features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    if (!indexing_features.descriptorBindingPartiallyBound) {
        printf("Partially bound bindings not supported, skipping test\n");
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags = {};
    ds_binding_flags[0] = 0;
    // No Error
    ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
    // Uncomment for Error
    // ds_binding_flags[1] = 0;

    layout_createinfo_binding_flags.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
    layout_createinfo_binding_flags.pNext = NULL;
    layout_createinfo_binding_flags.bindingCount = 2;
    layout_createinfo_binding_flags.pBindingFlags = ds_binding_flags;

    // Prepare descriptors
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                           {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                       },
                                       0, &layout_createinfo_binding_flags, 0);
    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
    uint32_t qfi = 0;
    VkBufferCreateInfo buffer_create_info = {};
    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffer_create_info.size = 32;
    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buffer_create_info.queueFamilyIndexCount = 1;
    buffer_create_info.pQueueFamilyIndices = &qfi;

    VkBufferObj buffer;
    buffer.init(*m_device, buffer_create_info);

    VkDescriptorBufferInfo buffer_info[2] = {};
    buffer_info[0].buffer = buffer.handle();
    buffer_info[0].offset = 0;
    buffer_info[0].range = sizeof(uint32_t);

    VkBufferCreateInfo index_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
    index_buffer_create_info.size = sizeof(uint32_t);
    index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    VkBufferObj index_buffer;
    index_buffer.init(*m_device, index_buffer_create_info);

    // Only update binding 0; binding 1 stays invalid, which PARTIALLY_BOUND
    // permits as long as the shader doesn't actually read it.
    VkWriteDescriptorSet descriptor_writes[2] = {};
    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_writes[0].dstSet = descriptor_set.set_;
    descriptor_writes[0].dstBinding = 0;
    descriptor_writes[0].descriptorCount = 1;
    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_writes[0].pBufferInfo = buffer_info;
    vk::UpdateDescriptorSets(m_device->device(), 1, descriptor_writes, 0, NULL);

    // Shader only reads binding 1 on the branch where doit.val != 0.
    char const *shader_source = R"glsl(
        #version 450
        layout(set = 0, binding = 0) uniform foo_0 { int val; } doit;
        layout(set = 0, binding = 1) uniform foo_1 { int val; } readit;
        void main() {
            if (doit.val == 0)
                gl_Position = vec4(0.0);
            else
                gl_Position = vec4(readit.val);
        }
    )glsl";

    VkShaderObj vs(m_device, shader_source, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddDefaultColorAttachment();
    pipe.CreateVKPipeline(pipeline_layout.handle(), m_renderPass);
    VkCommandBufferBeginInfo begin_info = {};
    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    m_commandBuffer->begin(&begin_info);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                              &descriptor_set.set_, 0, nullptr);
    vk::CmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
    VkViewport viewport = {0, 0, 16, 16, 0, 1};
    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
    VkRect2D scissor = {{0, 0}, {16, 16}};
    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
    vk::CmdDrawIndexed(m_commandBuffer->handle(), 1, 1, 0, 0, 0);
    vk::CmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    // Submit and wait; GPU-assisted checks, if enabled, run at submit time.
    m_commandBuffer->QueueCommandBuffer();
    m_errorMonitor->VerifyNotFound();
}
700
TEST_F(VkPositiveLayerTest, PushDescriptorSetUpdatingSetNumber) {
    TEST_DESCRIPTION(
        "Ensure that no validation errors are produced when the push descriptor set number changes "
        "between two vk::CmdPushDescriptorSetKHR calls.");

    // VK_KHR_push_descriptor requires VK_KHR_get_physical_device_properties2 at the instance level.
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Create a uniform buffer holding the data the fragment shaders read through the pushed descriptor.
    const uint32_t buffer_data[4] = {4, 5, 6, 7};
    VkConstantBufferObj buffer_obj(
        m_device, sizeof(buffer_data), &buffer_data,
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
    ASSERT_TRUE(buffer_obj.initialized());

    VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};

    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);

    // A regular (non-push) set layout used as filler at the other set numbers in the pipeline layouts.
    const VkDescriptorSetLayoutBinding ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                       nullptr};
    const VkDescriptorSetLayoutBinding ds_binding_1 = {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                       nullptr};
    const VkDescriptorSetLayoutObj ds_layout(m_device, {ds_binding_0, ds_binding_1});
    ASSERT_TRUE(ds_layout.initialized());

    // The push descriptor set layout; its position in the pipeline layout changes between the two sub-tests.
    const VkDescriptorSetLayoutBinding push_ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                            nullptr};
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {push_ds_binding_0},
                                                  VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    ASSERT_TRUE(push_ds_layout.initialized());

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    // Pipelines live outside the scopes below so they outlive the command buffer recording.
    VkPipelineObj pipe0(m_device);
    VkPipelineObj pipe1(m_device);
    {
        // Note: the push descriptor set is set number 2.
        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &push_ds_layout, &ds_layout});
        ASSERT_TRUE(pipeline_layout.initialized());

        char const *fsSource = R"glsl(
            #version 450
            layout(location=0) out vec4 x;
            layout(set=2) layout(binding=0) uniform foo { vec4 y; } bar;
            void main(){
               x = bar.y;
            }
        )glsl";

        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
        VkPipelineObj &pipe = pipe0;
        pipe.SetViewport(m_viewports);
        pipe.SetScissor(m_scissors);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());

        // dstSet is intentionally a null DescriptorSet: push descriptors ignore dstSet.
        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);

        // Note: pushing to descriptor set number 2.
        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
                                  &descriptor_write);
        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    }

    m_errorMonitor->VerifyNotFound();

    {
        // Note: the push descriptor set is now set number 3.
        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &ds_layout, &push_ds_layout});
        ASSERT_TRUE(pipeline_layout.initialized());

        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);

        char const *fsSource = R"glsl(
            #version 450
            layout(location=0) out vec4 x;
            layout(set=3) layout(binding=0) uniform foo { vec4 y; } bar;
            void main(){
               x = bar.y;
            }
        )glsl";

        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
        VkPipelineObj &pipe = pipe1;
        pipe.SetViewport(m_viewports);
        pipe.SetScissor(m_scissors);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());

        // Note: now pushing to descriptor set number 3.
        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 3, 1,
                                  &descriptor_write);
        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    }

    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
840
TEST_F(VkPositiveLayerTest, DynamicOffsetWithInactiveBinding) {
    // Create a descriptorSet w/ dynamic descriptors where 1 binding is inactive
    // We previously had a bug where dynamic offset of inactive bindings was still being used
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Bindings are deliberately listed out of order (2, 0, 1) to exercise binding-index handling.
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                           {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                       });

    // Create two buffers to update the descriptors with
    // The first will be 2k and used for bindings 0 & 1, the second is 1k for binding 2
    uint32_t qfi = 0;
    VkBufferCreateInfo buffCI = {};
    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffCI.size = 2048;
    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buffCI.queueFamilyIndexCount = 1;
    buffCI.pQueueFamilyIndices = &qfi;

    VkBufferObj dynamic_uniform_buffer_1, dynamic_uniform_buffer_2;
    dynamic_uniform_buffer_1.init(*m_device, buffCI);
    buffCI.size = 1024;
    dynamic_uniform_buffer_2.init(*m_device, buffCI);

    // Update descriptors
    const uint32_t BINDING_COUNT = 3;
    VkDescriptorBufferInfo buff_info[BINDING_COUNT] = {};
    buff_info[0].buffer = dynamic_uniform_buffer_1.handle();
    buff_info[0].offset = 0;
    buff_info[0].range = 256;
    buff_info[1].buffer = dynamic_uniform_buffer_1.handle();
    buff_info[1].offset = 256;
    buff_info[1].range = 512;
    buff_info[2].buffer = dynamic_uniform_buffer_2.handle();
    buff_info[2].offset = 0;
    buff_info[2].range = 512;

    // One write covering all three consecutive bindings (dstBinding 0, descriptorCount 3).
    VkWriteDescriptorSet descriptor_write;
    memset(&descriptor_write, 0, sizeof(descriptor_write));
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstSet = descriptor_set.set_;
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = BINDING_COUNT;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
    descriptor_write.pBufferInfo = buff_info;

    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    // Create PSO to be used for draw-time errors below
    // The fragment shader only uses bindings 0 and 2; binding 1 is intentionally inactive.
    char const *fsSource = R"glsl(
        #version 450
        layout(location=0) out vec4 x;
        layout(set=0) layout(binding=0) uniform foo1 { int x; int y; } bar1;
        layout(set=0) layout(binding=2) uniform foo2 { int x; int y; } bar2;
        void main(){
           x = vec4(bar1.y) + vec4(bar2.y);
        }
    )glsl";
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.InitState();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
    pipe.CreateGraphicsPipeline();

    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    // This update should succeed, but offset of inactive binding 1 oversteps binding 2 buffer size
    // we used to have a bug in this case.
    // offsets: binding 0 -> 0, binding 1 (inactive) -> 1024, binding 2 -> 256
    uint32_t dyn_off[BINDING_COUNT] = {0, 1024, 256};
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                              &descriptor_set.set_, BINDING_COUNT, dyn_off);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
931
TEST_F(VkPositiveLayerTest, CreateDescriptorSetBindingWithIgnoredSamplers) {
    TEST_DESCRIPTION("Test that layers conditionally do ignore the pImmutableSamplers on vkCreateDescriptorSetLayout");

    bool prop2_found = false;
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        prop2_found = true;
    } else {
        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    bool push_descriptor_found = false;
    if (prop2_found && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);

        // In addition to the extension being supported we need to have at least one available
        // Some implementations report an invalid maxPushDescriptors of 0
        push_descriptor_found = GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0;
    } else {
        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
               VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    // Deliberately bogus pointer: if any layer dereferences pImmutableSamplers for these
    // non-sampler descriptor types, the test should crash or fault rather than pass silently.
    const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
    const uint64_t fake_address_32 = 0xCDCDCDCD;
    const void *fake_pointer =
        sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
    const VkSampler *hopefully_undereferencable_pointer = reinterpret_cast<const VkSampler *>(fake_pointer);

    // regular descriptors
    m_errorMonitor->ExpectSuccess();
    {
        // Every non-sampler descriptor type for which pImmutableSamplers must be ignored.
        const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
            {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {6, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {7, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {8, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
        };
        const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
                                                       static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
        VkDescriptorSetLayout dsl;
        const VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
        ASSERT_VK_SUCCESS(err);
        vk::DestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
    }
    m_errorMonitor->VerifyNotFound();

    if (push_descriptor_found) {
        // push descriptors
        m_errorMonitor->ExpectSuccess();
        {
            // Same idea, but restricted to the descriptor types valid for push descriptor layouts
            // (no dynamic buffer types allowed there).
            const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
                {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {6, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            };
            const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr,
                                                           VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
                                                           static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
            VkDescriptorSetLayout dsl;
            const VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
            ASSERT_VK_SUCCESS(err);
            vk::DestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
        }
        m_errorMonitor->VerifyNotFound();
    }
}
1011
TEST_F(VkPositiveLayerTest, PushingDescriptorSetWithImmutableSampler) {
    TEST_DESCRIPTION("Use a push descriptor with an immutable sampler.");

    // VK_KHR_push_descriptor requires VK_KHR_get_physical_device_properties2 at the instance level.
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
        return;
    }

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    vk_testing::Sampler sampler;
    sampler.init(*m_device, sampler_ci);
    VkSampler sampler_handle = sampler.handle();

    VkImageObj image(m_device);
    image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    auto vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    // Guard against a missing entry point (consistent with the other push descriptor tests).
    ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);

    // The same bindings (with an immutable sampler) back both a regular set and a push set layout.
    std::vector<VkDescriptorSetLayoutBinding> ds_bindings = {
        {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, &sampler_handle}};
    OneOffDescriptorSet descriptor_set(m_device, ds_bindings);

    VkDescriptorSetLayoutObj push_dsl(m_device, ds_bindings, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    VkPipelineLayoutObj pipeline_layout(m_device, {&push_dsl});

    VkDescriptorImageInfo img_info = {};
    img_info.sampler = sampler_handle;
    img_info.imageView = imageView;
    img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

    VkWriteDescriptorSet descriptor_write = LvlInitStruct<VkWriteDescriptorSet>();
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = nullptr;
    descriptor_write.pImageInfo = &img_info;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    descriptor_write.dstSet = descriptor_set.set_;

    // Pushing a sampler write against a binding with an immutable sampler must not generate errors.
    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                              &descriptor_write);
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
1080
TEST_F(VkPositiveLayerTest, BindVertexBuffers2EXTNullDescriptors) {
    // Fixed typo in description string ("wih" -> "with").
    TEST_DESCRIPTION("Test nullDescriptor works with CmdBindVertexBuffers variants");

    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }

    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    // nullDescriptor comes from VK_EXT_robustness2.
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME)) {
        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);

    // vkCmdBindVertexBuffers2EXT comes from VK_EXT_extended_dynamic_state.
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported; skipped.\n", kSkipPrefix, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);

    auto robustness2_features = LvlInitStruct<VkPhysicalDeviceRobustness2FeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&robustness2_features);

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);

    if (!robustness2_features.nullDescriptor) {
        printf("%s nullDescriptor feature not supported, skipping test\n", kSkipPrefix);
        return;
    }

    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT =
        (PFN_vkCmdBindVertexBuffers2EXT)vk::GetInstanceProcAddr(instance(), "vkCmdBindVertexBuffers2EXT");
    ASSERT_TRUE(vkCmdBindVertexBuffers2EXT != nullptr);

    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    m_errorMonitor->ExpectSuccess();

    OneOffDescriptorSet descriptor_set(m_device, {
                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                 });

    // With nullDescriptor enabled, VK_NULL_HANDLE resources are legal descriptor contents.
    descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
    descriptor_set.WriteDescriptorBufferInfo(1, VK_NULL_HANDLE, 0, VK_WHOLE_SIZE, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    VkBufferView buffer_view = VK_NULL_HANDLE;
    descriptor_set.WriteDescriptorBufferView(2, buffer_view, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
    descriptor_set.UpdateDescriptorSets();
    descriptor_set.descriptor_writes.clear();

    // Binding a VK_NULL_HANDLE vertex buffer must pass with both the core and the EXT entry point.
    m_commandBuffer->begin();
    VkBuffer buffer = VK_NULL_HANDLE;
    VkDeviceSize offset = 0;
    vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &buffer, &offset);
    vkCmdBindVertexBuffers2EXT(m_commandBuffer->handle(), 0, 1, &buffer, &offset, nullptr, nullptr);
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
1150
TEST_F(VkPositiveLayerTest, CopyMutableDescriptors) {
    TEST_DESCRIPTION("Copy mutable descriptors.");

    AddRequiredExtensions(VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (!AreRequestedExtensionsEnabled()) {
        printf("%s Extension %s is not supported, skipping test.\n", kSkipPrefix, VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME);
        return;
    }
    auto mutable_descriptor_type_features = LvlInitStruct<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&mutable_descriptor_type_features);
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    // Guard against a missing entry point before calling through it (consistent with sibling tests).
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (mutable_descriptor_type_features.mutableDescriptorType == VK_FALSE) {
        printf("%s mutableDescriptorType feature not supported. Skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    m_errorMonitor->ExpectSuccess();

    // Binding 0 is a mutable descriptor that may hold either of these two types.
    VkDescriptorType descriptor_types[] = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER};

    VkMutableDescriptorTypeListVALVE mutable_descriptor_type_lists[2] = {};
    mutable_descriptor_type_lists[0].descriptorTypeCount = 2;
    mutable_descriptor_type_lists[0].pDescriptorTypes = descriptor_types;
    mutable_descriptor_type_lists[1].descriptorTypeCount = 0;
    mutable_descriptor_type_lists[1].pDescriptorTypes = nullptr;

    VkMutableDescriptorTypeCreateInfoVALVE mdtci = LvlInitStruct<VkMutableDescriptorTypeCreateInfoVALVE>();
    mdtci.mutableDescriptorTypeListCount = 2;
    mdtci.pMutableDescriptorTypeLists = mutable_descriptor_type_lists;

    VkDescriptorPoolSize pool_sizes[2] = {};
    pool_sizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    pool_sizes[0].descriptorCount = 2;
    pool_sizes[1].type = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE;
    pool_sizes[1].descriptorCount = 2;

    VkDescriptorPoolCreateInfo ds_pool_ci = LvlInitStruct<VkDescriptorPoolCreateInfo>(&mdtci);
    ds_pool_ci.maxSets = 2;
    ds_pool_ci.poolSizeCount = 2;
    ds_pool_ci.pPoolSizes = pool_sizes;

    vk_testing::DescriptorPool pool;
    pool.init(*m_device, ds_pool_ci);

    VkDescriptorSetLayoutBinding bindings[2] = {};
    bindings[0].binding = 0;
    bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE;
    bindings[0].descriptorCount = 1;
    bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
    bindings[0].pImmutableSamplers = nullptr;
    bindings[1].binding = 1;
    bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    bindings[1].descriptorCount = 1;
    bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
    bindings[1].pImmutableSamplers = nullptr;

    VkDescriptorSetLayoutCreateInfo create_info = LvlInitStruct<VkDescriptorSetLayoutCreateInfo>(&mdtci);
    create_info.bindingCount = 2;
    create_info.pBindings = bindings;

    vk_testing::DescriptorSetLayout set_layout;
    set_layout.init(*m_device, create_info);
    VkDescriptorSetLayout set_layout_handle = set_layout.handle();

    // Allocate two sets with the same layout: one source and one destination for the copy.
    VkDescriptorSetLayout layouts[2] = {set_layout_handle, set_layout_handle};

    VkDescriptorSetAllocateInfo allocate_info = LvlInitStruct<VkDescriptorSetAllocateInfo>();
    allocate_info.descriptorPool = pool.handle();
    allocate_info.descriptorSetCount = 2;
    allocate_info.pSetLayouts = layouts;

    VkDescriptorSet descriptor_sets[2];
    // Check the allocation result before using the handles below (previously ignored).
    const VkResult alloc_err = vk::AllocateDescriptorSets(device(), &allocate_info, descriptor_sets);
    ASSERT_VK_SUCCESS(alloc_err);

    VkBufferCreateInfo buffer_ci = LvlInitStruct<VkBufferCreateInfo>();
    buffer_ci.size = 32;
    buffer_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VkBufferObj buffer;
    buffer.init(*m_device, buffer_ci);

    VkDescriptorBufferInfo buffer_info = {};
    buffer_info.buffer = buffer.handle();
    buffer_info.offset = 0;
    buffer_info.range = buffer_ci.size;

    // Write a uniform buffer into the mutable binding (0) of the source set.
    VkWriteDescriptorSet descriptor_write = LvlInitStruct<VkWriteDescriptorSet>();
    descriptor_write.dstSet = descriptor_sets[0];
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = 1;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_write.pBufferInfo = &buffer_info;

    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, nullptr);

    // Copy from the mutable binding (src binding 0) into the plain uniform-buffer binding
    // (dst binding 1) of the second set; this must produce no validation errors.
    VkCopyDescriptorSet copy_set = LvlInitStruct<VkCopyDescriptorSet>();
    copy_set.srcSet = descriptor_sets[0];
    copy_set.srcBinding = 0;
    copy_set.dstSet = descriptor_sets[1];
    copy_set.dstBinding = 1;
    copy_set.descriptorCount = 1;

    vk::UpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_set);
    m_errorMonitor->VerifyNotFound();
}
1260