1 /*
2 * Copyright (c) 2015-2021 The Khronos Group Inc.
3 * Copyright (c) 2015-2021 Valve Corporation
4 * Copyright (c) 2015-2021 LunarG, Inc.
5 * Copyright (c) 2015-2021 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Author: Chia-I Wu <olvaffe@gmail.com>
14 * Author: Chris Forbes <chrisf@ijw.co.nz>
15 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
16 * Author: Mark Lobodzinski <mark@lunarg.com>
17 * Author: Mike Stroyan <mike@LunarG.com>
18 * Author: Tobin Ehlis <tobine@google.com>
19 * Author: Tony Barbour <tony@LunarG.com>
20 * Author: Cody Northrop <cnorthrop@google.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 * Author: Jeremy Kniager <jeremyk@lunarg.com>
23 * Author: Shannon McPherson <shannon@lunarg.com>
24 * Author: John Zulauf <jzulauf@lunarg.com>
25 */
26
27 #include "../layer_validation_tests.h"
28 #include "vk_extension_helper.h"
29
30 #include <algorithm>
31 #include <array>
32 #include <chrono>
33 #include <memory>
34 #include <mutex>
35 #include <thread>
36
37 #include "cast_utils.h"
38
39 //
40 // POSITIVE VALIDATION TESTS
41 //
42 // These tests do not expect to encounter ANY validation errors pass only if this is true
43
TEST_F(VkPositiveLayerTest, ViewportWithCountNoMultiViewport) {
    TEST_DESCRIPTION("DynamicViewportWithCount/ScissorWithCount without multiViewport feature not enabled.");

    // VK_EXT_extended_dynamic_state requires Vulkan 1.1 or later.
    uint32_t version = SetTargetApiVersion(VK_API_VERSION_1_1);
    if (version < VK_API_VERSION_1_1) {
        printf("%s At least Vulkan version 1.1 is required, skipping test.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }

    // Query extendedDynamicState support; skip if the implementation lacks it.
    auto extended_dynamic_state_features = LvlInitStruct<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&extended_dynamic_state_features);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);
    if (!extended_dynamic_state_features.extendedDynamicState) {
        printf("%s Test requires (unsupported) extendedDynamicState, skipping\n", kSkipPrefix);
        return;
    }
    // Ensure multiViewport feature is *not* enabled for this device
    features2.features.multiViewport = 0;
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // With the *_WITH_COUNT dynamic states set, the static viewportCount and
    // scissorCount may legally be zero even though multiViewport is disabled --
    // pipeline creation must succeed with no validation errors.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    const VkDynamicState dyn_states[] = {
        VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
        VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
    };
    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dyn_state_ci.dynamicStateCount = size(dyn_states);
    dyn_state_ci.pDynamicStates = dyn_states;
    pipe.dyn_state_ci_ = dyn_state_ci;
    pipe.vp_state_ci_.viewportCount = 0;
    pipe.vp_state_ci_.scissorCount = 0;
    pipe.InitState();
    m_errorMonitor->ExpectSuccess();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
91
TEST_F(VkPositiveLayerTest, CreatePipelineComplexTypes) {
    TEST_DESCRIPTION("Smoke test for complex types across VS/FS boundary");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // Build the full tessellation pipeline: VS -> TCS -> TES -> FS.
    VkShaderObj vert_shader(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj tess_ctrl_shader(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tess_eval_shader(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj frag_shader(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Tessellation requires patch-list topology and a patch control point count (3).
    VkPipelineInputAssemblyStateCreateInfo input_assembly{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                          VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
    VkPipelineTessellationStateCreateInfo tess_state{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.gp_ci_.pTessellationState = &tess_state;
    helper.gp_ci_.pInputAssemblyState = &input_assembly;
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), tess_ctrl_shader.GetStageCreateInfo(),
                             tess_eval_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
122
TEST_F(VkPositiveLayerTest, CreatePipelineAttribMatrixType) {
    TEST_DESCRIPTION("Test that pipeline validation accepts matrices passed as vertex attributes");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Zero-initialized binding; the test only exercises attribute locations.
    VkVertexInputBindingDescription binding_desc;
    memset(&binding_desc, 0, sizeof(binding_desc));

    // A mat2x4 input consumes two consecutive locations, one vec4 per column.
    VkVertexInputAttributeDescription attr_descs[2];
    memset(attr_descs, 0, sizeof(attr_descs));

    uint32_t loc = 0;
    for (auto &attr : attr_descs) {
        attr.format = VK_FORMAT_R32G32B32A32_SFLOAT;
        attr.location = loc++;
    }

    char const *vsSource = R"glsl(
        #version 450
        layout(location=0) in mat2x4 x;
        void main(){
           gl_Position = x[0] + x[1];
        }
    )glsl";

    VkShaderObj vert_shader(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj frag_shader(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.vi_ci_.pVertexBindingDescriptions = &binding_desc;
    helper.vi_ci_.vertexBindingDescriptionCount = 1;
    helper.vi_ci_.pVertexAttributeDescriptions = attr_descs;
    helper.vi_ci_.vertexAttributeDescriptionCount = 2;
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();
    // No validation errors are expected for a matrix vertex attribute.
    m_errorMonitor->VerifyNotFound();
}
164
TEST_F(VkPositiveLayerTest, CreatePipelineAttribArrayType) {
    // Added a TEST_DESCRIPTION for consistency with the sibling tests in this file.
    TEST_DESCRIPTION("Test that pipeline validation accepts arrays passed as vertex attributes");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkVertexInputBindingDescription input_binding;
    memset(&input_binding, 0, sizeof(input_binding));

    // vec4 x[2] consumes two consecutive locations, one vec4 per array element.
    VkVertexInputAttributeDescription input_attribs[2];
    memset(input_attribs, 0, sizeof(input_attribs));

    for (int i = 0; i < 2; i++) {
        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
        input_attribs[i].location = i;
    }

    char const *vsSource = R"glsl(
        #version 450
        layout(location=0) in vec4 x[2];
        void main(){
           gl_Position = x[0] + x[1];
        }
    )glsl";

    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
    pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
205
TEST_F(VkPositiveLayerTest, CreatePipelineAttribComponents) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts consuming a vertex attribute through multiple vertex shader inputs, each consuming "
        "a different subset of the components, and that fragment shader-attachment validation tolerates multiple duplicate "
        "location outputs");
    m_errorMonitor->ExpectSuccess(kErrorBit | kWarningBit);

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkVertexInputBindingDescription input_binding;
    memset(&input_binding, 0, sizeof(input_binding));

    VkVertexInputAttributeDescription input_attribs[3];
    memset(input_attribs, 0, sizeof(input_attribs));

    for (int i = 0; i < 3; i++) {
        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
        input_attribs[i].location = i;
    }

    // Location 1 is consumed by two inputs: a vec3 (components 0-2) and a float (component 3).
    char const *vsSource = R"glsl(
        #version 450
        layout(location=0) in vec4 x;
        layout(location=1) in vec3 y1;
        layout(location=1, component=3) in float y2;
        layout(location=2) in vec4 z;
        void main(){
           gl_Position = x + vec4(y1, y2) + z;
        }
    )glsl";
    // Outputs for locations 0 and 1 are declared component-wise across several variables.
    char const *fsSource = R"glsl(
        #version 450
        layout(location=0, component=0) out float color0;
        layout(location=0, component=1) out float color1;
        layout(location=0, component=2) out float color2;
        layout(location=0, component=3) out float color3;
        layout(location=1, component=0) out vec2 second_color0;
        layout(location=1, component=2) out vec2 second_color1;
        void main(){
           color0 = float(1);
           second_color0 = vec2(1);
        }
    )glsl";

    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipe(m_device);

    VkDescriptorSetObj descriptorSet(m_device);
    descriptorSet.AppendDummy();
    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);

    // Create a renderPass with two color attachments
    VkAttachmentReference attachments[2] = {};
    attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
    attachments[1].attachment = 1;
    attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;

    VkSubpassDescription subpass = {};
    subpass.pColorAttachments = attachments;
    subpass.colorAttachmentCount = 2;

    VkRenderPassCreateInfo rpci = {};
    rpci.subpassCount = 1;
    rpci.pSubpasses = &subpass;
    rpci.attachmentCount = 2;

    VkAttachmentDescription attach_desc[2] = {};
    attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
    attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
    attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
    attach_desc[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
    attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
    attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
    attach_desc[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;

    rpci.pAttachments = attach_desc;
    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;

    VkRenderPass renderpass;
    // Bug fix: the VkResult was previously ignored; assert success before using the handle,
    // matching the pattern used by CreatePipelineInputAttachmentPositive.
    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
    ASSERT_VK_SUCCESS(err);
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);

    VkPipelineColorBlendAttachmentState att_state1 = {};
    att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
    att_state1.blendEnable = VK_FALSE;

    pipe.AddColorAttachment(0, att_state1);
    pipe.AddColorAttachment(1, att_state1);
    pipe.AddVertexInputBindings(&input_binding, 1);
    pipe.AddVertexInputAttribs(input_attribs, 3);
    pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
    vk::DestroyRenderPass(m_device->device(), renderpass, nullptr);

    m_errorMonitor->VerifyNotFound();
}
308
TEST_F(VkPositiveLayerTest, CreatePipelineSimplePositive) {
    // Added a TEST_DESCRIPTION for consistency with the sibling tests in this file.
    TEST_DESCRIPTION("Verify that creating a trivial graphics pipeline generates no validation errors");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // All-default pipeline state from the helper; creation must be error-free.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
322
TEST_F(VkPositiveLayerTest, CreatePipelineRelaxedTypeMatch) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts the relaxed type matching rules set out in 14.1.3: fundamental type must match, and "
        "producer side must have at least as many components");
    m_errorMonitor->ExpectSuccess();

    // VK 1.0.8 Specification, 14.1.3 "Additionally,..." block

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // The producer (VS) emits wider types than the consumer (FS) reads:
    // vec3 -> float, ivec3 -> int, vec3 -> vec2. All are legal per 14.1.3.
    char const *vsSource = R"glsl(
        #version 450
        layout(location=0) out vec3 x;
        layout(location=1) out ivec3 y;
        layout(location=2) out vec3 z;
        void main(){
           gl_Position = vec4(0);
           x = vec3(0); y = ivec3(0); z = vec3(0);
        }
    )glsl";
    char const *fsSource = R"glsl(
        #version 450
        layout(location=0) out vec4 color;
        layout(location=0) in float x;
        layout(location=1) flat in int y;
        layout(location=2) in vec2 z;
        void main(){
           color = vec4(1 + x + y + z.x);
        }
    )glsl";

    VkShaderObj vert_shader(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj frag_shader(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
366
TEST_F(VkPositiveLayerTest, CreatePipelineTessPerVertex) {
    TEST_DESCRIPTION("Test that pipeline validation accepts per-vertex variables passed between the TCS and TES stages");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    // TCS writes a per-vertex int array at location 0; TES reads the matching
    // per-vertex input array -- this cross-stage interface is what's validated.
    char const *tcsSource = R"glsl(
        #version 450
        layout(location=0) out int x[];
        layout(vertices=3) out;
        void main(){
           gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;
           gl_TessLevelInner[0] = 1;
           x[gl_InvocationID] = gl_InvocationID;
        }
    )glsl";
    char const *tesSource = R"glsl(
        #version 450
        layout(triangles, equal_spacing, cw) in;
        layout(location=0) in int x[];
        void main(){
           gl_Position.xyz = gl_TessCoord;
           gl_Position.w = x[0] + x[1] + x[2];
        }
    )glsl";

    VkShaderObj vs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Tessellation needs patch-list topology plus tessellation state (3 control points).
    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};

    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.gp_ci_.pTessellationState = &tsci;
    pipe.gp_ci_.pInputAssemblyState = &iasci;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
418
TEST_F(VkPositiveLayerTest, CreatePipelineGeometryInputBlockPositive) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a user-defined interface block passed into the geometry shader. This is interesting "
        "because the 'extra' array level is not present on the member type, but on the block instance.");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().geometryShader) {
        printf("%s Device does not support geometry shaders; skipped.\n", kSkipPrefix);
        return;
    }

    // VS emits a named interface block; the GS consumes it as an arrayed
    // block instance (gs_in[]) -- the array level lives on the instance.
    char const *vsSource = R"glsl(
        #version 450

        layout(location = 0) out VertexData { vec4 x; } gs_out;

        void main(){
           gs_out.x = vec4(1.0f);
        }
    )glsl";

    char const *gsSource = R"glsl(
        #version 450
        layout(triangles) in;
        layout(triangle_strip, max_vertices=3) out;
        layout(location=0) in VertexData { vec4 x; } gs_in[];
        void main() {
           gl_Position = gs_in[0].x;
           EmitVertex();
        }
    )glsl";

    VkShaderObj vert_shader(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj geom_shader(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
    VkShaderObj frag_shader(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), geom_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.InitState();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
465
TEST_F(VkPositiveLayerTest, CreatePipeline64BitAttributesPositive) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts basic use of 64bit vertex attributes. This is interesting because they consume "
        "multiple locations.");
    m_errorMonitor->ExpectSuccess();

    if (!EnableDeviceProfileLayer()) {
        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().shaderFloat64) {
        printf("%s Device does not support 64bit vertex attributes; skipped.\n", kSkipPrefix);
        return;
    }

    // The format must also be usable as a vertex buffer format on this device.
    VkFormatProperties format_props;
    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, &format_props);
    if (!(format_props.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT)) {
        printf("%s Device does not support VK_FORMAT_R64G64B64A64_SFLOAT vertex buffers; skipped.\n", kSkipPrefix);
        return;
    }

    VkVertexInputBindingDescription input_bindings[1];
    memset(input_bindings, 0, sizeof(input_bindings));

    // Each R64G64B64A64 attribute (a dvec4 column) is 32 bytes and occupies
    // TWO locations, hence locations 0/2/4/6 for the four dmat4 columns.
    VkVertexInputAttributeDescription input_attribs[4];
    memset(input_attribs, 0, sizeof(input_attribs));
    input_attribs[0].location = 0;
    input_attribs[0].offset = 0;
    input_attribs[0].format = VK_FORMAT_R64G64B64A64_SFLOAT;
    input_attribs[1].location = 2;
    input_attribs[1].offset = 32;
    input_attribs[1].format = VK_FORMAT_R64G64B64A64_SFLOAT;
    input_attribs[2].location = 4;
    input_attribs[2].offset = 64;
    input_attribs[2].format = VK_FORMAT_R64G64B64A64_SFLOAT;
    input_attribs[3].location = 6;
    input_attribs[3].offset = 96;
    input_attribs[3].format = VK_FORMAT_R64G64B64A64_SFLOAT;

    char const *vsSource = R"glsl(
        #version 450
        layout(location=0) in dmat4 x;
        void main(){
           gl_Position = vec4(x[0][0]);
        }
    )glsl";

    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.vi_ci_.pVertexBindingDescriptions = input_bindings;
    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
    pipe.vi_ci_.vertexAttributeDescriptionCount = 4;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
533
TEST_F(VkPositiveLayerTest, CreatePipelineInputAttachmentPositive) {
    TEST_DESCRIPTION("Positive test for a correctly matched input attachment");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // FS reads subpass input 0 (set 0, binding 0); the render pass below wires
    // attachment 1 to that input attachment reference.
    char const *fsSource = R"glsl(
        #version 450
        layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;
        layout(location=0) out vec4 color;
        void main() {
           color = subpassLoad(x);
        }
    )glsl";

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);
    pipe.AddDefaultColorAttachment();
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Descriptor set layout matching the shader's input attachment binding.
    VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
    const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
    const VkPipelineLayoutObj pl(m_device, {&dsl});

    // Attachment 0: color (COLOR_ATTACHMENT_OPTIMAL); attachment 1: input (GENERAL).
    VkAttachmentDescription descs[2] = {
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
    };
    VkAttachmentReference color = {
        0,
        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };
    VkAttachmentReference input = {
        1,
        VK_IMAGE_LAYOUT_GENERAL,
    };

    VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
    VkRenderPass rp;
    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    // should be OK. would go wrong here if it's going to...
    pipe.CreateVKPipeline(pl.handle(), rp);

    m_errorMonitor->VerifyNotFound();

    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
}
592
TEST_F(VkPositiveLayerTest, CreateComputePipelineMissingDescriptorUnusedPositive) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a compute pipeline which declares a descriptor-backed resource which is not "
        "provided, but the shader does not statically use it. This is interesting because it requires compute pipelines to have a "
        "proper descriptor use walk, which they didn't for some time.");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // The buffer at set 0 / binding 0 is declared but never statically used.
    char const *csSource = R"glsl(
        #version 450
        layout(local_size_x=1) in;
        layout(set=0, binding=0) buffer block { vec4 x; };
        void main(){
           // x is not used.
        }
    )glsl";

    CreateComputePipelineHelper compute_pipe(*this);
    compute_pipe.InitInfo();
    compute_pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    compute_pipe.InitState();
    compute_pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
619
TEST_F(VkPositiveLayerTest, CreateComputePipelineFragmentShadingRate) {
    TEST_DESCRIPTION("Verify that pipeline validation accepts a compute pipeline with fragment shading rate extension enabled");
    m_errorMonitor->ExpectSuccess();

    // Enable KHR_fragment_shading_rate and all of its required extensions
    bool fsr_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    if (fsr_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    fsr_extensions = fsr_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
    fsr_extensions = fsr_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_2_EXTENSION_NAME);
    fsr_extensions = fsr_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME);
    fsr_extensions = fsr_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
    fsr_extensions = fsr_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME);
    if (fsr_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME);
    } else {
        printf("%s requires VK_KHR_fragment_shading_rate.\n", kSkipPrefix);
        return;
    }

    // Use LvlInitStruct for sType/pNext chaining, consistent with the rest of
    // this file (previously hand-initialized with `= {}` + explicit sType).
    auto fsr_features = LvlInitStruct<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>();
    fsr_features.pipelineFragmentShadingRate = true;
    fsr_features.primitiveFragmentShadingRate = true;

    auto device_features = LvlInitStruct<VkPhysicalDeviceFeatures2>(&fsr_features);

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &device_features));

    // The buffer at set 0 / binding 0 is declared but never statically used.
    char const *csSource = R"glsl(
        #version 450
        layout(local_size_x=1) in;
        layout(set=0, binding=0) buffer block { vec4 x; };
        void main(){
           // x is not used.
        }
    )glsl";

    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
675
TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsSampler) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a shader consuming only the sampler portion of a combined image + sampler");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Binding 0 is COMBINED_IMAGE_SAMPLER while the shader declares a pure
    // sampler there; consuming only the sampler half must be accepted.
    std::vector<VkDescriptorSetLayoutBinding> bindings = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
    };

    char const *csSource = R"glsl(
        #version 450
        layout(local_size_x=1) in;
        layout(set=0, binding=0) uniform sampler s;
        layout(set=0, binding=1) uniform texture2D t;
        layout(set=0, binding=2) buffer block { vec4 x; };
        void main() {
           x = texture(sampler2D(t, s), vec2(0));
        }
    )glsl";
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    // Vector assignment replaces the previous resize + raw memcpy pattern.
    pipe.dsl_bindings_ = bindings;
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    // Note: removed a redundant second ExpectSuccess() call present here before.
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
710
TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsImage) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a shader consuming only the image portion of a combined image + sampler");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // Binding 0 is COMBINED_IMAGE_SAMPLER while the shader declares a pure
    // texture there; consuming only the image half must be accepted.
    std::vector<VkDescriptorSetLayoutBinding> bindings = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
    };

    char const *csSource = R"glsl(
        #version 450
        layout(local_size_x=1) in;
        layout(set=0, binding=0) uniform texture2D t;
        layout(set=0, binding=1) uniform sampler s;
        layout(set=0, binding=2) buffer block { vec4 x; };
        void main() {
           x = texture(sampler2D(t, s), vec2(0));
        }
    )glsl";
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    // Vector assignment replaces the previous resize + raw memcpy pattern.
    pipe.dsl_bindings_ = bindings;
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    // Note: removed a redundant second ExpectSuccess() call present here before.
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
745
TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsBoth) {
    TEST_DESCRIPTION(
        "Test that pipeline validation accepts a shader consuming both the sampler and the image of a combined image+sampler but "
        "via separate variables");
    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());

    // A single COMBINED_IMAGE_SAMPLER at binding 0 backs BOTH shader variables
    // (texture and sampler declared at the same binding).
    std::vector<VkDescriptorSetLayoutBinding> bindings = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
    };

    char const *csSource = R"glsl(
        #version 450
        layout(local_size_x=1) in;
        layout(set=0, binding=0) uniform texture2D t;
        layout(set=0, binding=0) uniform sampler s;  // both binding 0!
        layout(set=0, binding=1) buffer block { vec4 x; };
        void main() {
           x = texture(sampler2D(t, s), vec2(0));
        }
    )glsl";
    CreateComputePipelineHelper pipe(*this);
    pipe.InitInfo();
    // Vector assignment replaces the previous resize + raw memcpy pattern.
    pipe.dsl_bindings_ = bindings;
    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
    pipe.InitState();
    // Note: removed a redundant second ExpectSuccess() call present here before.
    pipe.CreateComputePipeline();

    m_errorMonitor->VerifyNotFound();
}
780
TEST_F(VkPositiveLayerTest, PSOPolygonModeValid) {
    TEST_DESCRIPTION("Verify that using a solid polygon fill mode works correctly.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if (IsPlatform(kNexusPlayer)) {
        printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    std::vector<const char *> device_extension_names;
    auto features = m_device->phy().features();
    // Artificially disable support for non-solid fill modes
    features.fillModeNonSolid = false;
    // The sacrificial device object
    VkDeviceObj test_device(0, gpu(), device_extension_names, &features);

    VkRenderpassObj render_pass(&test_device);

    const VkPipelineLayoutObj pipeline_layout(&test_device);

    VkPipelineRasterizationStateCreateInfo rs_ci = {};
    rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs_ci.pNext = nullptr;
    rs_ci.lineWidth = 1.0f;
    rs_ci.rasterizerDiscardEnable = false;

    // Shaders are created against the sacrificial device, not m_device.
    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(&test_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Set polygonMode=FILL. No error is expected
    m_errorMonitor->ExpectSuccess();
    {
        // Scoped so the pipeline is destroyed before VerifyNotFound().
        VkPipelineObj pipe(&test_device);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        // Set polygonMode to a good value
        rs_ci.polygonMode = VK_POLYGON_MODE_FILL;
        pipe.SetRasterization(&rs_ci);
        pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
    }
    m_errorMonitor->VerifyNotFound();
}
825
TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineWithIgnoredPointers) {
    TEST_DESCRIPTION("Create Graphics Pipeline with pointers that must be ignored by layers");
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(Init());
    if (IsPlatform(kNexusPlayer)) {
        printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
        return;
    }

    m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
    ASSERT_TRUE(m_depth_stencil_fmt != 0);

    m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), m_depth_stencil_fmt);

    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(m_depthStencil->BindInfo()));

    // A non-null but invalid pointer: if validation (wrongly) dereferences state the
    // spec says must be ignored, the test crashes instead of silently passing.
    const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
    const uint64_t fake_address_32 = 0xCDCDCDCD;
    void *hopefully_undereferencable_pointer =
        sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);

    const VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info{
        VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
        nullptr,  // pNext
        0,        // flags
        0,
        nullptr,  // bindings
        0,
        nullptr  // attributes
    };

    const VkPipelineInputAssemblyStateCreateInfo pipeline_input_assembly_state_create_info{
        VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
        nullptr,  // pNext
        0,        // flags
        VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
        VK_FALSE  // primitive restart
    };

    const VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info_template{
        VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
        nullptr,   // pNext
        0,         // flags
        VK_FALSE,  // depthClamp
        VK_FALSE,  // rasterizerDiscardEnable
        VK_POLYGON_MODE_FILL,
        VK_CULL_MODE_NONE,
        VK_FRONT_FACE_COUNTER_CLOCKWISE,
        VK_FALSE,  // depthBias
        0.0f,
        0.0f,
        0.0f,  // depthBias params
        1.0f   // lineWidth
    };

    // Empty pipeline layout shared by all three sub-cases below.
    VkPipelineLayout pipeline_layout;
    {
        VkPipelineLayoutCreateInfo pipeline_layout_create_info{
            VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            0,
            nullptr,  // layouts
            0,
            nullptr  // push constants
        };

        VkResult err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_create_info, nullptr, &pipeline_layout);
        ASSERT_VK_SUCCESS(err);
    }

    // Case 1: rasterizer discarded and no tessellation stages, so tessellation, viewport,
    // multisample, depth/stencil and color-blend state must all be ignored.
    {
        m_errorMonitor->ExpectSuccess();

        VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
            pipeline_rasterization_state_create_info_template;
        pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_TRUE;

        VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
            VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            1,        // stageCount
            &vs.GetStageCreateInfo(),
            &pipeline_vertex_input_state_create_info,
            &pipeline_input_assembly_state_create_info,
            reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>(hopefully_undereferencable_pointer),
            reinterpret_cast<const VkPipelineViewportStateCreateInfo *>(hopefully_undereferencable_pointer),
            &pipeline_rasterization_state_create_info,
            reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>(hopefully_undereferencable_pointer),
            reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
            reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
            nullptr,  // dynamic states
            pipeline_layout,
            m_renderPass,
            0,  // subpass
            VK_NULL_HANDLE,
            0};

        // Initialize the handle and check the result: binding or destroying an
        // uninitialized handle on a failed creation would be undefined behavior.
        VkPipeline pipeline = VK_NULL_HANDLE;
        VkResult err = vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr,
                                                   &pipeline);
        ASSERT_VK_SUCCESS(err);
        m_errorMonitor->VerifyNotFound();

        m_errorMonitor->ExpectSuccess();
        m_commandBuffer->begin();
        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
        m_errorMonitor->VerifyNotFound();
        vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
    }

    const VkPipelineMultisampleStateCreateInfo pipeline_multisample_state_create_info{
        VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
        nullptr,  // pNext
        0,        // flags
        VK_SAMPLE_COUNT_1_BIT,
        VK_FALSE,  // sample shading
        0.0f,      // minSampleShading
        nullptr,   // pSampleMask
        VK_FALSE,  // alphaToCoverageEnable
        VK_FALSE   // alphaToOneEnable
    };

    // Case 2: rasterization enabled but the subpass has no attachments, so
    // depth/stencil and color-blend state must be ignored.
    {
        m_errorMonitor->ExpectSuccess();

        VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
            pipeline_rasterization_state_create_info_template;
        pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;

        VkViewport viewport = {0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f};
        VkRect2D scissor = {{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}};

        const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
            VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            1,
            &viewport,
            1,
            &scissor};

        // Attachment-less render pass with a single empty subpass.
        VkRenderPass render_pass;
        {
            VkSubpassDescription subpass_desc = {};

            VkRenderPassCreateInfo render_pass_create_info{
                VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
                nullptr,  // pNext
                0,        // flags
                0,
                nullptr,  // attachments
                1,
                &subpass_desc,
                0,
                nullptr  // subpass dependencies
            };

            VkResult err = vk::CreateRenderPass(m_device->handle(), &render_pass_create_info, nullptr, &render_pass);
            ASSERT_VK_SUCCESS(err);
        }

        VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
            VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            1,        // stageCount
            &vs.GetStageCreateInfo(),
            &pipeline_vertex_input_state_create_info,
            &pipeline_input_assembly_state_create_info,
            nullptr,
            &pipeline_viewport_state_create_info,
            &pipeline_rasterization_state_create_info,
            &pipeline_multisample_state_create_info,
            reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
            reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
            nullptr,  // dynamic states
            pipeline_layout,
            render_pass,
            0,  // subpass
            VK_NULL_HANDLE,
            0};

        VkPipeline pipeline = VK_NULL_HANDLE;
        VkResult err = vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr,
                                                   &pipeline);
        ASSERT_VK_SUCCESS(err);

        m_errorMonitor->VerifyNotFound();

        vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
        vk::DestroyRenderPass(m_device->handle(), render_pass, nullptr);
    }

    // Case 3: viewport and scissor are dynamic, so pViewports/pScissors must be ignored.
    {
        m_errorMonitor->ExpectSuccess();

        VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
            pipeline_rasterization_state_create_info_template;
        pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;

        const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
            VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            1,
            reinterpret_cast<const VkViewport *>(hopefully_undereferencable_pointer),
            1,
            reinterpret_cast<const VkRect2D *>(hopefully_undereferencable_pointer)};

        const VkPipelineDepthStencilStateCreateInfo pipeline_depth_stencil_state_create_info{
            VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
        };

        const VkPipelineColorBlendAttachmentState pipeline_color_blend_attachment_state = {};

        const VkPipelineColorBlendStateCreateInfo pipeline_color_blend_state_create_info{
            VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            VK_FALSE,
            VK_LOGIC_OP_CLEAR,
            1,
            &pipeline_color_blend_attachment_state,
            {0.0f, 0.0f, 0.0f, 0.0f}};

        const VkDynamicState dynamic_states[2] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};

        const VkPipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info{
            VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
            nullptr,  // pNext
            0,        // flags
            2, dynamic_states};

        VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
                                                                   nullptr,  // pNext
                                                                   0,        // flags
                                                                   1,        // stageCount
                                                                   &vs.GetStageCreateInfo(),
                                                                   &pipeline_vertex_input_state_create_info,
                                                                   &pipeline_input_assembly_state_create_info,
                                                                   nullptr,
                                                                   &pipeline_viewport_state_create_info,
                                                                   &pipeline_rasterization_state_create_info,
                                                                   &pipeline_multisample_state_create_info,
                                                                   &pipeline_depth_stencil_state_create_info,
                                                                   &pipeline_color_blend_state_create_info,
                                                                   &pipeline_dynamic_state_create_info,  // dynamic states
                                                                   pipeline_layout,
                                                                   m_renderPass,
                                                                   0,  // subpass
                                                                   VK_NULL_HANDLE,
                                                                   0};

        VkPipeline pipeline = VK_NULL_HANDLE;
        VkResult err = vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr,
                                                   &pipeline);
        ASSERT_VK_SUCCESS(err);

        m_errorMonitor->VerifyNotFound();

        vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
    }

    vk::DestroyPipelineLayout(m_device->handle(), pipeline_layout, nullptr);
}
1094
TEST_F(VkPositiveLayerTest, CreatePipelineWithCoreChecksDisabled) {
    TEST_DESCRIPTION("Test CreatePipeline while the CoreChecks validation object is disabled");

    // Turn off the CoreChecks validation object through VK_EXT_validation_features.
    VkValidationFeatureDisableEXT disabled_checks[] = {VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT};
    VkValidationFeaturesEXT validation_features = {};
    validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    validation_features.disabledValidationFeatureCount = 1;
    validation_features.pDisabledValidationFeatures = disabled_checks;

    const VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &validation_features));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    VkPipelineInputAssemblyStateCreateInfo input_assembly{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                          VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_FALSE};

    // A plain graphics pipeline must still be creatable without CoreChecks present.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.gp_ci_.pInputAssemblyState = &input_assembly;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    m_errorMonitor->ExpectSuccess();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1122
TEST_F(VkPositiveLayerTest, CreatePipeineWithTessellationDomainOrigin) {
    TEST_DESCRIPTION(
        "Test CreatePipeline when VkPipelineTessellationStateCreateInfo.pNext include "
        "VkPipelineTessellationDomainOriginStateCreateInfo");
    SetTargetApiVersion(VK_API_VERSION_1_1);

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Tessellation requires patch-list input topology.
    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};

    // pNext is a const void *, so terminate the chain with nullptr -- not the
    // non-dispatchable-handle constant VK_NULL_HANDLE the original used here.
    VkPipelineTessellationDomainOriginStateCreateInfo tessellationDomainOriginStateInfo = {
        VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, nullptr,
        VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT};

    // Chain the domain-origin struct into the tessellation state (3 control points per patch).
    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
                                               &tessellationDomainOriginStateInfo, 0, 3};

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.gp_ci_.pTessellationState = &tsci;
    pipe.gp_ci_.pInputAssemblyState = &iasci;
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    m_errorMonitor->ExpectSuccess();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1162
TEST_F(VkPositiveLayerTest, ViewportArray2NV) {
    TEST_DESCRIPTION("Test to validate VK_NV_viewport_array2");

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    // gl_ViewportMask requires multiViewport; the test also builds tessellation and
    // geometry variants, so those features must be available as well.
    VkPhysicalDeviceFeatures available_features = {};
    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));

    if (!available_features.multiViewport) {
        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    if (!available_features.tessellationShader) {
        printf("%s VkPhysicalDeviceFeatures::tessellationShader is not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    if (!available_features.geometryShader) {
        printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping tests\n", kSkipPrefix);
        return;
    }

    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Pass-through tessellation control shader: fixed tess levels, positions forwarded.
    const char tcs_src[] = R"glsl(
        #version 450
        layout(vertices = 3) out;

        void main() {
            gl_TessLevelOuter[0] = 4.0f;
            gl_TessLevelOuter[1] = 4.0f;
            gl_TessLevelOuter[2] = 4.0f;
            gl_TessLevelInner[0] = 3.0f;

            gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
        }
    )glsl";

    // Create tessellation control and fragment shader here since they will not be
    // modified by the different test cases.
    VkShaderObj tcs(m_device, tcs_src, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Two side-by-side viewports/scissors (left and right half of the render target),
    // matching the multi-viewport broadcast exercised by gl_ViewportMask.
    std::vector<VkViewport> vps = {{0.0f, 0.0f, m_width / 2.0f, m_height}, {m_width / 2.0f, 0.0f, m_width / 2.0f, m_height}};
    std::vector<VkRect2D> scs = {
        {{0, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}},
        {{static_cast<int32_t>(m_width) / 2, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}}};

    // Ordered so that `stage >= X` comparisons below select which stages are present:
    // VERTEX only, VERTEX+TESS, or VERTEX+TESS+GEOMETRY.
    enum class TestStage { VERTEX = 0, TESSELLATION_EVAL = 1, GEOMETRY = 2 };
    std::array<TestStage, 3> vertex_stages = {{TestStage::VERTEX, TestStage::TESSELLATION_EVAL, TestStage::GEOMETRY}};

    // Verify that the usage of gl_ViewportMask[] in the allowed vertex processing
    // stages does not cause any errors.
    for (auto stage : vertex_stages) {
        m_errorMonitor->ExpectSuccess();

        // Tessellation variants need patch-list topology; plain vertex uses triangles.
        VkPipelineInputAssemblyStateCreateInfo iaci = {VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO};
        iaci.topology = (stage != TestStage::VERTEX) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

        VkPipelineTessellationStateCreateInfo tsci = {VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO};
        tsci.patchControlPoints = 3;

        const VkPipelineLayoutObj pl(m_device);

        VkPipelineObj pipe(m_device);
        pipe.AddDefaultColorAttachment();
        pipe.SetInputAssembly(&iaci);
        pipe.SetViewport(vps);
        pipe.SetScissor(scs);
        pipe.AddShader(&fs);

        // Shader sources are assembled per iteration; only the LAST vertex-processing
        // stage in the pipeline is allowed to write gl_ViewportMask.
        std::stringstream vs_src, tes_src, geom_src;

        vs_src << R"(
            #version 450
            #extension GL_NV_viewport_array2 : require

            vec2 positions[3] = { vec2( 0.0f, -0.5f),
                                  vec2( 0.5f,  0.5f),
                                  vec2(-0.5f,  0.5f)
                                };
            void main() {)";
        // Write viewportMask if the vertex shader is the last vertex processing stage.
        if (stage == TestStage::VERTEX) {
            vs_src << "gl_ViewportMask[0] = 3;\n";
        }
        vs_src << R"(
                gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
            })";

        VkShaderObj vs(m_device, vs_src.str().c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
        pipe.AddShader(&vs);

        // unique_ptr keeps the optional shader objects alive until CreateVKPipeline below.
        std::unique_ptr<VkShaderObj> tes, geom;

        if (stage >= TestStage::TESSELLATION_EVAL) {
            tes_src << R"(
                #version 450
                #extension GL_NV_viewport_array2 : require
                layout(triangles) in;

                void main() {
                   gl_Position = (gl_in[0].gl_Position * gl_TessCoord.x +
                                  gl_in[1].gl_Position * gl_TessCoord.y +
                                  gl_in[2].gl_Position * gl_TessCoord.z);)";
            // Write viewportMask if the tess eval shader is the last vertex processing stage.
            if (stage == TestStage::TESSELLATION_EVAL) {
                tes_src << "gl_ViewportMask[0] = 3;\n";
            }
            tes_src << "}";

            tes = std::unique_ptr<VkShaderObj>(
                new VkShaderObj(m_device, tes_src.str().c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this));
            pipe.AddShader(tes.get());
            pipe.AddShader(&tcs);
            pipe.SetTessellation(&tsci);
        }

        if (stage >= TestStage::GEOMETRY) {
            // The geometry shader is always the last stage when present, so it
            // unconditionally writes gl_ViewportMask.
            geom_src << R"(
                #version 450
                #extension GL_NV_viewport_array2 : require
                layout(triangles) in;
                layout(triangle_strip, max_vertices = 3) out;

                void main() {
                   gl_ViewportMask[0] = 3;
                   for(int i = 0; i < 3; ++i) {
                       gl_Position = gl_in[i].gl_Position;
                       EmitVertex();
                   }
                })";

            geom =
                std::unique_ptr<VkShaderObj>(new VkShaderObj(m_device, geom_src.str().c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this));
            pipe.AddShader(geom.get());
        }

        pipe.CreateVKPipeline(pl.handle(), renderPass());
        m_errorMonitor->VerifyNotFound();
    }
}
1312
TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotConsumedButAlphaToCoverageEnabled) {
    TEST_DESCRIPTION(
        "Test that no warning is produced when writing to non-existing color attachment if alpha to coverage is enabled.");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));

    // Multisample state with alpha-to-coverage turned on: the fragment output feeds
    // coverage even though the subpass has zero color attachments.
    VkPipelineMultisampleStateCreateInfo ms_ci = {};
    ms_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    ms_ci.alphaToCoverageEnable = VK_TRUE;

    const auto configure = [&](CreatePipelineHelper &helper) {
        helper.pipe_ms_state_ci_ = ms_ci;
        helper.cb_ci_.attachmentCount = 0;
    };
    // No error or warning may be raised while creating this pipeline.
    CreatePipelineHelper::OneshotTest(*this, configure, kErrorBit | kWarningBit, "", true);
}
1331
TEST_F(VkPositiveLayerTest, CreatePipelineAttachmentUnused) {
    TEST_DESCRIPTION("Make sure unused attachments are correctly ignored.");

    ASSERT_NO_FATAL_FAILURE(Init());
    if (IsPlatform(kNexusPlayer)) {
        printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    char const *fsSource = R"glsl(
        #version 450
        layout(location=0) out vec4 x;
        void main(){
           x = vec4(1); // attachment is unused
        }
    )glsl";
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // The subpass references its single color attachment slot as VK_ATTACHMENT_UNUSED,
    // so the fragment shader's location-0 write has nowhere to land -- and that is fine.
    VkAttachmentReference const refs[1]{{VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};

    VkSubpassDescription const subpasses[1]{
        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, refs, nullptr, nullptr, 0, nullptr}};

    VkAttachmentDescription const attachments[1]{{0, VK_FORMAT_B8G8R8A8_UNORM, VK_SAMPLE_COUNT_1_BIT,
                                                  VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_STORE,
                                                  VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                                  VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};

    VkRenderPassCreateInfo const rp_create_info{
        VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attachments, 1, subpasses, 0, nullptr};

    VkRenderPass render_pass;
    ASSERT_VK_SUCCESS(vk::CreateRenderPass(m_device->device(), &rp_create_info, nullptr, &render_pass));

    const auto configure = [&](CreatePipelineHelper &helper) {
        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
        helper.gp_ci_.renderPass = render_pass;
    };
    CreatePipelineHelper::OneshotTest(*this, configure, kErrorBit | kWarningBit, "", true);

    vk::DestroyRenderPass(m_device->device(), render_pass, nullptr);
}
1376
TEST_F(VkPositiveLayerTest, CreateSurface) {
    TEST_DESCRIPTION("Create and destroy a surface without ever creating a swapchain");

    if (!AddSurfaceInstanceExtension()) {
        printf("%s surface extensions not supported, skipping CreateSurface test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Creating a surface and tearing it down again must not raise any message.
    m_errorMonitor->ExpectSuccess();
    if (!InitSurface()) {
        printf("%s Cannot create surface, skipping test\n", kSkipPrefix);
        return;
    }
    DestroySwapchain();  // cleans up both surface and swapchain, if they were created
    m_errorMonitor->VerifyNotFound();
}
1397
TEST_F(VkPositiveLayerTest, SampleMaskOverrideCoverageNV) {
    TEST_DESCRIPTION("Test to validate VK_NV_sample_mask_override_coverage");

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    // Simple full-triangle vertex shader feeding a color to the fragment stage.
    const char vs_src[] = R"glsl(
        #version 450
        layout(location=0) out vec4  fragColor;

        const vec2 pos[3] = { vec2( 0.0f, -0.5f),
                              vec2( 0.5f,  0.5f),
                              vec2(-0.5f,  0.5f)
                            };
        void main()
        {
            gl_Position = vec4(pos[gl_VertexIndex % 3], 0.0f, 1.0f);
            fragColor = vec4(0.0f, 1.0f, 0.0f, 1.0f);
        }
    )glsl";

    // Fragment shader redeclares gl_SampleMask with override_coverage, the feature
    // this extension adds.
    const char fs_src[] = R"glsl(
        #version 450
        #extension GL_NV_sample_mask_override_coverage : require

        layout(location = 0) in  vec4 fragColor;
        layout(location = 0) out vec4 outColor;

        layout(override_coverage) out int gl_SampleMask[];

        void main()
        {
            gl_SampleMask[0] = 0xff;
            outColor = fragColor;
        }
    )glsl";

    m_errorMonitor->ExpectSuccess();

    const VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_8_BIT;

    // Single 8-sample color attachment and one subpass using it.
    VkAttachmentDescription cAttachment = {};
    cAttachment.format = VK_FORMAT_B8G8R8A8_UNORM;
    cAttachment.samples = sampleCount;
    cAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    cAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    cAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    cAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
    cAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    cAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkAttachmentReference cAttachRef = {};
    cAttachRef.attachment = 0;
    cAttachRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    VkSubpassDescription subpass = {};
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments = &cAttachRef;

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
    rpci.attachmentCount = 1;
    rpci.pAttachments = &cAttachment;
    rpci.subpassCount = 1;
    rpci.pSubpasses = &subpass;

    VkRenderPass rp;
    // Verify creation succeeded; the original ignored the result, so a failure would
    // have fed an uninitialized handle to CreateVKPipeline/DestroyRenderPass below.
    ASSERT_VK_SUCCESS(vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp));

    const VkPipelineLayoutObj pl(m_device);

    VkSampleMask sampleMask = 0x01;
    VkPipelineMultisampleStateCreateInfo msaa = {VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO};
    msaa.rasterizationSamples = sampleCount;
    msaa.sampleShadingEnable = VK_FALSE;
    msaa.pSampleMask = &sampleMask;

    VkPipelineObj pipe(m_device);
    pipe.AddDefaultColorAttachment();
    pipe.SetMSAA(&msaa);

    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
    pipe.AddShader(&vs);

    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    pipe.AddShader(&fs);

    // Create pipeline and make sure that the usage of NV_sample_mask_override_coverage
    // in the fragment shader does not cause any errors.
    pipe.CreateVKPipeline(pl.handle(), rp);

    vk::DestroyRenderPass(m_device->device(), rp, nullptr);

    m_errorMonitor->VerifyNotFound();
}
1501
TEST_F(VkPositiveLayerTest, TestRasterizationDiscardEnableTrue) {
    TEST_DESCRIPTION("Ensure it doesn't crash and trigger error msg when rasterizerDiscardEnable = true");
    ASSERT_NO_FATAL_FAILURE(Init());
    // Skip the test on NexusPlayer. The driver crashes when pViewportState, pMultisampleState,
    // pDepthStencilState, pColorBlendState are NULL. (Comment moved next to the skip it explains.)
    if (IsPlatform(kNexusPlayer)) {
        printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Minimal single-subpass render pass with one 4-sample color attachment.
    VkAttachmentDescription att[1] = {{}};
    att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
    att[0].samples = VK_SAMPLE_COUNT_4_BIT;
    att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
    VkSubpassDescription sp = {};
    sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    sp.colorAttachmentCount = 1;
    sp.pColorAttachments = &cr;
    VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
    rpi.attachmentCount = 1;
    rpi.pAttachments = att;
    rpi.subpassCount = 1;
    rpi.pSubpasses = &sp;
    VkRenderPass rp;
    // Check the result; the original ignored it, leaving rp uninitialized on failure.
    ASSERT_VK_SUCCESS(vk::CreateRenderPass(m_device->device(), &rpi, nullptr, &rp));

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    // With rasterization discarded, all of these states must be ignored, so NULL is legal.
    pipe.gp_ci_.pViewportState = nullptr;
    pipe.gp_ci_.pMultisampleState = nullptr;
    pipe.gp_ci_.pDepthStencilState = nullptr;
    pipe.gp_ci_.pColorBlendState = nullptr;
    pipe.gp_ci_.renderPass = rp;

    m_errorMonitor->ExpectSuccess();
    pipe.rs_state_ci_.rasterizerDiscardEnable = VK_TRUE;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
}
1546
TEST_F(VkPositiveLayerTest, TestSamplerDataForCombinedImageSampler) {
    TEST_DESCRIPTION("Shader code uses sampler data for CombinedImageSampler");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Hand-written SPIR-V: binding 0 is declared twice, once as an image and once as a
    // sampler, and only the sampler half is ever loaded.
    const std::string fsSource = R"(
                   OpCapability Shader
                   OpMemoryModel Logical GLSL450
                   OpEntryPoint Fragment %main "main"
                   OpExecutionMode %main OriginUpperLeft

                   OpDecorate %InputData DescriptorSet 0
                   OpDecorate %InputData Binding 0
                   OpDecorate %SamplerData DescriptorSet 0
                   OpDecorate %SamplerData Binding 0

               %void = OpTypeVoid
                %f32 = OpTypeFloat 32
              %Image = OpTypeImage %f32 2D 0 0 0 1 Rgba32f
           %ImagePtr = OpTypePointer UniformConstant %Image
          %InputData = OpVariable %ImagePtr UniformConstant
            %Sampler = OpTypeSampler
         %SamplerPtr = OpTypePointer UniformConstant %Sampler
        %SamplerData = OpVariable %SamplerPtr UniformConstant
       %SampledImage = OpTypeSampledImage %Image

               %func = OpTypeFunction %void
               %main = OpFunction %void None %func
                 %40 = OpLabel
           %call_smp = OpLoad %Sampler %SamplerData
                   OpReturn
                   OpFunctionEnd)";

    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.dsl_bindings_ = {
        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
    };
    pipe.shader_stages_ = {fs.GetStageCreateInfo(), pipe.vs_->GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    // Backing image + sampler for the combined-image-sampler descriptor.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    VkSampler sampler;
    // Verify creation so the descriptor write below never sees an uninitialized handle
    // (the original ignored the result). The unused storage buffer the original
    // created here has been removed -- nothing in the test ever referenced it.
    ASSERT_VK_SUCCESS(vk::CreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler));

    pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    pipe.descriptor_set_->UpdateDescriptorSets();

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                              &pipe.descriptor_set_->set_, 0, NULL);

    // Drawing with a shader that consumes only the sampler half must not raise errors.
    m_errorMonitor->ExpectSuccess();
    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    m_errorMonitor->VerifyNotFound();

    vk::CmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    vk::DestroySampler(m_device->device(), sampler, NULL);
}
1627
TEST_F(VkPositiveLayerTest, NotPointSizeGeometryShaderSuccess) {
    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST, but geometry shader doesn't include PointSize.");

    ASSERT_NO_FATAL_FAILURE(Init());

    // Geometry shaders are an optional device feature; skip politely when absent.
    if (!m_device->phy().features().geometryShader) {
        printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    ASSERT_NO_FATAL_FAILURE(InitViewport());

    VkShaderObj geom_shader(m_device, bindStateGeomShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);

    // Build a point-list pipeline whose geometry stage never writes PointSize.
    // With a geometry stage present (and not emitting points itself), this must
    // not produce any validation error.
    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), geom_shader.GetStageCreateInfo(),
                             helper.fs_->GetStageCreateInfo()};
    helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    helper.InitState();

    m_errorMonitor->ExpectSuccess();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
1652
TEST_F(VkPositiveLayerTest, SubpassWithReadOnlyLayoutWithoutDependency) {
    TEST_DESCRIPTION("When both subpasses' attachments are the same and layouts are read-only, they don't need dependency.");
    ASSERT_NO_FATAL_FAILURE(Init());

    auto depth_format = FindSupportedDepthStencilFormat(gpu());
    if (!depth_format) {
        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
        return;
    }

    // A depth/stencil attachment kept in a read-only layout for the whole pass.
    // (NOTE(review): earlier comment said "one color attachment" — this is depth/stencil.)
    VkAttachmentDescription attachment = {0,
                                          depth_format,
                                          VK_SAMPLE_COUNT_1_BIT,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_STORE,
                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                          VK_IMAGE_LAYOUT_UNDEFINED,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
    const int size = 2;
    // Two identical attachments, one per subpass.
    std::array<VkAttachmentDescription, size> attachments = {{attachment, attachment}};

    VkAttachmentReference att_ref_depth_stencil = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};

    // Both subpasses reference the same read-only depth/stencil attachment.
    std::array<VkSubpassDescription, size> subpasses;
    subpasses[0] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};
    subpasses[1] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};

    // Deliberately no VkSubpassDependency — read-only -> read-only requires none.
    VkRenderPassCreateInfo rpci = {
        VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, size, attachments.data(), size, subpasses.data(), 0, nullptr};

    VkRenderPass rp;
    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);

    // A compatible framebuffer.
    VkImageObj image(m_device);
    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_LINEAR, 0);
    ASSERT_TRUE(image.initialized());

    VkImageViewCreateInfo ivci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                                  nullptr,
                                  0,
                                  image.handle(),
                                  VK_IMAGE_VIEW_TYPE_2D,
                                  depth_format,
                                  {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                                   VK_COMPONENT_SWIZZLE_IDENTITY},
                                  {VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, 0, 1, 0, 1}};

    VkImageView view;
    err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
    ASSERT_VK_SUCCESS(err);
    // The same view backs both attachments of the render pass.
    std::array<VkImageView, size> views = {{view, view}};

    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, size, views.data(), 32, 32, 1};
    VkFramebuffer fb;
    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    // Record both subpasses; no validation error is expected despite the
    // absence of any declared subpass dependency.
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
    m_commandBuffer->begin();
    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
    vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
    vk::CmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();

    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
    vk::DestroyImageView(m_device->device(), view, nullptr);
}
1725
TEST_F(VkPositiveLayerTest, GeometryShaderPassthroughNV) {
    TEST_DESCRIPTION("Test to validate VK_NV_geometry_shader_passthrough");

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    VkPhysicalDeviceFeatures available_features = {};
    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));

    // Passthrough geometry shaders still require the core geometryShader feature.
    if (!available_features.geometryShader) {
        printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping test\n", kSkipPrefix);
        return;
    }

    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader: hard-coded triangle, writes a per-vertex color block.
    const char vs_src[] = R"glsl(
        #version 450

        out gl_PerVertex {
            vec4 gl_Position;
        };

        layout(location = 0) out ColorBlock {vec4 vertexColor;};

        const vec2 positions[3] = { vec2( 0.0f, -0.5f),
                                    vec2( 0.5f, 0.5f),
                                    vec2(-0.5f, 0.5f)
                                  };

        const vec4 colors[3] = { vec4(1.0f, 0.0f, 0.0f, 1.0f),
                                 vec4(0.0f, 1.0f, 0.0f, 1.0f),
                                 vec4(0.0f, 0.0f, 1.0f, 1.0f)
                               };
        void main()
        {
            vertexColor = colors[gl_VertexIndex % 3];
            gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
        }
    )glsl";

    // Geometry shader: uses layout(passthrough) so inputs are forwarded without
    // being re-emitted; main() only writes gl_Layer.
    const char gs_src[] = R"glsl(
        #version 450
        #extension GL_NV_geometry_shader_passthrough: require

        layout(triangles) in;
        layout(triangle_strip, max_vertices = 3) out;

        layout(passthrough) in gl_PerVertex {vec4 gl_Position;};
        layout(location = 0, passthrough) in ColorBlock {vec4 vertexColor;};

        void main()
        {
               gl_Layer = 0;
        }
    )glsl";

    // Fragment shader: consumes the passed-through color block.
    const char fs_src[] = R"glsl(
        #version 450

        layout(location = 0) in ColorBlock {vec4 vertexColor;};
        layout(location = 0) out vec4 outColor;

        void main() {
            outColor = vertexColor;
        }
    )glsl";

    m_errorMonitor->ExpectSuccess();

    const VkPipelineLayoutObj pl(m_device);

    VkPipelineObj pipe(m_device);
    pipe.AddDefaultColorAttachment();

    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
    pipe.AddShader(&vs);

    VkShaderObj gs(m_device, gs_src, VK_SHADER_STAGE_GEOMETRY_BIT, this);
    pipe.AddShader(&gs);

    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    pipe.AddShader(&fs);

    // Create pipeline and make sure that the usage of NV_geometry_shader_passthrough
    // in the geometry shader does not cause any errors.
    // (NOTE(review): earlier comment said "fragment shader"; passthrough is used by the GS.)
    pipe.CreateVKPipeline(pl.handle(), renderPass());

    m_errorMonitor->VerifyNotFound();
}
1823
TEST_F(VkPositiveLayerTest, PipelineStageConditionalRendering) {
    TEST_DESCRIPTION("Create renderpass and CmdPipelineBarrier with VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT");

    m_errorMonitor->ExpectSuccess();
    // Querying the conditional-rendering feature struct needs
    // vkGetPhysicalDeviceFeatures2KHR, so GPDP2 must be available first.
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    // Single device-extension check (the original code checked this twice;
    // the second check was unreachable dead code).
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME)) {
        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
               VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME);

    auto vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // The extension may be exposed while the feature itself is unsupported.
    auto cond_rendering_feature = LvlInitStruct<VkPhysicalDeviceConditionalRenderingFeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&cond_rendering_feature);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (cond_rendering_feature.conditionalRendering == VK_FALSE) {
        printf("%s conditionalRendering feature not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // A renderpass with a single subpass that declared a self-dependency using
    // the conditional-rendering stage/access bits.
    VkAttachmentDescription attach[] = {
        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
    };
    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
    VkSubpassDescription subpasses[] = {
        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
    };

    VkSubpassDependency dependency = {0,
                                      0,
                                      VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
                                      VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
                                      VK_ACCESS_SHADER_WRITE_BIT,
                                      VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
                                      (VkDependencyFlags)0};
    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dependency};
    VkRenderPass rp;

    m_errorMonitor->ExpectSuccess();
    // Capture and assert the result (the original ignored it), matching the
    // convention used by the other renderpass tests in this file.
    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
    ASSERT_VK_SUCCESS(err);
    m_errorMonitor->VerifyNotFound();

    VkImageObj image(m_device);
    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);

    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
    VkFramebuffer fb;
    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
    ASSERT_VK_SUCCESS(err);

    m_commandBuffer->begin();
    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
                                  nullptr,
                                  rp,
                                  fb,
                                  {{
                                       0,
                                       0,
                                   },
                                   {32, 32}},
                                  0,
                                  nullptr};
    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);

    // Image barrier whose stage/access masks match the subpass self-dependency
    // declared above; this is the call under test.
    VkImageMemoryBarrier imb = {};
    imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    imb.pNext = nullptr;
    imb.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
    imb.dstAccessMask = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT;
    imb.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    imb.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    imb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imb.image = image.handle();
    imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imb.subresourceRange.baseMipLevel = 0;
    imb.subresourceRange.levelCount = 1;
    imb.subresourceRange.baseArrayLayer = 0;
    imb.subresourceRange.layerCount = 1;

    m_errorMonitor->ExpectSuccess();
    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
                           VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, 0, 0, nullptr, 0, nullptr, 1, &imb);
    m_errorMonitor->VerifyNotFound();

    vk::CmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
}
1934
TEST_F(VkPositiveLayerTest, CreatePipelineOverlappingPushConstantRange) {
    TEST_DESCRIPTION("Test overlapping push-constant ranges.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex stage declares a 32-byte push-constant block (8 floats).
    char const *const vert_source = R"glsl(
        #version 450
        layout(push_constant, std430) uniform foo { float x[8]; } constants;
        void main(){
           gl_Position = vec4(constants.x[0]);
        }
    )glsl";

    // Fragment stage declares a 16-byte block that overlaps the vertex range.
    char const *const frag_source = R"glsl(
        #version 450
        layout(push_constant, std430) uniform foo { float x[4]; } constants;
        layout(location=0) out vec4 o;
        void main(){
           o = vec4(constants.x[0]);
        }
    )glsl";

    VkShaderObj const vert_shader(m_device, vert_source, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj const frag_shader(m_device, frag_source, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // The two ranges overlap in bytes [0, 16); this is legal as long as each
    // stage's declared block fits its own range.
    VkPushConstantRange ranges[2]{{VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(float) * 8},
                                  {VK_SHADER_STAGE_FRAGMENT_BIT, 0, sizeof(float) * 4}};

    VkPipelineLayoutCreateInfo const layout_ci{
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 2, ranges};

    // Pipeline creation with overlapping ranges must not emit validation errors.
    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {vert_shader.GetStageCreateInfo(), frag_shader.GetStageCreateInfo()};
    helper.pipeline_layout_ci_ = layout_ci;
    helper.InitState();

    helper.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
1979
TEST_F(VkPositiveLayerTest, MultipleEntryPointPushConstantVertNormalFrag) {
    TEST_DESCRIPTION("Test push-constant only being used by single entrypoint.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // One SPIR-V module containing BOTH entry points; only the vertex entry
    // point (main_v) touches the push constant. Equivalent GLSL:
    //
    // #version 450
    // layout(push_constant, std430) uniform foo { float x; } consts;
    // void main(){
    //    gl_Position = vec4(consts.x);
    // }
    //
    // #version 450
    // layout(location=0) out vec4 o;
    // void main(){
    //    o = vec4(1.0);
    // }
    const std::string source_body = R"(
            OpExecutionMode %main_f OriginUpperLeft
            OpSource GLSL 450
            OpMemberDecorate %gl_PerVertex 0 BuiltIn Position
            OpMemberDecorate %gl_PerVertex 1 BuiltIn PointSize
            OpMemberDecorate %gl_PerVertex 2 BuiltIn ClipDistance
            OpMemberDecorate %gl_PerVertex 3 BuiltIn CullDistance
            OpDecorate %gl_PerVertex Block
            OpMemberDecorate %foo 0 Offset 0
            OpDecorate %foo Block
            OpDecorate %out_frag Location 0
            %void = OpTypeVoid
            %3 = OpTypeFunction %void
            %float = OpTypeFloat 32
            %v4float = OpTypeVector %float 4
            %uint = OpTypeInt 32 0
            %uint_1 = OpConstant %uint 1
            %_arr_float_uint_1 = OpTypeArray %float %uint_1
            %gl_PerVertex = OpTypeStruct %v4float %float %_arr_float_uint_1 %_arr_float_uint_1
            %_ptr_Output_gl_PerVertex = OpTypePointer Output %gl_PerVertex
            %out_vert = OpVariable %_ptr_Output_gl_PerVertex Output
            %int = OpTypeInt 32 1
            %int_0 = OpConstant %int 0
            %foo = OpTypeStruct %float
            %_ptr_PushConstant_foo = OpTypePointer PushConstant %foo
            %consts = OpVariable %_ptr_PushConstant_foo PushConstant
            %_ptr_PushConstant_float = OpTypePointer PushConstant %float
            %_ptr_Output_v4float = OpTypePointer Output %v4float
            %out_frag = OpVariable %_ptr_Output_v4float Output
            %float_1 = OpConstant %float 1
            %vec_1_0 = OpConstantComposite %v4float %float_1 %float_1 %float_1 %float_1
            %main_v = OpFunction %void None %3
            %label_v = OpLabel
            %20 = OpAccessChain %_ptr_PushConstant_float %consts %int_0
            %21 = OpLoad %float %20
            %22 = OpCompositeConstruct %v4float %21 %21 %21 %21
            %24 = OpAccessChain %_ptr_Output_v4float %out_vert %int_0
            OpStore %24 %22
            OpReturn
            OpFunctionEnd
            %main_f = OpFunction %void None %3
            %label_f = OpLabel
            OpStore %out_frag %vec_1_0
            OpReturn
            OpFunctionEnd
        )";

    // The two full modules differ only in OpEntryPoint declaration order:
    // vertex entry first, then fragment entry first.
    const std::string spirv_variants[2] = {
        R"(
            OpCapability Shader
            OpMemoryModel Logical GLSL450
            OpEntryPoint Vertex %main_v "main_v" %out_vert
            OpEntryPoint Fragment %main_f "main_f" %out_frag
        )" + source_body,
        R"(
            OpCapability Shader
            OpMemoryModel Logical GLSL450
            OpEntryPoint Fragment %main_f "main_f" %out_frag
            OpEntryPoint Vertex %main_v "main_v" %out_vert
        )" + source_body};

    // Push-constant range covers only the vertex stage, which is the sole
    // entry point that statically uses the push constant.
    VkPushConstantRange push_constant_ranges[1]{{VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(float)}};
    VkPipelineLayoutCreateInfo const pipeline_layout_info{
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, push_constant_ranges};

    // Build a pipeline from each variant; neither declaration order may error.
    for (const std::string &spirv_source : spirv_variants) {
        VkShaderObj const vs(m_device, spirv_source, VK_SHADER_STAGE_VERTEX_BIT, this, "main_v");
        VkShaderObj const fs(m_device, spirv_source, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main_f");
        const auto stage_setter = [&](CreatePipelineHelper &helper) {
            helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
            helper.pipeline_layout_ci_ = pipeline_layout_info;
        };
        CreatePipelineHelper::OneshotTest(*this, stage_setter, kErrorBit, "", true);
    }

    m_errorMonitor->VerifyNotFound();
}
2088
TEST_F(VkPositiveLayerTest, MultipleEntryPointNormalVertPushConstantFrag) {
    TEST_DESCRIPTION("Test push-constant only being used by single entrypoint.");

    m_errorMonitor->ExpectSuccess();

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Mirror of MultipleEntryPointPushConstantVertNormalFrag: here only the
    // FRAGMENT entry point (main_f) touches the push constant. Equivalent GLSL:
    //
    // #version 450
    // void main(){
    //    gl_Position = vec4(1.0);
    // }
    //
    // #version 450
    // layout(push_constant, std430) uniform foo { float x; } consts;
    // layout(location=0) out vec4 o;
    // void main(){
    //    o = vec4(consts.x);
    // }
    const std::string source_body = R"(
            OpExecutionMode %main_f OriginUpperLeft
            OpSource GLSL 450
            OpMemberDecorate %gl_PerVertex 0 BuiltIn Position
            OpMemberDecorate %gl_PerVertex 1 BuiltIn PointSize
            OpMemberDecorate %gl_PerVertex 2 BuiltIn ClipDistance
            OpMemberDecorate %gl_PerVertex 3 BuiltIn CullDistance
            OpDecorate %gl_PerVertex Block
            OpDecorate %out_frag Location 0
            OpMemberDecorate %foo 0 Offset 0
            OpDecorate %foo Block
            %void = OpTypeVoid
            %3 = OpTypeFunction %void
            %float = OpTypeFloat 32
            %v4float = OpTypeVector %float 4
            %uint = OpTypeInt 32 0
            %uint_1 = OpConstant %uint 1
            %_arr_float_uint_1 = OpTypeArray %float %uint_1
            %gl_PerVertex = OpTypeStruct %v4float %float %_arr_float_uint_1 %_arr_float_uint_1
            %_ptr_Output_gl_PerVertex = OpTypePointer Output %gl_PerVertex
            %out_vert = OpVariable %_ptr_Output_gl_PerVertex Output
            %int = OpTypeInt 32 1
            %int_0 = OpConstant %int 0
            %float_1 = OpConstant %float 1
            %17 = OpConstantComposite %v4float %float_1 %float_1 %float_1 %float_1
            %_ptr_Output_v4float = OpTypePointer Output %v4float
            %out_frag = OpVariable %_ptr_Output_v4float Output
            %foo = OpTypeStruct %float
            %_ptr_PushConstant_foo = OpTypePointer PushConstant %foo
            %consts = OpVariable %_ptr_PushConstant_foo PushConstant
            %_ptr_PushConstant_float = OpTypePointer PushConstant %float
            %main_v = OpFunction %void None %3
            %label_v = OpLabel
            %19 = OpAccessChain %_ptr_Output_v4float %out_vert %int_0
            OpStore %19 %17
            OpReturn
            OpFunctionEnd
            %main_f = OpFunction %void None %3
            %label_f = OpLabel
            %26 = OpAccessChain %_ptr_PushConstant_float %consts %int_0
            %27 = OpLoad %float %26
            %28 = OpCompositeConstruct %v4float %27 %27 %27 %27
            OpStore %out_frag %28
            OpReturn
            OpFunctionEnd
        )";

    // Module variant with the vertex entry point declared first.
    std::string vert_first = R"(
            OpCapability Shader
            OpMemoryModel Logical GLSL450
            OpEntryPoint Vertex %main_v "main_v" %out_vert
            OpEntryPoint Fragment %main_f "main_f" %out_frag
        )" + source_body;

    // Module variant with the fragment entry point declared first.
    std::string frag_first = R"(
            OpCapability Shader
            OpMemoryModel Logical GLSL450
            OpEntryPoint Fragment %main_f "main_f" %out_frag
            OpEntryPoint Vertex %main_v "main_v" %out_vert
        )" + source_body;

    // Push-constant range covers only the fragment stage, matching the single
    // entry point that statically uses the push constant.
    VkPushConstantRange push_constant_ranges[1]{{VK_SHADER_STAGE_FRAGMENT_BIT, 0, sizeof(float)}};
    VkPipelineLayoutCreateInfo const pipeline_layout_info{
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, push_constant_ranges};

    // Vertex entry point first
    {
        VkShaderObj const vs(m_device, vert_first, VK_SHADER_STAGE_VERTEX_BIT, this, "main_v");
        VkShaderObj const fs(m_device, vert_first, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main_f");
        const auto set_info = [&](CreatePipelineHelper &helper) {
            helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
            helper.pipeline_layout_ci_ = pipeline_layout_info;
        };
        CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
    }

    // Fragment entry point first
    {
        VkShaderObj const vs(m_device, frag_first, VK_SHADER_STAGE_VERTEX_BIT, this, "main_v");
        VkShaderObj const fs(m_device, frag_first, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main_f");
        const auto set_info = [&](CreatePipelineHelper &helper) {
            helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
            helper.pipeline_layout_ci_ = pipeline_layout_info;
        };
        CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
    }

    m_errorMonitor->VerifyNotFound();
}
2197
TEST_F(VkPositiveLayerTest, PushConstantsCompatibilityGraphicsOnly) {
    TEST_DESCRIPTION("Based on verified valid examples from internal Vulkan Spec issue #2168");
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    char const *const vsSource = R"glsl(
        #version 450
        layout(push_constant, std430) uniform foo { float x[16]; } constants;
        void main(){
           gl_Position = vec4(constants.x[4]);
        }
    )glsl";

    VkShaderObj const vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj const fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // range A and B are the same while range C is different (offset 16 vs 0).
    const uint32_t pc_size = 32;
    VkPushConstantRange range_a = {VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size};
    VkPushConstantRange range_b = {VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size};
    VkPushConstantRange range_c = {VK_SHADER_STAGE_VERTEX_BIT, 16, pc_size};

    // Three layouts differing only in their push-constant range.
    VkPipelineLayoutCreateInfo pipeline_layout_info_a = {
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &range_a};
    VkPipelineLayoutCreateInfo pipeline_layout_info_b = {
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &range_b};
    VkPipelineLayoutCreateInfo pipeline_layout_info_c = {
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &range_c};

    CreatePipelineHelper pipeline_helper_a(*this);  // layout_a and range_a
    CreatePipelineHelper pipeline_helper_b(*this);  // layout_b and range_b
    CreatePipelineHelper pipeline_helper_c(*this);  // layout_c and range_c
    pipeline_helper_a.InitInfo();
    pipeline_helper_a.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipeline_helper_a.pipeline_layout_ci_ = pipeline_layout_info_a;
    pipeline_helper_a.InitState();
    pipeline_helper_a.CreateGraphicsPipeline();
    pipeline_helper_b.InitInfo();
    pipeline_helper_b.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipeline_helper_b.pipeline_layout_ci_ = pipeline_layout_info_b;
    pipeline_helper_b.InitState();
    pipeline_helper_b.CreateGraphicsPipeline();
    pipeline_helper_c.InitInfo();
    pipeline_helper_c.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipeline_helper_c.pipeline_layout_ci_ = pipeline_layout_info_c;
    pipeline_helper_c.InitState();
    pipeline_helper_c.CreateGraphicsPipeline();

    // Easier to see in command buffers
    const VkPipelineLayout layout_a = pipeline_helper_a.pipeline_layout_.handle();
    const VkPipelineLayout layout_b = pipeline_helper_b.pipeline_layout_.handle();
    const VkPipelineLayout layout_c = pipeline_helper_c.pipeline_layout_.handle();
    const VkPipeline pipeline_a = pipeline_helper_a.pipeline_;
    const VkPipeline pipeline_b = pipeline_helper_b.pipeline_;
    const VkPipeline pipeline_c = pipeline_helper_c.pipeline_;

    const float data[16] = {};  // dummy data to match shader size
    const float vbo_data[3] = {1.f, 0.f, 1.f};
    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);

    // Each case below records a full command buffer and must produce NO
    // validation errors: push-constant state set with a layout whose ranges
    // are identical to the bound pipeline's layout remains valid at draw time.

    // case 1 - bind different layout with the same range
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_b);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // case 2 - bind layout with same range then push different range
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_b, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_b);
    m_commandBuffer->Draw(1, 0, 0, 0);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // case 3 - same range same layout then same range from a different layout and same range from the same layout
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_a);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_b, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // case 4 - same range same layout then diff range and same range update
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_a);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_c, VK_SHADER_STAGE_VERTEX_BIT, 16, pc_size, data);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // case 5 - update push constant bind different layout with the same range then bind correct layout
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_b);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_a);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // case 6 - update push constant then bind different layout with overlapping range then bind correct layout
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_c);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_a);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // case 7 - bind different layout with different range then update push constant and bind correct layout
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_c);
    vk::CmdPushConstants(m_commandBuffer->handle(), layout_a, VK_SHADER_STAGE_VERTEX_BIT, 0, pc_size, data);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_a);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    m_errorMonitor->VerifyNotFound();
}
2342
TEST_F(VkPositiveLayerTest, PushConstantsStaticallyUnused) {
    TEST_DESCRIPTION("Test cases where creating pipeline with no use of push constants but still has ranges in layout");
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Pipeline layout advertises a vertex-stage push constant range that no shader ever reads.
    VkPushConstantRange push_constant_range = {VK_SHADER_STAGE_VERTEX_BIT, 0, 4};
    VkPipelineLayoutCreateInfo pipeline_layout_info = {
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &push_constant_range};

    // Declares the push constant block but never references it.
    char const *vsSourceUnused = R"glsl(
        #version 450
        layout(push_constant, std430) uniform foo { float x; } consts;
        void main(){
           gl_Position = vec4(1.0);
        }
    )glsl";

    // Has no push constant block at all.
    char const *vsSourceEmpty = R"glsl(
        #version 450
        void main(){
           gl_Position = vec4(1.0);
        }
    )glsl";

    VkShaderObj vsUnused(m_device, vsSourceUnused, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj vsEmpty(m_device, vsSourceEmpty, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Builds a graphics pipeline using the push-constant-carrying layout and the given vertex shader.
    auto build_pipeline = [&](CreatePipelineHelper &helper, VkShaderObj &vs) {
        helper.InitInfo();
        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
        helper.pipeline_layout_ci_ = pipeline_layout_info;
        helper.InitState();
        helper.CreateGraphicsPipeline();
    };

    // Range is just in the layout
    CreatePipelineHelper pipeline_unused(*this);
    build_pipeline(pipeline_unused, vsUnused);

    // Shader never had a reference
    CreatePipelineHelper pipeline_empty(*this);
    build_pipeline(pipeline_empty, vsEmpty);

    const float vbo_data[3] = {1.f, 0.f, 1.f};
    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);

    // Records a draw with the given pipeline without ever calling vkCmdPushConstants;
    // since nothing statically uses the range, no validation error is expected.
    auto draw_without_push = [&](VkPipeline pipeline) {
        m_commandBuffer->begin();
        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
        m_commandBuffer->BindVertexBuffer(&vbo, 0, 1);
        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
        m_commandBuffer->Draw(1, 0, 0, 0);
        m_commandBuffer->EndRenderPass();
        m_commandBuffer->end();
    };

    draw_without_push(pipeline_unused.pipeline_);
    draw_without_push(pipeline_empty.pipeline_);

    m_errorMonitor->VerifyNotFound();
}
2413
TEST_F(VkPositiveLayerTest, CreatePipelineSpecializeInt8) {
    TEST_DESCRIPTION("Test int8 specialization.");

    m_errorMonitor->ExpectSuccess();

    // VK_KHR_shader_float16_int8 requires the get_physical_device_properties2 instance extension.
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME)) {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);

    auto vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // OpCapability Int8 is only legal when the shaderInt8 feature is enabled.
    auto float16int8_features = LvlInitStruct<VkPhysicalDeviceFloat16Int8FeaturesKHR>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&float16int8_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (VK_FALSE == float16int8_features.shaderInt8) {
        printf("%s shaderInt8 feature not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Minimal fragment shader declaring an 8-bit signed integer spec constant with SpecId 0.
    std::string const fragment_spirv = R"(
               OpCapability Shader
               OpCapability Int8
          %1 = OpExtInstImport "GLSL.std.450"
               OpMemoryModel Logical GLSL450
               OpEntryPoint Fragment %main "main"
               OpExecutionMode %main OriginUpperLeft
               OpSource GLSL 450
               OpName %main "main"
               OpName %v "v"
               OpDecorate %v SpecId 0
       %void = OpTypeVoid
          %3 = OpTypeFunction %void
        %int = OpTypeInt 8 1
          %v = OpSpecConstant %int 0
       %main = OpFunction %void None %3
          %5 = OpLabel
               OpReturn
               OpFunctionEnd
        )";

    VkShaderObj const fs(m_device, fragment_spirv, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Map constant_id 0 to a single byte of specialization data.
    uint8_t const spec_data = 0x42;
    VkSpecializationMapEntry map_entry = {};
    map_entry.constantID = 0;
    map_entry.offset = 0;
    map_entry.size = sizeof(uint8_t);

    VkSpecializationInfo spec_info = {};
    spec_info.mapEntryCount = 1;
    spec_info.pMapEntries = &map_entry;
    spec_info.dataSize = sizeof(spec_data);
    spec_info.pData = &spec_data;

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.shader_stages_[1].pSpecializationInfo = &spec_info;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
2495
TEST_F(VkPositiveLayerTest, CreatePipelineSpecializeInt16) {
    TEST_DESCRIPTION("Test int16 specialization.");

    m_errorMonitor->ExpectSuccess();

    // vkGetPhysicalDeviceFeatures2KHR comes from the get_physical_device_properties2 instance extension.
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    auto vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // OpCapability Int16 is only legal when the core shaderInt16 feature is enabled.
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>();
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (VK_FALSE == features2.features.shaderInt16) {
        printf("%s shaderInt16 feature not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Minimal fragment shader declaring a 16-bit signed integer spec constant with SpecId 0.
    std::string const fragment_spirv = R"(
               OpCapability Shader
               OpCapability Int16
          %1 = OpExtInstImport "GLSL.std.450"
               OpMemoryModel Logical GLSL450
               OpEntryPoint Fragment %main "main"
               OpExecutionMode %main OriginUpperLeft
               OpSource GLSL 450
               OpName %main "main"
               OpName %v "v"
               OpDecorate %v SpecId 0
       %void = OpTypeVoid
          %3 = OpTypeFunction %void
        %int = OpTypeInt 16 1
          %v = OpSpecConstant %int 0
       %main = OpFunction %void None %3
          %5 = OpLabel
               OpReturn
               OpFunctionEnd
        )";

    VkShaderObj const fs(m_device, fragment_spirv, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Map constant_id 0 to a single 16-bit value.
    uint16_t const spec_data = 0x4342;
    VkSpecializationMapEntry map_entry = {};
    map_entry.constantID = 0;
    map_entry.offset = 0;
    map_entry.size = sizeof(uint16_t);

    VkSpecializationInfo spec_info = {};
    spec_info.mapEntryCount = 1;
    spec_info.pMapEntries = &map_entry;
    spec_info.dataSize = sizeof(spec_data);
    spec_info.pData = &spec_data;

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.shader_stages_[1].pSpecializationInfo = &spec_info;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
2570
TEST_F(VkPositiveLayerTest, CreatePipelineSpecializeInt32) {
    TEST_DESCRIPTION("Test int32 specialization.");

    m_errorMonitor->ExpectSuccess();

    // 32-bit integers are core; no features or extensions needed.
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Minimal fragment shader declaring a 32-bit signed integer spec constant with SpecId 0.
    std::string const fragment_spirv = R"(
               OpCapability Shader
          %1 = OpExtInstImport "GLSL.std.450"
               OpMemoryModel Logical GLSL450
               OpEntryPoint Fragment %main "main"
               OpExecutionMode %main OriginUpperLeft
               OpSource GLSL 450
               OpName %main "main"
               OpName %v "v"
               OpDecorate %v SpecId 0
       %void = OpTypeVoid
          %3 = OpTypeFunction %void
        %int = OpTypeInt 32 1
          %v = OpSpecConstant %int 0
       %main = OpFunction %void None %3
          %5 = OpLabel
               OpReturn
               OpFunctionEnd
        )";

    VkShaderObj const fs(m_device, fragment_spirv, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Map constant_id 0 to a single 32-bit value.
    uint32_t const spec_data = 0x45444342;
    VkSpecializationMapEntry map_entry = {};
    map_entry.constantID = 0;
    map_entry.offset = 0;
    map_entry.size = sizeof(uint32_t);

    VkSpecializationInfo spec_info = {};
    spec_info.mapEntryCount = 1;
    spec_info.pMapEntries = &map_entry;
    spec_info.dataSize = sizeof(spec_data);
    spec_info.pData = &spec_data;

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.shader_stages_[1].pSpecializationInfo = &spec_info;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
2624
TEST_F(VkPositiveLayerTest, CreatePipelineSpecializeInt64) {
    TEST_DESCRIPTION("Test int64 specialization.");

    m_errorMonitor->ExpectSuccess();

    // vkGetPhysicalDeviceFeatures2KHR comes from the get_physical_device_properties2 instance extension.
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    auto vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // OpCapability Int64 is only legal when the core shaderInt64 feature is enabled.
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>();
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (VK_FALSE == features2.features.shaderInt64) {
        printf("%s shaderInt64 feature not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Minimal fragment shader declaring a 64-bit signed integer spec constant with SpecId 0.
    std::string const fragment_spirv = R"(
               OpCapability Shader
               OpCapability Int64
          %1 = OpExtInstImport "GLSL.std.450"
               OpMemoryModel Logical GLSL450
               OpEntryPoint Fragment %main "main"
               OpExecutionMode %main OriginUpperLeft
               OpSource GLSL 450
               OpName %main "main"
               OpName %v "v"
               OpDecorate %v SpecId 0
       %void = OpTypeVoid
          %3 = OpTypeFunction %void
        %int = OpTypeInt 64 1
          %v = OpSpecConstant %int 0
       %main = OpFunction %void None %3
          %5 = OpLabel
               OpReturn
               OpFunctionEnd
        )";

    VkShaderObj const fs(m_device, fragment_spirv, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    // Map constant_id 0 to a single 64-bit value.
    uint64_t const spec_data = 0x4948474645444342;
    VkSpecializationMapEntry map_entry = {};
    map_entry.constantID = 0;
    map_entry.offset = 0;
    map_entry.size = sizeof(uint64_t);

    VkSpecializationInfo spec_info = {};
    spec_info.mapEntryCount = 1;
    spec_info.pMapEntries = &map_entry;
    spec_info.dataSize = sizeof(spec_data);
    spec_info.pData = &spec_data;

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.shader_stages_[1].pSpecializationInfo = &spec_info;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    m_errorMonitor->VerifyNotFound();
}
2699
TEST_F(VkPositiveLayerTest, SeparateDepthStencilSubresourceLayout) {
    TEST_DESCRIPTION("Test that separate depth stencil layouts are tracked correctly.");
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    m_errorMonitor->ExpectSuccess(kErrorBit | kWarningBit);

    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
               VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME);
        return;
    }

    VkPhysicalDeviceFeatures features = {};
    VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
    VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures separate_features = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES};
    features2.pNext = &separate_features;
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);
    if (!separate_features.separateDepthStencilLayouts) {
        // Fix: include kSkipPrefix so this skip is reported consistently with the rest of the file.
        printf("%s separateDepthStencilLayouts feature not supported, skipping tests\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->VerifyNotFound();
    m_errorMonitor->ExpectSuccess(kErrorBit | kWarningBit);
    ASSERT_NO_FATAL_FAILURE(InitState(&features, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    // Pick a depth/stencil format usable as a depth-stencil attachment with optimal tiling.
    VkFormat ds_format = VK_FORMAT_D24_UNORM_S8_UINT;
    VkFormatProperties props;
    vk::GetPhysicalDeviceFormatProperties(gpu(), ds_format, &props);
    if ((props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) {
        // Implementations must support at least one of D24S8 / D32S8.
        ds_format = VK_FORMAT_D32_SFLOAT_S8_UINT;
        vk::GetPhysicalDeviceFormatProperties(gpu(), ds_format, &props);
        ASSERT_TRUE((props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0);
    }

    auto image_ci = vk_testing::Image::create_info();
    image_ci.imageType = VK_IMAGE_TYPE_2D;
    image_ci.extent.width = 64;
    image_ci.extent.height = 64;
    image_ci.mipLevels = 1;
    image_ci.arrayLayers = 6;
    image_ci.format = ds_format;
    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_ci.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    vk_testing::Image image;
    image.init(*m_device, image_ci);

    const auto depth_range = image.subresource_range(VK_IMAGE_ASPECT_DEPTH_BIT);
    const auto stencil_range = image.subresource_range(VK_IMAGE_ASPECT_STENCIL_BIT);
    const auto depth_stencil_range = image.subresource_range(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);

    vk_testing::ImageView view;
    VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
    view_info.image = image.handle();
    view_info.subresourceRange = depth_stencil_range;
    view_info.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
    view_info.format = ds_format;
    view.init(*m_device, view_info);

    std::vector<VkImageMemoryBarrier> barriers;

    // Establish distinct initial layouts for the depth and stencil aspects in a first submission.
    {
        m_commandBuffer->begin();
        auto depth_barrier =
            image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, depth_range);
        auto stencil_barrier =
            image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, stencil_range);
        vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0,
                               0, nullptr, 0, nullptr, 1, &depth_barrier);
        vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0,
                               0, nullptr, 0, nullptr, 1, &stencil_barrier);
        m_commandBuffer->end();
        m_commandBuffer->QueueCommandBuffer(false);
        m_commandBuffer->reset();
    }

    m_commandBuffer->begin();

    // Test that we handle initial layout in command buffer.
    barriers.push_back(image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
                                                  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, depth_stencil_range));

    // Test that we can transition aspects separately and use specific layouts.
    barriers.push_back(image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
                                                  VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, depth_range));

    barriers.push_back(image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
                                                  VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, stencil_range));

    // Test that transition from UNDEFINED on depth aspect does not clobber stencil layout.
    barriers.push_back(
        image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, depth_range));

    // Test that we can transition aspects separately and use combined layouts. (Only care about the aspect in question).
    barriers.push_back(image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                                  VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, depth_range));

    barriers.push_back(image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
                                                  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, stencil_range));

    // Test that we can transition back again with combined layout.
    barriers.push_back(image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
                                                  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, depth_stencil_range));

    VkRenderPassBeginInfo rp_begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO};
    VkRenderPassCreateInfo2 rp2 = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2};
    VkAttachmentDescription2 desc = {VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2};
    VkSubpassDescription2 sub = {VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2};
    VkAttachmentReference2 att = {VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2};
    VkAttachmentDescriptionStencilLayout stencil_desc = {VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT};
    VkAttachmentReferenceStencilLayout stencil_att = {VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT};
    // Test that we can discard stencil layout.
    stencil_desc.stencilInitialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    stencil_desc.stencilFinalLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL;
    stencil_att.stencilLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL;

    desc.format = ds_format;
    desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
    desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL;
    desc.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
    desc.samples = VK_SAMPLE_COUNT_1_BIT;
    desc.pNext = &stencil_desc;

    att.layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL;
    att.attachment = 0;
    att.pNext = &stencil_att;

    sub.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    sub.pDepthStencilAttachment = &att;
    rp2.subpassCount = 1;
    rp2.pSubpasses = &sub;
    rp2.attachmentCount = 1;
    rp2.pAttachments = &desc;

    VkRenderPass render_pass_separate{};
    VkFramebuffer framebuffer_separate{};
    VkRenderPass render_pass_combined{};
    VkFramebuffer framebuffer_combined{};

    PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
        (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device(), "vkCreateRenderPass2KHR");
    // Fix: the proc address was previously used without a null check.
    ASSERT_TRUE(vkCreateRenderPass2KHR != nullptr);

    // Render pass using separate depth/stencil layouts via the pNext chains set up above.
    vkCreateRenderPass2KHR(device(), &rp2, nullptr, &render_pass_separate);

    // Second render pass with a traditional combined depth/stencil layout and no stencil pNext chains.
    desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
    desc.finalLayout = desc.initialLayout;
    desc.pNext = nullptr;
    att.layout = desc.initialLayout;
    att.pNext = nullptr;

    vkCreateRenderPass2KHR(device(), &rp2, nullptr, &render_pass_combined);

    VkFramebufferCreateInfo fb_info = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO};
    fb_info.renderPass = render_pass_separate;
    fb_info.width = 1;
    fb_info.height = 1;
    fb_info.layers = 1;
    fb_info.attachmentCount = 1;
    fb_info.pAttachments = &view.handle();
    vk::CreateFramebuffer(device(), &fb_info, nullptr, &framebuffer_separate);

    fb_info.renderPass = render_pass_combined;
    vk::CreateFramebuffer(device(), &fb_info, nullptr, &framebuffer_combined);

    for (auto &barrier : barriers) {
        vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0,
                               0, nullptr, 0, nullptr, 1, &barrier);
    }

    rp_begin_info.renderPass = render_pass_separate;
    rp_begin_info.framebuffer = framebuffer_separate;
    rp_begin_info.renderArea.extent = {1, 1};
    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rp_begin_info, VK_SUBPASS_CONTENTS_INLINE);
    vk::CmdEndRenderPass(m_commandBuffer->handle());

    rp_begin_info.renderPass = render_pass_combined;
    rp_begin_info.framebuffer = framebuffer_combined;
    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rp_begin_info, VK_SUBPASS_CONTENTS_INLINE);
    vk::CmdEndRenderPass(m_commandBuffer->handle());

    m_commandBuffer->end();
    m_commandBuffer->QueueCommandBuffer(false);

    // Fix: these handles were previously leaked until device teardown. QueueCommandBuffer()
    // waits for the submission to complete, so the objects are no longer in use here.
    vk::DestroyFramebuffer(device(), framebuffer_separate, nullptr);
    vk::DestroyFramebuffer(device(), framebuffer_combined, nullptr);
    vk::DestroyRenderPass(device(), render_pass_separate, nullptr);
    vk::DestroyRenderPass(device(), render_pass_combined, nullptr);

    m_errorMonitor->VerifyNotFound();
}
2891
TEST_F(VkPositiveLayerTest, SwapchainImageFormatProps) {
    TEST_DESCRIPTION("Try using special format props on a swapchain image");

    if (!AddSurfaceInstanceExtension()) {
        printf("%s surface extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework());

    if (!AddSwapchainDeviceExtension()) {
        printf("%s swapchain extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    if (!InitSwapchain()) {
        printf("%s Cannot create surface or swapchain, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
        return;
    }

    // HACK: I know InitSwapchain() will pick first supported format
    VkSurfaceFormatKHR format_tmp;
    {
        // Query only the first surface format; VK_INCOMPLETE is expected when more exist.
        uint32_t format_count = 1;
        const VkResult err = vk::GetPhysicalDeviceSurfaceFormatsKHR(gpu(), m_surface, &format_count, &format_tmp);
        ASSERT_TRUE(err == VK_SUCCESS || err == VK_INCOMPLETE) << vk_result_string(err);
    }
    const VkFormat format = format_tmp.format;

    // The pipeline below enables blending, which requires COLOR_ATTACHMENT_BLEND support
    // on the swapchain image's format.
    VkFormatProperties format_props;
    vk::GetPhysicalDeviceFormatProperties(gpu(), format, &format_props);
    if (!(format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT)) {
        printf("%s We need VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT feature. Skipping test.\n", kSkipPrefix);
        return;
    }

    VkShaderObj vs(DeviceObj(), bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(DeviceObj(), bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineLayoutObj pipeline_layout(DeviceObj());
    VkRenderpassObj render_pass(DeviceObj(), format);

    // Graphics pipeline with blending enabled against the swapchain format's render pass.
    VkPipelineObj pipeline(DeviceObj());
    pipeline.AddShader(&vs);
    pipeline.AddShader(&fs);
    VkPipelineColorBlendAttachmentState pcbas = {};
    pcbas.blendEnable = VK_TRUE;  // !!!
    pcbas.colorWriteMask =
        VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
    pipeline.AddColorAttachment(0, pcbas);
    pipeline.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
    pipeline.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);

    ASSERT_VK_SUCCESS(pipeline.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle()));

    uint32_t image_count;
    ASSERT_VK_SUCCESS(vk::GetSwapchainImagesKHR(device(), m_swapchain, &image_count, nullptr));
    std::vector<VkImage> swapchain_images(image_count);
    ASSERT_VK_SUCCESS(vk::GetSwapchainImagesKHR(device(), m_swapchain, &image_count, swapchain_images.data()));

    // Acquire an image with a fence (no semaphore) and wait for the fence so the image
    // is known to be available before we create a view/framebuffer from it.
    VkFenceObj fence;
    fence.init(*DeviceObj(), VkFenceObj::create_info());

    uint32_t image_index;
    ASSERT_VK_SUCCESS(vk::AcquireNextImageKHR(device(), m_swapchain, UINT64_MAX, VK_NULL_HANDLE, fence.handle(), &image_index));
    // NOTE(review): timeout is UINT32_MAX — presumably interpreted as nanoseconds by the
    // fence helper (~4.3 s), which should be ample for an acquire; confirm the helper's units.
    fence.wait(UINT32_MAX);

    VkImageViewCreateInfo ivci = {};
    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    ivci.image = swapchain_images[image_index];
    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
    ivci.format = format;
    ivci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    VkImageView image_view;
    ASSERT_VK_SUCCESS(vk::CreateImageView(device(), &ivci, nullptr, &image_view));

    // 1x1 framebuffer wrapping the acquired swapchain image.
    VkFramebufferCreateInfo fbci = {};
    fbci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    fbci.renderPass = render_pass.handle();
    fbci.attachmentCount = 1;
    fbci.pAttachments = &image_view;
    fbci.width = 1;
    fbci.height = 1;
    fbci.layers = 1;
    VkFramebuffer framebuffer;
    ASSERT_VK_SUCCESS(vk::CreateFramebuffer(device(), &fbci, nullptr, &framebuffer));

    VkCommandBufferObj cmdbuff(DeviceObj(), m_commandPool);
    cmdbuff.begin();
    VkRenderPassBeginInfo rpbi = {};
    rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    rpbi.renderPass = render_pass.handle();
    rpbi.framebuffer = framebuffer;
    rpbi.renderArea = {{0, 0}, {1, 1}};
    cmdbuff.BeginRenderPass(rpbi);

    // The actual check: binding a blend-enabled pipeline inside a render pass targeting a
    // swapchain image must not produce a validation error when the format supports blending.
    Monitor().ExpectSuccess();
    vk::CmdBindPipeline(cmdbuff.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.handle());
    Monitor().VerifyNotFound();

    // teardown
    vk::DestroyImageView(device(), image_view, nullptr);
    vk::DestroyFramebuffer(device(), framebuffer, nullptr);
    DestroySwapchain();
}
2999
TEST_F(VkPositiveLayerTest, SwapchainExclusiveModeQueueFamilyPropertiesReferences) {
    // Fix: the description and skip messages were copy-pasted from the previous test and
    // did not describe this test.
    TEST_DESCRIPTION(
        "Test that queueFamilyIndexCount/pQueueFamilyIndices are ignored for VK_SHARING_MODE_EXCLUSIVE swapchains, and that "
        "oldSwapchain may come from another device");

    if (!AddSurfaceInstanceExtension()) {
        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework());

    if (!AddSwapchainDeviceExtension()) {
        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    if (!InitSurface()) {
        printf("%s Cannot create surface, skipping test\n", kSkipPrefix);
        return;
    }
    InitSwapchainInfo();
    m_errorMonitor->ExpectSuccess();

    VkBool32 supported;
    vk::GetPhysicalDeviceSurfaceSupportKHR(gpu(), m_device->graphics_queue_node_index_, m_surface, &supported);
    if (!supported) {
        printf("%s Graphics queue does not support present, skipping test\n", kSkipPrefix);
        return;
    }

    auto surface = m_surface;
    VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    VkSurfaceTransformFlagBitsKHR preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;

    VkSwapchainCreateInfoKHR swapchain_create_info = LvlInitStruct<VkSwapchainCreateInfoKHR>();
    swapchain_create_info.surface = surface;
    swapchain_create_info.minImageCount = m_surface_capabilities.minImageCount;
    swapchain_create_info.imageFormat = m_surface_formats[0].format;
    swapchain_create_info.imageColorSpace = m_surface_formats[0].colorSpace;
    swapchain_create_info.imageExtent = {m_surface_capabilities.minImageExtent.width, m_surface_capabilities.minImageExtent.height};
    swapchain_create_info.imageArrayLayers = 1;
    swapchain_create_info.imageUsage = imageUsage;
    swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    swapchain_create_info.preTransform = preTransform;
    swapchain_create_info.compositeAlpha = m_surface_composite_alpha;
    swapchain_create_info.presentMode = m_surface_non_shared_present_mode;
    swapchain_create_info.clipped = VK_FALSE;
    swapchain_create_info.oldSwapchain = 0;

    // For VK_SHARING_MODE_EXCLUSIVE these members must be ignored, so deliberately supply
    // garbage values; a positive test expects no validation error from them.
    swapchain_create_info.queueFamilyIndexCount = 4094967295;  // This SHOULD get ignored
    uint32_t bogus_int = 99;
    swapchain_create_info.pQueueFamilyIndices = &bogus_int;

    vk::CreateSwapchainKHR(device(), &swapchain_create_info, nullptr, &m_swapchain);

    // Create another device, create another swapchain, and use this one for oldSwapchain
    // It is legal to include an 'oldSwapchain' object that is from a different device
    const float q_priority[] = {1.0f};
    VkDeviceQueueCreateInfo queue_ci = LvlInitStruct<VkDeviceQueueCreateInfo>();
    queue_ci.queueFamilyIndex = 0;
    queue_ci.queueCount = 1;
    queue_ci.pQueuePriorities = q_priority;

    VkDeviceCreateInfo device_ci = LvlInitStruct<VkDeviceCreateInfo>();
    device_ci.queueCreateInfoCount = 1;
    device_ci.pQueueCreateInfos = &queue_ci;
    device_ci.ppEnabledExtensionNames = m_device_extension_names.data();
    // Fix: explicit cast; enabledExtensionCount is uint32_t but size() returns size_t.
    device_ci.enabledExtensionCount = static_cast<uint32_t>(m_device_extension_names.size());

    VkDevice test_device;
    // Fix: the result was previously unchecked before test_device was used.
    ASSERT_VK_SUCCESS(vk::CreateDevice(gpu(), &device_ci, nullptr, &test_device));

    swapchain_create_info.oldSwapchain = m_swapchain;
    VkSwapchainKHR new_swapchain = VK_NULL_HANDLE;
    vk::CreateSwapchainKHR(test_device, &swapchain_create_info, nullptr, &new_swapchain);

    if (new_swapchain != VK_NULL_HANDLE) {
        vk::DestroySwapchainKHR(test_device, new_swapchain, nullptr);
    }

    vk::DestroyDevice(test_device, nullptr);

    // Fix: destroy the swapchain before its surface. VUID-vkDestroySurfaceKHR-surface-01266
    // requires all swapchains created from a surface to be destroyed first.
    if (m_swapchain != VK_NULL_HANDLE) {
        vk::DestroySwapchainKHR(device(), m_swapchain, nullptr);
        m_swapchain = VK_NULL_HANDLE;
    }

    if (m_surface != VK_NULL_HANDLE) {
        vk::DestroySurfaceKHR(instance(), m_surface, nullptr);
        m_surface = VK_NULL_HANDLE;
    }
    m_errorMonitor->VerifyNotFound();
}
3088
TEST_F(VkPositiveLayerTest,ProtectedAndUnprotectedQueue)3089 TEST_F(VkPositiveLayerTest, ProtectedAndUnprotectedQueue) {
3090 TEST_DESCRIPTION("Test creating 2 queues, 1 protected, and getting both with vkGetDeviceQueue2");
3091 SetTargetApiVersion(VK_API_VERSION_1_1);
3092
3093 m_errorMonitor->ExpectSuccess();
3094
3095 if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
3096 m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
3097 } else {
3098 printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
3099 return;
3100 }
3101
3102 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
3103
    // NOTE (ncesario): This appears to be failing in the driver on the Shield.
    // It is not clear what is causing this; more investigation is necessary.
3106 if (IsPlatform(kShieldTV) || IsPlatform(kShieldTVb)) {
3107 printf("%s Test not supported by Shield TV, skipping test case.\n", kSkipPrefix);
3108 return;
3109 }
3110
3111 // Needed for both protected memory and vkGetDeviceQueue2
3112 if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
3113 printf("%s test requires Vulkan 1.1 extensions, not available. Skipping.\n", kSkipPrefix);
3114 return;
3115 }
3116
3117 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
3118 (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
3119 ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
3120
3121 auto protected_features = LvlInitStruct<VkPhysicalDeviceProtectedMemoryFeatures>();
3122 auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&protected_features);
3123 vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
3124
3125 if (protected_features.protectedMemory == VK_FALSE) {
3126 printf("%s test requires protectedMemory, not available. Skipping.\n", kSkipPrefix);
3127 return;
3128 }
3129
3130 // Try to find a protected queue family type
3131 bool protected_queue = false;
3132 VkQueueFamilyProperties queue_properties; // selected queue family used
3133 uint32_t queue_family_index = 0;
3134 uint32_t queue_family_count = 0;
3135 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_family_count, nullptr);
3136 std::vector<VkQueueFamilyProperties> queue_families(queue_family_count);
3137 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_family_count, queue_families.data());
3138
3139 for (size_t i = 0; i < queue_families.size(); i++) {
3140 // need to have at least 2 queues to use
3141 if (((queue_families[i].queueFlags & VK_QUEUE_PROTECTED_BIT) != 0) && (queue_families[i].queueCount > 1)) {
3142 protected_queue = true;
3143 queue_family_index = i;
3144 queue_properties = queue_families[i];
3145 break;
3146 }
3147 }
3148
3149 if (protected_queue == false) {
3150 printf("%s test requires queue family with VK_QUEUE_PROTECTED_BIT and 2 queues, not available. Skipping.\n", kSkipPrefix);
3151 return;
3152 }
3153
3154 float queue_priority = 1.0;
3155
3156 VkDeviceQueueCreateInfo queue_create_info[2];
3157 queue_create_info[0] = LvlInitStruct<VkDeviceQueueCreateInfo>();
3158 queue_create_info[0].flags = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT;
3159 queue_create_info[0].queueFamilyIndex = queue_family_index;
3160 queue_create_info[0].queueCount = 1;
3161 queue_create_info[0].pQueuePriorities = &queue_priority;
3162
3163 queue_create_info[1] = LvlInitStruct<VkDeviceQueueCreateInfo>();
3164 queue_create_info[1].flags = 0; // unprotected because the protected flag is not set
3165 queue_create_info[1].queueFamilyIndex = queue_family_index;
3166 queue_create_info[1].queueCount = 1;
3167 queue_create_info[1].pQueuePriorities = &queue_priority;
3168
3169 VkDevice test_device = VK_NULL_HANDLE;
3170 VkDeviceCreateInfo device_create_info = LvlInitStruct<VkDeviceCreateInfo>(&protected_features);
3171 device_create_info.flags = 0;
3172 device_create_info.pQueueCreateInfos = queue_create_info;
3173 device_create_info.queueCreateInfoCount = 2;
3174 device_create_info.pEnabledFeatures = nullptr;
3175 device_create_info.enabledLayerCount = 0;
3176 device_create_info.enabledExtensionCount = 0;
3177 ASSERT_VK_SUCCESS(vk::CreateDevice(gpu(), &device_create_info, nullptr, &test_device));
3178
3179 VkQueue test_queue_protected = VK_NULL_HANDLE;
3180 VkQueue test_queue_unprotected = VK_NULL_HANDLE;
3181
3182 PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = (PFN_vkGetDeviceQueue2)vk::GetDeviceProcAddr(test_device, "vkGetDeviceQueue2");
3183 ASSERT_TRUE(vkGetDeviceQueue2 != nullptr);
3184
3185 VkDeviceQueueInfo2 queue_info_2 = LvlInitStruct<VkDeviceQueueInfo2>();
3186
3187 queue_info_2.flags = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT;
3188 queue_info_2.queueFamilyIndex = queue_family_index;
3189 queue_info_2.queueIndex = 0;
3190 vkGetDeviceQueue2(test_device, &queue_info_2, &test_queue_protected);
3191
3192 queue_info_2.flags = 0;
3193 queue_info_2.queueIndex = 0;
3194 vkGetDeviceQueue2(test_device, &queue_info_2, &test_queue_unprotected);
3195
3196 vk::DestroyDevice(test_device, nullptr);
3197
3198 m_errorMonitor->VerifyNotFound();
3199 }
3200
TEST_F(VkPositiveLayerTest, ShaderFloatControl) {
    TEST_DESCRIPTION("Test VK_KHR_float_controls");
    m_errorMonitor->ExpectSuccess();

    // Need 1.1 to get SPIR-V 1.3 since OpExecutionModeId was added in SPIR-V 1.2
    SetTargetApiVersion(VK_API_VERSION_1_1);

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s test requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    // The issue with revision 4 of this extension should not be an issue with the tests
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
        (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceProperties2KHR != nullptr);

    // Query which 32-bit float control modes the implementation supports.
    auto shader_float_control = LvlInitStruct<VkPhysicalDeviceFloatControlsProperties>();
    auto properties2 = LvlInitStruct<VkPhysicalDeviceProperties2KHR>(&shader_float_control);
    vkGetPhysicalDeviceProperties2KHR(gpu(), &properties2);

    // same body for each shader, only the start is different
    // this is just "float a = 1.0 + 2.0;" in SPIR-V
    const std::string source_body = R"(
             OpExecutionMode %main LocalSize 1 1 1
             OpSource GLSL 450
             OpName %main "main"
     %void = OpTypeVoid
        %3 = OpTypeFunction %void
    %float = OpTypeFloat 32
%pFunction = OpTypePointer Function %float
  %float_3 = OpConstant %float 3
     %main = OpFunction %void None %3
        %5 = OpLabel
        %6 = OpVariable %pFunction Function
             OpStore %6 %float_3
             OpReturn
             OpFunctionEnd
)";

    // For every float-control mode, the SPIR-V capability token and the OpExecutionMode
    // token are identical, so each case is just (supported?, token) and the shader
    // preamble can be generated instead of copy-pasted five times.
    struct FloatControlCase {
        bool supported;      // device advertises the corresponding shaderXxxFloat32 property
        const char *token;   // both the OpCapability name and the OpExecutionMode name
    };
    const FloatControlCase float_control_cases[] = {
        {shader_float_control.shaderSignedZeroInfNanPreserveFloat32 == VK_TRUE, "SignedZeroInfNanPreserve"},
        {shader_float_control.shaderDenormPreserveFloat32 == VK_TRUE, "DenormPreserve"},
        {shader_float_control.shaderDenormFlushToZeroFloat32 == VK_TRUE, "DenormFlushToZero"},
        {shader_float_control.shaderRoundingModeRTEFloat32 == VK_TRUE, "RoundingModeRTE"},
        {shader_float_control.shaderRoundingModeRTZFloat32 == VK_TRUE, "RoundingModeRTZ"},
    };

    for (const FloatControlCase &fc_case : float_control_cases) {
        if (!fc_case.supported) {
            continue;  // mode not advertised by the device; using it would be invalid
        }
        // Assemble the full compute shader: capability + execution mode for this case,
        // followed by the shared body. SPIR-V assembly is whitespace-insensitive.
        const std::string spv_source = std::string(R"(
             OpCapability Shader
             OpCapability )") + fc_case.token + R"(
             OpExtension "SPV_KHR_float_controls"
        %1 = OpExtInstImport "GLSL.std.450"
             OpMemoryModel Logical GLSL450
             OpEntryPoint GLCompute %main "main"
             OpExecutionMode %main )" + fc_case.token + " 32" + source_body;

        const auto set_info = [&](CreateComputePipelineHelper &helper) {
            helper.cs_.reset(
                new VkShaderObj(m_device, spv_source, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", nullptr, SPV_ENV_VULKAN_1_1));
        };
        // Positive test: pipeline creation must produce no validation errors.
        CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
    }

    m_errorMonitor->VerifyNotFound();
}
3356
TEST_F(VkPositiveLayerTest, Storage8and16bit) {
    TEST_DESCRIPTION("Test VK_KHR_8bit_storage and VK_KHR_16bit_storage");
    m_errorMonitor->ExpectSuccess();

    // GPDP2 is required to query the 8/16-bit storage and float16/int8 feature structs.
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    bool support_8_bit = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_8BIT_STORAGE_EXTENSION_NAME);
    bool support_16_bit = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_16BIT_STORAGE_EXTENSION_NAME);

    // Skip only if NEITHER storage extension is present; the test exercises whichever is available.
    if ((support_8_bit == false) && (support_16_bit == false)) {
        printf("%s Extension %s and %s are not supported.\n", kSkipPrefix, VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
               VK_KHR_16BIT_STORAGE_EXTENSION_NAME);
        return;
    } else if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME) == false) {
        // need for all shaders, but not guaranteed from driver to have support
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
        return;
    } else {
        m_device_extension_names.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME);
        if (support_8_bit == true) {
            m_device_extension_names.push_back(VK_KHR_8BIT_STORAGE_EXTENSION_NAME);
        }
        if (support_16_bit == true) {
            m_device_extension_names.push_back(VK_KHR_16BIT_STORAGE_EXTENSION_NAME);
        }
    }

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // Chain all three feature structs into one query, then create the device with
    // exactly the features the implementation reported.
    auto storage_8_bit_features = LvlInitStruct<VkPhysicalDevice8BitStorageFeaturesKHR>();
    auto storage_16_bit_features = LvlInitStruct<VkPhysicalDevice16BitStorageFeaturesKHR>(&storage_8_bit_features);
    auto float_16_int_8_features = LvlInitStruct<VkPhysicalDeviceShaderFloat16Int8Features>(&storage_16_bit_features);
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&float_16_int_8_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // 8 bit int test (not 8 bit float support in Vulkan)
    // Each sub-case below builds a vertex shader that reads an int8_t through one
    // storage class (SSBO / UBO / push constant) gated on the matching feature bit,
    // and expects pipeline creation to pass validation cleanly.
    if ((support_8_bit == true) && (float_16_int_8_features.shaderInt8 == VK_TRUE)) {
        if (storage_8_bit_features.storageBuffer8BitAccess == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_8bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int8: enable
                layout(set = 0, binding = 0) buffer SSBO { int8_t x; } data;
                void main(){
                   int8_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_8_bit_features.uniformAndStorageBuffer8BitAccess == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_8bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int8: enable
                layout(set = 0, binding = 0) uniform UBO { int8_t x; } data;
                void main(){
                   int8_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_8_bit_features.storagePushConstant8 == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_8bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int8: enable
                layout(push_constant) uniform PushConstant { int8_t x; } data;
                void main(){
                   int8_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            // 4-byte range covers the single int8_t member (push constant ranges
            // must be multiples of 4).
            VkPushConstantRange push_constant_range = {VK_SHADER_STAGE_VERTEX_BIT, 0, 4};
            VkPipelineLayoutCreateInfo pipeline_layout_info{
                VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &push_constant_range};
            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.pipeline_layout_ci_ = pipeline_layout_info;
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }
    }

    // 16 bit float tests
    // Same three storage classes as the 8-bit section, plus storageInputOutput16
    // which needs matched float16_t vertex-out / fragment-in interface variables.
    if ((support_16_bit == true) && (float_16_int_8_features.shaderFloat16 == VK_TRUE)) {
        if (storage_16_bit_features.storageBuffer16BitAccess == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_float16: enable
                layout(set = 0, binding = 0) buffer SSBO { float16_t x; } data;
                void main(){
                   float16_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_16_bit_features.uniformAndStorageBuffer16BitAccess == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_float16: enable
                layout(set = 0, binding = 0) uniform UBO { float16_t x; } data;
                void main(){
                   float16_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_16_bit_features.storagePushConstant16 == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_float16: enable
                layout(push_constant) uniform PushConstant { float16_t x; } data;
                void main(){
                   float16_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            VkPushConstantRange push_constant_range = {VK_SHADER_STAGE_VERTEX_BIT, 0, 4};
            VkPipelineLayoutCreateInfo pipeline_layout_info{
                VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &push_constant_range};
            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.pipeline_layout_ci_ = pipeline_layout_info;
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_16_bit_features.storageInputOutput16 == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_float16: enable
                layout(location = 0) out float16_t outData;
                void main(){
                   outData = float16_t(1);
                   gl_Position = vec4(0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            // Need to match in/out
            char const *fsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_float16: enable
                layout(location = 0) in float16_t x;
                layout(location = 0) out vec4 uFragColor;
                void main(){
                   uFragColor = vec4(0,1,0,1);
                }
            )glsl";
            VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }
    }

    // 16 bit int tests
    // shaderInt16 is a core VkPhysicalDeviceFeatures member, hence features2.features here
    // (unlike shaderFloat16/shaderInt8 which live in the extension struct).
    if ((support_16_bit == true) && (features2.features.shaderInt16 == VK_TRUE)) {
        if (storage_16_bit_features.storageBuffer16BitAccess == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int16: enable
                layout(set = 0, binding = 0) buffer SSBO { int16_t x; } data;
                void main(){
                   int16_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_16_bit_features.uniformAndStorageBuffer16BitAccess == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int16: enable
                layout(set = 0, binding = 0) uniform UBO { int16_t x; } data;
                void main(){
                   int16_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_16_bit_features.storagePushConstant16 == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int16: enable
                layout(push_constant) uniform PushConstant { int16_t x; } data;
                void main(){
                   int16_t a = data.x + data.x;
                   gl_Position = vec4(float(a) * 0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            VkPushConstantRange push_constant_range = {VK_SHADER_STAGE_VERTEX_BIT, 0, 4};
            VkPipelineLayoutCreateInfo pipeline_layout_info{
                VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &push_constant_range};
            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
                helper.pipeline_layout_ci_ = pipeline_layout_info;
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }

        if (storage_16_bit_features.storageInputOutput16 == VK_TRUE) {
            char const *vsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int16: enable
                layout(location = 0) out int16_t outData;
                void main(){
                   outData = int16_t(1);
                   gl_Position = vec4(0.0);
                }
            )glsl";
            VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);

            // Need to match in/out
            // 'flat' is required on the fragment input because integer interpolants
            // cannot be smoothly interpolated.
            char const *fsSource = R"glsl(
                #version 450
                #extension GL_EXT_shader_16bit_storage: enable
                #extension GL_EXT_shader_explicit_arithmetic_types_int16: enable
                layout(location = 0) flat in int16_t x;
                layout(location = 0) out vec4 uFragColor;
                void main(){
                   uFragColor = vec4(0,1,0,1);
                }
            )glsl";
            VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

            const auto set_info = [&](CreatePipelineHelper &helper) {
                helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
            };
            CreatePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
        }
    }
    m_errorMonitor->VerifyNotFound();
}
3666
TEST_F(VkPositiveLayerTest, ReadShaderClock) {
    TEST_DESCRIPTION("Test VK_KHR_shader_clock");

    // Guard-clause style: bail out early for every missing prerequisite.
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SHADER_CLOCK_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_SHADER_CLOCK_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_KHR_SHADER_CLOCK_EXTENSION_NAME);

    auto get_features2 = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
    ASSERT_TRUE(get_features2 != nullptr);

    // Query which clock scopes (device and/or subgroup) the implementation exposes.
    auto clock_features = LvlInitStruct<VkPhysicalDeviceShaderClockFeaturesKHR>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&clock_features);
    get_features2(gpu(), &features2);

    const bool has_device_clock = (clock_features.shaderDeviceClock == VK_TRUE);
    const bool has_subgroup_clock = (clock_features.shaderSubgroupClock == VK_TRUE);
    if (!has_device_clock && !has_subgroup_clock) {
        // shaderSubgroupClock should be supported, but extra check
        printf("%s no support for shaderDeviceClock or shaderSubgroupClock.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Device scope using GL_EXT_shader_realtime_clock
    char const *realtime_clock_source = R"glsl(
        #version 450
        #extension GL_EXT_shader_realtime_clock: enable
        void main(){
           uvec2 a = clockRealtime2x32EXT();
           gl_Position = vec4(float(a.x) * 0.0);
        }
    )glsl";
    VkShaderObj realtime_vs(m_device, realtime_clock_source, VK_SHADER_STAGE_VERTEX_BIT, this);

    // Subgroup scope using ARB_shader_clock
    char const *subgroup_clock_source = R"glsl(
        #version 450
        #extension GL_ARB_shader_clock: enable
        void main(){
           uvec2 a = clock2x32ARB();
           gl_Position = vec4(float(a.x) * 0.0);
        }
    )glsl";
    VkShaderObj subgroup_vs(m_device, subgroup_clock_source, VK_SHADER_STAGE_VERTEX_BIT, this);

    // Each supported scope must build a pipeline without validation errors.
    if (has_device_clock) {
        const auto use_realtime_vs = [&](CreatePipelineHelper &helper) {
            helper.shader_stages_ = {realtime_vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
        };
        CreatePipelineHelper::OneshotTest(*this, use_realtime_vs, kErrorBit, "", true);
    }

    if (has_subgroup_clock) {
        const auto use_subgroup_vs = [&](CreatePipelineHelper &helper) {
            helper.shader_stages_ = {subgroup_vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
        };
        CreatePipelineHelper::OneshotTest(*this, use_subgroup_vs, kErrorBit, "", true);
    }
}
3738
3739 // Android Hardware Buffer Positive Tests
3740 #include "android_ndk_types.h"
3741 #ifdef AHB_VALIDATION_SUPPORT
TEST_F(VkPositiveLayerTest, AndroidHardwareBufferMemoryRequirements) {
    TEST_DESCRIPTION("Verify AndroidHardwareBuffer doesn't conflict with memory requirements.");

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (IsPlatform(kGalaxyS10)) {
        printf("%s This test should not run on Galaxy S10\n", kSkipPrefix);
        return;
    }

    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
    } else {
        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
        (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(m_device->device(),
                                                                               "vkGetAndroidHardwareBufferPropertiesANDROID");
    ASSERT_TRUE(pfn_GetAHBProps != nullptr);

    // Allocate an AHardwareBuffer
    AHardwareBuffer *ahb;
    AHardwareBuffer_Desc ahb_desc = {};
    ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
    ahb_desc.width = 64;
    ahb_desc.height = 1;
    ahb_desc.layers = 1;
    ahb_desc.stride = 1;
    AHardwareBuffer_allocate(&ahb_desc, &ahb);

    // The buffer must be created as exportable/importable via AHB for the import below.
    VkExternalMemoryBufferCreateInfo ext_buf_info = LvlInitStruct<VkExternalMemoryBufferCreateInfo>();
    ext_buf_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

    VkBufferCreateInfo buffer_create_info = LvlInitStruct<VkBufferCreateInfo>(&ext_buf_info);
    buffer_create_info.size = 512;
    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VkBuffer buffer = VK_NULL_HANDLE;
    vk::CreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);

    VkImportAndroidHardwareBufferInfoANDROID import_ahb_Info = LvlInitStruct<VkImportAndroidHardwareBufferInfoANDROID>();
    import_ahb_Info.buffer = ahb;

    // Query the AHB's allocation size and compatible memory types.
    VkAndroidHardwareBufferPropertiesANDROID ahb_props = LvlInitStruct<VkAndroidHardwareBufferPropertiesANDROID>();
    pfn_GetAHBProps(m_device->device(), ahb, &ahb_props);

    VkMemoryAllocateInfo memory_allocate_info = LvlInitStruct<VkMemoryAllocateInfo>(&import_ahb_Info);
    memory_allocate_info.allocationSize = ahb_props.allocationSize;

    // Set index to match one of the bits in ahb_props that is also only Device Local
    // Android implemenetations "should have" a DEVICE_LOCAL only index designed for AHB
    VkMemoryPropertyFlagBits property = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    VkPhysicalDeviceMemoryProperties gpu_memory_props;
    vk::GetPhysicalDeviceMemoryProperties(gpu(), &gpu_memory_props);
    // Out-of-range sentinel; stays out of range if no matching type is found below.
    memory_allocate_info.memoryTypeIndex = gpu_memory_props.memoryTypeCount + 1;
    for (uint32_t i = 0; i < gpu_memory_props.memoryTypeCount; i++) {
        if ((ahb_props.memoryTypeBits & (1 << i)) && ((gpu_memory_props.memoryTypes[i].propertyFlags & property) == property)) {
            memory_allocate_info.memoryTypeIndex = i;
            break;
        }
    }

    if (memory_allocate_info.memoryTypeIndex >= gpu_memory_props.memoryTypeCount) {
        // This is a positive test: it needs a *valid* AHB-compatible DEVICE_LOCAL index.
        // (Message previously said "invalid", which contradicted the search above.)
        printf("%s No valid memory type index could be found; skipped.\n", kSkipPrefix);
        AHardwareBuffer_release(ahb);
        vk::DestroyBuffer(m_device->device(), buffer, nullptr);
        return;
    }

    // Should be able to bind memory with no error
    VkDeviceMemory memory;
    m_errorMonitor->ExpectSuccess();
    vk::AllocateMemory(m_device->device(), &memory_allocate_info, nullptr, &memory);
    vk::BindBufferMemory(m_device->device(), buffer, memory, 0);
    m_errorMonitor->VerifyNotFound();

    vk::DestroyBuffer(m_device->device(), buffer, nullptr);
    vk::FreeMemory(m_device->device(), memory, nullptr);
}
3835
TEST_F(VkPositiveLayerTest,AndroidHardwareBufferDepthStencil)3836 TEST_F(VkPositiveLayerTest, AndroidHardwareBufferDepthStencil) {
3837 TEST_DESCRIPTION("Verify AndroidHardwareBuffer can import Depth/Stencil");
3838
3839 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
3840
3841 if (IsPlatform(kGalaxyS10) || IsPlatform(kShieldTV) || IsPlatform(kShieldTVb)) {
3842 printf("%s This test should not run on Galaxy S10 or the ShieldTV\n", kSkipPrefix);
3843 return;
3844 }
3845
3846 if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
3847 // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
3848 (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
3849 m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
3850 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
3851 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
3852 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
3853 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
3854 m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
3855 m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
3856 m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
3857 } else {
3858 printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
3859 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
3860 return;
3861 }
3862
3863 ASSERT_NO_FATAL_FAILURE(InitState());
3864
3865 PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
3866 (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(m_device->device(),
3867 "vkGetAndroidHardwareBufferPropertiesANDROID");
3868 ASSERT_TRUE(pfn_GetAHBProps != nullptr);
3869
3870 // Allocate an AHardwareBuffer
3871 AHardwareBuffer *ahb;
3872 AHardwareBuffer_Desc ahb_desc = {};
3873 ahb_desc.format = AHARDWAREBUFFER_FORMAT_D16_UNORM;
3874 ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
3875 ahb_desc.width = 64;
3876 ahb_desc.height = 1;
3877 ahb_desc.layers = 1;
3878 ahb_desc.stride = 1;
3879 AHardwareBuffer_allocate(&ahb_desc, &ahb);
3880
3881 VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = LvlInitStruct<VkAndroidHardwareBufferFormatPropertiesANDROID>();
3882
3883 VkAndroidHardwareBufferPropertiesANDROID ahb_props = LvlInitStruct<VkAndroidHardwareBufferPropertiesANDROID>(&ahb_fmt_props);
3884 pfn_GetAHBProps(m_device->device(), ahb, &ahb_props);
3885
3886 VkExternalMemoryImageCreateInfo ext_image_info = LvlInitStruct<VkExternalMemoryImageCreateInfo>();
3887 ext_image_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
3888
3889 // Create a Depth/Stencil image
3890 VkImage dsImage;
3891 VkImageCreateInfo image_create_info = LvlInitStruct<VkImageCreateInfo>(&ext_image_info);
3892 image_create_info.flags = 0;
3893 image_create_info.imageType = VK_IMAGE_TYPE_2D;
3894 image_create_info.format = ahb_fmt_props.format;
3895 image_create_info.extent = {64, 1, 1};
3896 image_create_info.mipLevels = 1;
3897 image_create_info.arrayLayers = 1;
3898 image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
3899 image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
3900 image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
3901 image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
3902 image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3903 vk::CreateImage(m_device->device(), &image_create_info, nullptr, &dsImage);
3904
3905 VkMemoryDedicatedAllocateInfo memory_dedicated_info = LvlInitStruct<VkMemoryDedicatedAllocateInfo>();
3906 memory_dedicated_info.image = dsImage;
3907 memory_dedicated_info.buffer = VK_NULL_HANDLE;
3908
3909 VkImportAndroidHardwareBufferInfoANDROID import_ahb_Info =
3910 LvlInitStruct<VkImportAndroidHardwareBufferInfoANDROID>(&memory_dedicated_info);
3911 import_ahb_Info.buffer = ahb;
3912
3913 VkMemoryAllocateInfo memory_allocate_info = LvlInitStruct<VkMemoryAllocateInfo>(&import_ahb_Info);
3914 memory_allocate_info.allocationSize = ahb_props.allocationSize;
3915
3916 // Set index to match one of the bits in ahb_props that is also only Device Local
3917 // Android implemenetations "should have" a DEVICE_LOCAL only index designed for AHB
3918 VkMemoryPropertyFlagBits property = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3919 VkPhysicalDeviceMemoryProperties gpu_memory_props;
3920 vk::GetPhysicalDeviceMemoryProperties(gpu(), &gpu_memory_props);
3921 memory_allocate_info.memoryTypeIndex = gpu_memory_props.memoryTypeCount + 1;
3922 for (uint32_t i = 0; i < gpu_memory_props.memoryTypeCount; i++) {
3923 if ((ahb_props.memoryTypeBits & (1 << i)) && ((gpu_memory_props.memoryTypes[i].propertyFlags & property) == property)) {
3924 memory_allocate_info.memoryTypeIndex = i;
3925 break;
3926 }
3927 }
3928
3929 if (memory_allocate_info.memoryTypeIndex >= gpu_memory_props.memoryTypeCount) {
3930 printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
3931 AHardwareBuffer_release(ahb);
3932 vk::DestroyImage(m_device->device(), dsImage, nullptr);
3933 return;
3934 }
3935
3936 VkDeviceMemory memory;
3937 m_errorMonitor->ExpectSuccess();
3938 vk::AllocateMemory(m_device->device(), &memory_allocate_info, nullptr, &memory);
3939 vk::BindImageMemory(m_device->device(), dsImage, memory, 0);
3940 m_errorMonitor->VerifyNotFound();
3941
3942 vk::DestroyImage(m_device->device(), dsImage, nullptr);
3943 vk::FreeMemory(m_device->device(), memory, nullptr);
3944 }
3945
TEST_F(VkPositiveLayerTest,AndroidHardwareBufferBindBufferMemory)3946 TEST_F(VkPositiveLayerTest, AndroidHardwareBufferBindBufferMemory) {
3947 TEST_DESCRIPTION("Verify AndroidHardwareBuffer Buffers can be queried for mem requirements while unbound.");
3948
3949 SetTargetApiVersion(VK_API_VERSION_1_1);
3950 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
3951
3952 if (IsPlatform(kGalaxyS10)) {
3953 printf("%s This test should not run on Galaxy S10\n", kSkipPrefix);
3954 return;
3955 }
3956
3957 if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
3958 // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
3959 (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
3960 m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
3961 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
3962 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
3963 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
3964 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
3965 m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
3966 m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
3967 } else {
3968 printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
3969 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
3970 return;
3971 }
3972
3973 ASSERT_NO_FATAL_FAILURE(InitState());
3974
3975 PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
3976 (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(m_device->device(),
3977 "vkGetAndroidHardwareBufferPropertiesANDROID");
3978 ASSERT_TRUE(pfn_GetAHBProps != nullptr);
3979
3980 // Allocate an AHardwareBuffer
3981 AHardwareBuffer *ahb;
3982 AHardwareBuffer_Desc ahb_desc = {};
3983 ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
3984 ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
3985 ahb_desc.width = 64;
3986 ahb_desc.height = 1;
3987 ahb_desc.layers = 1;
3988 ahb_desc.stride = 1;
3989 AHardwareBuffer_allocate(&ahb_desc, &ahb);
3990
3991 VkExternalMemoryBufferCreateInfo ext_buf_info = LvlInitStruct<VkExternalMemoryBufferCreateInfo>();
3992 ext_buf_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
3993
3994 VkBufferCreateInfo buffer_create_info = LvlInitStruct<VkBufferCreateInfo>(&ext_buf_info);
3995 buffer_create_info.size = 8192; // greater than the 4k AHB usually are
3996 buffer_create_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3997
3998 VkBuffer buffer = VK_NULL_HANDLE;
3999 vk::CreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
4000
4001 m_errorMonitor->ExpectSuccess();
4002 // Try to get memory requirements prior to binding memory
4003 VkMemoryRequirements mem_reqs;
4004 vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
4005
4006 // Test bind memory 2 extension
4007 VkBufferMemoryRequirementsInfo2 buffer_mem_reqs2 = LvlInitStruct<VkBufferMemoryRequirementsInfo2>();
4008 buffer_mem_reqs2.buffer = buffer;
4009 VkMemoryRequirements2 mem_reqs2 = LvlInitStruct<VkMemoryRequirements2>();
4010 vk::GetBufferMemoryRequirements2(m_device->device(), &buffer_mem_reqs2, &mem_reqs2);
4011
4012 VkImportAndroidHardwareBufferInfoANDROID import_ahb_Info = LvlInitStruct<VkImportAndroidHardwareBufferInfoANDROID>();
4013 import_ahb_Info.buffer = ahb;
4014
4015 VkMemoryAllocateInfo memory_info = LvlInitStruct<VkMemoryAllocateInfo>(&import_ahb_Info);
4016 memory_info.allocationSize = mem_reqs.size + mem_reqs.alignment; // save room for offset
4017 bool has_memtype = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &memory_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
4018 if (!has_memtype) {
4019 printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
4020 AHardwareBuffer_release(ahb);
4021 vk::DestroyBuffer(m_device->device(), buffer, nullptr);
4022 return;
4023 }
4024
4025 // Some drivers don't return exact size in getBufferMemory as getAHB
4026 m_errorMonitor->SetUnexpectedError("VUID-VkMemoryAllocateInfo-allocationSize-02383");
4027 VkDeviceMemory memory;
4028 vk::AllocateMemory(m_device->device(), &memory_info, NULL, &memory);
4029 vk::BindBufferMemory(m_device->device(), buffer, memory, mem_reqs.alignment);
4030
4031 m_errorMonitor->VerifyNotFound();
4032
4033 vk::DestroyBuffer(m_device->device(), buffer, nullptr);
4034 vk::FreeMemory(m_device->device(), memory, nullptr);
4035 }
4036
TEST_F(VkPositiveLayerTest,AndroidHardwareBufferExportBuffer)4037 TEST_F(VkPositiveLayerTest, AndroidHardwareBufferExportBuffer) {
4038 TEST_DESCRIPTION("Verify VkBuffers can export to an AHB.");
4039
4040 SetTargetApiVersion(VK_API_VERSION_1_1);
4041 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
4042
4043 if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
4044 // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
4045 (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
4046 m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4047 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
4048 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
4049 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
4050 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
4051 m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
4052 m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
4053 } else {
4054 printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
4055 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4056 return;
4057 }
4058
4059 ASSERT_NO_FATAL_FAILURE(InitState());
4060
4061 PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID =
4062 (PFN_vkGetMemoryAndroidHardwareBufferANDROID)vk::GetDeviceProcAddr(device(), "vkGetMemoryAndroidHardwareBufferANDROID");
4063 ASSERT_TRUE(vkGetMemoryAndroidHardwareBufferANDROID != nullptr);
4064
4065 m_errorMonitor->ExpectSuccess();
4066
4067 // Create VkBuffer to be exported to an AHB
4068 VkBuffer buffer = VK_NULL_HANDLE;
4069 VkExternalMemoryBufferCreateInfo ext_buf_info = LvlInitStruct<VkExternalMemoryBufferCreateInfo>();
4070 ext_buf_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
4071
4072 VkBufferCreateInfo buffer_create_info = LvlInitStruct<VkBufferCreateInfo>(&ext_buf_info);
4073 buffer_create_info.size = 4096;
4074 buffer_create_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4075 vk::CreateBuffer(device(), &buffer_create_info, nullptr, &buffer);
4076
4077 VkMemoryRequirements mem_reqs;
4078 vk::GetBufferMemoryRequirements(device(), buffer, &mem_reqs);
4079
4080 VkExportMemoryAllocateInfo export_memory_info = LvlInitStruct<VkExportMemoryAllocateInfo>();
4081 export_memory_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
4082
4083 VkMemoryAllocateInfo memory_info = LvlInitStruct<VkMemoryAllocateInfo>(&export_memory_info);
4084 memory_info.allocationSize = mem_reqs.size;
4085
4086 bool has_memtype = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &memory_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
4087 if (!has_memtype) {
4088 printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
4089 vk::DestroyBuffer(device(), buffer, nullptr);
4090 return;
4091 }
4092
4093 VkDeviceMemory memory = VK_NULL_HANDLE;
4094 vk::AllocateMemory(device(), &memory_info, NULL, &memory);
4095 vk::BindBufferMemory(device(), buffer, memory, 0);
4096
4097 // Export memory to AHB
4098 AHardwareBuffer *ahb = nullptr;
4099
4100 VkMemoryGetAndroidHardwareBufferInfoANDROID get_ahb_info = LvlInitStruct<VkMemoryGetAndroidHardwareBufferInfoANDROID>();
4101 get_ahb_info.memory = memory;
4102 vkGetMemoryAndroidHardwareBufferANDROID(device(), &get_ahb_info, &ahb);
4103
4104 m_errorMonitor->VerifyNotFound();
4105
4106 // App in charge of releasing after exporting
4107 AHardwareBuffer_release(ahb);
4108 vk::FreeMemory(device(), memory, NULL);
4109 vk::DestroyBuffer(device(), buffer, nullptr);
4110 }
4111
TEST_F(VkPositiveLayerTest,AndroidHardwareBufferExportImage)4112 TEST_F(VkPositiveLayerTest, AndroidHardwareBufferExportImage) {
4113 TEST_DESCRIPTION("Verify VkImages can export to an AHB.");
4114
4115 SetTargetApiVersion(VK_API_VERSION_1_1);
4116 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
4117
4118 if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
4119 // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
4120 (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
4121 m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4122 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
4123 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
4124 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
4125 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
4126 m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
4127 m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
4128 } else {
4129 printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
4130 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4131 return;
4132 }
4133
4134 ASSERT_NO_FATAL_FAILURE(InitState());
4135
4136 PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID =
4137 (PFN_vkGetMemoryAndroidHardwareBufferANDROID)vk::GetDeviceProcAddr(device(), "vkGetMemoryAndroidHardwareBufferANDROID");
4138 ASSERT_TRUE(vkGetMemoryAndroidHardwareBufferANDROID != nullptr);
4139
4140 m_errorMonitor->ExpectSuccess();
4141
4142 // Create VkImage to be exported to an AHB
4143 VkExternalMemoryImageCreateInfo ext_image_info = LvlInitStruct<VkExternalMemoryImageCreateInfo>();
4144 ext_image_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
4145
4146 VkImage image = VK_NULL_HANDLE;
4147 VkImageCreateInfo image_create_info = LvlInitStruct<VkImageCreateInfo>(&ext_image_info);
4148 image_create_info.flags = 0;
4149 image_create_info.imageType = VK_IMAGE_TYPE_2D;
4150 image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
4151 image_create_info.extent = {64, 1, 1};
4152 image_create_info.mipLevels = 1;
4153 image_create_info.arrayLayers = 1;
4154 image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
4155 image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
4156 image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
4157 image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4158 image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4159 vk::CreateImage(device(), &image_create_info, nullptr, &image);
4160
4161 VkMemoryDedicatedAllocateInfo memory_dedicated_info = LvlInitStruct<VkMemoryDedicatedAllocateInfo>();
4162 memory_dedicated_info.image = image;
4163 memory_dedicated_info.buffer = VK_NULL_HANDLE;
4164
4165 VkExportMemoryAllocateInfo export_memory_info = LvlInitStruct<VkExportMemoryAllocateInfo>(&memory_dedicated_info);
4166 export_memory_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
4167
4168 VkMemoryAllocateInfo memory_info = LvlInitStruct<VkMemoryAllocateInfo>(&export_memory_info);
4169
4170 // "When allocating new memory for an image that can be exported to an Android hardware buffer, the memory’s allocationSize must
4171 // be zero":
4172 memory_info.allocationSize = 0;
4173
4174 // Use any DEVICE_LOCAL memory found
4175 bool has_memtype = m_device->phy().set_memory_type(0xFFFFFFFF, &memory_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
4176 if (!has_memtype) {
4177 printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
4178 vk::DestroyImage(device(), image, nullptr);
4179 return;
4180 }
4181
4182 VkDeviceMemory memory = VK_NULL_HANDLE;
4183 vk::AllocateMemory(device(), &memory_info, NULL, &memory);
4184 vk::BindImageMemory(device(), image, memory, 0);
4185
4186 // Export memory to AHB
4187 AHardwareBuffer *ahb = nullptr;
4188
4189 VkMemoryGetAndroidHardwareBufferInfoANDROID get_ahb_info = LvlInitStruct<VkMemoryGetAndroidHardwareBufferInfoANDROID>();
4190 get_ahb_info.memory = memory;
4191 vkGetMemoryAndroidHardwareBufferANDROID(device(), &get_ahb_info, &ahb);
4192
4193 m_errorMonitor->VerifyNotFound();
4194
4195 // App in charge of releasing after exporting
4196 AHardwareBuffer_release(ahb);
4197 vk::FreeMemory(device(), memory, NULL);
4198 vk::DestroyImage(device(), image, nullptr);
4199 }
4200
TEST_F(VkPositiveLayerTest,AndroidHardwareBufferExternalImage)4201 TEST_F(VkPositiveLayerTest, AndroidHardwareBufferExternalImage) {
4202 TEST_DESCRIPTION("Verify AndroidHardwareBuffer can import AHB with external format");
4203
4204 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
4205
4206 if (IsPlatform(kGalaxyS10)) {
4207 printf("%s This test should not run on Galaxy S10\n", kSkipPrefix);
4208 return;
4209 }
4210
4211 if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
4212 // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
4213 (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
4214 m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4215 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
4216 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
4217 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
4218 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
4219 m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
4220 m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
4221 m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
4222 } else {
4223 printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
4224 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4225 return;
4226 }
4227
4228 ASSERT_NO_FATAL_FAILURE(InitState());
4229
4230 PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
4231 (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(m_device->device(),
4232 "vkGetAndroidHardwareBufferPropertiesANDROID");
4233 ASSERT_TRUE(pfn_GetAHBProps != nullptr);
4234
4235 // FORMAT_Y8Cb8Cr8_420 is a known/public valid AHB Format but does not have a Vulkan mapping to it
4236 // Will use the external image feature to get access to it
4237 AHardwareBuffer *ahb;
4238 AHardwareBuffer_Desc ahb_desc = {};
4239 ahb_desc.format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
4240 ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
4241 ahb_desc.width = 64;
4242 ahb_desc.height = 64;
4243 ahb_desc.layers = 1;
4244 ahb_desc.stride = 1;
4245 int result = AHardwareBuffer_allocate(&ahb_desc, &ahb);
4246 if (result != 0) {
4247 printf("%s could not allocate AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420, skipping tests\n", kSkipPrefix);
4248 return;
4249 }
4250
4251 VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = LvlInitStruct<VkAndroidHardwareBufferFormatPropertiesANDROID>();
4252
4253 VkAndroidHardwareBufferPropertiesANDROID ahb_props = LvlInitStruct<VkAndroidHardwareBufferPropertiesANDROID>(&ahb_fmt_props);
4254 pfn_GetAHBProps(m_device->device(), ahb, &ahb_props);
4255
4256 // The spec says the driver must not return zero, even if a VkFormat is returned with it, some older drivers do as a driver bug
4257 if (ahb_fmt_props.externalFormat == 0) {
4258 printf("%s externalFormat was zero which is not valid, skipping tests\n", kSkipPrefix);
4259 return;
4260 }
4261
4262 // Create an image w/ external format
4263 VkExternalFormatANDROID ext_format = LvlInitStruct<VkExternalFormatANDROID>();
4264 ext_format.externalFormat = ahb_fmt_props.externalFormat;
4265
4266 VkExternalMemoryImageCreateInfo ext_image_info = LvlInitStruct<VkExternalMemoryImageCreateInfo>(&ext_format);
4267 ext_image_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
4268
4269 VkImage image = VK_NULL_HANDLE;
4270 VkImageCreateInfo image_create_info = LvlInitStruct<VkImageCreateInfo>(&ext_image_info);
4271 image_create_info.flags = 0;
4272 image_create_info.imageType = VK_IMAGE_TYPE_2D;
4273 image_create_info.format = VK_FORMAT_UNDEFINED;
4274 image_create_info.extent = {64, 64, 1};
4275 image_create_info.mipLevels = 1;
4276 image_create_info.arrayLayers = 1;
4277 image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
4278 image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
4279 image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
4280 image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4281 image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
4282 vk::CreateImage(m_device->device(), &image_create_info, nullptr, &image);
4283 if (image == VK_NULL_HANDLE) {
4284 printf("%s could not create image with external format, skipping tests\n", kSkipPrefix);
4285 return;
4286 }
4287
4288 VkMemoryDedicatedAllocateInfo memory_dedicated_info = LvlInitStruct<VkMemoryDedicatedAllocateInfo>();
4289 memory_dedicated_info.image = image;
4290 memory_dedicated_info.buffer = VK_NULL_HANDLE;
4291
4292 VkImportAndroidHardwareBufferInfoANDROID import_ahb_Info =
4293 LvlInitStruct<VkImportAndroidHardwareBufferInfoANDROID>(&memory_dedicated_info);
4294 import_ahb_Info.buffer = ahb;
4295
4296 VkMemoryAllocateInfo memory_allocate_info = LvlInitStruct<VkMemoryAllocateInfo>(&import_ahb_Info);
4297 memory_allocate_info.allocationSize = ahb_props.allocationSize;
4298
4299 // Set index to match one of the bits in ahb_props that is also only Device Local
4300 // Android implemenetations "should have" a DEVICE_LOCAL only index designed for AHB
4301 VkMemoryPropertyFlagBits property = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4302 VkPhysicalDeviceMemoryProperties gpu_memory_props;
4303 vk::GetPhysicalDeviceMemoryProperties(gpu(), &gpu_memory_props);
4304 memory_allocate_info.memoryTypeIndex = gpu_memory_props.memoryTypeCount + 1;
4305 for (uint32_t i = 0; i < gpu_memory_props.memoryTypeCount; i++) {
4306 if ((ahb_props.memoryTypeBits & (1 << i)) && ((gpu_memory_props.memoryTypes[i].propertyFlags & property) == property)) {
4307 memory_allocate_info.memoryTypeIndex = i;
4308 break;
4309 }
4310 }
4311
4312 if (memory_allocate_info.memoryTypeIndex >= gpu_memory_props.memoryTypeCount) {
4313 printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
4314 AHardwareBuffer_release(ahb);
4315 vk::DestroyImage(m_device->device(), image, nullptr);
4316 return;
4317 }
4318
4319 VkDeviceMemory memory;
4320 m_errorMonitor->ExpectSuccess();
4321 vk::AllocateMemory(m_device->device(), &memory_allocate_info, nullptr, &memory);
4322 vk::BindImageMemory(m_device->device(), image, memory, 0);
4323 m_errorMonitor->VerifyNotFound();
4324
4325 vk::DestroyImage(m_device->device(), image, nullptr);
4326 vk::FreeMemory(m_device->device(), memory, nullptr);
4327 }
4328
TEST_F(VkPositiveLayerTest,AndroidHardwareBufferExternalCameraFormat)4329 TEST_F(VkPositiveLayerTest, AndroidHardwareBufferExternalCameraFormat) {
4330 TEST_DESCRIPTION("Verify AndroidHardwareBuffer can import AHB with external format");
4331
4332 ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
4333
4334 if (IsPlatform(kGalaxyS10)) {
4335 printf("%s This test should not run on Galaxy S10\n", kSkipPrefix);
4336 return;
4337 }
4338
4339 if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
4340 // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
4341 (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
4342 m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4343 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
4344 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
4345 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
4346 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
4347 m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
4348 m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
4349 m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
4350 } else {
4351 printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
4352 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
4353 return;
4354 }
4355
4356 ASSERT_NO_FATAL_FAILURE(InitState());
4357
4358 PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
4359 (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(m_device->device(),
4360 "vkGetAndroidHardwareBufferPropertiesANDROID");
4361 ASSERT_TRUE(pfn_GetAHBProps != nullptr);
4362
4363 m_errorMonitor->ExpectSuccess();
4364
4365 // Simulate camera usage of AHB
4366 AHardwareBuffer *ahb;
4367 AHardwareBuffer_Desc ahb_desc = {};
4368 ahb_desc.format = AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED;
4369 ahb_desc.usage =
4370 AHARDWAREBUFFER_USAGE_CAMERA_WRITE | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
4371 ahb_desc.width = 64;
4372 ahb_desc.height = 64;
4373 ahb_desc.layers = 1;
4374 ahb_desc.stride = 1;
4375 int result = AHardwareBuffer_allocate(&ahb_desc, &ahb);
4376 if (result != 0) {
4377 printf("%s could not allocate AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED, skipping tests\n", kSkipPrefix);
4378 return;
4379 }
4380
4381 VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = LvlInitStruct<VkAndroidHardwareBufferFormatPropertiesANDROID>();
4382
4383 VkAndroidHardwareBufferPropertiesANDROID ahb_props = LvlInitStruct<VkAndroidHardwareBufferPropertiesANDROID>(&ahb_fmt_props);
4384 pfn_GetAHBProps(m_device->device(), ahb, &ahb_props);
4385
4386 // The spec says the driver must not return zero, even if a VkFormat is returned with it, some older drivers do as a driver bug
4387 if (ahb_fmt_props.externalFormat == 0) {
4388 printf("%s externalFormat was zero which is not valid, skipping tests\n", kSkipPrefix);
4389 return;
4390 }
4391
4392 // Create an image w/ external format
4393 VkExternalFormatANDROID ext_format = LvlInitStruct<VkExternalFormatANDROID>();
4394 ext_format.externalFormat = ahb_fmt_props.externalFormat;
4395
4396 VkExternalMemoryImageCreateInfo ext_image_info = LvlInitStruct<VkExternalMemoryImageCreateInfo>(&ext_format);
4397 ext_image_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
4398
4399 VkImage image = VK_NULL_HANDLE;
4400 VkImageCreateInfo image_create_info = LvlInitStruct<VkImageCreateInfo>(&ext_image_info);
4401 image_create_info.flags = 0;
4402 image_create_info.imageType = VK_IMAGE_TYPE_2D;
4403 image_create_info.format = VK_FORMAT_UNDEFINED;
4404 image_create_info.extent = {64, 64, 1};
4405 image_create_info.mipLevels = 1;
4406 image_create_info.arrayLayers = 1;
4407 image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
4408 image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
4409 image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
4410 image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4411 image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
4412 vk::CreateImage(m_device->device(), &image_create_info, nullptr, &image);
4413 if (image == VK_NULL_HANDLE) {
4414 printf("%s could not create image with external format, skipping tests\n", kSkipPrefix);
4415 return;
4416 }
4417
4418 VkMemoryDedicatedAllocateInfo memory_dedicated_info = LvlInitStruct<VkMemoryDedicatedAllocateInfo>();
4419 memory_dedicated_info.image = image;
4420 memory_dedicated_info.buffer = VK_NULL_HANDLE;
4421
4422 VkImportAndroidHardwareBufferInfoANDROID import_ahb_Info =
4423 LvlInitStruct<VkImportAndroidHardwareBufferInfoANDROID>(&memory_dedicated_info);
4424 import_ahb_Info.buffer = ahb;
4425
4426 VkMemoryAllocateInfo memory_allocate_info = LvlInitStruct<VkMemoryAllocateInfo>(&import_ahb_Info);
4427 memory_allocate_info.allocationSize = ahb_props.allocationSize;
4428
4429 // Set index to match one of the bits in ahb_props that is also only Device Local
4430 // Android implemenetations "should have" a DEVICE_LOCAL only index designed for AHB
4431 VkMemoryPropertyFlagBits property = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4432 VkPhysicalDeviceMemoryProperties gpu_memory_props;
4433 vk::GetPhysicalDeviceMemoryProperties(gpu(), &gpu_memory_props);
4434 memory_allocate_info.memoryTypeIndex = gpu_memory_props.memoryTypeCount + 1;
4435 for (uint32_t i = 0; i < gpu_memory_props.memoryTypeCount; i++) {
4436 if ((ahb_props.memoryTypeBits & (1 << i)) && ((gpu_memory_props.memoryTypes[i].propertyFlags & property) == property)) {
4437 memory_allocate_info.memoryTypeIndex = i;
4438 break;
4439 }
4440 }
4441
4442 if (memory_allocate_info.memoryTypeIndex >= gpu_memory_props.memoryTypeCount) {
4443 printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
4444 AHardwareBuffer_release(ahb);
4445 vk::DestroyImage(m_device->device(), image, nullptr);
4446 return;
4447 }
4448
4449 VkDeviceMemory memory;
4450 vk::AllocateMemory(m_device->device(), &memory_allocate_info, nullptr, &memory);
4451 vk::BindImageMemory(m_device->device(), image, memory, 0);
4452 m_errorMonitor->VerifyNotFound();
4453
4454 vk::DestroyImage(m_device->device(), image, nullptr);
4455 vk::FreeMemory(m_device->device(), memory, nullptr);
4456 }
4457
4458 #endif // AHB_VALIDATION_SUPPORT
4459
TEST_F(VkPositiveLayerTest, PhysicalStorageBuffer) {
    TEST_DESCRIPTION("Reproduces Github issue #2467 and effectively #2465 as well.");

    app_info_.apiVersion = VK_API_VERSION_1_2;
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    // All three extensions are required for the shaders below; bail out if any is missing.
    std::vector<const char *> exts = {
        "VK_EXT_buffer_device_address",  // TODO (ncesario) why does VK_KHR_buffer_device_address not work?
        "VK_KHR_shader_non_semantic_info",
        "VK_EXT_scalar_block_layout",
    };
    for (const auto *ext : exts) {
        if (DeviceExtensionSupported(gpu(), nullptr, ext)) {
            m_device_extension_names.push_back(ext);
        } else {
            // Fixed: skip message was missing its trailing newline.
            printf("%s %s extension not supported. Skipping.\n", kSkipPrefix, ext);
            return;
        }
    }

    auto features12 = LvlInitStruct<VkPhysicalDeviceVulkan12Features>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&features12);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);

    if (VK_TRUE != features12.bufferDeviceAddress) {
        // Fixed: skip message was missing its trailing newline.
        printf("%s VkPhysicalDeviceVulkan12Features::bufferDeviceAddress not supported and is required. Skipping.\n",
               kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader reads a physical-storage-buffer reference out of the first push constant range.
    const char *vertex_source = R"glsl(
        #version 450

        #extension GL_EXT_buffer_reference : enable
        #extension GL_EXT_scalar_block_layout : enable

        layout(buffer_reference, buffer_reference_align=16, scalar) readonly buffer VectorBuffer {
            vec3 v;
        };

        layout(push_constant, scalar) uniform pc {
            VectorBuffer vb;
        } pcs;

        void main() {
            gl_Position = vec4(pcs.vb.v, 1.0);
        }
        )glsl";
    const VkShaderObj vs(m_device, vertex_source, VK_SHADER_STAGE_VERTEX_BIT, this);

    // Fragment shader reads the same reference type from a second range at offset 8.
    const char *fragment_source = R"glsl(
        #version 450

        #extension GL_EXT_buffer_reference : enable
        #extension GL_EXT_scalar_block_layout : enable

        layout(buffer_reference, buffer_reference_align=16, scalar) readonly buffer VectorBuffer {
            vec3 v;
        };

        layout(push_constant, scalar) uniform pushConstants {
            layout(offset=8) VectorBuffer vb;
        } pcs;

        layout(location=0) out vec4 o;
        void main() {
            o = vec4(pcs.vb.v, 1.0);
        }
        )glsl";
    const VkShaderObj fs(m_device, fragment_source, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    m_errorMonitor->ExpectSuccess();

    // Two disjoint 8-byte push constant ranges: [0, 8) for the vertex stage and
    // [8, 16) for the fragment stage, matching the layout(offset=8) declaration above.
    std::array<VkPushConstantRange, 2> push_ranges;
    push_ranges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
    push_ranges[0].size = sizeof(uint64_t);
    push_ranges[0].offset = 0;
    push_ranges[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    push_ranges[1].size = sizeof(uint64_t);
    push_ranges[1].offset = sizeof(uint64_t);

    VkPipelineLayoutCreateInfo const pipeline_layout_info{
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr,
        static_cast<uint32_t>(push_ranges.size()), push_ranges.data()};

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.pipeline_layout_ci_ = pipeline_layout_info;
    pipe.InitState();
    // Fixed: removed a second, redundant ExpectSuccess() call that appeared here.
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
4556
TEST_F(VkPositiveLayerTest, OpCopyObjectSampler) {
    TEST_DESCRIPTION("Reproduces a use case involving GL_EXT_nonuniform_qualifier and image samplers found in Doom Eternal trace");

    // https://github.com/KhronosGroup/glslang/pull/1762 appears to be the change that introduces the OpCopyObject in this context.

    SetTargetApiVersion(VK_API_VERSION_1_2);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    auto features12 = LvlInitStruct<VkPhysicalDeviceVulkan12Features>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&features12);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);

    if (VK_TRUE != features12.shaderStorageTexelBufferArrayNonUniformIndexing) {
        // Fixed: skip message was missing its trailing newline.
        printf(
            "%s VkPhysicalDeviceVulkan12Features::shaderStorageTexelBufferArrayNonUniformIndexing not supported and is required. "
            "Skipping.\n",
            kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Trivial vertex shader that just feeds a constant index to the fragment stage.
    const char *vertex_source = R"glsl(
        #version 450

        layout(location=0) out int idx;

        void main() {
            idx = 0;
            gl_Position = vec4(0.0);
        }
        )glsl";
    const VkShaderObj vs(m_device, vertex_source, VK_SHADER_STAGE_VERTEX_BIT, this);

    const char *fragment_source = R"glsl(
        #version 450
        #extension GL_EXT_nonuniform_qualifier : require

        layout(set=0, binding=0) uniform sampler s;
        layout(set=0, binding=1) uniform texture2D t[1];
        layout(location=0) in flat int idx;

        layout(location=0) out vec4 frag_color;

        void main() {
            // Using nonuniformEXT on the index into the image array creates the OpCopyObject instead of an OpLoad, which
            // was causing problems with how constants are identified.
            frag_color = texture(sampler2D(t[nonuniformEXT(idx)], s), vec2(0.0));
        }

        )glsl";
    // Compile against the SPIR-V 1.5 / Vulkan 1.2 environment so nonuniformEXT is available.
    const VkShaderObj fs(m_device, fragment_source, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main", false, nullptr, SPV_ENV_VULKAN_1_2);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.dsl_bindings_ = {
        {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
        {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
    };
    pipe.InitState();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    m_errorMonitor->ExpectSuccess();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
4622
TEST_F(VkPositiveLayerTest, InitSwapchain) {
    // Fixed typo in description: "anying" -> "any".
    TEST_DESCRIPTION("Make sure InitSwapchain is not producing any invalid usage");

    if (!AddSurfaceInstanceExtension()) {
        // Fixed: skip messages referenced "CmdCopySwapchainImage" (copy-paste from another test).
        printf("%s surface extensions not supported, skipping InitSwapchain test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (!AddSwapchainDeviceExtension()) {
        printf("%s swapchain extensions not supported, skipping InitSwapchain test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    // The point of the test: the framework's own swapchain setup/teardown helpers
    // must not trigger any validation errors.
    m_errorMonitor->ExpectSuccess();
    if (InitSwapchain()) {
        DestroySwapchain();
    }
    m_errorMonitor->VerifyNotFound();
}
4646
TEST_F(VkPositiveLayerTest, DestroySwapchainWithBoundImages) {
    TEST_DESCRIPTION("Try destroying a swapchain which has multiple images");

    if (!AddSurfaceInstanceExtension()) return;
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    // Check for VK_KHR_bind_memory2 extension (needed for vkBindImageMemory2KHR below).
    // Fixed: comment previously referenced VK_KHR_get_memory_requirements2 by mistake.
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    } else {
        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        return;
    }

    if (!AddSwapchainDeviceExtension()) return;
    ASSERT_NO_FATAL_FAILURE(InitState());
    if (!InitSwapchain()) {
        printf("%s Cannot create surface or swapchain, skipping test\n", kSkipPrefix);
        return;
    }

    auto vkBindImageMemory2KHR =
        reinterpret_cast<PFN_vkBindImageMemory2KHR>(vk::GetDeviceProcAddr(m_device->device(), "vkBindImageMemory2KHR"));
    // Added: fail loudly instead of calling through a null function pointer.
    ASSERT_TRUE(vkBindImageMemory2KHR != nullptr);

    // Image create info compatible with the swapchain's surface format/extent so the
    // images can be bound to swapchain memory via VkBindImageMemorySwapchainInfoKHR.
    auto image_create_info = LvlInitStruct<VkImageCreateInfo>();
    image_create_info.imageType = VK_IMAGE_TYPE_2D;
    image_create_info.format = m_surface_formats[0].format;
    image_create_info.extent.width = m_surface_capabilities.minImageExtent.width;
    image_create_info.extent.height = m_surface_capabilities.minImageExtent.height;
    image_create_info.extent.depth = 1;
    image_create_info.mipLevels = 1;
    image_create_info.arrayLayers = 1;
    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    auto image_swapchain_create_info = LvlInitStruct<VkImageSwapchainCreateInfoKHR>();
    image_swapchain_create_info.swapchain = m_swapchain;

    image_create_info.pNext = &image_swapchain_create_info;
    std::array<VkImage, 3> images;

    m_errorMonitor->ExpectSuccess();
    for (auto &image : images) {
        vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
        auto bind_swapchain_info = LvlInitStruct<VkBindImageMemorySwapchainInfoKHR>();
        bind_swapchain_info.swapchain = m_swapchain;
        bind_swapchain_info.imageIndex = 0;

        auto bind_info = LvlInitStruct<VkBindImageMemoryInfo>(&bind_swapchain_info);
        bind_info.image = image;
        bind_info.memory = VK_NULL_HANDLE;  // memory comes from the swapchain, not a VkDeviceMemory
        bind_info.memoryOffset = 0;

        vkBindImageMemory2KHR(m_device->device(), 1, &bind_info);
    }
    // Destroying the swapchain while images are still bound to it must not produce errors.
    DestroySwapchain();
    m_errorMonitor->VerifyNotFound();
}
4707
TEST_F(VkPositiveLayerTest, ProtectedSwapchainImageColorAttachment) {
    TEST_DESCRIPTION(
        "Make sure images from protected swapchain are considered protected image when writing to it as a color attachment");

#if !defined(ANDROID)
    // Protected swapchains are guaranteed in Android Loader
    // VK_KHR_surface_protected_capabilities is needed for other platforms
    // Without device to test with, blocking this test from non-Android platforms for now
    printf("%s VK_KHR_surface_protected_capabilities test logic not implemented, skipping test for non-Android\n", kSkipPrefix);
    return;
#endif

    m_errorMonitor->ExpectSuccess();
    SetTargetApiVersion(VK_API_VERSION_1_1);

    if (!AddSurfaceInstanceExtension()) {
        printf("%s surface extensions not supported, skipping ProtectedSwapchainImageColorAttachment test\n", kSkipPrefix);
        return;
    }

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (!AddSwapchainDeviceExtension()) {
        printf("%s swapchain extensions not supported, skipping ProtectedSwapchainImageColorAttachment test\n", kSkipPrefix);
        return;
    }

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    auto protected_memory_features = LvlInitStruct<VkPhysicalDeviceProtectedMemoryFeatures>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&protected_memory_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);

    if (protected_memory_features.protectedMemory == VK_FALSE) {
        printf("%s protectedMemory feature not supported, skipped.\n", kSkipPrefix);
        return;
    }  // fixed: removed stray semicolon after this block

    // Turns m_commandBuffer into a unprotected command buffer
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Tests requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    if (!InitSurface()) {
        printf("%s Cannot create surface, skipping test\n", kSkipPrefix);
        return;
    }
    InitSwapchainInfo();

    // Create protected swapchain
    VkBool32 supported;
    vk::GetPhysicalDeviceSurfaceSupportKHR(gpu(), m_device->graphics_queue_node_index_, m_surface, &supported);
    if (!supported) {
        printf("%s Graphics queue does not support present, skipping test\n", kSkipPrefix);
        return;
    }

    auto surface = m_surface;
    VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    VkSurfaceTransformFlagBitsKHR preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;

    VkSwapchainCreateInfoKHR swapchain_create_info = {};
    swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchain_create_info.pNext = nullptr;  // fixed: was the integer literal 0
    swapchain_create_info.flags = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR;
    swapchain_create_info.surface = surface;
    swapchain_create_info.minImageCount = m_surface_capabilities.minImageCount;
    swapchain_create_info.imageFormat = m_surface_formats[0].format;
    swapchain_create_info.imageColorSpace = m_surface_formats[0].colorSpace;
    swapchain_create_info.imageExtent = {m_surface_capabilities.minImageExtent.width, m_surface_capabilities.minImageExtent.height};
    swapchain_create_info.imageArrayLayers = 1;
    swapchain_create_info.imageUsage = imageUsage;
    swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    swapchain_create_info.preTransform = preTransform;
    swapchain_create_info.compositeAlpha = m_surface_composite_alpha;
    swapchain_create_info.presentMode = m_surface_non_shared_present_mode;
    swapchain_create_info.clipped = VK_FALSE;
    swapchain_create_info.oldSwapchain = VK_NULL_HANDLE;  // fixed: was the integer literal 0
    // Deliberately bogus values: with EXCLUSIVE sharing mode these fields must be ignored.
    swapchain_create_info.queueFamilyIndexCount = 4094967295;  // This SHOULD get ignored
    uint32_t bogus_int = 99;
    swapchain_create_info.pQueueFamilyIndices = &bogus_int;
    ASSERT_VK_SUCCESS(vk::CreateSwapchainKHR(device(), &swapchain_create_info, nullptr, &m_swapchain));

    // Get VkImage from swapchain which should be protected
    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR =
        (PFN_vkGetSwapchainImagesKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkGetSwapchainImagesKHR");
    ASSERT_TRUE(vkGetSwapchainImagesKHR != nullptr);
    uint32_t image_count;
    std::vector<VkImage> swapchain_images;
    vkGetSwapchainImagesKHR(device(), m_swapchain, &image_count, nullptr);
    swapchain_images.resize(image_count, VK_NULL_HANDLE);
    vkGetSwapchainImagesKHR(device(), m_swapchain, &image_count, swapchain_images.data());
    VkImage protected_image = swapchain_images.at(0);  // only need 1 image to test

    // Create a protected image view
    VkImageView image_view;
    VkImageViewCreateInfo image_view_create_info = {
        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        nullptr,
        0,
        protected_image,
        VK_IMAGE_VIEW_TYPE_2D,
        swapchain_create_info.imageFormat,
        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
         VK_COMPONENT_SWIZZLE_IDENTITY},
        {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
    };
    ASSERT_VK_SUCCESS(vk::CreateImageView(device(), &image_view_create_info, nullptr, &image_view));

    // A renderpass and framebuffer that contains a protected color image view
    VkAttachmentDescription attachments[1] = {{0, swapchain_create_info.imageFormat, VK_SAMPLE_COUNT_1_BIT,
                                               VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                               VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
                                               VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};
    VkAttachmentReference references[1] = {{0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};
    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, references, nullptr, nullptr, 0, nullptr};
    VkSubpassDependency dependency = {0,
                                      0,
                                      VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                      VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                      VK_ACCESS_SHADER_WRITE_BIT,
                                      VK_ACCESS_SHADER_WRITE_BIT,
                                      VK_DEPENDENCY_BY_REGION_BIT};
    // Use framework render pass and framebuffer so pipeline helper uses it
    m_renderPass_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attachments, 1, &subpass, 1, &dependency};
    ASSERT_VK_SUCCESS(vk::CreateRenderPass(device(), &m_renderPass_info, nullptr, &m_renderPass));
    m_framebuffer_info = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
                          nullptr,
                          0,
                          m_renderPass,
                          1,
                          &image_view,
                          swapchain_create_info.imageExtent.width,
                          swapchain_create_info.imageExtent.height,
                          1};
    ASSERT_VK_SUCCESS(vk::CreateFramebuffer(device(), &m_framebuffer_info, nullptr, &m_framebuffer));

    // basic pipeline to allow for a valid vkCmdDraw()
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    // Create a protected command buffer/pool to use
    VkCommandPoolObj protectedCommandPool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_PROTECTED_BIT);
    VkCommandBufferObj protectedCommandBuffer(m_device, &protectedCommandPool);

    protectedCommandBuffer.begin();
    VkRect2D render_area = {{0, 0}, swapchain_create_info.imageExtent};
    VkRenderPassBeginInfo render_pass_begin = {
        VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, m_renderPass, m_framebuffer, render_area, 0, nullptr};
    vk::CmdBeginRenderPass(protectedCommandBuffer.handle(), &render_pass_begin, VK_SUBPASS_CONTENTS_INLINE);
    vk::CmdBindPipeline(protectedCommandBuffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    // This should be valid since the framebuffer color attachment is a protected swapchain image
    vk::CmdDraw(protectedCommandBuffer.handle(), 3, 1, 0, 0);
    vk::CmdEndRenderPass(protectedCommandBuffer.handle());
    protectedCommandBuffer.end();

    DestroySwapchain();
    m_errorMonitor->VerifyNotFound();
}
4884
TEST_F(VkPositiveLayerTest, ImageDrmFormatModifier) {
    // See https://github.com/KhronosGroup/Vulkan-ValidationLayers/pull/2610
    TEST_DESCRIPTION("Create image and imageView using VK_EXT_image_drm_format_modifier");

    SetTargetApiVersion(VK_API_VERSION_1_1);  // for extension dependencies
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (IsPlatform(kMockICD)) {
        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
        return;
    }

    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Vulkan 1.1 not supported but required. Skipping\n", kSkipPrefix);
        return;
    }

    if (!DeviceExtensionSupported(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME)) {
        printf("%s VK_EXT_image_drm_format_modifier is not supported but required. Skipping\n", kSkipPrefix);
        return;
    }

    m_device_extension_names.push_back(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitState());

    // we just hope that one of these formats supports modifiers
    // for more detailed checking, we could also check multi-planar formats.
    auto format_list = {
        VK_FORMAT_B8G8R8A8_UNORM,
        VK_FORMAT_B8G8R8A8_SRGB,
        VK_FORMAT_R8G8B8A8_UNORM,
        VK_FORMAT_R8G8B8A8_SRGB,
    };

    for (auto format : format_list) {
        std::vector<uint64_t> mods;

        // get general features and modifiers
        // (consistency: use LvlInitStruct like the rest of the file instead of manual sType init)
        auto modp = LvlInitStruct<VkDrmFormatModifierPropertiesListEXT>();
        auto fmtp = LvlInitStruct<VkFormatProperties2>(&modp);

        vk::GetPhysicalDeviceFormatProperties2(gpu(), format, &fmtp);

        if (modp.drmFormatModifierCount > 0) {
            // the first call to vkGetPhysicalDeviceFormatProperties2 did only
            // retrieve the number of modifiers, we now have to retrieve
            // the modifiers
            std::vector<VkDrmFormatModifierPropertiesEXT> mod_props(modp.drmFormatModifierCount);
            modp.pDrmFormatModifierProperties = mod_props.data();

            vk::GetPhysicalDeviceFormatProperties2(gpu(), format, &fmtp);

            // keep only modifiers that support the usages requested on the image below
            for (auto i = 0u; i < modp.drmFormatModifierCount; ++i) {
                auto &mod = modp.pDrmFormatModifierProperties[i];
                auto features = VK_FORMAT_FEATURE_TRANSFER_DST_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;

                if ((mod.drmFormatModifierTilingFeatures & features) != features) {
                    continue;
                }

                mods.push_back(mod.drmFormatModifier);
            }
        }

        if (mods.empty()) {
            continue;
        }

        // create image
        auto ci = LvlInitStruct<VkImageCreateInfo>();
        ci.flags = 0;
        ci.imageType = VK_IMAGE_TYPE_2D;
        ci.format = format;
        ci.extent = {128, 128, 1};
        ci.mipLevels = 1;
        ci.arrayLayers = 1;
        ci.samples = VK_SAMPLE_COUNT_1_BIT;
        ci.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
        ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
        ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

        auto mod_list = LvlInitStruct<VkImageDrmFormatModifierListCreateInfoEXT>();
        mod_list.pDrmFormatModifiers = mods.data();
        // fixed: make the size_t -> uint32_t narrowing explicit
        mod_list.drmFormatModifierCount = static_cast<uint32_t>(mods.size());
        ci.pNext = &mod_list;

        VkImage image;
        m_errorMonitor->ExpectSuccess();
        VkResult err = vk::CreateImage(device(), &ci, nullptr, &image);
        ASSERT_VK_SUCCESS(err);
        m_errorMonitor->VerifyNotFound();

        // bind memory
        VkPhysicalDeviceMemoryProperties phys_mem_props;
        vk::GetPhysicalDeviceMemoryProperties(gpu(), &phys_mem_props);
        VkMemoryRequirements mem_reqs;
        vk::GetImageMemoryRequirements(device(), image, &mem_reqs);
        VkDeviceMemory mem_obj = VK_NULL_HANDLE;
        VkMemoryPropertyFlagBits mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

        // pick the first DEVICE_LOCAL memory type that satisfies the image's requirements
        for (uint32_t type = 0; type < phys_mem_props.memoryTypeCount; type++) {
            if ((mem_reqs.memoryTypeBits & (1 << type)) &&
                ((phys_mem_props.memoryTypes[type].propertyFlags & mem_props) == mem_props)) {
                auto alloc_info = LvlInitStruct<VkMemoryAllocateInfo>();
                alloc_info.allocationSize = mem_reqs.size;
                alloc_info.memoryTypeIndex = type;
                ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &alloc_info, nullptr, &mem_obj));
                break;
            }
        }

        ASSERT_NE((VkDeviceMemory)VK_NULL_HANDLE, mem_obj);
        ASSERT_VK_SUCCESS(vk::BindImageMemory(device(), image, mem_obj, 0));

        // create image view
        VkImageViewCreateInfo ivci = {
            VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            nullptr,
            0,
            image,
            VK_IMAGE_VIEW_TYPE_2D,
            format,
            {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
             VK_COMPONENT_SWIZZLE_IDENTITY},
            {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
        };

        CreateImageViewTest(*this, &ivci);

        // for more detailed checking, we could export the image to dmabuf
        // and then import it again (using VkImageDrmFormatModifierExplicitCreateInfoEXT)

        vk::FreeMemory(device(), mem_obj, nullptr);
        vk::DestroyImage(device(), image, nullptr);
    }
}
5025
TEST_F(VkPositiveLayerTest, AllowedDuplicateStype) {
    TEST_DESCRIPTION("Pass duplicate structs to whose vk.xml definition contains allowduplicate=true");

    ASSERT_NO_FATAL_FAILURE(InitFramework());

    // Chain two VkDebugUtilsMessengerCreateInfoEXT structs into the same pNext chain;
    // this sType is explicitly marked allowduplicate=true in vk.xml, so no error is expected.
    auto messenger_ci_inner = LvlInitStruct<VkDebugUtilsMessengerCreateInfoEXT>();
    auto messenger_ci_outer = LvlInitStruct<VkDebugUtilsMessengerCreateInfoEXT>(&messenger_ci_inner);

    VkInstanceCreateInfo ici = {};
    ici.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    ici.pNext = &messenger_ci_outer;
    ici.enabledLayerCount = static_cast<uint32_t>(instance_layers_.size());
    ici.ppEnabledLayerNames = instance_layers_.data();

    VkInstance local_instance;
    m_errorMonitor->ExpectSuccess();
    ASSERT_VK_SUCCESS(vk::CreateInstance(&ici, nullptr, &local_instance));
    m_errorMonitor->VerifyNotFound();

    ASSERT_NO_FATAL_FAILURE(vk::DestroyInstance(local_instance, nullptr));
}
5048
TEST_F(VkPositiveLayerTest, MeshShaderOnly) {
    TEST_DESCRIPTION("Test using a mesh shader without a vertex shader.");

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework());
    std::array<const char *, 2> required_device_extensions = {
        {VK_NV_MESH_SHADER_EXTENSION_NAME, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME}};
    for (auto device_extension : required_device_extensions) {
        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
            m_device_extension_names.push_back(device_extension);
        } else {
            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
            return;
        }
    }

    if (IsPlatform(kMockICD) || DeviceSimulation()) {
        // Fixed typo: "suppored" -> "supported".
        printf("%sNot supported by MockICD, skipping tests\n", kSkipPrefix);
        return;
    }

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // Create a device that enables mesh_shader
    auto mesh_shader_features = LvlInitStruct<VkPhysicalDeviceMeshShaderFeaturesNV>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&mesh_shader_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    if (mesh_shader_features.meshShader != VK_TRUE) {
        printf("%sMesh shader feature not supported\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // A self-contained mesh shader emitting a single triangle; no vertex input stages involved.
    static const char meshShaderText[] = R"glsl(
        #version 450
        #extension GL_NV_mesh_shader : require
        layout(local_size_x = 1) in;
        layout(max_vertices = 3) out;
        layout(max_primitives = 1) out;
        layout(triangles) out;
        void main() {
              gl_MeshVerticesNV[0].gl_Position = vec4(-1.0, -1.0, 0, 1);
              gl_MeshVerticesNV[1].gl_Position = vec4( 1.0, -1.0, 0, 1);
              gl_MeshVerticesNV[2].gl_Position = vec4( 0.0,  1.0, 0, 1);
              gl_PrimitiveIndicesNV[0] = 0;
              gl_PrimitiveIndicesNV[1] = 1;
              gl_PrimitiveIndicesNV[2] = 2;
              gl_PrimitiveCountNV = 1;
        }
        )glsl";

    VkShaderObj ms(m_device, meshShaderText, VK_SHADER_STAGE_MESH_BIT_NV, this);
    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.shader_stages_ = {ms.GetStageCreateInfo(), fs.GetStageCreateInfo()};

    // Ensure pVertexInputState and pInputAssembly state are null, as these should be ignored.
    helper.gp_ci_.pVertexInputState = nullptr;
    helper.gp_ci_.pInputAssemblyState = nullptr;

    helper.InitState();

    m_errorMonitor->ExpectSuccess();
    helper.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
5128
TEST_F(VkPositiveLayerTest, CopyImageSubresource) {
    ASSERT_NO_FATAL_FAILURE(InitFramework());
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    // 2D image with 5 mip levels and 2 array layers; copy between different subresources
    // of the SAME image, which is legal as long as they do not overlap.
    VkImageUsageFlags usage =
        VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image(m_device);
    auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 2, 5, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image.InitNoLayout(image_ci);
    ASSERT_TRUE(image.initialized());

    m_errorMonitor->ExpectSuccess();

    // Source: mip 0 / layer 0.  Destination: mip 1 / layer 3 (disjoint subresources).
    VkImageSubresourceLayers src_layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceLayers dst_layer{VK_IMAGE_ASPECT_COLOR_BIT, 1, 3, 1};
    VkOffset3D zero_offset{0, 0, 0};
    VkExtent3D full_extent{128 / 2, 128 / 2, 1};  // <-- image type is 2D
    VkImageCopy region = {src_layer, zero_offset, dst_layer, zero_offset, full_extent};
    auto init_layout = VK_IMAGE_LAYOUT_UNDEFINED;
    auto src_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    auto dst_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    auto final_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

    m_commandBuffer->begin();

    auto cb = m_commandBuffer->handle();

    // First submission: transition the source subresource and clear it.
    VkImageSubresourceRange src_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    VkImageMemoryBarrier image_barriers[2];

    image_barriers[0] = LvlInitStruct<VkImageMemoryBarrier>();
    image_barriers[0].srcAccessMask = 0;
    image_barriers[0].dstAccessMask = 0;
    image_barriers[0].image = image.handle();
    image_barriers[0].subresourceRange = src_range;
    image_barriers[0].oldLayout = init_layout;
    image_barriers[0].newLayout = dst_layout;

    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           image_barriers);
    VkClearColorValue clear_color{};
    vk::CmdClearColorImage(cb, image.handle(), dst_layout, &clear_color, 1, &src_range);
    m_commandBuffer->end();

    auto submit_info = LvlInitStruct<VkSubmitInfo>();
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &m_commandBuffer->handle();

    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    vk::QueueWaitIdle(m_device->m_queue);

    // Second submission: move the source to TRANSFER_SRC, the destination subresource
    // to TRANSFER_DST, then perform the intra-image copy.
    m_commandBuffer->begin();

    image_barriers[0].oldLayout = dst_layout;
    image_barriers[0].newLayout = src_layout;

    VkImageSubresourceRange dst_range{VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 3, 1};
    image_barriers[1] = LvlInitStruct<VkImageMemoryBarrier>();
    image_barriers[1].srcAccessMask = 0;
    image_barriers[1].dstAccessMask = 0;
    image_barriers[1].image = image.handle();
    image_barriers[1].subresourceRange = dst_range;
    image_barriers[1].oldLayout = init_layout;
    image_barriers[1].newLayout = dst_layout;

    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 2,
                           image_barriers);

    // Fixed: "&region" had been mangled into the mis-encoded token "®ion".
    vk::CmdCopyImage(cb, image.handle(), src_layout, image.handle(), dst_layout, 1, &region);

    // Finally transition both subresources to the color-attachment layout.
    image_barriers[0].oldLayout = src_layout;
    image_barriers[0].newLayout = final_layout;
    image_barriers[1].oldLayout = dst_layout;
    image_barriers[1].newLayout = final_layout;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 2,
                           image_barriers);
    m_commandBuffer->end();

    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
    vk::QueueWaitIdle(m_device->m_queue);
    m_errorMonitor->VerifyNotFound();
}
5212
TEST_F(VkPositiveLayerTest, ImageDescriptorSubresourceLayout) {
    // Purpose: verify that descriptor image-layout validation is tracked per *subresource*.
    // The sampled view maps only mip 0 / layer 3 of a 5-layer image; that subresource is
    // transitioned to the descriptor's layout while other layers stay in TRANSFER_DST,
    // which must not produce a layout-mismatch error at draw or submit time.
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    bool maint2_support = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_2_EXTENSION_NAME);
    if (maint2_support) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_2_EXTENSION_NAME);
    } else {
        // Deliberately no early return: only the relaxed layout-matching subtest depends on
        // maintenance2; the rest of the test still runs without it.
        printf("%s Relaxed layout matching subtest requires API >= 1.1 or KHR_MAINTENANCE2 extension, unavailable - skipped.\n",
               kSkipPrefix);
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Single combined-image-sampler binding visible to all stages.
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                       });
    VkDescriptorSet descriptorSet = descriptor_set.set_;

    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});

    // Create image, view, and sampler
    const VkFormat format = VK_FORMAT_B8G8R8A8_UNORM;
    VkImageObj image(m_device);
    auto usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    // 128x128, 1 mip level, 5 array layers.
    auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 5, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image.Init(image_ci);
    ASSERT_TRUE(image.initialized());

    // view_range: the single subresource (layer 3) that the descriptor's view maps.
    VkImageSubresourceRange view_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 3, 1};
    // first_range: layer 0 only, transitioned independently of the view's layer.
    VkImageSubresourceRange first_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    // full_range: all 5 layers, used for the initial whole-image transition.
    VkImageSubresourceRange full_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 5};
    vk_testing::ImageView view;
    auto image_view_create_info = lvl_init_struct<VkImageViewCreateInfo>();
    image_view_create_info.image = image.handle();
    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
    image_view_create_info.format = format;
    image_view_create_info.subresourceRange = view_range;

    view.init(*m_device, image_view_create_info);
    ASSERT_TRUE(view.initialized());

    // Create Sampler
    vk_testing::Sampler sampler;
    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    sampler.init(*m_device, sampler_ci);
    ASSERT_TRUE(sampler.initialized());

    // Setup structure for descriptor update with sampler, for update in do_test below
    VkDescriptorImageInfo img_info = {};
    img_info.sampler = sampler.handle();

    VkWriteDescriptorSet descriptor_write;
    memset(&descriptor_write, 0, sizeof(descriptor_write));
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstSet = descriptorSet;
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = 1;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    descriptor_write.pImageInfo = &img_info;

    // Create PSO to be used for draw-time errors below
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);
    pipe.AddDefaultColorAttachment();
    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

    VkViewport viewport = {0, 0, 16, 16, 0, 1};
    VkRect2D scissor = {{0, 0}, {16, 16}};

    VkCommandBufferObj cmd_buf(m_device, m_commandPool);

    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &cmd_buf.handle();

    enum TestType {
        kInternal,  // Image layout mismatch is *within* a given command buffer
        kExternal   // Image layout mismatch is with the current state of the image, found at QueueSubmit
    };
    std::array<TestType, 2> test_list = {{kInternal, kExternal}};

    // NOTE(review): aspect_mask is never referenced inside the lambda body; the aspect
    // comes from the subresource ranges captured above.
    auto do_test = [&](VkImageObj *image, vk_testing::ImageView *view, VkImageAspectFlags aspect_mask,
                       VkImageLayout descriptor_layout) {
        // Set up the descriptor
        img_info.imageView = view->handle();
        img_info.imageLayout = descriptor_layout;
        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

        for (TestType test_type : test_list) {
            auto init_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();

            cmd_buf.begin();
            m_errorMonitor->ExpectSuccess();
            // First transition the whole image UNDEFINED -> TRANSFER_DST...
            image_barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
            image_barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
            image_barrier.image = image->handle();
            image_barrier.subresourceRange = full_range;
            image_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            image_barrier.newLayout = init_layout;

            cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
                                    nullptr, 1, &image_barrier);

            // ...then move only layer 0 and the view's layer (3) to the descriptor layout,
            // leaving layers 1, 2 and 4 in TRANSFER_DST.
            image_barrier.subresourceRange = first_range;
            image_barrier.oldLayout = init_layout;
            image_barrier.newLayout = descriptor_layout;
            cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
                                    nullptr, 1, &image_barrier);

            image_barrier.subresourceRange = view_range;
            image_barrier.oldLayout = init_layout;
            image_barrier.newLayout = descriptor_layout;
            cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
                                    nullptr, 1, &image_barrier);
            m_errorMonitor->VerifyNotFound();

            if (test_type == kExternal) {
                // The image layout is external to the command buffer we are recording to test. Submit to push to instance scope.
                cmd_buf.end();
                m_errorMonitor->ExpectSuccess();
                vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
                vk::QueueWaitIdle(m_device->m_queue);
                m_errorMonitor->VerifyNotFound();
                cmd_buf.begin();
            }

            // Draw with the descriptor bound; no layout error is expected because the
            // view's subresource is in descriptor_layout.
            m_errorMonitor->ExpectSuccess();
            cmd_buf.BeginRenderPass(m_renderPassBeginInfo);
            vk::CmdBindPipeline(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
            vk::CmdBindDescriptorSets(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                                      &descriptorSet, 0, NULL);
            vk::CmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
            vk::CmdSetScissor(cmd_buf.handle(), 0, 1, &scissor);

            cmd_buf.Draw(1, 0, 0, 0);

            cmd_buf.EndRenderPass();
            cmd_buf.end();
            m_errorMonitor->VerifyNotFound();

            // Submit cmd buffer
            m_errorMonitor->ExpectSuccess();
            vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
            vk::QueueWaitIdle(m_device->m_queue);
            m_errorMonitor->VerifyNotFound();
        }
    };
    do_test(&image, &view, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
}
5369
TEST_F(VkPositiveLayerTest, DevsimLoaderCrash) {
    TEST_DESCRIPTION("Test to see if instance extensions are called during CreateInstance.");

    // Regression guard for https://github.com/KhronosGroup/Vulkan-Loader/issues/537.
    // A layer must not dispatch extension entry points from its CreateInstance hook;
    // on some loader/driver combinations (AMD observed) doing so crashes, while others
    // (nvidia, android) tolerate it. This test therefore only catches the bug when run
    // on hardware that actually crashes.

    // Enable every instance extension the implementation reports so that any illegal
    // CreateInstance-time extension call has a chance to fire.
    const auto &extension_info_map = InstanceExtensions::get_info_map();
    for (const auto &entry : extension_info_map) {
        const char *ext_name = entry.first.c_str();
        if (InstanceExtensionSupported(ext_name)) {
            m_instance_extension_names.emplace_back(ext_name);
        }
    }

    // Success criterion is simply that instance creation completes without crashing.
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
}
5390
TEST_F(VkPositiveLayerTest, ImageDescriptor3D2DSubresourceLayout) {
    TEST_DESCRIPTION("Verify renderpass layout transitions for a 2d ImageView created from a 3d Image.");
    // Requires Vulkan 1.1: VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT (2D views of 3D images)
    // was promoted from VK_KHR_maintenance1.
    m_errorMonitor->ExpectSuccess();
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Tests requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Single combined-image-sampler binding visible to all stages.
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                       });
    VkDescriptorSet descriptorSet = descriptor_set.set_;

    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});

    // Create image, view, and sampler
    const VkFormat format = VK_FORMAT_B8G8R8A8_UNORM;
    VkImageObj image_3d(m_device);
    VkImageObj other_image(m_device);
    auto usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    static const uint32_t kWidth = 128;
    static const uint32_t kHeight = 128;

    // 3D image flagged 2D-array-compatible so a 2D view can select one depth slice.
    auto image_ci_3d = lvl_init_struct<VkImageCreateInfo>();
    image_ci_3d.flags = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
    image_ci_3d.imageType = VK_IMAGE_TYPE_3D;
    image_ci_3d.format = format;
    image_ci_3d.extent.width = kWidth;
    image_ci_3d.extent.height = kHeight;
    image_ci_3d.extent.depth = 8;
    image_ci_3d.mipLevels = 1;
    image_ci_3d.arrayLayers = 1;
    image_ci_3d.samples = VK_SAMPLE_COUNT_1_BIT;
    image_ci_3d.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_ci_3d.usage = usage;
    image_3d.Init(image_ci_3d);
    ASSERT_TRUE(image_3d.initialized());

    // Ordinary 2D image used as the sampled descriptor image; the 3D image is the attachment.
    other_image.Init(kWidth, kHeight, 1, format, usage, VK_IMAGE_TILING_OPTIMAL, 0);
    ASSERT_TRUE(other_image.initialized());

    // The image view is a 2D slice of the 3D image at depth = 4, which we request by
    // asking for arrayLayer = 4
    VkImageSubresourceRange view_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 4, 1};
    // But, the spec says:
    // Automatic layout transitions apply to the entire image subresource attached
    // to the framebuffer. If the attachment view is a 2D or 2D array view of a
    // 3D image, even if the attachment view only refers to a subset of the slices
    // of the selected mip level of the 3D image, automatic layout transitions apply
    // to the entire subresource referenced which is the entire mip level in this case.
    VkImageSubresourceRange full_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    vk_testing::ImageView view_2d, other_view;
    auto image_view_create_info = lvl_init_struct<VkImageViewCreateInfo>();
    image_view_create_info.image = image_3d.handle();
    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
    image_view_create_info.format = format;
    image_view_create_info.subresourceRange = view_range;

    view_2d.init(*m_device, image_view_create_info);
    ASSERT_TRUE(view_2d.initialized());

    image_view_create_info.image = other_image.handle();
    image_view_create_info.subresourceRange = full_range;
    other_view.init(*m_device, image_view_create_info);
    ASSERT_TRUE(other_view.initialized());

    // Render pass loads/stores in SHADER_READ_ONLY and transitions to COLOR_ATTACHMENT
    // for the subpass — these automatic transitions are what the test exercises.
    std::vector<VkAttachmentDescription> attachments = {
        {0, format, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL},
    };

    std::vector<VkAttachmentReference> color = {
        {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
    };

    VkSubpassDescription subpass = {
        0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, (uint32_t)color.size(), color.data(), nullptr, nullptr, 0, nullptr};

    // External dependencies broad enough to cover the barriers and draws recorded below.
    std::vector<VkSubpassDependency> deps = {
        {VK_SUBPASS_EXTERNAL, 0,
         (VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
          VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT |
          VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT),
         (VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
          VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT),
         (VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
          VK_ACCESS_TRANSFER_WRITE_BIT),
         (VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_MEMORY_WRITE_BIT), 0},
        {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
         (VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT), VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
         (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT), 0},
    };

    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
                                   nullptr,
                                   0,
                                   (uint32_t)attachments.size(),
                                   attachments.data(),
                                   1,
                                   &subpass,
                                   (uint32_t)deps.size(),
                                   deps.data()};
    // Create Sampler
    vk_testing::Sampler sampler;
    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    sampler.init(*m_device, sampler_ci);
    ASSERT_TRUE(sampler.initialized());

    // Setup structure for descriptor update with sampler, for update in do_test below
    VkDescriptorImageInfo img_info = {};
    img_info.sampler = sampler.handle();

    VkWriteDescriptorSet descriptor_write;
    memset(&descriptor_write, 0, sizeof(descriptor_write));
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstSet = descriptorSet;
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = 1;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    descriptor_write.pImageInfo = &img_info;

    // Create PSO to be used for draw-time errors below
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);
    pipe.AddDefaultColorAttachment();
    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

    VkViewport viewport = {0, 0, kWidth, kHeight, 0, 1};
    VkRect2D scissor = {{0, 0}, {kWidth, kHeight}};

    VkCommandBufferObj cmd_buf(m_device, m_commandPool);

    VkSubmitInfo submit_info = {};
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &cmd_buf.handle();

    enum TestType {
        kInternal,  // Image layout mismatch is *within* a given command buffer
        kExternal   // Image layout mismatch is with the current state of the image, found at QueueSubmit
    };
    std::array<TestType, 2> test_list = {{kInternal, kExternal}};

    // image/view: 3D image + 2D slice view used as the framebuffer attachment.
    // o_image/o_view: plain 2D image + view bound to the sampled descriptor.
    // NOTE(review): aspect_mask is never referenced inside the lambda body.
    auto do_test = [&](VkImageObj *image, vk_testing::ImageView *view, VkImageObj *o_image, vk_testing::ImageView *o_view,
                       VkImageAspectFlags aspect_mask, VkImageLayout descriptor_layout) {
        // Set up the descriptor
        img_info.imageView = o_view->handle();
        img_info.imageLayout = descriptor_layout;
        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);

        for (TestType test_type : test_list) {
            auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();

            // Fresh render pass / framebuffer per iteration; destroyed at loop end.
            VkRenderPass rp;
            VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
            ASSERT_VK_SUCCESS(err);

            VkFramebufferCreateInfo fbci = {
                VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view->handle(), kWidth, kHeight, 1};
            VkFramebuffer fb;
            err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
            ASSERT_VK_SUCCESS(err);

            cmd_buf.begin();
            // Put both images into descriptor_layout so BeginRenderPass (initialLayout
            // SHADER_READ_ONLY) and the sampled read agree with tracked state.
            image_barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
            image_barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
            image_barrier.image = image->handle();
            image_barrier.subresourceRange = full_range;
            image_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            image_barrier.newLayout = descriptor_layout;

            cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
                                    nullptr, 1, &image_barrier);
            image_barrier.image = o_image->handle();
            cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
                                    nullptr, 1, &image_barrier);

            if (test_type == kExternal) {
                // The image layout is external to the command buffer we are recording to test. Submit to push to instance scope.
                cmd_buf.end();
                vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
                vk::QueueWaitIdle(m_device->m_queue);
                cmd_buf.begin();
            }

            m_errorMonitor->ExpectSuccess();
            m_renderPassBeginInfo.renderPass = rp;
            m_renderPassBeginInfo.framebuffer = fb;
            m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};

            cmd_buf.BeginRenderPass(m_renderPassBeginInfo);
            vk::CmdBindPipeline(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
            vk::CmdBindDescriptorSets(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                                      &descriptorSet, 0, NULL);
            vk::CmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
            vk::CmdSetScissor(cmd_buf.handle(), 0, 1, &scissor);

            cmd_buf.Draw(1, 0, 0, 0);

            cmd_buf.EndRenderPass();
            cmd_buf.end();

            // Submit cmd buffer
            vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
            vk::QueueWaitIdle(m_device->m_queue);
            vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
            vk::DestroyRenderPass(m_device->device(), rp, nullptr);
        }
    };
    do_test(&image_3d, &view_2d, &other_image, &other_view, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
    m_errorMonitor->VerifyNotFound();
}
5616
TEST_F(VkPositiveLayerTest, RenderPassInputResolve) {
    TEST_DESCRIPTION("Create render pass where input attachment == resolve attachment");

    // VK_KHR_get_physical_device_properties2 is a prerequisite for the renderPass2 path.
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
    ASSERT_NO_FATAL_FAILURE(InitState());

    // Attachment 0 doubles as input and resolve target; attachment 1 is the
    // multisampled color attachment being resolved.
    std::vector<VkAttachmentDescription> attach_descs;
    // input attachment (also the resolve destination)
    attach_descs.push_back({0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                            VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
                            VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL});
    // multisampled color attachment
    attach_descs.push_back({0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                            VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
                            VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL});
    // single-sample resolve attachment description (same image index 0 semantics as input)
    attach_descs.push_back({0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                            VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
                            VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL});

    VkAttachmentReference input_ref = {0, VK_IMAGE_LAYOUT_GENERAL};
    VkAttachmentReference color_ref = {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
    // Only the first entry is consumed (one color attachment); the second is inert.
    VkAttachmentReference resolve_refs[2] = {
        {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
        {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
    };

    VkSubpassDescription subpass = {};
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.inputAttachmentCount = 1;
    subpass.pInputAttachments = &input_ref;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments = &color_ref;
    subpass.pResolveAttachments = resolve_refs;

    VkRenderPassCreateInfo rpci = {};
    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rpci.attachmentCount = static_cast<uint32_t>(attach_descs.size());
    rpci.pAttachments = attach_descs.data();
    rpci.subpassCount = 1;
    rpci.pSubpasses = &subpass;

    // Must succeed with both the renderPass1 and (when supported) renderPass2 entry points.
    PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported);
}
5677
TEST_F(VkPositiveLayerTest, SpecializationUnused) {
    TEST_DESCRIPTION("Make sure an unused spec constant is valid to us");

    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // layout (constant_id = 2) const int a = 3;
    std::string cs_src = R"(
               OpCapability Shader
               OpMemoryModel Logical GLSL450
               OpEntryPoint GLCompute %main "main"
               OpExecutionMode %main LocalSize 1 1 1
               OpSource GLSL 450
               OpDecorate %a SpecId 2
       %void = OpTypeVoid
       %func = OpTypeFunction %void
        %int = OpTypeInt 32 1
          %a = OpSpecConstant %int 3
       %main = OpFunction %void None %func
      %label = OpLabel
               OpReturn
               OpFunctionEnd
        )";

    // Only constant_id 2 exists in the shader; the other entries are deliberately unused.
    VkSpecializationMapEntry map_entries[4] = {
        {0, 0, 1},  // unused
        {1, 0, 1},  // unused
        {2, 0, 4},  // OpTypeInt 32
        {3, 0, 4},  // unused
    };

    int32_t spec_data = 0;
    VkSpecializationInfo specialization_info = {};
    specialization_info.mapEntryCount = 4;
    specialization_info.pMapEntries = map_entries;
    specialization_info.dataSize = sizeof(spec_data);
    specialization_info.pData = &spec_data;

    const auto set_info = [&](CreateComputePipelineHelper &helper) {
        helper.cs_.reset(new VkShaderObj(m_device, cs_src, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", &specialization_info));
    };
    // Unused entries must not trigger errors or warnings.
    CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit | kWarningBit, "", true);

    // Even if the ID is never seen in VkSpecializationMapEntry the OpSpecConstant will use the default and still is valid
    specialization_info.mapEntryCount = 1;
    CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit | kWarningBit, "", true);

    // try another random unused value other than zero
    map_entries[0].constantID = 100;
    CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit | kWarningBit, "", true);
}
5730
TEST_F(VkPositiveLayerTest, FillBufferCmdPoolTransferQueue) {
    TEST_DESCRIPTION(
        "Use a command buffer with vkCmdFillBuffer that was allocated from a command pool that does not support graphics or "
        "compute opeartions");

    // vkCmdFillBuffer is only allowed on transfer-only queues from Vulkan 1.1 onward.
    uint32_t version = SetTargetApiVersion(VK_API_VERSION_1_1);
    if (version < VK_API_VERSION_1_1) {
        printf("%s At least Vulkan version 1.1 is required, skipping test.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(Init());
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Tests requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }
    m_errorMonitor->ExpectSuccess();

    // Locate a queue family with neither graphics nor compute capability.
    const uint32_t transfer_qfi = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT);
    if (transfer_qfi == UINT32_MAX) {
        printf("%s Required queue families not present (non-graphics non-compute capable required).\n", kSkipPrefix);
        return;
    }
    VkQueueObj *transfer_queue = m_device->queue_family_queues(transfer_qfi)[0].get();

    VkCommandPoolObj transfer_pool(m_device, transfer_qfi, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
    VkCommandBufferObj transfer_cb(m_device, &transfer_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, transfer_queue);

    VkBufferObj dst_buffer;
    dst_buffer.init_as_dst(*m_device, (VkDeviceSize)20, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);

    // Recording the fill on the transfer-only command buffer must not produce errors.
    transfer_cb.begin();
    transfer_cb.FillBuffer(dst_buffer.handle(), 0, 12, 0x11111111);
    transfer_cb.end();
    m_errorMonitor->VerifyNotFound();
}
5768
TEST_F(VkPositiveLayerTest, ShaderAtomicInt64) {
    TEST_DESCRIPTION("Test VK_KHR_shader_atomic_int64.");
    SetTargetApiVersion(VK_API_VERSION_1_1);

    // Needed to query the extension feature struct via vkGetPhysicalDeviceFeatures2KHR.
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME);
        return;
    }

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // Query shaderInt64 plus the extension's buffer/shared atomic feature bits.
    auto atomic_int64_features = lvl_init_struct<VkPhysicalDeviceShaderAtomicInt64Features>();
    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&atomic_int64_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);

    if (features2.features.shaderInt64 == VK_FALSE) {
        printf("%s shaderInt64 feature not supported, skipping tests\n", kSkipPrefix);
        return;
    }

    // at least shaderBufferInt64Atomics is guaranteed to be supported
    if (atomic_int64_features.shaderBufferInt64Atomics == VK_FALSE) {
        printf(
            "%s shaderBufferInt64Atomics feature is required for VK_KHR_shader_atomic_int64 but not expose, likely driver bug, "
            "skipping tests\n",
            kSkipPrefix);
        return;
    }

    // Enable the queried features on the device.
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
        printf("%s At least Vulkan version 1.1 is required for SPIR-V 1.3, skipping test.\n", kSkipPrefix);
        return;
    }

    // Common preamble shared by the three compute shader variants below.
    std::string cs_base = R"glsl(
        #version 450
        #extension GL_EXT_shader_explicit_arithmetic_types_int64 : enable
        #extension GL_EXT_shader_atomic_int64 : enable
        #extension GL_KHR_memory_scope_semantics : enable
        shared uint64_t x;
        layout(set = 0, binding = 0) buffer ssbo { uint64_t y; };
        void main() {
    )glsl";

    // clang-format off
    // StorageBuffer storage class
    std::string cs_storage_buffer = cs_base + R"glsl(
           atomicAdd(y, 1);
        }
    )glsl";

    // StorageBuffer storage class using AtomicStore
    // atomicStore is slightly different than other atomics, so good edge case
    std::string cs_store = cs_base + R"glsl(
           atomicStore(y, 1ul, gl_ScopeDevice, gl_StorageSemanticsBuffer, gl_SemanticsRelaxed);
        }
    )glsl";

    // Workgroup storage class
    std::string cs_workgroup = cs_base + R"glsl(
           atomicAdd(x, 1);
           barrier();
           y = x + 1;
        }
    )glsl";
    // clang-format on

    const char *current_shader = nullptr;
    const auto set_info = [&](CreateComputePipelineHelper &helper) {
        // Requires SPIR-V 1.3 for SPV_KHR_storage_buffer_storage_class
        helper.cs_.reset(new VkShaderObj(m_device, current_shader, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", false, nullptr,
                                         SPV_ENV_VULKAN_1_1));
        helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
    };

    // Buffer atomics are mandatory for the extension — always tested.
    current_shader = cs_storage_buffer.c_str();
    CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);

    current_shader = cs_store.c_str();
    CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);

    // Workgroup (shared-memory) atomics are optional — only tested when exposed.
    if (atomic_int64_features.shaderSharedInt64Atomics == VK_TRUE) {
        current_shader = cs_workgroup.c_str();
        CreateComputePipelineHelper::OneshotTest(*this, set_info, kErrorBit, "", true);
    }
}
5871
TEST_F(VkPositiveLayerTest, TopologyAtRasterizer) {
    TEST_DESCRIPTION("Test topology set when creating a pipeline with tessellation and geometry shader.");

    ASSERT_NO_FATAL_FAILURE(Init());

    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    if (!m_device->phy().features().tessellationShader) {
        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // Pass-through TCS: 3 control points, all tess levels set to 1.
    char const *tcsSource = R"glsl(
        #version 450
        layout(vertices = 3) out;
        void main(){
           gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;
           gl_TessLevelInner[0] = 1;
        }
    )glsl";
    // TES emits isolines; the GS then consumes triangles — the topology seen at the
    // rasterizer comes from the geometry shader, which is what this test exercises.
    char const *tesSource = R"glsl(
        #version 450
        layout(isolines, equal_spacing, cw) in;
        void main(){
           gl_Position.xyz = gl_TessCoord;
           gl_Position.w = 1.0f;
        }
    )glsl";
    // Fix: declared like the sibling shader sources (was inconsistently `static`).
    char const *gsSource = R"glsl(
        #version 450
        layout (triangles) in;
        layout (triangle_strip) out;
        layout (max_vertices = 1) out;
        void main() {
           gl_Position = vec4(1.0, 0.5, 0.5, 0.0);
           EmitVertex();
        }
    )glsl";
    VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
    VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
    VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);

    // Tessellation pipelines must use PATCH_LIST as the input topology.
    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};

    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};

    VkDynamicState dyn_state = VK_DYNAMIC_STATE_LINE_WIDTH;
    // Fix: LvlInitStruct already sets sType; the previous explicit re-assignment was redundant.
    VkPipelineDynamicStateCreateInfo dyn_state_ci = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
    dyn_state_ci.dynamicStateCount = 1;
    dyn_state_ci.pDynamicStates = &dyn_state;

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.gp_ci_.pTessellationState = &tsci;
    pipe.gp_ci_.pInputAssemblyState = &iasci;
    pipe.shader_stages_.emplace_back(gs.GetStageCreateInfo());
    pipe.shader_stages_.emplace_back(tcs.GetStageCreateInfo());
    pipe.shader_stages_.emplace_back(tes.GetStageCreateInfo());
    pipe.InitState();
    pipe.dyn_state_ci_ = dyn_state_ci;
    pipe.CreateGraphicsPipeline();

    VkRenderPassBeginInfo rpbi = LvlInitStruct<VkRenderPassBeginInfo>();
    rpbi.renderPass = m_renderPass;
    rpbi.framebuffer = m_framebuffer;
    rpbi.renderArea.offset.x = 0;
    rpbi.renderArea.offset.y = 0;
    rpbi.renderArea.extent.width = 32;
    rpbi.renderArea.extent.height = 32;
    rpbi.clearValueCount = static_cast<uint32_t>(m_renderPassClearValues.size());
    rpbi.pClearValues = m_renderPassClearValues.data();

    // Recording a draw with this pipeline must not produce validation errors.
    m_commandBuffer->begin();
    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    vk::CmdDraw(m_commandBuffer->handle(), 4, 1, 0, 0);
    vk::CmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
5956
TEST_F(VkPositiveLayerTest, TestDynamicVertexInput) {
    TEST_DESCRIPTION("Test using dynamic vertex input and not setting pVertexInputState in the graphics pipeline create info");
    SetTargetApiVersion(VK_API_VERSION_1_1);

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    // VK_EXT_vertex_input_dynamic_state requires Vulkan 1.1.
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Tests requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    // Skip unless the vertex-input dynamic-state extension is available.
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);

    // Query the feature; the extension may be present without the feature enabled.
    auto vi_dynamic_features = LvlInitStruct<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>();
    auto phys_features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&vi_dynamic_features);
    vk::GetPhysicalDeviceFeatures2(gpu(), &phys_features2);

    if (!vi_dynamic_features.vertexInputDynamicState) {
        printf("%s Feature vertexInputDynamicState is not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &phys_features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Build a pipeline that declares VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and omits
    // pVertexInputState entirely; that combination must be accepted without errors.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    const VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_VERTEX_INPUT_EXT};
    auto dynamic_create_info = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
    dynamic_create_info.dynamicStateCount = size(dynamic_states);
    dynamic_create_info.pDynamicStates = dynamic_states;
    pipe.dyn_state_ci_ = dynamic_create_info;
    pipe.InitState();
    pipe.gp_ci_.pVertexInputState = nullptr;
    m_errorMonitor->ExpectSuccess();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
6000
TEST_F(VkPositiveLayerTest, TestCmdSetVertexInputEXT) {
    TEST_DESCRIPTION("Test CmdSetVertexInputEXT");
    SetTargetApiVersion(VK_API_VERSION_1_1);

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    // VK_EXT_vertex_input_dynamic_state requires Vulkan 1.1.
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Tests requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }

    // The extension may be present without the feature enabled; check the feature too.
    auto vertex_input_dynamic_state_features = LvlInitStruct<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&vertex_input_dynamic_state_features);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);

    if (!vertex_input_dynamic_state_features.vertexInputDynamicState) {
        printf("%s Feature vertexInputDynamicState is not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Extension entry point must be loaded at runtime.
    auto vkCmdSetVertexInputEXT =
        reinterpret_cast<PFN_vkCmdSetVertexInputEXT>(vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetVertexInputEXT"));

    // Pipeline with dynamic vertex input: pVertexInputState is intentionally null,
    // the vertex layout is supplied later via vkCmdSetVertexInputEXT.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VERTEX_INPUT_EXT};
    auto dyn_state_ci = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
    dyn_state_ci.dynamicStateCount = size(dyn_states);
    dyn_state_ci.pDynamicStates = dyn_states;
    pipe.dyn_state_ci_ = dyn_state_ci;
    pipe.InitState();
    pipe.gp_ci_.pVertexInputState = nullptr;
    pipe.CreateGraphicsPipeline();

    // One tightly-packed per-vertex binding with a single float attribute at offset 0.
    VkVertexInputBindingDescription2EXT binding = LvlInitStruct<VkVertexInputBindingDescription2EXT>();
    binding.binding = 0;
    binding.stride = sizeof(float);
    binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
    binding.divisor = 1;
    VkVertexInputAttributeDescription2EXT attribute = LvlInitStruct<VkVertexInputAttributeDescription2EXT>();
    attribute.location = 0;
    attribute.binding = 0;
    attribute.format = VK_FORMAT_R32_SFLOAT;
    attribute.offset = 0;

    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    vkCmdSetVertexInputEXT(m_commandBuffer->handle(), 1, &binding, 1, &attribute);
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    // NOTE(review): instanceCount is 0 here (draw of 1 vertex, 0 instances) — valid
    // Vulkan, but presumably 1 instance was intended; confirm against upstream.
    vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
6066
TEST_F(VkPositiveLayerTest, TestCmdSetVertexInputEXTStride) {
    // Fixed copy-pasted description: this variant also enables the dynamic binding-stride state.
    TEST_DESCRIPTION("Test CmdSetVertexInputEXT with VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT");
    SetTargetApiVersion(VK_API_VERSION_1_1);

    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));

    // VK_EXT_vertex_input_dynamic_state requires Vulkan 1.1.
    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s Tests requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
        return;
    }

    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
    } else {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }

    // The extension may be present without the feature enabled; check the feature too.
    auto vertex_input_dynamic_state_features = LvlInitStruct<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&vertex_input_dynamic_state_features);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);

    if (!vertex_input_dynamic_state_features.vertexInputDynamicState) {
        printf("%s Feature vertexInputDynamicState is not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Extension entry point must be loaded at runtime.
    auto vkCmdSetVertexInputEXT =
        reinterpret_cast<PFN_vkCmdSetVertexInputEXT>(vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetVertexInputEXT"));

    // Pipeline declares BOTH vertex-input and vertex-input-binding-stride dynamic states;
    // vkCmdSetVertexInputEXT alone must then satisfy both (stride comes from the binding).
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT};
    auto dyn_state_ci = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
    dyn_state_ci.dynamicStateCount = size(dyn_states);
    dyn_state_ci.pDynamicStates = dyn_states;
    pipe.dyn_state_ci_ = dyn_state_ci;
    pipe.InitState();
    pipe.gp_ci_.pVertexInputState = nullptr;
    pipe.CreateGraphicsPipeline();

    // One tightly-packed per-vertex binding with a single float attribute at offset 0.
    VkVertexInputBindingDescription2EXT binding = LvlInitStruct<VkVertexInputBindingDescription2EXT>();
    binding.binding = 0;
    binding.stride = sizeof(float);
    binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
    binding.divisor = 1;
    VkVertexInputAttributeDescription2EXT attribute = LvlInitStruct<VkVertexInputAttributeDescription2EXT>();
    attribute.location = 0;
    attribute.binding = 0;
    attribute.format = VK_FORMAT_R32_SFLOAT;
    attribute.offset = 0;

    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    vkCmdSetVertexInputEXT(m_commandBuffer->handle(), 1, &binding, 1, &attribute);
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
6132
TEST_F(VkPositiveLayerTest, TestPervertexNVShaderAttributes) {
    // Fixed copy-pasted description: this test exercises pervertexNV fragment inputs,
    // not rasterization streams.
    TEST_DESCRIPTION("Test using pervertexNV shader input attributes with VK_NV_fragment_shader_barycentric.");

    AddRequiredExtensions(VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (!AreRequestedExtensionsEnabled()) {
        printf("%s Extension %s is not supported, skipping test.\n", kSkipPrefix, VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME);
        return;
    }

    // NOTE(review): the feature is force-enabled without querying support; on a device
    // exposing the extension but not the feature, device creation may fail — confirm.
    VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV fragment_shader_barycentric_features =
        LvlInitStruct<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>();
    fragment_shader_barycentric_features.fragmentShaderBarycentric = VK_TRUE;
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&fragment_shader_barycentric_features);
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Vertex shader writes a per-vertex block that the fragment shader reads per-vertex
    // (unintepolated) through the pervertexNV qualifier.
    char const *vsSource = R"glsl(
                #version 450

                layout(location = 0) out PerVertex {
                    vec3 vtxPos;
                } outputs;

                vec2 triangle_positions[3] = vec2[](
                    vec2(0.5, -0.5),
                    vec2(0.5, 0.5),
                    vec2(-0.5, 0.5)
                );

                void main() {
                    gl_Position = vec4(triangle_positions[gl_VertexIndex], 0.0, 1.0);
                    outputs.vtxPos = gl_Position.xyz;
                }
            )glsl";

    // Fragment shader declares the matching input as pervertexNV[3] and selects the
    // provoking vertex's value using the barycentric coordinates.
    char const *fsSource = R"glsl(
                #version 450

                #extension GL_NV_fragment_shader_barycentric : enable

                layout(location = 0) in pervertexNV PerVertex {
                    vec3 vtxPos;
                } inputs[3];

                layout(location = 0) out vec4 out_color;

                void main() {
                    vec3 b = gl_BaryCoordNV;
                    if (b.x > b.y && b.x > b.z) {
                        out_color = vec4(inputs[0].vtxPos, 1.0);
                    }
                    else if(b.y > b.z) {
                        out_color = vec4(inputs[1].vtxPos, 1.0);
                    }
                    else {
                        out_color = vec4(inputs[2].vtxPos, 1.0);
                    }
                }
            )glsl";

    // Creating a pipeline with matching pervertexNV interfaces must not trigger errors.
    m_errorMonitor->ExpectSuccess();
    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    pipe.CreateGraphicsPipeline();
    m_errorMonitor->VerifyNotFound();
}
6206
TEST_F(VkPositiveLayerTest, RayTracingPipelineShaderGroupsKHR) {
    TEST_DESCRIPTION("Test that no warning is produced when a library is referenced in the raytracing shader groups.");
    SetTargetApiVersion(VK_API_VERSION_1_2);
    if (!InitFrameworkForRayTracingTest(this, true, m_instance_extension_names, m_device_extension_names, m_errorMonitor, false,
                                        false, true)) {
        return;
    }

    m_errorMonitor->ExpectSuccess();

    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);

    // Query for rayTracingPipeline feature support; skip if unavailable.
    auto ray_tracing_features = LvlInitStruct<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&ray_tracing_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);

    if (!ray_tracing_features.rayTracingPipeline) {
        printf("%s Feature rayTracing is not supported.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    const VkPipelineLayoutObj empty_pipeline_layout(m_device, {});

    // Minimal no-op ray tracing shader, reused for both raygen and closest-hit stages.
    const std::string empty_shader = R"glsl(
        #version 460
        #extension GL_EXT_ray_tracing : require
        void main() {}
    )glsl";

    VkShaderObj rgen_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_RAYGEN_BIT_KHR, this, "main", false, nullptr,
                            SPV_ENV_VULKAN_1_2);
    VkShaderObj chit_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, this, "main", false, nullptr,
                            SPV_ENV_VULKAN_1_2);

    // Extension entry points must be loaded at runtime.
    PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR =
        reinterpret_cast<PFN_vkCreateRayTracingPipelinesKHR>(vk::GetInstanceProcAddr(instance(), "vkCreateRayTracingPipelinesKHR"));
    ASSERT_TRUE(vkCreateRayTracingPipelinesKHR != nullptr);

    PFN_vkDestroyPipeline vkDestroyPipeline =
        reinterpret_cast<PFN_vkDestroyPipeline>(vk::GetInstanceProcAddr(instance(), "vkDestroyPipeline"));
    ASSERT_TRUE(vkDestroyPipeline != nullptr);

    VkPipeline pipeline = VK_NULL_HANDLE;

    const VkPipelineLayoutObj pipeline_layout(m_device, {});

    // Step 1: build a pipeline LIBRARY containing a single closest-hit stage/group.
    VkPipelineShaderStageCreateInfo stage_create_info = LvlInitStruct<VkPipelineShaderStageCreateInfo>();
    stage_create_info.stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR;
    stage_create_info.module = chit_shader.handle();
    stage_create_info.pName = "main";

    VkRayTracingShaderGroupCreateInfoKHR group_create_info = LvlInitStruct<VkRayTracingShaderGroupCreateInfoKHR>();
    group_create_info.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR;
    group_create_info.generalShader = VK_SHADER_UNUSED_KHR;
    group_create_info.closestHitShader = 0;  // index into the library's own pStages
    group_create_info.anyHitShader = VK_SHADER_UNUSED_KHR;
    group_create_info.intersectionShader = VK_SHADER_UNUSED_KHR;

    // Interface sizes must match between library and the pipeline that links it.
    VkRayTracingPipelineInterfaceCreateInfoKHR interface_ci = LvlInitStruct<VkRayTracingPipelineInterfaceCreateInfoKHR>();
    interface_ci.maxPipelineRayHitAttributeSize = 4;
    interface_ci.maxPipelineRayPayloadSize = 4;

    VkRayTracingPipelineCreateInfoKHR library_pipeline = LvlInitStruct<VkRayTracingPipelineCreateInfoKHR>();
    library_pipeline.flags = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR;
    library_pipeline.stageCount = 1;
    library_pipeline.pStages = &stage_create_info;
    library_pipeline.groupCount = 1;
    library_pipeline.pGroups = &group_create_info;
    library_pipeline.layout = pipeline_layout.handle();
    library_pipeline.pLibraryInterface = &interface_ci;

    VkPipeline library = VK_NULL_HANDLE;
    vkCreateRayTracingPipelinesKHR(m_device->handle(), VK_NULL_HANDLE, VK_NULL_HANDLE, 1, &library_pipeline, nullptr, &library);

    // Step 2: link the library into a full pipeline whose group list references a
    // shader contributed by the library.
    VkPipelineLibraryCreateInfoKHR library_info_one = LvlInitStruct<VkPipelineLibraryCreateInfoKHR>();
    library_info_one.libraryCount = 1;
    library_info_one.pLibraries = &library;

    VkPipelineShaderStageCreateInfo stage_create_infos[1] = {};
    stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_KHR;
    stage_create_infos[0].module = rgen_shader.handle();
    stage_create_infos[0].pName = "main";

    VkRayTracingShaderGroupCreateInfoKHR group_create_infos[2] = {};
    group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR;
    group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR;
    group_create_infos[0].generalShader = 0;  // the raygen stage in this pipeline's pStages
    group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_KHR;
    group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_KHR;
    group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_KHR;

    group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR;
    group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR;
    group_create_infos[1].generalShader = VK_SHADER_UNUSED_KHR;
    group_create_infos[1].closestHitShader = 1;  // Index 1 corresponds to the closest hit shader from the library
    group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_KHR;
    group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_KHR;

    VkRayTracingPipelineCreateInfoKHR pipeline_ci = LvlInitStruct<VkRayTracingPipelineCreateInfoKHR>();
    pipeline_ci.pLibraryInfo = &library_info_one;
    pipeline_ci.stageCount = 1;
    pipeline_ci.pStages = stage_create_infos;
    pipeline_ci.groupCount = 2;
    pipeline_ci.pGroups = group_create_infos;
    pipeline_ci.layout = empty_pipeline_layout.handle();
    pipeline_ci.pLibraryInterface = &interface_ci;

    // This creation must succeed with no validation warnings about the library reference.
    VkResult err =
        vkCreateRayTracingPipelinesKHR(m_device->handle(), VK_NULL_HANDLE, VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
    m_errorMonitor->VerifyNotFound();
    ASSERT_VK_SUCCESS(err);
    ASSERT_NE(pipeline, VK_NULL_HANDLE);

    vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
    vkDestroyPipeline(m_device->handle(), library, nullptr);
}
6328
TEST_F(VkPositiveLayerTest, LineTopologyClasses) {
    TEST_DESCRIPTION("Check different line topologies within the same topology class");

    m_errorMonitor->ExpectSuccess();

    SetTargetApiVersion(VK_API_VERSION_1_1);

    AddRequiredExtensions(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
    auto extended_dynamic_state_features = LvlInitStruct<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&extended_dynamic_state_features);
    ASSERT_NO_FATAL_FAILURE(InitFrameworkAndRetrieveFeatures(features2));

    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
        printf("%s API version +1.1 required\n", kSkipPrefix);
        // Bug fix: the skip message was printed but the test kept running on an
        // unsupported API version; return like every other skip path in this file.
        return;
    }

    if (!AreRequestedExtensionsEnabled()) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }

    if (!extended_dynamic_state_features.extendedDynamicState) {
        printf("%s Test requires (unsupported) extendedDynamicState, skipping\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    // Extension entry point must be loaded at runtime.
    auto vkCmdSetPrimitiveTopologyEXT = reinterpret_cast<PFN_vkCmdSetPrimitiveTopologyEXT>(
        vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetPrimitiveTopologyEXT"));

    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    const VkDynamicState dyn_states[1] = {
        VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
    };

    // Pipeline is created with LINE_LIST; the dynamic topology set at record time is
    // LINE_LIST_WITH_ADJACENCY — a different topology but the same (line) class, which
    // must be accepted without errors.
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    auto dyn_state_ci = LvlInitStruct<VkPipelineDynamicStateCreateInfo>();
    dyn_state_ci.dynamicStateCount = size(dyn_states);
    dyn_state_ci.pDynamicStates = dyn_states;
    pipe.dyn_state_ci_ = dyn_state_ci;
    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
    VkVertexInputBindingDescription inputBinding = {0, sizeof(float), VK_VERTEX_INPUT_RATE_VERTEX};
    pipe.vi_ci_.pVertexBindingDescriptions = &inputBinding;
    pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
    VkVertexInputAttributeDescription attribute = {0, 0, VK_FORMAT_R32_SFLOAT, 0};
    pipe.vi_ci_.pVertexAttributeDescriptions = &attribute;
    pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
    pipe.InitState();
    pipe.CreateGraphicsPipeline();

    const float vbo_data[3] = {0};
    VkConstantBufferObj vb(m_device, sizeof(vbo_data), reinterpret_cast<const void *>(&vbo_data),
                           VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);

    VkCommandBufferObj cb(m_device, m_commandPool);
    cb.begin();
    cb.BeginRenderPass(m_renderPassBeginInfo);

    vk::CmdBindPipeline(cb.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    cb.BindVertexBuffer(&vb, 0, 0);
    vkCmdSetPrimitiveTopologyEXT(cb.handle(), VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY);
    vk::CmdDraw(cb.handle(), 1, 1, 0, 0);

    cb.EndRenderPass();

    cb.end();
    m_errorMonitor->VerifyNotFound();
}
6401
TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineDynamicRendering) {
    TEST_DESCRIPTION("Test for a creating a pipeline with VK_KHR_dynamic_rendering enabled");
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(InitFramework());
    if (!AddRequiredDeviceExtensions(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
        return;
    }
    m_errorMonitor->ExpectSuccess();

    // Skip unless the dynamicRendering feature is actually supported.
    auto dyn_rendering_features = LvlInitStruct<VkPhysicalDeviceDynamicRenderingFeaturesKHR>();
    auto phys_features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&dyn_rendering_features);
    vk::GetPhysicalDeviceFeatures2(gpu(), &phys_features2);
    if (!dyn_rendering_features.dynamicRendering) {
        printf("%s Test requires (unsupported) dynamicRendering , skipping\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &phys_features2));

    // Fragment shader reading an input attachment, bound through descriptor set 0.
    char const *frag_src = R"glsl(
        #version 450
        layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;
        layout(location=0) out vec4 color;
        void main() {
           color = subpassLoad(x);
        }
    )glsl";

    VkShaderObj vert_shader(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj frag_shader(m_device, frag_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipeline(m_device);
    pipeline.AddShader(&vert_shader);
    pipeline.AddShader(&frag_shader);
    pipeline.AddDefaultColorAttachment();

    // Layout exposing one input attachment to the fragment stage.
    VkDescriptorSetLayoutBinding input_binding = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
    const VkDescriptorSetLayoutObj set_layout(m_device, {input_binding});
    const VkPipelineLayoutObj pipeline_layout(m_device, {&set_layout});

    // Dynamic rendering: color formats come from VkPipelineRenderingCreateInfoKHR
    // chained into the pipeline create info instead of from a render pass.
    VkFormat attachment_format = VK_FORMAT_R8G8B8A8_UNORM;
    auto pipeline_rendering_info = LvlInitStruct<VkPipelineRenderingCreateInfoKHR>();
    pipeline_rendering_info.colorAttachmentCount = 1;
    pipeline_rendering_info.pColorAttachmentFormats = &attachment_format;

    auto graphics_ci = LvlInitStruct<VkGraphicsPipelineCreateInfo>();
    pipeline.InitGraphicsPipelineCreateInfo(&graphics_ci);
    graphics_ci.pNext = &pipeline_rendering_info;

    pipeline.CreateVKPipeline(pipeline_layout.handle(), VK_NULL_HANDLE, &graphics_ci);
    m_errorMonitor->VerifyNotFound();
}
6455
TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineDynamicRenderingNoInfo) {
    TEST_DESCRIPTION("Test for a creating a pipeline with VK_KHR_dynamic_rendering enabled but no rendering info struct.");
    SetTargetApiVersion(VK_API_VERSION_1_1);
    ASSERT_NO_FATAL_FAILURE(InitFramework());
    if (!AddRequiredDeviceExtensions(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
        return;
    }

    m_errorMonitor->ExpectSuccess();

    // Skip unless the dynamicRendering feature is actually supported.
    auto dynamic_rendering_features = LvlInitStruct<VkPhysicalDeviceDynamicRenderingFeaturesKHR>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&dynamic_rendering_features);
    vk::GetPhysicalDeviceFeatures2(gpu(), &features2);
    if (!dynamic_rendering_features.dynamicRendering) {
        printf("%s Test requires (unsupported) dynamicRendering , skipping\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    // Fragment shader reading an input attachment, bound through descriptor set 0.
    char const *fsSource = R"glsl(
        #version 450
        layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;
        layout(location=0) out vec4 color;
        void main() {
           color = subpassLoad(x);
        }
    )glsl";

    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);

    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddShader(&fs);
    pipe.AddDefaultColorAttachment();

    // Layout exposing one input attachment to the fragment stage.
    VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
    const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
    const VkPipelineLayoutObj pl(m_device, {&dsl});

    // Intentionally chain NO VkPipelineRenderingCreateInfoKHR and use a null render pass.
    auto create_info = LvlInitStruct<VkGraphicsPipelineCreateInfo>();
    pipe.InitGraphicsPipelineCreateInfo(&create_info);
    // if there isn't a VkPipelineRenderingCreateInfoKHR, the driver is supposed to use safe default values
    pipe.CreateVKPipeline(pl.handle(), VK_NULL_HANDLE, &create_info);
    m_errorMonitor->VerifyNotFound();
}
6504
TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineRasterizationOrderAttachmentAccessFlags) {
    TEST_DESCRIPTION("Test for a creating a pipeline with VK_ARM_rasterization_order_attachment_access enabled");
    m_errorMonitor->ExpectSuccess();

    SetTargetApiVersion(VK_API_VERSION_1_2);
    AddRequiredExtensions(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME);

    auto rasterization_order_features = LvlInitStruct<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2>(&rasterization_order_features);

    ASSERT_NO_FATAL_FAILURE(InitFrameworkAndRetrieveFeatures(features2));

    if (!AreRequestedExtensionsEnabled()) {
        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME);
        return;
    }

    // The test needs at least one of the three per-attachment-type features.
    if (!rasterization_order_features.rasterizationOrderColorAttachmentAccess &&
        !rasterization_order_features.rasterizationOrderDepthAttachmentAccess &&
        !rasterization_order_features.rasterizationOrderStencilAttachmentAccess) {
        printf("%s Test requires (unsupported) rasterizationOrderAttachmentAccess , skipping\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    m_errorMonitor->VerifyNotFound();

    // Shared pipeline state; the flags and render pass handle are re-targeted per case below.
    auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
    VkPipelineColorBlendAttachmentState cb_as = {};
    auto cb_ci = LvlInitStruct<VkPipelineColorBlendStateCreateInfo>();
    cb_ci.attachmentCount = 1;
    cb_ci.pAttachments = &cb_as;
    VkRenderPass render_pass_handle = VK_NULL_HANDLE;

    // Builds a render pass with one color and one depth/stencil attachment whose single
    // subpass carries the given rasterization-order-access subpass flags.
    auto create_render_pass = [&](VkPipelineDepthStencilStateCreateFlags subpass_flags, vk_testing::RenderPass &render_pass) {
        VkAttachmentDescription attachments[2] = {};
        attachments[0].flags = 0;
        attachments[0].format = VK_FORMAT_B8G8R8A8_UNORM;
        attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
        attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        attachments[1].flags = 0;
        attachments[1].format = FindSupportedDepthStencilFormat(this->gpu());
        attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
        attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        VkAttachmentReference cAttachRef = {};
        cAttachRef.attachment = 0;
        cAttachRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkAttachmentReference dsAttachRef = {};
        dsAttachRef.attachment = 1;
        dsAttachRef.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        VkSubpassDescription subpass = {};
        subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
        subpass.colorAttachmentCount = 1;
        subpass.pColorAttachments = &cAttachRef;
        subpass.pDepthStencilAttachment = &dsAttachRef;
        subpass.flags = subpass_flags;

        VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
        rpci.attachmentCount = 2;
        rpci.pAttachments = attachments;
        rpci.subpassCount = 1;
        rpci.pSubpasses = &subpass;

        render_pass.init(*this->m_device, rpci);
    };

    // Fixed typo in the local lambda name (was "set_flgas_pipeline_createinfo").
    auto set_flags_pipeline_createinfo = [&](CreatePipelineHelper &helper) {
        helper.gp_ci_.pDepthStencilState = &ds_ci;
        helper.gp_ci_.pColorBlendState = &cb_ci;
        helper.gp_ci_.renderPass = render_pass_handle;
    };

    // Color attachment
    if (rasterization_order_features.rasterizationOrderColorAttachmentAccess) {
        cb_ci.flags = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM;
        ds_ci.flags = 0;

        vk_testing::RenderPass render_pass;
        create_render_pass(VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM, render_pass);
        render_pass_handle = render_pass.handle();
        CreatePipelineHelper::OneshotTest(*this, set_flags_pipeline_createinfo, kErrorBit, "", true);
    }

    // Depth attachment
    if (rasterization_order_features.rasterizationOrderDepthAttachmentAccess) {
        cb_ci.flags = 0;
        ds_ci.flags = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM;

        vk_testing::RenderPass render_pass;
        create_render_pass(VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, render_pass);
        render_pass_handle = render_pass.handle();
        CreatePipelineHelper::OneshotTest(*this, set_flags_pipeline_createinfo, kErrorBit, "", true);
    }

    // Stencil attachment
    if (rasterization_order_features.rasterizationOrderStencilAttachmentAccess) {
        cb_ci.flags = 0;
        ds_ci.flags = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM;

        vk_testing::RenderPass render_pass;
        create_render_pass(VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM, render_pass);
        render_pass_handle = render_pass.handle();

        CreatePipelineHelper::OneshotTest(*this, set_flags_pipeline_createinfo, kErrorBit, "", true);
    }

    m_errorMonitor->VerifyNotFound();
}