1 /*
2 * Copyright (c) 2015-2021 The Khronos Group Inc.
3 * Copyright (c) 2015-2021 Valve Corporation
4 * Copyright (c) 2015-2021 LunarG, Inc.
5 * Copyright (c) 2015-2021 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
20 * Author: Tony Barbour <tony@LunarG.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 */
23
24 #include "vkrenderframework.h"
25
26 #include <algorithm>
27 #include <cassert>
28 #include <cstring>
29 #include <utility>
30 #include <vector>
31
32 #include "vk_format_utils.h"
33 #include "vk_extension_helper.h"
34
35 using std::string;
36 using std::strncmp;
37 using std::vector;
38
// Erase-remove idiom helper: removes every element of `container` for which
// `fn` returns true, and returns the container's new end() iterator.
// Note: despite the name, this *erases* (shrinks the container), unlike
// std::remove_if which only compacts.
template <typename C, typename F>
typename C::iterator RemoveIf(C &container, F &&fn) {
    auto compacted_end = std::remove_if(container.begin(), container.end(), std::forward<F>(fn));
    return container.erase(compacted_end, container.end());
}
43
// Construct the monitor with a clean slate. Behavior::DefaultSuccess arms the
// monitor to treat *any* kErrorBit message as a test failure until a specific
// failure message is requested via SetDesiredFailureMsg().
ErrorMonitor::ErrorMonitor(Behavior behavior) : behavior_(behavior) {
    test_platform_thread_create_mutex(&mutex_);
    MonitorReset();
    if (behavior_ == Behavior::DefaultSuccess) {
        ExpectSuccess(kErrorBit);
    }
}
51
~ErrorMonitor()52 ErrorMonitor::~ErrorMonitor() NOEXCEPT { test_platform_thread_delete_mutex(&mutex_); }
53
// Clear all accumulated state (expected, ignored, allowed, and observed
// messages). Callers must hold mutex_; use Reset() for the locking variant.
void ErrorMonitor::MonitorReset() {
    message_flags_ = 0;
    bailout_ = NULL;
    message_found_ = VK_FALSE;
    failure_message_strings_.clear();
    desired_message_strings_.clear();
    ignore_message_strings_.clear();
    allowed_message_strings_.clear();
    other_messages_.clear();
}
64
// Thread-safe wrapper around MonitorReset().
void ErrorMonitor::Reset() {
    test_platform_thread_lock_mutex(&mutex_);
    MonitorReset();
    test_platform_thread_unlock_mutex(&mutex_);
}
70
SetDesiredFailureMsg(const VkFlags msgFlags,const string msg)71 void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const string msg) { SetDesiredFailureMsg(msgFlags, msg.c_str()); }
72
// Register a message substring the test expects the validation layers to emit.
// msgFlags is OR'd into the mask of report flags the callback will inspect.
// If the monitor is currently in "expect success" mode, verify that state
// first (VerifyNotFound also resets the monitor).
void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString) {
    if (NeedCheckSuccess()) {
        VerifyNotFound();
    }

    test_platform_thread_lock_mutex(&mutex_);
    desired_message_strings_.insert(msgString);
    message_flags_ |= msgFlags;
    test_platform_thread_unlock_mutex(&mutex_);
}
83
// Register a message substring that is tolerated: if seen it is neither a
// match for a desired message nor reported as unexpected.
void ErrorMonitor::SetAllowedFailureMsg(const char *const msg) {
    test_platform_thread_lock_mutex(&mutex_);
    allowed_message_strings_.emplace_back(msg);
    test_platform_thread_unlock_mutex(&mutex_);
}
89
// Register a message substring to be ignored entirely (skipped before any
// desired/allowed matching in CheckForDesiredMsg).
void ErrorMonitor::SetUnexpectedError(const char *const msg) {
    if (NeedCheckSuccess()) {
        VerifyNotFound();
    }
    test_platform_thread_lock_mutex(&mutex_);
    ignore_message_strings_.emplace_back(msg);
    test_platform_thread_unlock_mutex(&mutex_);
}
98
// Core matching routine invoked from the debug callback for every validation
// message. Returns VK_TRUE when the message was expected (so the callback can
// suppress it), VK_FALSE otherwise. Side effects: sets *bailout_ if armed,
// records matched messages in failure_message_strings_, and collects
// unmatched messages in other_messages_.
VkBool32 ErrorMonitor::CheckForDesiredMsg(const char *const msgString) {
    VkBool32 result = VK_FALSE;
    test_platform_thread_lock_mutex(&mutex_);
    if (bailout_ != nullptr) {
        *bailout_ = true;
    }
    string errorString(msgString);
    bool found_expected = false;

    if (!IgnoreMessage(errorString)) {
        // First pass: does the message satisfy one of the desired substrings?
        for (auto desired_msg_it = desired_message_strings_.begin(); desired_msg_it != desired_message_strings_.end();
             ++desired_msg_it) {
            if ((*desired_msg_it).length() == 0) {
                // An empty desired_msg string "" indicates a positive test - not expecting an error.
                // Return true to avoid calling layers/driver with this error.
                // And don't erase the "" string, so it remains if another error is found.
                result = VK_TRUE;
                found_expected = true;
                message_found_ = true;
                failure_message_strings_.insert(errorString);
            } else if (errorString.find(*desired_msg_it) != string::npos) {
                found_expected = true;
                failure_message_strings_.insert(errorString);
                message_found_ = true;
                result = VK_TRUE;
                // Remove a maximum of one failure message from the set
                // Multiset mutation is acceptable because `break` causes flow of control to exit the for loop
                desired_message_strings_.erase(desired_msg_it);
                break;
            }
        }

        // Second pass: tolerated-but-not-desired messages don't count as
        // unexpected, but also don't return VK_TRUE.
        if (!found_expected && allowed_message_strings_.size()) {
            for (auto allowed_msg_it = allowed_message_strings_.begin(); allowed_msg_it != allowed_message_strings_.end();
                 ++allowed_msg_it) {
                if (errorString.find(*allowed_msg_it) != string::npos) {
                    found_expected = true;
                    break;
                }
            }
        }

        // Anything left over is an unexpected message; remember it so
        // VerifyFound()/VerifyNotFound() can fail the test.
        if (!found_expected) {
            printf("Unexpected: %s\n", msgString);
            other_messages_.push_back(errorString);
        }
    }

    test_platform_thread_unlock_mutex(&mutex_);
    return result;
}
150
// Copy of all messages that matched neither a desired nor an allowed string.
vector<string> ErrorMonitor::GetOtherFailureMsgs() const { return other_messages_; }

// Mask of VkDebugReport flags the monitor is currently listening for.
VkDebugReportFlagsEXT ErrorMonitor::GetMessageFlags() { return message_flags_; }

// True once at least one desired message (or any message in expect-success
// mode) has been seen.
bool ErrorMonitor::AnyDesiredMsgFound() const { return message_found_; }

// True when every registered desired message has been matched (set empty).
bool ErrorMonitor::AllDesiredMsgsFound() const { return desired_message_strings_.empty(); }
158
// Record a failure directly from test code (bypassing the debug callback).
void ErrorMonitor::SetError(const char *const errorString) {
    test_platform_thread_lock_mutex(&mutex_);
    message_found_ = true;
    failure_message_strings_.insert(errorString);
    test_platform_thread_unlock_mutex(&mutex_);
}
165
// Install (or clear, with nullptr) a flag that CheckForDesiredMsg() sets on
// every message, letting a looping test break out early.
void ErrorMonitor::SetBailout(bool *bailout) {
    test_platform_thread_lock_mutex(&mutex_);
    bailout_ = bailout;
    test_platform_thread_unlock_mutex(&mutex_);
}
171
DumpFailureMsgs() const172 void ErrorMonitor::DumpFailureMsgs() const {
173 vector<string> otherMsgs = GetOtherFailureMsgs();
174 if (otherMsgs.size()) {
175 std::cout << "Other error messages logged for this test were:" << std::endl;
176 for (auto iter = otherMsgs.begin(); iter != otherMsgs.end(); iter++) {
177 std::cout << " " << *iter << std::endl;
178 }
179 }
180 }
181
// Arm the monitor so that ANY message matching message_flag_mask is treated
// as a test failure (the "" sentinel matches every message in
// CheckForDesiredMsg). Note this *overwrites* message_flags_ rather than
// OR-ing into it.
void ErrorMonitor::ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask) {
    // Match ANY message matching specified type
    test_platform_thread_lock_mutex(&mutex_);
    desired_message_strings_.insert("");
    message_flags_ = message_flag_mask;
    test_platform_thread_unlock_mutex(&mutex_);
}
189
// Assert that every desired message registered with SetDesiredFailureMsg()
// was seen; also fail on any unexpected messages. Resets the monitor, then
// (in DefaultSuccess mode) re-arms expect-success for the next check.
void ErrorMonitor::VerifyFound() {
    test_platform_thread_lock_mutex(&mutex_);
    // Not receiving expected message(s) is a failure. /Before/ throwing, dump any other messages
    if (!AllDesiredMsgsFound()) {
        DumpFailureMsgs();
        for (const auto &desired_msg : desired_message_strings_) {
            ADD_FAILURE() << "Did not receive expected error '" << desired_msg << "'";
        }
    } else if (GetOtherFailureMsgs().size() > 0) {
        // Fail test case for any unexpected errors
#if defined(ANDROID)
        // This will get unexpected errors into the adb log
        for (auto msg : other_messages_) {
            __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
        }
#else
        ADD_FAILURE() << "Received unexpected error(s).";
#endif
    }
    MonitorReset();
    test_platform_thread_unlock_mutex(&mutex_);

    // Re-arm outside the lock: ExpectSuccess() takes the mutex itself.
    if (behavior_ == Behavior::DefaultSuccess) {
        ExpectSuccess();
    }
}
216
// Counterpart to VerifyFound() for expect-success mode: assert that NO error
// message was seen. Fails listing each captured error, also fails on any
// unexpected messages, then resets the monitor.
void ErrorMonitor::VerifyNotFound() {
    test_platform_thread_lock_mutex(&mutex_);
    // ExpectSuccess() configured us to match anything. Any error is a failure.
    if (AnyDesiredMsgFound()) {
        DumpFailureMsgs();
        for (const auto &msg : failure_message_strings_) {
            ADD_FAILURE() << "Expected to succeed but got error: " << msg;
        }
    } else if (GetOtherFailureMsgs().size() > 0) {
        // Fail test case for any unexpected errors
#if defined(ANDROID)
        // This will get unexpected errors into the adb log
        for (auto msg : other_messages_) {
            __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
        }
#else
        ADD_FAILURE() << "Received unexpected error(s).";
#endif
    }
    MonitorReset();
    test_platform_thread_unlock_mutex(&mutex_);
}
239
IgnoreMessage(string const & msg) const240 bool ErrorMonitor::IgnoreMessage(string const &msg) const {
241 if (ignore_message_strings_.empty()) {
242 return false;
243 }
244
245 return std::find_if(ignore_message_strings_.begin(), ignore_message_strings_.end(),
246 [&msg](string const &str) { return msg.find(str) != string::npos; }) != ignore_message_strings_.end();
247 }
248
// Create the debug messenger/report object on `instance`. Silently no-ops if
// the creation entry point cannot be resolved (extension not enabled); on
// creation failure debug_obj_ is left as VK_NULL_HANDLE.
void DebugReporter::Create(VkInstance instance) NOEXCEPT {
    assert(instance);
    assert(!debug_obj_);

    auto DebugCreate = reinterpret_cast<DebugCreateFnType>(vk::GetInstanceProcAddr(instance, debug_create_fn_name_));
    if (!DebugCreate) return;

    const VkResult err = DebugCreate(instance, &debug_create_info_, nullptr, &debug_obj_);
    if (err) debug_obj_ = VK_NULL_HANDLE;
}
259
// Destroy the debug object created by Create() and clear the handle.
void DebugReporter::Destroy(VkInstance instance) NOEXCEPT {
    assert(instance);
    assert(debug_obj_);  // valid to call with null object, but probably bug

    auto DebugDestroy = reinterpret_cast<DebugDestroyFnType>(vk::GetInstanceProcAddr(instance, debug_destroy_fn_name_));
    assert(DebugDestroy);

    DebugDestroy(instance, debug_obj_, nullptr);
    debug_obj_ = VK_NULL_HANDLE;
}
270
// Validation-layer callback. On Android the legacy VK_EXT_debug_report
// signature is used; elsewhere the VK_EXT_debug_utils signature is used and
// its severity/type pair is folded into legacy report flags so both paths
// share one ErrorMonitor code path.
#ifdef VK_USE_PLATFORM_ANDROID_KHR
VKAPI_ATTR VkBool32 VKAPI_CALL DebugReporter::DebugCallback(VkDebugReportFlagsEXT message_flags, VkDebugReportObjectTypeEXT,
                                                            uint64_t, size_t, int32_t, const char *, const char *message,
                                                            void *user_data) {
#else
VKAPI_ATTR VkBool32 VKAPI_CALL DebugReporter::DebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
                                                            VkDebugUtilsMessageTypeFlagsEXT message_types,
                                                            const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
                                                            void *user_data) {
    const auto message_flags = DebugAnnotFlagsToReportFlags(message_severity, message_types);
    const char *message = callback_data->pMessage;
#endif
    // user_data was registered as the test's ErrorMonitor at messenger creation.
    ErrorMonitor *errMonitor = (ErrorMonitor *)user_data;

    if (message_flags & errMonitor->GetMessageFlags()) {
        return errMonitor->CheckForDesiredMsg(message);
    }
    return VK_FALSE;
}
290
// Construct the framework with every Vulkan handle null and sensible test
// defaults (256x256 render area, RGBA8 render target, clear-on-load).
VkRenderFramework::VkRenderFramework()
    : instance_(NULL),
      m_device(NULL),
      m_commandPool(VK_NULL_HANDLE),
      m_commandBuffer(NULL),
      m_renderPass(VK_NULL_HANDLE),
      m_framebuffer(VK_NULL_HANDLE),
      m_surface(VK_NULL_HANDLE),
#if defined(VK_USE_PLATFORM_XLIB_KHR)
      m_surface_dpy(nullptr),
      m_surface_window(None),
#endif
#if defined(VK_USE_PLATFORM_XCB_KHR)
      m_surface_xcb_conn(nullptr),
#endif
      m_swapchain(VK_NULL_HANDLE),
      m_addRenderPassSelfDependency(false),
      m_width(256.0),   // default window width
      m_height(256.0),  // default window height
      m_render_target_fmt(VK_FORMAT_R8G8B8A8_UNORM),
      m_depth_stencil_fmt(VK_FORMAT_UNDEFINED),
      m_clear_via_load_op(true),
      m_depth_clear_color(1.0),
      m_stencil_clear_color(0),
      m_depthStencil(NULL) {
    // sType-initialized create/begin infos reused by render pass setup helpers.
    m_framebuffer_info = LvlInitStruct<VkFramebufferCreateInfo>();
    m_renderPass_info = LvlInitStruct<VkRenderPassCreateInfo>();
    m_renderPassBeginInfo = LvlInitStruct<VkRenderPassBeginInfo>();

    // clear the back buffer to dark grey
    m_clear_color.float32[0] = 0.25f;
    m_clear_color.float32[1] = 0.25f;
    m_clear_color.float32[2] = 0.25f;
    m_clear_color.float32[3] = 0.0f;
}
326
// Tear down all framework-owned Vulkan objects (see ShutdownFramework()).
VkRenderFramework::~VkRenderFramework() { ShutdownFramework(); }

// Selected physical device; requires InitFramework() to have run.
VkPhysicalDevice VkRenderFramework::gpu() {
    EXPECT_NE((VkInstance)0, instance_);  // Invalid to request gpu before instance exists
    return gpu_;
}

// Cached properties of the selected physical device.
VkPhysicalDeviceProperties VkRenderFramework::physDevProps() {
    EXPECT_NE((VkPhysicalDevice)0, gpu_);  // Invalid to request physical device properties before gpu
    return physDevProps_;
}
338
// Return true if layer name is found and spec+implementation values are >= requested values
bool VkRenderFramework::InstanceLayerSupported(const char *const layer_name, const uint32_t spec_version,
                                               const uint32_t impl_version) {
    const auto layers = vk_testing::GetGlobalLayers();

    // First name match decides; layer names are unique per loader spec.
    for (const auto &layer : layers) {
        if (0 == strncmp(layer_name, layer.layerName, VK_MAX_EXTENSION_NAME_SIZE)) {
            return layer.specVersion >= spec_version && layer.implementationVersion >= impl_version;
        }
    }
    return false;
}
351
// Return true if extension name is found and spec value is >= requested spec value
// WARNING: for simplicity, does not cover layers' extensions
bool VkRenderFramework::InstanceExtensionSupported(const char *const extension_name, const uint32_t spec_version) {
    // WARNING: assume debug and validation feature extensions are always supported, which are usually provided by layers
    if (0 == strncmp(extension_name, VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_MAX_EXTENSION_NAME_SIZE)) return true;
    if (0 == strncmp(extension_name, VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_MAX_EXTENSION_NAME_SIZE)) return true;
    if (0 == strncmp(extension_name, VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME, VK_MAX_EXTENSION_NAME_SIZE)) return true;

    const auto extensions = vk_testing::GetGlobalExtensions();

    const auto IsTheQueriedExtension = [extension_name, spec_version](const VkExtensionProperties &extension) {
        return strncmp(extension_name, extension.extensionName, VK_MAX_EXTENSION_NAME_SIZE) == 0 &&
               extension.specVersion >= spec_version;
    };

    return std::any_of(extensions.begin(), extensions.end(), IsTheQueriedExtension);
}
369
// Enable device profile as last layer on stack overriding devsim if there, or return if not available
bool VkRenderFramework::EnableDeviceProfileLayer() {
    if (InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
        if (VkTestFramework::m_devsim_layer) {
            // devsim must be last; replace it rather than stacking on top.
            assert(0 == strncmp(instance_layers_.back(), "VK_LAYER_LUNARG_device_simulation", VK_MAX_EXTENSION_NAME_SIZE));
            instance_layers_.back() = "VK_LAYER_LUNARG_device_profile_api";
        } else {
            instance_layers_.push_back("VK_LAYER_LUNARG_device_profile_api");
        }
    } else {
        printf("             Did not find VK_LAYER_LUNARG_device_profile_api layer; skipped.\n");
        return false;
    }
    return true;
}
385
// Return true if instance exists and extension name is in the list
bool VkRenderFramework::InstanceExtensionEnabled(const char *ext_name) {
    if (!instance_) return false;

    return std::any_of(instance_extensions_.begin(), instance_extensions_.end(),
                       [ext_name](const char *e) { return 0 == strncmp(ext_name, e, VK_MAX_EXTENSION_NAME_SIZE); });
}
// Return true if extension name is found and spec value is >= requested spec value
bool VkRenderFramework::DeviceExtensionSupported(const char *extension_name, const uint32_t spec_version) const {
    if (!instance_ || !gpu_) {
        EXPECT_NE((VkInstance)0, instance_);  // Complain, not cool without an instance
        EXPECT_NE((VkPhysicalDevice)0, gpu_);
        return false;
    }

    const vk_testing::PhysicalDevice device_obj(gpu_);

    const auto enabled_layers = instance_layers_;  // assumes instance_layers_ contains enabled layers

    // Merge the device's own extensions with those exposed by each enabled layer.
    auto extensions = device_obj.extensions();
    for (const auto &layer : enabled_layers) {
        const auto layer_extensions = device_obj.extensions(layer);
        extensions.insert(extensions.end(), layer_extensions.begin(), layer_extensions.end());
    }

    const auto IsTheQueriedExtension = [extension_name, spec_version](const VkExtensionProperties &extension) {
        return strncmp(extension_name, extension.extensionName, VK_MAX_EXTENSION_NAME_SIZE) == 0 &&
               extension.specVersion >= spec_version;
    };

    return std::any_of(extensions.begin(), extensions.end(), IsTheQueriedExtension);
}
418
419 // Return true if device is created and extension name is found in the list
420 bool VkRenderFramework::DeviceExtensionEnabled(const char *ext_name) {
421 if (NULL == m_device) return false;
422
423 bool ext_found = false;
424 for (auto ext : m_device_extension_names) {
425 if (!strncmp(ext, ext_name, VK_MAX_EXTENSION_NAME_SIZE)) {
426 ext_found = true;
427 break;
428 }
429 }
430 return ext_found;
431 }
432
// Some tests may need to be skipped if the devsim layer is in use.
bool VkRenderFramework::DeviceSimulation() { return m_devsim_layer; }
435
// Build the VkInstanceCreateInfo for InitFramework(), chaining the debug
// reporter's create info as pNext so messages during vkCreateInstance itself
// are also captured.
VkInstanceCreateInfo VkRenderFramework::GetInstanceCreateInfo() const {
    return {
        VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        &debug_reporter_.debug_create_info_,                 // pNext: capture create-time messages
        0,                                                   // flags
        &app_info_,
        static_cast<uint32_t>(instance_layers_.size()),
        instance_layers_.data(),
        static_cast<uint32_t>(instance_extensions_.size()),
        instance_extensions_.data(),
    };
}
448
// Create the VkInstance, pick a physical device, and attach the debug
// reporter. `instance_pnext` (if non-null) is a caller-owned pNext chain that
// is temporarily spliced in front of the default create-info chain and
// restored before returning. Requested extensions recorded earlier via
// AddRequiredExtensions() are resolved to device extensions at the end.
void VkRenderFramework::InitFramework(void * /*unused compatibility parameter*/, void *instance_pnext) {
    ASSERT_EQ((VkInstance)0, instance_);

    // Drop (and report) any requested layer/extension the loader can't provide,
    // so instance creation doesn't fail outright.
    const auto LayerNotSupportedWithReporting = [](const char *layer) {
        if (InstanceLayerSupported(layer))
            return false;
        else {
            ADD_FAILURE() << "InitFramework(): Requested layer \"" << layer << "\" is not supported. It will be disabled.";
            return true;
        }
    };
    const auto ExtensionNotSupportedWithReporting = [](const char *extension) {
        if (InstanceExtensionSupported(extension))
            return false;
        else {
            ADD_FAILURE() << "InitFramework(): Requested extension \"" << extension << "\" is not supported. It will be disabled.";
            return true;
        }
    };

    // Driver info is printed once per process when VK_LAYER_TESTS_PRINT_DRIVER is set.
    static bool driver_printed = false;
    static bool print_driver_info = GetEnvironment("VK_LAYER_TESTS_PRINT_DRIVER") != "";
    if (print_driver_info && !driver_printed &&
        InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        instance_extensions_.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    RemoveIf(instance_layers_, LayerNotSupportedWithReporting);
    RemoveIf(instance_extensions_, ExtensionNotSupportedWithReporting);

    auto ici = GetInstanceCreateInfo();

    // concatenate pNexts: walk to the end of the caller's chain and link our
    // default chain behind it.
    void *last_pnext = nullptr;
    if (instance_pnext) {
        last_pnext = instance_pnext;
        while (reinterpret_cast<const VkBaseOutStructure *>(last_pnext)->pNext)
            last_pnext = reinterpret_cast<VkBaseOutStructure *>(last_pnext)->pNext;

        void *&link = reinterpret_cast<void *&>(reinterpret_cast<VkBaseOutStructure *>(last_pnext)->pNext);
        link = const_cast<void *>(ici.pNext);
        ici.pNext = instance_pnext;
    }

    ASSERT_VK_SUCCESS(vk::CreateInstance(&ici, nullptr, &instance_));
    if (instance_pnext) reinterpret_cast<VkBaseOutStructure *>(last_pnext)->pNext = nullptr;  // reset back borrowed pNext chain

    // Choose a physical device
    uint32_t gpu_count = 0;
    const VkResult err = vk::EnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
    ASSERT_TRUE(err == VK_SUCCESS || err == VK_INCOMPLETE) << vk_result_string(err);
    ASSERT_GT(gpu_count, (uint32_t)0) << "No GPU (i.e. VkPhysicalDevice) available";

    std::vector<VkPhysicalDevice> phys_devices(gpu_count);
    vk::EnumeratePhysicalDevices(instance_, &gpu_count, phys_devices.data());

    // An explicit index (from the test command line) overrides type-based selection.
    const int phys_device_index = VkTestFramework::m_phys_device_index;
    if ((phys_device_index >= 0) && (phys_device_index < static_cast<int>(gpu_count))) {
        gpu_ = phys_devices[phys_device_index];
        vk::GetPhysicalDeviceProperties(gpu_, &physDevProps_);
        m_gpu_index = phys_device_index;
    } else {
        // Specify a "physical device priority" with larger values meaning higher priority.
        std::array<int, VK_PHYSICAL_DEVICE_TYPE_CPU + 1> device_type_rank;
        device_type_rank[VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU] = 4;
        device_type_rank[VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU] = 3;
        device_type_rank[VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU] = 2;
        device_type_rank[VK_PHYSICAL_DEVICE_TYPE_CPU] = 1;
        device_type_rank[VK_PHYSICAL_DEVICE_TYPE_OTHER] = 0;

        // Initialize physical device and properties with first device found
        gpu_ = phys_devices[0];
        m_gpu_index = 0;
        vk::GetPhysicalDeviceProperties(gpu_, &physDevProps_);

        // See if there are any higher priority devices found
        for (size_t i = 1; i < phys_devices.size(); ++i) {
            VkPhysicalDeviceProperties tmp_props;
            vk::GetPhysicalDeviceProperties(phys_devices[i], &tmp_props);
            if (device_type_rank[tmp_props.deviceType] > device_type_rank[physDevProps_.deviceType]) {
                physDevProps_ = tmp_props;
                gpu_ = phys_devices[i];
                m_gpu_index = i;
            }
        }
    }

    debug_reporter_.Create(instance_);

    if (print_driver_info && !driver_printed) {
        // NOTE(review): assumes GetPhysicalDeviceProperties2 is callable here;
        // relies on the GPDP2 extension push above or a 1.1+ instance — confirm.
        auto driver_properties = LvlInitStruct<VkPhysicalDeviceDriverProperties>();
        auto physical_device_properties2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&driver_properties);
        vk::GetPhysicalDeviceProperties2(gpu_, &physical_device_properties2);
        printf("Driver Name = %s\n", driver_properties.driverName);
        printf("Driver Info = %s\n", driver_properties.driverInfo);

        driver_printed = true;
    }

    // Now that a gpu_ exists, resolve extensions requested before init to
    // device extensions (instance-level ones were handled when requested).
    for (const auto &ext : m_requested_extensions) {
        AddRequiredDeviceExtensions(ext);
    }
}
552
// Record an extension the test requires (instance or device); instance-side
// handling happens immediately, device-side resolution is deferred to
// InitFramework() once a physical device exists.
bool VkRenderFramework::AddRequiredExtensions(const char *ext_name) {
    m_requested_extensions.push_back(ext_name);
    return AddRequiredInstanceExtensions(ext_name);
}
557
558 bool VkRenderFramework::AreRequestedExtensionsEnabled() const {
559 for (const auto &ext : m_requested_extensions) {
560 // `ext` may refer to an instance or device extension
561 if (!CanEnableDeviceExtension(ext) && !CanEnableInstanceExtension(ext)) {
562 return false;
563 }
564 }
565 return true;
566 }
567
// If `ext_name` is an instance extension, enable it (and, recursively, its
// required instance extensions); returns false if any of them is unsupported.
// Device extensions pass through returning true after their *instance-level*
// requirements are enabled — the device side is handled by
// AddRequiredDeviceExtensions().
bool VkRenderFramework::AddRequiredInstanceExtensions(const char *ext_name) {
    // Already enabled: nothing to do.
    if (CanEnableInstanceExtension(ext_name)) {
        return true;
    }

    const auto &instance_exts_map = InstanceExtensions::get_info_map();
    bool is_instance_ext = false;
    if (instance_exts_map.count(ext_name) > 0) {
        if (!InstanceExtensionSupported(ext_name)) {
            return false;
        } else {
            is_instance_ext = true;
        }
    }

    // Different tables need to be used for extension dependency lookup depending on whether `ext_name` refers to a device or
    // instance extension
    if (is_instance_ext) {
        const auto &info = InstanceExtensions::get_info(ext_name);
        for (const auto &req : info.requirements) {
            if (!AddRequiredInstanceExtensions(req.name)) {
                return false;
            }
        }
        m_instance_extension_names.push_back(ext_name);
    } else {
        // Device extension: enable only its instance-extension prerequisites here.
        const auto &info = DeviceExtensions::get_info(ext_name);
        for (const auto &req : info.requirements) {
            if (!AddRequiredInstanceExtensions(req.name)) {
                return false;
            }
        }
    }

    return true;
}
604
// True when `inst_ext_name` is already in the list of instance extensions to enable.
bool VkRenderFramework::CanEnableInstanceExtension(const std::string &inst_ext_name) const {
    return std::any_of(m_instance_extension_names.cbegin(), m_instance_extension_names.cend(),
                       [&inst_ext_name](const char *ext) { return inst_ext_name == ext; });
}
609
// Enable a device extension (and recursively its required device extensions)
// on the gpu selected by InitFramework(). Returns false if any extension in
// the dependency chain is unsupported; instance extensions pass through as true.
bool VkRenderFramework::AddRequiredDeviceExtensions(const char *dev_ext_name) {
    // Check if the extension has already been added
    if (CanEnableDeviceExtension(dev_ext_name)) {
        return true;
    }

    // If this is an instance extension, just return true under the assumption instance extensions do not depend on any device
    // extensions.
    const auto &instance_exts_map = InstanceExtensions::get_info_map();
    if (instance_exts_map.count(dev_ext_name) != 0) {
        return true;
    }

    if (!DeviceExtensionSupported(gpu(), nullptr, dev_ext_name)) {
        return false;
    }
    // Added before recursing so cycles in requirement tables can't loop forever.
    m_device_extension_names.push_back(dev_ext_name);

    const auto &info = DeviceExtensions::get_info(dev_ext_name);
    for (const auto &req : info.requirements) {
        if (!AddRequiredDeviceExtensions(req.name)) {
            return false;
        }
    }
    return true;
}
636
// True when `dev_ext_name` is already in the list of device extensions to enable.
bool VkRenderFramework::CanEnableDeviceExtension(const std::string &dev_ext_name) const {
    return std::any_of(m_device_extension_names.cbegin(), m_device_extension_names.cend(),
                       [&dev_ext_name](const char *ext) { return dev_ext_name == ext; });
}
641
// Destroy everything InitFramework()/InitState() created, in reverse
// dependency order: command objects, framebuffer/render pass, render targets
// and depth/stencil, swapchain, device, debug reporter, then the instance.
// Safe to call repeatedly; it no-ops without an instance.
void VkRenderFramework::ShutdownFramework() {
    debug_reporter_.error_monitor_.Reset();

    // Nothing to shut down without a VkInstance
    if (!instance_) return;

    delete m_commandBuffer;
    m_commandBuffer = nullptr;
    delete m_commandPool;
    m_commandPool = nullptr;
    if (m_framebuffer) vk::DestroyFramebuffer(device(), m_framebuffer, NULL);
    m_framebuffer = VK_NULL_HANDLE;
    if (m_renderPass) vk::DestroyRenderPass(device(), m_renderPass, NULL);
    m_renderPass = VK_NULL_HANDLE;

    m_renderTargets.clear();

    delete m_depthStencil;
    m_depthStencil = nullptr;

    // Swapchain requires a live device to destroy.
    if (m_device && m_device->device() != VK_NULL_HANDLE) {
        DestroySwapchain();
    }

    // reset the driver
    delete m_device;
    m_device = nullptr;

    debug_reporter_.Destroy(instance_);

    vk::DestroyInstance(instance_, nullptr);
    instance_ = NULL;  // In case we want to re-initialize
}
675
676 ErrorMonitor &VkRenderFramework::Monitor() { return debug_reporter_.error_monitor_; }
677
678 void VkRenderFramework::GetPhysicalDeviceFeatures(VkPhysicalDeviceFeatures *features) {
679 if (NULL == m_device) {
680 VkDeviceObj *temp_device = new VkDeviceObj(0, gpu_, m_device_extension_names);
681 *features = temp_device->phy().features();
682 delete (temp_device);
683 } else {
684 *features = m_device->phy().features();
685 }
686 }
687
// True when the selected device's name exactly matches the name registered
// for `platform` in vk_gpu_table (compare() == 0 inverted to a bool).
bool VkRenderFramework::IsPlatform(PlatformType platform) {
    return (!vk_gpu_table.find(platform)->second.compare(physDevProps().deviceName));
}

// True when the installed driver reports the given VkDriverId.
bool VkRenderFramework::IsDriver(VkDriverId driver_id) {
    // Assumes api version 1.2+
    auto driver_properties = LvlInitStruct<VkPhysicalDeviceDriverProperties>();
    auto physical_device_properties2 = LvlInitStruct<VkPhysicalDeviceProperties2>(&driver_properties);
    vk::GetPhysicalDeviceProperties2(gpu_, &physical_device_properties2);
    return(driver_properties.driverID == driver_id);
}
699
700 void VkRenderFramework::GetPhysicalDeviceProperties(VkPhysicalDeviceProperties *props) { *props = physDevProps_; }
701
// Create the logical device plus the default per-test objects (depth/stencil,
// command pool, command buffer) and seed the dynamic-state defaults used when
// building pipelines. Unsupported requested device extensions are reported
// and dropped rather than failing device creation.
void VkRenderFramework::InitState(VkPhysicalDeviceFeatures *features, void *create_device_pnext,
                                  const VkCommandPoolCreateFlags flags) {
    const auto ExtensionNotSupportedWithReporting = [this](const char *extension) {
        if (DeviceExtensionSupported(extension))
            return false;
        else {
            ADD_FAILURE() << "InitState(): Requested device extension \"" << extension
                          << "\" is not supported. It will be disabled.";
            return true;
        }
    };

    RemoveIf(m_device_extension_names, ExtensionNotSupportedWithReporting);

    m_device = new VkDeviceObj(0, gpu_, m_device_extension_names, features, create_device_pnext);
    m_device->SetDeviceQueue();

    m_depthStencil = new VkDepthStencilObj(m_device);

    m_render_target_fmt = VkTestFramework::GetFormat(instance_, m_device);

    // Defaults for pipeline dynamic state.
    m_lineWidth = 1.0f;

    m_depthBiasConstantFactor = 0.0f;
    m_depthBiasClamp = 0.0f;
    m_depthBiasSlopeFactor = 0.0f;

    m_blendConstants[0] = 1.0f;
    m_blendConstants[1] = 1.0f;
    m_blendConstants[2] = 1.0f;
    m_blendConstants[3] = 1.0f;

    m_minDepthBounds = 0.f;
    m_maxDepthBounds = 1.f;

    // Stencil defaults.
    m_compareMask = 0xff;
    m_writeMask = 0xff;
    m_reference = 0;

    m_commandPool = new VkCommandPoolObj(m_device, m_device->graphics_queue_node_index_, flags);

    m_commandBuffer = new VkCommandBufferObj(m_device, m_commandPool);
}
745
746 void VkRenderFramework::InitViewport(float width, float height) {
747 VkViewport viewport;
748 VkRect2D scissor;
749 viewport.x = 0;
750 viewport.y = 0;
751 viewport.width = 1.f * width;
752 viewport.height = 1.f * height;
753 viewport.minDepth = 0.f;
754 viewport.maxDepth = 1.f;
755 m_viewports.push_back(viewport);
756
757 scissor.extent.width = (int32_t)width;
758 scissor.extent.height = (int32_t)height;
759 scissor.offset.x = 0;
760 scissor.offset.y = 0;
761 m_scissors.push_back(scissor);
762
763 m_width = width;
764 m_height = height;
765 }
766
// Convenience overloads using the framework's current width/height and the
// default member surface.
void VkRenderFramework::InitViewport() { InitViewport(m_width, m_height); }

bool VkRenderFramework::InitSurface() { return InitSurface(m_width, m_height, m_surface); }

bool VkRenderFramework::InitSurface(float width, float height) { return InitSurface(width, height, m_surface); }
772
#ifdef VK_USE_PLATFORM_WIN32_KHR
// Minimal window procedure for the hidden test window: defer everything to
// the default handler.
LRESULT CALLBACK WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
    return DefWindowProc(hwnd, uMsg, wParam, lParam);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR
778
// Create a platform window (hidden where possible) and a VkSurfaceKHR for it.
// Returns false on any platform-specific failure, true when m_surface ends up
// valid. NOTE(review): the `width`/`height` parameters appear unused — every
// platform path below sizes the window from m_width/m_height, and the
// XCB/Xlib/Android paths write m_surface rather than the `surface` out-param;
// confirm whether that is intentional before relying on the parameters.
bool VkRenderFramework::InitSurface(float width, float height, VkSurfaceKHR &surface) {
#if defined(VK_USE_PLATFORM_WIN32_KHR)
    HINSTANCE window_instance = GetModuleHandle(nullptr);
    const char class_name[] = "test";
    WNDCLASS wc = {};
    wc.lpfnWndProc = WindowProc;
    wc.hInstance = window_instance;
    wc.lpszClassName = class_name;
    RegisterClass(&wc);
    HWND window = CreateWindowEx(0, class_name, 0, 0, 0, 0, (int)m_width, (int)m_height, NULL, NULL, window_instance, NULL);
    ShowWindow(window, SW_HIDE);  // keep the test window off-screen

    VkWin32SurfaceCreateInfoKHR surface_create_info = {};
    surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
    surface_create_info.hinstance = window_instance;
    surface_create_info.hwnd = window;
    VkResult err = vk::CreateWin32SurfaceKHR(instance(), &surface_create_info, nullptr, &surface);
    if (err != VK_SUCCESS) return false;
#endif

#if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
    VkAndroidSurfaceCreateInfoKHR surface_create_info = {};
    surface_create_info.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
    surface_create_info.window = VkTestFramework::window;
    VkResult err = vk::CreateAndroidSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
    if (err != VK_SUCCESS) return false;
#endif

#if defined(VK_USE_PLATFORM_XLIB_KHR)
    assert(m_surface_dpy == nullptr);
    m_surface_dpy = XOpenDisplay(NULL);
    if (m_surface_dpy) {
        int s = DefaultScreen(m_surface_dpy);
        m_surface_window = XCreateSimpleWindow(m_surface_dpy, RootWindow(m_surface_dpy, s), 0, 0, (int)m_width, (int)m_height, 1,
                                               BlackPixel(m_surface_dpy, s), WhitePixel(m_surface_dpy, s));
        VkXlibSurfaceCreateInfoKHR surface_create_info = {};
        surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
        surface_create_info.dpy = m_surface_dpy;
        surface_create_info.window = m_surface_window;
        VkResult err = vk::CreateXlibSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
        if (err != VK_SUCCESS) return false;
    }
#endif

#if defined(VK_USE_PLATFORM_XCB_KHR)
    // XCB is the fallback when Xlib did not produce a surface.
    if (m_surface == VK_NULL_HANDLE) {
        assert(m_surface_xcb_conn == nullptr);
        m_surface_xcb_conn = xcb_connect(NULL, NULL);
        if (m_surface_xcb_conn) {
            xcb_window_t window = xcb_generate_id(m_surface_xcb_conn);
            VkXcbSurfaceCreateInfoKHR surface_create_info = {};
            surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
            surface_create_info.connection = m_surface_xcb_conn;
            surface_create_info.window = window;
            VkResult err = vk::CreateXcbSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
            if (err != VK_SUCCESS) return false;
        }
    }
#endif

    return (m_surface == VK_NULL_HANDLE) ? false : true;
}
841
842 // Makes query to get information about swapchain needed to create a valid swapchain object each test creating a swapchain will need
843 void VkRenderFramework::InitSwapchainInfo() {
844 const VkPhysicalDevice physicalDevice = gpu();
845
846 vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, m_surface, &m_surface_capabilities);
847
848 uint32_t format_count;
849 vk::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, m_surface, &format_count, nullptr);
850 if (format_count != 0) {
851 m_surface_formats.resize(format_count);
852 vk::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, m_surface, &format_count, m_surface_formats.data());
853 }
854
855 uint32_t present_mode_count;
856 vk::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, m_surface, &present_mode_count, nullptr);
857 if (present_mode_count != 0) {
858 m_surface_present_modes.resize(present_mode_count);
859 vk::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, m_surface, &present_mode_count, m_surface_present_modes.data());
860
861 // Shared Present mode has different requirements most tests won't actually want
862 // Implementation required to support a non-shared present mode
863 for (size_t i = 0; i < m_surface_present_modes.size(); i++) {
864 const VkPresentModeKHR present_mode = m_surface_present_modes[i];
865 if ((present_mode != VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR) &&
866 (present_mode != VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR)) {
867 m_surface_non_shared_present_mode = present_mode;
868 break;
869 }
870 }
871 }
872
873 #ifdef VK_USE_PLATFORM_ANDROID_KHR
874 m_surface_composite_alpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
875 #else
876 m_surface_composite_alpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
877 #endif
878 }
879
880 bool VkRenderFramework::InitSwapchain(VkImageUsageFlags imageUsage, VkSurfaceTransformFlagBitsKHR preTransform) {
881 if (InitSurface()) {
882 return InitSwapchain(m_surface, imageUsage, preTransform);
883 }
884 return false;
885 }
886
// Convenience overload: creates the swapchain into the framework's default
// m_swapchain member (no explicit output handle, no old swapchain).
bool VkRenderFramework::InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage,
                                      VkSurfaceTransformFlagBitsKHR preTransform) {
    return InitSwapchain(surface, imageUsage, preTransform, m_swapchain);
}
891
892 bool VkRenderFramework::InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage,
893 VkSurfaceTransformFlagBitsKHR preTransform, VkSwapchainKHR &swapchain,
894 VkSwapchainKHR oldSwapchain) {
895
896 VkBool32 supported;
897 vk::GetPhysicalDeviceSurfaceSupportKHR(gpu(), m_device->graphics_queue_node_index_, surface, &supported);
898 if (!supported) {
899 // Graphics queue does not support present
900 return false;
901 }
902 InitSwapchainInfo();
903
904 VkSwapchainCreateInfoKHR swapchain_create_info = {};
905 swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
906 swapchain_create_info.pNext = 0;
907 swapchain_create_info.surface = surface;
908 swapchain_create_info.minImageCount = m_surface_capabilities.minImageCount;
909 swapchain_create_info.imageFormat = m_surface_formats[0].format;
910 swapchain_create_info.imageColorSpace = m_surface_formats[0].colorSpace;
911 swapchain_create_info.imageExtent = {m_surface_capabilities.minImageExtent.width, m_surface_capabilities.minImageExtent.height};
912 swapchain_create_info.imageArrayLayers = 1;
913 swapchain_create_info.imageUsage = imageUsage;
914 swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
915 swapchain_create_info.preTransform = preTransform;
916 swapchain_create_info.compositeAlpha = m_surface_composite_alpha;
917 swapchain_create_info.presentMode = m_surface_non_shared_present_mode;
918 swapchain_create_info.clipped = VK_FALSE;
919 swapchain_create_info.oldSwapchain = oldSwapchain;
920
921 VkResult err = vk::CreateSwapchainKHR(device(), &swapchain_create_info, nullptr, &swapchain);
922 if (err != VK_SUCCESS) {
923 return false;
924 }
925 uint32_t imageCount = 0;
926 vk::GetSwapchainImagesKHR(device(), swapchain, &imageCount, nullptr);
927 vector<VkImage> swapchainImages;
928 swapchainImages.resize(imageCount);
929 vk::GetSwapchainImagesKHR(device(), swapchain, &imageCount, swapchainImages.data());
930 return true;
931 }
932
#if defined(VK_USE_PLATFORM_XLIB_KHR)
// X error handler that swallows every error (returns 0).  Installed during teardown
// so stray BadDrawable errors do not hit the default handler, which exit()s.
int IgnoreXErrors(Display *, XErrorEvent *) { return 0; }
#endif
936
937 void VkRenderFramework::DestroySwapchain() {
938 if (m_swapchain != VK_NULL_HANDLE) {
939 vk::DestroySwapchainKHR(device(), m_swapchain, nullptr);
940 m_swapchain = VK_NULL_HANDLE;
941 }
942 if (m_surface != VK_NULL_HANDLE) {
943 vk::DestroySurfaceKHR(instance(), m_surface, nullptr);
944 m_surface = VK_NULL_HANDLE;
945 }
946 vk::DeviceWaitIdle(device());
947 #if defined(VK_USE_PLATFORM_XLIB_KHR)
948 if (m_surface_dpy != nullptr) {
949 // Ignore BadDrawable errors we seem to get during shutdown.
950 // The default error handler will exit() and end the test suite.
951 XSetErrorHandler(IgnoreXErrors);
952 XDestroyWindow(m_surface_dpy, m_surface_window);
953 m_surface_window = None;
954 XCloseDisplay(m_surface_dpy);
955 m_surface_dpy = nullptr;
956 XSetErrorHandler(nullptr);
957 }
958 #endif
959 #if defined(VK_USE_PLATFORM_XCB_KHR)
960 if (m_surface_xcb_conn != nullptr) {
961 xcb_disconnect(m_surface_xcb_conn);
962 m_surface_xcb_conn = nullptr;
963 }
964 #endif
965 }
966
// Default target setup: one color attachment, no depth/stencil.
void VkRenderFramework::InitRenderTarget() { InitRenderTarget(1); }
968
// |targets| color attachments, no depth/stencil.
void VkRenderFramework::InitRenderTarget(uint32_t targets) { InitRenderTarget(targets, NULL); }
970
// One color attachment plus the supplied depth/stencil view.
void VkRenderFramework::InitRenderTarget(VkImageView *dsBinding) { InitRenderTarget(1, dsBinding); }
972
973 void VkRenderFramework::InitRenderTarget(uint32_t targets, VkImageView *dsBinding) {
974 vector<VkAttachmentDescription> &attachments = m_renderPass_attachments;
975 vector<VkAttachmentReference> color_references;
976 vector<VkImageView> &bindings = m_framebuffer_attachments;
977 attachments.reserve(targets + 1); // +1 for dsBinding
978 color_references.reserve(targets);
979 bindings.reserve(targets + 1); // +1 for dsBinding
980
981 VkAttachmentDescription att = {};
982 att.format = m_render_target_fmt;
983 att.samples = VK_SAMPLE_COUNT_1_BIT;
984 att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
985 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
986 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
987 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
988 att.initialLayout = (m_clear_via_load_op) ? VK_IMAGE_LAYOUT_UNDEFINED : VK_IMAGE_LAYOUT_GENERAL;
989 att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
990
991 VkAttachmentReference ref = {};
992 ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
993
994 m_renderPassClearValues.clear();
995 VkClearValue clear = {};
996 clear.color = m_clear_color;
997
998 for (uint32_t i = 0; i < targets; i++) {
999 attachments.push_back(att);
1000
1001 ref.attachment = i;
1002 color_references.push_back(ref);
1003
1004 m_renderPassClearValues.push_back(clear);
1005
1006 std::unique_ptr<VkImageObj> img(new VkImageObj(m_device));
1007
1008 VkFormatProperties props;
1009
1010 vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), m_render_target_fmt, &props);
1011
1012 if (props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
1013 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
1014 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
1015 VK_IMAGE_TILING_LINEAR);
1016 } else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
1017 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
1018 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
1019 VK_IMAGE_TILING_OPTIMAL);
1020 } else {
1021 FAIL() << "Neither Linear nor Optimal allowed for render target";
1022 }
1023
1024 bindings.push_back(img->targetView(m_render_target_fmt));
1025 m_renderTargets.push_back(std::move(img));
1026 }
1027
1028 m_renderPass_subpasses.clear();
1029 m_renderPass_subpasses.resize(1);
1030 VkSubpassDescription &subpass = m_renderPass_subpasses[0];
1031 subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
1032 subpass.flags = 0;
1033 subpass.inputAttachmentCount = 0;
1034 subpass.pInputAttachments = NULL;
1035 subpass.colorAttachmentCount = targets;
1036 subpass.pColorAttachments = color_references.data();
1037 subpass.pResolveAttachments = NULL;
1038
1039 VkAttachmentReference ds_reference;
1040 if (dsBinding) {
1041 att.format = m_depth_stencil_fmt;
1042 att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1043 ;
1044 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
1045 att.stencilLoadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1046 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
1047 att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1048 att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1049 attachments.push_back(att);
1050
1051 clear.depthStencil.depth = m_depth_clear_color;
1052 clear.depthStencil.stencil = m_stencil_clear_color;
1053 m_renderPassClearValues.push_back(clear);
1054
1055 bindings.push_back(*dsBinding);
1056
1057 ds_reference.attachment = targets;
1058 ds_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1059 subpass.pDepthStencilAttachment = &ds_reference;
1060 } else {
1061 subpass.pDepthStencilAttachment = NULL;
1062 }
1063
1064 subpass.preserveAttachmentCount = 0;
1065 subpass.pPreserveAttachments = NULL;
1066
1067 VkRenderPassCreateInfo &rp_info = m_renderPass_info;
1068 rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1069 rp_info.attachmentCount = attachments.size();
1070 rp_info.pAttachments = attachments.data();
1071 rp_info.subpassCount = m_renderPass_subpasses.size();
1072 rp_info.pSubpasses = m_renderPass_subpasses.data();
1073
1074 m_renderPass_dependencies.clear();
1075 if (m_addRenderPassSelfDependency) {
1076 m_renderPass_dependencies.resize(1);
1077 VkSubpassDependency &subpass_dep = m_renderPass_dependencies[0];
1078 // Add a subpass self-dependency to subpass 0 of default renderPass
1079 subpass_dep.srcSubpass = 0;
1080 subpass_dep.dstSubpass = 0;
1081 // Just using all framebuffer-space pipeline stages in order to get a reasonably large
1082 // set of bits that can be used for both src & dst
1083 subpass_dep.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
1084 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
1085 subpass_dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
1086 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
1087 // Add all of the gfx mem access bits that correlate to the fb-space pipeline stages
1088 subpass_dep.srcAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
1089 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
1090 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
1091 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
1092 subpass_dep.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
1093 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
1094 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
1095 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
1096 // Must include dep_by_region bit when src & dst both include framebuffer-space stages
1097 subpass_dep.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
1098 }
1099
1100 if (m_additionalSubpassDependencies.size()) {
1101 m_renderPass_dependencies.reserve(m_additionalSubpassDependencies.size() + m_renderPass_dependencies.size());
1102 m_renderPass_dependencies.insert(m_renderPass_dependencies.end(), m_additionalSubpassDependencies.begin(),
1103 m_additionalSubpassDependencies.end());
1104 }
1105
1106 if (m_renderPass_dependencies.size()) {
1107 rp_info.dependencyCount = static_cast<uint32_t>(m_renderPass_dependencies.size());
1108 rp_info.pDependencies = m_renderPass_dependencies.data();
1109 } else {
1110 rp_info.dependencyCount = 0;
1111 rp_info.pDependencies = nullptr;
1112 }
1113
1114 vk::CreateRenderPass(device(), &rp_info, NULL, &m_renderPass);
1115 // Create Framebuffer and RenderPass with color attachments and any
1116 // depth/stencil attachment
1117 VkFramebufferCreateInfo &fb_info = m_framebuffer_info;
1118 fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
1119 fb_info.pNext = NULL;
1120 fb_info.renderPass = m_renderPass;
1121 fb_info.attachmentCount = bindings.size();
1122 fb_info.pAttachments = bindings.data();
1123 fb_info.width = (uint32_t)m_width;
1124 fb_info.height = (uint32_t)m_height;
1125 fb_info.layers = 1;
1126
1127 vk::CreateFramebuffer(device(), &fb_info, NULL, &m_framebuffer);
1128
1129 m_renderPassBeginInfo.renderPass = m_renderPass;
1130 m_renderPassBeginInfo.framebuffer = m_framebuffer;
1131 m_renderPassBeginInfo.renderArea.extent.width = (int32_t)m_width;
1132 m_renderPassBeginInfo.renderArea.extent.height = (int32_t)m_height;
1133 m_renderPassBeginInfo.clearValueCount = m_renderPassClearValues.size();
1134 m_renderPassBeginInfo.pClearValues = m_renderPassClearValues.data();
1135 }
1136
// Destroys the render pass and framebuffer created by InitRenderTarget(), resetting
// both handles so the targets can be rebuilt by a later InitRenderTarget() call.
void VkRenderFramework::DestroyRenderTarget() {
    vk::DestroyRenderPass(device(), m_renderPass, nullptr);
    m_renderPass = VK_NULL_HANDLE;
    vk::DestroyFramebuffer(device(), m_framebuffer, nullptr);
    m_framebuffer = VK_NULL_HANDLE;
}
1143
1144 bool VkRenderFramework::InitFrameworkAndRetrieveFeatures(VkPhysicalDeviceFeatures2KHR &features2) {
1145 if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1146 m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1147 }
1148 else {
1149 printf("Instance extension %s not supported, skipping test\n",
1150 VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1151 return false;
1152 }
1153 InitFramework();
1154
1155 // Cycle through device extensions and check for support
1156 for (auto extension : m_device_extension_names) {
1157 if (!DeviceExtensionSupported(extension)) {
1158 printf("Device extension %s is not supported\n", extension);
1159 return false;
1160 }
1161 }
1162 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
1163 (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(),
1164 "vkGetPhysicalDeviceFeatures2KHR");
1165
1166 if (vkGetPhysicalDeviceFeatures2KHR) {
1167 vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
1168 return true;
1169 }
1170 else {
1171 printf("Cannot use vkGetPhysicalDeviceFeatures to determine available features\n");
1172 return false;
1173 }
1174 }
1175
// Creates the logical device with default settings, then caches the physical-device
// properties and queue-family properties for later queries.
VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj) : vk_testing::Device(obj), id(id) {
    init();

    props = phy().properties();
    queue_props = phy().queue_properties();
}
1182
// Creates the logical device with caller-supplied extensions, optional features, and
// an optional VkDeviceCreateInfo pNext chain, then caches device/queue properties.
VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj, vector<const char *> &extension_names,
                         VkPhysicalDeviceFeatures *features, void *create_device_pnext)
    : vk_testing::Device(obj), id(id) {
    init(extension_names, features, create_device_pnext);

    props = phy().properties();
    queue_props = phy().queue_properties();
}
1191
1192 uint32_t VkDeviceObj::QueueFamilyMatching(VkQueueFlags with, VkQueueFlags without, bool all_bits) {
1193 // Find a queue family with and without desired capabilities
1194 for (uint32_t i = 0; i < queue_props.size(); i++) {
1195 auto flags = queue_props[i].queueFlags;
1196 bool matches = all_bits ? (flags & with) == with : (flags & with) != 0;
1197 if (matches && ((flags & without) == 0) && (queue_props[i].queueCount > 0)) {
1198 return i;
1199 }
1200 }
1201 return UINT32_MAX;
1202 }
1203
// Caches the first graphics queue's handle as the device's default queue.
// Fails the current test if the device exposes no graphics queues at all.
void VkDeviceObj::SetDeviceQueue() {
    ASSERT_NE(true, graphics_queues().empty());
    m_queue = graphics_queues()[0]->handle();
}
1208
1209 VkQueueObj *VkDeviceObj::GetDefaultQueue() {
1210 if (graphics_queues().empty()) return nullptr;
1211 return graphics_queues()[0];
1212 }
1213
1214 VkQueueObj *VkDeviceObj::GetDefaultComputeQueue() {
1215 if (compute_queues().empty()) return nullptr;
1216 return compute_queues()[0];
1217 }
1218
1219 VkDescriptorSetLayoutObj::VkDescriptorSetLayoutObj(const VkDeviceObj *device,
1220 const vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings,
1221 VkDescriptorSetLayoutCreateFlags flags, void *pNext) {
1222 VkDescriptorSetLayoutCreateInfo dsl_ci = {};
1223 dsl_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1224 dsl_ci.pNext = pNext;
1225 dsl_ci.flags = flags;
1226 dsl_ci.bindingCount = static_cast<uint32_t>(descriptor_set_bindings.size());
1227 dsl_ci.pBindings = descriptor_set_bindings.data();
1228
1229 init(*device, dsl_ci);
1230 }
1231
// Starts with an empty descriptor set description; slots are added via the Append* methods.
VkDescriptorSetObj::VkDescriptorSetObj(VkDeviceObj *device) : m_device(device), m_nextSlot(0) {}
1233
1234 VkDescriptorSetObj::~VkDescriptorSetObj() NOEXCEPT {
1235 if (m_set) {
1236 delete m_set;
1237 }
1238 }
1239
1240 int VkDescriptorSetObj::AppendDummy() {
1241 /* request a descriptor but do not update it */
1242 VkDescriptorSetLayoutBinding binding = {};
1243 binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1244 binding.descriptorCount = 1;
1245 binding.binding = m_layout_bindings.size();
1246 binding.stageFlags = VK_SHADER_STAGE_ALL;
1247 binding.pImmutableSamplers = NULL;
1248
1249 m_layout_bindings.push_back(binding);
1250 m_type_counts[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] += binding.descriptorCount;
1251
1252 return m_nextSlot++;
1253 }
1254
1255 int VkDescriptorSetObj::AppendBuffer(VkDescriptorType type, VkConstantBufferObj &constantBuffer) {
1256 assert(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
1257 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
1258 VkDescriptorSetLayoutBinding binding = {};
1259 binding.descriptorType = type;
1260 binding.descriptorCount = 1;
1261 binding.binding = m_layout_bindings.size();
1262 binding.stageFlags = VK_SHADER_STAGE_ALL;
1263 binding.pImmutableSamplers = NULL;
1264
1265 m_layout_bindings.push_back(binding);
1266 m_type_counts[type] += binding.descriptorCount;
1267
1268 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0, type, 1,
1269 &constantBuffer.m_descriptorBufferInfo));
1270
1271 return m_nextSlot++;
1272 }
1273
// Adds a combined image/sampler descriptor that pairs |texture|'s view with
// |sampler|, returning the binding slot assigned to it.
int VkDescriptorSetObj::AppendSamplerTexture(VkSamplerObj *sampler, VkTextureObj *texture) {
    VkDescriptorSetLayoutBinding binding = {};
    binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    binding.descriptorCount = 1;
    binding.binding = m_layout_bindings.size();
    binding.stageFlags = VK_SHADER_STAGE_ALL;
    binding.pImmutableSamplers = NULL;

    m_layout_bindings.push_back(binding);
    m_type_counts[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += binding.descriptorCount;
    VkDescriptorImageInfo tmp = texture->DescriptorImageInfo();
    tmp.sampler = sampler->handle();
    m_imageSamplerDescriptors.push_back(tmp);

    // NOTE: &tmp dangles as soon as this function returns; CreateVKDescriptorSet()
    // repoints pImageInfo at the stored m_imageSamplerDescriptors element before the
    // write is ever used, so the stale pointer is never dereferenced.
    m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0,
                                                                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &tmp));

    return m_nextSlot++;
}
1293
// Handle of the pipeline layout built from this descriptor set's layout.
VkPipelineLayout VkDescriptorSetObj::GetPipelineLayout() const { return m_pipeline_layout.handle(); }
1295
// Handle of the descriptor set layout built by CreateVKDescriptorSet().
VkDescriptorSetLayout VkDescriptorSetObj::GetDescriptorSetLayout() const { return m_layout.handle(); }
1297
1298 VkDescriptorSet VkDescriptorSetObj::GetDescriptorSetHandle() const {
1299 if (m_set)
1300 return m_set->handle();
1301 else
1302 return VK_NULL_HANDLE;
1303 }
1304
// Materializes the Vulkan objects described by the Append* calls: the descriptor
// pool, descriptor set layout, pipeline layout, the descriptor set itself (when any
// resources were appended), and finally performs all recorded descriptor writes.
// NOTE(review): |commandBuffer| is unused here — presumably retained for API
// compatibility with callers; confirm before removing.
void VkDescriptorSetObj::CreateVKDescriptorSet(VkCommandBufferObj *commandBuffer) {
    if (m_type_counts.size()) {
        // create VkDescriptorPool
        VkDescriptorPoolSize poolSize;
        vector<VkDescriptorPoolSize> sizes;
        // One pool-size entry per descriptor type that was appended.
        for (auto it = m_type_counts.begin(); it != m_type_counts.end(); ++it) {
            poolSize.descriptorCount = it->second;
            poolSize.type = it->first;
            sizes.push_back(poolSize);
        }
        VkDescriptorPoolCreateInfo pool = {};
        pool.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
        pool.poolSizeCount = sizes.size();
        pool.maxSets = 1;
        pool.pPoolSizes = sizes.data();
        init(*m_device, pool);
    }

    // create VkDescriptorSetLayout
    VkDescriptorSetLayoutCreateInfo layout = {};
    layout.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layout.bindingCount = m_layout_bindings.size();
    layout.pBindings = m_layout_bindings.data();

    m_layout.init(*m_device, layout);
    vector<const vk_testing::DescriptorSetLayout *> layouts;
    layouts.push_back(&m_layout);

    // create VkPipelineLayout
    // pSetLayouts is left NULL here; the init() overload below supplies the layouts.
    VkPipelineLayoutCreateInfo pipeline_layout = {};
    pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pipeline_layout.setLayoutCount = layouts.size();
    pipeline_layout.pSetLayouts = NULL;

    m_pipeline_layout.init(*m_device, pipeline_layout, layouts);

    if (m_type_counts.size()) {
        // create VkDescriptorSet
        m_set = alloc_sets(*m_device, m_layout);

        // build the update array
        // Patch each deferred write with the real set handle, and repoint image
        // descriptors at their stable storage in m_imageSamplerDescriptors (the
        // pointers recorded at Append time referenced a since-destroyed local).
        size_t imageSamplerCount = 0;
        for (vector<VkWriteDescriptorSet>::iterator it = m_writes.begin(); it != m_writes.end(); it++) {
            it->dstSet = m_set->handle();
            if (it->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
                it->pImageInfo = &m_imageSamplerDescriptors[imageSamplerCount++];
        }

        // do the updates
        m_device->update_descriptor_sets(m_writes);
    }
}
1357
1358 VkRenderpassObj::VkRenderpassObj(VkDeviceObj *dev, const VkFormat format) {
1359 // Create a renderPass with a single color attachment
1360 VkAttachmentReference attach = {};
1361 attach.layout = VK_IMAGE_LAYOUT_GENERAL;
1362
1363 VkSubpassDescription subpass = {};
1364 subpass.pColorAttachments = &attach;
1365 subpass.colorAttachmentCount = 1;
1366
1367 VkRenderPassCreateInfo rpci = {};
1368 rpci.subpassCount = 1;
1369 rpci.pSubpasses = &subpass;
1370 rpci.attachmentCount = 1;
1371
1372 VkAttachmentDescription attach_desc = {};
1373 attach_desc.format = format;
1374 attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
1375 attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1376 attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
1377
1378 rpci.pAttachments = &attach_desc;
1379 rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1380
1381 init(*dev, rpci);
1382 }
1383
1384 VkRenderpassObj::VkRenderpassObj(VkDeviceObj *dev, VkFormat format, bool depthStencil) {
1385 if (!depthStencil) {
1386 VkRenderpassObj(dev, format);
1387 } else {
1388 // Create a renderPass with a depth/stencil attachment
1389 VkAttachmentReference attach = {};
1390 attach.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1391
1392 VkSubpassDescription subpass = {};
1393 subpass.pDepthStencilAttachment = &attach;
1394
1395 VkRenderPassCreateInfo rpci = {};
1396 rpci.subpassCount = 1;
1397 rpci.pSubpasses = &subpass;
1398 rpci.attachmentCount = 1;
1399
1400 VkAttachmentDescription attach_desc = {};
1401 attach_desc.format = format;
1402 attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
1403 attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1404 attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1405
1406 rpci.pAttachments = &attach_desc;
1407 rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1408
1409 init(*dev, rpci);
1410 }
1411 }
1412
// Associates the image wrapper with a device; the actual VkImage is created later
// by the Init* methods, which also set m_mipLevels/m_arrayLayers.
VkImageObj::VkImageObj(VkDeviceObj *dev) {
    m_device = dev;
    // No view yet; layout tracking starts at GENERAL until a transition is recorded.
    m_descriptorImageInfo.imageView = VK_NULL_HANDLE;
    m_descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
    m_arrayLayers = 0;
    m_mipLevels = 0;
}
1420
// Records a single image memory barrier covering every mip level and array layer of
// this image, transitioning from the currently tracked layout to |image_layout|.
// The default access masks (shown in the parameter comments below) are defined at
// the declaration; srcQueueFamilyIndex/dstQueueFamilyIndex allow ownership transfer.
// clang-format off
void VkImageObj::ImageMemoryBarrier(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect,
                                    VkFlags output_mask /*=
                                    VK_ACCESS_HOST_WRITE_BIT |
                                    VK_ACCESS_SHADER_WRITE_BIT |
                                    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                                    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                    VK_MEMORY_OUTPUT_COPY_BIT*/,
                                    VkFlags input_mask /*=
                                    VK_ACCESS_HOST_READ_BIT |
                                    VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
                                    VK_ACCESS_INDEX_READ_BIT |
                                    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
                                    VK_ACCESS_UNIFORM_READ_BIT |
                                    VK_ACCESS_SHADER_READ_BIT |
                                    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
                                    VK_MEMORY_INPUT_COPY_BIT*/, VkImageLayout image_layout,
                                    VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
                                    uint32_t srcQueueFamilyIndex, uint32_t dstQueueFamilyIndex) {
    // clang-format on
    // Whole-image range: all mips, all layers of the requested aspect(s).
    const VkImageSubresourceRange subresourceRange = subresource_range(aspect, 0, m_mipLevels, 0, m_arrayLayers);
    VkImageMemoryBarrier barrier;
    barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange, srcQueueFamilyIndex,
                                   dstQueueFamilyIndex);

    VkImageMemoryBarrier *pmemory_barrier = &barrier;

    // write barrier to the command buffer
    vk::CmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, VK_DEPENDENCY_BY_REGION_BIT, 0, NULL, 0, NULL, 1,
                           pmemory_barrier);
}
1453
// Records a layout transition for this image into |cmd_buf|, choosing src/dst access
// masks narrowed to what the current and target layouts could validly have been used
// for.  No-op when the image is already in |image_layout|; updates the tracked layout.
void VkImageObj::SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout) {
    VkFlags src_mask, dst_mask;
    const VkFlags all_cache_outputs = VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                                      VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
    const VkFlags all_cache_inputs = VK_ACCESS_HOST_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT |
                                     VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
                                     VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
                                     VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT;

    const VkFlags shader_read_inputs = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT;

    // Already in the requested layout — nothing to record.
    if (image_layout == m_descriptorImageInfo.imageLayout) {
        return;
    }

    // Attempt to narrow the src_mask, by what the image could have validly been used for in it's current layout
    switch (m_descriptorImageInfo.imageLayout) {
        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
            src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
            break;
        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
            src_mask = shader_read_inputs;
            break;
        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
            src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
            break;
        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
            src_mask = VK_ACCESS_TRANSFER_READ_BIT;
            break;
        case VK_IMAGE_LAYOUT_UNDEFINED:
            src_mask = 0;
            break;
        default:
            src_mask = all_cache_outputs;  // Only need to worry about writes, as the stage mask will protect reads
    }

    // Narrow the dst mask by the valid accesss for the new layout
    switch (image_layout) {
        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
            // NOTE: not sure why shader read is here...
            dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT;
            break;

        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
            dst_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
            break;

        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
            dst_mask = shader_read_inputs;
            break;

        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
            dst_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
            break;

        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
            dst_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
            break;

        default:
            // Must wait all read and write operations for the completion of the layout transition
            dst_mask = all_cache_inputs | all_cache_outputs;
            break;
    }

    ImageMemoryBarrier(cmd_buf, aspect, src_mask, dst_mask, image_layout);
    m_descriptorImageInfo.imageLayout = image_layout;
}
1522
1523 void VkImageObj::SetLayout(VkImageAspectFlags aspect, VkImageLayout image_layout) {
1524 if (image_layout == m_descriptorImageInfo.imageLayout) {
1525 return;
1526 }
1527
1528 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
1529 VkCommandBufferObj cmd_buf(m_device, &pool);
1530
1531 /* Build command buffer to set image layout in the driver */
1532 cmd_buf.begin();
1533 SetLayout(&cmd_buf, aspect, image_layout);
1534 cmd_buf.end();
1535
1536 cmd_buf.QueueCommandBuffer();
1537 }
1538
// Returns true when an image with |usages| can be created given the format's
// reported |features|.  The feature-flag universe is widened by whichever relevant
// extensions the device has enabled, then each requested usage is checked against
// its corresponding format-feature bit.
bool VkImageObj::IsCompatible(const VkImageUsageFlags usages, const VkFormatFeatureFlags features) {
    VkFormatFeatureFlags all_feature_flags =
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
        VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT |
        VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT |
        VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT |
        VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
    // Extension-gated feature bits only count when the extension is enabled.
    if (m_device->IsEnabledExtension(VK_IMG_FILTER_CUBIC_EXTENSION_NAME)) {
        all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG;
    }

    if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE_1_EXTENSION_NAME)) {
        all_feature_flags |= VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR;
    }

    if (m_device->IsEnabledExtension(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME)) {
        all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT;
    }

    if (m_device->IsEnabledExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
        all_feature_flags |= VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR |
                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR |
                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR |
                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR |
                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR |
                             VK_FORMAT_FEATURE_DISJOINT_BIT_KHR | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR;
    }

    if ((features & all_feature_flags) == 0) return false;  // whole format unsupported

    // Per-usage checks: each usage bit requires its matching format-feature bit.
    if ((usages & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) return false;
    if ((usages & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) return false;
    if ((usages & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) return false;
    if ((usages & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
        return false;

    if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE_1_EXTENSION_NAME)) {
        // WORKAROUND: for DevSim not reporting extended enums, and possibly some drivers too
        const auto all_nontransfer_feature_flags =
            all_feature_flags ^ (VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR);
        const bool transfer_probably_supported_anyway = (features & all_nontransfer_feature_flags) > 0;
        if (!transfer_probably_supported_anyway) {
            if ((usages & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR)) return false;
            if ((usages & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) return false;
        }
    }

    return true;
}
1589 VkImageCreateInfo VkImageObj::ImageCreateInfo2D(uint32_t const width, uint32_t const height, uint32_t const mipLevels,
1590 uint32_t const layers, VkFormat const format, VkFlags const usage,
1591 VkImageTiling const requested_tiling, const std::vector<uint32_t> *queue_families) {
1592 VkImageCreateInfo imageCreateInfo = vk_testing::Image::create_info();
1593 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1594 imageCreateInfo.format = format;
1595 imageCreateInfo.extent.width = width;
1596 imageCreateInfo.extent.height = height;
1597 imageCreateInfo.mipLevels = mipLevels;
1598 imageCreateInfo.arrayLayers = layers;
1599 imageCreateInfo.tiling = requested_tiling; // This will be touched up below...
1600 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1601
1602 // Automatically set sharing mode etc. based on queue family information
1603 if (queue_families && (queue_families->size() > 1)) {
1604 imageCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
1605 imageCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queue_families->size());
1606 imageCreateInfo.pQueueFamilyIndices = queue_families->data();
1607 }
1608 imageCreateInfo.usage = usage;
1609 return imageCreateInfo;
1610 }
1611 void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
1612 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
1613 const vector<uint32_t> *queue_families, bool memory) {
1614 InitNoLayout(ImageCreateInfo2D(width, height, mipLevels, 1, format, usage, requested_tiling, queue_families), reqs, memory);
1615 }
1616
// Create the image without performing any layout transition, picking a tiling
// mode that the format actually supports for the requested usage. The
// requested tiling is preferred; the other mode is tried as a fallback, and
// the test FAILs if neither supports the usage. When 'memory' is false the
// image is created without backing memory (init_no_mem) so a test can bind
// memory itself.
void VkImageObj::InitNoLayout(const VkImageCreateInfo &create_info, VkMemoryPropertyFlags const reqs, bool memory) {
    VkFormatProperties image_fmt;
    // Touch up create info for tiling compatibility...
    auto usage = create_info.usage;
    VkImageTiling requested_tiling = create_info.tiling;
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info.format, &image_fmt);

    if (requested_tiling == VK_IMAGE_TILING_LINEAR) {
        if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
            tiling = VK_IMAGE_TILING_LINEAR;
        } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
            // Fall back to optimal tiling when linear can't support the usage.
            tiling = VK_IMAGE_TILING_OPTIMAL;
        } else {
            FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
                   << ", supported linear features: " << image_fmt.linearTilingFeatures;
        }
    } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
        tiling = VK_IMAGE_TILING_OPTIMAL;
    } else if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
        // Fall back to linear tiling when optimal can't support the usage.
        tiling = VK_IMAGE_TILING_LINEAR;
    } else {
        FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
               << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
    }

    // Re-issue the create info with the tiling that was actually selected.
    VkImageCreateInfo imageCreateInfo = create_info;
    imageCreateInfo.tiling = tiling;

    m_mipLevels = imageCreateInfo.mipLevels;
    m_arrayLayers = imageCreateInfo.arrayLayers;

    // Record the starting layout; SetLayout() transitions happen elsewhere.
    Layout(imageCreateInfo.initialLayout);
    if (memory)
        vk_testing::Image::init(*m_device, imageCreateInfo, reqs);
    else
        vk_testing::Image::init_no_mem(*m_device, imageCreateInfo);
}
1656
1657 void VkImageObj::Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
1658 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
1659 const vector<uint32_t> *queue_families, bool memory) {
1660 Init(ImageCreateInfo2D(width, height, mipLevels, 1, format, usage, requested_tiling, queue_families), reqs, memory);
1661 }
1662
1663 void VkImageObj::Init(const VkImageCreateInfo &create_info, VkMemoryPropertyFlags const reqs, bool memory) {
1664 InitNoLayout(create_info, reqs, memory);
1665
1666 if (!initialized() || !memory) return; // We don't have a valid handle from early stage init, and thus SetLayout will fail
1667
1668 VkImageLayout newLayout;
1669 const auto usage = create_info.usage;
1670 if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
1671 newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1672 else if (usage & VK_IMAGE_USAGE_SAMPLED_BIT)
1673 newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1674 else
1675 newLayout = m_descriptorImageInfo.imageLayout;
1676
1677 VkImageAspectFlags image_aspect = 0;
1678 const auto format = create_info.format;
1679 if (FormatIsDepthAndStencil(format)) {
1680 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
1681 } else if (FormatIsDepthOnly(format)) {
1682 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
1683 } else if (FormatIsStencilOnly(format)) {
1684 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
1685 } else { // color
1686 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
1687 }
1688 SetLayout(image_aspect, newLayout);
1689 }
1690
// Legacy init path: create the image with the given create info, verify the
// requested tiling supports the usage (FAILing the test otherwise), then
// transition the whole image to VK_IMAGE_LAYOUT_GENERAL.
void VkImageObj::init(const VkImageCreateInfo *create_info) {
    VkFormatProperties image_fmt;
    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info->format, &image_fmt);

    // Unlike InitNoLayout(), no tiling fallback is attempted here: the
    // caller's requested tiling must support the requested usage.
    switch (create_info->tiling) {
        case VK_IMAGE_TILING_OPTIMAL:
            if (!IsCompatible(create_info->usage, image_fmt.optimalTilingFeatures)) {
                FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
                       << create_info->usage << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
            }
            break;
        case VK_IMAGE_TILING_LINEAR:
            if (!IsCompatible(create_info->usage, image_fmt.linearTilingFeatures)) {
                FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
                       << create_info->usage << ", supported linear features: " << image_fmt.linearTilingFeatures;
            }
            break;
        default:
            break;
    }
    Layout(create_info->initialLayout);

    vk_testing::Image::init(*m_device, *create_info, 0);
    m_mipLevels = create_info->mipLevels;
    m_arrayLayers = create_info->arrayLayers;

    // Derive the aspect mask from the format so the layout transition below
    // covers the correct aspects.
    VkImageAspectFlags image_aspect = 0;
    if (FormatIsDepthAndStencil(create_info->format)) {
        image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
    } else if (FormatIsDepthOnly(create_info->format)) {
        image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
    } else if (FormatIsStencilOnly(create_info->format)) {
        image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
    } else {  // color
        image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
    }
    SetLayout(image_aspect, VK_IMAGE_LAYOUT_GENERAL);
}
1729
1730 bool VkImageObj::IsCompatibleCheck(const VkImageCreateInfo &create_info) {
1731 VkFormatProperties image_fmt;
1732 vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info.format, &image_fmt);
1733
1734 switch (create_info.tiling) {
1735 case VK_IMAGE_TILING_OPTIMAL:
1736 return IsCompatible(create_info.usage, image_fmt.optimalTilingFeatures);
1737 case VK_IMAGE_TILING_LINEAR:
1738 return IsCompatible(create_info.usage, image_fmt.linearTilingFeatures);
1739 default:
1740 return true;
1741 }
1742 }
1743
// Copy the full extent of 'src_image' into this image using a one-shot
// command buffer. Both images are transitioned into transfer layouts for the
// copy and restored afterwards (an UNDEFINED destination layout is restored
// as GENERAL). Submits the command buffer and waits for completion.
// NOTE(review): always returns VK_SUCCESS; errors surface through the
// framework's error monitor, not through this return value.
VkResult VkImageObj::CopyImage(VkImageObj &src_image) {
    VkImageLayout src_image_layout, dest_image_layout;

    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
    VkCommandBufferObj cmd_buf(m_device, &pool);

    /* Build command buffer to copy staging texture to usable texture */
    cmd_buf.begin();

    /* TODO: Can we determine image aspect from image object? */
    // Remember the source layout so it can be restored after the copy.
    src_image_layout = src_image.Layout();
    src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    // An UNDEFINED destination can't be "restored", so restore to GENERAL.
    dest_image_layout = (this->Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : this->Layout();
    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    // Copy mip 0 / layer 0, color aspect, over the source's whole extent.
    VkImageCopy copy_region = {};
    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.srcSubresource.baseArrayLayer = 0;
    copy_region.srcSubresource.mipLevel = 0;
    copy_region.srcSubresource.layerCount = 1;
    copy_region.srcOffset.x = 0;
    copy_region.srcOffset.y = 0;
    copy_region.srcOffset.z = 0;
    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.dstSubresource.baseArrayLayer = 0;
    copy_region.dstSubresource.mipLevel = 0;
    copy_region.dstSubresource.layerCount = 1;
    copy_region.dstOffset.x = 0;
    copy_region.dstOffset.y = 0;
    copy_region.dstOffset.z = 0;
    copy_region.extent = src_image.extent();

    vk::CmdCopyImage(cmd_buf.handle(), src_image.handle(), src_image.Layout(), handle(), Layout(), 1, &copy_region);

    // Restore both images to their pre-copy layouts.
    src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);

    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);

    cmd_buf.end();

    // Submit and wait for idle.
    cmd_buf.QueueCommandBuffer();

    return VK_SUCCESS;
}
1789
// Same as CopyImage, but in the opposite direction: copy this image's content
// into 'dst_image' over its full extent, restoring both images' layouts
// afterwards (an UNDEFINED destination layout is restored as GENERAL).
// NOTE(review): always returns VK_SUCCESS (see CopyImage).
VkResult VkImageObj::CopyImageOut(VkImageObj &dst_image) {
    VkImageLayout src_image_layout, dest_image_layout;

    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
    VkCommandBufferObj cmd_buf(m_device, &pool);

    cmd_buf.begin();

    // Remember this image's layout so it can be restored after the copy.
    src_image_layout = this->Layout();
    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    // An UNDEFINED destination can't be "restored", so restore to GENERAL.
    dest_image_layout = (dst_image.Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : dst_image.Layout();
    dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    // Copy mip 0 / layer 0, color aspect, over the destination's whole extent.
    VkImageCopy copy_region = {};
    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.srcSubresource.baseArrayLayer = 0;
    copy_region.srcSubresource.mipLevel = 0;
    copy_region.srcSubresource.layerCount = 1;
    copy_region.srcOffset.x = 0;
    copy_region.srcOffset.y = 0;
    copy_region.srcOffset.z = 0;
    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy_region.dstSubresource.baseArrayLayer = 0;
    copy_region.dstSubresource.mipLevel = 0;
    copy_region.dstSubresource.layerCount = 1;
    copy_region.dstOffset.x = 0;
    copy_region.dstOffset.y = 0;
    copy_region.dstOffset.z = 0;
    copy_region.extent = dst_image.extent();

    vk::CmdCopyImage(cmd_buf.handle(), handle(), Layout(), dst_image.handle(), dst_image.Layout(), 1, &copy_region);

    // Restore both images to their pre-copy layouts.
    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);

    dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);

    cmd_buf.end();

    // Submit and wait for idle.
    cmd_buf.QueueCommandBuffer();

    return VK_SUCCESS;
}
1834
// Return 16x16 pixel block: read back a 16x16 region of this image by copying
// it into a host-visible linear staging image and returning the raw 32-bit
// pixel values. The staging image's 16x16 extent drives both the copy and the
// read loops. NOTE(review): assumes this image's content is meaningful as
// 32-bit B8G8R8A8 texels — confirm at call sites.
std::array<std::array<uint32_t, 16>, 16> VkImageObj::Read() {
    VkImageObj stagingImage(m_device);
    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    stagingImage.Init(16, 16, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                      VK_IMAGE_TILING_LINEAR, reqs);
    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    // Row pitch is needed to walk the linear image's memory correctly.
    VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));
    CopyImageOut(stagingImage);
    void *data = stagingImage.MapMemory();
    std::array<std::array<uint32_t, 16>, 16> m = {};
    if (data) {  // Mapping can fail; return zeros in that case.
        for (uint32_t y = 0; y < stagingImage.extent().height; y++) {
            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
            for (uint32_t x = 0; x < stagingImage.extent().width; x++) m[y][x] = row[x];
        }
    }
    stagingImage.UnmapMemory();
    return m;
}
1856
// Build a 16x16 sampled texture filled with a 2x2-repeating checkerboard of
// 'colors' (defaults to red/green when 'colors' is null). The pattern is
// written into a host-visible linear staging image and then copied into this
// optimal-tiling image on the GPU; an image view is created for sampling.
VkTextureObj::VkTextureObj(VkDeviceObj *device, uint32_t *colors) : VkImageObj(device) {
    m_device = device;
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    // Default checkerboard colors (0xAARRGGBB): opaque red and opaque green.
    uint32_t tex_colors[2] = {0xffff0000, 0xff00ff00};
    void *data;
    uint32_t x, y;
    VkImageObj stagingImage(device);
    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    stagingImage.Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                      VK_IMAGE_TILING_LINEAR, reqs);
    // Row pitch is needed to walk the linear staging image's memory correctly.
    VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));

    if (colors == NULL) colors = tex_colors;

    // View create info; 'image' is filled in after the image exists.
    VkImageViewCreateInfo view = {};
    view.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    view.pNext = NULL;
    view.image = VK_NULL_HANDLE;
    view.viewType = VK_IMAGE_VIEW_TYPE_2D;
    view.format = tex_format;
    view.components.r = VK_COMPONENT_SWIZZLE_R;
    view.components.g = VK_COMPONENT_SWIZZLE_G;
    view.components.b = VK_COMPONENT_SWIZZLE_B;
    view.components.a = VK_COMPONENT_SWIZZLE_A;
    view.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    view.subresourceRange.baseMipLevel = 0;
    view.subresourceRange.levelCount = 1;
    view.subresourceRange.baseArrayLayer = 0;
    view.subresourceRange.layerCount = 1;

    /* create image */
    Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
    // GENERAL layout so the host-mapped writes below are valid.
    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    /* create image view */
    view.image = handle();
    m_textureView.init(*m_device, view);
    m_descriptorImageInfo.imageView = m_textureView.handle();

    data = stagingImage.MapMemory();

    // Write the checkerboard: (x&1)^(y&1) alternates between the two colors.
    for (y = 0; y < extent().height; y++) {
        uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
        for (x = 0; x < extent().width; x++) row[x] = colors[(x & 1) ^ (y & 1)];
    }
    stagingImage.UnmapMemory();
    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
    // GPU copy from the staging image into this texture.
    VkImageObj::CopyImage(stagingImage);
}
1907
1908 VkSamplerObj::VkSamplerObj(VkDeviceObj *device) {
1909 m_device = device;
1910
1911 VkSamplerCreateInfo samplerCreateInfo;
1912 memset(&samplerCreateInfo, 0, sizeof(samplerCreateInfo));
1913 samplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
1914 samplerCreateInfo.magFilter = VK_FILTER_NEAREST;
1915 samplerCreateInfo.minFilter = VK_FILTER_NEAREST;
1916 samplerCreateInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
1917 samplerCreateInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1918 samplerCreateInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1919 samplerCreateInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1920 samplerCreateInfo.mipLodBias = 0.0;
1921 samplerCreateInfo.anisotropyEnable = VK_FALSE;
1922 samplerCreateInfo.maxAnisotropy = 1;
1923 samplerCreateInfo.compareOp = VK_COMPARE_OP_NEVER;
1924 samplerCreateInfo.minLod = 0.0;
1925 samplerCreateInfo.maxLod = 0.0;
1926 samplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
1927 samplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
1928
1929 init(*m_device, samplerCreateInfo);
1930 }
1931
1932 /*
1933 * Basic ConstantBuffer constructor. Then use create methods to fill in the
1934 * details.
1935 */
1936 VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkBufferUsageFlags usage) {
1937 m_device = device;
1938
1939 memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));
1940
1941 // Special case for usages outside of original limits of framework
1942 if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) != usage) {
1943 init_no_mem(*m_device, create_info(0, usage));
1944 }
1945 }
1946
1947 VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize allocationSize, const void *data,
1948 VkBufferUsageFlags usage) {
1949 m_device = device;
1950
1951 memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));
1952
1953 VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1954
1955 if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) == usage) {
1956 init_as_src_and_dst(*m_device, allocationSize, reqs);
1957 } else {
1958 init(*m_device, create_info(allocationSize, usage), reqs);
1959 }
1960
1961 void *pData = memory().map();
1962 memcpy(pData, data, static_cast<size_t>(allocationSize));
1963 memory().unmap();
1964
1965 /*
1966 * Constant buffers are going to be used as vertex input buffers
1967 * or as shader uniform buffers. So, we'll create the shaderbuffer
1968 * descriptor here so it's ready if needed.
1969 */
1970 this->m_descriptorBufferInfo.buffer = handle();
1971 this->m_descriptorBufferInfo.offset = 0;
1972 this->m_descriptorBufferInfo.range = allocationSize;
1973 }
1974
1975 VkPipelineShaderStageCreateInfo const &VkShaderObj::GetStageCreateInfo() const { return m_stage_info; }
1976
1977 VkShaderObj::VkShaderObj(VkDeviceObj &device, VkShaderStageFlagBits stage, char const *name, const VkSpecializationInfo *specInfo)
1978 : m_device(device) {
1979 m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1980 m_stage_info.pNext = nullptr;
1981 m_stage_info.flags = 0;
1982 m_stage_info.stage = stage;
1983 m_stage_info.module = VK_NULL_HANDLE;
1984 m_stage_info.pName = name;
1985 m_stage_info.pSpecializationInfo = specInfo;
1986 }
1987
// Convenience constructor: compile GLSL source and build the shader module in
// one step. NOTE(review): the InitFromGLSL() result is ignored here, so a
// compile failure surfaces later as a null module handle.
VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderStageFlagBits stage, VkRenderFramework *framework,
                         char const *name, bool debug, const VkSpecializationInfo *specInfo, const spv_target_env env)
    : VkShaderObj(*device, stage, name, specInfo) {
    InitFromGLSL(*framework, shader_code, debug, env);
}
1993
1994 bool VkShaderObj::InitFromGLSL(VkRenderFramework &framework, const char *shader_code, bool debug, const spv_target_env env) {
1995 std::vector<uint32_t> spv;
1996 framework.GLSLtoSPV(&m_device.props.limits, m_stage_info.stage, shader_code, spv, debug, env);
1997
1998 VkShaderModuleCreateInfo moduleCreateInfo = {};
1999 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
2000 moduleCreateInfo.codeSize = spv.size() * sizeof(uint32_t);
2001 moduleCreateInfo.pCode = spv.data();
2002
2003 init(m_device, moduleCreateInfo);
2004 m_stage_info.module = handle();
2005 return VK_NULL_HANDLE != handle();
2006 }
2007
2008 // Because shaders are currently validated at pipeline creation time, there are test cases that might fail shader module creation
2009 // due to supplying an invalid/unknown SPIR-V capability/operation. This is called after VkShaderObj creation when tests are found
2010 // to crash on a CI device
2011 VkResult VkShaderObj::InitFromGLSLTry(VkRenderFramework &framework, const char *shader_code, bool debug, const spv_target_env env) {
2012 std::vector<uint32_t> spv;
2013 framework.GLSLtoSPV(&m_device.props.limits, m_stage_info.stage, shader_code, spv, debug, env);
2014
2015 VkShaderModuleCreateInfo moduleCreateInfo = {};
2016 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
2017 moduleCreateInfo.codeSize = spv.size() * sizeof(uint32_t);
2018 moduleCreateInfo.pCode = spv.data();
2019
2020 const auto result = init_try(m_device, moduleCreateInfo);
2021 m_stage_info.module = handle();
2022 return result;
2023 }
2024
// Convenience constructor: assemble SPIR-V text and build the shader module in
// one step. NOTE(review): 'spv_source' is taken by value (matches the header
// declaration) and the InitFromASM() result is ignored; an assembly failure
// surfaces later as a null module handle.
VkShaderObj::VkShaderObj(VkDeviceObj *device, const string spv_source, VkShaderStageFlagBits stage, VkRenderFramework *framework,
                         char const *name, const VkSpecializationInfo *specInfo, const spv_target_env env)
    : VkShaderObj(*device, stage, name, specInfo) {
    InitFromASM(*framework, spv_source, env);
}
2030
2031 bool VkShaderObj::InitFromASM(VkRenderFramework &framework, const std::string &spv_source, const spv_target_env env) {
2032 vector<uint32_t> spv;
2033 framework.ASMtoSPV(env, 0, spv_source.data(), spv);
2034
2035 VkShaderModuleCreateInfo moduleCreateInfo = {};
2036 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
2037 moduleCreateInfo.codeSize = spv.size() * sizeof(uint32_t);
2038 moduleCreateInfo.pCode = spv.data();
2039
2040 init(m_device, moduleCreateInfo);
2041 m_stage_info.module = handle();
2042 return VK_NULL_HANDLE != handle();
2043 }
2044
2045 VkResult VkShaderObj::InitFromASMTry(VkRenderFramework &framework, const std::string &spv_source, const spv_target_env spv_env) {
2046 vector<uint32_t> spv;
2047 framework.ASMtoSPV(spv_env, 0, spv_source.data(), spv);
2048
2049 VkShaderModuleCreateInfo moduleCreateInfo = {};
2050 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
2051 moduleCreateInfo.codeSize = spv.size() * sizeof(uint32_t);
2052 moduleCreateInfo.pCode = spv.data();
2053
2054 const auto result = init_try(m_device, moduleCreateInfo);
2055 m_stage_info.module = handle();
2056 return result;
2057 }
2058
2059 // static
2060 std::unique_ptr<VkShaderObj> VkShaderObj::CreateFromGLSL(VkDeviceObj &dev, VkRenderFramework &framework,
2061 VkShaderStageFlagBits stage, const std::string &code,
2062 const char *entry_point, const VkSpecializationInfo *spec_info,
2063 const spv_target_env spv_env, bool debug) {
2064 auto shader = layer_data::make_unique<VkShaderObj>(dev, stage, entry_point, spec_info);
2065 if (VK_SUCCESS == shader->InitFromGLSLTry(framework, code.c_str(), debug, spv_env)) {
2066 return shader;
2067 }
2068 return {};
2069 }
2070
2071 // static
2072 std::unique_ptr<VkShaderObj> VkShaderObj::CreateFromASM(VkDeviceObj &dev, VkRenderFramework &framework, VkShaderStageFlagBits stage,
2073 const std::string &code, const char *entry_point,
2074 const VkSpecializationInfo *spec_info, const spv_target_env spv_env) {
2075 auto shader = layer_data::make_unique<VkShaderObj>(dev, stage, entry_point, spec_info);
2076 if (VK_SUCCESS == shader->InitFromASMTry(framework, code.c_str(), spv_env)) {
2077 return shader;
2078 }
2079 return {};
2080 }
2081
2082 VkPipelineLayoutObj::VkPipelineLayoutObj(VkDeviceObj *device, const vector<const VkDescriptorSetLayoutObj *> &descriptor_layouts,
2083 const vector<VkPushConstantRange> &push_constant_ranges) {
2084 VkPipelineLayoutCreateInfo pl_ci = {};
2085 pl_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
2086 pl_ci.pushConstantRangeCount = static_cast<uint32_t>(push_constant_ranges.size());
2087 pl_ci.pPushConstantRanges = push_constant_ranges.data();
2088
2089 auto descriptor_layouts_unwrapped = MakeTestbindingHandles<const vk_testing::DescriptorSetLayout>(descriptor_layouts);
2090
2091 init(*device, pl_ci, descriptor_layouts_unwrapped);
2092 }
2093
2094 void VkPipelineLayoutObj::Reset() { *this = VkPipelineLayoutObj(); }
2095
// Initialize every pipeline sub-state to a sane default so tests can tweak
// only what they care about before CreateVKPipeline(): triangle-list input
// assembly, one viewport/scissor (pointers filled in later), back-face
// culling with fill mode, single-sample MSAA, and default line rasterization
// chained onto the rasterization state via pNext.
VkPipelineObj::VkPipelineObj(VkDeviceObj *device) {
    m_device = device;

    // Vertex input: no bindings or attributes until Add*() is called.
    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    m_vi_state.pNext = nullptr;
    m_vi_state.flags = 0;
    m_vi_state.vertexBindingDescriptionCount = 0;
    m_vi_state.pVertexBindingDescriptions = nullptr;
    m_vi_state.vertexAttributeDescriptionCount = 0;
    m_vi_state.pVertexAttributeDescriptions = nullptr;

    // Input assembly: triangle list, no primitive restart.
    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    m_ia_state.pNext = nullptr;
    m_ia_state.flags = 0;
    m_ia_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    m_ia_state.primitiveRestartEnable = VK_FALSE;

    // No tessellation state unless SetTessellation() is called.
    m_te_state = nullptr;

    // Viewport: one viewport/scissor; the pointers are resolved in
    // InitGraphicsPipelineCreateInfo() (or made dynamic when left empty).
    m_vp_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    m_vp_state.pNext = VK_NULL_HANDLE;
    m_vp_state.flags = 0;
    m_vp_state.viewportCount = 1;
    m_vp_state.scissorCount = 1;
    m_vp_state.pViewports = nullptr;
    m_vp_state.pScissors = nullptr;

    // Rasterization: fill mode, back-face culling, clockwise front face.
    // m_line_state is chained via pNext and must stay chained (see
    // SetRasterization()).
    m_rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    m_rs_state.pNext = &m_line_state;
    m_rs_state.flags = 0;
    m_rs_state.depthClampEnable = VK_FALSE;
    m_rs_state.rasterizerDiscardEnable = VK_FALSE;
    m_rs_state.polygonMode = VK_POLYGON_MODE_FILL;
    m_rs_state.cullMode = VK_CULL_MODE_BACK_BIT;
    m_rs_state.frontFace = VK_FRONT_FACE_CLOCKWISE;
    m_rs_state.depthBiasEnable = VK_FALSE;
    m_rs_state.depthBiasConstantFactor = 0.0f;
    m_rs_state.depthBiasClamp = 0.0f;
    m_rs_state.depthBiasSlopeFactor = 0.0f;
    m_rs_state.lineWidth = 1.0f;

    // Line rasterization (VK_EXT_line_rasterization): defaults, no stipple.
    m_line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
    m_line_state.pNext = nullptr;
    m_line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
    m_line_state.stippledLineEnable = VK_FALSE;
    m_line_state.lineStippleFactor = 0;
    m_line_state.lineStipplePattern = 0;

    // Multisample: single sample, everything else off.
    m_ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    m_ms_state.pNext = nullptr;
    m_ms_state.flags = 0;
    m_ms_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    m_ms_state.sampleShadingEnable = VK_FALSE;
    m_ms_state.minSampleShading = 0.0f;
    m_ms_state.pSampleMask = nullptr;
    m_ms_state.alphaToCoverageEnable = VK_FALSE;
    m_ms_state.alphaToOneEnable = VK_FALSE;

    // No depth/stencil state unless SetDepthStencil() is called.
    m_ds_state = nullptr;

    // Color blend: zeroed except for opaque-white blend constants; the
    // attachment array is filled in by AddColorAttachment().
    memset(&m_cb_state, 0, sizeof(m_cb_state));
    m_cb_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    m_cb_state.blendConstants[0] = 1.0f;
    m_cb_state.blendConstants[1] = 1.0f;
    m_cb_state.blendConstants[2] = 1.0f;
    m_cb_state.blendConstants[3] = 1.0f;

    // Dynamic state is built lazily in InitGraphicsPipelineCreateInfo().
    memset(&m_pd_state, 0, sizeof(m_pd_state));
}
2165
2166 void VkPipelineObj::AddShader(VkShaderObj *shader) { m_shaderStages.push_back(shader->GetStageCreateInfo()); }
2167
2168 void VkPipelineObj::AddShader(VkPipelineShaderStageCreateInfo const &createInfo) { m_shaderStages.push_back(createInfo); }
2169
2170 void VkPipelineObj::AddVertexInputAttribs(VkVertexInputAttributeDescription *vi_attrib, uint32_t count) {
2171 m_vi_state.pVertexAttributeDescriptions = vi_attrib;
2172 m_vi_state.vertexAttributeDescriptionCount = count;
2173 }
2174
2175 void VkPipelineObj::AddVertexInputBindings(VkVertexInputBindingDescription *vi_binding, uint32_t count) {
2176 m_vi_state.pVertexBindingDescriptions = vi_binding;
2177 m_vi_state.vertexBindingDescriptionCount = count;
2178 }
2179
2180 void VkPipelineObj::AddColorAttachment(uint32_t binding, const VkPipelineColorBlendAttachmentState &att) {
2181 if (binding + 1 > m_colorAttachments.size()) {
2182 m_colorAttachments.resize(binding + 1);
2183 }
2184 m_colorAttachments[binding] = att;
2185 }
2186
2187 void VkPipelineObj::SetDepthStencil(const VkPipelineDepthStencilStateCreateInfo *ds_state) { m_ds_state = ds_state; }
2188
2189 void VkPipelineObj::SetViewport(const vector<VkViewport> viewports) {
2190 m_viewports = viewports;
2191 // If we explicitly set a null viewport, pass it through to create info
2192 // but preserve viewportCount because it musn't change
2193 if (m_viewports.size() == 0) {
2194 m_vp_state.pViewports = nullptr;
2195 }
2196 }
2197
2198 void VkPipelineObj::SetScissor(const vector<VkRect2D> scissors) {
2199 m_scissors = scissors;
2200 // If we explicitly set a null scissor, pass it through to create info
2201 // but preserve scissorCount because it musn't change
2202 if (m_scissors.size() == 0) {
2203 m_vp_state.pScissors = nullptr;
2204 }
2205 }
2206
2207 void VkPipelineObj::MakeDynamic(VkDynamicState state) {
2208 /* Only add a state once */
2209 for (auto it = m_dynamic_state_enables.begin(); it != m_dynamic_state_enables.end(); it++) {
2210 if ((*it) == state) return;
2211 }
2212 m_dynamic_state_enables.push_back(state);
2213 }
2214
2215 void VkPipelineObj::SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state) { m_ms_state = *ms_state; }
2216
2217 void VkPipelineObj::SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state) { m_ia_state = *ia_state; }
2218
// Copy the caller's rasterization state over the defaults, then re-chain our
// line-state struct: the struct copy above clobbers pNext, and m_line_state
// must stay chained for SetLineState() to take effect.
void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) {
    m_rs_state = *rs_state;
    m_rs_state.pNext = &m_line_state;
}
2223
2224 void VkPipelineObj::SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state) { m_te_state = te_state; }
2225
2226 void VkPipelineObj::SetLineState(const VkPipelineRasterizationLineStateCreateInfoEXT *line_state) { m_line_state = *line_state; }
2227
// Populate 'gp_ci' from the accumulated pipeline sub-states. Viewports and
// scissors that were never set are promoted to dynamic states so the pipeline
// is still valid without them. Note this mutates member state (m_vp_state,
// m_cb_state, m_pd_state, m_dynamic_state_enables), so the object must
// outlive the create info it fills in.
void VkPipelineObj::InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci) {
    gp_ci->stageCount = m_shaderStages.size();
    gp_ci->pStages = m_shaderStages.size() ? m_shaderStages.data() : nullptr;

    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    gp_ci->pVertexInputState = &m_vi_state;

    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    gp_ci->pInputAssemblyState = &m_ia_state;

    gp_ci->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    gp_ci->pNext = NULL;
    gp_ci->flags = 0;

    // Hook up whatever color attachments were added via AddColorAttachment().
    m_cb_state.attachmentCount = m_colorAttachments.size();
    m_cb_state.pAttachments = m_colorAttachments.data();

    // Explicit viewports when provided, otherwise make viewport dynamic.
    if (m_viewports.size() > 0) {
        m_vp_state.viewportCount = m_viewports.size();
        m_vp_state.pViewports = m_viewports.data();
    } else {
        MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
    }

    // Explicit scissors when provided, otherwise make scissor dynamic.
    if (m_scissors.size() > 0) {
        m_vp_state.scissorCount = m_scissors.size();
        m_vp_state.pScissors = m_scissors.data();
    } else {
        MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
    }

    // Rebuild the dynamic-state struct from scratch each call; only attach it
    // when at least one dynamic state is enabled.
    memset(&m_pd_state, 0, sizeof(m_pd_state));
    if (m_dynamic_state_enables.size() > 0) {
        m_pd_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
        m_pd_state.dynamicStateCount = m_dynamic_state_enables.size();
        m_pd_state.pDynamicStates = m_dynamic_state_enables.data();
        gp_ci->pDynamicState = &m_pd_state;
    }

    gp_ci->subpass = 0;
    gp_ci->pViewportState = &m_vp_state;
    gp_ci->pRasterizationState = &m_rs_state;
    gp_ci->pMultisampleState = &m_ms_state;
    gp_ci->pDepthStencilState = m_ds_state;
    gp_ci->pColorBlendState = &m_cb_state;
    gp_ci->pTessellationState = m_te_state;
}
2275
2276 VkResult VkPipelineObj::CreateVKPipeline(VkPipelineLayout layout, VkRenderPass render_pass, VkGraphicsPipelineCreateInfo *gp_ci) {
2277 VkGraphicsPipelineCreateInfo info = {};
2278
2279 // if not given a CreateInfo, create and initialize a local one.
2280 if (gp_ci == nullptr) {
2281 gp_ci = &info;
2282 InitGraphicsPipelineCreateInfo(gp_ci);
2283 }
2284
2285 gp_ci->layout = layout;
2286 gp_ci->renderPass = render_pass;
2287
2288 return init_try(*m_device, *gp_ci);
2289 }
2290
2291 VkCommandBufferObj::VkCommandBufferObj(VkDeviceObj *device, VkCommandPoolObj *pool, VkCommandBufferLevel level, VkQueueObj *queue) {
2292 m_device = device;
2293 if (queue) {
2294 m_queue = queue;
2295 } else {
2296 m_queue = m_device->GetDefaultQueue();
2297 }
2298 assert(m_queue);
2299
2300 auto create_info = vk_testing::CommandBuffer::create_info(pool->handle());
2301 create_info.level = level;
2302 init(*device, create_info);
2303 }
2304
// Records a vkCmdPipelineBarrier into this command buffer: execution scopes
// src_stages -> dest_stages with the supplied memory/buffer/image barrier arrays.
void VkCommandBufferObj::PipelineBarrier(VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
                                         VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                                         const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
                                         const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
                                         const VkImageMemoryBarrier *pImageMemoryBarriers) {
    vk::CmdPipelineBarrier(handle(), src_stages, dest_stages, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
                           bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
2313
2314 void VkCommandBufferObj::PipelineBarrier2KHR(const VkDependencyInfoKHR *pDependencyInfo) {
2315 auto fpCmdPipelineBarrier2KHR =
2316 (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");
2317 assert(fpCmdPipelineBarrier2KHR != nullptr);
2318
2319 fpCmdPipelineBarrier2KHR(handle(), pDependencyInfo);
2320 }
2321
2322 void VkCommandBufferObj::ClearAllBuffers(const vector<std::unique_ptr<VkImageObj>> &color_objs, VkClearColorValue clear_color,
2323 VkDepthStencilObj *depth_stencil_obj, float depth_clear_value,
2324 uint32_t stencil_clear_value) {
2325 // whatever we want to do, we do it to the whole buffer
2326 VkImageSubresourceRange subrange = {};
2327 // srRange.aspectMask to be set later
2328 subrange.baseMipLevel = 0;
2329 // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
2330 subrange.levelCount = 1; // VK_REMAINING_MIP_LEVELS;
2331 subrange.baseArrayLayer = 0;
2332 // TODO: Mesa crashing with VK_REMAINING_ARRAY_LAYERS
2333 subrange.layerCount = 1; // VK_REMAINING_ARRAY_LAYERS;
2334
2335 const VkImageLayout clear_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2336
2337 for (const auto &color_obj : color_objs) {
2338 subrange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2339 color_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
2340 color_obj->SetLayout(this, subrange.aspectMask, clear_layout);
2341 ClearColorImage(color_obj->image(), clear_layout, &clear_color, 1, &subrange);
2342 }
2343
2344 if (depth_stencil_obj && depth_stencil_obj->Initialized()) {
2345 subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
2346 if (FormatIsDepthOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
2347 if (FormatIsStencilOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
2348
2349 depth_stencil_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
2350 depth_stencil_obj->SetLayout(this, subrange.aspectMask, clear_layout);
2351
2352 VkClearDepthStencilValue clear_value = {depth_clear_value, stencil_clear_value};
2353 ClearDepthStencilImage(depth_stencil_obj->handle(), clear_layout, &clear_value, 1, &subrange);
2354 }
2355 }
2356
// Records vkCmdFillBuffer: fills fill_size bytes of buffer, starting at offset,
// with the repeated 32-bit word `data`.
void VkCommandBufferObj::FillBuffer(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize fill_size, uint32_t data) {
    vk::CmdFillBuffer(handle(), buffer, offset, fill_size, data);
}
2360
// Records vkCmdUpdateBuffer: inline-copies dataSize bytes from pData into
// buffer at dstOffset.
void VkCommandBufferObj::UpdateBuffer(VkBuffer buffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    vk::CmdUpdateBuffer(handle(), buffer, dstOffset, dataSize, pData);
}
2364
// Records vkCmdCopyImage for the given regions between the two images.
void VkCommandBufferObj::CopyImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                   uint32_t regionCount, const VkImageCopy *pRegions) {
    vk::CmdCopyImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}
2369
// Records vkCmdResolveImage: resolves multisampled srcImage regions into dstImage.
void VkCommandBufferObj::ResolveImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) {
    vk::CmdResolveImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}
2374
// Records vkCmdClearColorImage over the given subresource ranges.
void VkCommandBufferObj::ClearColorImage(VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                         uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vk::CmdClearColorImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}
2379
// Records vkCmdClearDepthStencilImage over the given subresource ranges.
// NOTE(review): the parameter is named pColor but carries the depth/stencil
// clear value — consider renaming in the header for clarity.
void VkCommandBufferObj::ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
                                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vk::CmdClearDepthStencilImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}
2384
// Convenience overload: build the acceleration structure with no instance data.
void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer) {
    BuildAccelerationStructure(as, scratchBuffer, VK_NULL_HANDLE);
}
2388
2389 void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer, VkBuffer instanceData) {
2390 PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
2391 (PFN_vkCmdBuildAccelerationStructureNV)vk::GetDeviceProcAddr(as->dev(), "vkCmdBuildAccelerationStructureNV");
2392 assert(vkCmdBuildAccelerationStructureNV != nullptr);
2393
2394 vkCmdBuildAccelerationStructureNV(handle(), &as->info(), instanceData, 0, VK_FALSE, as->handle(), VK_NULL_HANDLE, scratchBuffer,
2395 0);
2396 }
2397
2398 void VkCommandBufferObj::PrepareAttachments(const vector<std::unique_ptr<VkImageObj>> &color_atts,
2399 VkDepthStencilObj *depth_stencil_att) {
2400 for (const auto &color_att : color_atts) {
2401 color_att->SetLayout(this, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
2402 }
2403
2404 if (depth_stencil_att && depth_stencil_att->Initialized()) {
2405 VkImageAspectFlags aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
2406 if (FormatIsDepthOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
2407 if (FormatIsStencilOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
2408
2409 depth_stencil_att->SetLayout(this, aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
2410 }
2411 }
2412
// Records vkCmdBeginRenderPass with the supplied begin-info and subpass contents.
void VkCommandBufferObj::BeginRenderPass(const VkRenderPassBeginInfo &info, VkSubpassContents contents) {
    vk::CmdBeginRenderPass(handle(), &info, contents);
}
2416
// Records vkCmdEndRenderPass, closing the currently-open render pass instance.
void VkCommandBufferObj::EndRenderPass() { vk::CmdEndRenderPass(handle()); }
2418
2419 void VkCommandBufferObj::BeginRendering(const VkRenderingInfoKHR &renderingInfo) {
2420 PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR =
2421 (PFN_vkCmdBeginRenderingKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdBeginRenderingKHR");
2422 assert(vkCmdBeginRenderingKHR != nullptr);
2423
2424 vkCmdBeginRenderingKHR(handle(), &renderingInfo);
2425 }
2426
2427 void VkCommandBufferObj::EndRendering() {
2428 PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR =
2429 (PFN_vkCmdEndRenderingKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdEndRenderingKHR");
2430 assert(vkCmdEndRenderingKHR != nullptr);
2431
2432 vkCmdEndRenderingKHR(handle());
2433 }
2434
// Records vkCmdSetViewport for dynamic viewport state.
void VkCommandBufferObj::SetViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) {
    vk::CmdSetViewport(handle(), firstViewport, viewportCount, pViewports);
}
2438
// Records vkCmdSetStencilReference for dynamic stencil reference state.
void VkCommandBufferObj::SetStencilReference(VkStencilFaceFlags faceMask, uint32_t reference) {
    vk::CmdSetStencilReference(handle(), faceMask, reference);
}
2442
// Records vkCmdDrawIndexed with the given index/instance ranges and vertex offset.
void VkCommandBufferObj::DrawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                     uint32_t firstInstance) {
    vk::CmdDrawIndexed(handle(), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
2447
// Records a non-indexed vkCmdDraw with the given vertex/instance ranges.
void VkCommandBufferObj::Draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
    vk::CmdDraw(handle(), vertexCount, instanceCount, firstVertex, firstInstance);
}
2451
2452 void VkCommandBufferObj::QueueCommandBuffer(bool checkSuccess) {
2453 VkFenceObj nullFence;
2454 QueueCommandBuffer(nullFence, checkSuccess);
2455 }
2456
2457 void VkCommandBufferObj::QueueCommandBuffer(const VkFenceObj &fence, bool checkSuccess) {
2458 VkResult err = VK_SUCCESS;
2459
2460 err = m_queue->submit(*this, fence, checkSuccess);
2461 if (checkSuccess) {
2462 ASSERT_VK_SUCCESS(err);
2463 }
2464
2465 err = m_queue->wait();
2466 if (checkSuccess) {
2467 ASSERT_VK_SUCCESS(err);
2468 }
2469
2470 // TODO: Determine if we really want this serialization here
2471 // Wait for work to finish before cleaning up.
2472 vk::DeviceWaitIdle(m_device->device());
2473 }
2474
2475 void VkCommandBufferObj::BindDescriptorSet(VkDescriptorSetObj &descriptorSet) {
2476 VkDescriptorSet set_obj = descriptorSet.GetDescriptorSetHandle();
2477
2478 // bind pipeline, vertex buffer (descriptor set) and WVP (dynamic buffer view)
2479 if (set_obj) {
2480 vk::CmdBindDescriptorSets(handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, descriptorSet.GetPipelineLayout(), 0, 1, &set_obj, 0,
2481 NULL);
2482 }
2483 }
2484
// Records vkCmdBindIndexBuffer for the given buffer object, offset and index type.
void VkCommandBufferObj::BindIndexBuffer(VkBufferObj *indexBuffer, VkDeviceSize offset, VkIndexType indexType) {
    vk::CmdBindIndexBuffer(handle(), indexBuffer->handle(), offset, indexType);
}
2488
// Records vkCmdBindVertexBuffers for a single vertex buffer at the given binding.
void VkCommandBufferObj::BindVertexBuffer(VkConstantBufferObj *vertexBuffer, VkDeviceSize offset, uint32_t binding) {
    vk::CmdBindVertexBuffers(handle(), binding, 1, &vertexBuffer->handle(), &offset);
}
2492
// Creates a command pool on the given device for the given queue family.
VkCommandPoolObj::VkCommandPoolObj(VkDeviceObj *device, uint32_t queue_family_index, VkCommandPoolCreateFlags flags) {
    init(*device, vk_testing::CommandPool::create_info(queue_family_index, flags));
}
2496
// True once Init() has created the image and image view.
bool VkDepthStencilObj::Initialized() { return m_initialized; }
VkDepthStencilObj::VkDepthStencilObj(VkDeviceObj *device) : VkImageObj(device) { m_initialized = false; }

// Pointer to the image view handle, suitable for use as a framebuffer attachment.
VkImageView *VkDepthStencilObj::BindInfo() { return &m_attachmentBindInfo; }

// The depth/stencil format chosen at Init() time.
VkFormat VkDepthStencilObj::Format() const { return this->m_depth_stencil_fmt; }
2503
2504 void VkDepthStencilObj::Init(VkDeviceObj *device, int32_t width, int32_t height, VkFormat format, VkImageUsageFlags usage,
2505 VkImageAspectFlags aspect) {
2506 VkImageViewCreateInfo view_info = {};
2507
2508 m_device = device;
2509 m_initialized = true;
2510 m_depth_stencil_fmt = format;
2511
2512 /* create image */
2513 VkImageObj::Init(width, height, 1, m_depth_stencil_fmt, usage, VK_IMAGE_TILING_OPTIMAL);
2514
2515 // allows for overriding by caller
2516 if (aspect == 0) {
2517 aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
2518 if (FormatIsDepthOnly(format))
2519 aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
2520 else if (FormatIsStencilOnly(format))
2521 aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
2522 }
2523 SetLayout(aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
2524
2525 view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
2526 view_info.pNext = NULL;
2527 view_info.image = VK_NULL_HANDLE;
2528 view_info.subresourceRange.aspectMask = aspect;
2529 view_info.subresourceRange.baseMipLevel = 0;
2530 view_info.subresourceRange.levelCount = 1;
2531 view_info.subresourceRange.baseArrayLayer = 0;
2532 view_info.subresourceRange.layerCount = 1;
2533 view_info.flags = 0;
2534 view_info.format = m_depth_stencil_fmt;
2535 view_info.image = handle();
2536 view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
2537 m_imageView.init(*m_device, view_info);
2538
2539 m_attachmentBindInfo = m_imageView.handle();
2540 }
2541