1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // RendererVk.cpp:
7 // Implements the class methods for RendererVk.
8 //
9
10 #include "libANGLE/renderer/vulkan/RendererVk.h"
11
12 // Placing this first seems to solve an intellisense bug.
13 #include "libANGLE/renderer/vulkan/vk_utils.h"
14
15 #include <EGL/eglext.h>
16
17 #include "common/debug.h"
18 #include "common/platform.h"
19 #include "common/system_utils.h"
20 #include "gpu_info_util/SystemInfo.h"
21 #include "libANGLE/Context.h"
22 #include "libANGLE/Display.h"
23 #include "libANGLE/renderer/driver_utils.h"
24 #include "libANGLE/renderer/glslang_wrapper_utils.h"
25 #include "libANGLE/renderer/vulkan/CompilerVk.h"
26 #include "libANGLE/renderer/vulkan/ContextVk.h"
27 #include "libANGLE/renderer/vulkan/DisplayVk.h"
28 #include "libANGLE/renderer/vulkan/FramebufferVk.h"
29 #include "libANGLE/renderer/vulkan/ProgramVk.h"
30 #include "libANGLE/renderer/vulkan/ResourceVk.h"
31 #include "libANGLE/renderer/vulkan/VertexArrayVk.h"
32 #include "libANGLE/renderer/vulkan/vk_caps_utils.h"
33 #include "libANGLE/renderer/vulkan/vk_format_utils.h"
34 #include "libANGLE/trace.h"
35 #include "platform/Platform.h"
36
37 // Consts
namespace
{
// Vendor/device IDs reported by the Vulkan mock ICD; used to recognize that
// device when filtering physical devices (see GetFilterForICD).
const uint32_t kMockVendorID = 0xba5eba11;
const uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
// Device-name prefix reported by SwiftShader, compared with strncmp when
// selecting the SwiftShader ICD.
constexpr char kSwiftShaderDeviceName[] = "SwiftShader Device";
// Sentinel (all bits set) marking a cached VkFormatProperties entry as
// not-yet-queried; see the mFormatProperties fill in the RendererVk ctor.
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);
}  // anonymous namespace
46
47 namespace rx
48 {
49
namespace
{
// Update the pipeline cache every this many swaps.
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 60;
// Per the Vulkan specification, as long as Vulkan 1.1+ is returned by vkEnumerateInstanceVersion,
// ANGLE must indicate the highest version of Vulkan functionality that it uses.  The Vulkan
// validation layers will issue messages for any core functionality that requires a higher version.
// This value must be increased whenever ANGLE starts using functionality from a newer core
// version of Vulkan.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;
60
ChooseICDFromAttribs(const egl::AttributeMap & attribs)61 vk::ICD ChooseICDFromAttribs(const egl::AttributeMap &attribs)
62 {
63 #if !defined(ANGLE_PLATFORM_ANDROID)
64 // Mock ICD does not currently run on Android
65 EGLAttrib deviceType = attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
66 EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE);
67
68 switch (deviceType)
69 {
70 case EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE:
71 break;
72 case EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE:
73 return vk::ICD::Mock;
74 case EGL_PLATFORM_ANGLE_DEVICE_TYPE_SWIFTSHADER_ANGLE:
75 return vk::ICD::SwiftShader;
76 default:
77 UNREACHABLE();
78 break;
79 }
80 #endif // !defined(ANGLE_PLATFORM_ANDROID)
81
82 return vk::ICD::Default;
83 }
84
// Strict-weak-ordering comparator over C strings: true iff |a| sorts before |b|.
// Used both for sorting extension lists and for binary searching them.
bool StrLess(const char *a, const char *b)
{
    const int comparison = strcmp(a, b);
    return comparison < 0;
}
89
ExtensionFound(const char * needle,const RendererVk::ExtensionNameList & haystack)90 bool ExtensionFound(const char *needle, const RendererVk::ExtensionNameList &haystack)
91 {
92 // NOTE: The list must be sorted.
93 return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
94 }
95
VerifyExtensionsPresent(const RendererVk::ExtensionNameList & haystack,const RendererVk::ExtensionNameList & needles)96 VkResult VerifyExtensionsPresent(const RendererVk::ExtensionNameList &haystack,
97 const RendererVk::ExtensionNameList &needles)
98 {
99 // NOTE: The lists must be sorted.
100 if (std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess))
101 {
102 return VK_SUCCESS;
103 }
104 for (const char *needle : needles)
105 {
106 if (!ExtensionFound(needle, haystack))
107 {
108 ERR() << "Extension not supported: " << needle;
109 }
110 }
111 return VK_ERROR_EXTENSION_NOT_PRESENT;
112 }
113
// Array of Validation error/warning messages that will be ignored, should include bugID
// Matching is substring-based (see IsIgnoredDebugMessage), so an entry may be
// just the VUID / message-ID prefix.
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/2866
    "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed",
    // http://anglebug.com/2796
    "UNASSIGNED-CoreValidation-Shader-PointSizeMissing",
    // http://anglebug.com/3832
    "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
    // http://anglebug.com/3450
    "VUID-vkDestroySemaphore-semaphore-parameter",
    // http://anglebug.com/4063
    "VUID-VkDeviceCreateInfo-pNext-pNext",
    "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext",
    "VUID_Undefined",
    // http://anglebug.com/3078
    "UNASSIGNED-CoreValidation-Shader-InterfaceTypeMismatch",
    // http://anglebug.com/4510
    "VUID-vkQueuePresentKHR-pWaitSemaphores-03268",
};
133
134 // Suppress validation errors that are known
135 // return "true" if given code/prefix/message is known, else return "false"
IsIgnoredDebugMessage(const char * message)136 bool IsIgnoredDebugMessage(const char *message)
137 {
138 if (!message)
139 {
140 return false;
141 }
142 for (const char *msg : kSkippedMessages)
143 {
144 if (strstr(message, msg) != nullptr)
145 {
146 return true;
147 }
148 }
149 return false;
150 }
151
// Returns a human-readable name for a VkObjectType, used when formatting the
// object list in validation-layer messages (see DebugUtilsMessenger).  Any
// value not handled below maps to "<Unrecognized>".
const char *GetVkObjectTypeName(VkObjectType type)
{
    switch (type)
    {
        case VK_OBJECT_TYPE_UNKNOWN:
            return "Unknown";
        case VK_OBJECT_TYPE_INSTANCE:
            return "Instance";
        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
            return "Physical Device";
        case VK_OBJECT_TYPE_DEVICE:
            return "Device";
        case VK_OBJECT_TYPE_QUEUE:
            return "Queue";
        case VK_OBJECT_TYPE_SEMAPHORE:
            return "Semaphore";
        case VK_OBJECT_TYPE_COMMAND_BUFFER:
            return "Command Buffer";
        case VK_OBJECT_TYPE_FENCE:
            return "Fence";
        case VK_OBJECT_TYPE_DEVICE_MEMORY:
            return "Device Memory";
        case VK_OBJECT_TYPE_BUFFER:
            return "Buffer";
        case VK_OBJECT_TYPE_IMAGE:
            return "Image";
        case VK_OBJECT_TYPE_EVENT:
            return "Event";
        case VK_OBJECT_TYPE_QUERY_POOL:
            return "Query Pool";
        case VK_OBJECT_TYPE_BUFFER_VIEW:
            return "Buffer View";
        case VK_OBJECT_TYPE_IMAGE_VIEW:
            return "Image View";
        case VK_OBJECT_TYPE_SHADER_MODULE:
            return "Shader Module";
        case VK_OBJECT_TYPE_PIPELINE_CACHE:
            return "Pipeline Cache";
        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
            return "Pipeline Layout";
        case VK_OBJECT_TYPE_RENDER_PASS:
            return "Render Pass";
        case VK_OBJECT_TYPE_PIPELINE:
            return "Pipeline";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
            return "Descriptor Set Layout";
        case VK_OBJECT_TYPE_SAMPLER:
            return "Sampler";
        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
            return "Descriptor Pool";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
            return "Descriptor Set";
        case VK_OBJECT_TYPE_FRAMEBUFFER:
            return "Framebuffer";
        case VK_OBJECT_TYPE_COMMAND_POOL:
            return "Command Pool";
        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
            return "Sampler YCbCr Conversion";
        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
            return "Descriptor Update Template";
        case VK_OBJECT_TYPE_SURFACE_KHR:
            return "Surface";
        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
            return "Swapchain";
        case VK_OBJECT_TYPE_DISPLAY_KHR:
            return "Display";
        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
            return "Display Mode";
        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
            return "Debug Report Callback";
        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV:
            return "Indirect Commands Layout";
        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
            return "Debug Utils Messenger";
        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
            return "Validation Cache";
        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
            return "Acceleration Structure";
        default:
            return "<Unrecognized>";
    }
}
234
// This function is unused on Android/Fuschia/GGP
#if !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) && \
    !defined(ANGLE_PLATFORM_GGP)
// Turns an ICD manifest filename into the value stored in the loader's ICD
// environment variable.  On macOS the ICD libraries are bundled into the
// application directory, so the path is prefixed with the helper-executable
// directory; on other platforms the name is returned unchanged.
//
// Note: the return type is intentionally non-const std::string; a const
// by-value return would inhibit move construction at call sites
// (clang-tidy readability-const-return-type).  The original also had an
// unreachable trailing return on Apple builds; restructured as #if/#else.
std::string WrapICDEnvironment(const char *icdEnvironment)
{
#    if defined(ANGLE_PLATFORM_APPLE)
    // On MacOS the libraries are bundled into the application directory
    return angle::GetHelperExecutableDir() + icdEnvironment;
#    else
    return icdEnvironment;
#    endif  // defined(ANGLE_PLATFORM_APPLE)
}
#endif  // !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) &&
        // !defined(ANGLE_PLATFORM_GGP)
249
// VK_EXT_debug_utils messenger callback.  Formats a validation message — the
// message text, the queue/command-buffer debug-label hierarchy, and the list
// of involved objects — records it on the RendererVk passed in |userData|,
// then logs it as an error or warning.  Always returns VK_FALSE so the
// triggering Vulkan call is not aborted.
VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    // See if it's an issue we are aware of and don't want to be spammed about.
    if (IsIgnoredDebugMessage(callbackData->pMessageIdName))
    {
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occured.
    // |indent| is incremented on every line so nested labels render as a tree.
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            // A zero handle is printed symbolically; others in hex.
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    // Hand the formatted message to the renderer (onNewValidationMessage)
    // before logging it.
    RendererVk *rendererVk = static_cast<RendererVk *>(userData);
    rendererVk->onNewValidationMessage(msg);

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    return VK_FALSE;
}
335
DebugReportCallback(VkDebugReportFlagsEXT flags,VkDebugReportObjectTypeEXT objectType,uint64_t object,size_t location,int32_t messageCode,const char * layerPrefix,const char * message,void * userData)336 VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
337 VkDebugReportObjectTypeEXT objectType,
338 uint64_t object,
339 size_t location,
340 int32_t messageCode,
341 const char *layerPrefix,
342 const char *message,
343 void *userData)
344 {
345 if (IsIgnoredDebugMessage(message))
346 {
347 return VK_FALSE;
348 }
349 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
350 {
351 ERR() << message;
352 #if !defined(NDEBUG)
353 // Abort the call in Debug builds.
354 return VK_TRUE;
355 #endif
356 }
357 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
358 {
359 WARN() << message;
360 }
361 else
362 {
363 // Uncomment this if you want Vulkan spam.
364 // WARN() << message;
365 }
366
367 return VK_FALSE;
368 }
369
// If we're loading the validation layers, we could be running from any random directory.
// Change to the executable directory so we can find the layers, then change back to the
// previous directory to be safe we don't disrupt the application.
//
// RAII helper: the constructor points the Vulkan loader at the requested ICD
// (via environment variable) and at ANGLE's layer directory (via CWD + layer
// path env var); the destructor undoes both.  If any step fails, validation
// layers and/or the non-default ICD are disabled rather than half-configured —
// query the final state through canEnableValidationLayers()/getEnabledICD().
class ScopedVkLoaderEnvironment : angle::NonCopyable
{
  public:
    ScopedVkLoaderEnvironment(bool enableValidationLayers, vk::ICD icd)
        : mEnableValidationLayers(enableValidationLayers),
          mICD(icd),
          mChangedCWD(false),
          mChangedICDEnv(false)
    {
// Changing CWD and setting environment variables makes no sense on Android,
// since this code is a part of Java application there.
// Android Vulkan loader doesn't need this either.
#if !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) && \
    !defined(ANGLE_PLATFORM_GGP)
        if (icd == vk::ICD::Mock)
        {
            if (!setICDEnvironment(WrapICDEnvironment(ANGLE_VK_MOCK_ICD_JSON).c_str()))
            {
                ERR() << "Error setting environment for Mock/Null Driver.";
            }
        }
#    if defined(ANGLE_VK_SWIFTSHADER_ICD_JSON)
        else if (icd == vk::ICD::SwiftShader)
        {
            if (!setICDEnvironment(WrapICDEnvironment(ANGLE_VK_SWIFTSHADER_ICD_JSON).c_str()))
            {
                ERR() << "Error setting environment for SwiftShader.";
            }
        }
#    endif  // defined(ANGLE_VK_SWIFTSHADER_ICD_JSON)
        // Layers (and non-default ICDs) are located relative to the executable,
        // so temporarily switch the CWD there; restored in the destructor.
        if (mEnableValidationLayers || icd != vk::ICD::Default)
        {
            const auto &cwd = angle::GetCWD();
            if (!cwd.valid())
            {
                ERR() << "Error getting CWD for Vulkan layers init.";
                mEnableValidationLayers = false;
                mICD = vk::ICD::Default;
            }
            else
            {
                mPreviousCWD = cwd.value();
                std::string exeDir = angle::GetExecutableDirectory();
                mChangedCWD = angle::SetCWD(exeDir.c_str());
                if (!mChangedCWD)
                {
                    ERR() << "Error setting CWD for Vulkan layers init.";
                    mEnableValidationLayers = false;
                    mICD = vk::ICD::Default;
                }
            }
        }

        // Override environment variable to use the ANGLE layers.
        if (mEnableValidationLayers)
        {
            if (!angle::PrependPathToEnvironmentVar(vk::gLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
            {
                ERR() << "Error setting environment for Vulkan layers init.";
                mEnableValidationLayers = false;
            }
        }
#endif  // !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) &&
        // !defined(ANGLE_PLATFORM_GGP)
    }

    // Restores the CWD and the loader's ICD environment variable to their
    // pre-construction values (unsetting the var if it was previously unset).
    ~ScopedVkLoaderEnvironment()
    {
        if (mChangedCWD)
        {
#if !defined(ANGLE_PLATFORM_ANDROID)
            ASSERT(mPreviousCWD.valid());
            angle::SetCWD(mPreviousCWD.value().c_str());
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
        }
        if (mChangedICDEnv)
        {
            if (mPreviousICDEnv.value().empty())
            {
                angle::UnsetEnvironmentVar(vk::gLoaderICDFilenamesEnv);
            }
            else
            {
                angle::SetEnvironmentVar(vk::gLoaderICDFilenamesEnv,
                                         mPreviousICDEnv.value().c_str());
            }
        }
    }

    // Effective settings after construction (may be downgraded on failure).
    bool canEnableValidationLayers() const { return mEnableValidationLayers; }
    vk::ICD getEnabledICD() const { return mICD; }

  private:
    // Saves the current loader ICD env var and overrides it with |icd|.
    // On failure, falls back to the default ICD and returns false.
    bool setICDEnvironment(const char *icd)
    {
        // Override environment variable to use built Mock ICD
        // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
        mPreviousICDEnv = angle::GetEnvironmentVar(vk::gLoaderICDFilenamesEnv);
        mChangedICDEnv = angle::SetEnvironmentVar(vk::gLoaderICDFilenamesEnv, icd);

        if (!mChangedICDEnv)
        {
            mICD = vk::ICD::Default;
        }
        return mChangedICDEnv;
    }

    bool mEnableValidationLayers;
    vk::ICD mICD;
    bool mChangedCWD;
    Optional<std::string> mPreviousCWD;
    bool mChangedICDEnv;
    Optional<std::string> mPreviousICDEnv;
};
486
487 using ICDFilterFunc = std::function<bool(const VkPhysicalDeviceProperties &)>;
488
GetFilterForICD(vk::ICD preferredICD)489 ICDFilterFunc GetFilterForICD(vk::ICD preferredICD)
490 {
491 switch (preferredICD)
492 {
493 case vk::ICD::Mock:
494 return [](const VkPhysicalDeviceProperties &deviceProperties) {
495 return ((deviceProperties.vendorID == kMockVendorID) &&
496 (deviceProperties.deviceID == kMockDeviceID) &&
497 (strcmp(deviceProperties.deviceName, kMockDeviceName) == 0));
498 };
499 case vk::ICD::SwiftShader:
500 return [](const VkPhysicalDeviceProperties &deviceProperties) {
501 return (IsSwiftshader(deviceProperties.vendorID, deviceProperties.deviceID) &&
502 (strncmp(deviceProperties.deviceName, kSwiftShaderDeviceName,
503 strlen(kSwiftShaderDeviceName)) == 0));
504 };
505 default:
506 const std::string anglePreferredDevice =
507 angle::GetEnvironmentVar(vk::gANGLEPreferredDevice);
508 return [anglePreferredDevice](const VkPhysicalDeviceProperties &deviceProperties) {
509 return (anglePreferredDevice.empty() ||
510 anglePreferredDevice == deviceProperties.deviceName);
511 };
512 }
513 }
514
ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> & physicalDevices,vk::ICD preferredICD,VkPhysicalDevice * physicalDeviceOut,VkPhysicalDeviceProperties * physicalDevicePropertiesOut)515 void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
516 vk::ICD preferredICD,
517 VkPhysicalDevice *physicalDeviceOut,
518 VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
519 {
520 ASSERT(!physicalDevices.empty());
521
522 ICDFilterFunc filter = GetFilterForICD(preferredICD);
523
524 for (const VkPhysicalDevice &physicalDevice : physicalDevices)
525 {
526 vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
527 if (filter(*physicalDevicePropertiesOut))
528 {
529 *physicalDeviceOut = physicalDevice;
530 return;
531 }
532 }
533 WARN() << "Preferred device ICD not found. Using default physicalDevice instead.";
534
535 // Fall back to first device.
536 *physicalDeviceOut = physicalDevices[0];
537 vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
538 }
539
ShouldUseValidationLayers(const egl::AttributeMap & attribs)540 bool ShouldUseValidationLayers(const egl::AttributeMap &attribs)
541 {
542 #if defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
543 return ShouldUseDebugLayers(attribs);
544 #else
545 EGLAttrib debugSetting =
546 attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE);
547 return debugSetting == EGL_TRUE;
548 #endif // defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
549 }
550
// Caps |current| at |lower|: returns whichever GL version is smaller.
gl::Version LimitVersionTo(const gl::Version &current, const gl::Version &lower)
{
    return current < lower ? current : lower;
}
555 } // namespace
556
// RendererVk implementation.
//
// Constructor: puts every handle/flag in its "uninitialized" state (actual
// Vulkan setup happens in initialize()) and seeds the queue-serial counters.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableDebugUtils(false),
      mEnabledICD(vk::ICD::Default),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribDivisor(1),
      mMaxVertexAttribStride(0),
      mMinImportedHostPointerAlignment(1),
      mDevice(VK_NULL_HANDLE),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mPipelineCacheDirty(false),
      mPipelineCacheInitialized(false),
      mGlslangInitialized(false)
{
    // Mark every cached format-properties entry as "not yet queried".
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);

    // We currently don't have any big-endian devices in the list of supported platforms. There are
    // a number of places in the Vulkan backend that make this assumption. This assertion is made
    // early to fail immediately on big-endian platforms.
    ASSERT(IsLittleEndian());
}
589
// Destructor: by this point onDestroy() must have run and drained all
// deferred garbage; actual Vulkan teardown does not happen here.
RendererVk::~RendererVk()
{
    ASSERT(mSharedGarbage.empty());
}
594
// Tears down all Vulkan state.  Order matters: drain the queues, flush the
// deferred-destruction garbage, destroy pools/caches, then the device, the
// debug messenger/callback, and finally the instance.
void RendererVk::onDestroy()
{
    // Force all commands to finish by flushing all queues.
    for (VkQueue queue : mQueues)
    {
        if (queue != VK_NULL_HANDLE)
        {
            vkQueueWaitIdle(queue);
        }
    }

    // Then assign an infinite "last completed" serial to force garbage to delete.
    mLastCompletedQueueSerial = Serial::Infinite();
    (void)cleanupGarbage(true);
    ASSERT(mSharedGarbage.empty());

    // Release (not free) pending one-off command buffers; the pool below owns them.
    for (PendingOneOffCommands &pending : mPendingOneOffCommands)
    {
        pending.commandBuffer.releaseHandle();
    }

    mOneOffCommandPool.destroy(mDevice);

    mFenceRecycler.destroy(mDevice);

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mPipelineCache.destroy(mDevice);

    if (mGlslangInitialized)
    {
        GlslangRelease();
        mGlslangInitialized = false;
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    // At most one of the debug-utils messenger and the (fallback) debug-report
    // callback may exist; see the mutually exclusive setup in initialize().
    if (mDebugUtilsMessenger)
    {
        ASSERT(mInstance && vkDestroyDebugUtilsMessengerEXT);
        vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);

        ASSERT(mDebugReportCallback == VK_NULL_HANDLE);
    }
    else if (mDebugReportCallback)
    {
        ASSERT(mInstance && vkDestroyDebugReportCallbackEXT);
        vkDestroyDebugReportCallbackEXT(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}
659
// Marks the device as lost: treats everything submitted so far as completed
// (so pending garbage can be reclaimed), sets the lost flag, and propagates
// the loss to the EGL display.  The flag is set before notifying the display
// so isDeviceLost() already answers true during the notification.
void RendererVk::notifyDeviceLost()
{
    mLastCompletedQueueSerial = mLastSubmittedQueueSerial;
    mDeviceLost = true;
    mDisplay->notifyDeviceLost();
}
666
// Returns whether notifyDeviceLost() has been called on this renderer.
bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}
671
// One-time Vulkan bring-up for this renderer: loads the Vulkan entry points,
// configures the loader environment (ICD/validation layers), creates the
// instance with the required layers/extensions, installs a debug messenger,
// picks a physical device and (when unambiguous) creates the logical device,
// and initializes Glslang and the format table.
//
// |wsiExtension| is the platform's surface extension name; |wsiLayer| is an
// optional extra instance layer.  Errors are routed through |displayVk|.
angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiExtension,
                                     const char *wsiLayer)
{
    // Set all vk* function ptrs
    ANGLE_VK_TRY(displayVk, volkInitialize());
    mDisplay = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    // The scoped environment below may adjust CWD/env vars so the loader can
    // find ANGLE's layers and the requested ICD; it restores them on scope exit.
    ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseValidationLayers(attribs),
                                                ChooseICDFromAttribs(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnabledICD = scopedEnvironment.getEnabledICD();

    // Gather global layer properties.  Standard Vulkan two-call pattern:
    // query the count, then fill the vector.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    VulkanLayerVector enabledInstanceLayerNames;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        // May clear mEnableValidationLayers if the layers aren't available.
        mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
                                                               &enabledInstanceLayerNames);
    }

    if (wsiLayer)
    {
        enabledInstanceLayerNames.push_back(wsiLayer);
    }

    // Enumerate instance extensions that are provided by the vulkan
    // implementation and implicit layers.
    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    // Enumerate instance extensions that are provided by explicit layers.
    for (const char *layerName : enabledInstanceLayerNames)
    {
        uint32_t previousExtensionCount = static_cast<uint32_t>(instanceExtensionProps.size());
        uint32_t instanceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount, nullptr));
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount,
                                    instanceExtensionProps.data() + previousExtensionCount));
    }

    // Sorted name list enables the binary searches in ExtensionFound/
    // VerifyExtensionsPresent below.
    ExtensionNameList instanceExtensionNames;
    if (!instanceExtensionProps.empty())
    {
        for (const VkExtensionProperties &i : instanceExtensionProps)
        {
            instanceExtensionNames.push_back(i.extensionName);
        }
        std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    // NOTE(review): wsiExtension is pushed unconditionally — assumes callers
    // always pass a non-null WSI extension name; verify against DisplayVk.
    enabledInstanceExtensions.push_back(wsiExtension);
    // Prefer the newer VK_EXT_debug_utils; fall back to VK_EXT_debug_report.
    mEnableDebugUtils = mEnableValidationLayers &&
                        ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);

    bool enableDebugReport =
        mEnableValidationLayers && !mEnableDebugUtils &&
        ExtensionFound(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instanceExtensionNames);

    if (mEnableDebugUtils)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    else if (enableDebugReport)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    if (ExtensionFound(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, instanceExtensionNames))
    {
        enabledInstanceExtensions.push_back(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME);
        ANGLE_FEATURE_CONDITION(&mFeatures, supportsSwapchainColorspace, true);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    std::sort(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionNames, enabledInstanceExtensions));

    // Enable VK_KHR_get_physical_device_properties_2 if available.
    // Note: pushed after the sort above, so it is not part of the verification.
    if (ExtensionFound(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                       instanceExtensionNames))
    {
        enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    VkApplicationInfo applicationInfo = {};
    applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pApplicationName = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName = "ANGLE";
    applicationInfo.engineVersion = 1;

    // vkEnumerateInstanceVersion only exists on Vulkan 1.1+ loaders; its
    // absence means this is a 1.0 environment.
    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(mInstance, "vkEnumerateInstanceVersion"));
    if (!enumerateInstanceVersion)
    {
        applicationInfo.apiVersion = VK_API_VERSION_1_0;
    }
    else
    {
        uint32_t apiVersion = VK_API_VERSION_1_0;
        ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&apiVersion));
        if ((VK_VERSION_MAJOR(apiVersion) > 1) || (VK_VERSION_MINOR(apiVersion) >= 1))
        {
            // This is the highest version of core Vulkan functionality that ANGLE uses.
            applicationInfo.apiVersion = kPreferredVulkanAPIVersion;
        }
        else
        {
            // Since only 1.0 instance-level functionality is available, this must set to 1.0.
            applicationInfo.apiVersion = VK_API_VERSION_1_0;
        }
    }

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags = 0;
    instanceInfo.pApplicationInfo = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount = static_cast<uint32_t>(enabledInstanceLayerNames.size());
    instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();
    ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));
    // Load instance-level entry points through volk.
    volkLoadInstance(mInstance);

    if (mEnableDebugUtils)
    {
        // Use the newer EXT_debug_utils if it exists.
        // Create the messenger callback.
        VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};

        constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

        constexpr VkDebugUtilsMessageTypeFlagsEXT kMessagesToLog =
            VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;

        messengerInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerInfo.messageSeverity = kSeveritiesToLog;
        messengerInfo.messageType = kMessagesToLog;
        messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
        messengerInfo.pUserData = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
                                                               &mDebugUtilsMessenger));
    }
    else if (enableDebugReport)
    {
        // Fallback to EXT_debug_report.
        VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugReportCallbackEXT(mInstance, &debugReportInfo, nullptr,
                                                               &mDebugReportCallback));
    }

    // Sanity check: if get_physical_device_properties_2 was enabled, volk must
    // have resolved its entry point.
    if (std::find(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(),
                  VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) !=
        enabledInstanceExtensions.end())
    {
        ASSERT(vkGetPhysicalDeviceProperties2KHR);
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    ChoosePhysicalDevice(physicalDevices, mEnabledICD, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    mGarbageCollectionFlushThreshold =
        static_cast<uint32_t>(mPhysicalDeviceProperties.limits.maxMemoryAllocationCount *
                              kPercentMaxMemoryAllocationCount);

    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    // NOTE(review): 'false' here is just 0, but the type mismatch (size_t =
    // bool) looks unintentional — likely meant '= 0'.
    size_t graphicsQueueFamilyCount = false;
    uint32_t firstGraphicsQueueFamily = 0;
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            ASSERT(queueInfo.queueCount > 0);
            graphicsQueueFamilyCount++;
            if (firstGraphicsQueueFamily == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            // NOTE(review): this break caps graphicsQueueFamilyCount at 1, so
            // the "== 1" early-device-init path below is always taken once a
            // graphics+compute family is found — confirm this is intended.
            break;
        }
    }

    ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // If only one queue family, go ahead and initialize the device. If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    if (!mGlslangInitialized)
    {
        GlslangInitialize();
        mGlslangInitialized = true;
    }

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);

    return angle::Result::Continue;
}
941
// Queries the physical device for the features and properties of the device extensions ANGLE is
// interested in.  Each Vk*Features/Properties member struct is zero-initialized up front so that,
// if the corresponding extension is absent (or the vkGetPhysicalDevice*2KHR entry points are
// unavailable), the struct reads as "not supported".  Structs are only chained into the query for
// extensions that were actually enumerated, per the requirements of VK_KHR_get_physical_device_properties2.
void RendererVk::queryDeviceExtensionFeatures(const ExtensionNameList &deviceExtensionNames)
{
    // Default initialize all extension features to false.
    mLineRasterizationFeatures = {};
    mLineRasterizationFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;

    mProvokingVertexFeatures = {};
    mProvokingVertexFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;

    mVertexAttributeDivisorFeatures = {};
    mVertexAttributeDivisorFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;

    mVertexAttributeDivisorProperties = {};
    mVertexAttributeDivisorProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;

    mTransformFeedbackFeatures = {};
    mTransformFeedbackFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;

    mIndexTypeUint8Features       = {};
    mIndexTypeUint8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;

    mPhysicalDeviceSubgroupProperties       = {};
    mPhysicalDeviceSubgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;

    mPhysicalDeviceExternalMemoryHostProperties = {};
    mPhysicalDeviceExternalMemoryHostProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;

    // The extended query entry points come from VK_KHR_get_physical_device_properties2 (or Vulkan
    // 1.1).  Without them, leave every struct in its default "unsupported" state.
    if (!vkGetPhysicalDeviceProperties2KHR || !vkGetPhysicalDeviceFeatures2KHR)
    {
        return;
    }

    // Query features and properties.
    VkPhysicalDeviceFeatures2KHR deviceFeatures = {};
    deviceFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;

    VkPhysicalDeviceProperties2 deviceProperties = {};
    deviceProperties.sType                       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;

    // Query line rasterization features
    if (ExtensionFound(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mLineRasterizationFeatures);
    }

    // Query provoking vertex features
    if (ExtensionFound(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mProvokingVertexFeatures);
    }

    // Query attribute divisor features and properties
    if (ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mVertexAttributeDivisorFeatures);
        vk::AddToPNextChain(&deviceProperties, &mVertexAttributeDivisorProperties);
    }

    // Query transform feedback features
    if (ExtensionFound(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mTransformFeedbackFeatures);
    }

    // Query uint8 index type features
    if (ExtensionFound(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mIndexTypeUint8Features);
    }

    // Query external memory host properties
    if (ExtensionFound(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceProperties, &mPhysicalDeviceExternalMemoryHostProperties);
    }

    // Query subgroup properties (unconditionally; the struct belongs to core Vulkan, not an
    // extension).
    vk::AddToPNextChain(&deviceProperties, &mPhysicalDeviceSubgroupProperties);

    vkGetPhysicalDeviceFeatures2KHR(mPhysicalDevice, &deviceFeatures);
    vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);

    // Clean up pNext chains: the member structs are later chained into other structures (e.g.
    // VkDeviceCreateInfo in initializeDevice), so stale pointers into the local query structs
    // above must not be left behind.
    mLineRasterizationFeatures.pNext                  = nullptr;
    mProvokingVertexFeatures.pNext                    = nullptr;
    mVertexAttributeDivisorFeatures.pNext             = nullptr;
    mVertexAttributeDivisorProperties.pNext           = nullptr;
    mTransformFeedbackFeatures.pNext                  = nullptr;
    mIndexTypeUint8Features.pNext                     = nullptr;
    mPhysicalDeviceSubgroupProperties.pNext           = nullptr;
    mPhysicalDeviceExternalMemoryHostProperties.pNext = nullptr;
}
1040
// Creates the logical VkDevice on the previously-chosen physical device, for the given queue
// family.  This enumerates and enables device layers/extensions, selects the physical-device
// features to enable, creates up to three queues (mapped to EGL context priorities), and
// initializes the pipeline cache.  Called either directly from initialization (single queue
// family) or from selectPresentQueueForSurface once a present-capable family is known.
angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                                 deviceLayerProps.data()));
    }

    VulkanLayerVector enabledDeviceLayerNames;
    if (mEnableValidationLayers)
    {
        // May flip mEnableValidationLayers off if the requested layers are unavailable.
        mEnableValidationLayers =
            GetAvailableValidationLayers(deviceLayerProps, false, &enabledDeviceLayerNames);
    }

    const char *wsiLayer = displayVk->getWSILayer();
    if (wsiLayer)
    {
        enabledDeviceLayerNames.push_back(wsiLayer);
    }

    // Enumerate device extensions that are provided by the vulkan
    // implementation and implicit layers.
    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                 &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                     &deviceExtensionCount,
                                                                     deviceExtensionProps.data()));
    }

    // Enumerate device extensions that are provided by explicit layers.
    for (const char *layerName : enabledDeviceLayerNames)
    {
        uint32_t previousExtensionCount    = static_cast<uint32_t>(deviceExtensionProps.size());
        uint32_t deviceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateDeviceExtensionProperties(mPhysicalDevice, layerName,
                                                          &deviceLayerExtensionCount, nullptr));
        deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(
                                    mPhysicalDevice, layerName, &deviceLayerExtensionCount,
                                    deviceExtensionProps.data() + previousExtensionCount));
    }

    // Flatten the extension properties into a sorted name list for fast lookup (ExtensionFound /
    // VerifyExtensionsPresent below rely on the sorted order).
    ExtensionNameList deviceExtensionNames;
    if (!deviceExtensionProps.empty())
    {
        // NOTE(review): deviceExtensionNames is empty here, so this assert is trivially true;
        // presumably it guards a pre-reserved capacity in an earlier revision — confirm intent.
        ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
        for (const VkExtensionProperties &prop : deviceExtensionProps)
        {
            deviceExtensionNames.push_back(prop.extensionName);
        }
        std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    // Queues: map low, med, high priority to whatever is supported up to 3 queues
    uint32_t queueCount = std::min(mQueueFamilyProperties[queueFamilyIndex].queueCount,
                                   static_cast<uint32_t>(egl::ContextPriority::EnumCount));

    constexpr float kVulkanQueuePriorityLow    = 0.0;
    constexpr float kVulkanQueuePriorityMedium = 0.4;
    constexpr float kVulkanQueuePriorityHigh   = 1.0;

    // Index order: Low, High, Medium - so no need to rearrange according to count:
    // If we have 1 queue - all same, if 2 - Low and High, if 3 Low, High and Medium.
    constexpr uint32_t kQueueIndexLow    = 0;
    constexpr uint32_t kQueueIndexHigh   = 1;
    constexpr uint32_t kQueueIndexMedium = 2;

    // Note: index 0 is given medium Vulkan priority because when only one queue exists it serves
    // all three EGL priorities (see the queue retrieval below).
    constexpr float queuePriorities[static_cast<uint32_t>(egl::ContextPriority::EnumCount)] = {
        kVulkanQueuePriorityMedium, kVulkanQueuePriorityHigh, kVulkanQueuePriorityLow};

    VkDeviceQueueCreateInfo queueCreateInfo = {};
    queueCreateInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.flags                   = 0;
    queueCreateInfo.queueFamilyIndex        = queueFamilyIndex;
    queueCreateInfo.queueCount              = queueCount;
    queueCreateInfo.pQueuePriorities        = queuePriorities;

    // Query extensions and their features.
    queryDeviceExtensionFeatures(deviceExtensionNames);

    // Initialize features and workarounds.
    initFeatures(displayVk, deviceExtensionNames);

    // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
    if ((getFeatures().flipViewportY.enabled) &&
        (mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 1, 0)))
    {
        enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    }
    if (getFeatures().supportsIncrementalPresent.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    if (getFeatures().supportsAndroidHardwareBuffer.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
        enabledDeviceExtensions.push_back(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
    }
#else
    ASSERT(!getFeatures().supportsAndroidHardwareBuffer.enabled);
#endif

    if (getFeatures().supportsAndroidHardwareBuffer.enabled ||
        getFeatures().supportsExternalMemoryFd.enabled ||
        getFeatures().supportsExternalMemoryFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalMemoryFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalMemoryFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalSemaphoreFd.enabled ||
        getFeatures().supportsExternalSemaphoreFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalSemaphoreFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalSemaphoreFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
    }

    if (getFeatures().supportsShaderStencilExport.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
    }

    std::sort(enabledDeviceExtensions.begin(), enabledDeviceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionNames, enabledDeviceExtensions));

    // Select additional features to be enabled.
    VkPhysicalDeviceFeatures2KHR enabledFeatures = {};
    enabledFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    // Used to support framebuffers with multiple attachments:
    enabledFeatures.features.independentBlend = mPhysicalDeviceFeatures.independentBlend;
    // Used to support robust buffer access:
    enabledFeatures.features.robustBufferAccess = mPhysicalDeviceFeatures.robustBufferAccess;
    // Used to support Anisotropic filtering:
    enabledFeatures.features.samplerAnisotropy = mPhysicalDeviceFeatures.samplerAnisotropy;
    // Used to emulate transform feedback:
    enabledFeatures.features.vertexPipelineStoresAndAtomics =
        mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics;
    // Used to implement storage buffers and images in the fragment shader:
    enabledFeatures.features.fragmentStoresAndAtomics =
        mPhysicalDeviceFeatures.fragmentStoresAndAtomics;
    // Used to support geometry shaders:
    enabledFeatures.features.geometryShader = mPhysicalDeviceFeatures.geometryShader;
    // Used to support EXT_gpu_shader5:
    enabledFeatures.features.shaderImageGatherExtended =
        mPhysicalDeviceFeatures.shaderImageGatherExtended;
    // Used to support EXT_gpu_shader5:
    enabledFeatures.features.shaderUniformBufferArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing;
    // Used to support EXT_gpu_shader5 and sampler array of array emulation:
    enabledFeatures.features.shaderSampledImageArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing;
    // Used to support atomic counter emulation:
    enabledFeatures.features.shaderStorageBufferArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing;

    if (!vk::CommandBuffer::ExecutesInline())
    {
        enabledFeatures.features.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;
    }

    // Setup device initialization struct
    VkDeviceCreateInfo createInfo = {};

    // Based on available extension features, decide on which extensions and features to enable.
    // These additions happen after VerifyExtensionsPresent above, which is safe: the feature bits
    // checked below can only be non-zero if queryDeviceExtensionFeatures found the corresponding
    // extension in deviceExtensionNames.

    if (mLineRasterizationFeatures.bresenhamLines)
    {
        enabledDeviceExtensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mLineRasterizationFeatures);
    }

    if (mProvokingVertexFeatures.provokingVertexLast)
    {
        enabledDeviceExtensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mProvokingVertexFeatures);
    }

    if (mVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor)
    {
        enabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mVertexAttributeDivisorFeatures);

        // We only store 8 bit divisor in GraphicsPipelineDesc so capping value & we emulate if
        // exceeded
        mMaxVertexAttribDivisor =
            std::min(mVertexAttributeDivisorProperties.maxVertexAttribDivisor,
                     static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()));
    }

    if (getFeatures().supportsTransformFeedbackExtension.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mTransformFeedbackFeatures);
    }

    if (getFeatures().supportsIndexTypeUint8.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mIndexTypeUint8Features);
    }

    if (getFeatures().supportsExternalMemoryHost.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
        mMinImportedHostPointerAlignment =
            mPhysicalDeviceExternalMemoryHostProperties.minImportedHostPointerAlignment;
    }

    createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.flags                 = 0;
    createInfo.queueCreateInfoCount  = 1;
    createInfo.pQueueCreateInfos     = &queueCreateInfo;
    createInfo.enabledLayerCount     = static_cast<uint32_t>(enabledDeviceLayerNames.size());
    createInfo.ppEnabledLayerNames   = enabledDeviceLayerNames.data();
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    // Enable core features without assuming VkPhysicalDeviceFeatures2KHR is accepted in the pNext
    // chain of VkDeviceCreateInfo.
    createInfo.pEnabledFeatures = &enabledFeatures.features;

    ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));
    // Load device-level entry points through volk for dispatch without per-call indirection.
    volkLoadDevice(mDevice);

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    // When only 1 Queue, use same for all, Low index. Identify as Medium, since it's default.
    VkQueue queue;
    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexLow, &queue);
    mQueues[egl::ContextPriority::Low]        = queue;
    mQueues[egl::ContextPriority::Medium]     = queue;
    mQueues[egl::ContextPriority::High]       = queue;
    mPriorities[egl::ContextPriority::Low]    = egl::ContextPriority::Medium;
    mPriorities[egl::ContextPriority::Medium] = egl::ContextPriority::Medium;
    mPriorities[egl::ContextPriority::High]   = egl::ContextPriority::Medium;

    // If at least 2 queues, High has its own queue
    if (queueCount > 1)
    {
        vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexHigh,
                         &mQueues[egl::ContextPriority::High]);
        mPriorities[egl::ContextPriority::High] = egl::ContextPriority::High;
    }
    // If at least 3 queues, Medium has its own queue. Adjust Low priority.
    if (queueCount > 2)
    {
        vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexMedium,
                         &mQueues[egl::ContextPriority::Medium]);
        mPriorities[egl::ContextPriority::Low] = egl::ContextPriority::Low;
    }

    // Initialize the vulkan pipeline cache.
    bool success = false;
    ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &success));

    return angle::Result::Continue;
}
1334
selectPresentQueueForSurface(DisplayVk * displayVk,VkSurfaceKHR surface,uint32_t * presentQueueOut)1335 angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
1336 VkSurfaceKHR surface,
1337 uint32_t *presentQueueOut)
1338 {
1339 // We've already initialized a device, and can't re-create it unless it's never been used.
1340 // TODO(jmadill): Handle the re-creation case if necessary.
1341 if (mDevice != VK_NULL_HANDLE)
1342 {
1343 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
1344
1345 // Check if the current device supports present on this surface.
1346 VkBool32 supportsPresent = VK_FALSE;
1347 ANGLE_VK_TRY(displayVk,
1348 vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
1349 surface, &supportsPresent));
1350
1351 if (supportsPresent == VK_TRUE)
1352 {
1353 *presentQueueOut = mCurrentQueueFamilyIndex;
1354 return angle::Result::Continue;
1355 }
1356 }
1357
1358 // Find a graphics and present queue.
1359 Optional<uint32_t> newPresentQueue;
1360 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
1361 constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
1362 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
1363 {
1364 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
1365 if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
1366 {
1367 VkBool32 supportsPresent = VK_FALSE;
1368 ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
1369 mPhysicalDevice, queueIndex, surface, &supportsPresent));
1370
1371 if (supportsPresent == VK_TRUE)
1372 {
1373 newPresentQueue = queueIndex;
1374 break;
1375 }
1376 }
1377 }
1378
1379 ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
1380 ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));
1381
1382 *presentQueueOut = newPresentQueue.value();
1383 return angle::Result::Continue;
1384 }
1385
getVendorString() const1386 std::string RendererVk::getVendorString() const
1387 {
1388 return GetVendorString(mPhysicalDeviceProperties.vendorID);
1389 }
1390
getRendererDescription() const1391 std::string RendererVk::getRendererDescription() const
1392 {
1393 std::stringstream strstr;
1394
1395 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
1396
1397 strstr << "Vulkan ";
1398 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
1399 strstr << VK_VERSION_MINOR(apiVersion) << ".";
1400 strstr << VK_VERSION_PATCH(apiVersion);
1401
1402 strstr << "(";
1403
1404 // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so that
1405 // Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not be
1406 // needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
1407 // driver detection.
1408 if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
1409 {
1410 strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
1411 }
1412
1413 strstr << mPhysicalDeviceProperties.deviceName;
1414 strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";
1415
1416 strstr << ")";
1417
1418 return strstr.str();
1419 }
1420
// Computes the highest OpenGL ES version this renderer can expose, starting from the highest
// version ANGLE implements and progressively clamping it down for each Vulkan capability that is
// insufficient for a given ES feature level.
gl::Version RendererVk::getMaxSupportedESVersion() const
{
    // Current highest supported version
    gl::Version maxVersion = gl::Version(3, 1);

    // Early out without downgrading ES version if mock ICD enabled.
    // Mock ICD doesn't expose sufficient capabilities yet.
    // https://github.com/KhronosGroup/Vulkan-Tools/issues/84
    if (isMockICDEnabled())
    {
        return maxVersion;
    }

    // Limit to ES3.1 if there are any blockers for 3.2.
    // NOTE(review): maxVersion already starts at {3, 1}, so this clamp is currently a no-op; it
    // only takes effect if the starting version above is ever raised past 3.1.
    if (!vk::CanSupportGPUShader5EXT(mPhysicalDeviceFeatures))
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 1});
    }

    // Limit to ES3.0 if there are any blockers for 3.1.

    // ES3.1 requires at least one atomic counter buffer and four storage buffers in compute.
    // Atomic counter buffers are emulated with storage buffers. For simplicity, we always support
    // either none or IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS atomic counter buffers. So if
    // Vulkan doesn't support at least that many storage buffers in compute, we don't support 3.1.
    const uint32_t kMinimumStorageBuffersForES31 =
        gl::limits::kMinimumComputeStorageBuffers + gl::IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS;
    if (mPhysicalDeviceProperties.limits.maxPerStageDescriptorStorageBuffers <
        kMinimumStorageBuffersForES31)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // ES3.1 requires a vertex attribute relative offset of at least 2047.
    // If the Vulkan implementation can't support that, we cannot support 3.1.
    if (mPhysicalDeviceProperties.limits.maxVertexInputAttributeOffset < 2047)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // Limit to ES2.0 if there are any blockers for 3.0.
    // TODO: http://anglebug.com/3972 Limit to GLES 2.0 if flat shading can't be emulated

    // Multisample textures (ES3.1) and multisample renderbuffers (ES3.0) require the Vulkan driver
    // to support the standard sample locations (in order to pass dEQP tests that check these
    // locations). If the Vulkan implementation can't support that, we cannot support 3.0/3.1.
    if (mPhysicalDeviceProperties.limits.standardSampleLocations != VK_TRUE)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If the command buffer doesn't support queries, we can't support ES3.
    if (!vk::CommandBuffer::SupportsQueries(mPhysicalDeviceFeatures))
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If independentBlend is not supported, we can't have a mix of has-alpha and emulated-alpha
    // render targets in a framebuffer. We also cannot perform masked clears of multiple render
    // targets.
    if (!mPhysicalDeviceFeatures.independentBlend)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If the Vulkan transform feedback extension is not present, we use an emulation path that
    // requires the vertexPipelineStoresAndAtomics feature. Without the extension or this feature,
    // we can't currently support transform feedback.
    if (!mFeatures.supportsTransformFeedbackExtension.enabled &&
        !mFeatures.emulateTransformFeedback.enabled)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // Limit to GLES 2.0 if maxPerStageDescriptorUniformBuffers is too low.
    // Table 6.31 MAX_VERTEX_UNIFORM_BLOCKS minimum value = 12
    // Table 6.32 MAX_FRAGMENT_UNIFORM_BLOCKS minimum value = 12
    // NOTE: We reserve some uniform buffers for emulation, so use the NativeCaps which takes this
    // into account, rather than the physical device maxPerStageDescriptorUniformBuffers limits.
    for (gl::ShaderType shaderType : gl::AllShaderTypes())
    {
        if (static_cast<GLuint>(getNativeCaps().maxShaderUniformBlocks[shaderType]) <
            gl::limits::kMinimumShaderUniformBlocks)
        {
            maxVersion = LimitVersionTo(maxVersion, {2, 0});
        }
    }

    // Limit to GLES 2.0 if maxVertexOutputComponents is too low.
    // Table 6.31 MAX VERTEX OUTPUT COMPONENTS minimum value = 64
    // NOTE: We reserve some vertex output components for emulation, so use the NativeCaps which
    // takes this into account, rather than the physical device maxVertexOutputComponents limits.
    if (static_cast<GLuint>(getNativeCaps().maxVertexOutputComponents) <
        gl::limits::kMinimumVertexOutputComponents)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    return maxVersion;
}
1521
getMaxConformantESVersion() const1522 gl::Version RendererVk::getMaxConformantESVersion() const
1523 {
1524 return LimitVersionTo(getMaxSupportedESVersion(), {3, 0});
1525 }
1526
initFeatures(DisplayVk * displayVk,const ExtensionNameList & deviceExtensionNames)1527 void RendererVk::initFeatures(DisplayVk *displayVk, const ExtensionNameList &deviceExtensionNames)
1528 {
1529 if (displayVk->getState().featuresAllDisabled)
1530 {
1531 ApplyFeatureOverrides(&mFeatures, displayVk->getState());
1532 return;
1533 }
1534
1535 bool isAMD = IsAMD(mPhysicalDeviceProperties.vendorID);
1536 bool isIntel = IsIntel(mPhysicalDeviceProperties.vendorID);
1537 bool isNvidia = IsNvidia(mPhysicalDeviceProperties.vendorID);
1538 bool isQualcomm = IsQualcomm(mPhysicalDeviceProperties.vendorID);
1539 bool isSwS =
1540 IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
1541
1542 if (mLineRasterizationFeatures.bresenhamLines == VK_TRUE)
1543 {
1544 ASSERT(mLineRasterizationFeatures.sType ==
1545 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT);
1546 ANGLE_FEATURE_CONDITION(&mFeatures, bresenhamLineRasterization, true);
1547 }
1548 else
1549 {
1550 // Use OpenGL line rasterization rules if extension not available by default.
1551 // TODO(jmadill): Fix Android support. http://anglebug.com/2830
1552 ANGLE_FEATURE_CONDITION(&mFeatures, basicGLLineRasterization, !IsAndroid());
1553 }
1554
1555 if (mProvokingVertexFeatures.provokingVertexLast == VK_TRUE)
1556 {
1557 ASSERT(mProvokingVertexFeatures.sType ==
1558 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT);
1559 ANGLE_FEATURE_CONDITION(&mFeatures, provokingVertex, true);
1560 }
1561
1562 // TODO(lucferron): Currently disabled on Intel only since many tests are failing and need
1563 // investigation. http://anglebug.com/2728
1564 ANGLE_FEATURE_CONDITION(
1565 &mFeatures, flipViewportY,
1566 !IsIntel(mPhysicalDeviceProperties.vendorID) &&
1567 (mPhysicalDeviceProperties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) ||
1568 ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionNames));
1569
1570 // http://anglebug.com/2838
1571 ANGLE_FEATURE_CONDITION(&mFeatures, extraCopyBufferRegion, IsWindows() && isIntel);
1572
1573 // http://anglebug.com/3055
1574 ANGLE_FEATURE_CONDITION(&mFeatures, forceCPUPathForCubeMapCopy, IsWindows() && isIntel);
1575
1576 // Work around incorrect NVIDIA point size range clamping.
1577 // http://anglebug.com/2970#c10
1578 // Clamp if driver version is:
1579 // < 430 on Windows
1580 // < 421 otherwise
1581 angle::VersionInfo nvidiaVersion;
1582 if (isNvidia)
1583 {
1584 nvidiaVersion =
1585 angle::ParseNvidiaDriverVersion(this->mPhysicalDeviceProperties.driverVersion);
1586 }
1587 ANGLE_FEATURE_CONDITION(&mFeatures, clampPointSize,
1588 isNvidia && nvidiaVersion.major < uint32_t(IsWindows() ? 430 : 421));
1589
1590 // Work around ineffective compute-graphics barriers on Nexus 5X.
1591 // TODO(syoussefi): Figure out which other vendors and driver versions are affected.
1592 // http://anglebug.com/3019
1593 ANGLE_FEATURE_CONDITION(&mFeatures, flushAfterVertexConversion,
1594 IsAndroid() && IsNexus5X(mPhysicalDeviceProperties.vendorID,
1595 mPhysicalDeviceProperties.deviceID));
1596
1597 ANGLE_FEATURE_CONDITION(
1598 &mFeatures, supportsIncrementalPresent,
1599 ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames));
1600
1601 #if defined(ANGLE_PLATFORM_ANDROID)
1602 ANGLE_FEATURE_CONDITION(
1603 &mFeatures, supportsAndroidHardwareBuffer,
1604 IsAndroid() &&
1605 ExtensionFound(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
1606 deviceExtensionNames) &&
1607 ExtensionFound(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, deviceExtensionNames));
1608 #endif
1609
1610 ANGLE_FEATURE_CONDITION(
1611 &mFeatures, supportsExternalMemoryFd,
1612 ExtensionFound(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, deviceExtensionNames));
1613
1614 ANGLE_FEATURE_CONDITION(
1615 &mFeatures, supportsExternalMemoryFuchsia,
1616 ExtensionFound(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, deviceExtensionNames));
1617
1618 ANGLE_FEATURE_CONDITION(
1619 &mFeatures, supportsExternalSemaphoreFd,
1620 ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, deviceExtensionNames));
1621
1622 ANGLE_FEATURE_CONDITION(
1623 &mFeatures, supportsExternalSemaphoreFuchsia,
1624 ExtensionFound(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames));
1625
1626 ANGLE_FEATURE_CONDITION(
1627 &mFeatures, supportsShaderStencilExport,
1628 ExtensionFound(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, deviceExtensionNames));
1629
1630 ANGLE_FEATURE_CONDITION(&mFeatures, supportsTransformFeedbackExtension,
1631 mTransformFeedbackFeatures.transformFeedback == VK_TRUE);
1632
1633 ANGLE_FEATURE_CONDITION(&mFeatures, supportsIndexTypeUint8,
1634 mIndexTypeUint8Features.indexTypeUint8 == VK_TRUE);
1635
1636 ANGLE_FEATURE_CONDITION(&mFeatures, emulateTransformFeedback,
1637 (mFeatures.supportsTransformFeedbackExtension.enabled == VK_FALSE &&
1638 mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics == VK_TRUE));
1639
1640 ANGLE_FEATURE_CONDITION(&mFeatures, disableFifoPresentMode, IsLinux() && isIntel);
1641
1642 ANGLE_FEATURE_CONDITION(&mFeatures, bindEmptyForUnusedDescriptorSets,
1643 IsAndroid() && isQualcomm);
1644
1645 ANGLE_FEATURE_CONDITION(&mFeatures, forceOldRewriteStructSamplers, IsAndroid() && !isSwS);
1646
1647 ANGLE_FEATURE_CONDITION(&mFeatures, perFrameWindowSizeQuery,
1648 isIntel || (IsWindows() && isAMD) || IsFuchsia());
1649
1650 // Disabled on AMD/windows due to buggy behavior.
1651 ANGLE_FEATURE_CONDITION(&mFeatures, disallowSeamfulCubeMapEmulation, IsWindows() && isAMD);
1652
1653 ANGLE_FEATURE_CONDITION(&mFeatures, padBuffersToMaxVertexAttribStride, isAMD);
1654 mMaxVertexAttribStride = std::min(static_cast<uint32_t>(gl::limits::kMaxVertexAttribStride),
1655 mPhysicalDeviceProperties.limits.maxVertexInputBindingStride);
1656
1657 ANGLE_FEATURE_CONDITION(&mFeatures, forceD16TexFilter, IsAndroid() && isQualcomm);
1658
1659 ANGLE_FEATURE_CONDITION(&mFeatures, disableFlippingBlitWithCommand, IsAndroid() && isQualcomm);
1660
1661 // Allocation sanitization disabled by default because of a heaveyweight implementation
1662 // that can cause OOM and timeouts.
1663 ANGLE_FEATURE_CONDITION(&mFeatures, allocateNonZeroMemory, false);
1664
1665 ANGLE_FEATURE_CONDITION(
1666 &mFeatures, supportsExternalMemoryHost,
1667 ExtensionFound(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, deviceExtensionNames));
1668
1669 // Pre-rotation support is not fully ready to be enabled.
1670 ANGLE_FEATURE_CONDITION(&mFeatures, enablePreRotateSurfaces, false);
1671
1672 // Currently disable FramebufferVk cache on Apple: http://anglebug.com/4442
1673 ANGLE_FEATURE_CONDITION(&mFeatures, enableFramebufferVkCache, !IsApple());
1674
1675 // Currently disabled by default: http://anglebug.com/3078
1676 ANGLE_FEATURE_CONDITION(&mFeatures, enablePrecisionQualifiers, false);
1677
1678 ANGLE_FEATURE_CONDITION(&mFeatures, supportDepthStencilRenderingFeedbackLoops, true);
1679
1680 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1681 platform->overrideFeaturesVk(platform, &mFeatures);
1682
1683 ApplyFeatureOverrides(&mFeatures, displayVk->getState());
1684 }
1685
initPipelineCacheVkKey()1686 void RendererVk::initPipelineCacheVkKey()
1687 {
1688 std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
1689 // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
1690 // cache. It's not particularly necessary to write it as a hex number as done here, so long as
1691 // there is no '\0' in the result.
1692 for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
1693 {
1694 hashStream << std::hex << c;
1695 }
1696 // Add the vendor and device id too for good measure.
1697 hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
1698 hashStream << std::hex << mPhysicalDeviceProperties.deviceID;
1699
1700 const std::string &hashString = hashStream.str();
1701 angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
1702 hashString.length(), mPipelineCacheVkBlobKey.data());
1703 }
1704
// Creates |*pipelineCache|, seeding it with previously-serialized data found in the EGL blob
// cache under the key computed by initPipelineCacheVkKey(). |*success| is set to true iff a
// blob cache entry was found (in which case it is used as the cache's initial data).
angle::Result RendererVk::initPipelineCache(DisplayVk *display,
                                            vk::PipelineCache *pipelineCache,
                                            bool *success)
{
    // Derive the device-specific blob cache key first.
    initPipelineCacheVkKey();

    egl::BlobCache::Value initialData;
    *success = display->getBlobCache()->get(display->getScratchBuffer(), mPipelineCacheVkBlobKey,
                                            &initialData);

    VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};

    pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCacheCreateInfo.flags = 0;
    // Only pass initial data when the blob cache lookup succeeded.
    pipelineCacheCreateInfo.initialDataSize = *success ? initialData.size() : 0;
    pipelineCacheCreateInfo.pInitialData = *success ? initialData.data() : nullptr;

    ANGLE_VK_TRY(display, pipelineCache->init(mDevice, pipelineCacheCreateInfo));

    return angle::Result::Continue;
}
1726
// Returns a pointer to mPipelineCache, lazily populating it from the blob cache on first use.
// After the first call, mPipelineCacheInitialized short-circuits all the loading work.
angle::Result RendererVk::getPipelineCache(vk::PipelineCache **pipelineCache)
{
    if (mPipelineCacheInitialized)
    {
        *pipelineCache = &mPipelineCache;
        return angle::Result::Continue;
    }

    // We should now recreate the pipeline cache with the blob cache pipeline data.
    vk::PipelineCache pCache;
    bool success = false;
    ANGLE_TRY(initPipelineCache(vk::GetImpl(mDisplay), &pCache, &success));
    if (success)
    {
        // Merge the newly created pipeline cache into the existing one.
        // NOTE(review): the result of merge() is ignored — a failed merge silently drops the
        // cached data; presumably harmless, but confirm whether it should be error-checked.
        mPipelineCache.merge(mDevice, mPipelineCache.getHandle(), 1, pCache.ptr());
    }
    // Mark initialized even if the blob cache had nothing, so we don't retry every call.
    mPipelineCacheInitialized = true;
    pCache.destroy(mDevice);

    *pipelineCache = &mPipelineCache;
    return angle::Result::Continue;
}
1750
// Returns the GL capabilities derived from the Vulkan device, computing them on first use.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
1756
// Returns per-format texture capabilities, computing them on first use.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
1762
// Returns the set of GL extensions the Vulkan backend exposes, computing it on first use.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
1768
// Returns backend-specific GL limitations, computing them on first use.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
1774
// Looks up (or creates) a descriptor set layout matching |desc| in the shared cache.
// The mutex makes the cache safe to use from multiple contexts concurrently.
angle::Result RendererVk::getDescriptorSetLayout(
    vk::Context *context,
    const vk::DescriptorSetLayoutDesc &desc,
    vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
{
    std::lock_guard<decltype(mDescriptorSetLayoutCacheMutex)> lock(mDescriptorSetLayoutCacheMutex);
    return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
}
1783
// Looks up (or creates) a pipeline layout matching |desc| in the shared cache.
// The mutex makes the cache safe to use from multiple contexts concurrently.
angle::Result RendererVk::getPipelineLayout(
    vk::Context *context,
    const vk::PipelineLayoutDesc &desc,
    const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
    vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
{
    std::lock_guard<decltype(mPipelineLayoutCacheMutex)> lock(mPipelineLayoutCacheMutex);
    return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
                                                  pipelineLayoutOut);
}
1794
getPipelineCacheSize(DisplayVk * displayVk,size_t * pipelineCacheSizeOut)1795 angle::Result RendererVk::getPipelineCacheSize(DisplayVk *displayVk, size_t *pipelineCacheSizeOut)
1796 {
1797 VkResult result = mPipelineCache.getCacheData(mDevice, pipelineCacheSizeOut, nullptr);
1798 ANGLE_VK_TRY(displayVk, result);
1799
1800 return angle::Result::Continue;
1801 }
1802
// Periodically serializes the Vulkan pipeline cache into the EGL blob cache so pipelines can be
// restored on subsequent runs. Throttled two ways: only every kPipelineCacheVkUpdatePeriod
// calls, and only when new pipelines have been created since the last sync (mPipelineCacheDirty).
angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
{
    // TODO: Synchronize access to the pipeline/blob caches?
    ASSERT(mPipelineCache.valid());

    // Count down the throttle; most calls return here without touching the cache.
    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue;
    }
    if (!mPipelineCacheDirty)
    {
        mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;
        return angle::Result::Continue;
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    size_t pipelineCacheSize = 0;
    ANGLE_TRY(getPipelineCacheSize(displayVk, &pipelineCacheSize));
    // Make sure we will receive enough data to hold the pipeline cache header
    // Table 7. Layout for pipeline cache header version VK_PIPELINE_CACHE_HEADER_VERSION_ONE
    const size_t kPipelineCacheHeaderSize = 16 + VK_UUID_SIZE;
    if (pipelineCacheSize < kPipelineCacheHeaderSize)
    {
        // No pipeline cache data to read, so return
        return angle::Result::Continue;
    }

    angle::MemoryBuffer *pipelineCacheData = nullptr;
    ANGLE_VK_CHECK_ALLOC(displayVk,
                         displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));

    // getCacheData may shrink pipelineCacheSize if the cache changed between the two calls.
    size_t oldPipelineCacheSize = pipelineCacheSize;
    VkResult result =
        mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
    // We don't need all of the cache data, so just make sure we at least got the header
    // Vulkan Spec 9.6. Pipeline Cache
    // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/chap9.html#pipelines-cache
    // If pDataSize is less than what is necessary to store this header, nothing will be written to
    // pData and zero will be written to pDataSize.
    // Any data written to pData is valid and can be provided as the pInitialData member of the
    // VkPipelineCacheCreateInfo structure passed to vkCreatePipelineCache.
    if (ANGLE_UNLIKELY(pipelineCacheSize < kPipelineCacheHeaderSize))
    {
        WARN() << "Not enough pipeline cache data read.";
        return angle::Result::Continue;
    }
    else if (ANGLE_UNLIKELY(result == VK_INCOMPLETE))
    {
        // VK_INCOMPLETE with a valid header is still usable data; warn and store it anyway.
        WARN() << "Received VK_INCOMPLETE: Old: " << oldPipelineCacheSize
               << ", New: " << pipelineCacheSize;
    }
    else
    {
        ANGLE_VK_TRY(displayVk, result);
    }

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
    // the buffer to avoid leaking garbage memory.
    ASSERT(pipelineCacheSize <= pipelineCacheData->size());
    if (pipelineCacheSize < pipelineCacheData->size())
    {
        memset(pipelineCacheData->data() + pipelineCacheSize, 0,
               pipelineCacheData->size() - pipelineCacheSize);
    }

    displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);
    mPipelineCacheDirty = false;

    return angle::Result::Continue;
}
1874
// Issues a unique, monotonically-increasing serial used to identify shader modules.
Serial RendererVk::issueShaderSerial()
{
    return mShaderSerialFactory.generate();
}
1879
1880 // These functions look at the mandatory format for support, and fallback to querying the device (if
1881 // necessary) to test the availability of the bits.
// Returns true if |format| supports all of |featureBits| for linearly-tiled images.
bool RendererVk::hasLinearImageFormatFeatureBits(VkFormat format,
                                                 const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(format, featureBits);
}
1887
// Returns the subset of |featureBits| that |format| supports for optimally-tiled images.
VkFormatFeatureFlags RendererVk::getImageFormatFeatureBits(VkFormat format,
                                                           const VkFormatFeatureFlags featureBits)
{
    return getFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}
1893
// Returns true if |format| supports all of |featureBits| for optimally-tiled images.
bool RendererVk::hasImageFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}
1898
// Returns true if |format| supports all of |featureBits| for buffer usage.
bool RendererVk::hasBufferFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(format, featureBits);
}
1903
// Submits |submitInfo| to the VkQueue matching |priority|, optionally signaling |fence|.
// On success, |*serialOut| receives the serial associated with this submission.
angle::Result RendererVk::queueSubmit(vk::Context *context,
                                      egl::ContextPriority priority,
                                      const VkSubmitInfo &submitInfo,
                                      const vk::Fence *fence,
                                      Serial *serialOut)
{
    {
        // vkQueueSubmit requires external synchronization on the queue; scope the lock to just
        // the submit so garbage cleanup below doesn't hold it.
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        VkFence handle = fence ? fence->getHandle() : VK_NULL_HANDLE;
        ANGLE_VK_TRY(context, vkQueueSubmit(mQueues[priority], 1, &submitInfo, handle));
    }

    // Opportunistically free resources whose submissions have completed (non-blocking).
    ANGLE_TRY(cleanupGarbage(false));

    // Advance the serial bookkeeping: this submission owns the current serial.
    // NOTE(review): these updates are outside mQueueMutex — presumably callers serialize
    // submissions at a higher level; confirm.
    *serialOut = mCurrentQueueSerial;
    mLastSubmittedQueueSerial = mCurrentQueueSerial;
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return angle::Result::Continue;
}
1924
// Submits a single one-off primary command buffer. Ownership of |primary| is transferred to
// mPendingOneOffCommands, keeping the buffer alive until its serial completes (it is then
// recycled by getCommandBufferOneOff).
angle::Result RendererVk::queueSubmitOneOff(vk::Context *context,
                                            vk::PrimaryCommandBuffer &&primary,
                                            egl::ContextPriority priority,
                                            Serial *serialOut)
{
    VkSubmitInfo submitInfo = {};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = primary.ptr();

    ANGLE_TRY(queueSubmit(context, priority, submitInfo, nullptr, serialOut));

    // Record the buffer with the serial it was submitted under.
    mPendingOneOffCommands.push_back({*serialOut, std::move(primary)});

    return angle::Result::Continue;
}
1941
// Blocks until the queue matching |priority| is idle, then frees completed garbage.
angle::Result RendererVk::queueWaitIdle(vk::Context *context, egl::ContextPriority priority)
{
    {
        // The queue requires external synchronization; release the lock before cleanup.
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueues[priority]));
    }

    ANGLE_TRY(cleanupGarbage(false));

    return angle::Result::Continue;
}
1953
// Blocks until the whole device (all queues) is idle, then frees completed garbage.
angle::Result RendererVk::deviceWaitIdle(vk::Context *context)
{
    {
        // Hold the queue lock so no submit races with the device-wide wait.
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkDeviceWaitIdle(mDevice));
    }

    ANGLE_TRY(cleanupGarbage(false));

    return angle::Result::Continue;
}
1965
// Presents |presentInfo| on the queue matching |priority|. Returns the raw VkResult so the
// caller can handle swapchain statuses (e.g. suboptimal/out-of-date) itself.
VkResult RendererVk::queuePresent(egl::ContextPriority priority,
                                  const VkPresentInfoKHR &presentInfo)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::queuePresent");

    // vkQueuePresentKHR requires external synchronization on the queue.
    std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);

    {
        ANGLE_TRACE_EVENT0("gpu.angle", "vkQueuePresentKHR");
        return vkQueuePresentKHR(mQueues[priority], &presentInfo);
    }
}
1978
newSharedFence(vk::Context * context,vk::Shared<vk::Fence> * sharedFenceOut)1979 angle::Result RendererVk::newSharedFence(vk::Context *context,
1980 vk::Shared<vk::Fence> *sharedFenceOut)
1981 {
1982 vk::Fence fence;
1983 if (mFenceRecycler.empty())
1984 {
1985 VkFenceCreateInfo fenceCreateInfo = {};
1986 fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1987 fenceCreateInfo.flags = 0;
1988 ANGLE_VK_TRY(context, fence.init(mDevice, fenceCreateInfo));
1989 }
1990 else
1991 {
1992 mFenceRecycler.fetch(&fence);
1993 ANGLE_VK_TRY(context, fence.reset(mDevice));
1994 }
1995 sharedFenceOut->assign(mDevice, std::move(fence));
1996 return angle::Result::Continue;
1997 }
1998
// Returns the subset of |featureBits| supported by |format| in the properties member selected
// by the |features| template parameter (linear/optimal tiling or buffer features).
// Checks the spec-mandated support table first so the common case avoids a device query; the
// real device properties are queried at most once per format and cached in mFormatProperties.
template <VkFormatFeatureFlags VkFormatProperties::*features>
VkFormatFeatureFlags RendererVk::getFormatFeatureBits(VkFormat format,
                                                      const VkFormatFeatureFlags featureBits)
{
    ASSERT(static_cast<uint32_t>(format) < vk::kNumVkFormats);
    VkFormatProperties &deviceProperties = mFormatProperties[format];

    // bufferFeatures doubles as the "not yet queried" sentinel for the whole entry, regardless
    // of which member |features| selects. (Assumes entries are initialized to
    // kInvalidFormatFeatureFlags — confirm at the mFormatProperties declaration.)
    if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
    {
        // If we don't have the actual device features, see if the requested features are mandatory.
        // If so, there's no need to query the device.
        const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(format);
        if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
        {
            return featureBits;
        }

        // Otherwise query the format features and cache it.
        vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &deviceProperties);
        // Workaround for some Android devices that don't indicate filtering
        // support on D16_UNORM and they should.
        if (mFeatures.forceD16TexFilter.enabled && format == VK_FORMAT_D16_UNORM)
        {
            deviceProperties.*features |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
        }
    }

    return deviceProperties.*features & featureBits;
}
2028
// Returns true iff ALL of |featureBits| are supported (getFormatFeatureBits returns only the
// supported subset; IsMaskFlagSet checks it equals the full request).
template <VkFormatFeatureFlags VkFormatProperties::*features>
bool RendererVk::hasFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return IsMaskFlagSet(getFormatFeatureBits<features>(format, featureBits), featureBits);
}
2034
cleanupGarbage(bool block)2035 angle::Result RendererVk::cleanupGarbage(bool block)
2036 {
2037 std::lock_guard<decltype(mGarbageMutex)> lock(mGarbageMutex);
2038
2039 for (auto garbageIter = mSharedGarbage.begin(); garbageIter != mSharedGarbage.end();)
2040 {
2041 // Possibly 'counter' should be always zero when we add the object to garbage.
2042 vk::SharedGarbage &garbage = *garbageIter;
2043 if (garbage.destroyIfComplete(this, mLastCompletedQueueSerial))
2044 {
2045 garbageIter = mSharedGarbage.erase(garbageIter);
2046 }
2047 else
2048 {
2049 garbageIter++;
2050 }
2051 }
2052
2053 return angle::Result::Continue;
2054 }
2055
// Records the latest validation-layer message; only the most recent message is kept, but the
// count of all messages since the last clear is tracked.
void RendererVk::onNewValidationMessage(const std::string &message)
{
    mLastValidationMessage = message;
    ++mValidationMessageCount;
}
2061
getAndClearLastValidationMessage(uint32_t * countSinceLastClear)2062 std::string RendererVk::getAndClearLastValidationMessage(uint32_t *countSinceLastClear)
2063 {
2064 *countSinceLastClear = mValidationMessageCount;
2065 mValidationMessageCount = 0;
2066
2067 return std::move(mLastValidationMessage);
2068 }
2069
// Upper bound used when waiting on fences: 120 seconds, expressed in nanoseconds.
uint64_t RendererVk::getMaxFenceWaitTimeNs() const
{
    constexpr uint64_t kMaxFenceWaitTimeNs = 120'000'000'000llu;

    return kMaxFenceWaitTimeNs;
}
2076
// Advances mLastCompletedQueueSerial monotonically; out-of-order completions never move it back.
void RendererVk::onCompletedSerial(Serial serial)
{
    if (serial > mLastCompletedQueueSerial)
    {
        mLastCompletedQueueSerial = serial;
    }
}
2084
// volk keeps the loaded instance/device entry points in globals; if another renderer has since
// loaded a different instance or device, reload ours so dispatch goes to the right driver.
void RendererVk::reloadVolkIfNeeded() const
{
    if ((mInstance != VK_NULL_HANDLE) && (volkGetLoadedInstance() != mInstance))
    {
        volkLoadInstance(mInstance);
    }

    if ((mDevice != VK_NULL_HANDLE) && (volkGetLoadedDevice() != mDevice))
    {
        volkLoadDevice(mDevice);
    }
}
2097
// Returns a primary command buffer already in the recording state for a one-off submission.
// Reuses the oldest pending buffer when the GPU has finished with it; otherwise allocates a new
// one from the lazily-created one-off pool.
angle::Result RendererVk::getCommandBufferOneOff(vk::Context *context,
                                                 vk::PrimaryCommandBuffer *commandBufferOut)
{
    // Create the pool on first use. RESET_COMMAND_BUFFER_BIT is needed so individual buffers
    // can be reset for reuse below.
    if (!mOneOffCommandPool.valid())
    {
        VkCommandPoolCreateInfo createInfo = {};
        createInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        createInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
        ANGLE_VK_TRY(context, mOneOffCommandPool.init(mDevice, createInfo));
    }

    // Recycle the oldest pending buffer if its submission has completed.
    // NOTE(review): the strict '<' skips reuse when serial == mLastCompletedQueueSerial even
    // though that submission has completed — presumably a conservative choice; confirm.
    if (!mPendingOneOffCommands.empty() &&
        mPendingOneOffCommands.front().serial < mLastCompletedQueueSerial)
    {
        *commandBufferOut = std::move(mPendingOneOffCommands.front().commandBuffer);
        mPendingOneOffCommands.pop_front();
        ANGLE_VK_TRY(context, commandBufferOut->reset());
    }
    else
    {
        VkCommandBufferAllocateInfo allocInfo = {};
        allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        allocInfo.commandBufferCount = 1;
        allocInfo.commandPool = mOneOffCommandPool.getHandle();

        ANGLE_VK_TRY(context, commandBufferOut->init(context->getDevice(), allocInfo));
    }

    // Begin recording immediately; the buffer is submitted exactly once before reset/free.
    VkCommandBufferBeginInfo beginInfo = {};
    beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    beginInfo.pInheritanceInfo = nullptr;
    ANGLE_VK_TRY(context, commandBufferOut->begin(beginInfo));

    return angle::Result::Continue;
}
2135 } // namespace rx
2136