1 /*
2 Copyright 2020 David Robillard <d@drobilla.net>
3
4 Permission to use, copy, modify, and/or distribute this software for any
5 purpose with or without fee is hereby granted, provided that the above
6 copyright notice and this permission notice appear in all copies.
7
8 THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
17 /**
18 @file sybok.hpp
19 @brief A minimal C++ wrapper for the Vulkan API.
20
21 This is a manually-written minimal wrapper for Vulkan. It makes working
22 with Vulkan a little easier in C++, but takes a different approach than
23 vulkan.hpp. In particular:
24
25 - Works nicely with dynamic loading. Since the API itself is an object, it
26 is simple to ensure the dynamically loaded API (or a consistent API in
27 general) is used everywhere. Passing a dispatch parameter to every
28 function as in vulkan.hpp makes dynamic loading extremely painful (not to
29 mention ugly), and mistakes tend to become link time errors. This is, in
30 my opinion, a glaring design flaw, and the real reason why this wrapper
31 reluctantly exists.
32
33 - Explicit separation of the initial API that does not require an instance
34 to load, from the rest of the API that does.
35
36 - Opinionated use of scoped handles everywhere.
37
38 - Remains close to the C API so that code can be easily ported. This means
39 that the pattern of return codes with output parameters is preserved,
40 except with smart handles that make leaks impossible. While less pretty,
41 this does not require exceptions.
42
43 - No exceptions or RTTI required.
44
45 - A safe scoped API for commands that encodes the semantics of the Vulkan
46 API. For example, it is statically impossible to call render scope
47 commands while not in a render scope.
48
49 - A reasonable amount of relatively readable code.
50
51 On the other hand, there are far fewer niceties, and the C API is used
52 directly as much as possible, particularly for structs (although they are
53 taken by const reference so they can be written inline). There is only
54 support for a minimal portable subset of Vulkan 1.1 with a few portable KHR
55 extensions.
56
57 In short, if the above sounds appealing, or you want a minimal wrapper that
58 can be extended if necessary to suit your application, you might find this
59 useful. If you want a fully-featured wrapper for Vulkan and don't care
60 about linker dependencies, you probably won't.
61 */
62
63 #ifndef SYBOK_HPP
64 #define SYBOK_HPP
65
66 #ifdef VULKAN_CORE_H_
67 # error "sybok.hpp must be included before or instead of vulkan headers"
68 #endif
69
70 #ifdef __GNUC__
71 # pragma GCC diagnostic push
72 # pragma GCC diagnostic ignored "-Wswitch-enum"
73 #endif
74
75 #define VK_NO_PROTOTYPES
76
77 // On 64-bit platforms, all handles are "dispatchable" pointers
78 #if defined(__LP64__) || defined(_WIN64) || \
79 (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || \
80 defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || \
81 defined(__powerpc64__)
82
83 # define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) \
84 typedef struct object##_T* object; // NOLINT(bugprone-macro-parentheses)
85
86 // On 32-bit platforms, some "non-dispatchable" handles are 64 bit integers
87 #else
88
/// Type-safe wrapper over a raw 64-bit integer handle
template<class Tag>
struct NonDispatchableHandle {
  uint64_t handle;

  explicit operator uint64_t() const noexcept
  {
    return handle;
  }

  explicit operator bool() const noexcept
  {
    return handle != 0u;
  }
};
97
98 # define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) \
99 using object = NonDispatchableHandle<struct Sk##object##Tag>;
100
101 #endif
102
103 #include <vulkan/vulkan_core.h> // IWYU pragma: export
104
105 #include <array>
106 #include <cassert>
107 #include <cstddef>
108 #include <cstdint>
109 #include <type_traits>
110 #include <utility>
111
112 #if __cplusplus >= 201703L
113 # define SYBOK_NODISCARD [[nodiscard]]
114 #elif defined(__GNUC__)
115 # define SYBOK_NODISCARD [[gnu::warn_unused_result]]
116 #else
117 # define SYBOK_NODISCARD
118 #endif
119
120 /// Helper macro to make array arguments format nicely
121 #define SK_COUNTED(count, ...) count, __VA_ARGS__
122
123 namespace sk {
124
125 class CommandScope;
126 class RenderCommandScope;
127
128 inline const char*
string(const VkResult result)129 string(const VkResult result)
130 {
131 switch (result) {
132 case VK_SUCCESS:
133 return "Success";
134 case VK_NOT_READY:
135 return "Not Ready";
136 case VK_TIMEOUT:
137 return "Timeout";
138 case VK_EVENT_SET:
139 return "Event set";
140 case VK_EVENT_RESET:
141 return "Event reset";
142 case VK_INCOMPLETE:
143 return "Incomplete";
144 case VK_ERROR_OUT_OF_HOST_MEMORY:
145 return "Out of host memory";
146 case VK_ERROR_OUT_OF_DEVICE_MEMORY:
147 return "Out of device memory";
148 case VK_ERROR_INITIALIZATION_FAILED:
149 return "Initialization failed";
150 case VK_ERROR_DEVICE_LOST:
151 return "Device lost";
152 case VK_ERROR_MEMORY_MAP_FAILED:
153 return "Memory map failed";
154 case VK_ERROR_LAYER_NOT_PRESENT:
155 return "Layer not present";
156 case VK_ERROR_EXTENSION_NOT_PRESENT:
157 return "Extension not present";
158 case VK_ERROR_FEATURE_NOT_PRESENT:
159 return "Feature not present";
160 case VK_ERROR_INCOMPATIBLE_DRIVER:
161 return "Incompatible driver";
162 case VK_ERROR_TOO_MANY_OBJECTS:
163 return "Too many objects";
164 case VK_ERROR_FORMAT_NOT_SUPPORTED:
165 return "Format not supported";
166 case VK_ERROR_FRAGMENTED_POOL:
167 return "Fragmented pool";
168 case VK_ERROR_OUT_OF_POOL_MEMORY: // Vulkan 1.1
169 return "Out of pool memory";
170 case VK_ERROR_INVALID_EXTERNAL_HANDLE: // Vulkan 1.1
171 return "Invalid external handle";
172 case VK_ERROR_SURFACE_LOST_KHR: // VK_KHR_surface
173 return "Surface lost";
174 case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: // VK_KHR_surface
175 return "Native window in use";
176 case VK_SUBOPTIMAL_KHR: // VK_KHR_swapchain
177 return "Suboptimal";
178 case VK_ERROR_OUT_OF_DATE_KHR: // VK_KHR_swapchain
179 return "Out of date";
180 case VK_ERROR_VALIDATION_FAILED_EXT: // VK_EXT_debug_report
181 return "Validation failed";
182 default:
183 break;
184 }
185
186 return "Unknown error";
187 }
188
189 inline const char*
string(const VkPresentModeKHR presentMode)190 string(const VkPresentModeKHR presentMode)
191 {
192 switch (presentMode) {
193 case VK_PRESENT_MODE_IMMEDIATE_KHR:
194 return "Immediate";
195 case VK_PRESENT_MODE_MAILBOX_KHR:
196 return "Mailbox";
197 case VK_PRESENT_MODE_FIFO_KHR:
198 return "FIFO";
199 case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
200 return "Relaxed FIFO";
201 default:
202 break;
203 }
204
205 return "Unknown present mode";
206 }
207
208 inline const char*
string(const VkDebugReportFlagBitsEXT flag)209 string(const VkDebugReportFlagBitsEXT flag)
210 {
211 switch (flag) {
212 case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
213 return "Information";
214 case VK_DEBUG_REPORT_WARNING_BIT_EXT:
215 return "Warning";
216 case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
217 return "Performance Warning";
218 case VK_DEBUG_REPORT_ERROR_BIT_EXT:
219 return "Error";
220 case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
221 return "Debug";
222 default:
223 break;
224 }
225
226 return "Unknown report";
227 }
228
229 template<class T>
230 class GlobalDeleter
231 {
232 public:
233 using DestroyFunc = void (*)(T, const VkAllocationCallbacks*);
234
235 GlobalDeleter() = default;
236 ~GlobalDeleter() = default;
237
238 // NOLINTNEXTLINE(google-explicit-constructor, hicpp-explicit-conversions)
GlobalDeleter(DestroyFunc destroyFunc)239 GlobalDeleter(DestroyFunc destroyFunc) noexcept
240 : _destroyFunc{destroyFunc}
241 {}
242
243 GlobalDeleter(const GlobalDeleter&) = delete;
244 GlobalDeleter& operator=(const GlobalDeleter&) = delete;
245
GlobalDeleter(GlobalDeleter && other)246 GlobalDeleter(GlobalDeleter&& other) noexcept
247 {
248 std::swap(_destroyFunc, other._destroyFunc);
249 }
250
operator =(GlobalDeleter && other)251 GlobalDeleter& operator=(GlobalDeleter&& other) noexcept
252 {
253 std::swap(_destroyFunc, other._destroyFunc);
254 return *this;
255 }
256
operator ()(T handle)257 void operator()(T handle) noexcept
258 {
259 if (_destroyFunc && handle) {
260 _destroyFunc(handle, nullptr);
261 }
262 }
263
264 private:
265 DestroyFunc _destroyFunc{};
266 };
267
268 template<class T, class Parent>
269 class DependantDeleter
270 {
271 public:
272 using DestroyFunc = void (*)(Parent, T, const VkAllocationCallbacks*);
273
274 DependantDeleter() = default;
275 ~DependantDeleter() = default;
276
DependantDeleter(Parent parent,DestroyFunc destroyFunc)277 DependantDeleter(Parent parent, DestroyFunc destroyFunc) noexcept
278 : _parent{parent}
279 , _destroyFunc{destroyFunc}
280 {}
281
282 DependantDeleter(const DependantDeleter&) = delete;
283 DependantDeleter& operator=(const DependantDeleter&) = delete;
284
DependantDeleter(DependantDeleter && other)285 DependantDeleter(DependantDeleter&& other) noexcept { swap(other); }
286
operator =(DependantDeleter && other)287 DependantDeleter& operator=(DependantDeleter&& other) noexcept
288 {
289 swap(other);
290 return *this;
291 }
292
operator ()(T handle)293 void operator()(T handle) noexcept
294 {
295 if (_parent && _destroyFunc && handle) {
296 _destroyFunc(_parent, handle, nullptr);
297 }
298 }
299
300 private:
swap(DependantDeleter & other)301 void swap(DependantDeleter& other) noexcept
302 {
303 std::swap(_parent, other._parent);
304 std::swap(_destroyFunc, other._destroyFunc);
305 }
306
307 Parent _parent{};
308 DestroyFunc _destroyFunc{};
309 };
310
311 template<class T, class Pool, class FreeFuncResult>
312 class PoolDeleter
313 {
314 public:
315 using FreeFunc = FreeFuncResult (*)(VkDevice, Pool, uint32_t, const T*);
316
317 PoolDeleter() noexcept = default;
318 ~PoolDeleter() noexcept = default;
319
PoolDeleter(VkDevice device,Pool pool,uint32_t count,FreeFunc freeFunc)320 PoolDeleter(VkDevice device,
321 Pool pool,
322 uint32_t count,
323 FreeFunc freeFunc) noexcept
324 : _device{device}
325 , _pool{pool}
326 , _count{count}
327 , _freeFunc{freeFunc}
328 {}
329
330 PoolDeleter(const PoolDeleter&) = delete;
331 PoolDeleter& operator=(const PoolDeleter&) = delete;
332
PoolDeleter(PoolDeleter && other)333 PoolDeleter(PoolDeleter&& other) noexcept { swap(other); }
334
operator =(PoolDeleter && other)335 PoolDeleter& operator=(PoolDeleter&& other) noexcept
336 {
337 swap(other);
338 return *this;
339 }
340
operator ()(T * handle)341 void operator()(T* handle) noexcept
342 {
343 if (_device && _pool && handle) {
344 _freeFunc(_device, _pool, _count, handle);
345 }
346 }
347
348 private:
swap(PoolDeleter & other)349 void swap(PoolDeleter& other) noexcept
350 {
351 std::swap(_device, other._device);
352 std::swap(_pool, other._pool);
353 std::swap(_count, other._count);
354 std::swap(_freeFunc, other._freeFunc);
355 }
356
357 VkDevice _device{};
358 Pool _pool{};
359 uint32_t _count{};
360 FreeFunc _freeFunc{};
361 };
362
/// Scoped owner of a dispatchable (pointer-sized) handle
///
/// Calls the deleter on the handle when destroyed, unless the handle is null.
template<class T, class TDeleter>
class UniqueDispatchableHandle
{
public:
  using Deleter = TDeleter;
  using Handle = T;

  static_assert(std::is_pointer<T>::value, "");

  UniqueDispatchableHandle() = default;

  /// Take ownership of `handle`, to be destroyed with `deleter`
  UniqueDispatchableHandle(Handle handle, Deleter deleter) noexcept
    : _handle{handle}
    , _deleter{std::move(deleter)}
  {}

  ~UniqueDispatchableHandle() noexcept
  {
    if (_handle) {
      _deleter(_handle);
    }
  }

  UniqueDispatchableHandle(const UniqueDispatchableHandle&) noexcept = delete;
  UniqueDispatchableHandle& operator=(
    const UniqueDispatchableHandle&) noexcept = delete;

  UniqueDispatchableHandle(UniqueDispatchableHandle&& rhs) noexcept
  {
    swap(rhs);
  }

  UniqueDispatchableHandle& operator=(UniqueDispatchableHandle&& rhs) noexcept
  {
    swap(rhs);
    return *this;
  }

  /// Return the raw handle, which may be null
  const Handle& get() const noexcept { return _handle; }

  // NOLINTNEXTLINE(google-explicit-constructor, hicpp-explicit-conversions)
  operator Handle() const noexcept { return _handle; }

private:
  void swap(UniqueDispatchableHandle& rhs) noexcept
  {
    std::swap(_handle, rhs._handle);
    std::swap(_deleter, rhs._deleter);
  }

  Handle _handle{};
  Deleter _deleter{};
};
416
417 #if defined(__LP64__) || defined(_WIN64) || \
418 (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || \
419 defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || \
420 defined(__powerpc64__)
421
422 template<class T, class TDeleter>
423 using UniqueNonDispatchableHandle = UniqueDispatchableHandle<T, TDeleter>;
424
425 #else
426
/// Scoped owner of a non-dispatchable handle
///
/// Used on 32-bit platforms where non-dispatchable handles are 64-bit
/// integers rather than pointers, so the pointer-based wrapper can't be used.
template<class T, class TDeleter>
class UniqueNonDispatchableHandle
{
public:
  using Deleter = TDeleter;
  using Handle = T;

  UniqueNonDispatchableHandle() = default;

  /// Take ownership of a non-null `handle`, to be destroyed with `deleter`
  UniqueNonDispatchableHandle(T handle, Deleter deleter) noexcept
    : _handle{handle}
    , _deleter{std::move(deleter)}
  {
    assert(handle);
  }

  ~UniqueNonDispatchableHandle() noexcept
  {
    if (_handle) {
      _deleter(_handle);
    }
  }

  UniqueNonDispatchableHandle(const UniqueNonDispatchableHandle&) noexcept =
    delete;
  UniqueNonDispatchableHandle& operator=(
    const UniqueNonDispatchableHandle&) noexcept = delete;

  UniqueNonDispatchableHandle(UniqueNonDispatchableHandle&& rhs) noexcept
  {
    swap(rhs);
  }

  UniqueNonDispatchableHandle& operator=(
    UniqueNonDispatchableHandle&& rhs) noexcept
  {
    swap(rhs);
    return *this;
  }

  /// Return the raw handle
  const Handle& get() const noexcept { return _handle; }

  operator Handle() const noexcept { return _handle; }

private:
  void swap(UniqueNonDispatchableHandle& rhs) noexcept
  {
    std::swap(_handle, rhs._handle);
    std::swap(_deleter, rhs._deleter);
  }

  T _handle{};
  Deleter _deleter{};
};
481
482 #endif
483
/// Scoped owner of an array of handles that are freed as a single unit
template<class Vector, class Deleter>
class UniqueArrayHandle
{
public:
  using T = typename Vector::value_type;

  UniqueArrayHandle() = default;

  /// Take ownership of a non-empty `array` of `size` handles
  UniqueArrayHandle(uint32_t size, Vector&& array, Deleter deleter) noexcept
    : _array{std::move(array)}
    , _deleter{std::move(deleter)}
    , _size{size}
  {
    assert(!_array.empty());
  }

  ~UniqueArrayHandle() noexcept
  {
    if (!_array.empty()) {
      _deleter(_array.data());
    }
  }

  UniqueArrayHandle(const UniqueArrayHandle&) noexcept = delete;
  UniqueArrayHandle& operator=(const UniqueArrayHandle&) noexcept = delete;

  UniqueArrayHandle(UniqueArrayHandle&& rhs) noexcept { swap(rhs); }

  UniqueArrayHandle& operator=(UniqueArrayHandle&& rhs) noexcept
  {
    swap(rhs);
    return *this;
  }

  /// Return the handle at `index` (no bounds checking)
  const T& operator[](const size_t index) const noexcept
  {
    return _array[index];
  }

  T& operator[](const size_t index) noexcept { return _array[index]; }

  /// Return a pointer to the first raw handle
  const T* get() const noexcept { return _array.data(); }
  T* get() noexcept { return _array.data(); }

private:
  void swap(UniqueArrayHandle& rhs) noexcept
  {
    std::swap(_array, rhs._array);
    std::swap(_deleter, rhs._deleter);
    std::swap(_size, rhs._size);
  }

  Vector _array{};
  Deleter _deleter{};
  uint32_t _size{};
};
540
/// Adapter for passing an optional scoped handle as a parameter
///
/// Wraps either a scoped handle (borrowing its raw handle) or nothing,
/// yielding a null handle in the latter case.
template<typename T>
class OptionalParameter
{
public:
  using Handle = typename T::Handle;

  /// Implicitly wrap a scoped handle, borrowing its raw handle
  // NOLINTNEXTLINE(hicpp-explicit-conversions, google-explicit-constructor)
  OptionalParameter(const T& value) noexcept
    : _handle{value.get()}
  {}

  /// Construct an unset parameter (a null handle)
  OptionalParameter() noexcept = default;
  ~OptionalParameter() noexcept = default;

  OptionalParameter(const OptionalParameter&) = delete;
  OptionalParameter& operator=(const OptionalParameter&) = delete;

  OptionalParameter(OptionalParameter&&) = delete;
  OptionalParameter& operator=(OptionalParameter&&) = delete;

  /// Return the raw handle, which is null if no value was given
  Handle get() const noexcept { return _handle; }

private:
  Handle _handle{};
};
566
/// A top-level object destroyed with a global "vkDestroyX" function
template<typename T>
using GlobalObject = UniqueDispatchableHandle<T, GlobalDeleter<T>>;

/// A non-dispatchable handle owned by a VkInstance
template<typename T>
using InstanceChild =
  UniqueNonDispatchableHandle<T, DependantDeleter<T, VkInstance>>;

/// A dispatchable handle owned by a VkDevice
template<typename T>
using DispatchableDeviceChild =
  UniqueDispatchableHandle<T, DependantDeleter<T, VkDevice>>;

/// A non-dispatchable handle owned by a VkDevice
template<typename T>
using NonDispatchableDeviceChild =
  UniqueNonDispatchableHandle<T, DependantDeleter<T, VkDevice>>;

/// An array of handles allocated from, and freed back to, a pool
template<typename Vector, typename Pool, typename FreeFuncResult>
using PoolChild = UniqueArrayHandle<
  Vector,
  PoolDeleter<typename Vector::value_type, Pool, FreeFuncResult>>;

using Device = GlobalObject<VkDevice>;
using Instance = GlobalObject<VkInstance>;

using PhysicalDevice = VkPhysicalDevice; // Weak handle, no destroy function
using Queue = VkQueue; // Weak handle, no destroy function

// Scoped handles for device children (Vulkan 1.1 core)
using Buffer = NonDispatchableDeviceChild<VkBuffer>;
using BufferView = NonDispatchableDeviceChild<VkBufferView>;
using CommandBuffer = DispatchableDeviceChild<VkCommandBuffer>;
using CommandPool = NonDispatchableDeviceChild<VkCommandPool>;
using DescriptorPool = NonDispatchableDeviceChild<VkDescriptorPool>;
using DescriptorSetLayout = NonDispatchableDeviceChild<VkDescriptorSetLayout>;
using DeviceMemory = NonDispatchableDeviceChild<VkDeviceMemory>;
using Event = NonDispatchableDeviceChild<VkEvent>;
using Fence = NonDispatchableDeviceChild<VkFence>;
using Framebuffer = NonDispatchableDeviceChild<VkFramebuffer>;
using Image = NonDispatchableDeviceChild<VkImage>;
using ImageView = NonDispatchableDeviceChild<VkImageView>;
using Pipeline = NonDispatchableDeviceChild<VkPipeline>;
using PipelineCache = NonDispatchableDeviceChild<VkPipelineCache>;
using PipelineLayout = NonDispatchableDeviceChild<VkPipelineLayout>;
using QueryPool = NonDispatchableDeviceChild<VkQueryPool>;
using RenderPass = NonDispatchableDeviceChild<VkRenderPass>;
using Sampler = NonDispatchableDeviceChild<VkSampler>;
using Semaphore = NonDispatchableDeviceChild<VkSemaphore>;
using ShaderModule = NonDispatchableDeviceChild<VkShaderModule>;

/// Command buffers allocated from, and freed to, a command pool
template<class VkCommandBufferVector>
using CommandBuffers = PoolChild<VkCommandBufferVector, VkCommandPool, void>;

/// Descriptor sets allocated from, and freed to, a descriptor pool
template<class VkDescriptorSetVector>
using DescriptorSets =
  PoolChild<VkDescriptorSetVector, VkDescriptorPool, VkResult>;

// VK_KHR_swapchain
using SwapchainKHR = NonDispatchableDeviceChild<VkSwapchainKHR>;

// VK_KHR_surface
using SurfaceKHR = InstanceChild<VkSurfaceKHR>;

// VK_EXT_debug_report
using DebugReportCallbackEXT = InstanceChild<VkDebugReportCallbackEXT>;
629
/// Compile-time sequence of indices (a C++11 stand-in for std::index_sequence)
template<size_t...>
struct IndexSequence {};

/// Recursive helper that prepends indices until reaching zero
template<size_t N, size_t... Next>
struct IndexSequenceHelper
  : public IndexSequenceHelper<N - 1U, N - 1U, Next...> {};

/// Base case: all indices have been generated
template<size_t... Next>
struct IndexSequenceHelper<0U, Next...> {
  using type = IndexSequence<Next...>;
};

/// Make an IndexSequence<0, 1, ..., N-1>
template<size_t N>
using makeIndexSequence = typename IndexSequenceHelper<N>::type;
644
/// Helper that expands `handles` into scoped handles sharing one deleter
///
/// The index pack `Is` drives the pack expansion; each raw handle is wrapped
/// in a `T` whose deleter is constructed from `parent` and `destroyFunc`.
template<class T, class Parent, class DestroyFunc, size_t count, size_t... Is>
std::array<T, count>
make_handle_array_h(Parent parent,
                    DestroyFunc destroyFunc,
                    std::array<typename T::Handle, count> handles,
                    IndexSequence<Is...>) noexcept
{
  return {T{handles[Is], {parent, destroyFunc}}...};
}
654
/// Make an array of scoped handles from raw `handles` owned by `parent`
template<class T, class Parent, class DestroyFunc, size_t count>
std::array<T, count>
make_handle_array(Parent parent,
                  DestroyFunc destroyFunc,
                  std::array<typename T::Handle, count> handles) noexcept
{
  return make_handle_array_h<T, Parent, DestroyFunc, count>(
    parent, destroyFunc, handles, makeIndexSequence<count>());
}
664
665 namespace detail {
666
667 template<class Value, class Vector, class Func, class... Args>
668 inline VkResult
wrapVectorAccessor(Vector & vector,Func func,Args...args)669 wrapVectorAccessor(Vector& vector, Func func, Args... args) noexcept
670 {
671 uint32_t count = 0u;
672 VkResult r = func(args..., &count, nullptr);
673 if (r > VK_INCOMPLETE) {
674 vector.clear();
675 return r;
676 }
677
678 vector = Vector(count);
679 if ((r = func(args..., &count, vector.data()))) {
680 vector.clear();
681 return r;
682 }
683
684 return VK_SUCCESS;
685 }
686
687 } // namespace detail
688
689 class VulkanApi;
690
/// A scoped mapping of device memory
///
/// The out-of-line destructor presumably unmaps the memory via the stored
/// API (it is defined after VulkanApi is complete) — confirm at definition.
struct MappedMemory {
  MappedMemory() noexcept = default;

  /// Wrap `data`, the host pointer mapped from `memory` on `device`
  MappedMemory(const VulkanApi& api,
               VkDevice device,
               VkDeviceMemory memory,
               void* data) noexcept
    : _api{&api}
    , _device{device}
    , _memory{memory}
    , _data{data}
  {}

  MappedMemory(const MappedMemory&) = delete;
  MappedMemory& operator=(const MappedMemory&) = delete;

  // Note that the source keeps its _api pointer; only the handles and data
  // pointer are cleared, which is enough to make its destructor a no-op
  MappedMemory(MappedMemory&& mappedMemory) noexcept
    : _api{mappedMemory._api}
    , _device{mappedMemory._device}
    , _memory{mappedMemory._memory}
    , _data{mappedMemory._data}
  {
    mappedMemory._device = {};
    mappedMemory._memory = {};
    mappedMemory._data = {};
  }

  MappedMemory& operator=(MappedMemory&& mappedMemory) noexcept
  {
    std::swap(_api, mappedMemory._api);
    std::swap(_device, mappedMemory._device);
    std::swap(_memory, mappedMemory._memory);
    std::swap(_data, mappedMemory._data);
    return *this;
  }

  // Defined out of line, after VulkanApi is a complete type
  ~MappedMemory() noexcept;

  /// Return the mapped host pointer (null if empty or moved-from)
  const void* get() const noexcept { return _data; }
  void* get() noexcept { return _data; }

private:
  const VulkanApi* _api{};   // Non-owning API pointer
  VkDevice _device{};        // Device the memory belongs to
  VkDeviceMemory _memory{};  // Memory object this mapping refers to
  void* _data{};             // Host-visible pointer to the mapped region
};
738
739 class VulkanInitApi
740 {
741 public:
742 template<typename NotFoundFunc>
init(PFN_vkGetInstanceProcAddr pGetInstanceProcAddr,NotFoundFunc notFound)743 VkResult init(PFN_vkGetInstanceProcAddr pGetInstanceProcAddr,
744 NotFoundFunc notFound) noexcept
745 {
746 #define SK_INIT(name) \
747 do { \
748 if (!(name = PFN_##name(getInstanceProcAddr(NULL, #name)))) { \
749 notFound(#name); \
750 } \
751 } while (0)
752
753 vkGetInstanceProcAddr = pGetInstanceProcAddr;
754 SK_INIT(vkCreateInstance);
755 vkDestroyInstance = {}; // Loaded after we create an instance
756 SK_INIT(vkEnumerateInstanceExtensionProperties);
757 SK_INIT(vkEnumerateInstanceLayerProperties);
758
759 if (!vkCreateInstance || !vkEnumerateInstanceExtensionProperties ||
760 !vkEnumerateInstanceLayerProperties) {
761 return VK_ERROR_INITIALIZATION_FAILED;
762 }
763
764 return VK_SUCCESS;
765 #undef SK_INIT
766 }
767
init(PFN_vkGetInstanceProcAddr pGetInstanceProcAddr)768 VkResult init(PFN_vkGetInstanceProcAddr pGetInstanceProcAddr) noexcept
769 {
770 return init(pGetInstanceProcAddr, [](const char*) {});
771 }
772
getInstanceProcAddr(VkInstance instance,const char * const name) const773 PFN_vkVoidFunction getInstanceProcAddr(VkInstance instance,
774 const char* const name) const noexcept
775 {
776 return vkGetInstanceProcAddr(instance, name);
777 }
778
createInstance(const VkInstanceCreateInfo & createInfo,Instance & instance)779 VkResult createInstance(const VkInstanceCreateInfo& createInfo,
780 Instance& instance) noexcept
781 {
782 VkInstance h = {};
783 if (const VkResult r = vkCreateInstance(&createInfo, nullptr, &h)) {
784 return r;
785 }
786
787 if (!h) {
788 // Shouldn't actually happen, but this lets the compiler know that
789 return VK_ERROR_INITIALIZATION_FAILED;
790 }
791
792 if (!vkDestroyInstance) {
793 vkDestroyInstance = PFN_vkDestroyInstance(
794 getInstanceProcAddr(instance, "vkDestroyInstance"));
795 }
796
797 instance = {h, {vkDestroyInstance}};
798 return VK_SUCCESS;
799 }
800
801 template<class Vector>
enumerateInstanceExtensionProperties(Vector & properties) const802 VkResult enumerateInstanceExtensionProperties(
803 Vector& properties) const noexcept
804 {
805 return detail::wrapVectorAccessor<VkExtensionProperties>(
806 properties, vkEnumerateInstanceExtensionProperties, nullptr);
807 }
808
809 template<class Vector>
enumerateInstanceExtensionProperties(const char * const layerName,Vector & properties) const810 VkResult enumerateInstanceExtensionProperties(
811 const char* const layerName,
812 Vector& properties) const noexcept
813 {
814 return detail::wrapVectorAccessor<VkExtensionProperties>(
815 properties, vkEnumerateInstanceExtensionProperties, layerName);
816 }
817
818 template<class Vector>
enumerateInstanceLayerProperties(Vector & properties) const819 VkResult enumerateInstanceLayerProperties(Vector& properties) const noexcept
820 {
821 return detail::wrapVectorAccessor<VkLayerProperties>(
822 properties, vkEnumerateInstanceLayerProperties);
823 }
824
825 private:
826 PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr{};
827
828 #define SK_FUNC(name) \
829 PFN_##name name {}
830
831 SK_FUNC(vkCreateInstance);
832 SK_FUNC(vkDestroyInstance);
833 SK_FUNC(vkEnumerateInstanceExtensionProperties);
834 SK_FUNC(vkEnumerateInstanceLayerProperties);
835
836 #undef SK_FUNC
837 };
838
839 class VulkanApi
840 {
841 public:
842 template<typename NotFoundFunc>
init(const VulkanInitApi & initApi,const Instance & instance,NotFoundFunc notFound)843 VkResult init(const VulkanInitApi& initApi,
844 const Instance& instance,
845 NotFoundFunc notFound) noexcept
846 {
847 VkResult r = VK_SUCCESS;
848
849 const auto notFoundWrapper = [&r, notFound](const char* name) {
850 r = VK_INCOMPLETE;
851 notFound(name);
852 };
853
854 #define SK_INIT(name) \
855 do { \
856 if (!(name = PFN_##name(initApi.getInstanceProcAddr(instance, #name)))) { \
857 notFoundWrapper(#name); \
858 } \
859 } while (0)
860
861 SK_INIT(vkAllocateCommandBuffers);
862 SK_INIT(vkAllocateDescriptorSets);
863 SK_INIT(vkAllocateMemory);
864 SK_INIT(vkBeginCommandBuffer);
865 SK_INIT(vkBindBufferMemory);
866 SK_INIT(vkBindImageMemory);
867 SK_INIT(vkCmdBeginQuery);
868 SK_INIT(vkCmdBeginRenderPass);
869 SK_INIT(vkCmdBindDescriptorSets);
870 SK_INIT(vkCmdBindIndexBuffer);
871 SK_INIT(vkCmdBindPipeline);
872 SK_INIT(vkCmdBindVertexBuffers);
873 SK_INIT(vkCmdBlitImage);
874 SK_INIT(vkCmdClearAttachments);
875 SK_INIT(vkCmdClearColorImage);
876 SK_INIT(vkCmdClearDepthStencilImage);
877 SK_INIT(vkCmdCopyBuffer);
878 SK_INIT(vkCmdCopyBufferToImage);
879 SK_INIT(vkCmdCopyImage);
880 SK_INIT(vkCmdCopyImageToBuffer);
881 SK_INIT(vkCmdCopyQueryPoolResults);
882 SK_INIT(vkCmdDispatch);
883 SK_INIT(vkCmdDispatchIndirect);
884 SK_INIT(vkCmdDraw);
885 SK_INIT(vkCmdDrawIndexed);
886 SK_INIT(vkCmdDrawIndexedIndirect);
887 SK_INIT(vkCmdDrawIndirect);
888 SK_INIT(vkCmdEndQuery);
889 SK_INIT(vkCmdEndRenderPass);
890 SK_INIT(vkCmdExecuteCommands);
891 SK_INIT(vkCmdFillBuffer);
892 SK_INIT(vkCmdNextSubpass);
893 SK_INIT(vkCmdPipelineBarrier);
894 SK_INIT(vkCmdPushConstants);
895 SK_INIT(vkCmdResetEvent);
896 SK_INIT(vkCmdResetQueryPool);
897 SK_INIT(vkCmdResolveImage);
898 SK_INIT(vkCmdSetBlendConstants);
899 SK_INIT(vkCmdSetDepthBias);
900 SK_INIT(vkCmdSetDepthBounds);
901 SK_INIT(vkCmdSetEvent);
902 SK_INIT(vkCmdSetLineWidth);
903 SK_INIT(vkCmdSetScissor);
904 SK_INIT(vkCmdSetStencilCompareMask);
905 SK_INIT(vkCmdSetStencilReference);
906 SK_INIT(vkCmdSetStencilWriteMask);
907 SK_INIT(vkCmdSetViewport);
908 SK_INIT(vkCmdUpdateBuffer);
909 SK_INIT(vkCmdWaitEvents);
910 SK_INIT(vkCmdWriteTimestamp);
911 SK_INIT(vkCreateBuffer);
912 SK_INIT(vkCreateBufferView);
913 SK_INIT(vkCreateCommandPool);
914 SK_INIT(vkCreateComputePipelines);
915 SK_INIT(vkCreateDescriptorPool);
916 SK_INIT(vkCreateDescriptorSetLayout);
917 SK_INIT(vkCreateDevice);
918 SK_INIT(vkCreateEvent);
919 SK_INIT(vkCreateFence);
920 SK_INIT(vkCreateFramebuffer);
921 SK_INIT(vkCreateGraphicsPipelines);
922 SK_INIT(vkCreateImage);
923 SK_INIT(vkCreateImageView);
924 SK_INIT(vkCreateInstance);
925 SK_INIT(vkCreatePipelineCache);
926 SK_INIT(vkCreatePipelineLayout);
927 SK_INIT(vkCreateQueryPool);
928 SK_INIT(vkCreateRenderPass);
929 SK_INIT(vkCreateSampler);
930 SK_INIT(vkCreateSemaphore);
931 SK_INIT(vkCreateShaderModule);
932 SK_INIT(vkDestroyBuffer);
933 SK_INIT(vkDestroyBufferView);
934 SK_INIT(vkDestroyCommandPool);
935 SK_INIT(vkDestroyDescriptorPool);
936 SK_INIT(vkDestroyDescriptorSetLayout);
937 SK_INIT(vkDestroyDevice);
938 SK_INIT(vkDestroyEvent);
939 SK_INIT(vkDestroyFence);
940 SK_INIT(vkDestroyFramebuffer);
941 SK_INIT(vkDestroyImage);
942 SK_INIT(vkDestroyImageView);
943 SK_INIT(vkDestroyPipeline);
944 SK_INIT(vkDestroyPipelineCache);
945 SK_INIT(vkDestroyPipelineLayout);
946 SK_INIT(vkDestroyQueryPool);
947 SK_INIT(vkDestroyRenderPass);
948 SK_INIT(vkDestroySampler);
949 SK_INIT(vkDestroySemaphore);
950 SK_INIT(vkDestroyShaderModule);
951 SK_INIT(vkDeviceWaitIdle);
952 SK_INIT(vkEndCommandBuffer);
953 SK_INIT(vkEnumerateDeviceExtensionProperties);
954 SK_INIT(vkEnumerateDeviceLayerProperties);
955 SK_INIT(vkEnumeratePhysicalDevices);
956 SK_INIT(vkFlushMappedMemoryRanges);
957 SK_INIT(vkFreeCommandBuffers);
958 SK_INIT(vkFreeDescriptorSets);
959 SK_INIT(vkFreeMemory);
960 SK_INIT(vkGetBufferMemoryRequirements);
961 SK_INIT(vkGetDeviceMemoryCommitment);
962 SK_INIT(vkGetDeviceProcAddr);
963 SK_INIT(vkGetDeviceQueue);
964 SK_INIT(vkGetEventStatus);
965 SK_INIT(vkGetFenceStatus);
966 SK_INIT(vkGetImageMemoryRequirements);
967 SK_INIT(vkGetImageSparseMemoryRequirements);
968 SK_INIT(vkGetImageSubresourceLayout);
969 SK_INIT(vkGetInstanceProcAddr);
970 SK_INIT(vkGetPhysicalDeviceFeatures);
971 SK_INIT(vkGetPhysicalDeviceFormatProperties);
972 SK_INIT(vkGetPhysicalDeviceImageFormatProperties);
973 SK_INIT(vkGetPhysicalDeviceMemoryProperties);
974 SK_INIT(vkGetPhysicalDeviceProperties);
975 SK_INIT(vkGetPhysicalDeviceQueueFamilyProperties);
976 SK_INIT(vkGetPhysicalDeviceSparseImageFormatProperties);
977 SK_INIT(vkGetPipelineCacheData);
978 SK_INIT(vkGetQueryPoolResults);
979 SK_INIT(vkGetRenderAreaGranularity);
980 SK_INIT(vkInvalidateMappedMemoryRanges);
981 SK_INIT(vkMapMemory);
982 SK_INIT(vkMergePipelineCaches);
983 SK_INIT(vkQueueBindSparse);
984 SK_INIT(vkQueueSubmit);
985 SK_INIT(vkQueueWaitIdle);
986 SK_INIT(vkResetCommandBuffer);
987 SK_INIT(vkResetCommandPool);
988 SK_INIT(vkResetDescriptorPool);
989 SK_INIT(vkResetEvent);
990 SK_INIT(vkResetFences);
991 SK_INIT(vkSetEvent);
992 SK_INIT(vkUnmapMemory);
993 SK_INIT(vkUpdateDescriptorSets);
994 SK_INIT(vkWaitForFences);
995
996 // VK_EXT_debug_report
997 SK_INIT(vkCreateDebugReportCallbackEXT);
998 SK_INIT(vkDebugReportMessageEXT);
999 SK_INIT(vkDestroyDebugReportCallbackEXT);
1000
1001 // VK_KHR_surface
1002 SK_INIT(vkDestroySurfaceKHR);
1003 SK_INIT(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
1004 SK_INIT(vkGetPhysicalDeviceSurfaceFormatsKHR);
1005 SK_INIT(vkGetPhysicalDeviceSurfacePresentModesKHR);
1006 SK_INIT(vkGetPhysicalDeviceSurfaceSupportKHR);
1007
1008 // VK_KHR_swapchain
1009 SK_INIT(vkAcquireNextImageKHR);
1010 SK_INIT(vkCreateSwapchainKHR);
1011 SK_INIT(vkDestroySwapchainKHR);
1012 SK_INIT(vkGetDeviceGroupPresentCapabilitiesKHR);
1013 SK_INIT(vkGetDeviceGroupSurfacePresentModesKHR);
1014 SK_INIT(vkGetPhysicalDevicePresentRectanglesKHR);
1015 SK_INIT(vkGetSwapchainImagesKHR);
1016 SK_INIT(vkQueuePresentKHR);
1017
1018 #undef SK_INIT
1019
1020 return r;
1021 }
1022
init(const VulkanInitApi & initApi,const Instance & instance)1023 VkResult init(const VulkanInitApi& initApi, const Instance& instance) noexcept
1024 {
1025 return init(initApi, instance, [](const char*) {});
1026 }
1027
  /**
     Allocate `allocateInfo.commandBufferCount` command buffers from a pool.

     On success, `commandBuffers` is set to a scoped handle group that owns
     the raw buffers and frees them back to `allocateInfo.commandPool` when
     it is destroyed.
  */
  template<class VkCommandBufferVector>
  VkResult allocateCommandBuffers(
    const Device& device,
    const VkCommandBufferAllocateInfo& allocateInfo,
    CommandBuffers<VkCommandBufferVector>& commandBuffers) const noexcept
  {
    VkCommandBufferVector rawCommandBuffers =
      VkCommandBufferVector(allocateInfo.commandBufferCount);

    if (const VkResult r = vkAllocateCommandBuffers(
          device, &allocateInfo, rawCommandBuffers.data())) {
      return r;
    }

    // Transfer the raw handles into a scoped group that frees them on scope exit
    commandBuffers = CommandBuffers<VkCommandBufferVector>{
      allocateInfo.commandBufferCount,
      std::move(rawCommandBuffers),
      PoolDeleter<VkCommandBuffer, VkCommandPool, void>{
        device,
        allocateInfo.commandPool,
        allocateInfo.commandBufferCount,
        vkFreeCommandBuffers}};
    return VK_SUCCESS;
  }
1052
  /**
     Allocate `allocateInfo.descriptorSetCount` descriptor sets from a pool.

     On success, `descriptorSets` is set to a scoped handle group that owns
     the raw sets and frees them back to `allocateInfo.descriptorPool` when
     it is destroyed.
  */
  template<class VkDescriptorSetVector>
  VkResult allocateDescriptorSets(
    const Device& device,
    const VkDescriptorSetAllocateInfo& allocateInfo,
    DescriptorSets<VkDescriptorSetVector>& descriptorSets) const noexcept
  {
    auto descriptorSetVector =
      VkDescriptorSetVector(allocateInfo.descriptorSetCount);

    if (const VkResult r = vkAllocateDescriptorSets(
          device, &allocateInfo, descriptorSetVector.data())) {
      return r;
    }

    // Transfer the raw handles into a scoped group that frees them on scope exit
    descriptorSets = DescriptorSets<VkDescriptorSetVector>{
      allocateInfo.descriptorSetCount,
      std::move(descriptorSetVector),
      PoolDeleter<VkDescriptorSet, VkDescriptorPool, VkResult>{
        device,
        allocateInfo.descriptorPool,
        allocateInfo.descriptorSetCount,
        vkFreeDescriptorSets}};
    return VK_SUCCESS;
  }
1077
bindBufferMemory(const Device & device,const Buffer & buffer,const DeviceMemory & memory,VkDeviceSize memoryOffset) const1078 VkResult bindBufferMemory(const Device& device,
1079 const Buffer& buffer,
1080 const DeviceMemory& memory,
1081 VkDeviceSize memoryOffset) const noexcept
1082 {
1083 return vkBindBufferMemory
1084 ? vkBindBufferMemory(device, buffer, memory, memoryOffset)
1085 : VK_ERROR_FEATURE_NOT_PRESENT;
1086 }
1087
createBuffer(const Device & device,const VkBufferCreateInfo & createInfo,Buffer & buffer) const1088 VkResult createBuffer(const Device& device,
1089 const VkBufferCreateInfo& createInfo,
1090 Buffer& buffer) const noexcept
1091 {
1092 VkBuffer h = {};
1093 const VkResult r = vkCreateBuffer(device, &createInfo, nullptr, &h);
1094 return wrapResult(r, h, {device, vkDestroyBuffer}, buffer);
1095 }
1096
createBufferView(const Device & device,const VkBufferViewCreateInfo & createInfo,BufferView & bufferView) const1097 VkResult createBufferView(const Device& device,
1098 const VkBufferViewCreateInfo& createInfo,
1099 BufferView& bufferView) const noexcept
1100 {
1101 VkBufferView h = {};
1102 const VkResult r = vkCreateBufferView(device, &createInfo, nullptr, &h);
1103 return wrapResult(r, h, {device, vkDestroyBufferView}, bufferView);
1104 }
1105
createCommandPool(const Device & device,const VkCommandPoolCreateInfo & createInfo,CommandPool & commandPool) const1106 VkResult createCommandPool(const Device& device,
1107 const VkCommandPoolCreateInfo& createInfo,
1108 CommandPool& commandPool) const noexcept
1109 {
1110 VkCommandPool h = {};
1111 const VkResult r = vkCreateCommandPool(device, &createInfo, nullptr, &h);
1112 return wrapResult(r, h, {device, vkDestroyCommandPool}, commandPool);
1113 }
1114
createDescriptorPool(const Device & device,const VkDescriptorPoolCreateInfo & createInfo,DescriptorPool & descriptorPool) const1115 VkResult createDescriptorPool(const Device& device,
1116 const VkDescriptorPoolCreateInfo& createInfo,
1117 DescriptorPool& descriptorPool) const noexcept
1118 {
1119 VkDescriptorPool h = {};
1120 const VkResult r = vkCreateDescriptorPool(device, &createInfo, nullptr, &h);
1121
1122 return wrapResult(r, h, {device, vkDestroyDescriptorPool}, descriptorPool);
1123 }
1124
createDescriptorSetLayout(const Device & device,const VkDescriptorSetLayoutCreateInfo & createInfo,DescriptorSetLayout & descriptorSetLayout) const1125 VkResult createDescriptorSetLayout(
1126 const Device& device,
1127 const VkDescriptorSetLayoutCreateInfo& createInfo,
1128 DescriptorSetLayout& descriptorSetLayout) const noexcept
1129 {
1130 VkDescriptorSetLayout h = {};
1131 const VkResult r =
1132 vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &h);
1133
1134 return wrapResult(
1135 r, h, {device, vkDestroyDescriptorSetLayout}, descriptorSetLayout);
1136 }
1137
createDevice(const PhysicalDevice & physicalDevice,const VkDeviceCreateInfo & createInfo,Device & result) const1138 VkResult createDevice(const PhysicalDevice& physicalDevice,
1139 const VkDeviceCreateInfo& createInfo,
1140 Device& result) const noexcept
1141 {
1142 VkDevice h = {};
1143 const VkResult r = vkCreateDevice(physicalDevice, &createInfo, nullptr, &h);
1144
1145 return wrapResult(r, h, {vkDestroyDevice}, result);
1146 }
1147
createEvent(const Device & device,const VkEventCreateInfo & createInfo,Event & event) const1148 VkResult createEvent(const Device& device,
1149 const VkEventCreateInfo& createInfo,
1150 Event& event) const noexcept
1151 {
1152 VkEvent h = {};
1153 const VkResult r = vkCreateEvent(device, &createInfo, nullptr, &h);
1154
1155 return wrapResult(r, h, {device, vkDestroyEvent}, event);
1156 }
1157
createFence(const Device & device,const VkFenceCreateInfo & createInfo,Fence & fence) const1158 VkResult createFence(const Device& device,
1159 const VkFenceCreateInfo& createInfo,
1160 Fence& fence) const noexcept
1161 {
1162 VkFence h = {};
1163 const VkResult r = vkCreateFence(device, &createInfo, nullptr, &h);
1164
1165 return wrapResult(r, h, {device, vkDestroyFence}, fence);
1166 }
1167
createFramebuffer(const Device & device,const VkFramebufferCreateInfo & createInfo,Framebuffer & framebuffer) const1168 VkResult createFramebuffer(const Device& device,
1169 const VkFramebufferCreateInfo& createInfo,
1170 Framebuffer& framebuffer) const noexcept
1171 {
1172 VkFramebuffer h = {};
1173 const VkResult r = vkCreateFramebuffer(device, &createInfo, nullptr, &h);
1174
1175 return wrapResult(r, h, {device, vkDestroyFramebuffer}, framebuffer);
1176 }
1177
createImage(const Device & device,const VkImageCreateInfo & createInfo,Image & image) const1178 VkResult createImage(const Device& device,
1179 const VkImageCreateInfo& createInfo,
1180 Image& image) const noexcept
1181 {
1182 VkImage h = {};
1183 const VkResult r = vkCreateImage(device, &createInfo, nullptr, &h);
1184
1185 return wrapResult(r, h, {device, vkDestroyImage}, image);
1186 }
1187
createImageView(const Device & device,const VkImageViewCreateInfo & createInfo,ImageView & imageView) const1188 VkResult createImageView(const Device& device,
1189 const VkImageViewCreateInfo& createInfo,
1190 ImageView& imageView) const noexcept
1191 {
1192 VkImageView h = {};
1193 const VkResult r = vkCreateImageView(device, &createInfo, nullptr, &h);
1194
1195 return wrapResult(r, h, {device, vkDestroyImageView}, imageView);
1196 }
1197
  /**
     Create a fixed-size array of graphics pipelines in a single call.

     On success, `pipelines` is filled with scoped handles that destroy the
     pipelines when they go out of scope.  `pipelineCache` may be omitted.
  */
  template<size_t count>
  VkResult createGraphicsPipelines(
    const Device& device,
    const OptionalParameter<PipelineCache>& pipelineCache,
    const std::array<VkGraphicsPipelineCreateInfo, count>& createInfos,
    std::array<Pipeline, count>& pipelines) const noexcept
  {
    std::array<VkPipeline, count> pipelineHandles{};

    if (const VkResult r =
          vkCreateGraphicsPipelines(device,
                                    pipelineCache.get(),
                                    static_cast<uint32_t>(createInfos.size()),
                                    createInfos.data(),
                                    nullptr,
                                    pipelineHandles.data())) {
      return r;
    }

    // Wrap every raw handle in a scoped Pipeline owned by `device`
    pipelines = make_handle_array<Pipeline>(
      device.get(), vkDestroyPipeline, pipelineHandles);
    return VK_SUCCESS;
  }
1221
createPipelineCache(const Device & device,const VkPipelineCacheCreateInfo & createInfo,PipelineCache & pipelineCache) const1222 VkResult createPipelineCache(const Device& device,
1223 const VkPipelineCacheCreateInfo& createInfo,
1224 PipelineCache& pipelineCache) const noexcept
1225 {
1226 VkPipelineCache h = {};
1227 const VkResult r = vkCreatePipelineCache(device, &createInfo, nullptr, &h);
1228
1229 return wrapResult(r, h, {device, vkDestroyPipelineCache}, pipelineCache);
1230 }
1231
createPipelineLayout(const Device & device,const VkPipelineLayoutCreateInfo & createInfo,PipelineLayout & pipelineLayout) const1232 VkResult createPipelineLayout(const Device& device,
1233 const VkPipelineLayoutCreateInfo& createInfo,
1234 PipelineLayout& pipelineLayout) const noexcept
1235 {
1236 VkPipelineLayout h = {};
1237 const VkResult r = vkCreatePipelineLayout(device, &createInfo, nullptr, &h);
1238
1239 return wrapResult(r, h, {device, vkDestroyPipelineLayout}, pipelineLayout);
1240 }
1241
createQueryPool(const Device & device,const VkQueryPoolCreateInfo & createInfo,QueryPool & queryPool) const1242 VkResult createQueryPool(const Device& device,
1243 const VkQueryPoolCreateInfo& createInfo,
1244 QueryPool& queryPool) const noexcept
1245 {
1246 VkQueryPool h = {};
1247 const VkResult r = vkCreateQueryPool(device, &createInfo, nullptr, &h);
1248
1249 return wrapResult(r, h, {device, vkDestroyQueryPool}, queryPool);
1250 }
1251
createRenderPass(const Device & device,const VkRenderPassCreateInfo & createInfo,RenderPass & renderPass) const1252 VkResult createRenderPass(const Device& device,
1253 const VkRenderPassCreateInfo& createInfo,
1254 RenderPass& renderPass) const noexcept
1255 {
1256 VkRenderPass h = {};
1257 const VkResult r = vkCreateRenderPass(device, &createInfo, nullptr, &h);
1258
1259 return wrapResult(r, h, {device, vkDestroyRenderPass}, renderPass);
1260 }
1261
createSampler(const Device & device,const VkSamplerCreateInfo & createInfo,Sampler & sampler) const1262 VkResult createSampler(const Device& device,
1263 const VkSamplerCreateInfo& createInfo,
1264 Sampler& sampler) const noexcept
1265 {
1266 VkSampler h = {};
1267 const VkResult r = vkCreateSampler(device, &createInfo, nullptr, &h);
1268
1269 return wrapResult(r, h, {device, vkDestroySampler}, sampler);
1270 }
1271
createSemaphore(const Device & device,const VkSemaphoreCreateInfo & createInfo,Semaphore & semaphore) const1272 VkResult createSemaphore(const Device& device,
1273 const VkSemaphoreCreateInfo& createInfo,
1274 Semaphore& semaphore) const noexcept
1275 {
1276 VkSemaphore h = {};
1277 const VkResult r = vkCreateSemaphore(device, &createInfo, nullptr, &h);
1278
1279 return wrapResult(r, h, {device, vkDestroySemaphore}, semaphore);
1280 }
1281
createShaderModule(const Device & device,const VkShaderModuleCreateInfo & createInfo,ShaderModule & shaderModule) const1282 VkResult createShaderModule(const Device& device,
1283 const VkShaderModuleCreateInfo& createInfo,
1284 ShaderModule& shaderModule) const noexcept
1285 {
1286 VkShaderModule h = {};
1287 const VkResult r = vkCreateShaderModule(device, &createInfo, nullptr, &h);
1288
1289 return wrapResult(r, h, {device, vkDestroyShaderModule}, shaderModule);
1290 }
1291
  /// Block until all queues on `device` are idle
  VkResult deviceWaitIdle(const Device& device) const noexcept
  {
    return vkDeviceWaitIdle(device);
  }
1296
  /**
     Enumerate device extension properties, restricted to layer `layerName`.

     `properties` is resized to hold all results; `Vector` must be a
     contiguous container of VkExtensionProperties (e.g. std::vector).
  */
  template<class Vector>
  VkResult enumerateDeviceExtensionProperties(
    const PhysicalDevice& physicalDevice,
    const char* const layerName,
    Vector& properties) const noexcept
  {
    return detail::wrapVectorAccessor<VkExtensionProperties>(
      properties,
      vkEnumerateDeviceExtensionProperties,
      physicalDevice,
      layerName);
  }
1309
1310 template<class Vector>
enumerateDeviceExtensionProperties(const PhysicalDevice & physicalDevice,Vector & properties) const1311 VkResult enumerateDeviceExtensionProperties(
1312 const PhysicalDevice& physicalDevice,
1313 Vector& properties) const noexcept
1314 {
1315 return detail::wrapVectorAccessor<VkExtensionProperties>(
1316 properties,
1317 vkEnumerateDeviceExtensionProperties,
1318 physicalDevice,
1319 nullptr);
1320 }
1321
1322 template<class Vector>
enumeratePhysicalDevices(const Instance & instance,Vector & physicalDevices) const1323 VkResult enumeratePhysicalDevices(const Instance& instance,
1324 Vector& physicalDevices) const noexcept
1325 {
1326 uint32_t count = 0u;
1327 VkResult r = vkEnumeratePhysicalDevices(instance, &count, nullptr);
1328 if (r > VK_INCOMPLETE) {
1329 return r;
1330 }
1331
1332 physicalDevices = Vector(count);
1333 if ((r = vkEnumeratePhysicalDevices(
1334 instance, &count, physicalDevices.data()))) {
1335 return r;
1336 }
1337
1338 return VK_SUCCESS;
1339 }
1340
getDeviceQueue(const Device & device,const uint32_t queueFamilyIndex,const uint32_t queueIndex) const1341 sk::Queue getDeviceQueue(const Device& device,
1342 const uint32_t queueFamilyIndex,
1343 const uint32_t queueIndex) const noexcept
1344 {
1345 VkQueue queue{};
1346 vkGetDeviceQueue(device, queueFamilyIndex, queueIndex, &queue);
1347 return sk::Queue{queue};
1348 }
1349
getPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice) const1350 VkPhysicalDeviceMemoryProperties getPhysicalDeviceMemoryProperties(
1351 VkPhysicalDevice physicalDevice) const noexcept
1352 {
1353 VkPhysicalDeviceMemoryProperties properties{};
1354 vkGetPhysicalDeviceMemoryProperties(physicalDevice, &properties);
1355 return properties;
1356 }
1357
getPhysicalDeviceProperties(const PhysicalDevice & physicalDevice) const1358 VkPhysicalDeviceProperties getPhysicalDeviceProperties(
1359 const PhysicalDevice& physicalDevice) const noexcept
1360 {
1361 VkPhysicalDeviceProperties properties{};
1362 vkGetPhysicalDeviceProperties(physicalDevice, &properties);
1363 return properties;
1364 }
1365
1366 template<class Vector>
getPhysicalDeviceQueueFamilyProperties(const PhysicalDevice & physicalDevice,Vector & queueFamilyProperties) const1367 VkResult getPhysicalDeviceQueueFamilyProperties(
1368 const PhysicalDevice& physicalDevice,
1369 Vector& queueFamilyProperties) const noexcept
1370 {
1371 uint32_t count = 0u;
1372 vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, nullptr);
1373
1374 queueFamilyProperties = Vector(count);
1375 vkGetPhysicalDeviceQueueFamilyProperties(
1376 physicalDevice, &count, queueFamilyProperties.data());
1377
1378 return VK_SUCCESS;
1379 }
1380
getBufferMemoryRequirements(const Device & device,const Buffer & buffer) const1381 VkMemoryRequirements getBufferMemoryRequirements(
1382 const Device& device,
1383 const Buffer& buffer) const noexcept
1384 {
1385 VkMemoryRequirements requirements;
1386 vkGetBufferMemoryRequirements(device, buffer, &requirements);
1387 return requirements;
1388 }
1389
  /**
     Allocate device memory.

     On success, `memory` is set to a scoped handle that frees the
     allocation when it is destroyed.  A successful return code with a null
     handle is mapped to VK_ERROR_OUT_OF_DEVICE_MEMORY so callers never
     receive a "valid" null allocation.
  */
  VkResult allocateMemory(const Device& device,
                          const VkMemoryAllocateInfo& info,
                          DeviceMemory& memory) const noexcept
  {
    VkDeviceMemory h = {};
    if (const VkResult r = vkAllocateMemory(device, &info, nullptr, &h)) {
      return r;
    }

    if (!h) {
      return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    memory = DeviceMemory{h, {device, vkFreeMemory}};
    return VK_SUCCESS;
  }
1406
mapMemory(const Device & device,const DeviceMemory & memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,MappedMemory & mappedMemory) const1407 VkResult mapMemory(const Device& device,
1408 const DeviceMemory& memory,
1409 VkDeviceSize offset,
1410 VkDeviceSize size,
1411 VkMemoryMapFlags flags,
1412 MappedMemory& mappedMemory) const noexcept
1413 {
1414 void* data = nullptr;
1415 if (const VkResult r =
1416 vkMapMemory(device, memory, offset, size, flags, &data)) {
1417 return r;
1418 }
1419
1420 mappedMemory = MappedMemory{*this, device, memory, data};
1421 return VK_SUCCESS;
1422 }
1423
  /**
     Submit `submitCount` batches of work to a queue.

     NOTE(review): `submits` is passed as a reference to the first of
     `submitCount` VkSubmitInfo structures, so for submitCount > 1 the
     caller must ensure the infos are contiguous in memory — confirm with
     callers before relying on multi-submit here.
  */
  VkResult queueSubmit(const Queue& queue,
                       uint32_t submitCount,
                       const VkSubmitInfo& submits,
                       const Fence& fence) const noexcept
  {
    return vkQueueSubmit(queue, submitCount, &submits, fence);
  }
1431
queueSubmit(const Queue & queue,const VkSubmitInfo & submit,const Fence & fence) const1432 VkResult queueSubmit(const Queue& queue,
1433 const VkSubmitInfo& submit,
1434 const Fence& fence) const noexcept
1435 {
1436 return vkQueueSubmit(queue, 1u, &submit, fence);
1437 }
1438
1439 template<size_t descriptorWriteCount, size_t descriptorCopyCount>
updateDescriptorSets(const Device & device,std::array<VkWriteDescriptorSet,descriptorWriteCount> descriptorWrites,std::array<VkCopyDescriptorSet,descriptorCopyCount> descriptorCopies) const1440 void updateDescriptorSets(
1441 const Device& device,
1442 std::array<VkWriteDescriptorSet, descriptorWriteCount> descriptorWrites,
1443 std::array<VkCopyDescriptorSet, descriptorCopyCount> descriptorCopies)
1444 const noexcept
1445 {
1446 vkUpdateDescriptorSets(device,
1447 static_cast<uint32_t>(descriptorWrites.size()),
1448 descriptorWrites.data(),
1449 static_cast<uint32_t>(descriptorCopies.size()),
1450 descriptorCopies.data());
1451 }
1452
resetFence(const Device & device,const Fence & fence) const1453 VkResult resetFence(const Device& device, const Fence& fence) const noexcept
1454 {
1455 VkFence h = fence;
1456 return vkResetFences(device, 1u, &h);
1457 }
1458
waitForFence(const Device & device,const Fence & fence,uint64_t timeout) const1459 VkResult waitForFence(const Device& device,
1460 const Fence& fence,
1461 uint64_t timeout) const noexcept
1462 {
1463 VkFence h = fence;
1464 return vkWaitForFences(device, 1u, &h, VK_TRUE, timeout);
1465 }
1466
waitForFence(const Device & device,const Fence & fence) const1467 VkResult waitForFence(const Device& device, const Fence& fence) const noexcept
1468 {
1469 VkFence h = fence;
1470 return vkWaitForFences(device, 1u, &h, VK_TRUE, UINT64_MAX);
1471 }
1472
1473 // Scoped command buffer interface
1474 SYBOK_NODISCARD
1475 CommandScope beginCommandBuffer(
1476 VkCommandBuffer commandBuffer,
1477 VkCommandBufferBeginInfo beginInfo) const noexcept;
1478
1479 // VK_EXT_debug_report
1480
createDebugReportCallbackEXT(const Instance & instance,const VkDebugReportCallbackCreateInfoEXT & createInfo,DebugReportCallbackEXT & callback) const1481 VkResult createDebugReportCallbackEXT(
1482 const Instance& instance,
1483 const VkDebugReportCallbackCreateInfoEXT& createInfo,
1484 DebugReportCallbackEXT& callback) const noexcept
1485 {
1486 VkDebugReportCallbackEXT h = {};
1487
1488 if (const VkResult r =
1489 vkCreateDebugReportCallbackEXT(instance, &createInfo, nullptr, &h)) {
1490 return r;
1491 }
1492
1493 if (!h) {
1494 return VK_ERROR_FEATURE_NOT_PRESENT;
1495 }
1496
1497 callback = {h, {instance, vkDestroyDebugReportCallbackEXT}};
1498 return VK_SUCCESS;
1499 }
1500
1501 // VK_KHR_surface
1502
  /// Query the capabilities of `surface` on `physicalDevice` (VK_KHR_surface)
  VkResult getPhysicalDeviceSurfaceCapabilitiesKHR(
    const PhysicalDevice& physicalDevice,
    const SurfaceKHR& surface,
    VkSurfaceCapabilitiesKHR& capabilities) const noexcept
  {
    return vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
      physicalDevice, surface, &capabilities);
  }
1511
  /**
     Enumerate the surface formats supported for `surface`.

     `surfaceFormats` is resized to hold all results; `Vector` must be a
     contiguous container of VkSurfaceFormatKHR.
  */
  template<typename Vector>
  VkResult getPhysicalDeviceSurfaceFormatsKHR(
    const PhysicalDevice& physicalDevice,
    const SurfaceKHR& surface,
    Vector& surfaceFormats) const noexcept
  {
    return detail::wrapVectorAccessor<VkSurfaceFormatKHR>(
      surfaceFormats,
      vkGetPhysicalDeviceSurfaceFormatsKHR,
      physicalDevice,
      surface.get());
  }
1524
  /**
     Enumerate the present modes supported for `surface`.

     `presentModes` is resized to hold all results; `Vector` must be a
     contiguous container of VkPresentModeKHR.
  */
  template<typename Vector>
  VkResult getPhysicalDeviceSurfacePresentModesKHR(
    const PhysicalDevice& physicalDevice,
    const SurfaceKHR& surface,
    Vector& presentModes) const noexcept
  {
    return detail::wrapVectorAccessor<VkPresentModeKHR>(
      presentModes,
      vkGetPhysicalDeviceSurfacePresentModesKHR,
      physicalDevice,
      surface.get());
  }
1537
getPhysicalDeviceSurfaceSupportKHR(const PhysicalDevice & physicalDevice,uint32_t queueFamilyIndex,const SurfaceKHR & surface,bool & supported) const1538 VkResult getPhysicalDeviceSurfaceSupportKHR(
1539 const PhysicalDevice& physicalDevice,
1540 uint32_t queueFamilyIndex,
1541 const SurfaceKHR& surface,
1542 bool& supported) const noexcept
1543 {
1544 VkBool32 s = {};
1545
1546 if (VkResult r = vkGetPhysicalDeviceSurfaceSupportKHR(
1547 physicalDevice, queueFamilyIndex, surface, &s)) {
1548 return r;
1549 }
1550
1551 supported = s;
1552 return VK_SUCCESS;
1553 }
1554
1555 // VK_KHR_swapchain
1556
  /// Acquire the index of the next presentable swapchain image, signaling
  /// `semaphore` (and optionally `fence`) when it is ready
  VkResult acquireNextImageKHR(const Device& device,
                               const SwapchainKHR& swapchain,
                               uint64_t timeout,
                               const Semaphore& semaphore,
                               const OptionalParameter<Fence>& fence,
                               uint32_t* pImageIndex) const noexcept
  {
    return vkAcquireNextImageKHR(
      device, swapchain, timeout, semaphore, fence.get(), pImageIndex);
  }
1567
  /**
     Fetch the images of `swapchain` into `images`.

     `images` is resized to hold all results; `Vector` must be a contiguous
     container of VkImage.
  */
  template<class Vector>
  VkResult getSwapchainImagesKHR(const Device& device,
                                 const SwapchainKHR& swapchain,
                                 Vector& images) const noexcept
  {
    return detail::wrapVectorAccessor<VkImage>(
      images, vkGetSwapchainImagesKHR, device.get(), swapchain.get());
  }
1576
createSwapchainKHR(const Device & device,const VkSwapchainCreateInfoKHR & createInfo,SwapchainKHR & swapchain) const1577 VkResult createSwapchainKHR(const Device& device,
1578 const VkSwapchainCreateInfoKHR& createInfo,
1579 SwapchainKHR& swapchain) const noexcept
1580 {
1581 VkSwapchainKHR h = {};
1582 const VkResult r = vkCreateSwapchainKHR(device, &createInfo, nullptr, &h);
1583
1584 if (r) {
1585 return r;
1586 }
1587
1588 if (!h) {
1589 return VK_ERROR_INCOMPATIBLE_DRIVER;
1590 }
1591
1592 swapchain = {h, {device, vkDestroySwapchainKHR}};
1593 return VK_SUCCESS;
1594 }
1595
  /// Queue presentation of swapchain image(s) described by `presentInfo`
  VkResult queuePresentKHR(const Queue& queue,
                           const VkPresentInfoKHR& presentInfo) const noexcept
  {
    return vkQueuePresentKHR(queue, &presentInfo);
  }
1601
  // Raw Vulkan function pointer members, null until loaded by init()
#define SK_FUNC(name) \
  PFN_##name name {} // NOLINT

  // Vulkan 1.0 Core
  SK_FUNC(vkAllocateCommandBuffers);
  SK_FUNC(vkAllocateDescriptorSets);
  SK_FUNC(vkAllocateMemory);
  SK_FUNC(vkBeginCommandBuffer);
  SK_FUNC(vkBindBufferMemory);
  SK_FUNC(vkBindImageMemory);
  SK_FUNC(vkCmdBeginQuery);
  SK_FUNC(vkCmdBeginRenderPass);
  SK_FUNC(vkCmdBindDescriptorSets);
  SK_FUNC(vkCmdBindIndexBuffer);
  SK_FUNC(vkCmdBindPipeline);
  SK_FUNC(vkCmdBindVertexBuffers);
  SK_FUNC(vkCmdBlitImage);
  SK_FUNC(vkCmdClearAttachments);
  SK_FUNC(vkCmdClearColorImage);
  SK_FUNC(vkCmdClearDepthStencilImage);
  SK_FUNC(vkCmdCopyBuffer);
  SK_FUNC(vkCmdCopyBufferToImage);
  SK_FUNC(vkCmdCopyImage);
  SK_FUNC(vkCmdCopyImageToBuffer);
  SK_FUNC(vkCmdCopyQueryPoolResults);
  SK_FUNC(vkCmdDispatch);
  SK_FUNC(vkCmdDispatchIndirect);
  SK_FUNC(vkCmdDraw);
  SK_FUNC(vkCmdDrawIndexed);
  SK_FUNC(vkCmdDrawIndexedIndirect);
  SK_FUNC(vkCmdDrawIndirect);
  SK_FUNC(vkCmdEndQuery);
  SK_FUNC(vkCmdEndRenderPass);
  SK_FUNC(vkCmdExecuteCommands);
  SK_FUNC(vkCmdFillBuffer);
  SK_FUNC(vkCmdNextSubpass);
  SK_FUNC(vkCmdPipelineBarrier);
  SK_FUNC(vkCmdPushConstants);
  SK_FUNC(vkCmdResetEvent);
  SK_FUNC(vkCmdResetQueryPool);
  SK_FUNC(vkCmdResolveImage);
  SK_FUNC(vkCmdSetBlendConstants);
  SK_FUNC(vkCmdSetDepthBias);
  SK_FUNC(vkCmdSetDepthBounds);
  SK_FUNC(vkCmdSetEvent);
  SK_FUNC(vkCmdSetLineWidth);
  SK_FUNC(vkCmdSetScissor);
  SK_FUNC(vkCmdSetStencilCompareMask);
  SK_FUNC(vkCmdSetStencilReference);
  SK_FUNC(vkCmdSetStencilWriteMask);
  SK_FUNC(vkCmdSetViewport);
  SK_FUNC(vkCmdUpdateBuffer);
  SK_FUNC(vkCmdWaitEvents);
  SK_FUNC(vkCmdWriteTimestamp);
  SK_FUNC(vkCreateBuffer);
  SK_FUNC(vkCreateBufferView);
  SK_FUNC(vkCreateCommandPool);
  SK_FUNC(vkCreateComputePipelines);
  SK_FUNC(vkCreateDescriptorPool);
  SK_FUNC(vkCreateDescriptorSetLayout);
  SK_FUNC(vkCreateDevice);
  SK_FUNC(vkCreateEvent);
  SK_FUNC(vkCreateFence);
  SK_FUNC(vkCreateFramebuffer);
  SK_FUNC(vkCreateGraphicsPipelines);
  SK_FUNC(vkCreateImage);
  SK_FUNC(vkCreateImageView);
  SK_FUNC(vkCreateInstance);
  SK_FUNC(vkCreatePipelineCache);
  SK_FUNC(vkCreatePipelineLayout);
  SK_FUNC(vkCreateQueryPool);
  SK_FUNC(vkCreateRenderPass);
  SK_FUNC(vkCreateSampler);
  SK_FUNC(vkCreateSemaphore);
  SK_FUNC(vkCreateShaderModule);
  SK_FUNC(vkDestroyBuffer);
  SK_FUNC(vkDestroyBufferView);
  SK_FUNC(vkDestroyCommandPool);
  SK_FUNC(vkDestroyDescriptorPool);
  SK_FUNC(vkDestroyDescriptorSetLayout);
  SK_FUNC(vkDestroyDevice);
  SK_FUNC(vkDestroyEvent);
  SK_FUNC(vkDestroyFence);
  SK_FUNC(vkDestroyFramebuffer);
  SK_FUNC(vkDestroyImage);
  SK_FUNC(vkDestroyImageView);
  SK_FUNC(vkDestroyPipeline);
  SK_FUNC(vkDestroyPipelineCache);
  SK_FUNC(vkDestroyPipelineLayout);
  SK_FUNC(vkDestroyQueryPool);
  SK_FUNC(vkDestroyRenderPass);
  SK_FUNC(vkDestroySampler);
  SK_FUNC(vkDestroySemaphore);
  SK_FUNC(vkDestroyShaderModule);
  SK_FUNC(vkDeviceWaitIdle);
  SK_FUNC(vkEndCommandBuffer);
  SK_FUNC(vkEnumerateDeviceExtensionProperties);
  SK_FUNC(vkEnumerateDeviceLayerProperties);
  SK_FUNC(vkEnumeratePhysicalDevices);
  SK_FUNC(vkFlushMappedMemoryRanges);
  SK_FUNC(vkFreeCommandBuffers);
  SK_FUNC(vkFreeDescriptorSets);
  SK_FUNC(vkFreeMemory);
  SK_FUNC(vkGetBufferMemoryRequirements);
  SK_FUNC(vkGetDeviceMemoryCommitment);
  SK_FUNC(vkGetDeviceProcAddr);
  SK_FUNC(vkGetDeviceQueue);
  SK_FUNC(vkGetEventStatus);
  SK_FUNC(vkGetFenceStatus);
  SK_FUNC(vkGetImageMemoryRequirements);
  SK_FUNC(vkGetImageSparseMemoryRequirements);
  SK_FUNC(vkGetImageSubresourceLayout);
  SK_FUNC(vkGetInstanceProcAddr);
  SK_FUNC(vkGetPhysicalDeviceFeatures);
  SK_FUNC(vkGetPhysicalDeviceFormatProperties);
  SK_FUNC(vkGetPhysicalDeviceImageFormatProperties);
  SK_FUNC(vkGetPhysicalDeviceMemoryProperties);
  SK_FUNC(vkGetPhysicalDeviceProperties);
  SK_FUNC(vkGetPhysicalDeviceQueueFamilyProperties);
  SK_FUNC(vkGetPhysicalDeviceSparseImageFormatProperties);
  SK_FUNC(vkGetPipelineCacheData);
  SK_FUNC(vkGetQueryPoolResults);
  SK_FUNC(vkGetRenderAreaGranularity);
  SK_FUNC(vkInvalidateMappedMemoryRanges);
  SK_FUNC(vkMapMemory);
  SK_FUNC(vkMergePipelineCaches);
  SK_FUNC(vkQueueBindSparse);
  SK_FUNC(vkQueueSubmit);
  SK_FUNC(vkQueueWaitIdle);
  SK_FUNC(vkResetCommandBuffer);
  SK_FUNC(vkResetCommandPool);
  SK_FUNC(vkResetDescriptorPool);
  SK_FUNC(vkResetEvent);
  SK_FUNC(vkResetFences);
  SK_FUNC(vkSetEvent);
  SK_FUNC(vkUnmapMemory);
  SK_FUNC(vkUpdateDescriptorSets);
  SK_FUNC(vkWaitForFences);

  // VK_EXT_debug_report
  SK_FUNC(vkCreateDebugReportCallbackEXT);
  SK_FUNC(vkDebugReportMessageEXT);
  SK_FUNC(vkDestroyDebugReportCallbackEXT);

  // VK_KHR_surface
  SK_FUNC(vkDestroySurfaceKHR);
  SK_FUNC(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
  SK_FUNC(vkGetPhysicalDeviceSurfaceFormatsKHR);
  SK_FUNC(vkGetPhysicalDeviceSurfacePresentModesKHR);
  SK_FUNC(vkGetPhysicalDeviceSurfaceSupportKHR);

  // VK_KHR_swapchain
  SK_FUNC(vkAcquireNextImageKHR);
  SK_FUNC(vkCreateSwapchainKHR);
  SK_FUNC(vkDestroySwapchainKHR);
  SK_FUNC(vkGetDeviceGroupPresentCapabilitiesKHR);
  SK_FUNC(vkGetDeviceGroupSurfacePresentModesKHR);
  SK_FUNC(vkGetPhysicalDevicePresentRectanglesKHR);
  SK_FUNC(vkGetSwapchainImagesKHR);
  SK_FUNC(vkQueuePresentKHR);

#undef SK_FUNC
1764
1765 private:
  /**
     Wrap the result of a Vulkan create function in a scoped handle.

     If both `r` and `handle` indicate success, `result` is set to a scoped
     handle of type `T` that owns `handle` via `deleter`.  A successful
     return code paired with a null handle is mapped to
     VK_ERROR_INITIALIZATION_FAILED so callers never receive a "valid"
     empty handle.
  */
  template<class T>
  static inline VkResult wrapResult(const VkResult r,
                                    const typename T::Handle handle,
                                    typename T::Deleter&& deleter,
                                    T& result) noexcept
  {
    if (r) {
      return r;
    }

    if (!handle) {
      return VK_ERROR_INITIALIZATION_FAILED;
    }

    result = T{handle, std::move(deleter)};
    return VK_SUCCESS;
  }
1783 };
1784
1785 /// Scope for commands that work both inside and outside a render pass
1786 class CommonCommandScope
1787 {
1788 public:
  /// Construct a scope that records into `commandBuffer` via `api`, storing
  /// `result` as the status reported by error()
  CommonCommandScope(const VulkanApi& api,
                     VkCommandBuffer commandBuffer,
                     VkResult result) noexcept
    : _api{api}
    , _commandBuffer{commandBuffer}
    , _result{result}
  {}
1796
1797 CommonCommandScope(const CommonCommandScope&) noexcept = delete;
1798 CommonCommandScope& operator=(const CommonCommandScope&) noexcept = delete;
1799
  /// Transfer the command buffer from `scope`, leaving it with a null handle
  CommonCommandScope(CommonCommandScope&& scope) noexcept
    : _api{scope._api}
    , _commandBuffer{scope._commandBuffer}
    , _result{scope._result}
  {
    // Clear the source so it no longer refers to the command buffer
    scope._commandBuffer = {};
  }
1807
1808 CommonCommandScope& operator=(CommonCommandScope&&) = delete;
1809
1810 ~CommonCommandScope() noexcept = default;
1811
  /// Return true if this scope was opened successfully
  explicit operator bool() const noexcept { return _result == VK_SUCCESS; }
1813
  /// Return the status code this scope was constructed with
  VkResult error() const noexcept { return _result; }
1815
  /// Bind a pipeline to the command buffer for subsequent commands
  void bindPipeline(VkPipelineBindPoint pipelineBindPoint,
                    VkPipeline pipeline) const noexcept
  {
    _api.vkCmdBindPipeline(_commandBuffer, pipelineBindPoint, pipeline);
  }
1821
setViewport(uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports) const1822 void setViewport(uint32_t firstViewport,
1823 uint32_t viewportCount,
1824 const VkViewport* pViewports) const noexcept
1825 {
1826 _api.vkCmdSetViewport(
1827 _commandBuffer, firstViewport, viewportCount, pViewports);
1828 }
1829
setScissor(uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors) const1830 void setScissor(uint32_t firstScissor,
1831 uint32_t scissorCount,
1832 const VkRect2D* pScissors) const noexcept
1833 {
1834 _api.vkCmdSetScissor(_commandBuffer, firstScissor, scissorCount, pScissors);
1835 }
1836
setLineWidth(float lineWidth) const1837 void setLineWidth(float lineWidth) const noexcept
1838 {
1839 _api.vkCmdSetLineWidth(_commandBuffer, lineWidth);
1840 }
1841
setDepthBias(float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor) const1842 void setDepthBias(float depthBiasConstantFactor,
1843 float depthBiasClamp,
1844 float depthBiasSlopeFactor) const noexcept
1845 {
1846 _api.vkCmdSetDepthBias(_commandBuffer,
1847 depthBiasConstantFactor,
1848 depthBiasClamp,
1849 depthBiasSlopeFactor);
1850 }
1851
setBlendConstants(const float blendConstants[4]) const1852 void setBlendConstants(const float blendConstants[4]) const noexcept
1853 {
1854 _api.vkCmdSetBlendConstants(_commandBuffer, blendConstants);
1855 }
1856
setDepthBounds(float minDepthBounds,float maxDepthBounds) const1857 void setDepthBounds(float minDepthBounds, float maxDepthBounds) const noexcept
1858 {
1859 _api.vkCmdSetDepthBounds(_commandBuffer, minDepthBounds, maxDepthBounds);
1860 }
1861
setStencilCompareMask(VkStencilFaceFlags faceMask,uint32_t compareMask) const1862 void setStencilCompareMask(VkStencilFaceFlags faceMask,
1863 uint32_t compareMask) const noexcept
1864 {
1865 _api.vkCmdSetStencilCompareMask(_commandBuffer, faceMask, compareMask);
1866 }
1867
setStencilWriteMask(VkStencilFaceFlags faceMask,uint32_t writeMask) const1868 void setStencilWriteMask(VkStencilFaceFlags faceMask,
1869 uint32_t writeMask) const noexcept
1870 {
1871 _api.vkCmdSetStencilWriteMask(_commandBuffer, faceMask, writeMask);
1872 }
1873
setStencilReference(VkStencilFaceFlags faceMask,uint32_t reference) const1874 void setStencilReference(VkStencilFaceFlags faceMask,
1875 uint32_t reference) const noexcept
1876 {
1877 _api.vkCmdSetStencilReference(_commandBuffer, faceMask, reference);
1878 }
1879
bindDescriptorSets(VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets) const1880 void bindDescriptorSets(VkPipelineBindPoint pipelineBindPoint,
1881 VkPipelineLayout layout,
1882 uint32_t firstSet,
1883 uint32_t descriptorSetCount,
1884 const VkDescriptorSet* pDescriptorSets,
1885 uint32_t dynamicOffsetCount,
1886 const uint32_t* pDynamicOffsets) const noexcept
1887 {
1888 _api.vkCmdBindDescriptorSets(_commandBuffer,
1889 pipelineBindPoint,
1890 layout,
1891 firstSet,
1892 descriptorSetCount,
1893 pDescriptorSets,
1894 dynamicOffsetCount,
1895 pDynamicOffsets);
1896 }
1897
bindIndexBuffer(VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType) const1898 void bindIndexBuffer(VkBuffer buffer,
1899 VkDeviceSize offset,
1900 VkIndexType indexType) const noexcept
1901 {
1902 _api.vkCmdBindIndexBuffer(_commandBuffer, buffer, offset, indexType);
1903 }
1904
bindVertexBuffers(uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets) const1905 void bindVertexBuffers(uint32_t firstBinding,
1906 uint32_t bindingCount,
1907 const VkBuffer* pBuffers,
1908 const VkDeviceSize* pOffsets) const noexcept
1909 {
1910 _api.vkCmdBindVertexBuffers(
1911 _commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
1912 }
1913
waitEvents(uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers) const1914 void waitEvents(
1915 uint32_t eventCount,
1916 const VkEvent* pEvents,
1917 VkPipelineStageFlags srcStageMask,
1918 VkPipelineStageFlags dstStageMask,
1919 uint32_t memoryBarrierCount,
1920 const VkMemoryBarrier* pMemoryBarriers,
1921 uint32_t bufferMemoryBarrierCount,
1922 const VkBufferMemoryBarrier* pBufferMemoryBarriers,
1923 uint32_t imageMemoryBarrierCount,
1924 const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept
1925 {
1926 _api.vkCmdWaitEvents(_commandBuffer,
1927 eventCount,
1928 pEvents,
1929 srcStageMask,
1930 dstStageMask,
1931 memoryBarrierCount,
1932 pMemoryBarriers,
1933 bufferMemoryBarrierCount,
1934 pBufferMemoryBarriers,
1935 imageMemoryBarrierCount,
1936 pImageMemoryBarriers);
1937 }
1938
pipelineBarrier(VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers) const1939 void pipelineBarrier(
1940 VkPipelineStageFlags srcStageMask,
1941 VkPipelineStageFlags dstStageMask,
1942 VkDependencyFlags dependencyFlags,
1943 uint32_t memoryBarrierCount,
1944 const VkMemoryBarrier* pMemoryBarriers,
1945 uint32_t bufferMemoryBarrierCount,
1946 const VkBufferMemoryBarrier* pBufferMemoryBarriers,
1947 uint32_t imageMemoryBarrierCount,
1948 const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept
1949 {
1950 _api.vkCmdPipelineBarrier(_commandBuffer,
1951 srcStageMask,
1952 dstStageMask,
1953 dependencyFlags,
1954 memoryBarrierCount,
1955 pMemoryBarriers,
1956 bufferMemoryBarrierCount,
1957 pBufferMemoryBarriers,
1958 imageMemoryBarrierCount,
1959 pImageMemoryBarriers);
1960 }
1961
beginQuery(VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags) const1962 void beginQuery(VkQueryPool queryPool,
1963 uint32_t query,
1964 VkQueryControlFlags flags) const noexcept
1965 {
1966 _api.vkCmdBeginQuery(_commandBuffer, queryPool, query, flags);
1967 }
1968
endQuery(VkQueryPool queryPool,uint32_t query) const1969 void endQuery(VkQueryPool queryPool, uint32_t query) const noexcept
1970 {
1971 _api.vkCmdEndQuery(_commandBuffer, queryPool, query);
1972 }
1973
writeTimestamp(VkPipelineStageFlagBits pipelineStage,VkQueryPool queryPool,uint32_t query) const1974 void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
1975 VkQueryPool queryPool,
1976 uint32_t query) const noexcept
1977 {
1978 _api.vkCmdWriteTimestamp(_commandBuffer, pipelineStage, queryPool, query);
1979 }
1980
pushConstants(VkPipelineLayout layout,VkShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues) const1981 void pushConstants(VkPipelineLayout layout,
1982 VkShaderStageFlags stageFlags,
1983 uint32_t offset,
1984 uint32_t size,
1985 const void* pValues) const noexcept
1986 {
1987 _api.vkCmdPushConstants(
1988 _commandBuffer, layout, stageFlags, offset, size, pValues);
1989 }
1990
executeCommands(uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers) const1991 void executeCommands(uint32_t commandBufferCount,
1992 const VkCommandBuffer* pCommandBuffers) const noexcept
1993 {
1994 _api.vkCmdExecuteCommands(
1995 _commandBuffer, commandBufferCount, pCommandBuffers);
1996 }
1997
1998 protected:
1999 const VulkanApi& _api;
2000 VkCommandBuffer _commandBuffer;
2001 VkResult _result;
2002 };
2003
2004 // Top level command scope outside a render pass
2005 class CommandScope : public CommonCommandScope
2006 {
2007 public:
CommandScope(const VulkanApi & api,VkCommandBuffer commandBuffer,VkResult result)2008 CommandScope(const VulkanApi& api,
2009 VkCommandBuffer commandBuffer,
2010 VkResult result) noexcept
2011 : CommonCommandScope{api, commandBuffer, result}
2012 {}
2013
2014 CommandScope(const CommandScope&) = delete;
2015 CommandScope& operator=(const CommandScope&) = delete;
2016
CommandScope(CommandScope && scope)2017 CommandScope(CommandScope&& scope) noexcept
2018 : CommonCommandScope{std::forward<CommandScope>(scope)}
2019 {}
2020
2021 CommandScope& operator=(CommandScope&&) = delete;
2022
~CommandScope()2023 ~CommandScope() noexcept
2024 {
2025 assert(!_commandBuffer); // Buffer must be finished with end()
2026 }
2027
end()2028 VkResult end() noexcept
2029 {
2030 if (_commandBuffer) {
2031 VkResult r = _api.vkEndCommandBuffer(_commandBuffer);
2032 _commandBuffer = {};
2033 return r;
2034 }
2035
2036 return VK_NOT_READY;
2037 }
2038
dispatch(uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ) const2039 void dispatch(uint32_t groupCountX,
2040 uint32_t groupCountY,
2041 uint32_t groupCountZ) const noexcept
2042 {
2043 _api.vkCmdDispatch(_commandBuffer, groupCountX, groupCountY, groupCountZ);
2044 }
2045
dispatchIndirect(VkBuffer buffer,VkDeviceSize offset) const2046 void dispatchIndirect(VkBuffer buffer, VkDeviceSize offset) const noexcept
2047 {
2048 _api.vkCmdDispatchIndirect(_commandBuffer, buffer, offset);
2049 }
2050
copyBuffer(VkBuffer srcBuffer,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferCopy * pRegions) const2051 void copyBuffer(VkBuffer srcBuffer,
2052 VkBuffer dstBuffer,
2053 uint32_t regionCount,
2054 const VkBufferCopy* pRegions) const noexcept
2055 {
2056 _api.vkCmdCopyBuffer(
2057 _commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2058 }
2059
copyImage(VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageCopy * pRegions) const2060 void copyImage(VkImage srcImage,
2061 VkImageLayout srcImageLayout,
2062 VkImage dstImage,
2063 VkImageLayout dstImageLayout,
2064 uint32_t regionCount,
2065 const VkImageCopy* pRegions) const noexcept
2066 {
2067 _api.vkCmdCopyImage(_commandBuffer,
2068 srcImage,
2069 srcImageLayout,
2070 dstImage,
2071 dstImageLayout,
2072 regionCount,
2073 pRegions);
2074 }
2075
blitImage(VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageBlit * pRegions,VkFilter filter) const2076 void blitImage(VkImage srcImage,
2077 VkImageLayout srcImageLayout,
2078 VkImage dstImage,
2079 VkImageLayout dstImageLayout,
2080 uint32_t regionCount,
2081 const VkImageBlit* pRegions,
2082 VkFilter filter) const noexcept
2083 {
2084 _api.vkCmdBlitImage(_commandBuffer,
2085 srcImage,
2086 srcImageLayout,
2087 dstImage,
2088 dstImageLayout,
2089 regionCount,
2090 pRegions,
2091 filter);
2092 }
2093
copyBufferToImage(VkBuffer srcBuffer,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions) const2094 void copyBufferToImage(VkBuffer srcBuffer,
2095 VkImage dstImage,
2096 VkImageLayout dstImageLayout,
2097 uint32_t regionCount,
2098 const VkBufferImageCopy* pRegions) const noexcept
2099 {
2100 _api.vkCmdCopyBufferToImage(_commandBuffer,
2101 srcBuffer,
2102 dstImage,
2103 dstImageLayout,
2104 regionCount,
2105 pRegions);
2106 }
2107
copyImageToBuffer(VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions) const2108 void copyImageToBuffer(VkImage srcImage,
2109 VkImageLayout srcImageLayout,
2110 VkBuffer dstBuffer,
2111 uint32_t regionCount,
2112 const VkBufferImageCopy* pRegions) const noexcept
2113 {
2114 _api.vkCmdCopyImageToBuffer(_commandBuffer,
2115 srcImage,
2116 srcImageLayout,
2117 dstBuffer,
2118 regionCount,
2119 pRegions);
2120 }
2121
updateBuffer(VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize dataSize,const void * pData) const2122 void updateBuffer(VkBuffer dstBuffer,
2123 VkDeviceSize dstOffset,
2124 VkDeviceSize dataSize,
2125 const void* pData) const noexcept
2126 {
2127 _api.vkCmdUpdateBuffer(
2128 _commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2129 }
2130
fillBuffer(VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize size,uint32_t data) const2131 void fillBuffer(VkBuffer dstBuffer,
2132 VkDeviceSize dstOffset,
2133 VkDeviceSize size,
2134 uint32_t data) const noexcept
2135 {
2136 _api.vkCmdFillBuffer(_commandBuffer, dstBuffer, dstOffset, size, data);
2137 }
2138
clearColorImage(VkImage image,VkImageLayout imageLayout,const VkClearColorValue & color,uint32_t rangeCount,const VkImageSubresourceRange * pRanges) const2139 void clearColorImage(VkImage image,
2140 VkImageLayout imageLayout,
2141 const VkClearColorValue& color,
2142 uint32_t rangeCount,
2143 const VkImageSubresourceRange* pRanges) const noexcept
2144 {
2145 _api.vkCmdClearColorImage(
2146 _commandBuffer, image, imageLayout, &color, rangeCount, pRanges);
2147 }
2148
clearDepthStencilImage(VkImage image,VkImageLayout imageLayout,const VkClearDepthStencilValue & depthStencil,uint32_t rangeCount,const VkImageSubresourceRange * pRanges) const2149 void clearDepthStencilImage(
2150 VkImage image,
2151 VkImageLayout imageLayout,
2152 const VkClearDepthStencilValue& depthStencil,
2153 uint32_t rangeCount,
2154 const VkImageSubresourceRange* pRanges) const noexcept
2155 {
2156 _api.vkCmdClearDepthStencilImage(
2157 _commandBuffer, image, imageLayout, &depthStencil, rangeCount, pRanges);
2158 }
2159
resolveImage(VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageResolve * pRegions) const2160 void resolveImage(VkImage srcImage,
2161 VkImageLayout srcImageLayout,
2162 VkImage dstImage,
2163 VkImageLayout dstImageLayout,
2164 uint32_t regionCount,
2165 const VkImageResolve* pRegions) const noexcept
2166 {
2167 _api.vkCmdResolveImage(_commandBuffer,
2168 srcImage,
2169 srcImageLayout,
2170 dstImage,
2171 dstImageLayout,
2172 regionCount,
2173 pRegions);
2174 }
2175
setEvent(VkEvent event,VkPipelineStageFlags stageMask) const2176 void setEvent(VkEvent event, VkPipelineStageFlags stageMask) const noexcept
2177 {
2178 _api.vkCmdSetEvent(_commandBuffer, event, stageMask);
2179 }
2180
resetEvent(VkEvent event,VkPipelineStageFlags stageMask) const2181 void resetEvent(VkEvent event, VkPipelineStageFlags stageMask) const noexcept
2182 {
2183 _api.vkCmdResetEvent(_commandBuffer, event, stageMask);
2184 }
2185
resetQueryPool(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount) const2186 void resetQueryPool(VkQueryPool queryPool,
2187 uint32_t firstQuery,
2188 uint32_t queryCount) const noexcept
2189 {
2190 _api.vkCmdResetQueryPool(_commandBuffer, queryPool, firstQuery, queryCount);
2191 }
2192
copyQueryPoolResults(VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags) const2193 void copyQueryPoolResults(VkQueryPool queryPool,
2194 uint32_t firstQuery,
2195 uint32_t queryCount,
2196 VkBuffer dstBuffer,
2197 VkDeviceSize dstOffset,
2198 VkDeviceSize stride,
2199 VkQueryResultFlags flags) const noexcept
2200 {
2201 _api.vkCmdCopyQueryPoolResults(_commandBuffer,
2202 queryPool,
2203 firstQuery,
2204 queryCount,
2205 dstBuffer,
2206 dstOffset,
2207 stride,
2208 flags);
2209 }
2210
2211 SYBOK_NODISCARD
2212 RenderCommandScope beginRenderPass(
2213 const VkRenderPassBeginInfo& renderPassBegin,
2214 VkSubpassContents contents) const noexcept;
2215 };
2216
2217 class RenderCommandScope : public CommonCommandScope
2218 {
2219 public:
RenderCommandScope(const VulkanApi & api,VkCommandBuffer commandBuffer)2220 RenderCommandScope(const VulkanApi& api,
2221 VkCommandBuffer commandBuffer) noexcept
2222 : CommonCommandScope{api, commandBuffer, VK_SUCCESS}
2223 {}
2224
2225 RenderCommandScope(const RenderCommandScope&) = delete;
2226 RenderCommandScope& operator=(const RenderCommandScope&) = delete;
2227
RenderCommandScope(RenderCommandScope && scope)2228 RenderCommandScope(RenderCommandScope&& scope) noexcept
2229 : CommonCommandScope{std::forward<RenderCommandScope>(scope)}
2230 {}
2231
2232 RenderCommandScope& operator=(RenderCommandScope&&) = delete;
2233
~RenderCommandScope()2234 ~RenderCommandScope() noexcept { _api.vkCmdEndRenderPass(_commandBuffer); }
2235
draw(uint32_t vertexCount,uint32_t instanceCount,uint32_t firstVertex,uint32_t firstInstance) const2236 void draw(uint32_t vertexCount,
2237 uint32_t instanceCount,
2238 uint32_t firstVertex,
2239 uint32_t firstInstance) const noexcept
2240 {
2241 _api.vkCmdDraw(
2242 _commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2243 }
2244
drawIndexed(uint32_t indexCount,uint32_t instanceCount,uint32_t firstIndex,int32_t vertexOffset,uint32_t firstInstance) const2245 void drawIndexed(uint32_t indexCount,
2246 uint32_t instanceCount,
2247 uint32_t firstIndex,
2248 int32_t vertexOffset,
2249 uint32_t firstInstance) const noexcept
2250 {
2251 _api.vkCmdDrawIndexed(_commandBuffer,
2252 indexCount,
2253 instanceCount,
2254 firstIndex,
2255 vertexOffset,
2256 firstInstance);
2257 }
2258
drawIndirect(VkBuffer buffer,VkDeviceSize offset,uint32_t drawCount,uint32_t stride) const2259 void drawIndirect(VkBuffer buffer,
2260 VkDeviceSize offset,
2261 uint32_t drawCount,
2262 uint32_t stride) const noexcept
2263 {
2264 _api.vkCmdDrawIndirect(_commandBuffer, buffer, offset, drawCount, stride);
2265 }
2266
drawIndexedIndirect(VkBuffer buffer,VkDeviceSize offset,uint32_t drawCount,uint32_t stride) const2267 void drawIndexedIndirect(VkBuffer buffer,
2268 VkDeviceSize offset,
2269 uint32_t drawCount,
2270 uint32_t stride) const noexcept
2271 {
2272 _api.vkCmdDrawIndexedIndirect(
2273 _commandBuffer, buffer, offset, drawCount, stride);
2274 }
2275
clearAttachments(uint32_t attachmentCount,const VkClearAttachment & attachments,uint32_t rectCount,const VkClearRect * pRects) const2276 void clearAttachments(uint32_t attachmentCount,
2277 const VkClearAttachment& attachments,
2278 uint32_t rectCount,
2279 const VkClearRect* pRects) const noexcept
2280 {
2281 _api.vkCmdClearAttachments(
2282 _commandBuffer, attachmentCount, &attachments, rectCount, pRects);
2283 }
2284
nextSubpass(VkSubpassContents contents) const2285 void nextSubpass(VkSubpassContents contents) const noexcept
2286 {
2287 _api.vkCmdNextSubpass(_commandBuffer, contents);
2288 }
2289 };
2290
2291 inline CommandScope
beginCommandBuffer(VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo beginInfo) const2292 VulkanApi::beginCommandBuffer(
2293 VkCommandBuffer commandBuffer,
2294 const VkCommandBufferBeginInfo beginInfo) const noexcept
2295 {
2296 if (const VkResult r = vkBeginCommandBuffer(commandBuffer, &beginInfo)) {
2297 return {*this, nullptr, r};
2298 }
2299
2300 return {*this, commandBuffer, VK_SUCCESS};
2301 }
2302
2303 inline RenderCommandScope
beginRenderPass(const VkRenderPassBeginInfo & renderPassBegin,VkSubpassContents contents) const2304 CommandScope::beginRenderPass(const VkRenderPassBeginInfo& renderPassBegin,
2305 VkSubpassContents contents) const noexcept
2306 {
2307 _api.vkCmdBeginRenderPass(_commandBuffer, &renderPassBegin, contents);
2308
2309 return {_api, _commandBuffer};
2310 }
2311
~MappedMemory()2312 inline MappedMemory::~MappedMemory() noexcept
2313 {
2314 if (_api && _memory) {
2315 _api->vkUnmapMemory(_device, _memory);
2316 }
2317 }
2318
2319 } // namespace sk
2320
2321 #ifdef __GNUC__
2322 # pragma GCC diagnostic pop
2323 #endif
2324
2325 #endif // SYBOK_HPP
2326