1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
7 // Wrapper classes around Vulkan objects. In an ideal world we could generate this
8 // from vk.xml. Or reuse the generator in the vkhpp tool. For now this is manually
9 // generated and we must add missing functions and objects as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "common/vulkan/vk_headers.h"
15 #include "libANGLE/renderer/renderer_utils.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17 #include "libANGLE/trace.h"
18
19 namespace rx
20 {
21 namespace vk
22 {
// Helper macros that apply to all the wrapped object types.
// Unimplemented handle types:
// Instance
// PhysicalDevice
// Device
// Queue
// DescriptorSet

// X-macro listing every wrapped handle type.  It is expanded below three
// times: to generate the HandleType enum, the forward declarations, and the
// HandleTypeHelper specializations.  Keep the list alphabetized.
#define ANGLE_HANDLE_TYPES_X(FUNC) \
    FUNC(Allocation)               \
    FUNC(Allocator)                \
    FUNC(Buffer)                   \
    FUNC(BufferView)               \
    FUNC(CommandPool)              \
    FUNC(DescriptorPool)           \
    FUNC(DescriptorSetLayout)      \
    FUNC(DeviceMemory)             \
    FUNC(Event)                    \
    FUNC(Fence)                    \
    FUNC(Framebuffer)              \
    FUNC(Image)                    \
    FUNC(ImageView)                \
    FUNC(Pipeline)                 \
    FUNC(PipelineCache)            \
    FUNC(PipelineLayout)           \
    FUNC(QueryPool)                \
    FUNC(RenderPass)               \
    FUNC(Sampler)                  \
    FUNC(SamplerYcbcrConversion)   \
    FUNC(Semaphore)                \
    FUNC(ShaderModule)

#define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,

// Runtime identifier for each wrapped type.  CommandBuffer is listed manually
// because it lives in the priv namespace and is not part of the X-macro list.
enum class HandleType
{
    Invalid,
    CommandBuffer,
    ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
};

#undef ANGLE_COMMA_SEP_FUNC

// Forward-declare every wrapper class so the HandleTypeHelper specializations
// below can reference them before their definitions.
#define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
namespace priv
{
class CommandBuffer;
}  // namespace priv
#undef ANGLE_PRE_DECLARE_CLASS_FUNC

// Returns the HandleType of a Vk Handle.
template <typename T>
struct HandleTypeHelper;

#define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE)                         \
    template <>                                                     \
    struct HandleTypeHelper<TYPE>                                   \
    {                                                               \
        constexpr static HandleType kHandleType = HandleType::TYPE; \
    };

ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
template <>
struct HandleTypeHelper<priv::CommandBuffer>
{
    constexpr static HandleType kHandleType = HandleType::CommandBuffer;
};

#undef ANGLE_HANDLE_TYPE_HELPER_FUNC
93
94 // Base class for all wrapped vulkan objects. Implements several common helper routines.
95 template <typename DerivedT, typename HandleT>
96 class WrappedObject : angle::NonCopyable
97 {
98 public:
99 HandleT getHandle() const { return mHandle; }
100 void setHandle(HandleT handle) { mHandle = handle; }
101 bool valid() const { return (mHandle != VK_NULL_HANDLE); }
102
103 const HandleT *ptr() const { return &mHandle; }
104
105 HandleT release()
106 {
107 HandleT handle = mHandle;
108 mHandle = VK_NULL_HANDLE;
109 return handle;
110 }
111
112 protected:
113 WrappedObject() : mHandle(VK_NULL_HANDLE) {}
114 ~WrappedObject() { ASSERT(!valid()); }
115
116 WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
117 {
118 other.mHandle = VK_NULL_HANDLE;
119 }
120
121 // Only works to initialize empty objects, since we don't have the device handle.
122 WrappedObject &operator=(WrappedObject &&other)
123 {
124 ASSERT(!valid());
125 std::swap(mHandle, other.mHandle);
126 return *this;
127 }
128
129 HandleT mHandle;
130 };
131
// Wraps VkCommandPool.  Command buffers are allocated from (and freed back to)
// this pool; destroying the pool frees all of its command buffers.
class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
{
  public:
    CommandPool() = default;

    void destroy(VkDevice device);
    // Returns all command buffers in the pool to their initial state.
    VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
    void freeCommandBuffers(VkDevice device,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer *commandBuffers);

    VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
};
145
// Wraps VkPipeline.  A pipeline is created either as a graphics or a compute
// pipeline, optionally seeded from a pipeline cache.
class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
{
  public:
    Pipeline() = default;
    void destroy(VkDevice device);

    VkResult initGraphics(VkDevice device,
                          const VkGraphicsPipelineCreateInfo &createInfo,
                          const PipelineCache &pipelineCacheVk);
    VkResult initCompute(VkDevice device,
                         const VkComputePipelineCreateInfo &createInfo,
                         const PipelineCache &pipelineCacheVk);
};
159
160 namespace priv
161 {
162
// Helper class that wraps a Vulkan command buffer.
// The handle is pool-allocated; destroy() only drops the handle (the pool owns
// the memory), while destroy(device, pool) explicitly frees it.
class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
{
  public:
    CommandBuffer() = default;

    VkCommandBuffer releaseHandle();

    // This is used for normal pool allocated command buffers. It reset the handle.
    void destroy(VkDevice device);

    // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
    void destroy(VkDevice device, const CommandPool &commandPool);

    VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);

    // There is no way to know if the command buffer contains any commands.
    static bool CanKnowIfEmpty() { return false; }
    bool empty() const { return false; }

    using WrappedObject::operator=;

    // Queries inside this command buffer require the inheritedQueries feature,
    // since it is recorded as a secondary command buffer (see ExecutesInline).
    static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
    {
        return (features.inheritedQueries == VK_TRUE);
    }

    // Vulkan command buffers are executed as secondary command buffers within a primary command
    // buffer.
    static constexpr bool ExecutesInline() { return false; }

    VkResult begin(const VkCommandBufferBeginInfo &info);

    void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);

    void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);

    // Binding state -------------------------------------------------------
    void bindDescriptorSets(const PipelineLayout &layout,
                            VkPipelineBindPoint pipelineBindPoint,
                            uint32_t firstSet,
                            uint32_t descriptorSetCount,
                            const VkDescriptorSet *descriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t *dynamicOffsets);
    void bindGraphicsPipeline(const Pipeline &pipeline);
    void bindComputePipeline(const Pipeline &pipeline);
    void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);

    void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
    void bindVertexBuffers(uint32_t firstBinding,
                           uint32_t bindingCount,
                           const VkBuffer *buffers,
                           const VkDeviceSize *offsets);

    // Transfer / clear operations -----------------------------------------
    void blitImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageBlit *regions,
                   VkFilter filter);

    void clearColorImage(const Image &image,
                         VkImageLayout imageLayout,
                         const VkClearColorValue &color,
                         uint32_t rangeCount,
                         const VkImageSubresourceRange *ranges);
    void clearDepthStencilImage(const Image &image,
                                VkImageLayout imageLayout,
                                const VkClearDepthStencilValue &depthStencil,
                                uint32_t rangeCount,
                                const VkImageSubresourceRange *ranges);

    void clearAttachments(uint32_t attachmentCount,
                          const VkClearAttachment *attachments,
                          uint32_t rectCount,
                          const VkClearRect *rects);

    void copyBuffer(const Buffer &srcBuffer,
                    const Buffer &destBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy *regions);

    void copyBufferToImage(VkBuffer srcBuffer,
                           const Image &dstImage,
                           VkImageLayout dstImageLayout,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImageToBuffer(const Image &srcImage,
                           VkImageLayout srcImageLayout,
                           VkBuffer dstBuffer,
                           uint32_t regionCount,
                           const VkBufferImageCopy *regions);
    void copyImage(const Image &srcImage,
                   VkImageLayout srcImageLayout,
                   const Image &dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageCopy *regions);

    // Dispatch / draw ------------------------------------------------------
    void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
    void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);

    void draw(uint32_t vertexCount,
              uint32_t instanceCount,
              uint32_t firstVertex,
              uint32_t firstInstance);
    void draw(uint32_t vertexCount, uint32_t firstVertex);
    void drawInstanced(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex);
    void drawInstancedBaseInstance(uint32_t vertexCount,
                                   uint32_t instanceCount,
                                   uint32_t firstVertex,
                                   uint32_t firstInstance);
    void drawIndexed(uint32_t indexCount,
                     uint32_t instanceCount,
                     uint32_t firstIndex,
                     int32_t vertexOffset,
                     uint32_t firstInstance);
    void drawIndexed(uint32_t indexCount);
    void drawIndexedBaseVertex(uint32_t indexCount, uint32_t vertexOffset);
    void drawIndexedInstanced(uint32_t indexCount, uint32_t instanceCount);
    void drawIndexedInstancedBaseVertex(uint32_t indexCount,
                                        uint32_t instanceCount,
                                        uint32_t vertexOffset);
    void drawIndexedInstancedBaseVertexBaseInstance(uint32_t indexCount,
                                                    uint32_t instanceCount,
                                                    uint32_t firstIndex,
                                                    int32_t vertexOffset,
                                                    uint32_t firstInstance);
    void drawIndexedIndirect(const Buffer &buffer,
                             VkDeviceSize offset,
                             uint32_t drawCount,
                             uint32_t stride);
    void drawIndirect(const Buffer &buffer,
                      VkDeviceSize offset,
                      uint32_t drawCount,
                      uint32_t stride);

    VkResult end();
    void endQuery(const QueryPool &queryPool, uint32_t query);
    void endRenderPass();
    void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);

    void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;

    // Barriers / synchronization ------------------------------------------
    void executionBarrier(VkPipelineStageFlags stageMask);

    void fillBuffer(const Buffer &dstBuffer,
                    VkDeviceSize dstOffset,
                    VkDeviceSize size,
                    uint32_t data);

    void bufferBarrier(VkPipelineStageFlags srcStageMask,
                       VkPipelineStageFlags dstStageMask,
                       const VkBufferMemoryBarrier *bufferMemoryBarrier);

    void imageBarrier(VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      const VkImageMemoryBarrier &imageMemoryBarrier);

    void memoryBarrier(VkPipelineStageFlags srcStageMask,
                       VkPipelineStageFlags dstStageMask,
                       const VkMemoryBarrier *memoryBarrier);

    void nextSubpass(VkSubpassContents subpassContents);

    void pipelineBarrier(VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         VkDependencyFlags dependencyFlags,
                         uint32_t memoryBarrierCount,
                         const VkMemoryBarrier *memoryBarriers,
                         uint32_t bufferMemoryBarrierCount,
                         const VkBufferMemoryBarrier *bufferMemoryBarriers,
                         uint32_t imageMemoryBarrierCount,
                         const VkImageMemoryBarrier *imageMemoryBarriers);

    void pushConstants(const PipelineLayout &layout,
                       VkShaderStageFlags flag,
                       uint32_t offset,
                       uint32_t size,
                       const void *data);

    void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *scissors);
    VkResult reset();
    void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
    void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
    void resolveImage(const Image &srcImage,
                      VkImageLayout srcImageLayout,
                      const Image &dstImage,
                      VkImageLayout dstImageLayout,
                      uint32_t regionCount,
                      const VkImageResolve *regions);
    void waitEvents(uint32_t eventCount,
                    const VkEvent *events,
                    VkPipelineStageFlags srcStageMask,
                    VkPipelineStageFlags dstStageMask,
                    uint32_t memoryBarrierCount,
                    const VkMemoryBarrier *memoryBarriers,
                    uint32_t bufferMemoryBarrierCount,
                    const VkBufferMemoryBarrier *bufferMemoryBarriers,
                    uint32_t imageMemoryBarrierCount,
                    const VkImageMemoryBarrier *imageMemoryBarriers);

    void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
                        const QueryPool &queryPool,
                        uint32_t query);

    // VK_EXT_transform_feedback
    void beginTransformFeedbackEXT(uint32_t firstCounterBuffer,
                                   uint32_t counterBufferCount,
                                   const VkBuffer *counterBuffers,
                                   const VkDeviceSize *counterBufferOffsets);
    void endTransformFeedbackEXT(uint32_t firstCounterBuffer,
                                 uint32_t counterBufferCount,
                                 const VkBuffer *counterBuffers,
                                 const VkDeviceSize *counterBufferOffsets);
    void bindTransformFeedbackBuffersEXT(uint32_t firstBinding,
                                         uint32_t bindingCount,
                                         const VkBuffer *buffers,
                                         const VkDeviceSize *offsets,
                                         const VkDeviceSize *sizes);

    // VK_EXT_debug_utils
    void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
    void endDebugUtilsLabelEXT();
    void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
};
391 } // namespace priv
392
// Wraps VkImage.  Unlike most wrappers, the handle may be externally owned
// (e.g. swapchain images); setHandle()/reset() exist for that case.
class Image final : public WrappedObject<Image, VkImage>
{
  public:
    Image() = default;

    // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
    void setHandle(VkImage handle);

    // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
    void reset();

    // Called on shutdown when the helper class *does* own the handle to the image resource.
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);

    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);

    void getSubresourceLayout(VkDevice device,
                              VkImageAspectFlagBits aspectMask,
                              uint32_t mipLevel,
                              uint32_t arrayLayer,
                              VkSubresourceLayout *outSubresourceLayout) const;
};
418
// Wraps VkImageView.
class ImageView final : public WrappedObject<ImageView, VkImageView>
{
  public:
    ImageView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
};

// Wraps VkSemaphore.
class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
{
  public:
    Semaphore() = default;
    void destroy(VkDevice device);

    // Creates a semaphore with default create info.
    VkResult init(VkDevice device);
    VkResult init(VkDevice device, const VkSemaphoreCreateInfo &createInfo);
    // VK_KHR_external_semaphore_fd: import an external semaphore payload.
    VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
};

// Wraps VkFramebuffer.
class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
{
  public:
    Framebuffer() = default;
    void destroy(VkDevice device);

    // Use this method only in necessary cases. (RenderPass)
    void setHandle(VkFramebuffer handle);

    VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
};

// Wraps VkDeviceMemory.
class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
{
  public:
    DeviceMemory() = default;
    void destroy(VkDevice device);

    VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
    // Maps [offset, offset+size) into host memory; *mapPointer receives the
    // CPU address on success.
    VkResult map(VkDevice device,
                 VkDeviceSize offset,
                 VkDeviceSize size,
                 VkMemoryMapFlags flags,
                 uint8_t **mapPointer) const;
    void unmap(VkDevice device) const;
};
465
// Wraps a VmaAllocator (Vulkan Memory Allocator library) through
// vk_mem_alloc_wrapper.  Note: destroy() takes no device; VMA owns it.
class Allocator : public WrappedObject<Allocator, VmaAllocator>
{
  public:
    Allocator() = default;
    void destroy();

    VkResult init(VkPhysicalDevice physicalDevice,
                  VkDevice device,
                  VkInstance instance,
                  uint32_t apiVersion,
                  VkDeviceSize preferredLargeHeapBlockSize);

    // Initializes the buffer handle and memory allocation.
    VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                          VkMemoryPropertyFlags requiredFlags,
                          VkMemoryPropertyFlags preferredFlags,
                          bool persistentlyMappedBuffers,
                          uint32_t *memoryTypeIndexOut,
                          Buffer *bufferOut,
                          Allocation *allocationOut) const;

    void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
    VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut) const;

    // Debug helpers: the returned string must be released with freeStatsString.
    void buildStatsString(char **statsString, VkBool32 detailedMap);
    void freeStatsString(char *statsString);
};

// Wraps a VmaAllocation (a block of memory managed by the Allocator above).
class Allocation final : public WrappedObject<Allocation, VmaAllocation>
{
  public:
    Allocation() = default;
    void destroy(const Allocator &allocator);

    VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
    void unmap(const Allocator &allocator) const;
    void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size);
    void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size);

  private:
    friend class Allocator;
};
512
// Wraps VkRenderPass.  init2 uses the VkRenderPassCreateInfo2 path
// (core in Vulkan 1.2 / VK_KHR_create_renderpass2).
class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
{
  public:
    RenderPass() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
    VkResult init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo);
};

// Intended access pattern for a staging resource.
enum class StagingUsage
{
    Read,
    Write,
    Both,
};
529
// Wraps VkBuffer.  Allocator is a friend so Allocator::createBuffer can fill
// in the handle directly.
class Buffer final : public WrappedObject<Buffer, VkBuffer>
{
  public:
    Buffer() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
    VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
    void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);

  private:
    friend class Allocator;
};

// Wraps VkBufferView.
class BufferView final : public WrappedObject<BufferView, VkBufferView>
{
  public:
    BufferView() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
};

// Wraps VkShaderModule.
class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
{
  public:
    ShaderModule() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
};
561
// Wraps VkPipelineLayout.
class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
{
  public:
    PipelineLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
};

// Wraps VkPipelineCache.
class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
{
  public:
    PipelineCache() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
    // Standard two-call pattern: pass null cacheData to query the size.
    VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData);
    VkResult merge(VkDevice device,
                   VkPipelineCache dstCache,
                   uint32_t srcCacheCount,
                   const VkPipelineCache *srcCaches);
};

// Wraps VkDescriptorSetLayout.
class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
{
  public:
    DescriptorSetLayout() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
};

// Wraps VkDescriptorPool.  Descriptor sets themselves are not wrapped (see the
// "Unimplemented handle types" note at the top of this file).
class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
{
  public:
    DescriptorPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);

    VkResult allocateDescriptorSets(VkDevice device,
                                    const VkDescriptorSetAllocateInfo &allocInfo,
                                    VkDescriptorSet *descriptorSetsOut);
    VkResult freeDescriptorSets(VkDevice device,
                                uint32_t descriptorSetCount,
                                const VkDescriptorSet *descriptorSets);
};
609
// Wraps VkSampler.
class Sampler final : public WrappedObject<Sampler, VkSampler>
{
  public:
    Sampler() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
};

// Wraps VkSamplerYcbcrConversion (Vulkan 1.1 / VK_KHR_sampler_ycbcr_conversion).
class SamplerYcbcrConversion final
    : public WrappedObject<SamplerYcbcrConversion, VkSamplerYcbcrConversion>
{
  public:
    SamplerYcbcrConversion() = default;
    void destroy(VkDevice device);
    VkResult init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo);
};

// Wraps VkEvent.  Move-assignable (operator= re-exposed) so it can be stored
// in containers and handed between helpers.
class Event final : public WrappedObject<Event, VkEvent>
{
  public:
    Event() = default;
    void destroy(VkDevice device);
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
    VkResult getStatus(VkDevice device) const;
    VkResult set(VkDevice device) const;
    VkResult reset(VkDevice device) const;
};

// Wraps VkFence, including external-fence FD import/export
// (VK_KHR_external_fence_fd).
class Fence final : public WrappedObject<Fence, VkFence>
{
  public:
    Fence() = default;
    void destroy(VkDevice device);
    using WrappedObject::operator=;

    VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
    VkResult reset(VkDevice device);
    VkResult getStatus(VkDevice device) const;
    VkResult wait(VkDevice device, uint64_t timeout) const;
    VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
    VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *outFd) const;
};

// Wraps VkQueryPool.
class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
{
  public:
    QueryPool() = default;
    void destroy(VkDevice device);

    VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
    VkResult getResults(VkDevice device,
                        uint32_t firstQuery,
                        uint32_t queryCount,
                        size_t dataSize,
                        void *data,
                        VkDeviceSize stride,
                        VkQueryResultFlags flags) const;
};
670
// CommandPool implementation.
ANGLE_INLINE void CommandPool::destroy(VkDevice device)
{
    // Destroying the pool also frees all command buffers allocated from it.
    if (valid())
    {
        vkDestroyCommandPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
{
    ASSERT(valid());
    return vkResetCommandPool(device, mHandle, flags);
}

ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
                                                  uint32_t commandBufferCount,
                                                  const VkCommandBuffer *commandBuffers)
{
    ASSERT(valid());
    vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
}

ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
}
700
701 namespace priv
702 {
703
// CommandBuffer implementation.
ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
{
    // Gives up ownership without freeing; the pool still owns the allocation.
    VkCommandBuffer handle = mHandle;
    mHandle                = nullptr;
    return handle;
}

ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
                                          const VkCommandBufferAllocateInfo &createInfo)
{
    // Note: createInfo.commandBufferCount is expected to be 1 since only a
    // single handle (&mHandle) is provided.
    ASSERT(!valid());
    return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
}
718
719 ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
720 VkImageLayout srcImageLayout,
721 const Image &dstImage,
722 VkImageLayout dstImageLayout,
723 uint32_t regionCount,
724 const VkImageBlit *regions,
725 VkFilter filter)
726 {
727 ASSERT(valid() && srcImage.valid() && dstImage.valid());
728 ASSERT(regionCount == 1);
729 vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
730 dstImageLayout, 1, regions, filter);
731 }
732
// Starts recording into the command buffer.
ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
{
    ASSERT(valid());
    return vkBeginCommandBuffer(mHandle, &info);
}

// Finishes recording; traced since ending can be costly in some drivers.
ANGLE_INLINE VkResult CommandBuffer::end()
{
    ANGLE_TRACE_EVENT0("gpu.angle", "CommandBuffer::end");
    ASSERT(valid());
    return vkEndCommandBuffer(mHandle);
}

// Returns the command buffer to the initial state (no release-resources flag).
ANGLE_INLINE VkResult CommandBuffer::reset()
{
    ASSERT(valid());
    return vkResetCommandBuffer(mHandle, 0);
}
751
// Convenience: pipeline barrier carrying a single global memory barrier.
ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
                                               VkPipelineStageFlags dstStageMask,
                                               const VkMemoryBarrier *memoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, memoryBarrier, 0, nullptr, 0,
                         nullptr);
}

ANGLE_INLINE void CommandBuffer::nextSubpass(VkSubpassContents subpassContents)
{
    ASSERT(valid());
    vkCmdNextSubpass(mHandle, subpassContents);
}

// Full-generality pipeline barrier; thin pass-through to vkCmdPipelineBarrier.
ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
                                                 VkPipelineStageFlags dstStageMask,
                                                 VkDependencyFlags dependencyFlags,
                                                 uint32_t memoryBarrierCount,
                                                 const VkMemoryBarrier *memoryBarriers,
                                                 uint32_t bufferMemoryBarrierCount,
                                                 const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                                 uint32_t imageMemoryBarrierCount,
                                                 const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
                         memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                         imageMemoryBarrierCount, imageMemoryBarriers);
}

// Execution-only dependency: same stage mask on both sides, no memory barriers.
ANGLE_INLINE void CommandBuffer::executionBarrier(VkPipelineStageFlags stageMask)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, stageMask, stageMask, 0, 0, nullptr, 0, nullptr, 0, nullptr);
}

// Convenience: pipeline barrier carrying a single buffer memory barrier.
ANGLE_INLINE void CommandBuffer::bufferBarrier(VkPipelineStageFlags srcStageMask,
                                               VkPipelineStageFlags dstStageMask,
                                               const VkBufferMemoryBarrier *bufferMemoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 1, bufferMemoryBarrier,
                         0, nullptr);
}

// Convenience: pipeline barrier carrying a single image memory barrier.
ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
                                              VkPipelineStageFlags dstStageMask,
                                              const VkImageMemoryBarrier &imageMemoryBarrier)
{
    ASSERT(valid());
    vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
                         &imageMemoryBarrier);
}
806
// Pool-allocated variant: just drops the handle; the device parameter is
// unused because the pool owns (and will free) the allocation.
ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
{
    releaseHandle();
}

// Explicitly frees this command buffer back to its pool.
ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
{
    if (valid())
    {
        ASSERT(commandPool.valid());
        vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
821
822 ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
823 const Buffer &destBuffer,
824 uint32_t regionCount,
825 const VkBufferCopy *regions)
826 {
827 ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
828 vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
829 }
830
831 ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
832 const Image &dstImage,
833 VkImageLayout dstImageLayout,
834 uint32_t regionCount,
835 const VkBufferImageCopy *regions)
836 {
837 ASSERT(valid() && dstImage.valid());
838 ASSERT(srcBuffer != VK_NULL_HANDLE);
839 ASSERT(regionCount == 1);
840 vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
841 }
842
843 ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
844 VkImageLayout srcImageLayout,
845 VkBuffer dstBuffer,
846 uint32_t regionCount,
847 const VkBufferImageCopy *regions)
848 {
849 ASSERT(valid() && srcImage.valid());
850 ASSERT(dstBuffer != VK_NULL_HANDLE);
851 ASSERT(regionCount == 1);
852 vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
853 }
854
855 ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
856 VkImageLayout imageLayout,
857 const VkClearColorValue &color,
858 uint32_t rangeCount,
859 const VkImageSubresourceRange *ranges)
860 {
861 ASSERT(valid());
862 ASSERT(rangeCount == 1);
863 vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
864 }
865
866 ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
867 const Image &image,
868 VkImageLayout imageLayout,
869 const VkClearDepthStencilValue &depthStencil,
870 uint32_t rangeCount,
871 const VkImageSubresourceRange *ranges)
872 {
873 ASSERT(valid());
874 ASSERT(rangeCount == 1);
875 vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
876 }
877
// Clears regions of the current render pass attachments (inside a render pass).
ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
                                                  const VkClearAttachment *attachments,
                                                  uint32_t rectCount,
                                                  const VkClearRect *rects)
{
    ASSERT(valid());
    vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
}
886
887 ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
888 VkImageLayout srcImageLayout,
889 const Image &dstImage,
890 VkImageLayout dstImageLayout,
891 uint32_t regionCount,
892 const VkImageCopy *regions)
893 {
894 ASSERT(valid() && srcImage.valid() && dstImage.valid());
895 ASSERT(regionCount == 1);
896 vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
897 dstImageLayout, 1, regions);
898 }
899
900 ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
901 VkSubpassContents subpassContents)
902 {
903 ASSERT(valid());
904 vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
905 }
906
907 ANGLE_INLINE void CommandBuffer::endRenderPass()
908 {
909 ASSERT(mHandle != VK_NULL_HANDLE);
910 vkCmdEndRenderPass(mHandle);
911 }
912
// Binds the index buffer used by subsequent indexed draws.
ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
                                                 VkDeviceSize offset,
                                                 VkIndexType indexType)
{
    ASSERT(valid());
    vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
}

// Binds descriptor sets for the given pipeline layout/bind point.  Note the
// wrapper puts the layout first, unlike the raw Vulkan parameter order.
ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
                                                    VkPipelineBindPoint pipelineBindPoint,
                                                    uint32_t firstSet,
                                                    uint32_t descriptorSetCount,
                                                    const VkDescriptorSet *descriptorSets,
                                                    uint32_t dynamicOffsetCount,
                                                    const uint32_t *dynamicOffsets)
{
    ASSERT(valid() && layout.valid());
    vkCmdBindDescriptorSets(mHandle, pipelineBindPoint, layout.getHandle(), firstSet,
                            descriptorSetCount, descriptorSets, dynamicOffsetCount, dynamicOffsets);
}
933
// Executes secondary command buffers from this (primary) command buffer.
ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
                                                 const CommandBuffer *commandBuffers)
{
    ASSERT(valid());
    // commandBuffers[0].ptr() is treated as a contiguous VkCommandBuffer
    // array; this relies on the wrapper holding exactly one handle member
    // (see WrappedObject) so wrapper layout matches handle layout.
    vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
}

// Placeholder stats: Vulkan exposes no per-command-buffer memory usage, so
// report 0 used / 1 allocated (non-zero to avoid divide-by-zero in ratios —
// TODO confirm callers' expectation).
ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
                                                     size_t *allocatedMemoryOut) const
{
    // No data available.
    *usedMemoryOut      = 0;
    *allocatedMemoryOut = 1;
}
948
// Fills [dstOffset, dstOffset+size) of dstBuffer with a repeated 32-bit value.
ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize size,
                                            uint32_t data)
{
    ASSERT(valid());
    vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
}
957
958 ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
959 VkShaderStageFlags flag,
960 uint32_t offset,
961 uint32_t size,
962 const void *data)
963 {
964 ASSERT(valid() && layout.valid());
965 ASSERT(offset == 0);
966 vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
967 }
968
// Signals an event after the given pipeline stages complete.
ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdSetEvent(mHandle, event, stageMask);
}

// Sets the dynamic scissor state.
ANGLE_INLINE void CommandBuffer::setScissor(uint32_t firstScissor,
                                            uint32_t scissorCount,
                                            const VkRect2D *scissors)
{
    ASSERT(valid() && scissors != nullptr);
    vkCmdSetScissor(mHandle, firstScissor, scissorCount, scissors);
}

// Unsignals an event after the given pipeline stages complete.
ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
{
    ASSERT(valid() && event != VK_NULL_HANDLE);
    vkCmdResetEvent(mHandle, event, stageMask);
}
988
// Waits on one or more events and applies the supplied memory/buffer/image
// barriers between srcStageMask and dstStageMask.
ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
                                            const VkEvent *events,
                                            VkPipelineStageFlags srcStageMask,
                                            VkPipelineStageFlags dstStageMask,
                                            uint32_t memoryBarrierCount,
                                            const VkMemoryBarrier *memoryBarriers,
                                            uint32_t bufferMemoryBarrierCount,
                                            const VkBufferMemoryBarrier *bufferMemoryBarriers,
                                            uint32_t imageMemoryBarrierCount,
                                            const VkImageMemoryBarrier *imageMemoryBarriers)
{
    ASSERT(valid());
    vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
                    memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
                    imageMemoryBarrierCount, imageMemoryBarriers);
}
1005
// Resets a range of queries in the pool to the unavailable state.
ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
                                                uint32_t firstQuery,
                                                uint32_t queryCount)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
}
1013
// Resolves regions of a multisampled image into a single-sampled image.
ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
                                              VkImageLayout srcImageLayout,
                                              const Image &dstImage,
                                              VkImageLayout dstImageLayout,
                                              uint32_t regionCount,
                                              const VkImageResolve *regions)
{
    ASSERT(valid() && srcImage.valid() && dstImage.valid());
    vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
                      dstImageLayout, regionCount, regions);
}
1025
// Begins a query; must be paired with endQuery on the same query index.
ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
                                            uint32_t query,
                                            VkQueryControlFlags flags)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
}
1033
// Ends a query previously started with beginQuery.
ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
{
    ASSERT(valid() && queryPool.valid());
    vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
}
1039
// Writes a timestamp into the query pool when the given stage completes.
ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
                                                const QueryPool &queryPool,
                                                uint32_t query)
{
    ASSERT(valid());
    vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
}
1047
// Non-indexed draw; full-parameter form.
ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
                                      uint32_t instanceCount,
                                      uint32_t firstVertex,
                                      uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
}
1056
// Convenience overload: single instance, firstInstance == 0.
ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount, uint32_t firstVertex)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, 1, firstVertex, 0);
}
1062
// Instanced draw with firstInstance == 0.
ANGLE_INLINE void CommandBuffer::drawInstanced(uint32_t vertexCount,
                                               uint32_t instanceCount,
                                               uint32_t firstVertex)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, 0);
}
1070
// Instanced draw with an explicit base instance.
ANGLE_INLINE void CommandBuffer::drawInstancedBaseInstance(uint32_t vertexCount,
                                                           uint32_t instanceCount,
                                                           uint32_t firstVertex,
                                                           uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
}
1079
// Indexed draw; full-parameter form.
ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
                                             uint32_t instanceCount,
                                             uint32_t firstIndex,
                                             int32_t vertexOffset,
                                             uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
1089
// Convenience overload: one instance, no index/vertex/instance offsets.
ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, 1, 0, 0, 0);
}
1095
// Indexed draw with a base vertex.  Note: vertexOffset is uint32_t here but
// vkCmdDrawIndexed takes int32_t, so values > INT32_MAX would convert —
// callers presumably never pass such values (TODO confirm).
ANGLE_INLINE void CommandBuffer::drawIndexedBaseVertex(uint32_t indexCount, uint32_t vertexOffset)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, 1, 0, vertexOffset, 0);
}
1101
// Indexed, instanced draw with no offsets.
ANGLE_INLINE void CommandBuffer::drawIndexedInstanced(uint32_t indexCount, uint32_t instanceCount)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, 0, 0, 0);
}
1107
// Indexed, instanced draw with a base vertex (uint32_t converts to the
// int32_t vertexOffset parameter of vkCmdDrawIndexed).
ANGLE_INLINE void CommandBuffer::drawIndexedInstancedBaseVertex(uint32_t indexCount,
                                                                uint32_t instanceCount,
                                                                uint32_t vertexOffset)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, 0, vertexOffset, 0);
}
1115
// Indexed, instanced draw with all offsets explicit.
ANGLE_INLINE void CommandBuffer::drawIndexedInstancedBaseVertexBaseInstance(uint32_t indexCount,
                                                                            uint32_t instanceCount,
                                                                            uint32_t firstIndex,
                                                                            int32_t vertexOffset,
                                                                            uint32_t firstInstance)
{
    ASSERT(valid());
    vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
1125
// Indexed indirect draw; parameters are read from `buffer` at `offset`.
ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
                                                     VkDeviceSize offset,
                                                     uint32_t drawCount,
                                                     uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}
1134
// Non-indexed indirect draw; parameters are read from `buffer` at `offset`.
ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
                                              VkDeviceSize offset,
                                              uint32_t drawCount,
                                              uint32_t stride)
{
    ASSERT(valid());
    vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
}
1143
// Dispatches a compute workload of the given workgroup counts.
ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
                                          uint32_t groupCountY,
                                          uint32_t groupCountZ)
{
    ASSERT(valid());
    vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
}
1151
// Indirect compute dispatch; workgroup counts are read from `buffer` at `offset`.
ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
{
    ASSERT(valid());
    vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
}
1157
// Binds a pipeline to the given bind point (graphics or compute).
ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
                                              const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
}
1164
// Shorthand for bindPipeline with the graphics bind point.
ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
}
1170
// Shorthand for bindPipeline with the compute bind point.
ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
{
    ASSERT(valid() && pipeline.valid());
    vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
}
1176
// Binds vertex buffers to consecutive binding slots starting at firstBinding.
ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
                                                   uint32_t bindingCount,
                                                   const VkBuffer *buffers,
                                                   const VkDeviceSize *offsets)
{
    ASSERT(valid());
    vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
}
1185
// Begins transform feedback (VK_EXT_transform_feedback).  The extension entry
// point is loaded dynamically, hence the extra non-null assertion.
ANGLE_INLINE void CommandBuffer::beginTransformFeedbackEXT(uint32_t firstCounterBuffer,
                                                           uint32_t counterBufferCount,
                                                           const VkBuffer *counterBuffers,
                                                           const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdBeginTransformFeedbackEXT);
    vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                   counterBufferOffsets);
}
1196
// Ends transform feedback (VK_EXT_transform_feedback); entry point is loaded
// dynamically, hence the extra non-null assertion.
ANGLE_INLINE void CommandBuffer::endTransformFeedbackEXT(uint32_t firstCounterBuffer,
                                                         uint32_t counterBufferCount,
                                                         const VkBuffer *counterBuffers,
                                                         const VkDeviceSize *counterBufferOffsets)
{
    ASSERT(valid());
    ASSERT(vkCmdEndTransformFeedbackEXT);
    vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
                                 counterBufferOffsets);
}
1207
// Binds transform feedback buffers (VK_EXT_transform_feedback); entry point is
// loaded dynamically, hence the extra non-null assertion.
ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT(uint32_t firstBinding,
                                                                 uint32_t bindingCount,
                                                                 const VkBuffer *buffers,
                                                                 const VkDeviceSize *offsets,
                                                                 const VkDeviceSize *sizes)
{
    ASSERT(valid());
    ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
    vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
                                         sizes);
}
1219
// Opens a debug label region (VK_EXT_debug_utils) in this command buffer.
ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    // The inner scope limits the using-declaration below to this one call.
    {
#if !defined(ANGLE_SHARED_LIBVULKAN)
        // When the vulkan-loader is statically linked, we need to use the extension
        // functions defined in ANGLE's rx namespace. When it's dynamically linked
        // with volk, this will default to the function definitions with no namespace
        using rx::vkCmdBeginDebugUtilsLabelEXT;
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
        ASSERT(vkCmdBeginDebugUtilsLabelEXT);
        vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
    }
}
1234
// Closes the most recently opened debug label region (VK_EXT_debug_utils).
ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
{
    ASSERT(valid());
    ASSERT(vkCmdEndDebugUtilsLabelEXT);
    vkCmdEndDebugUtilsLabelEXT(mHandle);
}
1241
// Inserts a single (non-scoped) debug label (VK_EXT_debug_utils).
ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
{
    ASSERT(valid());
    ASSERT(vkCmdInsertDebugUtilsLabelEXT);
    vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
}
1248 } // namespace priv
1249
1250 // Image implementation.
// Adopts an externally created VkImage without taking a new reference;
// the wrapper does not know who owns it.
ANGLE_INLINE void Image::setHandle(VkImage handle)
{
    mHandle = handle;
}
1255
// Forgets the handle WITHOUT destroying it (use destroy() to also free it).
ANGLE_INLINE void Image::reset()
{
    mHandle = VK_NULL_HANDLE;
}
1260
// Destroys the image and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Image::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyImage(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1269
// Creates the image; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateImage(device, &createInfo, nullptr, &mHandle);
}
1275
// Queries size/alignment/memory-type requirements for this image.
ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
                                               VkMemoryRequirements *requirementsOut) const
{
    ASSERT(valid());
    vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
}
1282
// Binds device memory to this image at offset 0.
ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
}
1288
// Queries the memory layout (offset/pitch) of one subresource of the image.
// Valid only for linear-tiling images per the Vulkan spec — caller's responsibility.
ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
                                              VkImageAspectFlagBits aspectMask,
                                              uint32_t mipLevel,
                                              uint32_t arrayLayer,
                                              VkSubresourceLayout *outSubresourceLayout) const
{
    VkImageSubresource subresource = {};
    subresource.aspectMask         = aspectMask;
    subresource.mipLevel           = mipLevel;
    subresource.arrayLayer         = arrayLayer;

    vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
}
1302
1303 // ImageView implementation.
// Destroys the image view and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void ImageView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyImageView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1312
1313 ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
1314 {
1315 return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
1316 }
1317
1318 // Semaphore implementation.
// Destroys the semaphore and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Semaphore::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySemaphore(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1327
// Creates a default (binary, unsignaled) semaphore.
ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
{
    ASSERT(!valid());

    VkSemaphoreCreateInfo semaphoreInfo = {};
    semaphoreInfo.sType                 = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.flags                 = 0;

    return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
}
1338
1339 ANGLE_INLINE VkResult Semaphore::init(VkDevice device, const VkSemaphoreCreateInfo &createInfo)
1340 {
1341 ASSERT(valid());
1342 return vkCreateSemaphore(device, &createInfo, nullptr, &mHandle);
1343 }
1344
// Imports an external semaphore payload from a POSIX fd (VK_KHR_external_semaphore_fd).
ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
                                          const VkImportSemaphoreFdInfoKHR &importFdInfo) const
{
    ASSERT(valid());
    return vkImportSemaphoreFdKHR(device, &importFdInfo);
}
1351
1352 // Framebuffer implementation.
// Destroys the framebuffer and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFramebuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1361
// Creates the framebuffer; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
}
1367
// Adopts an externally created VkFramebuffer; ownership semantics are the caller's.
ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
{
    mHandle = handle;
}
1372
1373 // DeviceMemory implementation.
// Frees the device memory and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
{
    if (valid())
    {
        vkFreeMemory(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1382
// Allocates device memory; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
{
    ASSERT(!valid());
    return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
}
1388
// Maps a range of the memory into host address space; *mapPointer receives
// the CPU pointer on success.  Traced because mapping can be expensive.
ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
                                        VkDeviceSize offset,
                                        VkDeviceSize size,
                                        VkMemoryMapFlags flags,
                                        uint8_t **mapPointer) const
{
    ANGLE_TRACE_EVENT0("gpu.angle", "DeviceMemory::map");
    ASSERT(valid());
    return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
}
1399
// Unmaps a previously mapped range; any pointer from map() becomes invalid.
ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
{
    ASSERT(valid());
    vkUnmapMemory(device, mHandle);
}
1405
1406 // Allocator implementation.
// Destroys the VMA allocator and clears the handle; no-op when empty.
ANGLE_INLINE void Allocator::destroy()
{
    if (valid())
    {
        vma::DestroyAllocator(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
1415
// Creates the VMA allocator for the given device; the wrapper must be empty.
ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
                                      VkDevice device,
                                      VkInstance instance,
                                      uint32_t apiVersion,
                                      VkDeviceSize preferredLargeHeapBlockSize)
{
    ASSERT(!valid());
    return vma::InitAllocator(physicalDevice, device, instance, apiVersion,
                              preferredLargeHeapBlockSize, &mHandle);
}
1426
// Creates a buffer and backs it with a VMA allocation in one call.
// On success, writes the chosen memory type index and fills bufferOut /
// allocationOut (both must be passed in empty).
ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut,
                                              Buffer *bufferOut,
                                              Allocation *allocationOut) const
{
    ASSERT(valid());
    ASSERT(bufferOut && !bufferOut->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
                             persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
                             &allocationOut->mHandle);
}
1442
// Returns the property flags of the given memory type index.
ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
                                                     VkMemoryPropertyFlags *flagsOut) const
{
    ASSERT(valid());
    vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
}
1449
// Finds a memory type index suitable for a buffer with the given create info
// and property constraints, without creating anything.
ANGLE_INLINE VkResult
Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                            VkMemoryPropertyFlags requiredFlags,
                                            VkMemoryPropertyFlags preferredFlags,
                                            bool persistentlyMappedBuffers,
                                            uint32_t *memoryTypeIndexOut) const
{
    ASSERT(valid());
    return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
                                                 preferredFlags, persistentlyMappedBuffers,
                                                 memoryTypeIndexOut);
}
1462
// Builds a VMA statistics string; release it with freeStatsString().
ANGLE_INLINE void Allocator::buildStatsString(char **statsString, VkBool32 detailedMap)
{
    ASSERT(valid());
    vma::BuildStatsString(mHandle, statsString, detailedMap);
}
1468
// Frees a string previously returned by buildStatsString().
ANGLE_INLINE void Allocator::freeStatsString(char *statsString)
{
    ASSERT(valid());
    vma::FreeStatsString(mHandle, statsString);
}
1474
1475 // Allocation implementation.
// Frees the VMA allocation and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
{
    if (valid())
    {
        vma::FreeMemory(allocator.getHandle(), mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}
1484
1485 ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
1486 {
1487 ASSERT(valid());
1488 return vma::MapMemory(allocator.getHandle(), mHandle, (void **)mapPointer);
1489 }
1490
// Unmaps the allocation; any pointer from map() becomes invalid.
ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
{
    ASSERT(valid());
    vma::UnmapMemory(allocator.getHandle(), mHandle);
}
1496
// Flushes host writes in [offset, offset+size) to the device (needed for
// non-coherent memory).
ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
                                    VkDeviceSize offset,
                                    VkDeviceSize size)
{
    ASSERT(valid());
    vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
}
1504
// Invalidates [offset, offset+size) so the host sees device writes (needed
// for non-coherent memory).
ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
                                         VkDeviceSize offset,
                                         VkDeviceSize size)
{
    ASSERT(valid());
    vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
}
1512
1513 // RenderPass implementation.
// Destroys the render pass and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void RenderPass::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyRenderPass(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1522
// Creates the render pass (core v1 create info); the wrapper must be empty.
ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
}
1528
// Creates the render pass via VK_KHR_create_renderpass2; requires the
// extension (or Vulkan 1.2) entry point to be loaded.
ANGLE_INLINE VkResult RenderPass::init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass2KHR(device, &createInfo, nullptr, &mHandle);
}
1534
1535 // Buffer implementation.
// Destroys the buffer and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Buffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1544
// Creates the buffer; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
}
1550
// Binds device memory to this buffer at offset 0.
ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device, const DeviceMemory &deviceMemory)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), 0);
}
1556
// Queries size/alignment/memory-type requirements for this buffer.
ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
                                                VkMemoryRequirements *memoryRequirementsOut)
{
    ASSERT(valid());
    vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
}
1563
1564 // BufferView implementation.
// Destroys the buffer view and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void BufferView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBufferView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1573
// Creates the buffer view; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
}
1579
1580 // ShaderModule implementation.
1581 ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
1582 {
1583 if (mHandle != VK_NULL_HANDLE)
1584 {
1585 vkDestroyShaderModule(device, mHandle, nullptr);
1586 mHandle = VK_NULL_HANDLE;
1587 }
1588 }
1589
// Creates the shader module from SPIR-V in createInfo; wrapper must be empty.
ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
                                         const VkShaderModuleCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
}
1596
1597 // PipelineLayout implementation.
// Destroys the pipeline layout and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1606
// Creates the pipeline layout; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
                                           const VkPipelineLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
}
1613
1614 // PipelineCache implementation.
// Destroys the pipeline cache and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineCache(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1623
// Creates the pipeline cache; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
                                          const VkPipelineCacheCreateInfo &createInfo)
{
    ASSERT(!valid());
    // Note: if we are concerned with memory usage of this cache, we should give it custom
    // allocators.  Also, failure of this function is of little importance.
    return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
}
1632
// Merges srcCaches into dstCache.  NOTE(review): this wrapper's own handle
// (mHandle) is only used for the valid() check, not as the destination —
// the caller supplies dstCache explicitly.
ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
                                           VkPipelineCache dstCache,
                                           uint32_t srcCacheCount,
                                           const VkPipelineCache *srcCaches)
{
    ASSERT(valid());
    return vkMergePipelineCaches(device, dstCache, srcCacheCount, srcCaches);
}
1641
// Retrieves pipeline cache data, or (when *cacheSize == 0) queries the
// required size.  See the note below about VK_INCOMPLETE.
ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
                                                  size_t *cacheSize,
                                                  void *cacheData)
{
    ASSERT(valid());

    // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if cacheSize is smaller than actual
    // size. There are two usages of this function. One is with *cacheSize == 0 to query the size
    // of the cache, and one is with an appropriate buffer to retrieve the cache contents.
    // VK_INCOMPLETE in the first case is an expected output. In the second case, VK_INCOMPLETE is
    // also acceptable and the resulting buffer will contain valid value by spec. Angle currently
    // ensures *cacheSize to be either 0 or of enough size, therefore VK_INCOMPLETE is not expected.
    return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
}
1656
1657 // Pipeline implementation.
// Destroys the pipeline and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Pipeline::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipeline(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1666
// Creates one graphics pipeline, consulting/populating the given pipeline cache.
ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
                                             const VkGraphicsPipelineCreateInfo &createInfo,
                                             const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                     &mHandle);
}
1675
// Creates one compute pipeline, consulting/populating the given pipeline cache.
ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
                                            const VkComputePipelineCreateInfo &createInfo,
                                            const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                    &mHandle);
}
1684
1685 // DescriptorSetLayout implementation.
// Destroys the descriptor set layout and clears the handle; no-op when empty.
ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1694
// Creates the descriptor set layout; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
                                                const VkDescriptorSetLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
}
1701
1702 // DescriptorPool implementation.
// Destroys the descriptor pool (and implicitly all sets allocated from it);
// no-op when the wrapper is empty.
ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1711
// Creates the descriptor pool; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
                                           const VkDescriptorPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
}
1718
// Allocates descriptor sets described by allocInfo.  NOTE(review): allocInfo
// carries its own descriptorPool; this wrapper's handle is only used for the
// valid() check — caller must keep the two consistent.
ANGLE_INLINE VkResult
DescriptorPool::allocateDescriptorSets(VkDevice device,
                                       const VkDescriptorSetAllocateInfo &allocInfo,
                                       VkDescriptorSet *descriptorSetsOut)
{
    ASSERT(valid());
    return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
}
1727
// Returns descriptor sets to this pool (pool must have been created with the
// FREE_DESCRIPTOR_SET bit per the Vulkan spec).
ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
                                                         uint32_t descriptorSetCount,
                                                         const VkDescriptorSet *descriptorSets)
{
    ASSERT(valid());
    ASSERT(descriptorSetCount > 0);
    return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
}
1736
1737 // Sampler implementation.
// Destroys the sampler and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Sampler::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySampler(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1746
// Creates the sampler; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
}
1752
// SamplerYcbcrConversion implementation.
// Destroys the Y'CbCr conversion object (VK_KHR_sampler_ycbcr_conversion);
// no-op when the wrapper is empty.
ANGLE_INLINE void SamplerYcbcrConversion::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySamplerYcbcrConversionKHR(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1762
// Creates the Y'CbCr conversion object; the wrapper must be empty.
ANGLE_INLINE VkResult
SamplerYcbcrConversion::init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSamplerYcbcrConversionKHR(device, &createInfo, nullptr, &mHandle);
}
1769
1770 // Event implementation.
// Destroys the event and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Event::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyEvent(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1779
// Creates the event; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
}
1785
// Returns VK_EVENT_SET or VK_EVENT_RESET (or an error code).
ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetEventStatus(device, mHandle);
}
1791
// Signals the event from the host.
ANGLE_INLINE VkResult Event::set(VkDevice device) const
{
    ASSERT(valid());
    return vkSetEvent(device, mHandle);
}
1797
// Unsignals the event from the host.
ANGLE_INLINE VkResult Event::reset(VkDevice device) const
{
    ASSERT(valid());
    return vkResetEvent(device, mHandle);
}
1803
1804 // Fence implementation.
// Destroys the fence and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void Fence::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFence(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1813
// Creates the fence; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFence(device, &createInfo, nullptr, &mHandle);
}
1819
// Returns the fence to the unsignaled state.
ANGLE_INLINE VkResult Fence::reset(VkDevice device)
{
    ASSERT(valid());
    return vkResetFences(device, 1, &mHandle);
}
1825
// Returns VK_SUCCESS (signaled), VK_NOT_READY (unsignaled), or an error code.
ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetFenceStatus(device, mHandle);
}
1831
// Blocks until the fence is signaled or `timeout` nanoseconds elapse
// (returns VK_TIMEOUT in the latter case).
ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
{
    ASSERT(valid());
    return vkWaitForFences(device, 1, &mHandle, true, timeout);
}
1837
// Imports an external fence payload from a POSIX fd (VK_KHR_external_fence_fd).
ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
                                      const VkImportFenceFdInfoKHR &importFenceFdInfo) const
{
    ASSERT(valid());
    return vkImportFenceFdKHR(device, &importFenceFdInfo);
}
1844
// Exports the fence payload as a POSIX fd (VK_KHR_external_fence_fd).
ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
                                      const VkFenceGetFdInfoKHR &fenceGetFdInfo,
                                      int *fdOut) const
{
    ASSERT(valid());
    return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
}
1852
1853 // QueryPool implementation.
// Destroys the query pool and clears the handle; no-op when the wrapper is empty.
ANGLE_INLINE void QueryPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyQueryPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}
1862
// Creates the query pool; the wrapper must not already hold a handle.
ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
}
1868
// Copies results for [firstQuery, firstQuery+queryCount) into `data`.
// May return VK_NOT_READY without VK_QUERY_RESULT_WAIT_BIT in flags.
ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
                                            uint32_t firstQuery,
                                            uint32_t queryCount,
                                            size_t dataSize,
                                            void *data,
                                            VkDeviceSize stride,
                                            VkQueryResultFlags flags) const
{
    ASSERT(valid());
    return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
                                 flags);
}
1881 } // namespace vk
1882 } // namespace rx
1883
1884 #endif // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
1885