/* Copyright (c) 2017-2018 Hans-Kristian Arntzen
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#pragma once

#include "buffer.hpp"
#include "command_buffer.hpp"
#include "command_pool.hpp"
#include "fence.hpp"
#include "fence_manager.hpp"
#include "image.hpp"
#include "memory_allocator.hpp"
#include "render_pass.hpp"
#include "sampler.hpp"
#include "semaphore.hpp"
#include "semaphore_manager.hpp"
#include "event_manager.hpp"
#include "shader.hpp"
#include "vulkan.hpp"
#include "query_pool.hpp"
#include "buffer_pool.hpp"
#include <memory>
#include <vector>
#include <functional>
#include <unordered_map>

#ifdef GRANITE_VULKAN_FILESYSTEM
#include "shader_manager.hpp"
#include "texture_manager.hpp"
#endif

#ifdef GRANITE_VULKAN_MT
#include <atomic>
#include <mutex>
#include <condition_variable>
#include "thread_group.hpp"
#endif

#ifdef GRANITE_VULKAN_FOSSILIZE
#include "fossilize.hpp"
#endif

#include "quirks.hpp"

namespace Vulkan
{
enum class SwapchainRenderPass
{
	ColorOnly,
	Depth,
	DepthStencil
};

struct InitialImageBuffer
{
	BufferHandle buffer;
	std::vector<VkBufferImageCopy> blits;
};

struct HandlePool
{
	VulkanObjectPool<Buffer> buffers;
	VulkanObjectPool<Image> images;
	VulkanObjectPool<LinearHostImage> linear_images;
	VulkanObjectPool<ImageView> image_views;
	VulkanObjectPool<BufferView> buffer_views;
	VulkanObjectPool<Sampler> samplers;
	VulkanObjectPool<FenceHolder> fences;
	VulkanObjectPool<SemaphoreHolder> semaphores;
	VulkanObjectPool<EventHolder> events;
	VulkanObjectPool<QueryPoolResult> query;
	VulkanObjectPool<CommandBuffer> command_buffers;
};

class Device
#ifdef GRANITE_VULKAN_FOSSILIZE
	: public Fossilize::StateCreatorInterface
#endif
{
public:
	// Device-based objects which need to poke at internal data structures when their lifetimes end.
	// We don't want to expose a lot of internal guts to make this work, so grant friendship instead.
	friend class QueryPool;
	friend struct QueryPoolResultDeleter;
	friend class EventHolder;
	friend struct EventHolderDeleter;
	friend class SemaphoreHolder;
	friend struct SemaphoreHolderDeleter;
	friend class FenceHolder;
	friend struct FenceHolderDeleter;
	friend class Sampler;
	friend struct SamplerDeleter;
	friend class Buffer;
	friend struct BufferDeleter;
	friend class BufferView;
	friend struct BufferViewDeleter;
	friend class ImageView;
	friend struct ImageViewDeleter;
	friend class Image;
	friend struct ImageDeleter;
	friend struct LinearHostImageDeleter;
	friend class CommandBuffer;
	friend struct CommandBufferDeleter;
	friend class Program;
	friend class WSI;
	friend class Cookie;
	friend class Framebuffer;
	friend class PipelineLayout;
	friend class FramebufferAllocator;
	friend class RenderPass;
	friend class Texture;
	friend class DescriptorSetAllocator;
	friend class Shader;

	Device();
	~Device();

	// Not movable or copyable.
	void operator=(Device &&) = delete;
	Device(Device &&) = delete;

	// Only called by main thread, during setup phase.
	void set_context(const Context &context);
	void init_swapchain(const std::vector<VkImage> &swapchain_images, unsigned width, unsigned height, VkFormat format);
	void init_external_swapchain(const std::vector<ImageHandle> &swapchain_images);
	void init_frame_contexts(unsigned count);
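
	// Example (usage sketch, not from this codebase; assumes a valid Context was created elsewhere):
	//
	//   Vulkan::Device device;
	//   device.set_context(context);
	//   device.init_frame_contexts(2); // e.g. double-buffer CPU vs. GPU work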

	ImageView &get_swapchain_view();
	ImageView &get_swapchain_view(unsigned index);
	unsigned get_num_swapchain_images() const;
	unsigned get_num_frame_contexts() const;
	unsigned get_swapchain_index() const;
	unsigned get_current_frame_context() const;

	size_t get_pipeline_cache_size();
	bool get_pipeline_cache_data(uint8_t *data, size_t size);
	bool init_pipeline_cache(const uint8_t *data, size_t size);
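
	// Example (sketch): persisting the pipeline cache across runs.
	// save_blob/load_blob are hypothetical application helpers.
	//
	//   std::vector<uint8_t> blob(device.get_pipeline_cache_size());
	//   if (device.get_pipeline_cache_data(blob.data(), blob.size()))
	//       save_blob("pipeline_cache.bin", blob);
	//   // ... and on the next run:
	//   auto blob = load_blob("pipeline_cache.bin");
	//   device.init_pipeline_cache(blob.data(), blob.size());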

	// Frame-pushing interface.
	void next_frame_context();
	void wait_idle();
	void end_frame_context();
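
	// Example (sketch): a typical frame loop. next_frame_context() begins a new frame
	// context and reclaims resources from the oldest one once its GPU work has completed.
	//
	//   while (running)
	//   {
	//       device.next_frame_context();
	//       auto cmd = device.request_command_buffer();
	//       // ... record rendering for this frame ...
	//       device.submit(cmd);
	//   }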

	// Set names for objects for debuggers and profilers.
	void set_name(const Buffer &buffer, const char *name);
	void set_name(const Image &image, const char *name);
	void set_name(const CommandBuffer &cmd, const char *name);
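
	// Example (sketch): names show up in tools such as RenderDoc.
	// vertex_buffer is a hypothetical BufferHandle.
	//
	//   device.set_name(*vertex_buffer, "scene-vertices");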

	// Submission interface; may be called from any thread at any time.
	void flush_frame();
	CommandBufferHandle request_command_buffer(CommandBuffer::Type type = CommandBuffer::Type::Generic);
	CommandBufferHandle request_command_buffer_for_thread(unsigned thread_index, CommandBuffer::Type type = CommandBuffer::Type::Generic);
	void submit(CommandBufferHandle &cmd, Fence *fence = nullptr,
	            unsigned semaphore_count = 0, Semaphore *semaphore = nullptr);
	void submit_empty(CommandBuffer::Type type,
	                  Fence *fence = nullptr,
	                  unsigned semaphore_count = 0,
	                  Semaphore *semaphore = nullptr);
	void add_wait_semaphore(CommandBuffer::Type type, Semaphore semaphore, VkPipelineStageFlags stages, bool flush);
	CommandBuffer::Type get_physical_queue_type(CommandBuffer::Type queue_type) const;
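
	// Example (sketch): submit work and block the CPU until it completes.
	// Assumes the Fence handle exposes wait() on its holder.
	//
	//   auto cmd = device.request_command_buffer();
	//   // ... record commands ...
	//   Fence fence;
	//   device.submit(cmd, &fence);
	//   fence->wait();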

	// Request shaders and programs. These objects are owned by the Device.
	Shader *request_shader(const uint32_t *code, size_t size);
	Shader *request_shader_by_hash(Util::Hash hash);
	Program *request_program(const uint32_t *vertex_data, size_t vertex_size, const uint32_t *fragment_data,
	                         size_t fragment_size);
	Program *request_program(const uint32_t *compute_data, size_t compute_size);
	Program *request_program(Shader *vertex, Shader *fragment);
	Program *request_program(Shader *compute);
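
	// Example (sketch; vert_spirv/frag_spirv are assumed std::vector<uint32_t> SPIR-V blobs,
	// and the size parameter is assumed to be in bytes):
	//
	//   Shader *vert = device.request_shader(vert_spirv.data(), vert_spirv.size() * sizeof(uint32_t));
	//   Shader *frag = device.request_shader(frag_spirv.data(), frag_spirv.size() * sizeof(uint32_t));
	//   Program *program = device.request_program(vert, frag);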

	// Map and unmap buffer objects.
	void *map_host_buffer(const Buffer &buffer, MemoryAccessFlags access);
	void unmap_host_buffer(const Buffer &buffer, MemoryAccessFlags access);

	void *map_linear_host_image(const LinearHostImage &image, MemoryAccessFlags access);
	void unmap_linear_host_image_and_sync(const LinearHostImage &image, MemoryAccessFlags access);
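
	// Example (sketch): CPU-write into a host-visible buffer. The exact
	// MemoryAccessFlags bit name used here is an assumption.
	//
	//   auto *data = static_cast<float *>(device.map_host_buffer(*buffer, MEMORY_ACCESS_WRITE_BIT));
	//   data[0] = 1.0f;
	//   device.unmap_host_buffer(*buffer, MEMORY_ACCESS_WRITE_BIT);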

	// Create buffers and images.
	BufferHandle create_buffer(const BufferCreateInfo &info, const void *initial = nullptr);
	ImageHandle create_image(const ImageCreateInfo &info, const ImageInitialData *initial = nullptr);
	ImageHandle create_image_from_staging_buffer(const ImageCreateInfo &info, const InitialImageBuffer *buffer);
	LinearHostImageHandle create_linear_host_image(const LinearHostImageCreateInfo &info);

	// Create staging buffers for images.
	InitialImageBuffer create_image_staging_buffer(const ImageCreateInfo &info, const ImageInitialData *initial);
	InitialImageBuffer create_image_staging_buffer(const TextureFormatLayout &layout);
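
	// Example (sketch; the BufferCreateInfo field names are assumptions based on typical usage):
	//
	//   BufferCreateInfo info = {};
	//   info.size = sizeof(vertices);
	//   info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
	//   info.domain = BufferDomain::Device;
	//   BufferHandle vbo = device.create_buffer(info, vertices); // initial data is staged/uploaded for us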

#ifndef _WIN32
	ImageHandle create_imported_image(int fd,
	                                  VkDeviceSize size,
	                                  uint32_t memory_type,
	                                  VkExternalMemoryHandleTypeFlagBitsKHR handle_type,
	                                  const ImageCreateInfo &create_info);
#endif

	// Create image views, buffer views and samplers.
	ImageViewHandle create_image_view(const ImageViewCreateInfo &view_info);
	BufferViewHandle create_buffer_view(const BufferViewCreateInfo &view_info);
	SamplerHandle create_sampler(const SamplerCreateInfo &info);

	// Render pass helpers.
	bool image_format_is_supported(VkFormat format, VkFormatFeatureFlags required, VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL) const;
	void get_format_properties(VkFormat format, VkFormatProperties *properties);
	bool get_image_format_properties(VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
	                                 VkImageFormatProperties *properties);

	VkFormat get_default_depth_stencil_format() const;
	VkFormat get_default_depth_format() const;
	ImageView &get_transient_attachment(unsigned width, unsigned height, VkFormat format,
	                                    unsigned index = 0, unsigned samples = 1, unsigned layers = 1);
	RenderPassInfo get_swapchain_render_pass(SwapchainRenderPass style);
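
	// Example (sketch): render to the swapchain with a transient depth attachment.
	// Assumes CommandBuffer exposes begin_render_pass()/end_render_pass() taking a RenderPassInfo.
	//
	//   auto rp = device.get_swapchain_render_pass(SwapchainRenderPass::Depth);
	//   cmd->begin_render_pass(rp);
	//   // ... draw ...
	//   cmd->end_render_pass();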

	// Request semaphores.
	Semaphore request_semaphore();
	Semaphore request_external_semaphore(VkSemaphore semaphore, bool signalled);
#ifndef _WIN32
	Semaphore request_imported_semaphore(int fd, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type);
#endif
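
	// Example (sketch): cross-queue synchronization. Signal a semaphore when a
	// submission completes, then make later graphics submissions wait for it.
	// compute_cmd is a hypothetical command buffer recorded for async compute.
	//
	//   Semaphore sem;
	//   device.submit(compute_cmd, nullptr, 1, &sem);
	//   device.add_wait_semaphore(CommandBuffer::Type::Generic, sem,
	//                             VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, true);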

	VkDevice get_device()
	{
		return device;
	}

	const VkPhysicalDeviceMemoryProperties &get_memory_properties() const
	{
		return mem_props;
	}

	const VkPhysicalDeviceProperties &get_gpu_properties() const
	{
		return gpu_props;
	}

	const Sampler &get_stock_sampler(StockSampler sampler) const;

#ifdef GRANITE_VULKAN_FILESYSTEM
	ShaderManager &get_shader_manager();
	TextureManager &get_texture_manager();
	void init_shader_manager_cache();
	void flush_shader_manager_cache();
#endif

	// For some platforms, the device and queue might be shared, possibly across threads, so we
	// need some mechanism to lock the global device and queue.
	void set_queue_lock(std::function<void ()> lock_callback,
	                    std::function<void ()> unlock_callback);
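
	// Example (sketch): serialize queue access with an external mutex.
	//
	//   static std::mutex queue_mutex;
	//   device.set_queue_lock([]() { queue_mutex.lock(); },
	//                         []() { queue_mutex.unlock(); });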

	const ImplementationWorkarounds &get_workarounds() const
	{
		return workarounds;
	}

	const DeviceFeatures &get_device_features() const
	{
		return ext;
	}

	bool swapchain_touched() const;

private:
	VkInstance instance = VK_NULL_HANDLE;
	VkPhysicalDevice gpu = VK_NULL_HANDLE;
	VkDevice device = VK_NULL_HANDLE;
	VkQueue graphics_queue = VK_NULL_HANDLE;
	VkQueue compute_queue = VK_NULL_HANDLE;
	VkQueue transfer_queue = VK_NULL_HANDLE;

#ifdef GRANITE_VULKAN_MT
	std::atomic<uint64_t> cookie;
#else
	uint64_t cookie = 0;
#endif

	uint64_t allocate_cookie();
	void bake_program(Program &program);

	void request_vertex_block(BufferBlock &block, VkDeviceSize size);
	void request_index_block(BufferBlock &block, VkDeviceSize size);
	void request_uniform_block(BufferBlock &block, VkDeviceSize size);
	void request_staging_block(BufferBlock &block, VkDeviceSize size);

	QueryPoolHandle write_timestamp(VkCommandBuffer cmd, VkPipelineStageFlagBits stage);

	void set_acquire_semaphore(unsigned index, Semaphore acquire);
	Semaphore consume_release_semaphore();

	PipelineLayout *request_pipeline_layout(const CombinedResourceLayout &layout);
	DescriptorSetAllocator *request_descriptor_set_allocator(const DescriptorSetLayout &layout, const uint32_t *stages_for_sets);
	const Framebuffer &request_framebuffer(const RenderPassInfo &info);
	const RenderPass &request_render_pass(const RenderPassInfo &info, bool compatible);

	VkPhysicalDeviceMemoryProperties mem_props;
	VkPhysicalDeviceProperties gpu_props;

	DeviceFeatures ext;
	void init_stock_samplers();

	// Make sure this is deleted last.
	HandlePool handle_pool;

	struct Managers
	{
		DeviceAllocator memory;
		FenceManager fence;
		SemaphoreManager semaphore;
		EventManager event;
		BufferPool vbo, ibo, ubo, staging;
	};
	Managers managers;

	struct
	{
#ifdef GRANITE_VULKAN_MT
		std::mutex lock;
		std::condition_variable cond;
#endif
		unsigned counter = 0;
	} lock;
	void add_frame_counter();
	void decrement_frame_counter();

	struct PerFrame
	{
		PerFrame(Device *device);
		~PerFrame();
		void operator=(const PerFrame &) = delete;
		PerFrame(const PerFrame &) = delete;

		void begin();

		VkDevice device;
		Managers &managers;
		std::vector<CommandPool> graphics_cmd_pool;
		std::vector<CommandPool> compute_cmd_pool;
		std::vector<CommandPool> transfer_cmd_pool;
		QueryPool query_pool;

		std::vector<BufferBlock> vbo_blocks;
		std::vector<BufferBlock> ibo_blocks;
		std::vector<BufferBlock> ubo_blocks;
		std::vector<BufferBlock> staging_blocks;

		std::vector<VkFence> wait_fences;
		std::vector<VkFence> recycle_fences;
		std::vector<DeviceAllocation> allocations;
		std::vector<VkFramebuffer> destroyed_framebuffers;
		std::vector<VkSampler> destroyed_samplers;
		std::vector<VkPipeline> destroyed_pipelines;
		std::vector<VkImageView> destroyed_image_views;
		std::vector<VkBufferView> destroyed_buffer_views;
		std::vector<VkImage> destroyed_images;
		std::vector<VkBuffer> destroyed_buffers;
		std::vector<CommandBufferHandle> graphics_submissions;
		std::vector<CommandBufferHandle> compute_submissions;
		std::vector<CommandBufferHandle> transfer_submissions;
		std::vector<VkSemaphore> recycled_semaphores;
		std::vector<VkEvent> recycled_events;
		std::vector<VkSemaphore> destroyed_semaphores;
		std::vector<ImageHandle> keep_alive_images;
	};
	// The per frame structure must be destroyed after
	// the hashmap data structures below, so it must be declared before.
	std::vector<std::unique_ptr<PerFrame>> per_frame;

	struct
	{
		Semaphore acquire;
		Semaphore release;
		bool touched = false;
		bool consumed = false;
		std::vector<ImageHandle> swapchain;
		unsigned index = 0;
	} wsi;

	struct QueueData
	{
		std::vector<Semaphore> wait_semaphores;
		std::vector<VkPipelineStageFlags> wait_stages;
		bool need_fence = false;
	} graphics, compute, transfer;

	// Pending buffers which need to be copied from CPU to GPU before submitting graphics or compute work.
	struct
	{
		std::vector<BufferBlock> vbo;
		std::vector<BufferBlock> ibo;
		std::vector<BufferBlock> ubo;
	} dma;

	void submit_queue(CommandBuffer::Type type, VkFence *fence,
	                  unsigned semaphore_count = 0,
	                  Semaphore *semaphore = nullptr);

	PerFrame &frame()
	{
		VK_ASSERT(frame_context_index < per_frame.size());
		VK_ASSERT(per_frame[frame_context_index]);
		return *per_frame[frame_context_index];
	}

	const PerFrame &frame() const
	{
		VK_ASSERT(frame_context_index < per_frame.size());
		VK_ASSERT(per_frame[frame_context_index]);
		return *per_frame[frame_context_index];
	}

	unsigned frame_context_index = 0;
	uint32_t graphics_queue_family_index = 0;
	uint32_t compute_queue_family_index = 0;
	uint32_t transfer_queue_family_index = 0;

	uint32_t find_memory_type(BufferDomain domain, uint32_t mask);
	uint32_t find_memory_type(ImageDomain domain, uint32_t mask);
	bool memory_type_is_device_optimal(uint32_t type) const;
	bool memory_type_is_host_visible(uint32_t type) const;

	SamplerHandle samplers[static_cast<unsigned>(StockSampler::Count)];

	VulkanCache<PipelineLayout> pipeline_layouts;
	VulkanCache<DescriptorSetAllocator> descriptor_set_allocators;
	VulkanCache<RenderPass> render_passes;
	VulkanCache<Shader> shaders;
	VulkanCache<Program> programs;

	FramebufferAllocator framebuffer_allocator;
	TransientAttachmentAllocator transient_allocator;
	VkPipelineCache pipeline_cache = VK_NULL_HANDLE;

	SamplerHandle create_sampler(const SamplerCreateInfo &info, StockSampler sampler);
	void init_pipeline_cache();
	void flush_pipeline_cache();

	CommandPool &get_command_pool(CommandBuffer::Type type, unsigned thread);
	QueueData &get_queue_data(CommandBuffer::Type type);
	std::vector<CommandBufferHandle> &get_queue_submissions(CommandBuffer::Type type);
	void clear_wait_semaphores();
	void submit_staging(CommandBufferHandle &cmd, VkBufferUsageFlags usage, bool flush);
	PipelineEvent request_pipeline_event();

	std::function<void ()> queue_lock_callback;
	std::function<void ()> queue_unlock_callback;
	void flush_frame(CommandBuffer::Type type);
	void sync_buffer_blocks();
	void submit_empty_inner(CommandBuffer::Type type, VkFence *fence,
	                        unsigned semaphore_count,
	                        Semaphore *semaphore);

	void destroy_buffer(VkBuffer buffer);
	void destroy_image(VkImage image);
	void destroy_image_view(VkImageView view);
	void destroy_buffer_view(VkBufferView view);
	void destroy_pipeline(VkPipeline pipeline);
	void destroy_sampler(VkSampler sampler);
	void destroy_framebuffer(VkFramebuffer framebuffer);
	void destroy_semaphore(VkSemaphore semaphore);
	void recycle_semaphore(VkSemaphore semaphore);
	void destroy_event(VkEvent event);
	void free_memory(const DeviceAllocation &alloc);
	void reset_fence(VkFence fence);
	void keep_handle_alive(ImageHandle handle);

	void destroy_buffer_nolock(VkBuffer buffer);
	void destroy_image_nolock(VkImage image);
	void destroy_image_view_nolock(VkImageView view);
	void destroy_buffer_view_nolock(VkBufferView view);
	void destroy_pipeline_nolock(VkPipeline pipeline);
	void destroy_sampler_nolock(VkSampler sampler);
	void destroy_framebuffer_nolock(VkFramebuffer framebuffer);
	void destroy_semaphore_nolock(VkSemaphore semaphore);
	void recycle_semaphore_nolock(VkSemaphore semaphore);
	void destroy_event_nolock(VkEvent event);
	void free_memory_nolock(const DeviceAllocation &alloc);

	void flush_frame_nolock();
	CommandBufferHandle request_command_buffer_nolock(unsigned thread_index, CommandBuffer::Type type = CommandBuffer::Type::Generic);
	void submit_nolock(CommandBufferHandle cmd, Fence *fence,
	                   unsigned semaphore_count, Semaphore *semaphore);
	void submit_empty_nolock(CommandBuffer::Type type, Fence *fence,
	                         unsigned semaphore_count,
	                         Semaphore *semaphore);
	void add_wait_semaphore_nolock(CommandBuffer::Type type, Semaphore semaphore, VkPipelineStageFlags stages,
	                               bool flush);

	void request_vertex_block_nolock(BufferBlock &block, VkDeviceSize size);
	void request_index_block_nolock(BufferBlock &block, VkDeviceSize size);
	void request_uniform_block_nolock(BufferBlock &block, VkDeviceSize size);
	void request_staging_block_nolock(BufferBlock &block, VkDeviceSize size);

	CommandBufferHandle request_secondary_command_buffer_for_thread(unsigned thread_index,
	                                                                const Framebuffer *framebuffer,
	                                                                unsigned subpass,
	                                                                CommandBuffer::Type type = CommandBuffer::Type::Generic);
	void add_frame_counter_nolock();
	void decrement_frame_counter_nolock();
	void submit_secondary(CommandBuffer &primary, CommandBuffer &secondary);
	void wait_idle_nolock();
	void end_frame_nolock();

	Fence request_fence();

#ifdef GRANITE_VULKAN_FILESYSTEM
	ShaderManager shader_manager;
	TextureManager texture_manager;
#endif

	std::string get_pipeline_cache_string() const;

#ifdef GRANITE_VULKAN_FOSSILIZE
	Fossilize::StateRecorder state_recorder;
	std::mutex state_recorder_lock;
	bool enqueue_create_sampler(Fossilize::Hash hash, unsigned index, const VkSamplerCreateInfo *create_info, VkSampler *sampler) override;
	bool enqueue_create_descriptor_set_layout(Fossilize::Hash hash, unsigned index, const VkDescriptorSetLayoutCreateInfo *create_info, VkDescriptorSetLayout *layout) override;
	bool enqueue_create_pipeline_layout(Fossilize::Hash hash, unsigned index, const VkPipelineLayoutCreateInfo *create_info, VkPipelineLayout *layout) override;
	bool enqueue_create_shader_module(Fossilize::Hash hash, unsigned index, const VkShaderModuleCreateInfo *create_info, VkShaderModule *module) override;
	bool enqueue_create_render_pass(Fossilize::Hash hash, unsigned index, const VkRenderPassCreateInfo *create_info, VkRenderPass *render_pass) override;
	bool enqueue_create_compute_pipeline(Fossilize::Hash hash, unsigned index, const VkComputePipelineCreateInfo *create_info, VkPipeline *pipeline) override;
	bool enqueue_create_graphics_pipeline(Fossilize::Hash hash, unsigned index, const VkGraphicsPipelineCreateInfo *create_info, VkPipeline *pipeline) override;
	void wait_enqueue() override;
	VkPipeline fossilize_create_graphics_pipeline(Fossilize::Hash hash, VkGraphicsPipelineCreateInfo &info);
	VkPipeline fossilize_create_compute_pipeline(Fossilize::Hash hash, VkComputePipelineCreateInfo &info);

	unsigned register_graphics_pipeline(Fossilize::Hash hash, const VkGraphicsPipelineCreateInfo &info);
	unsigned register_compute_pipeline(Fossilize::Hash hash, const VkComputePipelineCreateInfo &info);
	unsigned register_render_pass(Fossilize::Hash hash, const VkRenderPassCreateInfo &info);
	unsigned register_descriptor_set_layout(Fossilize::Hash hash, const VkDescriptorSetLayoutCreateInfo &info);
	unsigned register_pipeline_layout(Fossilize::Hash hash, const VkPipelineLayoutCreateInfo &info);
	unsigned register_shader_module(Fossilize::Hash hash, const VkShaderModuleCreateInfo &info);

	void set_render_pass_handle(unsigned index, VkRenderPass render_pass);
	void set_descriptor_set_layout_handle(unsigned index, VkDescriptorSetLayout set_layout);
	void set_pipeline_layout_handle(unsigned index, VkPipelineLayout layout);
	void set_shader_module_handle(unsigned index, VkShaderModule module);

	struct
	{
		std::unordered_map<VkShaderModule, Shader *> shader_map;
		std::unordered_map<VkRenderPass, RenderPass *> render_pass_map;
#ifdef GRANITE_VULKAN_MT
		Granite::TaskGroup pipeline_group;
#endif
	} replayer_state;

	void init_pipeline_state();
	void flush_pipeline_state();
#endif

	ImplementationWorkarounds workarounds;
	void init_workarounds();
};
}