1 // Copyright 2016 Dolphin Emulator Project
2 // Licensed under GPLv2+
3 // Refer to the license.txt file included.
4
5 #include "VideoBackends/Vulkan/StateTracker.h"
6
7 #include "Common/Assert.h"
8
9 #include "VideoBackends/Vulkan/CommandBufferManager.h"
10 #include "VideoBackends/Vulkan/ObjectCache.h"
11 #include "VideoBackends/Vulkan/Renderer.h"
12 #include "VideoBackends/Vulkan/VKPipeline.h"
13 #include "VideoBackends/Vulkan/VKShader.h"
14 #include "VideoBackends/Vulkan/VKTexture.h"
15 #include "VideoBackends/Vulkan/VertexFormat.h"
16 #include "VideoBackends/Vulkan/VulkanContext.h"
17
18 namespace Vulkan
19 {
// File-scope singleton instance; managed exclusively through
// CreateInstance()/DestroyInstance() below.
static std::unique_ptr<StateTracker> s_state_tracker;

// Construction/destruction is trivial; all real setup happens in Initialize().
StateTracker::StateTracker() = default;

StateTracker::~StateTracker() = default;
25
GetInstance()26 StateTracker* StateTracker::GetInstance()
27 {
28 return s_state_tracker.get();
29 }
30
CreateInstance()31 bool StateTracker::CreateInstance()
32 {
33 ASSERT(!s_state_tracker);
34 s_state_tracker = std::make_unique<StateTracker>();
35 if (!s_state_tracker->Initialize())
36 {
37 s_state_tracker.reset();
38 return false;
39 }
40 return true;
41 }
42
DestroyInstance()43 void StateTracker::DestroyInstance()
44 {
45 if (!s_state_tracker)
46 return;
47
48 // When the dummy texture is destroyed, it unbinds itself, then references itself.
49 // Clear everything out so this doesn't happen.
50 for (auto& it : s_state_tracker->m_bindings.samplers)
51 it.imageView = VK_NULL_HANDLE;
52 s_state_tracker->m_bindings.image_texture.imageView = VK_NULL_HANDLE;
53 s_state_tracker->m_dummy_texture.reset();
54
55 s_state_tracker.reset();
56 }
57
Initialize()58 bool StateTracker::Initialize()
59 {
60 // Create a dummy texture which can be used in place of a real binding.
61 m_dummy_texture =
62 VKTexture::Create(TextureConfig(1, 1, 1, 1, 1, AbstractTextureFormat::RGBA8, 0));
63 if (!m_dummy_texture)
64 return false;
65 m_dummy_texture->TransitionToLayout(g_command_buffer_mgr->GetCurrentInitCommandBuffer(),
66 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
67
68 // Initialize all samplers to point by default
69 for (size_t i = 0; i < NUM_PIXEL_SHADER_SAMPLERS; i++)
70 {
71 m_bindings.samplers[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
72 m_bindings.samplers[i].imageView = m_dummy_texture->GetView();
73 m_bindings.samplers[i].sampler = g_object_cache->GetPointSampler();
74 }
75
76 // Default dirty flags include all descriptors
77 InvalidateCachedState();
78 return true;
79 }
80
SetVertexBuffer(VkBuffer buffer,VkDeviceSize offset)81 void StateTracker::SetVertexBuffer(VkBuffer buffer, VkDeviceSize offset)
82 {
83 if (m_vertex_buffer == buffer && m_vertex_buffer_offset == offset)
84 return;
85
86 m_vertex_buffer = buffer;
87 m_vertex_buffer_offset = offset;
88 m_dirty_flags |= DIRTY_FLAG_VERTEX_BUFFER;
89 }
90
SetIndexBuffer(VkBuffer buffer,VkDeviceSize offset,VkIndexType type)91 void StateTracker::SetIndexBuffer(VkBuffer buffer, VkDeviceSize offset, VkIndexType type)
92 {
93 if (m_index_buffer == buffer && m_index_buffer_offset == offset && m_index_type == type)
94 return;
95
96 m_index_buffer = buffer;
97 m_index_buffer_offset = offset;
98 m_index_type = type;
99 m_dirty_flags |= DIRTY_FLAG_INDEX_BUFFER;
100 }
101
SetFramebuffer(VKFramebuffer * framebuffer)102 void StateTracker::SetFramebuffer(VKFramebuffer* framebuffer)
103 {
104 // Should not be changed within a render pass.
105 ASSERT(!InRenderPass());
106 m_framebuffer = framebuffer;
107 }
108
SetPipeline(const VKPipeline * pipeline)109 void StateTracker::SetPipeline(const VKPipeline* pipeline)
110 {
111 if (m_pipeline == pipeline)
112 return;
113
114 // If the usage changes, we need to re-bind everything, as the layout is different.
115 const bool new_usage =
116 pipeline && (!m_pipeline || m_pipeline->GetUsage() != pipeline->GetUsage());
117
118 m_pipeline = pipeline;
119 m_dirty_flags |= DIRTY_FLAG_PIPELINE;
120 if (new_usage)
121 m_dirty_flags |= DIRTY_FLAG_DESCRIPTOR_SETS;
122 }
123
SetComputeShader(const VKShader * shader)124 void StateTracker::SetComputeShader(const VKShader* shader)
125 {
126 if (m_compute_shader == shader)
127 return;
128
129 m_compute_shader = shader;
130 m_dirty_flags |= DIRTY_FLAG_COMPUTE_SHADER;
131 }
132
SetGXUniformBuffer(u32 index,VkBuffer buffer,u32 offset,u32 size)133 void StateTracker::SetGXUniformBuffer(u32 index, VkBuffer buffer, u32 offset, u32 size)
134 {
135 auto& binding = m_bindings.gx_ubo_bindings[index];
136 if (binding.buffer != buffer || binding.range != size)
137 {
138 binding.buffer = buffer;
139 binding.range = size;
140 m_dirty_flags |= DIRTY_FLAG_GX_UBOS;
141 }
142
143 if (m_bindings.gx_ubo_offsets[index] != offset)
144 {
145 m_bindings.gx_ubo_offsets[index] = offset;
146 m_dirty_flags |= DIRTY_FLAG_GX_UBO_OFFSETS;
147 }
148 }
149
SetUtilityUniformBuffer(VkBuffer buffer,u32 offset,u32 size)150 void StateTracker::SetUtilityUniformBuffer(VkBuffer buffer, u32 offset, u32 size)
151 {
152 auto& binding = m_bindings.utility_ubo_binding;
153 if (binding.buffer != buffer || binding.range != size)
154 {
155 binding.buffer = buffer;
156 binding.range = size;
157 m_dirty_flags |= DIRTY_FLAG_UTILITY_UBO;
158 }
159
160 if (m_bindings.utility_ubo_offset != offset)
161 {
162 m_bindings.utility_ubo_offset = offset;
163 m_dirty_flags |= DIRTY_FLAG_UTILITY_UBO_OFFSET | DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
164 }
165 }
166
SetTexture(u32 index,VkImageView view)167 void StateTracker::SetTexture(u32 index, VkImageView view)
168 {
169 if (m_bindings.samplers[index].imageView == view)
170 return;
171
172 m_bindings.samplers[index].imageView = view;
173 m_bindings.samplers[index].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
174 m_dirty_flags |=
175 DIRTY_FLAG_GX_SAMPLERS | DIRTY_FLAG_UTILITY_BINDINGS | DIRTY_FLAG_COMPUTE_BINDINGS;
176 }
177
SetSampler(u32 index,VkSampler sampler)178 void StateTracker::SetSampler(u32 index, VkSampler sampler)
179 {
180 if (m_bindings.samplers[index].sampler == sampler)
181 return;
182
183 m_bindings.samplers[index].sampler = sampler;
184 m_dirty_flags |=
185 DIRTY_FLAG_GX_SAMPLERS | DIRTY_FLAG_UTILITY_BINDINGS | DIRTY_FLAG_COMPUTE_BINDINGS;
186 }
187
SetSSBO(VkBuffer buffer,VkDeviceSize offset,VkDeviceSize range)188 void StateTracker::SetSSBO(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize range)
189 {
190 if (m_bindings.ssbo.buffer == buffer && m_bindings.ssbo.offset == offset &&
191 m_bindings.ssbo.range == range)
192 {
193 return;
194 }
195
196 m_bindings.ssbo.buffer = buffer;
197 m_bindings.ssbo.offset = offset;
198 m_bindings.ssbo.range = range;
199 m_dirty_flags |= DIRTY_FLAG_GX_SSBO;
200 }
201
SetTexelBuffer(u32 index,VkBufferView view)202 void StateTracker::SetTexelBuffer(u32 index, VkBufferView view)
203 {
204 if (m_bindings.texel_buffers[index] == view)
205 return;
206
207 m_bindings.texel_buffers[index] = view;
208 m_dirty_flags |= DIRTY_FLAG_UTILITY_BINDINGS | DIRTY_FLAG_COMPUTE_BINDINGS;
209 }
210
SetImageTexture(VkImageView view)211 void StateTracker::SetImageTexture(VkImageView view)
212 {
213 if (m_bindings.image_texture.imageView == view)
214 return;
215
216 m_bindings.image_texture.imageView = view;
217 m_bindings.image_texture.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
218 m_dirty_flags |= DIRTY_FLAG_COMPUTE_BINDINGS;
219 }
220
UnbindTexture(VkImageView view)221 void StateTracker::UnbindTexture(VkImageView view)
222 {
223 for (VkDescriptorImageInfo& it : m_bindings.samplers)
224 {
225 if (it.imageView == view)
226 {
227 it.imageView = m_dummy_texture->GetView();
228 it.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
229 }
230 }
231
232 if (m_bindings.image_texture.imageView == view)
233 {
234 m_bindings.image_texture.imageView = m_dummy_texture->GetView();
235 m_bindings.image_texture.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
236 }
237 }
238
void StateTracker::InvalidateCachedState()
{
  // Drops all cached descriptor-set handles and marks every piece of state
  // dirty, so the next Bind()/BindCompute() re-records everything. Called
  // after command-buffer submission, when previously-recorded binds are gone.
  m_gx_descriptor_sets.fill(VK_NULL_HANDLE);
  m_utility_descriptor_sets.fill(VK_NULL_HANDLE);
  m_compute_descriptor_set = VK_NULL_HANDLE;
  m_dirty_flags |= DIRTY_FLAG_ALL_DESCRIPTORS | DIRTY_FLAG_VIEWPORT | DIRTY_FLAG_SCISSOR |
                   DIRTY_FLAG_PIPELINE | DIRTY_FLAG_COMPUTE_SHADER | DIRTY_FLAG_DESCRIPTOR_SETS |
                   DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
  // Vertex/index buffers only need re-binding if one was actually set before.
  if (m_vertex_buffer != VK_NULL_HANDLE)
    m_dirty_flags |= DIRTY_FLAG_VERTEX_BUFFER;
  if (m_index_buffer != VK_NULL_HANDLE)
    m_dirty_flags |= DIRTY_FLAG_INDEX_BUFFER;
}
252
BeginRenderPass()253 void StateTracker::BeginRenderPass()
254 {
255 if (InRenderPass())
256 return;
257
258 m_current_render_pass = m_framebuffer->GetLoadRenderPass();
259 m_framebuffer_render_area = m_framebuffer->GetRect();
260
261 VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
262 nullptr,
263 m_current_render_pass,
264 m_framebuffer->GetFB(),
265 m_framebuffer_render_area,
266 0,
267 nullptr};
268
269 vkCmdBeginRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer(), &begin_info,
270 VK_SUBPASS_CONTENTS_INLINE);
271 }
272
BeginDiscardRenderPass()273 void StateTracker::BeginDiscardRenderPass()
274 {
275 if (InRenderPass())
276 return;
277
278 m_current_render_pass = m_framebuffer->GetDiscardRenderPass();
279 m_framebuffer_render_area = m_framebuffer->GetRect();
280
281 VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
282 nullptr,
283 m_current_render_pass,
284 m_framebuffer->GetFB(),
285 m_framebuffer_render_area,
286 0,
287 nullptr};
288
289 vkCmdBeginRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer(), &begin_info,
290 VK_SUBPASS_CONTENTS_INLINE);
291 }
292
EndRenderPass()293 void StateTracker::EndRenderPass()
294 {
295 if (!InRenderPass())
296 return;
297
298 vkCmdEndRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer());
299 m_current_render_pass = VK_NULL_HANDLE;
300 }
301
BeginClearRenderPass(const VkRect2D & area,const VkClearValue * clear_values,u32 num_clear_values)302 void StateTracker::BeginClearRenderPass(const VkRect2D& area, const VkClearValue* clear_values,
303 u32 num_clear_values)
304 {
305 ASSERT(!InRenderPass());
306
307 m_current_render_pass = m_framebuffer->GetClearRenderPass();
308 m_framebuffer_render_area = area;
309
310 VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
311 nullptr,
312 m_current_render_pass,
313 m_framebuffer->GetFB(),
314 m_framebuffer_render_area,
315 num_clear_values,
316 clear_values};
317
318 vkCmdBeginRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer(), &begin_info,
319 VK_SUBPASS_CONTENTS_INLINE);
320 }
321
SetViewport(const VkViewport & viewport)322 void StateTracker::SetViewport(const VkViewport& viewport)
323 {
324 if (memcmp(&m_viewport, &viewport, sizeof(viewport)) == 0)
325 return;
326
327 m_viewport = viewport;
328 m_dirty_flags |= DIRTY_FLAG_VIEWPORT;
329 }
330
SetScissor(const VkRect2D & scissor)331 void StateTracker::SetScissor(const VkRect2D& scissor)
332 {
333 if (memcmp(&m_scissor, &scissor, sizeof(scissor)) == 0)
334 return;
335
336 m_scissor = scissor;
337 m_dirty_flags |= DIRTY_FLAG_SCISSOR;
338 }
339
bool StateTracker::Bind()
{
  // Prepares all cached graphics state for a draw: refreshes descriptor sets,
  // ensures a render pass is active, and re-records any dirty binds into the
  // current command buffer. Returns false if the draw should be skipped.
  // Must have a pipeline.
  if (!m_pipeline)
    return false;

  // Check the render area if we were in a clear pass.
  // Drawing outside a clear pass's render area is invalid; end the pass so
  // BeginRenderPass() below switches to the load pass instead.
  if (m_current_render_pass == m_framebuffer->GetClearRenderPass() && !IsViewportWithinRenderArea())
    EndRenderPass();

  // Get a new descriptor set if any parts have changed
  if (!UpdateDescriptorSet())
  {
    // We can fail to allocate descriptors if we exhaust the pool for this command buffer.
    // Submitting the current buffer switches to a fresh one (and pool), then retry once.
    WARN_LOG(VIDEO, "Failed to get a descriptor set, executing buffer");
    Renderer::GetInstance()->ExecuteCommandBuffer(false, false);
    if (!UpdateDescriptorSet())
    {
      // Something strange going on.
      ERROR_LOG(VIDEO, "Failed to get descriptor set, skipping draw");
      return false;
    }
  }

  // Start render pass if not already started
  if (!InRenderPass())
    BeginRenderPass();

  // Re-bind parts of the pipeline
  const VkCommandBuffer command_buffer = g_command_buffer_mgr->GetCurrentCommandBuffer();
  if (m_dirty_flags & DIRTY_FLAG_VERTEX_BUFFER)
    vkCmdBindVertexBuffers(command_buffer, 0, 1, &m_vertex_buffer, &m_vertex_buffer_offset);

  if (m_dirty_flags & DIRTY_FLAG_INDEX_BUFFER)
    vkCmdBindIndexBuffer(command_buffer, m_index_buffer, m_index_buffer_offset, m_index_type);

  if (m_dirty_flags & DIRTY_FLAG_PIPELINE)
    vkCmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipeline());

  if (m_dirty_flags & DIRTY_FLAG_VIEWPORT)
    vkCmdSetViewport(command_buffer, 0, 1, &m_viewport);

  if (m_dirty_flags & DIRTY_FLAG_SCISSOR)
    vkCmdSetScissor(command_buffer, 0, 1, &m_scissor);

  // Everything just recorded is now clean.
  m_dirty_flags &= ~(DIRTY_FLAG_VERTEX_BUFFER | DIRTY_FLAG_INDEX_BUFFER | DIRTY_FLAG_PIPELINE |
                     DIRTY_FLAG_VIEWPORT | DIRTY_FLAG_SCISSOR);
  return true;
}
389
bool StateTracker::BindCompute()
{
  // Prepares compute state for a dispatch: binds the compute pipeline if dirty
  // and refreshes the compute descriptor set. Returns false if the dispatch
  // should be skipped.
  if (!m_compute_shader)
    return false;

  // Can't kick compute in a render pass.
  if (InRenderPass())
    EndRenderPass();

  const VkCommandBuffer command_buffer = g_command_buffer_mgr->GetCurrentCommandBuffer();
  if (m_dirty_flags & DIRTY_FLAG_COMPUTE_SHADER)
  {
    vkCmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
                      m_compute_shader->GetComputePipeline());
  }

  // NOTE(review): if ExecuteCommandBuffer() below fires, `command_buffer` and the
  // pipeline bind above belong to the submitted buffer; this relies on the
  // invalidation path re-dirtying state for the new buffer — verify the retry path.
  if (!UpdateComputeDescriptorSet())
  {
    // Descriptor pool for this command buffer may be exhausted; submit and retry once.
    WARN_LOG(VIDEO, "Failed to get a compute descriptor set, executing buffer");
    Renderer::GetInstance()->ExecuteCommandBuffer(false, false);
    if (!UpdateComputeDescriptorSet())
    {
      // Something strange going on.
      ERROR_LOG(VIDEO, "Failed to get descriptor set, skipping dispatch");
      return false;
    }
  }

  m_dirty_flags &= ~DIRTY_FLAG_COMPUTE_SHADER;
  return true;
}
421
IsWithinRenderArea(s32 x,s32 y,u32 width,u32 height) const422 bool StateTracker::IsWithinRenderArea(s32 x, s32 y, u32 width, u32 height) const
423 {
424 // Check that the viewport does not lie outside the render area.
425 // If it does, we need to switch to a normal load/store render pass.
426 s32 left = m_framebuffer_render_area.offset.x;
427 s32 top = m_framebuffer_render_area.offset.y;
428 s32 right = left + static_cast<s32>(m_framebuffer_render_area.extent.width);
429 s32 bottom = top + static_cast<s32>(m_framebuffer_render_area.extent.height);
430 s32 test_left = x;
431 s32 test_top = y;
432 s32 test_right = test_left + static_cast<s32>(width);
433 s32 test_bottom = test_top + static_cast<s32>(height);
434 return test_left >= left && test_right <= right && test_top >= top && test_bottom <= bottom;
435 }
436
IsViewportWithinRenderArea() const437 bool StateTracker::IsViewportWithinRenderArea() const
438 {
439 return IsWithinRenderArea(static_cast<s32>(m_viewport.x), static_cast<s32>(m_viewport.y),
440 static_cast<u32>(m_viewport.width),
441 static_cast<u32>(m_viewport.height));
442 }
443
EndClearRenderPass()444 void StateTracker::EndClearRenderPass()
445 {
446 if (m_current_render_pass != m_framebuffer->GetClearRenderPass())
447 return;
448
449 // End clear render pass. Bind() will call BeginRenderPass() which
450 // will switch to the load/store render pass.
451 EndRenderPass();
452 }
453
UpdateDescriptorSet()454 bool StateTracker::UpdateDescriptorSet()
455 {
456 if (m_pipeline->GetUsage() == AbstractPipelineUsage::GX)
457 return UpdateGXDescriptorSet();
458 else
459 return UpdateUtilityDescriptorSet();
460 }
461
bool StateTracker::UpdateGXDescriptorSet()
{
  // Refreshes the three GX descriptor sets (dynamic UBOs, samplers, SSBO) and
  // binds whichever changed. Returns false if descriptor allocation fails, so
  // the caller can submit the buffer and retry from a fresh pool.
  const size_t MAX_DESCRIPTOR_WRITES = NUM_UBO_DESCRIPTOR_SET_BINDINGS + // UBO
                                       1 +                               // Samplers
                                       1;                                // SSBO
  std::array<VkWriteDescriptorSet, MAX_DESCRIPTOR_WRITES> writes;
  u32 num_writes = 0;

  // Set 0: dynamic uniform buffers (VS/GS/PS). Re-allocated when the buffer
  // binding changed or after invalidation (handle is null).
  if (m_dirty_flags & DIRTY_FLAG_GX_UBOS || m_gx_descriptor_sets[0] == VK_NULL_HANDLE)
  {
    m_gx_descriptor_sets[0] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_STANDARD_UNIFORM_BUFFERS));
    if (m_gx_descriptor_sets[0] == VK_NULL_HANDLE)
      return false;

    for (size_t i = 0; i < NUM_UBO_DESCRIPTOR_SET_BINDINGS; i++)
    {
      // The GS binding is absent from the layout when geometry shaders are
      // unsupported, so it must be skipped here too.
      if (i == UBO_DESCRIPTOR_SET_BINDING_GS &&
          !g_ActiveConfig.backend_info.bSupportsGeometryShaders)
      {
        continue;
      }

      writes[num_writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                              nullptr,
                              m_gx_descriptor_sets[0],
                              static_cast<uint32_t>(i),
                              0,
                              1,
                              VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                              nullptr,
                              &m_bindings.gx_ubo_bindings[i],
                              nullptr};
    }

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_UBOS) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  // Set 1: combined image samplers, written as one array update.
  if (m_dirty_flags & DIRTY_FLAG_GX_SAMPLERS || m_gx_descriptor_sets[1] == VK_NULL_HANDLE)
  {
    m_gx_descriptor_sets[1] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_STANDARD_SAMPLERS));
    if (m_gx_descriptor_sets[1] == VK_NULL_HANDLE)
      return false;

    writes[num_writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                            nullptr,
                            m_gx_descriptor_sets[1],
                            0,
                            0,
                            static_cast<u32>(NUM_PIXEL_SHADER_SAMPLERS),
                            VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                            m_bindings.samplers.data(),
                            nullptr,
                            nullptr};
    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_SAMPLERS) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  // Set 2: bounding-box SSBO, only when the backend supports it.
  if (g_ActiveConfig.backend_info.bSupportsBBox &&
      (m_dirty_flags & DIRTY_FLAG_GX_SSBO || m_gx_descriptor_sets[2] == VK_NULL_HANDLE))
  {
    m_gx_descriptor_sets[2] =
        g_command_buffer_mgr->AllocateDescriptorSet(g_object_cache->GetDescriptorSetLayout(
            DESCRIPTOR_SET_LAYOUT_STANDARD_SHADER_STORAGE_BUFFERS));
    if (m_gx_descriptor_sets[2] == VK_NULL_HANDLE)
      return false;

    writes[num_writes++] = {
        VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, nullptr, m_gx_descriptor_sets[2], 0, 0, 1,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,      nullptr, &m_bindings.ssbo,        nullptr};
    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_SSBO) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  if (num_writes > 0)
    vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), num_writes, writes.data(), 0, nullptr);

  if (m_dirty_flags & DIRTY_FLAG_DESCRIPTOR_SETS)
  {
    // At least one set was re-allocated: re-bind all of them (the SSBO set is
    // omitted when bounding box is unsupported). Dynamic UBO offsets are
    // supplied here as well, so the offset dirty flag is cleared too.
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            g_ActiveConfig.backend_info.bSupportsBBox ?
                                NUM_GX_DESCRIPTOR_SETS :
                                (NUM_GX_DESCRIPTOR_SETS - 1),
                            m_gx_descriptor_sets.data(), NUM_UBO_DESCRIPTOR_SET_BINDINGS,
                            m_bindings.gx_ubo_offsets.data());
    m_dirty_flags &= ~(DIRTY_FLAG_DESCRIPTOR_SETS | DIRTY_FLAG_GX_UBO_OFFSETS);
  }
  else if (m_dirty_flags & DIRTY_FLAG_GX_UBO_OFFSETS)
  {
    // Only the dynamic offsets changed: re-bind just the UBO set (set 0).
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            1, m_gx_descriptor_sets.data(), NUM_UBO_DESCRIPTOR_SET_BINDINGS,
                            m_bindings.gx_ubo_offsets.data());
    m_dirty_flags &= ~DIRTY_FLAG_GX_UBO_OFFSETS;
  }

  return true;
}
560
bool StateTracker::UpdateUtilityDescriptorSet()
{
  // Refreshes the two utility descriptor sets (dynamic UBO; samplers + texel
  // buffers) and binds whichever changed. Returns false on allocation failure
  // so the caller can submit the command buffer and retry.
  // Max number of updates - UBO, Samplers, TexelBuffer
  std::array<VkWriteDescriptorSet, 3> dswrites;
  u32 writes = 0;

  // Set 0: single dynamic uniform buffer.
  if (m_dirty_flags & DIRTY_FLAG_UTILITY_UBO || m_utility_descriptor_sets[0] == VK_NULL_HANDLE)
  {
    m_utility_descriptor_sets[0] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_UTILITY_UNIFORM_BUFFER));
    if (!m_utility_descriptor_sets[0])
      return false;

    dswrites[writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                          nullptr,
                          m_utility_descriptor_sets[0],
                          0,
                          0,
                          1,
                          VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                          nullptr,
                          &m_bindings.utility_ubo_binding,
                          nullptr};

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_UTILITY_UBO) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  // Set 1: sampler array at binding 0 plus a texel buffer at binding 8 (the
  // binding index after the sampler slots in the utility layout).
  if (m_dirty_flags & DIRTY_FLAG_UTILITY_BINDINGS || m_utility_descriptor_sets[1] == VK_NULL_HANDLE)
  {
    m_utility_descriptor_sets[1] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_UTILITY_SAMPLERS));
    if (!m_utility_descriptor_sets[1])
      return false;

    dswrites[writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                          nullptr,
                          m_utility_descriptor_sets[1],
                          0,
                          0,
                          NUM_PIXEL_SHADER_SAMPLERS,
                          VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                          m_bindings.samplers.data(),
                          nullptr,
                          nullptr};
    dswrites[writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                          nullptr,
                          m_utility_descriptor_sets[1],
                          8,
                          0,
                          1,
                          VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
                          nullptr,
                          nullptr,
                          m_bindings.texel_buffers.data()};

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_UTILITY_BINDINGS) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  if (writes > 0)
    vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), writes, dswrites.data(), 0, nullptr);

  if (m_dirty_flags & DIRTY_FLAG_DESCRIPTOR_SETS)
  {
    // A set was re-allocated: bind both sets, supplying the dynamic UBO offset.
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            NUM_UTILITY_DESCRIPTOR_SETS, m_utility_descriptor_sets.data(), 1,
                            &m_bindings.utility_ubo_offset);
    m_dirty_flags &= ~(DIRTY_FLAG_DESCRIPTOR_SETS | DIRTY_FLAG_UTILITY_UBO_OFFSET);
  }
  else if (m_dirty_flags & DIRTY_FLAG_UTILITY_UBO_OFFSET)
  {
    // Only the dynamic offset changed: re-bind just the UBO set (set 0).
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            1, m_utility_descriptor_sets.data(), 1, &m_bindings.utility_ubo_offset);
    m_dirty_flags &= ~(DIRTY_FLAG_DESCRIPTOR_SETS | DIRTY_FLAG_UTILITY_UBO_OFFSET);
  }

  return true;
}
641
UpdateComputeDescriptorSet()642 bool StateTracker::UpdateComputeDescriptorSet()
643 {
644 // Max number of updates - UBO, Samplers, TexelBuffer, Image
645 std::array<VkWriteDescriptorSet, 4> dswrites;
646
647 // Allocate descriptor sets.
648 if (m_dirty_flags & DIRTY_FLAG_COMPUTE_BINDINGS)
649 {
650 m_compute_descriptor_set = g_command_buffer_mgr->AllocateDescriptorSet(
651 g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_COMPUTE));
652 dswrites[0] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
653 nullptr,
654 m_compute_descriptor_set,
655 0,
656 0,
657 1,
658 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
659 nullptr,
660 &m_bindings.utility_ubo_binding,
661 nullptr};
662 dswrites[1] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
663 nullptr,
664 m_compute_descriptor_set,
665 1,
666 0,
667 NUM_COMPUTE_SHADER_SAMPLERS,
668 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
669 m_bindings.samplers.data(),
670 nullptr,
671 nullptr};
672 dswrites[2] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
673 nullptr,
674 m_compute_descriptor_set,
675 3,
676 0,
677 NUM_COMPUTE_TEXEL_BUFFERS,
678 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
679 nullptr,
680 nullptr,
681 m_bindings.texel_buffers.data()};
682 dswrites[3] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
683 nullptr,
684 m_compute_descriptor_set,
685 5,
686 0,
687 1,
688 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
689 &m_bindings.image_texture,
690 nullptr,
691 nullptr};
692
693 vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), static_cast<uint32_t>(dswrites.size()),
694 dswrites.data(), 0, nullptr);
695 m_dirty_flags =
696 (m_dirty_flags & ~DIRTY_FLAG_COMPUTE_BINDINGS) | DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
697 }
698
699 if (m_dirty_flags & DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET)
700 {
701 vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
702 VK_PIPELINE_BIND_POINT_COMPUTE,
703 g_object_cache->GetPipelineLayout(PIPELINE_LAYOUT_COMPUTE), 0, 1,
704 &m_compute_descriptor_set, 1, &m_bindings.utility_ubo_offset);
705 m_dirty_flags &= ~DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
706 }
707
708 return true;
709 }
710
711 } // namespace Vulkan
712